content (stringlengths 0–894k) | type (stringclasses, 2 values) |
---|---|
# python3
def fibo(n):
a = 0
if n==0:
return a
b = 1
if n==1:
return b
for i in range(2,n+1):
c = (a+b)%10
a = b
b = c
return c
if __name__ == '__main__':
n = int(input())
print(fibo(n))
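# Note (added comment): because of the `% 10`, fibo(n) returns only the last
# digit of the n-th Fibonacci number, e.g. fibo(10) == 5 since Fib(10) = 55.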
| python |
access_token = "1115607808185995264-em8QLLFJ6ESWiVRM5G77euAA0rmaxU"
access_token_secret = "pnfdtIsloJsg9huAUb8mVAMApYqv9fyiJRqdTaJwkYvS0"
consumer_key = "wM7VnB9KDsU1ZiezePZmyRSZo"
consumer_secret = "0Vd3EiWZQppmOTkd8s8lTynU1T9rBs5auMQQvJy9xNE1O49yXJ"
filename = "/Users/tanujsinghal/Documents/trained_models/toxic-text-analyser/tweetsa.txt"
| python |
from . import db
# The class that corresponds to the database table for decision reasons.
class DecisionReason(db.Model):
__tablename__ = 'decision_reason'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.VARCHAR(1024))
reason = db.Column(db.VARCHAR(1024))
| python |
from mastodon import Mastodon
from Zodiac import Zodiac
def announce(user_id, domain, zodiac):
if zodiac.bot_access_token in ('', None) or zodiac.bot_base_url in ('', None):
return
bot = Mastodon(access_token=zodiac.bot_access_token, api_base_url=zodiac.bot_base_url)
status = '%s@%s が参加しました!' % (user_id, domain)
try:
bot.status_post(status=status, visibility='unlisted')
except Exception:
pass
| python |
import pytest
from mypy_boto3_s3 import S3Client
from dc_logging_client.log_client import DummyLoggingClient, DCWidePostcodeLoggingClient
from dc_logging_client.log_entries import PostcodeLogEntry
def test_log_client_init_errors():
with pytest.raises(ValueError) as e_info:
DummyLoggingClient(fake=False)
assert str(e_info.value) == """`assume_role_arn` when not faking"""
assert DummyLoggingClient(fake=True)
def test_log_client_with_env_var(log_stream_arn_env, dc_wide_postcode_log_stream):
assert DCWidePostcodeLoggingClient(fake=False)
logger = DCWidePostcodeLoggingClient()
entry = logger.entry_class(dc_product=logger.dc_product.wcivf, postcode="SW1A 1AA")
logger.log(entry)
def test_log_client_init_working(firehose, sts, example_arn):
assert DummyLoggingClient(
fake=False,
assume_role_arn=example_arn,
)
def _read_log(s3_client, bucket_name):
key = s3_client.list_objects(Bucket=bucket_name)["Contents"][0]["Key"]
s3_client.get_object(Key=key, Bucket=bucket_name)
return s3_client.get_object(Key=key, Bucket=bucket_name)["Body"].read()
def test_log(dummy_log_stream: S3Client, example_arn):
logger = DummyLoggingClient(assume_role_arn=example_arn)
logger.log(logger.entry_class(text="test", dc_product=logger.dc_product.wcivf))
log = _read_log(dummy_log_stream, "firehose-test")
assert (
log
== b"""{"dc_product": "WCIVF", "text": "test", "utm_campaign": "", "utm_medium": "", "utm_source": ""}\n"""
)
def test_log_invalid_entry(dummy_log_stream, example_arn):
logger = DummyLoggingClient(assume_role_arn=example_arn)
with pytest.raises(ValueError) as e_info:
logger.log(
PostcodeLogEntry(postcode="SW1A 1AA", dc_product=logger.dc_product.wcivf)
)
assert str(e_info.value) == (
"""<class 'dc_logging_client.log_entries.PostcodeLogEntry'>"""
""" isn't a valid log entry for stream 'dummy'"""
)
with pytest.raises(ValueError) as e_info:
logger.log(logger.entry_class(text="test", dc_product="new product")) # type: ignore
assert str(e_info.value) == ("""'new product' is not currently supported""")
def test_log_batch(dummy_log_stream, example_arn):
logger = DummyLoggingClient(assume_role_arn=example_arn)
entries = [
logger.entry_class(text="test1", dc_product=logger.dc_product.wcivf),
logger.entry_class(text="test2", dc_product=logger.dc_product.wdiv),
logger.entry_class(text="test3", dc_product=logger.dc_product.aggregator_api),
]
logger.log_batch(entries)
log = _read_log(dummy_log_stream, "firehose-test")
assert (
log
== b"""{"dc_product": "WCIVF", "text": "test1", "utm_campaign": "", "utm_medium": "", "utm_source": ""}\n{"dc_product": "WDIV", "text": "test2", "utm_campaign": "", "utm_medium": "", "utm_source": ""}\n{"dc_product": "AGGREGATOR_API", "text": "test3", "utm_campaign": "", "utm_medium": "", "utm_source": ""}\n"""
)
| python |
import fileinput
TAGS = {"[": "]", "(": ")", "<": ">", "{": "}"}
def find_illegal(line):
stack = []
for c in line:
if c in TAGS:
stack.append(c)
else:
expected = TAGS[stack.pop()]
if c != expected:
return c
return None
def find_completion(line):
stack = []
for c in line:
if c in TAGS:
stack.append(c)
else:
expected = TAGS[stack.pop()]
if c != expected:
return c
return "".join(TAGS[c] for c in stack[::-1])
def score_completion(completion):
score = 0
points = {")": 1, "]": 2, "}": 3, ">": 4}
for c in completion:
score *= 5
score += points[c]
return score
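# Worked example (added for clarity): score_completion("])}>") == 294,
# since 0*5+2 = 2, 2*5+1 = 11, 11*5+3 = 58, 58*5+4 = 294.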
def part1(lines):
illegal = [c for l in lines if (c := find_illegal(l))]
points = {")": 3, "]": 57, "}": 1197, ">": 25137}
return sum(points[c] for c in illegal)
def part2(lines):
incomplete = [l for l in lines if not find_illegal(l)]
completions = [find_completion(l) for l in incomplete]
scores = [score_completion(c) for c in completions]
return sorted(scores)[len(scores) // 2]
def main():
with fileinput.input("input") as f:
lines = [l.strip() for l in f]
print(part1(lines))
print(part2(lines))
if __name__ == "__main__":
main()
| python |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# Copyright (C) 2013-2014 Craig Phillips. All rights reserved.
"""Remote file synchronisation"""
import os, re, datetime
from libgsync.output import verbose, debug, itemize, Progress
from libgsync.sync import SyncType
from libgsync.sync.file import SyncFile, SyncFileInfo
from libgsync.options import GsyncOptions
from apiclient.http import MediaIoBaseUpload, MediaUploadProgress
from libgsync.drive import Drive
from dateutil.tz import tzutc
class SyncFileRemote(SyncFile):
"""SyncFileRemote implementation for the SyncFile adapter"""
def __init__(self, path):
super(SyncFileRemote, self).__init__(path)
self._path = self.normpath(path)
def __repr__(self):
return "SyncFileRemote(%s)" % repr(self._path)
def sync_type(self):
return SyncType.REMOTE
def normpath(self, path):
return Drive().normpath(path)
def strippath(self, path):
"""Strips path of the 'drive://' prefix using the Drive() method"""
return Drive().strippath(path)
def get_path(self, path = None):
if path is None or path == "":
return self._path
stripped_path = self.strippath(self._path)
stripped_rel_path = self.strippath(path)
debug("Joining: %s with %s" % (
repr(stripped_path), repr(stripped_rel_path))
)
ret = self.normpath(os.path.join(stripped_path, stripped_rel_path))
debug(" * got: %s" % repr(ret))
return ret
def get_uploader(self, path = None):
info = self.get_info(path)
if info is None:
raise Exception("Could not obtain file information: %s" % path)
path = self.get_path(path)
drive = Drive()
debug("Opening remote file for reading: %s" % repr(path))
fd = drive.open(path, "r")
if fd is None:
raise Exception("Open failed: %s" % path)
return MediaIoBaseUpload(fd, info.mimeType, resumable=True)
def get_info(self, path = None):
path = self.get_path(path)
debug("Fetching remote file metadata: %s" % repr(path))
# The Drive() instance is self caching.
drive = Drive()
info = drive.stat(path)
if info is None:
debug("File not found: %s" % repr(path))
return None
info = SyncFileInfo(**info)
debug("Remote file = %s" % repr(info), 3)
debug("Remote mtime: %s" % repr(info.modifiedDate))
return info
def _create_dir(self, path, src = None):
debug("Creating remote directory: %s" % repr(path))
if not GsyncOptions.dry_run:
drive = Drive()
drive.mkdir(path)
def _create_symlink(self, path, src):
debug("Creating remote symlink: %s" % repr(path))
if not GsyncOptions.dry_run:
#link_source = src.
#os.symlink(, path)
pass
def _create_file(self, path, src):
debug("Creating remote file: %s" % repr(path))
if GsyncOptions.dry_run:
return
drive = Drive()
info = drive.create(path, src.get_info())
if info is None:
debug("Creation failed")
def _update_dir(self, path, src):
pass
def _update_data(self, path, src):
debug("Updating remote file: %s" % repr(path))
total_bytes_written = self.bytes_written
bytes_written = 0
info = src.get_info()
def __callback(status):
bytes_written = int(status.resumable_progress)
self.bytes_written = total_bytes_written + bytes_written
progress = Progress(GsyncOptions.progress, __callback)
if GsyncOptions.dry_run:
bytes_written = info.fileSize
progress(MediaUploadProgress(bytes_written, bytes_written))
else:
progress.bytesTotal = info.fileSize
drive = Drive()
info = drive.update(
path, info, media_body=src.get_uploader(),
progress_callback=progress
)
if info is not None:
bytes_written = long(info.get('fileSize', '0'))
debug("Final file size: %d" % bytes_written)
else:
debug("Update failed")
progress.complete(bytes_written)
self.bytes_written = total_bytes_written + bytes_written
def _update_attrs(self, path, src, attrs):
debug("Updating remote file attrs: %s" % repr(path))
if GsyncOptions.dry_run:
return
info = self.get_info(path)
if not info:
return
st_info = list(tuple(info.statInfo))
if attrs.mode is not None:
st_info[0] = attrs.mode
if attrs.uid is not None:
st_info[4] = attrs.uid
if attrs.gid is not None:
st_info[5] = attrs.gid
if attrs.atime is not None:
st_info[7] = attrs.atime
info.set_stat_info(st_info)
mtime_utc = datetime.datetime.utcfromtimestamp(
#attrs.mtime).isoformat()
#attrs.mtime).replace(tzinfo=tzutc()).isoformat()
attrs.mtime).replace(tzinfo=tzutc()).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
Drive().update(path, properties = {
'description': info.description,
'modifiedDate': mtime_utc,
}, options = {
'setModifiedDate': GsyncOptions.times
})
| python |
"""Collection of helper methods.
All containing methods are legacy helpers that should not be used by new
components. Instead call the service directly.
"""
from homeassistant.components.group import (
ATTR_ADD_ENTITIES, ATTR_CONTROL, ATTR_ENTITIES, ATTR_OBJECT_ID, ATTR_VIEW,
ATTR_VISIBLE, DOMAIN, SERVICE_REMOVE, SERVICE_SET, SERVICE_SET_VISIBILITY)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_ICON, ATTR_NAME, SERVICE_RELOAD)
from homeassistant.core import callback
from homeassistant.loader import bind_hass
@bind_hass
def reload(hass):
"""Reload the automation from config."""
hass.add_job(async_reload, hass)
@callback
@bind_hass
def async_reload(hass):
"""Reload the automation from config."""
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_RELOAD))
@bind_hass
def set_group(hass, object_id, name=None, entity_ids=None, visible=None,
icon=None, view=None, control=None, add=None):
"""Create/Update a group."""
hass.add_job(
async_set_group, hass, object_id, name, entity_ids, visible, icon,
view, control, add)
@callback
@bind_hass
def async_set_group(hass, object_id, name=None, entity_ids=None, visible=None,
icon=None, view=None, control=None, add=None):
"""Create/Update a group."""
data = {
key: value for key, value in [
(ATTR_OBJECT_ID, object_id),
(ATTR_NAME, name),
(ATTR_ENTITIES, entity_ids),
(ATTR_VISIBLE, visible),
(ATTR_ICON, icon),
(ATTR_VIEW, view),
(ATTR_CONTROL, control),
(ATTR_ADD_ENTITIES, add),
] if value is not None
}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_SET, data))
@callback
@bind_hass
def async_remove(hass, object_id):
"""Remove a user group."""
data = {ATTR_OBJECT_ID: object_id}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_REMOVE, data))
@bind_hass
def set_visibility(hass, entity_id=None, visible=True):
"""Hide or shows a group."""
data = {ATTR_ENTITY_ID: entity_id, ATTR_VISIBLE: visible}
hass.services.call(DOMAIN, SERVICE_SET_VISIBILITY, data)
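# Illustrative sketch (added; not part of the original module): new components
# should call the group service directly instead of using these legacy
# helpers. Assuming a `hass` instance and a hypothetical entity id, the direct
# equivalent of set_visibility() above is roughly:
#
#   hass.services.call(DOMAIN, SERVICE_SET_VISIBILITY,
#                      {ATTR_ENTITY_ID: "group.kitchen", ATTR_VISIBLE: False})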
| python |
import FWCore.ParameterSet.Config as cms
patEventContentNoCleaning = [
'keep *_selectedPatPhotons*_*_*',
'keep *_selectedPatOOTPhotons*_*_*',
'keep *_selectedPatElectrons*_*_*',
'keep *_selectedPatMuons*_*_*',
'keep *_selectedPatTaus*_*_*',
'keep *_selectedPatJets*_*_*',
'drop *_*PF_caloTowers_*',
'drop *_*JPT_pfCandidates_*',
'drop *_*Calo_pfCandidates_*',
'keep *_patMETs*_*_*',
'keep *_selectedPatPFParticles*_*_*',
'keep *_selectedPatTrackCands*_*_*'
]
patEventContent = [
'keep *_selectedPatJets*_*_*', ## keep refactorized pat jet elements
'drop patJets_selectedPatJets*_*_*', ## drop the actual selected pat jets, they're redundant
'drop *_selectedPatJets_pfCandidates_*', ## drop for default patJets which are CaloJets
'drop *_*PF_caloTowers_*', ## drop collections not needed for the corresponding jet types
'drop *_*JPT_pfCandidates_*', ## drop collections not needed for the corresponding jet types
'drop *_*Calo_pfCandidates_*', ## drop collections not needed for the corresponding jet types
'keep *_cleanPatPhotons*_*_*',
'keep *_cleanPatElectrons*_*_*',
'keep *_cleanPatMuons*_*_*',
'keep *_cleanPatTaus*_*_*',
'keep *_cleanPatJets*_*_*',
'keep *_patMETs*_*_*',
'keep *_cleanPatHemispheres*_*_*',
'keep *_cleanPatPFParticles*_*_*',
'keep *_cleanPatTrackCands*_*_*'
]
patExtraAodEventContent = [
# GEN
'keep recoGenParticles_genParticles*_*_*',
'keep GenEventInfoProduct_*_*_*',
'keep GenRunInfoProduct_*_*_*',
# RECO
'keep recoTracks_generalTracks*_*_*',
'keep *_towerMaker_*_*',
'keep *_offlineBeamSpot_*_*',
'keep *_offlinePrimaryVertices*_*_*',
# TRIGGER
'keep edmTriggerResults_TriggerResults*_*_*',
'keep *_hltTriggerSummaryAOD_*_*',
'keep L1GlobalTriggerReadoutRecord_gtDigis_*_*',
# COND
'keep edmConditionsIn*Block_conditionsInEdm_*_*'
]
patTriggerEventContent = [
'keep patTriggerAlgorithms_patTrigger_*_*',
'keep patTriggerConditions_patTrigger_*_*',
'keep patTriggerObjects_patTrigger_*_*',
'keep patTriggerFilters_patTrigger_*_*',
'keep patTriggerPaths_patTrigger_*_*',
'keep *_patTriggerEvent_*_*'
]
patTriggerStandAloneEventContent = [
'keep patTriggerObjectStandAlones_patTrigger_*_*',
'keep patTriggerObjectStandAlonesedmAssociation_*_*_*'
]
patTriggerL1RefsEventContent = [
'keep *_l1extraParticles_*_*',
'keep *_gctDigis_*_*'
]
patEventContentTriggerMatch = [
'keep *_*PatPhotons*TriggerMatch_*_*',
'keep *_*PatElectrons*TriggerMatch_*_*',
'keep *_*PatMuons*TriggerMatch_*_*',
'keep *_*PatTaus*TriggerMatch_*_*',
'keep *_*PatJets*TriggerMatch_*_*',
'keep *_patMETs*TriggerMatch_*_*'
]
patHiEventContent = [
'keep patPhotons_selected*_*_*',
'keep patMuons_selected*_*_*',
'keep patJets_selected*_*_*',
'keep patHeavyIon_heavyIon_*_*'
]
patHiExtraAodEventContent = [
'keep recoGenParticles_hiGenParticles*_*_*',
'keep recoGenJets_iterativeCone5HiGenJets*_*_*', # until a better solution
'keep recoTracks_hiSelectedTracks*_*_*'
]
| python |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Extract S3 OLCI SNOW processor results from S3 OLCI images
Written by Maxim Lamare
"""
import sys
from pathlib import Path
from argparse import ArgumentParser, ArgumentTypeError
import csv
import pandas as pd
from datetime import datetime
import re
from snappy_funcs import getS3values
def str2bool(instring):
"""Convert string to boolean.
Converts an input from a given list of possible inputs to the corresponding
boolean.
Args:
instring (str): Input string: has to be in a predefined list.
Returns:
(bool): Boolean according to the input string.
"""
if instring.lower() in ("yes", "true", "t", "y", "1"):
return True
elif instring.lower() in ("no", "false", "f", "n", "0"):
return False
else:
raise ArgumentTypeError("Boolean value expected.")
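# Example (added comment): str2bool("Yes") -> True, str2bool("0") -> False,
# and any other value raises ArgumentTypeError.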
def natural_keys(text):
"""Sort strings naturally.
Sort a list of strings in the natural sorting order.
Args:
text (str): Input text to be sorted
Returns:
(list): list of naturally sorted objects
"""
def atoi(text):
return int(text) if text.isdigit() else text
return [atoi(c) for c in re.split(r"(\d+)", text)]
def main(
sat_fold,
coords_file,
out_fold,
pollution,
delta_pol,
gains,
dem_prods,
recovery,
sat_platform
):
"""S3 OLCI extract.
Extract the products generated by the S3 SNOW Processor for all images
contained in a specified folder at given coordinates, specified in a csv
file. Note, the images have to be unzipped raw S3 OLCI images. For each
scene, the data is located in a *.SEN3 folder, in which the
"xfdumanifest.xml" is stored.
Args:
sat_fold (PosixPath): Path to a folder containing S3 OLCI images
coords_file (PosixPath): Path to a csv containing site coordinates
out_fold (PosixPath): Path to a folder in which the output will be\
written
pollution (bool): S3 SNOW dirty snow flag
delta_pol (int): Delta value to consider dirty snow in S3 SNOW
gains (bool): Consider vicarious calibration gains
dem_prods (bool): Run the S3 SNOW DEM product plugin
recovery (bool): Run in recovery mode to salvage previously saved data
sat_platform (str): Sentinel-3 platform(s) to process: 'A', 'B' or 'AB'
"""
# Initialise the list of coordinates
coords = []
# Open the list of coordinates to be processed
with open(str(coords_file), "r") as f:
rdr = csv.reader(f)
for row in rdr:
coords.append((row[0], float(row[1]), float(row[2])))
# If the recovery mode is activated, don't process data: skip to data
# sorting to salvage the coordinates that were saved
if recovery:
# List temporary files present in the output folder
tmp_files = [x.name for x in out_fold.iterdir() if "tmp" in x.name]
if tmp_files is None:
raise Exception("No temporary files found!")
else:
# Get the sites that have a temporary file to salvage
selected_coords = []
for tmp in tmp_files:
for x in coords:
if x[0] == tmp.split("_tmp")[0]:
selected_coords.append(x)
# Overwrite coords variable for later generic processing
coords = selected_coords
# If not in recovery mode, then process as normal
else:
counter = 1 # Set counter
# Set the path of the log file for failed processing
output_errorfile = out_fold / "failed_log.txt"
# Run the extraction from S3 and put results in dataframe
# List folders in the satellite image directory (include all .SEN3
# folders that are located in sub-directories within 'sat_fold')
satfolders = []
for p in sat_fold.rglob("*"):
if p.as_posix().endswith(".SEN3"):
satfolders.append(p)
for sat_image in satfolders:
# To store results, make a dictionary with sites as keys
all_site = dict.fromkeys([x[0] for x in coords], pd.DataFrame())
# Only process image if it is from the desired platform
sat_image_platform = sat_image.name[2]
if sat_image_platform != sat_platform and sat_platform != "AB":
continue
total_images = len(satfolders)
print(
"Processing image n°%s/%s: %s"
% (counter, total_images, sat_image.name)
)
# Satellite image's full path
s3path = sat_image / "xfdumanifest.xml"
# Extract S3 data for the coordinates contained in the images
s3_results = getS3values(
str(s3path),
coords,
pollution,
delta_pol,
gains,
dem_prods,
output_errorfile,
)
# Get time from the satellite image folder (quicker than
# reading the xml file)
sat_date = datetime.strptime(
sat_image.name.split("_")[7], "%Y%m%dT%H%M%S"
)
# Put the data from the image into a panda dataframe
for site in s3_results:
alb_df = pd.DataFrame(s3_results[site], index=[sat_date])
# Append date and time columns
alb_df["year"] = int(sat_date.year)
alb_df["month"] = int(sat_date.month)
alb_df["day"] = int(sat_date.day)
alb_df["hour"] = int(sat_date.hour)
alb_df["minute"] = int(sat_date.minute)
alb_df["second"] = int(sat_date.second)
alb_df["dayofyear"] = int(sat_date.timetuple().tm_yday)
# Append platform ID as numeric value (A=0, B=1)
if sat_image_platform == 'A':
sat_image_platform_num = 0
else:
sat_image_platform_num = 1
alb_df["platform"] = int(sat_image_platform_num)
# Add the image data to the general dataframe
all_site[site] = all_site[site].append(alb_df)
# Save to file to avoid storing in memory
fname = "%s_tmp.csv" % site
output_file = out_fold / fname
# Save dataframe to the csv file
# Save header if first write
if output_file.is_file():
all_site[site].to_csv(
str(output_file),
mode="a",
na_rep=-999,
header=False,
index=False,
)
else:
all_site[site].to_csv(
str(output_file),
mode="a",
na_rep=-999,
header=True,
index=False,
)
counter += 1 # Increment counter
# After having run the process for the images, reopen the temp files
# and sort the data correctly
# Set column order for sorted files
columns = [
"year",
"month",
"day",
"hour",
"minute",
"second",
"dayofyear",
"platform",
"grain_diameter",
"snow_specific_area",
"ndsi",
"ndbi",
"auto_cloud",
"sza",
"vza",
"saa",
"vaa",
]
# If the S3SNOW DEM plugin is run, add columns to the list
if dem_prods:
[
columns.append(x)
for x in ["altitude", "slope", "aspect", "elevation_variance"]
]
# Open temp files
for location in coords:
# Read the csv file to a pandas dataframe
csv_name = "%s_tmp.csv" % location[0]
incsv = out_fold / csv_name
if incsv.is_file():
temp_df = pd.read_csv(str(incsv), sep=",")
# Get all rBRR, albedo and reflectance bands and natural sort
alb_columns = [x for x in temp_df.columns if "albedo_bb" in x]
alb_columns.sort(key=natural_keys)
rbrr_columns = [x for x in temp_df.columns if "BRR" in x]
rbrr_columns.sort(key=natural_keys)
planar_albedo_columns = [
x for x in temp_df.columns if "spectral_planar" in x
]
planar_albedo_columns.sort(key=natural_keys)
rtoa_columns = [x for x in temp_df.columns if "reflectance" in x]
rtoa_columns.sort(key=natural_keys)
# Reorder dataframe columns
temp_df = temp_df[
columns
+ alb_columns
+ rtoa_columns
+ rbrr_columns
+ planar_albedo_columns
]
# Reorder dates
temp_df["dt"] = pd.to_datetime(
temp_df[["year", "month", "day", "hour", "minute", "second"]]
)
temp_df.set_index("dt", inplace=True)
temp_df.sort_index(inplace=True)
# Save reordered file
fname = "%s.csv" % location[0]
output_file = out_fold / fname
# Save dataframe to the csv file
temp_df.to_csv(
str(output_file),
mode="a",
na_rep=-999,
header=True,
index=False,
)
incsv.unlink() # Remove temporary file
if __name__ == "__main__":
# If no arguments, return a help message
if len(sys.argv) == 1:
print(
'No arguments provided. Please run the command: "python %s -h"'
" for help." % sys.argv[0]
)
sys.exit(2)
else:
# Parse Arguments from command line
parser = ArgumentParser(
description="Import parameters for the complex"
" terrain algorithm."
)
parser.add_argument(
"-i",
"--insat",
metavar="Satellite image repository",
required=True,
help="Path to the folder containing the S3 OLCI images to be"
" processed.",
)
parser.add_argument(
"-c",
"--coords",
metavar="Site coordinates",
required=True,
help="Path to the input file containing the coordinates for each"
" site. Has to be a csv in format: site,lat,lon.",
)
parser.add_argument(
"-o",
"--output",
metavar="Output",
required=True,
help="Path to the output folder, where the results will be saved.",
)
parser.add_argument(
"-p",
"--pollution",
metavar="Consider snow pollution",
default=False,
type=str2bool,
help="Boolean condition: switch the pollution flag on/off in the"
" S3 SNOW processor.",
)
parser.add_argument(
"-d",
"--delta_p",
metavar="Pollution delta",
type=float,
default=0.1,
help="Reflectance delta (compared to theory) threshold to trigger"
" the snow pollution calculations, when the pollution flag"
" is on.",
)
parser.add_argument(
"-g",
"--gains",
metavar="OLCI gain correction",
type=str2bool,
default=False,
help="Boolean condition: switch the gain corrections on/off in the"
" S3 SNOW processor.",
)
parser.add_argument(
"-e",
"--elevation",
metavar="S3SNOW dem products",
type=str2bool,
default=False,
help="Boolean condition: run the DEM product plugin.",
)
parser.add_argument(
"-r",
"--recovery",
metavar="Recovery mode",
type=str2bool,
default=False,
help="Boolean condition: run the recovery mode to salvage data.",
)
parser.add_argument(
"-f",
"--platform",
metavar="Sentinel-3 satellite platform",
required=False,
default="AB",
help="Specify the Sentinel-3 platform to include data from. "
"Options are 'A', 'B', or 'AB' (for both platforms).",
)
input_args = parser.parse_args()
# Run main
main(
Path(input_args.insat),
Path(input_args.coords),
Path(input_args.output),
input_args.pollution,
input_args.delta_p,
input_args.gains,
input_args.elevation,
input_args.recovery,
input_args.platform,
)
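# Hypothetical example invocation (added for illustration only; the script
# name, paths and values below are placeholders, not from the original file):
#
#   python s3_olci_extract.py -i /data/S3_images -c sites.csv -o /data/out \
#       -p true -d 0.1 -g false -e true -r false -f A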
| python |
import tensorflow as tf
import numpy as np
import yaml
from src.preprocessing import clean_doc
# load config
with open('config.yaml', 'r') as f:
conf = yaml.safe_load(f)
MAX_NUM_WORDS = conf["EMBEDDING"]["MAX_NUM_WORDS"]
MAX_SEQUENCE_LENGTH = conf["EMBEDDING"]["MAX_SEQUENCE_LENGTH"]
def get_data_tensor(texts, training_size):
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=MAX_NUM_WORDS, oov_token=1)
tokenizer.fit_on_texts(texts[:training_size])
sequences = tokenizer.texts_to_sequences(texts)
word_index = tokenizer.word_index
return tf.keras.preprocessing.sequence.pad_sequences(sequences, maxlen=MAX_SEQUENCE_LENGTH), word_index
def get_embeddings_index(model):
embeddings_index = model.wv.vocab
for word, vocab in embeddings_index.items():
embeddings_index[word] = model.wv.vectors[vocab.index]
return embeddings_index, model.vector_size
def get_embedding_layer(word_index, embedding_index, embedding_dim, static=False):
num_words = min(MAX_NUM_WORDS, len(word_index))
embedding_matrix = np.zeros((num_words+1, embedding_dim))
for word, i in word_index.items():
if i > MAX_NUM_WORDS:
continue
embedding_vector = embedding_index.get(word)
if embedding_vector is not None:
embedding_matrix[i] = embedding_vector
return tf.keras.layers.Embedding(
embedding_matrix.shape[0],
embedding_matrix.shape[1],
weights=[embedding_matrix],
input_length=MAX_SEQUENCE_LENGTH,
mask_zero=True,
trainable=static)
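# Minimal usage sketch (added; assumes a trained gensim Word2Vec model
# `w2v_model` and a list of raw strings `texts`, neither of which is defined
# in this module):
#
#   data, word_index = get_data_tensor(texts, training_size=len(texts))
#   emb_index, emb_dim = get_embeddings_index(w2v_model)
#   embedding_layer = get_embedding_layer(word_index, emb_index, emb_dim)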
class TextIdCoverter:
def __init__(self, word_index):
self.word_index = word_index
self.id_index = {value:key for key,value in word_index.items()}
def id2text(self, ids):
ids = ids.reshape((MAX_SEQUENCE_LENGTH))
return ' '.join('[?]' if id == 1 else self.id_index[id] for id in ids if id != 0)
def text2id(self, text):
text = clean_doc(text)
text = [self.word_index.get(id) or 1 for id in text.split(' ')]
text = tf.keras.preprocessing.sequence.pad_sequences([text], maxlen=MAX_SEQUENCE_LENGTH)[0]
return text
| python |
from _init_paths import *
import caffe  # caffe's python bindings are put on sys.path by _init_paths
import cv2
import numpy as np
import pickle
import json
import os.path as osp
import skimage.transform
import argparse
import h5py
import time
# Preprocess image
def prep_image(fname, mean_values):
im = cv2.imread(fname)
h, w, _ = im.shape
if h < w:
im = skimage.transform.resize(im, (256, w*256/h), preserve_range=True)
else:
im = skimage.transform.resize(im, (h*256/w, 256), preserve_range=True)
h, w, _ = im.shape
im = im[h//2-112:h//2+112, w//2-112:w//2+112]
im = np.swapaxes(np.swapaxes(im, 1, 2), 0, 1)
im = im - mean_values
return im[np.newaxis].astype('float32')
if __name__ == '__main__':
parser = argparse.ArgumentParser("caffe model generate features")
parser.add_argument('--split', type=str, required=True, help='choose a split')
parser.add_argument('--concepts', type=str, required=True, help='choose a concept file')
parser.add_argument('--type', type=str, required=True, help='choose a cnn type')
parser.add_argument('--dataset', type=str, required=True, help='choose a dataset')
parser.add_argument('--index', type=int, default=0)
parser.add_argument('--max', type=int, default=1)
parser.add_argument('--mapping', type=str, help='choose a mapping function')
args = parser.parse_args()
# Specify the caffe file name and batch size
if args.type == 'resnet':
net_caffe = caffe.Net('../Models/ResNet-152-deploy.prototxt', '../Models/ResNet-152-model.caffemodel', caffe.TEST)
mean_values = np.load('mean_value.npz')['mean']
feature_size = 2048
name = 'pool5_feats'
batch_size = 20
elif args.type == 'vgg':
net_caffe = caffe.Net('../Models/vgg-16-deploy.prototxt', '../Models/vgg-16-model.caffemodel', caffe.TEST)
mean_values = np.load('mean_value.npz')['mean']
feature_size =4096
name = 'fc7_feats'
batch_size = 50
# Load f_visual_concept, used to extract image id
f_visual_concept = json.load(open(args.concepts))
if args.dataset == 'coco':
mapping = pickle.load(open(args.mapping))
prefix = coco_image_base # imported from config.py
elif args.dataset == 'flickr':
mapping = None
prefix = flickr_image_base # imported from config.py
# Specify the h5 file; normally it should already exist, we just add a new dataset to it
fname = '../Data/%s/feats_%s.h5'%(args.dataset, args.split)
if not osp.exists(fname):
f = h5py.File(fname, 'w')
else:
f = h5py.File(fname, 'r+')
if name in f.keys():
cnn_dataset = f['/%s'%name]
else:
cnn_dataset = f.create_dataset(name, dtype='float32', shape=(len(f_visual_concept), feature_size))
# Retrieve the number of images
visual_concept_num = len(f_visual_concept)/args.max
print "Generating features for %d images"%visual_concept_num
# Start extracting image features
tik = time.time()
for start in range(args.index*visual_concept_num, (args.index+1)*visual_concept_num, batch_size):
end = min(start + batch_size, (args.index+1)*visual_concept_num)
im = np.zeros((batch_size, 3, 224, 224), dtype='float32')
for i in range(start, end):
path = '%d.jpg'%f_visual_concept[i]['id'] if mapping is None else mapping[f_visual_concept[i]['id']]
im[i-start] = prep_image(osp.join(prefix, path), mean_values)
net_caffe.forward(data=im)
cnn_dataset[start:end] = net_caffe.blobs[name].data.squeeze()[:end-start]  # write features into the h5 dataset
print "finished %d/%d within time %d"%(start-args.index*visual_concept_num, visual_concept_num, time.time() - tik)
tik = time.time()
f.close()
| python |
#
# Copyright 2012-2014 John Whitlock
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
from jsonfield import JSONField
from multigtfs.models.base import models, Base
@python_2_unicode_compatible
class ServiceDate(Base):
"""Dates that a route is active.
Implements calendar_dates.txt
"""
service = models.ForeignKey('Service', on_delete=models.CASCADE)
date = models.DateField(
help_text="Date that the service differs from the norm.")
exception_type = models.IntegerField(
default=1, choices=((1, 'Added'), (2, 'Removed')),
help_text="Is service added or removed on this date?")
extra_data = JSONField(default={}, blank=True, null=True)
def __str__(self):
return (
"%d-%s %s %s" % (
self.service.feed.id, self.service.service_id, self.date,
'Added' if self.exception_type == 1 else 'Removed'))
class Meta:
db_table = 'service_date'
app_label = 'multigtfs'
# For Base import/export
_column_map = (
('service_id', 'service__service_id'),
('date', 'date'),
('exception_type', 'exception_type'))
_filename = 'calendar_dates.txt'
_rel_to_feed = 'service__feed'
_sort_order = ('date', 'exception_type')
_unique_fields = ('service_id', 'date')
| python |
from typing import List, Tuple
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.db_wrapper import DBWrapper2
import logging
log = logging.getLogger(__name__)
class HintStore:
db_wrapper: DBWrapper2
@classmethod
async def create(cls, db_wrapper: DBWrapper2):
self = cls()
self.db_wrapper = db_wrapper
async with self.db_wrapper.write_db() as conn:
if self.db_wrapper.db_version == 2:
await conn.execute("CREATE TABLE IF NOT EXISTS hints(coin_id blob, hint blob, UNIQUE (coin_id, hint))")
else:
await conn.execute(
"CREATE TABLE IF NOT EXISTS hints(id INTEGER PRIMARY KEY AUTOINCREMENT, coin_id blob, hint blob)"
)
await conn.execute("CREATE INDEX IF NOT EXISTS hint_index on hints(hint)")
return self
async def get_coin_ids(self, hint: bytes) -> List[bytes32]:
async with self.db_wrapper.read_db() as conn:
cursor = await conn.execute("SELECT coin_id from hints WHERE hint=?", (hint,))
rows = await cursor.fetchall()
await cursor.close()
coin_ids = []
for row in rows:
coin_ids.append(row[0])
return coin_ids
async def add_hints(self, coin_hint_list: List[Tuple[bytes32, bytes]]) -> None:
if len(coin_hint_list) == 0:
return None
async with self.db_wrapper.write_db() as conn:
if self.db_wrapper.db_version == 2:
cursor = await conn.executemany(
"INSERT OR IGNORE INTO hints VALUES(?, ?)",
coin_hint_list,
)
else:
cursor = await conn.executemany(
"INSERT INTO hints VALUES(?, ?, ?)",
[(None,) + record for record in coin_hint_list],
)
await cursor.close()
async def count_hints(self) -> int:
async with self.db_wrapper.read_db() as conn:
async with conn.execute("select count(*) from hints") as cursor:
row = await cursor.fetchone()
assert row is not None
[count] = row
return int(count)
| python |
# Copyright 2021 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pathlib
import tempfile
import unittest
import unittest.mock
from typing import TypedDict
import click.testing
import cpo.lib.ibmcloud.oc.cluster
import cpo.lib.ibmcloud.status
import cpo.utils.process
from cpo.config.cluster_credentials_manager import cluster_credentials_manager
from cpo.cpo import cli
from cpo.lib.cluster.cluster import AbstractCluster
class ClusterData(TypedDict):
alias: str
cluster_name: str
server: str
class TestAddClusterCommands(unittest.TestCase):
def test_add_cluster_command(self):
clusters_file_path = pathlib.Path(tempfile.gettempdir()) / "clusters.json"
if clusters_file_path.exists():
os.remove(clusters_file_path)
cluster_credentials_manager.get_clusters_file_path = unittest.mock.MagicMock(return_value=clusters_file_path)
cluster_credentials_manager.reload()
# create cluster-1 and check that the number of clusters is 1
self._add_cluster_1()
# create cluster-2 and check that the number of clusters is 2
self._add_cluster_2()
# create cluster-1 and check that the exit code of the command is 1 as
# the server already exists
with self.assertRaisesRegex(Exception, "Server already exists"):
self._add_cluster_1()
# create cluster-3 and check that the exit code of the command is 1 as
# the alias already exists
with self.assertRaisesRegex(Exception, "Alias already exists"):
self._add_cluster_3()
def _add_cluster(self, cluster_data: ClusterData, num_expected_cluster: int):
server = cluster_data["server"]
cpo.lib.ibmcloud.status.execute_ibmcloud_command = unittest.mock.MagicMock(
return_value=cpo.utils.process.ProcessResult(
stderr="", stdout=f'{{"serverURL": "{server}"}}', return_code=0
)
)
runner = click.testing.CliRunner()
result = runner.invoke(
cli,
[
"ibmcloud",
"oc",
"cluster",
"add",
"--alias",
cluster_data["alias"],
"--cluster-name",
cluster_data["cluster_name"],
],
)
if result.exception is not None:
raise (result.exception)
self.assertEqual(result.exit_code, 0)
self.assertEqual(
len(cluster_credentials_manager.get_clusters_file_contents_with_default()["clusters"]),
num_expected_cluster,
)
cluster = cluster_credentials_manager.get_cluster(cluster_data["server"])
self.assertIsNotNone(cluster)
if cluster is not None:
self._check_cluster(cluster, cluster_data)
def _add_cluster_1(self) -> ClusterData:
cluster_1_data: ClusterData = {
"alias": "cluster-1-alias",
"cluster_name": "cluster-1",
"server": "https://cluster-1.us-south.containers.cloud.ibm.com:12345",
}
self._add_cluster(cluster_1_data, 1)
return cluster_1_data
def _add_cluster_2(self) -> ClusterData:
cluster_2_data: ClusterData = {
"alias": "",
"cluster_name": "cluster-2",
"server": "https://cluster-2.us-south.containers.cloud.ibm.com:12345",
}
self._add_cluster(cluster_2_data, 2)
return cluster_2_data
def _add_cluster_3(self) -> ClusterData:
cluster_3_data: ClusterData = {
"alias": "cluster-1-alias",
"cluster_name": "cluster-1",
"server": "https://cluster-3.us-south.containers.cloud.ibm.com:12345",
}
self._add_cluster(cluster_3_data, 1)
return cluster_3_data
def _check_cluster(self, cluster: AbstractCluster, cluster_data: ClusterData):
cluster_name = cluster_data["cluster_name"]
returned_cluster_data = cluster.get_cluster_data()
self.assertEqual(returned_cluster_data["alias"], cluster_data["alias"])
self.assertEqual(returned_cluster_data["cluster_name"], cluster_name)
self.assertEqual(
returned_cluster_data["type"],
cpo.lib.ibmcloud.oc.cluster.CLUSTER_TYPE_ID,
)
| python |
# Generated by Django 2.0.3 on 2018-07-25 06:07
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('firstgis', '0004_auto_20180725_0157'),
]
operations = [
migrations.CreateModel(
name='City',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('geometry', django.contrib.gis.db.models.fields.PointField(srid=4326)),
],
options={
'verbose_name_plural': 'cities',
'ordering': ('name',),
},
),
migrations.DeleteModel(
name='Incidences',
),
]
| python |
from .CompoundEditor import CompoundEditor
from .Icons import Icons
from .TypeUtils import getListElemTypeHint
import copy
class EditorList(CompoundEditor):
canDeleteElements = True
canMoveElements = True
def _getProperties(self):
for i in range(len(self._targetObject)):
name = str(i)
value = self._targetObject[i]
setter = lambda val, thisI=i: self._setListElem(thisI, val)
elemHint = getListElemTypeHint(self._typeHint)
yield name, value, setter, elemHint
# This is a replacement for this, which isn't valid:
# setter = lambda val, thisI=i: targetObject[thisI] = val
def _setListElem(self, i, val):
self._targetObject[i] = val
def _addClicked(self):
with self._editorGenerator.threadLock():
if self._typeHint:
elemHint = getListElemTypeHint(self._typeHint)
self._targetObject.append(elemHint())
else:
self._targetObject.append(copy.deepcopy(self._targetObject[0]))
self._createWidgetsForObject()
self.dataChanged.emit(self._targetObject)
def _deleteClicked(self, name):
with self._editorGenerator.threadLock():
i = int(name)
del self._targetObject[i]
self._createWidgetsForObject()
self.dataChanged.emit(self._targetObject)
def _moveClicked(self, name, delta):
with self._editorGenerator.threadLock():
i = int(name)
if i + delta < 0 or i + delta >= len(self._targetObject):
return
swap = self._targetObject[i + delta]
self._targetObject[i + delta] = self._targetObject[i]
self._targetObject[i] = swap
self._createWidgetsForObject()
self.dataChanged.emit(self._targetObject)
def _getHeaderWidgets(self):
addButton = self._editorGenerator.createButton(Icons.Add)
addButton.clicked.connect(self._addClicked)
return [addButton]
class EditorListHorizontal(EditorList):
isHorizontalLayout = True
| python |
"""
@author:
@file: urls.py
@time: 2018/1/31 13:20
"""
from app.case.views import *
from app.case import case
case.add_url_rule('/add_cases', view_func=AddtestcaseView.as_view('add_cases'))
case.add_url_rule('/edit_case/<int:id>', view_func=EditcaseView.as_view('edit_case'))
case.add_url_rule('/import_cases', view_func=DaorucaseView.as_view('import_cases'))
case.add_url_rule('/ser_yongli', view_func=SeryongliView.as_view('ser_yongli'))
case.add_url_rule('/makeonlyonecase', view_func=MakeonlyoneCase.as_view('makeonlyonecase'))
case.add_url_rule('/duoyongli', view_func=DuoyongliView.as_view('duoyongli'))
case.add_url_rule('/export_cases', view_func=ExportCase.as_view('export_cases'))
case.add_url_rule('/caseonedeteil', view_func=OnecaseDetial.as_view('caseonedeteil'))
| python |
#!/usr/bin/env python
"""
Generate a .h5 database file containing master sensor, slave sensor, and pseudo ground truth data.
The final output fps depends on the sampling rate input.
"""
import os, os.path
import inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
os.sys.path.insert(0, parentdir)
import argparse
import math
import numpy as np
import h5py
from scipy import misc
import csv
from eulerangles import mat2euler, euler2mat
import yaml
from os.path import join, dirname
import cv2
SCALER = 1.0 # scale label: 1, 100, 10000
RADIUS_2_DEGREE = 180.0 / math.pi
def rotated_to_local(T_w_c):
# Input is 7 DoF absolute poses (3 trans, 4 quat), output is 6 DoF relative poses
poses_local = []
# T_w_c = np.insert(T_w_c, 0, 1, axis=1) # add dummy timestamp
for i in range(1, len(T_w_c)):
T_w_c_im1 = transform44(T_w_c[i-1])
T_w_c_i = transform44(T_w_c[i])
T_c_im1_c_i = np.dot(np.linalg.pinv(T_w_c_im1), T_w_c_i)
# 3D: x, y, z, roll, pitch, yaw
eular_c_im1_c_i = mat2euler(T_c_im1_c_i[0:3, 0:3])
poses_local.append([SCALER * T_c_im1_c_i[0, 3], SCALER * T_c_im1_c_i[1, 3], SCALER * T_c_im1_c_i[2, 3],
SCALER * eular_c_im1_c_i[2] * RADIUS_2_DEGREE, SCALER * eular_c_im1_c_i[1] * RADIUS_2_DEGREE,
SCALER * eular_c_im1_c_i[0] * RADIUS_2_DEGREE])
poses_local = np.array(poses_local)
return poses_local
def transform44(l):
"""
Generate a 4x4 homogeneous transformation matrix from a 3D point and unit quaternion.
Input:
l -- tuple consisting of (stamp,tx,ty,tz,qx,qy,qz,qw) where
(tx,ty,tz) is the 3D position and (qx,qy,qz,qw) is the unit quaternion.
Output:
matrix -- 4x4 homogeneous transformation matrix
"""
_EPS = np.finfo(float).eps * 4.0
t = l[1:4]
q = np.array(l[4:8], dtype=np.float64, copy=True)
nq = np.dot(q, q)
if nq < _EPS:
return np.array((
(1.0, 0.0, 0.0, t[0]),
(0.0, 1.0, 0.0, t[1]),
(0.0, 0.0, 1.0, t[2]),
(0.0, 0.0, 0.0, 1.0)
), dtype=np.float64)
q *= np.sqrt(2.0 / nq)
q = np.outer(q, q)
return np.array((
(1.0 - q[1, 1] - q[2, 2], q[0, 1] - q[2, 3], q[0, 2] + q[1, 3], t[0]),
(q[0, 1] + q[2, 3], 1.0 - q[0, 0] - q[2, 2], q[1, 2] - q[0, 3], t[1]),
(q[0, 2] - q[1, 3], q[1, 2] + q[0, 3], 1.0 - q[0, 0] - q[1, 1], t[2]),
(0.0, 0.0, 0.0, 1.0)), dtype=np.float64)
def iround(x):
"""iround(number) -> integer
Round a number to the nearest integer."""
y = round(x) - .5
return int(y) + (y > 0)
def main():
print('FOR **Master and 2 slaves** ONLY!')
DESCRIPTION = """This script receives a working directory and a dataset mean for each modality."""
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument('--dataroot', required=True, help='''Specify the dataroot directory.''')
parser.add_argument('--ref_file_name', required=True,
help='''Specify the reference (synchronized) filename to load the data''')
parser.add_argument('--master', required=True, help='''Specify the master.''')
parser.add_argument('--slave_1', required=True, help='''Specify the slave_1.''')
parser.add_argument('--slave_2', required=True, help='''Specify the slave_2.''')
parser.add_argument('--mean_master_file', required=True, help='''Specify the dataset mean for master.''')
parser.add_argument('--mean_slave1_file', help='''Specify the dataset mean for slave 1.''')
parser.add_argument('--range_master_file', required=True, help='''Specify the range file for master.''')
parser.add_argument('--range_slave1_file', required=True, help='''Specify the range file for slave 1.''')
parser.add_argument('--save_dir', help='''Specify save directory.''')
parser.add_argument('--gap', required=True, help='''Specify the sampling gap.''')
args = parser.parse_args()
dataroot = args.dataroot
save_dir = args.save_dir
ref_file_name = args.ref_file_name
master = args.master
slave_1 = args.slave_1
slave_2 = args.slave_2
mean_master_file = args.mean_master_file
mean_slave1_file = args.mean_slave1_file
range_master_file = args.range_master_file
range_slave1_file = args.range_slave1_file
GAP = int(args.gap)
parent_dir = dirname(dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))))
with open(join(parent_dir, 'config.yaml'), 'r') as f:
cfg = yaml.safe_load(f)
all_exps = cfg['dataset_creation']['all_exp_files']
if not os.path.exists(save_dir):
os.makedirs(save_dir)
file_mean_master = open(mean_master_file, "r")
mean_master_str = file_mean_master.readlines()[0]
file_mean_master.close()
file_mean_slave_1 = open(mean_slave1_file, "r")
mean_slave_1_str = file_mean_slave_1.readlines()[0]
file_mean_slave_1.close()
file_range_master = open(range_master_file, "r")
range_master_str = file_range_master.readlines()[0]
file_range_master.close()
file_range_slave_1 = open(range_slave1_file, "r")
range_slave_1_str = file_range_slave_1.readlines()[0]
file_range_slave_1.close()
# IMPORTANT, PLEASE SPECIFY THE SAMPLING RATE/GAP
odom_data_GAP = [GAP] * len(all_exps)
seq_counter = 1
total_img_counter = 0
# for exp_file in all_exps:
for j in range(len(all_exps)):
# img_dir = join(dataroot, exp_file, data_type)
master_dir = join(dataroot, all_exps[j], master)
slave_1_dir = join(dataroot, all_exps[j], slave_1)
file_full_path = join(dataroot, all_exps[j], ref_file_name)
with open(file_full_path, 'r') as the_files:
file_lines = [line for line in the_files]
# Sampling file based on the specified gap
sampled_files = []
sampling = odom_data_GAP[j]
for k in range(0, np.size(file_lines), sampling):
sampled_files.append(file_lines[k])
# Variables to save data
train_timestamp = []
train_label = []
train_slave_2 = np.empty((len(sampled_files), 20, 6), dtype=np.float64) # imu
# do it with pre-allocated size, it is faster
if master == 'thermal' or master == 'mmwave_middle' or master == 'lidar' or master == 'depth':
train_master = np.empty((len(sampled_files), 1, 512, 640, 1), dtype=np.float32)
else:
train_master = np.empty((len(sampled_files), 1, 512, 640, 3), dtype=np.float32)
# train_master = []
if slave_1 == 'thermal' or slave_1 == 'mmwave_middle' or slave_1 == 'lidar' or slave_1 == 'depth':
train_slave_1 = np.empty((len(sampled_files), 1, 480, 640, 1), dtype=np.float32)
else:
train_slave_1 = np.empty((len(sampled_files), 1, 480, 640, 3), dtype=np.float32)
# save timestamp
timestamp = [line[:-1].split(',')[2] for line in sampled_files]
print('Total timestamp: ', np.shape(timestamp))
train_timestamp.append(timestamp)
gt_lines_float = []
for line in sampled_files:
gt_lines_float.append(np.array(
[float(line[:-1].split(',')[2]), # timestamp
float(line[:-1].split(',')[3]), float(line[:-1].split(',')[4]), float(line[:-1].split(',')[5]),
# translation
float(line[:-1].split(',')[6]), float(line[:-1].split(',')[7]),
float(line[:-1].split(',')[8]), float(line[:-1].split(',')[9])])) # quaternion
lidar_rel_poses = rotated_to_local(gt_lines_float)
train_label.append(lidar_rel_poses)
print('GT size: ', np.shape(train_label))
for k in range(0, len(sampled_files)):
# read master corresponding to pose
min_range_master = float(range_master_str.split(',')[0])
max_range_master = float(range_master_str.split(',')[1])
master_path = master_dir + '/' + sampled_files[k].split(',')[0] # idx 0 is always for the master!
# normalize master image
master_img = misc.imread(master_path)
master_img = master_img.astype('float32')
# np.clip(master_img, 0, 1, out=master_img)
master_img = cv2.normalize(master_img, None, 0, 255, cv2.NORM_MINMAX) # rescale to the 0-255 range (dtype stays float32)
master_img = (master_img - min_range_master) * 1.0 / (max_range_master - min_range_master)
master_img -= float(mean_master_str)
master_img = np.expand_dims(master_img, axis=-1)
master_img = np.expand_dims(master_img, axis=0) # add dimension for timestamp
# train_master.append(master_img)
train_master[k] = master_img
# read slave corresponding to pose
min_range_slave_1 = float(range_slave_1_str.split(',')[0])
max_range_slave_1 = float(range_slave_1_str.split(',')[1])
slave_1_path = slave_1_dir + '/' + sampled_files[k].split(',')[1] # idx 1 is always for the slave!
# normalize slave image
slave_1_img = misc.imread(slave_1_path, mode='RGB')
slave_1_img = slave_1_img.astype('float32')
slave_1_img[:, :, [0, 1, 2]] = slave_1_img[:, :, [2, 1, 0]]
slave_1_img = (slave_1_img - min_range_slave_1) * 1.0 / (max_range_slave_1 - min_range_slave_1)
# slave_1_img -= float(mean_master_str)
slave_1_img[:, :, 0] -= float(mean_slave_1_str.split(",")[0])
slave_1_img[:, :, 1] -= float(mean_slave_1_str.split(",")[1])
slave_1_img[:, :, 2] -= float(mean_slave_1_str.split(",")[2])
slave_1_img = np.expand_dims(slave_1_img, axis=0) # add dimension for timestamp
# train_slave_1.append(slave_1_img)
train_slave_1[k] = slave_1_img
# read IMU data
# the imu data starts at column 10 in sampled_files for 1 slave
# the imu data starts at column 11 in sampled_files for 2 slaves
imu_start = 11
for l in range(20):
# notes that we have loaded imu data in 1x120 format, and we need to save it in 20x6
# rstrip() -> remove trailing new line \n
train_slave_2[k][l] = np.array(sampled_files[k].rstrip().split(',')[imu_start:(imu_start + 6)],
dtype=np.float64)
imu_start += 6
total_img_counter += 1
print('Processing folder: ', all_exps[j], 'Total img idx ', str(total_img_counter),
': ', sampled_files[k].split(',')[0], '. Master size: ', np.shape(train_master),
'. Slave 1 size: ', np.shape(train_slave_1),
'. Slave 2 size: ', np.shape(train_slave_2))
print('Saving to h5 file ....')
train_timestamp_np = np.array(train_timestamp)
train_master_data_np = np.array(train_master)
train_master_data_np = np.expand_dims(train_master_data_np, axis=0) # add dimension for batch
train_slave_1_data_np = np.array(train_slave_1)
train_slave_1_data_np = np.expand_dims(train_slave_1_data_np, axis=0) # add dimension for batch
train_slave_2_data_np = np.array(train_slave_2)
train_slave_2_data_np = np.expand_dims(train_slave_2_data_np, axis=0) # add dimension for batch
train_label_np = np.array(train_label)
print('Data has been collected:')
print('Master => ', master, ': ', np.shape(train_master_data_np))
print('Slave 1 => ', slave_1, ': ', np.shape(train_slave_1_data_np))
print('Slave 2 => ', slave_2, ': ', np.shape(train_slave_2_data_np))
print('Label : ', np.shape(train_label_np))
file_save = join(save_dir, 'turtle_seq_' + str(seq_counter) + '.h5')
with h5py.File(file_save, 'w') as hf:
hf.create_dataset('timestamp', data=np.array(train_timestamp_np).astype(int))
hf.create_dataset(str(master + '_data'), data=train_master_data_np)
hf.create_dataset(str(slave_1 + '_data'), data=train_slave_1_data_np)
hf.create_dataset(str(slave_2 + '_data'), data=train_slave_2_data_np)
hf.create_dataset('label_data', data=train_label_np)
print('Finished! File saved in: ' + file_save)
seq_counter += 1
return 0
if __name__ == '__main__':
main()
| python |
from django.apps import AppConfig
class FontExemplosConfig(AppConfig):
name = 'font_exemplos'
| python |
import glob
import os
import pickle
import re
import gensim
from gensim.models.callbacks import CallbackAny2Vec
from gensim.models import Word2Vec
import numpy as np
from mat2vec.processing.process import MaterialsTextProcessor
text_processing = MaterialsTextProcessor()
COMMON_TERMS = ["-", "-", b"\xe2\x80\x93", b"'s", b"\xe2\x80\x99s", "from",
"as", "at", "by", "of", "on", "into", "to", "than", "over",
"in", "the", "a", "an", "/", "under", ":"]
EXCLUDE_PUNCT = [
"=",
".",
",",
"(",
")",
"<",
">",
"\"",
"“",
"”",
"≥",
"≤",
"<nUm>"]
EXCLUDE_TERMS = ["=", ".", ",", "(", ")", "<", ">", "\"", "“", "”", "≥", "≤",
"<nUm>", "been", "be", "are",
"which", "were", "where", "have", "important", "has", "can",
"or", "we", "our",
"article", "paper", "show", "there", "if", "these", "could",
"publication",
"while", "measured", "measure", "demonstrate", "investigate",
"investigated",
"demonstrated", "when", "prepare", "prepared", "use", "used",
"determine",
"determined", "find", "successfully", "newly", "present",
"reported", "report", "new", "characterize", "characterized",
"experimental",
"result", "results", "showed", "shown", "such", "after",
"but", "this", "that", "via", "is", "was", "and", "using"]
INCLUDE_PHRASES = ["oxygen_reduction_reaction"]
def save_obj(obj, name):
with open(name + ".pkl", "wb") as f:
pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def load_obj(name):
with open(name + ".pkl", "rb") as f:
return pickle.load(f)
def keep_simple_formula(word, count, min_count):
if text_processing.is_simple_formula(word):
return gensim.utils.RULE_KEEP
else:
return gensim.utils.RULE_DEFAULT
def compute_epoch_accuracies(root, prefix, analogy_file):
filenames = glob.glob(os.path.join(root, prefix + "_epoch*.model"))
nr_epochs = len(filenames)
accuracies = dict()
losses = [0] * nr_epochs
for filename in filenames:
epoch = int(re.search(r"\d+\.model", filename).group()[:-6])
m = Word2Vec.load(filename)
losses[epoch] = m.get_latest_training_loss()
sections = m.wv.accuracy(analogy_file)
for sec in sections:
if sec["section"] not in accuracies:
accuracies[sec["section"]] = [0] * nr_epochs
correct, incorrect = len(sec["correct"]), len(sec["incorrect"])
if incorrect > 0:
accuracy = correct / (correct + incorrect)
else:
accuracy = 0
accuracies[sec["section"]][epoch] = (correct, incorrect, accuracy)
save_obj(accuracies, os.path.join("models", prefix + "_accuracies"))
save_obj(np.concatenate([np.array([losses[0]]), np.diff(
losses)]), os.path.join("models", prefix + "_loss"))
class EpochSaver(CallbackAny2Vec):
"""Callback to save model after every epoch."""
def __init__(self, path_prefix):
self.path_prefix = path_prefix
self.epoch = 0
def on_epoch_end(self, m):
output_path = "{}_epoch{}.model".format(self.path_prefix, self.epoch)
print("Save model to {}.".format(output_path))
m.save(output_path)
self.epoch += 1
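# Usage sketch (added; assumes a corpus of tokenized `sentences`, which is not
# built in this file, and a placeholder model path). `trim_rule` and
# `callbacks` are standard gensim Word2Vec arguments:
#
#   model = Word2Vec(sentences, min_count=5,
#                    trim_rule=keep_simple_formula,
#                    callbacks=[EpochSaver("models/pretrained_embeddings")])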
| python |
def main():
s = raw_input()
print s[::-1]  # print the input string reversed
if __name__ == '__main__':
main()
| python |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, print_function, unicode_literals
from unittest import mock
from demo.models import DemoModel
from demo.views import InstanceViewset, SimpleFlowViewSet, SimpleViewSet
from django.test import TestCase
from django.urls import resolve
from tests.utils import UnittestClient
class DemoSimpleViewTest(TestCase):
unittest_client = UnittestClient()
def test_error_url(self):
response = self.unittest_client.get("/v3/demo/error_url/")
self.assertEqual(response.message, "您所请求的内容不存在")
def test_run_with_success(self):
found = resolve("/v3/demo/succeed/")
self.assertEqual(found.func.cls, SimpleViewSet)
def test_run_with_exception(self):
response = self.unittest_client.get("/v3/demo/fail/")
self.assertEqual(response.message, "带有参数的异常,aaa,bbb")
self.assertEqual(response.code, "1500102")
def test_return_with_json(self):
response = self.unittest_client.get("/v3/demo/return_with_json/")
self.assertEqual(response.code, "1500200")
self.assertIsInstance(response.response, dict)
def test_return_with_data_response(self):
response = self.unittest_client.get("/v3/demo/return_with_data_response/")
self.assertEqual(response.message, "self-message")
def test_get_params(self):
response = self.unittest_client.get("/v3/demo/get_params/", {"param1": "value"})
self.assertEqual(response.data["param1"], "value")
def test_post_params(self):
response = self.unittest_client.post("/v3/demo/post_params/", {"param1": "value"})
self.assertEqual(
response.data, {"param1": "value", "bk_username": "bk_demo_unittest", "bk_app_code": "bk_demo_unittest"}
)
class DemoViewTest(TestCase):
databases = "__all__"
unittest_client = UnittestClient()
def test_url_to_simple_flow(self):
found = resolve("/v3/demo/flows/1/")
self.assertEqual(found.func.cls, SimpleFlowViewSet)
def test_url_to_instance_view(self):
found = resolve("/v3/demo/instances/")
self.assertEqual(found.func.cls, InstanceViewset)
def test_instances_list_view_response(self):
found = resolve("/v3/demo/instances/")
self.assertEqual(found.func.cls, InstanceViewset)
response = self.unittest_client.get("/v3/demo/instances/")
self.assertEqual(response.data, list(range(100000)))
    def test_instance_create_view_response(self):
        obj_dict = {"id": 1, "field1": "ins", "field2": 1, "field3": "test"}
        # the patcher must actually be applied (as a context manager) for the mock to take effect
        with mock.patch("demo.models.DemoModel.objects.create", return_value=DemoModel(**obj_dict)):
            response = self.unittest_client.post("/v3/demo/instances/", data=obj_dict)
        self.assertEqual(response.data["field3"], "test")
def test_instance_destroy_view_response(self):
DemoModel.objects.create(**{"id": 1, "field1": "ins", "field2": 1, "field3": "test"})
mock.patch(
"demo.models.DemoModel.objects.get",
return_value=DemoModel(**{"id": 1, "field1": "ins", "field2": 1, "field3": "test"}),
)
mock.patch("demo.models.DemoModel.objects.delete", return_value=None)
response = self.unittest_client.delete("/v3/demo/instances/1/")
self.assertEqual(response.message, "ok")
| python |
"""Consts used by pilight."""
CONF_DIMLEVEL_MAX = "dimlevel_max"
CONF_DIMLEVEL_MIN = "dimlevel_min"
CONF_ECHO = "echo"
CONF_OFF = "off"
CONF_OFF_CODE = "off_code"
CONF_OFF_CODE_RECEIVE = "off_code_receive"
CONF_ON = "on"
CONF_ON_CODE = "on_code"
CONF_ON_CODE_RECEIVE = "on_code_receive"
CONF_SYSTEMCODE = "systemcode"
CONF_UNIT = "unit"
CONF_UNITCODE = "unitcode"
| python |
"""Shared Handlers
This file is used to drive the handlers for the following intents:
Intent Handler
====== =======
ChangeUnitsIntent ChangeUnitsIntentHandler
"""
import ask_sdk_core.utils as ask_utils
from ask_sdk_core.skill_builder import SkillBuilder
from ask_sdk_core.dispatch_components import AbstractRequestHandler
from ask_sdk_core.dispatch_components import AbstractExceptionHandler
from ask_sdk_core.handler_input import HandlerInput
class ChangeUnitsHandler(AbstractRequestHandler):
"""Handler for Change units Intent."""
def can_handle(self, handler_input):
# type: (HandlerInput) -> bool
return ask_utils.is_intent_name("ChangeUnitsIntent")(handler_input)
def handle(self, handler_input):
# type: (HandlerInput) -> Response
slots = handler_input.request_envelope.request.intent.slots
units = slots['units'].value
speak_output = "Your units are now," + str(units) + " "
handler_input.attributes_manager.session_attributes["Units"] = str(units)
return (
handler_input.response_builder
.speak(speak_output)
.ask(speak_output)
.response
)
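# Minimal wiring sketch (added for illustration): this module imports SkillBuilder but never
# registers the handler, so the lines below show how that registration would typically look.
# The `lambda_handler` name is an assumption -- the real skill entry point may assemble this elsewhere.
sb = SkillBuilder()
sb.add_request_handler(ChangeUnitsHandler())
lambda_handler = sb.lambda_handler()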
| python |
try:
    # reload() is a builtin on Python 2 but must be imported from importlib on Python 3
    from importlib import reload
except ImportError:
    pass
from . import base
reload(base)
from . import xref
reload(xref)
from . import line
reload(line)
from . import function
reload(function)
from . import switch
reload(switch)
from . import instruction
reload(instruction)
from . import segment
reload(segment)
from .base import *
from .line import Line, lines
from .function import Function, functions
from .switch import Switch, is_switch
from .segment import Segment, segments | python |
import ever as er
import torch.nn as nn
from core.mixin import ChangeMixin
from module.segmentation import Segmentation
@er.registry.MODEL.register()
class ChangeStar(er.ERModule):
def __init__(self, config):
super().__init__(config)
segmentation = Segmentation(self.config.segmenation)
layers = [nn.Conv2d(self.config.classifier.in_channels, self.config.classifier.out_channels, 3, 1, 1),
nn.UpsamplingBilinear2d(scale_factor=self.config.classifier.scale)]
classifier = nn.Sequential(*layers)
self.changemixin = ChangeMixin(segmentation, classifier, self.config.detector, self.config.loss_config)
def forward(self, x, y=None):
if self.training or x.size(1) == 6:
# segmentation + change detection
return self.changemixin(x, y)
if x.size(1) == 3:
# only segmentation
seg_logit = self.changemixin.classify(self.changemixin.extract_feature(x))
return seg_logit.sigmoid()
def set_default_config(self):
self.config.update(dict(
segmenation=dict(),
classifier=dict(
in_channels=256,
out_channels=1,
scale=4.0
),
detector=dict(
name='convs',
in_channels=256 * 2,
inner_channels=16,
out_channels=1,
num_convs=4,
),
loss_config=dict(
semantic=dict(ignore_index=-1),
change=dict(ignore_index=-1)
)
))
def log_info(self):
return dict(
cfg=self.config
)
| python |
import json
import os
import sys
import urllib.error
from http import HTTPStatus
from typing import Generator
from urllib.request import Request
from urllib.request import urlopen
from pyro.Comparators import endswith
from pyro.Remotes.RemoteBase import RemoteBase
class BitbucketRemote(RemoteBase):
def _fetch_payloads(self, request_url: str) -> Generator:
"""
Recursively generates payloads from paginated responses
"""
request = Request(request_url)
try:
response = urlopen(request, timeout=30)
except urllib.error.HTTPError as e:
status: HTTPStatus = HTTPStatus(e.code)
yield 'Failed to load remote: "%s" (%s %s)' % (request_url, e.code, status.phrase)
sys.exit(1)
if response.status != 200:
status: HTTPStatus = HTTPStatus(response.status)
yield 'Failed to load remote: "%s" (%s %s)' % (request_url, response.status, status.phrase)
sys.exit(1)
payload: dict = json.loads(response.read().decode('utf-8'))
yield payload
if 'next' in payload:
yield from self._fetch_payloads(payload['next'])
def fetch_contents(self, url: str, output_path: str) -> Generator:
"""
Downloads files from URL to output path
"""
request_url = self.extract_request_args(url)
script_count: int = 0
for payload in self._fetch_payloads(request_url.url):
for payload_object in payload['values']:
payload_object_type = payload_object['type']
target_path = os.path.normpath(os.path.join(output_path, request_url.owner, request_url.repo, payload_object['path']))
download_url = payload_object['links']['self']['href']
if payload_object_type == 'commit_file':
# we only care about scripts
if not endswith(download_url, '.psc', ignorecase=True):
continue
file_response = urlopen(download_url, timeout=30)
if file_response.status != 200:
yield f'Failed to download ({file_response.status}): "{download_url}"'
continue
os.makedirs(os.path.dirname(target_path), exist_ok=True)
with open(target_path, mode='w+b') as f:
f.write(file_response.read())
script_count += 1
elif payload_object_type == 'commit_directory':
yield from self.fetch_contents(download_url, output_path)
if script_count > 0:
yield f'Downloaded {script_count} scripts from "{request_url.url}"'
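# Usage sketch (illustrative only, kept as comments): the constructor keyword and the URL/output
# path below are placeholders and assumptions -- RemoteBase defines the actual initialisation
# contract, and this module only documents fetch_contents()/_fetch_payloads().
#
#     remote = BitbucketRemote(access_token='')   # assumed keyword, see RemoteBase
#     for message in remote.fetch_contents(
#             'https://bitbucket.org/owner/repo/src/master/Scripts', 'build/scripts'):
#         print(message)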
| python |
from functools import cached_property
from typing import Union
from wtforms import DecimalField, IntegerField
from app.data_models.answer_store import AnswerStore
from app.forms.field_handlers.field_handler import FieldHandler
from app.forms.fields import DecimalFieldWithSeparator, IntegerFieldWithSeparator
from app.forms.validators import DecimalPlaces, NumberCheck, NumberRange
from app.questionnaire import Location
from app.settings import MAX_NUMBER
class NumberHandler(FieldHandler):
MANDATORY_MESSAGE_KEY = "MANDATORY_NUMBER"
def __init__(
self,
answer_schema: dict,
error_messages: dict = None,
answer_store: AnswerStore = None,
metadata: dict = None,
location: Location = None,
disable_validation: bool = False,
question_title: str = None,
):
super().__init__(
answer_schema,
error_messages,
answer_store,
metadata,
location,
disable_validation,
question_title,
)
self.references = self.get_field_references()
@cached_property
def max_decimals(self):
return self.answer_schema.get("decimal_places", 0)
@cached_property
def validators(self):
validate_with = []
if self.disable_validation is False:
validate_with = super().validators + self._get_number_field_validators()
return validate_with
def get_field(self) -> Union[DecimalField, IntegerField]:
field_type = (
DecimalFieldWithSeparator
if self.max_decimals > 0
else IntegerFieldWithSeparator
)
return field_type(
label=self.label, validators=self.validators, description=self.guidance
)
def get_field_references(self):
schema_minimum = self.answer_schema.get("minimum", {})
schema_maximum = self.answer_schema.get("maximum", {})
minimum = self.get_schema_value(schema_minimum) if schema_minimum else 0
maximum = (
self.get_schema_value(schema_maximum) if schema_maximum else MAX_NUMBER
)
return {
"min_exclusive": schema_minimum.get("exclusive", False),
"max_exclusive": schema_maximum.get("exclusive", False),
"minimum": minimum,
"maximum": maximum,
}
def _get_number_field_validators(self):
answer_errors = self.error_messages.copy()
for error_key in self.validation_messages.keys():
answer_errors[error_key] = self.get_validation_message(error_key)
return [
NumberCheck(answer_errors["INVALID_NUMBER"]),
NumberRange(
minimum=self.references["minimum"],
minimum_exclusive=self.references["min_exclusive"],
maximum=self.references["maximum"],
maximum_exclusive=self.references["max_exclusive"],
messages=answer_errors,
currency=self.answer_schema.get("currency"),
),
DecimalPlaces(max_decimals=self.max_decimals, messages=answer_errors),
]
| python |
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various command line arguments and configuration file parameters."""
import os
from test_framework.test_framework import bitcoinRTestFramework
class ConfArgsTest(bitcoinRTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def test_config_file_parser(self):
# Assume node is stopped
inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf')
with open(os.path.join(self.nodes[0].datadir, 'bitcoinr.conf'), 'a', encoding='utf-8') as conf:
conf.write('includeconf={}\n'.format(inc_conf_file_path))
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('-dash=1\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: -dash=1, options in configuration file must be specified without leading -')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('nono\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
def run_test(self):
self.stop_node(0)
self.test_config_file_parser()
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
default_data_dir = self.nodes[0].datadir
new_data_dir = os.path.join(default_data_dir, 'newdatadir')
new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2')
# Check that using -datadir argument on non-existent directory fails
self.nodes[0].datadir = new_data_dir
self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
# Check that using non-existent datadir in conf file fails
conf_file = os.path.join(default_data_dir, "bitcoinr.conf")
# datadir needs to be set before [regtest] section
conf_file_contents = open(conf_file, encoding='utf8').read()
with open(conf_file, 'w', encoding='utf8') as f:
f.write("datadir=" + new_data_dir + "\n")
f.write(conf_file_contents)
# Temporarily disabled, because this test would access the user's home dir (~/.bitcoinr)
#self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
# Create the directory and ensure the config file now works
os.mkdir(new_data_dir)
# Temporarily disabled, because this test would access the user's home dir (~/.bitcoinr)
#self.start_node(0, ['-conf='+conf_file, '-wallet=w1'])
#self.stop_node(0)
#assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'wallets', 'w1'))
# Ensure command line argument overrides datadir in conf
os.mkdir(new_data_dir_2)
self.nodes[0].datadir = new_data_dir_2
self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file, '-wallet=w2'])
assert os.path.exists(os.path.join(new_data_dir_2, 'regtest', 'wallets', 'w2'))
if __name__ == '__main__':
ConfArgsTest().main()
| python |
#
# hdg-from -- Generate HDG files for GEMSS
#
# Copyright (C) 2017 Di WU
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
#
# Compatibility with Python 2.7
from __future__ import absolute_import, division, print_function, unicode_literals
from argparse import ArgumentParser
from datetime import datetime
from sys import argv, stdout
from hdgfrom.flow import Flow, Unit
from hdgfrom.adapters import FileFormats, AdapterLibrary
from hdgfrom.errors import InvalidDateError
class Arguments:
"""
Encapsulate the arguments received from the command line
"""
@staticmethod
def read_from(command_line):
parser = Arguments._prepare_parser()
arguments = parser.parse_args(command_line)
return Arguments(
input_file=arguments.input_file,
input_format=arguments.format,
start_date=arguments.start_date,
user_name=arguments.user_name,
water_body=arguments.water_body,
output_file=arguments.output,
unit=arguments.unit
)
@staticmethod
def _prepare_parser():
parser = ArgumentParser(
"hdg-from",
description="Generate HDG file for GEMSS")
parser.add_argument(
"input_file",
help="The file that must be converted to HDG")
parser.add_argument(
"-f",
"--format",
choices=["swmm"],
default="swmm",
help="Format of the input file")
parser.add_argument(
"-o", "--output",
help="The HDG file to generate")
parser.add_argument(
"-s", "--start-date",
default="2017-1-1T12:00:00",
help="Start date used to convert timestamp (i.e., YYYY-MM-DDThh:mm:ss")
parser.add_argument(
"-n", "--user-name",
help="The name of the user that create the file")
parser.add_argument(
"-u", "--unit",
choices=["CMS", "CFS", "MGD", "GPM", "CMD", "CMH"],
default="CMD",
help="The flow rate unit to use in the HDG file")
parser.add_argument(
"-w", "--water-body",
help="The name of the water body")
return parser
def __init__(self, input_file, input_format, start_date, user_name,
water_body, output_file, unit):
self._input_file = input_file
self._input_format = FileFormats.match(input_format)
self._start_date = self._validate(start_date)
self._user_name = user_name
self._water_body = water_body
self._output_file = output_file
self._unit = Unit.by_name(unit)
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
@staticmethod
def _validate(text):
try:
return datetime.strptime(text, Arguments.DATE_FORMAT)
except ValueError:
raise InvalidDateError(text)
@property
def input_file(self):
return self._input_file
@property
def input_format(self):
return self._input_format
@property
def output_file(self):
if self._output_file is None:
return self._input_file.replace(".txt", ".hdg")
return self._output_file
@property
def start_date(self):
return self._start_date
@property
def include_user_name(self):
return self.user_name is not None
@property
def user_name(self):
return self._user_name
@property
def include_water_body(self):
return self._water_body is not None
@property
def water_body(self):
return self._water_body
@property
def unit(self):
return self._unit
class Display:
"""
Encapsulate printing messages on the console.
"""
INPUT_FILE_LOADED = (
"{count} observation(s) loaded from '{file}'.\n"
)
CONVERSION_COMPLETE = (
"File '{file}' successfully generated.\n"
)
WARNING_ALL_ZERO_FLOW = (
"WARNING: The conversion to '{unit}' leads to only near-zero values\n"
" You may need a different unit.\n"
)
ERROR_INPUT_FILE_NOT_FOUND = (
"ERROR: Unable to open the input file '{file}'.\n"
" {hint}\n"
)
ERROR_INVALID_DATE = (
"ERROR: The value '{date}' is not a valid ISO 8601 date.\n"
" ISO 8601 format is YYYY-MM-DDThh:mm:ss.\n"
)
def __init__(self, output):
self._output = output or stdout
def input_file_loaded(self, path, flow):
self._display(self.INPUT_FILE_LOADED,
file=path,
count=len(flow.observations))
def conversion_complete(self, path):
self._display(self.CONVERSION_COMPLETE,
file=path)
def warn_about_only_zeros(self, unit):
self._display(self.WARNING_ALL_ZERO_FLOW,
unit=unit.symbol)
def error_input_file_not_found(self, arguments, error):
self._display(self.ERROR_INPUT_FILE_NOT_FOUND,
file=arguments.input_file,
hint=error.strerror)
def error_invalid_date(self, date):
self._display(self.ERROR_INVALID_DATE,
date=date)
def _display(self, message, **arguments):
text = message.format(**arguments)
self._output.write(text)
class CLI:
"""
Parse the command line and then read the flow from the input file,
and write the same flow down as an HDG file.
"""
def __init__(self, adapters=None, output=None):
self._adapters = adapters or AdapterLibrary()
self._display = Display(output)
def run(self, command_line):
try:
arguments = Arguments.read_from(command_line)
flow = self._read_flow_from(arguments.input_format, arguments.input_file)
flow = self._convert_to_unit(flow, arguments.unit)
self._adjust_metadata(flow, arguments)
self._write_flow_to(flow, FileFormats.HDG, arguments.output_file)
except InvalidDateError as error:
self._display.error_invalid_date(error.date)
except IOError as e:
self._display.error_input_file_not_found(arguments, e)
def _read_flow_from(self, file_format, path):
with open(path, "r") as input_file:
flow = self._adapters.read_from(file_format, input_file)
self._display.input_file_loaded(path, flow)
return flow
def _convert_to_unit(self, flow, unit):
new_flow = flow.convert_to(unit)
if new_flow.contains_only_values_smaller_than(1e-2):
self._display.warn_about_only_zeros(new_flow.unit)
return new_flow
def _adjust_metadata(self, flow, arguments):
flow.start_date = arguments.start_date
if arguments.include_user_name:
flow.user_name = arguments.user_name
if arguments.include_water_body:
flow.water_body = arguments.water_body
def _write_flow_to(self, flow, format, path):
with open(path, "w") as output:
self._adapters.write_to(flow, format, output)
self._display.conversion_complete(path)
def main():
"""
Entry point of the program
"""
CLI().run(argv[1:])
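# Usage sketch (illustrative only; assumes the package exposes main() as the "hdg-from"
# console script named in the ArgumentParser above, and the file name is a placeholder):
#
#     hdg-from observations.txt --format swmm --unit CMS -o observations.hdg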
| python |
from PyQt5.QtWidgets import QAbstractButton, QSizePolicy
from PyQt5.Qt import QPainter, QSize
class QIconButton(QAbstractButton):
def __init__(self, image=None, imageSelected=None, parent=None):
super(QIconButton, self).__init__(parent)
self.image = image
if imageSelected is None:
self.imageSelected = image
else:
self.imageSelected = imageSelected
self.pressed.connect(self.update)
self.pressed.connect(self.toggle)
self.setInitialSizePolicy()
self.status = False
def setStatus(self, boolean):
self.status = boolean
self.update()
def paintEvent(self, event):
if self.underMouse():
img = self.imageSelected
elif self.isDown():
img = self.imageSelected
elif self.status:
img = self.imageSelected
else:
img = self.image
painter = QPainter(self)
painter.drawPixmap(event.rect(), img)
def enterEvent(self, event):
self.update()
def leaveEvent(self, event):
self.update()
def sizeHint(self):
return QSize(50, 50)
def setIcons(self, image, imageSelected=None):
self.image = image
if imageSelected is None:
self.imageSelected = image
else:
self.imageSelected = imageSelected
self.update()
def setInitialSizePolicy(self):
sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())
self.setSizePolicy(sizePolicy)
def toggle(self):
self.status = not self.status | python |
import time
import os
"""
一些对象
Writer
Reader
"""
class _OO:
def orun(self, ident):
ll = len(self.data)
wres = []
if type(self.handler) is type:
flag = issubclass(self.handler, HandleUnit)
else:
flag = False
try:
for idx in range(ll):
if idx < self.begin or idx >= self.end:
continue
item = self.data[idx]
try:
if flag:
"""此时handler是类需要执行实例方法,入参为item"""
res = self.handler(item).handle()
else:
"""此时handler是方法"""
res = self.handler(item)
except Exception as ee:
print(f"ident:{ident} index:{idx} item:{self.data[idx]} error {ee}")
raise ee
if self.test:
print(f'ident:{ident} num:{idx} item:{item} res:{res}')
wres.append([str(ident), str(idx), str(item), str(res)])
if self.sleep:
time.sleep(self.sleep)
finally:
if wres:
wr = Writer(self.filename, path=self.path, suffix=self.suffix)
wr.write(wres)
class HandleUnit:
"""
处理单元
"""
def handle(self):
"""
处理方法,子对象需要实现此方法,用作处理函数
"""
pass
class Writer:
def __init__(self, filename, suffix='txt', delimiter='_#_', path='', title_translate=None):
self.suffix = suffix
self.delimiter = delimiter
self.path = os.path.join(path, f'{filename}.{suffix}')
self.title_translate = title_translate
def _workbook_write(self, ws, data, is_dict=False):
if is_dict:
titles = data[0].keys()
            if self.title_translate:
                # the header row must stay a single row (a list of translated titles)
                tran = [[self.title_translate[i] for i in titles]]
            else:
                tran = [list(titles)]
for item in data:
row = []
for title in titles:
row.append(item[title])
tran.append(row)
data = tran
for row, item in enumerate(data):
for col, dat in enumerate(item):
ws.write(row, col, dat)
def write(self, data):
if self.suffix == 'xlsx':
from xlsxwriter import workbook
wb = workbook.Workbook(self.path)
ws = wb.add_worksheet()
self._workbook_write(ws, data, isinstance(data[0], dict))
wb.close()
elif self.suffix == 'txt':
ff = open(self.path, 'w', encoding='utf8')
for item in data:
line = self.delimiter.join(item)
ff.write(line + '\n')
ff.close()
class Reader:
def __init__(self, filename, suffix='txt', delimiter='_#_', path=''):
self.suffix = suffix
self.delimiter = delimiter
self.path = os.path.join(path, f'{filename}.{suffix}')
def read(self):
data = []
if self.suffix == 'txt':
ff = open(self.path, 'r', encoding='utf8')
for i in ff.readlines():
da = i.strip()
if da:
data.append(da.split(self.delimiter))
ff.close()
return data
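if __name__ == '__main__':
    # Round-trip sketch (added for illustration; 'example' is a placeholder file name):
    # the Writer produces 'example.txt' with '_#_'-delimited rows, and the Reader loads it back.
    Writer('example').write([['1', 'alpha'], ['2', 'beta']])
    print(Reader('example').read())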
| python |
"""Faça um programa que tenha uma função notas() que pode receber várias notas de alunos e vai retornar um dicionário
com as seguintes informações:
Quantidade de notas
A maior nota
A menor nota
A média da turma
Situação (opcional)
Adicione também as docstrings da função."""
def notas(*n, sit=False):
"""
-> Função para analisar notas e situações de vários alunos.
:param n: uma ou mais notas dos alunos (aceita várias)
:param sit: valor opcional, indicando se deve ou não adicionar a situação
:return: dicionário com várias informações sobre a situação da turma.
"""
r = {}
r["total"] = len(n)
r["maior"] = max(n)
r["menor"] = min(n)
r["media"] = sum(n) / len(n)
if sit:
if r["media"] >= 7:
r["situação"] = "BOA"
elif r["media"] >= 5:
r["situação"] = "RAZOÁVEL"
else:
r["situação"] = "RUIM"
return r
resp = notas(5.5, 2.5, 1.5, sit=True)
print(resp)
help(notas) | python |
# This file was originally authored by
# Brandon Davidson from the University of Oregon.
# The Rocks Developers thank Brandon for his contribution.
#
# @copyright@
# Copyright (c) 2006 - 2019 Teradata
# All rights reserved. Stacki(r) v5.x stacki.com
# https://github.com/Teradata/stacki/blob/master/LICENSE.txt
# @copyright@
#
# @rocks@
# Copyright (c) 2000 - 2010 The Regents of the University of California
# All rights reserved. Rocks(r) v5.4 www.rocksclusters.org
# https://github.com/Teradata/stacki/blob/master/LICENSE-ROCKS.txt
# @rocks@
import pathlib
import shutil
import stack.commands
import stack.deferable
from stack.argument_processors.pallet import PalletArgProcessor
from stack.exception import ArgRequired
class command(PalletArgProcessor, stack.commands.remove.command):
pass
class Command(command):
"""
Remove a pallet from both the database and filesystem.
<arg type='string' name='pallet' repeat='1'>
List of pallets. This should be the pallet base name (e.g., base, hpc,
kernel).
</arg>
<param type='string' name='version'>
The version number of the pallet to be removed. If no version number is
supplied, then all versions of a pallet will be removed.
</param>
<param type='string' name='release'>
The release id of the pallet to be removed. If no release id is
supplied, then all releases of a pallet will be removed.
</param>
<param type='string' name='arch'>
The architecture of the pallet to be removed. If no architecture is
supplied, then all architectures will be removed.
</param>
<param type='string' name='os'>
The OS of the pallet to be removed. If no OS is
supplied, then all OSes will be removed.
</param>
<param type='bool' name='run_hooks'>
Controls whether pallets hooks are run. This defaults to True.
</param>
<example cmd='remove pallet kernel'>
Remove all versions and architectures of the kernel pallet.
</example>
<example cmd='remove pallet ganglia version=5.0 arch=i386'>
Remove version 5.0 of the Ganglia pallet for i386 nodes.
</example>
<related>add pallet</related>
<related>enable pallet</related>
<related>disable pallet</related>
<related>list pallet</related>
<related>create pallet</related>
"""
@stack.deferable.rewrite_frontend_repo_file
def run(self, params, args):
if len(args) < 1:
raise ArgRequired(self, 'pallet')
run_hooks, = self.fillParams([
('run_hooks', True),
])
self.beginOutput()
for pallet in self.get_pallets(args, params):
# Run any hooks before we regenerate the repo file and remove the pallet.
if run_hooks:
self.run_pallet_hooks(operation="remove", pallet_info=pallet)
self.clean_pallet(pallet)
self.endOutput(padChar='')
def clean_pallet(self, pallet):
"""
Remove pallet files and database entry for this arch and OS.
"""
self.addOutput('',
f'Removing {pallet.name} {pallet.version}-{pallet.rel}-'
f'{pallet.os}-{pallet.arch} pallet ...'
)
# Remove the pallet files and as much as the tree as possible
tree = [
'/export/stack/pallets', pallet.name, pallet.version,
pallet.rel, pallet.os, pallet.arch
]
# Walk up the tree to clean it up, but stop at the top directory
while len(tree) > 1:
path = pathlib.Path().joinpath(*tree)
# if for some reason the directory is already partially deleted
if not path.exists():
tree.pop()
continue
# The arch is the bottom of the tree, we remove everything
if tree[-1] == pallet.arch:
shutil.rmtree(path)
else:
# Just remove the directory if possible
try:
path.rmdir()
except OSError:
# Directory wasn't empty, we are done
break
# Move up a level in the tree
tree.pop()
# remove the pallet hooks for this pallet
# not all pallets (notably retail distro media) will have hooks
pallet_hook_dir = pathlib.Path(self.get_pallet_hook_directory(pallet_info=pallet))
if pallet_hook_dir.exists():
shutil.rmtree(self.get_pallet_hook_directory(pallet_info=pallet))
# Remove the pallet from the database
self.db.execute('delete from rolls where id=%s', (pallet.id,))
| python |
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from typing import TYPE_CHECKING
from unittest import skipUnless
from urllib.error import ContentTooShortError, HTTPError
import torch
from parameterized import parameterized
from monai.networks import eval_mode
from monai.networks.nets import (
BlockArgs,
EfficientNetBN,
EfficientNetBNFeatures,
drop_connect,
get_efficientnet_image_size,
)
from monai.utils import optional_import
from tests.utils import skip_if_quick, test_pretrained_networks, test_script_save
if TYPE_CHECKING:
import torchvision
has_torchvision = True
else:
torchvision, has_torchvision = optional_import("torchvision")
if TYPE_CHECKING:
import PIL
has_pil = True
else:
PIL, has_pil = optional_import("PIL")
def get_model_names():
return [f"efficientnet-b{d}" for d in range(8)]
def get_expected_model_shape(model_name):
model_input_shapes = {
"efficientnet-b0": 224,
"efficientnet-b1": 240,
"efficientnet-b2": 260,
"efficientnet-b3": 300,
"efficientnet-b4": 380,
"efficientnet-b5": 456,
"efficientnet-b6": 528,
"efficientnet-b7": 600,
}
return model_input_shapes[model_name]
def get_block_args():
# test string list
return [
"r1_k3_s11_e1_i32_o16_se0.25",
"r2_k3_s22_e6_i16_o24_se0.25",
"r2_k5_s22_e6_i24_o40_se0.25",
"r3_k3_s22_e6_i40_o80_se0.25",
"r3_k5_s11_e6_i80_o112_se0.25",
"r4_k5_s22_e6_i112_o192_se0.25",
"r1_k3_s11_e6_i192_o320_se0.25",
"r1_k3_s11_e1_i32_o16_se0.25_noskip",
"r2_k3_s22_e6_i16_o24_se0.25_noskip",
"r2_k5_s22_e6_i24_o40_se0.25_noskip",
"r3_k3_s22_e6_i40_o80_se0.25_noskip",
"r3_k5_s11_e6_i80_o112_se0.25_noskip",
"r4_k5_s22_e6_i112_o192_se0.25_noskip",
"r1_k3_s11_e6_i192_o320_se0.25_noskip",
]
def make_shape_cases(
models,
spatial_dims,
batches,
pretrained,
in_channels=3,
num_classes=1000,
norm=("batch", {"eps": 1e-3, "momentum": 0.01}),
):
ret_tests = []
for spatial_dim in spatial_dims: # selected spatial_dims
for batch in batches: # check single batch as well as multiple batch input
for model in models: # selected models
for is_pretrained in pretrained: # pretrained or not pretrained
kwargs = {
"model_name": model,
"pretrained": is_pretrained,
"progress": False,
"spatial_dims": spatial_dim,
"in_channels": in_channels,
"num_classes": num_classes,
"norm": norm,
}
ret_tests.append(
[
kwargs,
(batch, in_channels) + (get_expected_model_shape(model),) * spatial_dim,
(batch, num_classes),
]
)
return ret_tests
# create list of selected models to speed up redundant tests
# only test the models B0, B3, B7
SEL_MODELS = [get_model_names()[i] for i in [0, 3, 7]]
# pretrained=False cases
# 1D models are cheap so do test for all models in 1D
CASES_1D = make_shape_cases(
models=get_model_names(), spatial_dims=[1], batches=[1, 4], pretrained=[False], in_channels=3, num_classes=1000
)
# 2D and 3D models are expensive so use selected models
CASES_2D = make_shape_cases(
models=SEL_MODELS,
spatial_dims=[2],
batches=[1, 4],
pretrained=[False],
in_channels=3,
num_classes=1000,
norm="instance",
)
CASES_3D = make_shape_cases(
models=[SEL_MODELS[0]],
spatial_dims=[3],
batches=[1],
pretrained=[False],
in_channels=3,
num_classes=1000,
norm="batch",
)
# pretrained=True cases
# tabby kitty test with pretrained model
# needs 'testing_data/kitty_test.jpg'
# image from: https://commons.wikimedia.org/wiki/File:Tabby_cat_with_blue_eyes-3336579.jpg
CASES_KITTY_TRAINED = [
(
{
"model_name": "efficientnet-b0",
"pretrained": True,
"progress": False,
"spatial_dims": 2,
"in_channels": 3,
"num_classes": 1000,
"norm": ("batch", {"eps": 1e-3, "momentum": 0.01}),
"adv_prop": False,
},
os.path.join(os.path.dirname(__file__), "testing_data", "kitty_test.jpg"),
282, # ~ tiger cat
),
(
{
"model_name": "efficientnet-b3",
"pretrained": True,
"progress": False,
"spatial_dims": 2,
"in_channels": 3,
"num_classes": 1000,
},
os.path.join(os.path.dirname(__file__), "testing_data", "kitty_test.jpg"),
282, # ~ tiger cat
),
(
{
"model_name": "efficientnet-b7",
"pretrained": True,
"progress": False,
"spatial_dims": 2,
"in_channels": 3,
"num_classes": 1000,
},
os.path.join(os.path.dirname(__file__), "testing_data", "kitty_test.jpg"),
282, # ~ tiger cat
),
]
# varying num_classes and in_channels
CASES_VARIATIONS = []
# change num_classes test
# 10 classes
# 2D
CASES_VARIATIONS.extend(
make_shape_cases(
models=SEL_MODELS, spatial_dims=[2], batches=[1], pretrained=[False, True], in_channels=3, num_classes=10
)
)
# 3D
CASES_VARIATIONS.extend(
make_shape_cases(
models=[SEL_MODELS[0]], spatial_dims=[3], batches=[1], pretrained=[False], in_channels=3, num_classes=10
)
)
# change in_channels test
# 1 channel
# 2D
CASES_VARIATIONS.extend(
make_shape_cases(
models=SEL_MODELS, spatial_dims=[2], batches=[1], pretrained=[False, True], in_channels=1, num_classes=1000
)
)
# 8 channel
# 2D
CASES_VARIATIONS.extend(
make_shape_cases(
models=SEL_MODELS, spatial_dims=[2], batches=[1], pretrained=[False, True], in_channels=8, num_classes=1000
)
)
# 3D
CASES_VARIATIONS.extend(
make_shape_cases(
models=[SEL_MODELS[0]], spatial_dims=[3], batches=[1], pretrained=[False], in_channels=1, num_classes=1000
)
)
CASE_EXTRACT_FEATURES = [
(
{
"model_name": "efficientnet-b8",
"pretrained": True,
"progress": False,
"spatial_dims": 2,
"in_channels": 2,
"adv_prop": True,
},
[1, 2, 224, 224],
([1, 32, 112, 112], [1, 56, 56, 56], [1, 88, 28, 28], [1, 248, 14, 14], [1, 704, 7, 7]),
)
]
class TestEFFICIENTNET(unittest.TestCase):
@parameterized.expand(CASES_1D + CASES_2D + CASES_3D + CASES_VARIATIONS)
def test_shape(self, input_param, input_shape, expected_shape):
device = "cuda" if torch.cuda.is_available() else "cpu"
try:
# initialize model
net = EfficientNetBN(**input_param).to(device)
except (ContentTooShortError, HTTPError, RuntimeError) as e:
print(str(e))
return # skipping the tests because of http errors
# run inference with random tensor
with eval_mode(net):
result = net(torch.randn(input_shape).to(device))
# check output shape
self.assertEqual(result.shape, expected_shape)
@parameterized.expand(CASES_1D + CASES_2D)
def test_non_default_shapes(self, input_param, input_shape, expected_shape):
device = "cuda" if torch.cuda.is_available() else "cpu"
try:
# initialize model
net = EfficientNetBN(**input_param).to(device)
except (ContentTooShortError, HTTPError, RuntimeError) as e:
print(str(e))
return # skipping the tests because of http errors
# override input shape with different variations
num_dims = len(input_shape) - 2
non_default_sizes = [128, 256, 512]
for candidate_size in non_default_sizes:
input_shape = input_shape[0:2] + (candidate_size,) * num_dims
# run inference with random tensor
with eval_mode(net):
result = net(torch.randn(input_shape).to(device))
# check output shape
self.assertEqual(result.shape, expected_shape)
@parameterized.expand(CASES_KITTY_TRAINED)
@skip_if_quick
@skipUnless(has_torchvision, "Requires `torchvision` package.")
@skipUnless(has_pil, "Requires `pillow` package.")
def test_kitty_pretrained(self, input_param, image_path, expected_label):
device = "cuda" if torch.cuda.is_available() else "cpu"
# open image
image_size = get_efficientnet_image_size(input_param["model_name"])
img = PIL.Image.open(image_path)
# define ImageNet transforms
tfms = torchvision.transforms.Compose(
[
torchvision.transforms.Resize(image_size),
torchvision.transforms.CenterCrop(image_size),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
]
)
# preprocess and prepare image tensor
img = tfms(img).unsqueeze(0).to(device)
# initialize a pretrained model
net = test_pretrained_networks(EfficientNetBN, input_param, device)
# run inference
with eval_mode(net):
result = net(img)
pred_label = torch.argmax(result, dim=-1)
# check output label
self.assertEqual(pred_label, expected_label)
def test_drop_connect_layer(self):
p_list = [float(d + 1) / 10.0 for d in range(9)]
# testing 1D, 2D and 3D shape
for rand_tensor_shape in [(512, 16, 4), (384, 16, 4, 4), (256, 16, 4, 4, 4)]:
# test validation mode, out tensor == in tensor
training = False
for p in p_list:
in_tensor = torch.rand(rand_tensor_shape) + 0.1
out_tensor = drop_connect(in_tensor, p, training=training)
self.assertTrue(torch.equal(out_tensor, in_tensor))
# test training mode, sum((out tensor * (1.0 - p)) != in tensor)/out_tensor.size() == p
# use tolerance of 0.175 to account for rounding errors due to finite set in/out
tol = 0.175
training = True
for p in p_list:
in_tensor = torch.rand(rand_tensor_shape) + 0.1
out_tensor = drop_connect(in_tensor, p, training=training)
p_calculated = 1.0 - torch.sum(torch.isclose(in_tensor, out_tensor * (1.0 - p))) / float(
in_tensor.numel()
)
p_calculated = p_calculated.cpu().numpy()
self.assertTrue(abs(p_calculated - p) < tol)
def test_block_args_decode(self):
blocks_args_str = get_block_args()
# convert strings to BlockArgs
blocks_args = [BlockArgs.from_string(s) for s in blocks_args_str]
# convert BlockArgs back to string
blocks_args_str_convert = [s.to_string() for s in blocks_args]
# check if converted strings match original
[self.assertEqual(original, converted) for original, converted in zip(blocks_args_str, blocks_args_str_convert)]
    def test_ill_arg(self):
        # wrong spatial_dims
        with self.assertRaises(ValueError):
            EfficientNetBN(model_name="efficientnet-b0", spatial_dims=4)
        # wrong model_name (must be raised in its own context, otherwise it is never reached)
        with self.assertRaises(ValueError):
            EfficientNetBN(model_name="efficientnet-b10", spatial_dims=3)
def test_func_get_efficientnet_input_shape(self):
for model in get_model_names():
result_shape = get_efficientnet_image_size(model_name=model)
expected_shape = get_expected_model_shape(model)
self.assertEqual(result_shape, expected_shape)
def test_script(self):
net = EfficientNetBN(model_name="efficientnet-b0", spatial_dims=2, in_channels=3, num_classes=1000)
net.set_swish(memory_efficient=False) # at the moment custom memory efficient swish is not exportable with jit
test_data = torch.randn(1, 3, 224, 224)
test_script_save(net, test_data)
class TestExtractFeatures(unittest.TestCase):
@parameterized.expand(CASE_EXTRACT_FEATURES)
def test_shape(self, input_param, input_shape, expected_shapes):
device = "cuda" if torch.cuda.is_available() else "cpu"
try:
# initialize model
net = EfficientNetBNFeatures(**input_param).to(device)
except (ContentTooShortError, HTTPError, RuntimeError) as e:
print(str(e))
return # skipping the tests because of http errors
# run inference with random tensor
with eval_mode(net):
features = net(torch.randn(input_shape).to(device))
# check output shape
self.assertEqual(len(features), len(expected_shapes))
for feature, expected_shape in zip(features, expected_shapes):
self.assertEqual(feature.shape, torch.Size(expected_shape))
if __name__ == "__main__":
unittest.main()
| python |
from numba.core.descriptors import TargetDescriptor
from numba.core.options import TargetOptions
from .target import HSATargetContext, HSATypingContext
class HSATargetOptions(TargetOptions):
pass
class HSATargetDesc(TargetDescriptor):
options = HSATargetOptions
typingctx = HSATypingContext()
targetctx = HSATargetContext(typingctx)
| python |
from discord.ext import commands
from ..services import status_service, config_service
from ..helpers import game_mapping_helper
from ..clients import ecs_client
class Config(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(help='Get the current admins')
async def admins(self, ctx):
game = await game_mapping_helper.game_from_context(ctx, self.bot)
if game is not None:
current_admins = await config_service.get_admins(game)
if any(current_admins):
await ctx.send(f'Current admins are: {", ".join(current_admins)}. ' +
'(Restart may be required.)')
else:
await ctx.send('There are no current admins. (Restart may be required.)')
@commands.command(help='Give player(s) admin permissions', name='admins-add')
async def admins_add(self, ctx, *players):
game = await game_mapping_helper.game_from_context(ctx, self.bot)
if game is not None:
await config_service.add_admins(game, players)
await ctx.send('Admins have been added, but **you will need to restart the ' +
'server for this to take effect** (use `!restart`).')
@commands.command(help='Revoke admin permissions for player(s)', name='admins-remove')
async def admins_remove(self, ctx, *players):
game = await game_mapping_helper.game_from_context(ctx, self.bot)
if game is not None:
await config_service.remove_admins(game, players)
await ctx.send('Admins have been removed, but **you will need to restart the ' +
'server for this to take effect** (use `!restart`).')
@commands.command(help='Restart the server',
description='This is required for any config changes to take effect.')
async def restart(self, ctx):
game = await game_mapping_helper.game_from_context(ctx, self.bot)
if game is not None:
await status_service.check_game_is_running(game)
await ctx.send('Restarting server...')
await ecs_client.restart_service(game)
await ctx.send('Server has been restarted :hatching_chick:')
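def setup(bot):
    # Extension entry point sketch (added for illustration): lets `bot.load_extension(...)`
    # register this cog. Assumes the pre-2.0 discord.py API, where add_cog is synchronous;
    # on discord.py 2.x this would need `async def setup` and `await bot.add_cog(...)`.
    bot.add_cog(Config(bot))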
| python |
from django.contrib import admin
# Register your models here.
from .models import Pessoa
admin.site.register(Pessoa) | python |
# Generated by Django 2.1.7 on 2019-03-01 02:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('account', '0018_auto_20190228_1807'),
]
operations = [
migrations.AlterField(
model_name='wxuserintegrallog',
name='log_type',
field=models.CharField(choices=[('replace', '积分换购'), ('share', '分享奖励'), ('grouping', '拼团成功奖励'), ('gift', '店铺赠送'), ('deduction', '店铺扣减')], max_length=128, verbose_name='记录类型'),
),
]
| python |
from quakefeeds import QuakeFeed
from datetime import datetime, timezone
import logging
import tweepy
import io
import os
import json
from urllib.request import urlopen, Request
logging.basicConfig(filename='tweet.bot.log', level=logging.INFO)
logger = logging.getLogger()
base_url = os.getenv("API_HTTPS").rstrip('/')
POINT_ENDPOINT = "api/point"
def check_events(api, last_events=[]):
try:
feed = QuakeFeed("significant", "hour")
if len(feed) == 0:
return last_events
new_last_events = []
for event in feed:
try:
e_id = event['id']
new_last_events.append(e_id)
if e_id in last_events:
continue
what = event['properties']['type']
coordinates = event['geometry']['coordinates']
lon = float(coordinates[0])
lat = float(coordinates[1])
place = event['properties']['place']
magnitude = event['properties']['mag']
depth = coordinates[2]
time_raw = event['properties']['time']
time = datetime.fromtimestamp(float(time_raw) / 1000.0, tz=timezone.utc)
formatted_time = time.strftime("%b %d, %Y - %H:%M:%S")
url = event['properties']['url']
msg = f"{formatted_time}; {what} @ {place}.\nMagnitude: {magnitude}, depth: {depth}\nMore info: {url}"
geo_data = {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [lon, lat],
},
"properties": {
"marker": "true"
}
}
]
}
geojson = json.dumps(geo_data, sort_keys=True)
# Tweet now!
url = f"{base_url}/{POINT_ENDPOINT}/{lon}/{lat}?near=10000&cropped=1"
# get file; Need Browser info to avoid Error 403!
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3',
'Content-Type': 'application/json; charset=utf-8'
}
jsondataasbytes = geojson.encode('utf-8')
req = Request(url=url, headers=headers, data=jsondataasbytes)
req.add_header('Content-Length', len(jsondataasbytes))
fd = urlopen(req)
image_file = io.BytesIO(fd.read())
                # post the quake report with the rendered map image
api.update_with_media(
f"{e_id}.png",
                    status=msg,
lat=lat,
long=lon,
file=image_file
)
logger.info(f"Earthquake_id: {e_id}")
except Exception as e:
logger.warning(f"Error in event routine: {e}", exc_info=True)
continue
except Exception as e:
logger.warning(f"Error in retrieving feed: {e}", exc_info=True)
return new_last_events
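if __name__ == "__main__":
    # Minimal polling sketch (added for illustration). The credential environment variable names
    # below are assumptions -- this module only requires that an authenticated tweepy `api` object
    # be passed to check_events().
    import time
    auth = tweepy.OAuthHandler(os.getenv("CONSUMER_KEY"), os.getenv("CONSUMER_SECRET"))
    auth.set_access_token(os.getenv("ACCESS_TOKEN"), os.getenv("ACCESS_TOKEN_SECRET"))
    twitter_api = tweepy.API(auth)
    seen_events = []
    while True:
        seen_events = check_events(twitter_api, seen_events)
        time.sleep(300)  # the feed window is one hour, so polling every five minutes is plenty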
| python |
import tensorflow as tf
from nn_basic_layers import *
from ops import *
import numpy as np
import os
class FCNNRNN(object):
def __init__(self, config, is_eog=True, is_emg=True):
self.g_enc_depths = [16, 16, 32, 32, 64, 64, 128, 128, 256]
self.d_num_fmaps = [16, 16, 32, 32, 64, 64, 128, 128, 256]
# Placeholders for input, output and dropout
self.config = config
self.is_emg = is_emg
self.is_eog = is_eog
self.input_x = tf.placeholder(tf.float32,shape=[None, self.config.epoch_step, self.config.ntime, self.config.nchannel],name='input_x')
self.input_y = tf.placeholder(tf.float32, shape=[None, self.config.epoch_step, self.config.nclass], name='input_y')
self.dropout_cnn = tf.placeholder(tf.float32, name="dropout_cnn")
self.dropout_rnn = tf.placeholder(tf.float32, name="dropout_rnn")
self.istraining = tf.placeholder(tf.bool, name='istraining') # indicate training for batch normmalization
self.epoch_seq_len = tf.placeholder(tf.int32, [None]) # for the dynamic RNN
X = tf.reshape(self.input_x, [-1, self.config.ntime, self.config.nchannel])
conv_feat = self.all_convolution_block(X,"conv_eeg")
Nfeat = 6*self.g_enc_depths[-1]
conv_feat = tf.reshape(conv_feat, [-1, Nfeat])
print("conv_feat")
print(conv_feat.get_shape())
rnn_input = tf.reshape(conv_feat, [-1, self.config.epoch_seq_len, Nfeat])
with tf.variable_scope("epoch_rnn_layer") as scope:
fw_cell, bw_cell = bidirectional_recurrent_layer(self.config.nhidden,
self.config.nlayer,
input_keep_prob=self.dropout_rnn,
output_keep_prob=self.dropout_rnn)
rnn_out, rnn_state = bidirectional_recurrent_layer_output(fw_cell,
bw_cell,
rnn_input,
self.epoch_seq_len,
scope=scope)
print(rnn_out.get_shape())
self.scores = []
self.predictions = []
with tf.variable_scope("output_layer"):
for i in range(self.config.epoch_step):
score_i = fc(tf.squeeze(rnn_out[:,i,:]),
self.config.nhidden * 2,
self.config.nclass,
name="output",
relu=False)
pred_i = tf.argmax(score_i, 1, name="pred-%s" % i)
self.scores.append(score_i)
self.predictions.append(pred_i)
# calculate cross-entropy loss
self.output_loss = 0
with tf.name_scope("output-loss"):
for i in range(self.config.epoch_step):
output_loss_i = tf.nn.softmax_cross_entropy_with_logits(labels=tf.squeeze(self.input_y[:,i,:]), logits=self.scores[i])
output_loss_i = tf.reduce_sum(output_loss_i, axis=[0])
self.output_loss += output_loss_i
self.output_loss = self.output_loss/self.config.epoch_step
# add on regularization
with tf.name_scope("l2_loss"):
vars = tf.trainable_variables()
l2_loss = tf.add_n([ tf.nn.l2_loss(v) for v in vars])
self.loss = self.output_loss + self.config.l2_reg_lambda*l2_loss
self.accuracy = []
# Accuracy
with tf.name_scope("accuracy"):
for i in range(self.config.epoch_step):
correct_prediction_i = tf.equal(self.predictions[i], tf.argmax(tf.squeeze(self.input_y[:,i,:]), 1))
accuracy_i = tf.reduce_mean(tf.cast(correct_prediction_i, "float"), name="accuracy-%s" % i)
self.accuracy.append(accuracy_i)
def all_convolution_block(self, input, name):
in_dims = input.get_shape().as_list()
print(in_dims)
h_i = input
if len(in_dims) == 2:
h_i = tf.expand_dims(input, -1)
elif len(in_dims) < 2 or len(in_dims) > 3:
raise ValueError('Generator input must be 2-D or 3-D')
kwidth = 31
with tf.variable_scope(name, reuse=tf.AUTO_REUSE) as scope:
for layer_idx, layer_depth in enumerate(self.g_enc_depths):
bias_init = tf.constant_initializer(0.)
h_i_dwn = downconv(h_i, layer_depth, kwidth=kwidth,
init=tf.truncated_normal_initializer(stddev=0.02),
bias_init=bias_init,
name='enc_{}'.format(layer_idx))
print("h_i_dwn")
print(h_i_dwn.get_shape())
print('Downconv {} -> {}'.format(h_i.get_shape(),h_i_dwn.get_shape()))
h_i = h_i_dwn
print('-- Enc: leakyrelu activation --')
h_i = leakyrelu(h_i)
if(layer_idx < len(self.g_enc_depths) - 1):
h_i = dropout(h_i, self.dropout_cnn)
return h_i
| python |
class Rule():
"""This class defines a rule"""
def __init__(self,name):
self.name = name
self.datasets = []
def add_dataset(self,dataset):
self.datasets.append(dataset)
def __str__(self):
return "Rule %s"%(self.name)
class DataSet():
"""This is a dataset"""
def __init__(self,name):
self.name = name
self.conditions = []
def add_condition(self,condition):
self.conditions.append(condition)
def __str__(self):
return "Condition is {name}".format(name=self.name)
class Condition():
"""Condition"""
def __init__(self,attribute):
self.attribute = attribute
self.condition = None
def __str__(self):
return "Condition for attribute %s"%(self.attribute)
class ListCondition(Condition):
"""This takes a list of values to filter"""
def __init__(self,attribute,values_list):
Condition.__init__(self,attribute)
self.values = list(values_list)
def __str__(self):
return "Condition for attribute %s with value %s"%(self.attribute,self.values)
class ValueCondition(Condition):
"""This takes exact one value to compare """
def __init__(self,attribute,value):
Condition.__init__(self,attribute)
self.value = value
def __str__(self):
return "Condition for attribute %s with value %s"%(self.attribute,str(self.value))
if __name__ == '__main__':
pass
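    # Small usage sketch (added for illustration; the attribute and value names are made up):
    rule = Rule('high_value_orders')
    orders = DataSet('orders')
    orders.add_condition(ValueCondition('status', 'OPEN'))
    orders.add_condition(ListCondition('region', ['EU', 'US']))
    rule.add_dataset(orders)
    print(rule, *orders.conditions, sep='\n')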
| python |
print "this is a syntax error"
| python |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 4 14:03:21 2019
@author: 3535008
"""
try:
import Tkinter as tk
import ttk
except ImportError: # Python 3
import tkinter as tk
from tkinter import ttk
from tincan import tracing_mrpython as tracing
class CloseableNotebook(ttk.Notebook):
"""A ttk Notebook with close buttons on each tab"""
__initialized = False
def __init__(self, *args, **kwargs):
if not self.__initialized:
self.__initialize_custom_style()
self.__inititialized = True
kwargs["style"] = "CustomNotebook"
ttk.Notebook.__init__(self, *args, **kwargs)
self._active = None
self.bind("<ButtonPress-1>", self.on_close_press, True)
self.bind("<ButtonRelease-1>", self.on_close_release)
self.old_tab = ""
self.new_tab = ""
def get_filename(self, tab_path):
try:
return self.nametowidget(tab_path).get_file_name()
except KeyError as path:
error = "no widget with this path:{}".format(path)
print(error)
return error
def on_close_press(self, event):
"""Called when the button is pressed over the close button"""
self.old_tab = self.select()
element = self.identify(event.x, event.y)
if "close" in element:
index = self.index("@%d,%d" % (event.x, event.y))
self.state(['pressed'])
self._active = index
def on_close_release(self, event):
"""Called when the button is released over the close button"""
# Code for tracing changed tabs
self.new_tab = self.select()
if not self.instate(['pressed']) and self.old_tab != self.new_tab and self.old_tab != "":
old_tab_filename = self.get_filename(self.old_tab)
new_tab_filename = self.get_filename(self.new_tab)
tracing.send_statement("switched", "file",
{"https://www.lip6.fr/mocah/invalidURI/extensions/old-tab": old_tab_filename,
"https://www.lip6.fr/mocah/invalidURI/extensions/current-tab": new_tab_filename})
# Code for closing tab
if not self.instate(['pressed']):
return
element = self.identify(event.x, event.y)
try:
index = self.index("@%d,%d" % (event.x, event.y))
except tk.TclError:
return
if "close" in element and self._active == index:
#do the proper linking to the event
old_tab_filename = self.get_filename(self.old_tab)
self.close_current_editor()
self.new_tab = self.select()
if self.new_tab != "":
new_tab_filename = self.get_filename(self.new_tab)
else:
new_tab_filename = "no tab selected"
tracing.send_statement("closed", "file",
{"https://www.lip6.fr/mocah/invalidURI/extensions/closed-tab": old_tab_filename,
"https://www.lip6.fr/mocah/invalidURI/extensions/current-tab": new_tab_filename})
self.event_generate("<<NotebookTabClosed>>")
self.state(["!pressed"])
self._active = None
def close_current_editor(self,event=None):
print("Should be overrided")
def __initialize_custom_style(self):
style = ttk.Style()
self.images = (
tk.PhotoImage("img_close", data='''
R0lGODlhCAAIAMIBAAAAADs7O4+Pj9nZ2Ts7Ozs7Ozs7Ozs7OyH+EUNyZWF0ZWQg
d2l0aCBHSU1QACH5BAEKAAQALAAAAAAIAAgAAAMVGDBEA0qNJyGw7AmxmuaZhWEU
5kEJADs=
'''),
tk.PhotoImage("img_closeactive", data='''
R0lGODlhCAAIAMIEAAAAAP/SAP/bNNnZ2cbGxsbGxsbGxsbGxiH5BAEKAAQALAAA
AAAIAAgAAAMVGDBEA0qNJyGw7AmxmuaZhWEU5kEJADs=
'''),
tk.PhotoImage("img_closepressed", data='''
R0lGODlhCAAIAMIEAAAAAOUqKv9mZtnZ2Ts7Ozs7Ozs7Ozs7OyH+EUNyZWF0ZWQg
d2l0aCBHSU1QACH5BAEKAAQALAAAAAAIAAgAAAMVGDBEA0qNJyGw7AmxmuaZhWEU
5kEJADs=
''')
)
style.element_create("close", "image", "img_close",
("active", "pressed", "!disabled", "img_closepressed"),
("active", "!disabled", "img_closeactive"), border=8, sticky='')
style.layout("CustomNotebook", [("CustomNotebook.client", {"sticky": "nswe"})])
style.layout("CustomNotebook.Tab", [
("CustomNotebook.tab", {
"sticky": "nswe",
"children": [
("CustomNotebook.padding", {
"side": "top",
"sticky": "nswe",
"children": [
("CustomNotebook.focus", {
"side": "top",
"sticky": "nswe",
"children": [
("CustomNotebook.label", {"side": "left", "sticky": ''}),
("CustomNotebook.close", {"side": "left", "sticky": ''}),
]
})
]
})
]
})
]) | python |
"""
BIR module status
Output card to control 8 250V/8A two-pole relays.
:author: Zilvinas Binisevicius <[email protected]>
"""
import json
import domintell
from domintell.messages import GenericAOStatusMessage
class DDIMStatusMessage(GenericAOStatusMessage):
COMMAND_CODE = 'DIM'
"""
DDIM module status
"""
def __init__(self, address=None):
GenericAOStatusMessage.__init__(self, 8)
self.moduleType = DDIMStatusMessage.COMMAND_CODE
domintell.register_command(DDIMStatusMessage.COMMAND_CODE, DDIMStatusMessage)
| python |
# -*- coding: utf-8 -*-
import logging
import oss2
from django.conf import settings
from django.db import transaction
from django.db.models import Count
from chisch.common import dependency
from chisch.common.decorators import login_required, lecturer_required
from chisch.common.retwrapper import RetWrapper
from chisch.common.serializer import s as _s
from chisch.common.views import DetailView, ListView
logger = logging.getLogger('django')
@dependency.requires('curriculum_manager')
class CurriculumCategoryListView(ListView):
def get(self, request):
result = self.curriculum_manager.category
return RetWrapper.wrap_and_return(result)
@dependency.requires('curriculum_manager', 'oss_manager')
class CurriculumListView(ListView):
@login_required
@lecturer_required
@transaction.atomic
def create(self, request, *args, **kwargs):
lecturer_id = request.user.id
f = kwargs.pop('files')[0] if ('files' in kwargs) \
and len(kwargs['files']) > 0 else None
try:
curriculum = self.\
curriculum_manager.create(lecturer_id=lecturer_id, **kwargs)
except Exception, e:
return RetWrapper.wrap_and_return(e)
if f:
from oss.cores import get_object_key
key = get_object_key('create_curriculum',
curriculum.id,
settings.IMAGE_TYPE)
permission = oss2.OBJECT_ACL_PUBLIC_READ
try:
cover_url, _ = \
self.oss_manager.single_object_upload(key, f, permission)
except Exception, e:
return RetWrapper.wrap_and_return(e)
try:
curriculum.cover_url = cover_url
curriculum.save()
except Exception, e:
return RetWrapper.wrap_and_return(e)
result = _s(curriculum, own=True)
return RetWrapper.wrap_and_return(result)
def page_list(self, request, *args, **kwargs):
page_size = kwargs['page_size']
page_num = kwargs['page_num']
offset = (page_num-1) * page_size
limit = offset + page_size
try:
curriculums = self.curriculum_manager.all()[offset: limit]
curriculums_count = self.curriculum_manager.aggregate(Count("id"))
except Exception, e:
return RetWrapper.wrap_and_return(e)
result = {}
result['rows'] = _s(curriculums, own=True)
result['pagination'] = {
'total': curriculums_count['id__count'],
}
return RetWrapper.wrap_and_return(result)
def get_curriculum_categories(self, request, *args, **kwargs):
try:
category = self.curriculum_manager.get_curriculum_categories()
except Exception, e:
return RetWrapper.wrap_and_return(e)
return RetWrapper.wrap_and_return(category)
@dependency.requires('curriculum_manager', 'oss_manager')
class CurriculumDetailView(DetailView):
def update(self, request, *args, **kwargs):
pass
| python |
# -*- coding: utf-8 -*-
test_input_folder = 'test_input/'
sangam_tamil = __import__("sangam_tamil")
cdeeplearn = __import__("cdeeplearn")
sangam_class = sangam_tamil.SangamPoems()
config = sangam_tamil.config
GREEN_CHECK = u'\u2714 '
RED_CROSS = u'\u274C '
GEQ = u' \u2265 '
STATUS_CHECK = lambda rc : GREEN_CHECK if rc else RED_CROSS
def unit_test(test_name,expected,actual, assert_test=False,show_output=True):
unit_test.counter +=1
status = 'Passed'
if (expected != actual):
status = 'Failed'
unit_test.failed += 1
unit_test.failed_tests += str(unit_test.counter) +';'
if show_output:
print('Test#:',unit_test.counter,'Test:',STATUS_CHECK(expected == actual)+test_name, \
"\tExpected Result:",expected, \
'\tActual Result :',actual, \
'\tStatus:',status
)
else:
print('Test#:',unit_test.counter,'Test:',STATUS_CHECK(expected == actual)+test_name, \
'\tStatus:',status
)
    if assert_test:
        assert status == 'Passed'
def unit_test_actual_contains_expected(test_name,expected,actual, assert_test=False,show_output=True):
unit_test.counter +=1
status = 'Passed'
if (not expected in actual):
status = 'Failed'
unit_test.failed += 1
unit_test.failed_tests += str(unit_test.counter) +';'
if show_output:
print('Test#:',unit_test.counter,'Test:',STATUS_CHECK(expected in actual)+test_name, \
"\tExpected Result:",expected, \
'\tActual Result :',actual, \
'\tStatus:',status
)
else:
print('Test#:',unit_test.counter,'Test:',STATUS_CHECK(expected in actual)+test_name, \
'\tStatus:',status
)
    if assert_test:
        assert status == 'Passed'
def class_method_unit_test(class_name, init_value, function_name, expected_result, *args):
obj = eval(class_name)(init_value)
test_name = str(class_name) +'-' + function_name + ' ' + init_value +' args: '+' '.join(map(str, args))
actual_result = getattr(obj,function_name)(*args)
unit_test(test_name,expected_result,actual_result)
def class_attribute_unit_test(class_name, init_value, attribute_name, expected_result):
obj = eval(class_name)(init_value)
test_name = str(class_name) +'-' + attribute_name + ' ' + init_value
actual_result = getattr(obj,attribute_name)
unit_test(test_name,expected_result,actual_result)
def sangam_poem_tests():
show_output = False
poem_number = 13
config_keywords = config["key_words"]
POEM_DICT = {"அகநானூறு":"தென்னவன்", "புறநானூறு":"களிற்று", "ஐங்குறுநூறு":"அடைகரை", "கலித்தொகை":"சுவைத்துத்", "குறுந்தொகை":"கழீஇய", "நற்றிணை":"பெருந்தோளோயே", "பதிற்றுப்பத்து":"யாக்கை", "பட்டினப்பாலை":"புணரியோடு",
"முல்லைப்பாட்டு":"பதைப்பன்ன", "நெடுநல்வாடை":"நுண்ணிதின்","குறிஞ்சிப்பாட்டு":"மொய்ம்பு","மலைபடுகடாம்":"பயம்புமார்", "மதுரைக்காஞ்சி":"உறைதும்","பொருநராற்றுப்படை":"கிளந்தனம்",
"பெரும்பாணாற்றுப்படை":"மறம்பூண்", "சிறுபாணாற்றுப்படை":"கடம்பின்","திருமுருகாற்றுப்படை":"மஞ்ஞை","ஐந்திணை எழுபது":"முயங்கினேன்","ஐந்திணை ஐம்பது":"மயங்கல்","கார் நாற்பது":"வனப்பின்",
"திணைமொழி ஐம்பது":"மலர்ந்தன","கைந்நிலை":"செலவுரைப்பக்","திணைமாலை நூற்றைம்பது":"ஆயுங்கால்"}#,"திருக்குறள்"]
for poem in POEM_DICT.keys():
user_input = poem + " " + str(poem_number)
expected_result = POEM_DICT[poem]
for value in config_keywords[poem]:
user_input = value + " " + str(poem_number)
actual_result = sangam_class.respond_to_bot_user_input(user_input)
test_name = poem+"-"+user_input
print("Test: "+test_name+" ...")
unit_test_actual_contains_expected(test_name,expected_result,actual_result, assert_test=False,show_output=show_output)
def sangam_thirukural_keywords_tests():
show_output = False
search_types = {"contains":"தாள்சேர்ந்தார்க்","ends_with":"குறிப்பு.", "begins_with":"கண்ணொடு"}
poem = "திருக்குறள்"
config_keywords = config["key_words"][poem]
for poem_value in config_keywords:
for search_type in search_types.keys():
search_word = search_types[search_type]
for search_value in config["key_words"][search_type]:
user_input = poem_value + " " +search_value + " "+ search_word
expected_result = search_word
actual_result = sangam_class.respond_to_bot_user_input(user_input)
test_name = poem+"-"+user_input
print("Test: "+test_name+" ...")
unit_test_actual_contains_expected(test_name,expected_result,actual_result, assert_test=False,show_output=show_output)
## Reverse the key value
user_input = poem_value + " "+ search_word + " " +search_value
expected_result = search_word
actual_result = sangam_class.respond_to_bot_user_input(user_input)
test_name = poem+"-"+user_input
print("Test: "+test_name+" ...")
unit_test_actual_contains_expected(test_name,expected_result,actual_result, assert_test=False,show_output=show_output)
def sangam_thirukural_get_tests():
show_output = False
search_types = {
"get":"சீரற்ற தேர்வு (random choice)",
"get 12":"அறத்துப்பால்",
"get 12,3":"நடுவிகந்தாம்",
"get 1234":"அதிகார எண் 133 க்குள் இருக்க வேண்டும்",
"kural 1234":"பைந்தொடி"
}
poem = "திருக்குறள்"
config_keywords = config["key_words"][poem]
for poem_value in config_keywords:
for search_key in search_types.keys():
search_value = search_types[search_key]
user_input = poem_value + " " +search_key
expected_result = search_value
actual_result = sangam_class.respond_to_bot_user_input(user_input)
test_name = poem+"-"+user_input
print("Test: "+test_name+" ...")
unit_test_actual_contains_expected(test_name,expected_result,actual_result, assert_test=False,show_output=show_output)
def sangam_poet_count_tests():
show_output = False
config_key = 'poet_count'
config_keywords = config["key_words"]
POEM_DICT = {"அகநானூறு":174, "புறநானூறு":160, "ஐங்குறுநூறு":5, "கலித்தொகை":6, "குறுந்தொகை":216, "நற்றிணை":200, "பதிற்றுப்பத்து":9, "பட்டினப்பாலை":1,
"முல்லைப்பாட்டு":1, "நெடுநல்வாடை":1,"குறிஞ்சிப்பாட்டு":1,"மலைபடுகடாம்":1, "மதுரைக்காஞ்சி":1,"பொருநராற்றுப்படை":1,
"பெரும்பாணாற்றுப்படை":1, "சிறுபாணாற்றுப்படை":1,"திருமுருகாற்றுப்படை":1,"ஐந்திணை எழுபது":1,"ஐந்திணை ஐம்பது":1,"கார் நாற்பது":1,
"திணைமொழி ஐம்பது":1,"கைந்நிலை":1,"திணைமாலை நூற்றைம்பது":1}#,"திருக்குறள்"]
for poem in POEM_DICT.keys():
expected_result = poem + " எழுதிய புலவர்கள் எண்ணிக்கை: "+str(POEM_DICT[poem])
for value in config_keywords[config_key]:
user_input = poem + " " + str(value)
actual_result = sangam_class.respond_to_bot_user_input(user_input)
test_name = poem+"-"+user_input
print("Test: "+test_name+" ...")
unit_test_actual_contains_expected(test_name,expected_result,actual_result, assert_test=False,show_output=show_output)
def sangam_poet_poems_tests():
show_output = False
config_key = 'poet_poems'
config_keywords = config["key_words"]
POEM_DICT = {"அகநானூறு":["பரணர்",34], "புறநானூறு":["ஔவையார்",33], "ஐங்குறுநூறு":["அம்மூவனார்",100], "கலித்தொகை":["பாலை பாடிய பெருங்கடுங்கோ",35], "குறுந்தொகை":["அள்ளூர் நன்முல்லையார்",9], "நற்றிணை":["உலோச்சனார்",20], "பதிற்றுப்பத்து":["அரிசில்கிழார்",12], "பட்டினப்பாலை":["உருத்திரங்கண்ணனார்",40],
"முல்லைப்பாட்டு":["காவிரிப்பூம்பட்டினத்துப் பொன்வணிகனார்",18], "நெடுநல்வாடை":["கணக்காயனார்",27],"குறிஞ்சிப்பாட்டு":["கபிலர்",28],"மலைபடுகடாம்":["பெருங்கெளசிகனார்",44], "மதுரைக்காஞ்சி":["மாங்குடி மருதனார்",63],"பொருநராற்றுப்படை":["முடத்தாமக் கண்ணியார்",20],
"பெரும்பாணாற்றுப்படை":["கடியலூர் உருத்திரங் கண்ணனார்",41], "சிறுபாணாற்றுப்படை":["நத்தத்தனார்",50],"திருமுருகாற்றுப்படை":["நக்கீரர்",30],"ஐந்திணை எழுபது":["மூவாதியார்",70],"ஐந்திணை ஐம்பது":["மாறன் பொறையனார்",50],"கார் நாற்பது":["மதுரைக் கண்ணங்கூத்தனார்",40],
"திணைமொழி ஐம்பது":["கண்ணன்சேந்தனார்",50],"கைந்நிலை":["புல்லங்காடனார்",60],"திணைமாலை நூற்றைம்பது":["கணிமேதாவியார்",153]
}#,"திருக்குறள்"]
for poem in POEM_DICT.keys():
poet_name = str(POEM_DICT[poem][0])
expected_result = str(POEM_DICT[poem][1])
for value in config_keywords[config_key]:
user_input = poem + " " + value + " "+poet_name
actual_result = str(sangam_class.respond_to_bot_user_input(user_input).count(poet_name))
test_name = poem+"-"+user_input
print("Test: "+test_name+" ...")
unit_test(test_name,expected_result,actual_result, assert_test=False,show_output=show_output)
def sangam_start_end_words():
show_output = False
data_files = ['test_1','test_2','test_3']
data_files = ["./sangam_tamil_poems/" + d + "_poems.txt" for d in data_files]
cdeeplearn.set_parameters(corpus_file='sangam_corpus.json', model_weights_file='sangam_corpus.h5',
starting_word_file='sangam_starting_words.json', ending_word_file='sangam_ending_words.json')
_,starting_words,ending_words = cdeeplearn._create_corpus_files(data_files,end_token_boundary=None)
expected_result = ['மல்லர்க்','உவவுமதி','மண்','கண்ணி','இருங்கழி','கோழ்','அணி','வண்டு','மின்னும்']
unit_test("Starting-Unique-Words",set(expected_result),set(starting_words), assert_test=False,show_output=show_output)
expected_result = ['வன்மையானே','கொண்டன்றே','ஞான்றே','தோள்','அருந்தவத்தோற்கே','இறந்தோரே','கார்','போன்றே']
unit_test("Ending-Unique-Words",set(expected_result),set(ending_words), assert_test=False,show_output=show_output)
def run_all_unit_tests():
sangam_poem_tests()
sangam_poet_count_tests()
sangam_poet_poems_tests()
sangam_thirukural_keywords_tests()
sangam_thirukural_get_tests()
sangam_start_end_words()
pass
def run_specific_tests():
pass
if __name__ == '__main__':
unit_test.counter = 0
unit_test.failed=0
unit_test.failed_tests = ''
#run_specific_tests()
run_all_unit_tests()
if unit_test.failed > 0:
print(str(unit_test.failed)+ ' out of ' + str(unit_test.counter) + " tests Failed. Test id's of failed tests:",unit_test.failed_tests)
else:
print('All (' + str(unit_test.counter)+') unit tests passed.')
exit()
| python |
# Problem 1
# You are learning how to make milkshakes.
# First, you will be given two sequences of integers representing chocolates and cups of milk.
# You have to start from the last chocolate and try to match it with the first cup of milk. If their values are equal,
# you should make a milkshake and remove both ingredients. Otherwise you should move the cup of milk at the end of the
# sequence and decrease the value of the chocolate by 5 without moving it from its position.
# If any of the values are equal to or below 0, you should remove them from the records before trying to mix it with
# the other ingredient.
# When you successfully prepare 5 chocolate milkshakes or you have no more chocolate or cups of milk left, you need to
# stop making chocolate milkshakes.
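# Approach used below: chocolates are consumed from the end of a list and cups of milk from the front
# of a deque; non-positive values are discarded before matching, an unmatched cup is rotated to the
# back of the queue while the chocolate's value drops by 5, and the loop stops after 5 milkshakes.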
from collections import deque
chocolate = [int(el) for el in input().split(', ')]
milk = deque([int(el) for el in input().split(', ')])
matches = 0
while chocolate and milk and matches != 5:
is_time_for_next_turn = False
current_chocolate = chocolate[-1]
current_milk = milk[0]
if current_milk <= 0:
milk.popleft()
is_time_for_next_turn = True
if current_chocolate <= 0:
chocolate.pop()
is_time_for_next_turn = True
if is_time_for_next_turn:
continue
if current_chocolate == current_milk:
chocolate.pop()
milk.popleft()
matches += 1
continue
else:
milk.append(milk.popleft())
chocolate[-1] -= 5
if matches == 5:
print("Great! You made all the chocolate milkshakes needed!")
else:
print("Not enough milkshakes.")
if chocolate:
print(f"Chocolate: {', '.join(str(el) for el in chocolate)}")
else:
print("Chocolate: empty")
if milk:
print(f"Milk: {', '.join(str(el) for el in milk)}")
else:
print("Milk: empty")
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
""" Provide the numeric properties.
"""
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from .bases import ParameterizedProperty
from .primitive import Float, Int
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'Angle',
'Byte',
'Interval',
'NonNegativeInt',
'Percent',
'PositiveInt',
'Size',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class NonNegativeInt(Int):
""" Accept non-negative integers. """
def validate(self, value, detail=True):
super().validate(value, detail)
if not (value is None or value >= 0):
raise ValueError(f"expected non-negative integer, got {value!r}")
class PositiveInt(Int):
""" Accept positive integers. """
def validate(self, value, detail=True):
super().validate(value, detail)
if not (value is None or value > 0):
raise ValueError(f"expected positive integer, got {value!r}")
class Interval(ParameterizedProperty):
""" Accept numeric values that are contained within a given interval.
Args:
interval_type (numeric property):
numeric types for the range, e.g. ``Int``, ``Float``
start (number) :
A minimum allowable value for the range. Values less than
``start`` will result in validation errors.
end (number) :
A maximum allowable value for the range. Values greater than
``end`` will result in validation errors.
Example:
.. code-block:: python
>>> class RangeModel(HasProps):
... prop = Range(Float, 10, 20)
...
>>> m = RangeModel()
>>> m.prop = 10
>>> m.prop = 20
>>> m.prop = 15
>>> m.prop = 2 # ValueError !!
>>> m.prop = 22 # ValueError !!
>>> m.prop = "foo" # ValueError !!
"""
def __init__(self, interval_type, start, end, default=None, help=None):
self.interval_type = self._validate_type_param(interval_type)
# Make up a property name for validation purposes
self.interval_type.validate(start)
self.interval_type.validate(end)
self.start = start
self.end = end
super().__init__(default=default, help=help)
def __str__(self):
class_name = self.__class__.__name__
return f"{class_name}({self.interval_type}, {self.start!r}, {self.end!r})"
@property
def type_params(self):
return [self.interval_type]
def validate(self, value, detail=True):
super().validate(value, detail)
if not (value is None or self.interval_type.is_valid(value) and value >= self.start and value <= self.end):
msg = "" if not detail else f"expected a value of type {self.interval_type} in range [{self.start}, {self.end}], got {value!r}"
raise ValueError(msg)
class Byte(Interval):
""" Accept integral byte values (0-255).
Example:
.. code-block:: python
>>> class ByteModel(HasProps):
... prop = Byte(default=0)
...
>>> m = ByteModel()
>>> m.prop = 255
>>> m.prop = 256 # ValueError !!
>>> m.prop = 10.3 # ValueError !!
"""
def __init__(self, default=0, help=None):
super().__init__(Int, 0, 255, default=default, help=help)
class Size(Float):
""" Accept non-negative numeric values.
Args:
default (float or None, optional) :
A default value for attributes created from this property to
have (default: None)
help (str or None, optional) :
A documentation string for this property. It will be automatically
used by the :ref:`bokeh.sphinxext.bokeh_prop` extension when
            generating Sphinx documentation. (default: None)
serialized (bool, optional) :
Whether attributes created from this property should be included
in serialization (default: True)
readonly (bool, optional) :
Whether attributes created from this property are read-only.
(default: False)
Example:
.. code-block:: python
>>> class SizeModel(HasProps):
... prop = Size()
...
>>> m = SizeModel()
>>> m.prop = 0
>>> m.prop = 10e6
>>> m.prop = -10 # ValueError !!
>>> m.prop = "foo" # ValueError !!
"""
def validate(self, value, detail=True):
super().validate(value, detail)
if not (value is None or 0.0 <= value):
msg = "" if not detail else f"expected a non-negative number, got {value!r}"
raise ValueError(msg)
class Percent(Float):
""" Accept floating point percentage values.
``Percent`` can be useful and semantically meaningful for specifying
things like alpha values and extents.
Args:
default (float or None, optional) :
A default value for attributes created from this property to
have (default: None)
help (str or None, optional) :
A documentation string for this property. It will be automatically
used by the :ref:`bokeh.sphinxext.bokeh_prop` extension when
            generating Sphinx documentation. (default: None)
serialized (bool, optional) :
Whether attributes created from this property should be included
in serialization (default: True)
readonly (bool, optional) :
Whether attributes created from this property are read-only.
(default: False)
Example:
.. code-block:: python
>>> class PercentModel(HasProps):
... prop = Percent()
...
>>> m = PercentModel()
>>> m.prop = 0.0
>>> m.prop = 0.2
>>> m.prop = 1.0
>>> m.prop = -2 # ValueError !!
>>> m.prop = 5 # ValueError !!
"""
def validate(self, value, detail=True):
super().validate(value, detail)
if value is None or 0.0 <= value <= 1.0:
return
msg = "" if not detail else f"expected a value in range [0, 1], got {value!r}"
raise ValueError(msg)
class Angle(Float):
""" Accept floating point angle values.
``Angle`` is equivalent to :class:`~bokeh.core.properties.Float` but is
provided for cases when it is more semantically meaningful.
Args:
default (float or None, optional) :
A default value for attributes created from this property to
have (default: None)
help (str or None, optional) :
A documentation string for this property. It will be automatically
used by the :ref:`bokeh.sphinxext.bokeh_prop` extension when
            generating Sphinx documentation. (default: None)
serialized (bool, optional) :
Whether attributes created from this property should be included
in serialization (default: True)
readonly (bool, optional) :
Whether attributes created from this property are read-only.
(default: False)
"""
pass
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| python |
import requests
import json
from device_configs.csr_1000_devnet import router
print(router)
session = requests.Session()
session.auth = (router['username'], router['password'])
session.headers = ({
'Accept': 'application/yang-data+json',
'Content-Type': 'application/yang-data+json'
})
host = router['host']
port = router['port']
container = 'Cisco-IOS-XE-interfaces-oper'
leaf = 'interfaces'
request = 'interface'
value = 'GigabitEthernet3'
# url = "https://{}:{}/restconf/data/{}:{}/{}={}".format(host, port, container, leaf, request, value)
url = "https://{}:{}/restconf/data/ietf-interfaces:interfaces/interface=GigabitEthernet3".format(host, port)
response = session.get(url, verify=False)
print(response)
print(response.status_code)
json_response = response.json()
print(json_response)
exit()
interface = response.json()["{}:{}".format(container, request)]
print(interface['name'])
print(interface['description'])
if 'ipv4' in interface:
print(interface['ipv4'])
print(interface['ipv4-subnet-mask'])
print('*' * 50)
| python |
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import random
import time
dizi1 = [5,8,7,1,35,42,32,12,56,45,21,326,20,28,54]
arananEleman = 1
######## Linear Search #############
for diziDeger in dizi1:
    if arananEleman == diziDeger:
        print("The element being searched for is in the list")
        break
    print("The for loop ran for {}".format(diziDeger))
arananDegerIndex = ""
bulunmaDurumu = False
for index in range(0,len(dizi1)):
    if arananEleman == dizi1[index]:
        bulunmaDurumu = True
        print("The value being searched for was found")
        arananDegerIndex = index
        break
if not bulunmaDurumu:
    print("The value being searched for is not in the list.")
dizi1.sort()
def recursiveBinarySearch(arr, arananDeger):
    index = (int)(len(arr)/2)  # find the middle index
    baslangic = 0
    bitis = len(arr)
    # ortanca = arr[index]
    if len(arr) < 1:
        return "The value being searched for is not in the list"
    if arr[index] == arananDeger:
        return "The value being searched for was found"
    elif arr[index] > arananDeger:
        # search the left half; the slice must stop at index (not index - 1) so no element is skipped
        return recursiveBinarySearch(arr[baslangic:index], arananDeger)
    elif arr[index] < arananDeger:
        return recursiveBinarySearch(arr[index + 1:bitis], arananDeger)
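# Illustrative check (not part of the original script): recursiveBinarySearch([1, 3, 5, 7, 9], 7)
# reports that the value was found, while recursiveBinarySearch([1, 3, 5, 7, 9], 4) reports that
# it is not in the list.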
dizi1 = random.sample(range(0,10000000), 1000000)
dizi1.sort()
arananEleman = dizi1[len(dizi1)-1]
basla = time.time()
arananDegerIndex = ""
bulunmaDurumu = False
for index in range(0,len(dizi1)):
    if arananEleman == dizi1[index]:
        bulunmaDurumu = True
        print("The value being searched for was found")
        arananDegerIndex = index
        break
if not bulunmaDurumu:
    print("The value being searched for is not in the list.")
bitis = time.time()
print("Linear Search time {}".format(bitis-basla))
basla = time.time()
recursiveBinarySearch(dizi1, arananEleman)
bitis = time.time()
print("Binary Search time {}".format(bitis-basla))
# Ratio of the two measured times from one sample run: 0.15623998641967773 / 0.04684305191040039 (about 3.3x)
| python |
import tensorflow as tf
import numpy as np
# demo1
# def my_image_file(input):
# conv1_weights = tf.Variable(tf.random_normal([3,4]),name="conv1_weights")
# return conv1_weights
#
# input1=tf.get_variable(name="var1", initializer=np.ones (shape=[2,3],dtype=np.float32))
# input2=tf.get_variable(name="var2", initializer=np.zeros(shape=[2,3],dtype=np.float32))
#
# # Here we would like both input1 and input2 to be processed by my_image_file with the same set of weights.
# # Calling the function twice like this does not achieve that: the two calls create two sets of conv1_weights
# # instead of one. See demo2 for how to share the variables.
# ret1=my_image_file(input1)
# ret2=my_image_file(input2)
#
# init =tf.global_variables_initializer()
# with tf.Session() as sess:
# sess.run(init)
# print (sess.run(ret1))
# print (sess.run(ret2))
# demo2
# Create the variables once up front in a dict (or as module-level globals); this way the weights can be shared.
# variables_dict = {
# "conv1_weights":tf.Variable(tf.random_normal([2,3]),name="conv1_weights"),
# "conv1_biases":tf.Variable(tf.zeros([5]), name="conv1_biases")
# }
#
# def my_image_file(input):
# conv1_weights = variables_dict['conv1_weights']
# return conv1_weights
#
# input1=tf.get_variable(name="var1", initializer=np.ones (shape=[2,3],dtype=np.float32))
# input2=tf.get_variable(name="var2", initializer=np.zeros(shape=[2,3],dtype=np.float32))
#
# ret1=my_image_file(input1)
# ret2=my_image_file(input2)
#
# init =tf.global_variables_initializer()
# with tf.Session() as sess:
# sess.run(init)
# print (sess.run(ret1))
# print (sess.run(ret2))
# demo3
# Do the sharing via a variable scope
# def my_image_file(input_images):
# conv1_weights = tf.get_variable("weights", [3,4],initializer=tf.random_normal_initializer())
# return conv1_weights
#
#
# input1=tf.get_variable(name="var1", initializer=np.ones (shape=[2,3],dtype=np.float32))
# input2=tf.get_variable(name="var2", initializer=np.zeros(shape=[2,3],dtype=np.float32))
#
# #variable scope = image_filters
# with tf.variable_scope("image_filters") as scope:
#
# ret1 = my_image_file(input1)
#
# # This is the key line
# scope.reuse_variables()
#
# ret2 = my_image_file(input2)
#
#
#
# init =tf.global_variables_initializer()
#
# with tf.Session() as sess:
# sess.run(init)
# print (ret1.name,sess.run(ret1))
# print (ret2.name,sess.run(ret2))
#demo4
# This is an example of what can go wrong
def my_image_file(input_images):
with tf.variable_scope("my_image_file") as scope:
conv1_weights = tf.get_variable("weights2", [3,4],initializer=tf.random_normal_initializer())
return conv1_weights
input1=tf.get_variable(name="var1", initializer=np.ones (shape=[2,3],dtype=np.float32))
input2=tf.get_variable(name="var2", initializer=np.zeros(shape=[2,3],dtype=np.float32))
with tf.variable_scope("image_filters",reuse=tf.AUTO_REUSE) as scope:
    # Create a variable inside this scope.
    # If a variable is created with tf.Variable, asking tf.get_variable for a variable with the same name
    # raises an error; so create variables with tf.get_variable and set the reuse flag instead.
    # ret0 = tf.Variable(initial_value=tf.random_normal([3, 4]), name="weights")
    # scope.reuse_variables()
    # Once scope.reuse_variables() has been called, tf.get_variable can no longer create new variables here;
    # it will only look up variables that were previously created by tf.get_variable.
    # However, with tf.variable_scope("image_filters", reuse=tf.AUTO_REUSE) and without calling
    # scope.reuse_variables(), lookups still work normally: existing variables are reused and missing ones
    # are created.
    # If both are combined, scope.reuse_variables() still prevents creating new variables; only previously
    # created ones can be used.
    # Unless you absolutely must force reuse of particular variables, it is better to avoid
    # scope.reuse_variables() and use with tf.variable_scope("image_filters", reuse=tf.AUTO_REUSE) instead.
    # There are really three reuse modes: reuse=tf.AUTO_REUSE (create the variable if it does not exist),
    # True (strict reuse only), and None (inherit the setting from the enclosing scope).
ret0 = tf.get_variable("weights", [3,4],initializer=tf.random_normal_initializer())
ret3 = tf.get_variable("weights1", [3, 4], initializer=tf.random_normal_initializer())
#
ret1 = my_image_file(input1)
ret2 = my_image_file(input2)
init =tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
print (ret0.name,sess.run(ret0))
print (ret1.name,sess.run(ret1))
print (ret2.name,sess.run(ret2)) | python |
import numpy as np
import math as math
from pathfinding.core.diagonal_movement import DiagonalMovement
from pathfinding.core.grid import Grid
from pathfinding.finder.a_star import AStarFinder
UNOCCUPIED = 1
OCCUPIED = -1
FOOD = 2
HEAD = -2
TAIL = 4
HEALTHLIM = 25
FOODDIST = 3
game_state = ""
directions = {'up': 0, 'down': 0, 'left': 0, 'right': 0}
LASTDIR = 'up'
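# calculate_move (below) scores each of the four directions by summing the board values around the
# candidate square, heavily penalising moves toward walls or occupied squares, boosting squares next
# to food, and discouraging an immediate reversal of the previous move stored in LASTDIR.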
def calculate_move(board_matrix, game_state):
    global LASTDIR  # LASTDIR is read and reassigned in this function, so it must be declared global
    set_game_state(game_state)
height = game_state["board"]["height"]
head = game_state['you']["body"][0]
x = head["x"]
y = head["y"]
print("Head:", x, y)
health = game_state['you']["health"]
# Check up
if head["y"] - 1 < 0+1 or (board_matrix[y-1][x] == OCCUPIED or board_matrix[y-2][x] == OCCUPIED or board_matrix[y-2][x-1] == OCCUPIED or board_matrix[y-2][x+1] == OCCUPIED):
directions["up"] = -1000
else:
directions["up"] = sum(board_matrix, head["x"], head["y"] - 1, height, game_state)
directions["up"] -= int(y-1 <= 1 )*500
# Check down
if head["y"] + 1 > (height) - 2 or (board_matrix[y+1][x] == OCCUPIED or board_matrix[y+2][x] == OCCUPIED or board_matrix[y+2][x-1] == OCCUPIED or board_matrix[y+2][x+1] == OCCUPIED) : #and board_matrix[y+2][x] and board_matrix[y+2][x+1] and board_matrix[y+2][x-1]
directions["down"] = -1000
else:
directions["down"] = sum(board_matrix, head["x"], head["y"] + 1, height, game_state)
directions["down"] -= int(y+1 > height -1 )*500
# Check Left
if head["x"] - 1 < 0+1 or (board_matrix[y][x-1] == OCCUPIED or board_matrix[y-1][x-2] == OCCUPIED or board_matrix[y][x-2] == OCCUPIED or board_matrix[y-2][x-2] == OCCUPIED ):
directions["left"] = -1000
else:
directions["left"] = sum(board_matrix, head["x"] - 1, head["y"], height, game_state)
directions["left"] -= int(x-1 <= 1)*500
# check right
if head["x"] + 1 > (height - 2) or (board_matrix[y][x+1] == OCCUPIED or board_matrix[y-1][x+2] == OCCUPIED or board_matrix[y][x+2] == OCCUPIED or board_matrix[y+1][x+2] == OCCUPIED ):
directions["right"] = -1000
else:
directions["right"] = sum(board_matrix, head["x"] + 1, head["y"], height, game_state)
        directions["right"] -= int(x+1 >= height - 1)*500
# Manipulate the food array
# Goal is that if the food is ADJACENT and no obstacles, the snake should go for the food
# initialize the array of food positions
arrfood = np.zeros([len(game_state["board"]["food"]),3])
i=0
for loc in game_state["board"]["food"]:
# Hopefully grab the indices for all of the food so we can find the closest food
arrfood[i,0] = loc["y"]
arrfood[i,1] = loc["x"]
# Calculate the distance to food
arrfood[i,2] = int(math.sqrt((arrfood[i,0]-y)**2+(arrfood[i,1]-x)**2))
i += 1
# return the index of the minimal distance
    nearFood = np.argmin(arrfood[:,2]) if len(arrfood) else None  # guard against an empty food list
#print(nearFood)
#print(arrfood[nearFood])
# Location of food identified, move in that directions
# Pick directions
    if nearFood is not None and arrfood[nearFood][2] == 1:
# find the direction to the food. Pick that direction
if arrfood[nearFood][0]-y == 1:
directions["down"] += 750
elif arrfood[nearFood][0]-y == -1:
directions["up"] += 750
elif arrfood[nearFood][1]-x == 1:
directions["right"] += 750
        elif arrfood[nearFood][1]-x == -1:
            directions["left"] += 750
if( health < HEALTHLIM and len(game_state['board']['food'])>0):
find_food(game_state, board_matrix)
# print(max(directions, key=lambda k: directions[k]))
# quad(board_matrix, game_state)
# print("UP", directions["up"])
# print("DOWN", directions["down"])
# print("LEFT", directions["left"])
# print("RIGHT", directions["right"])
# Final direction
if LASTDIR == 'up':
directions["down"] += -2000
elif LASTDIR == 'right':
directions["left"] -= 2000
final_dir = max(directions, key=lambda k: directions[k])
LASTDIR = final_dir
print(LASTDIR)
return final_dir
def sum(matrix, x, y, height, gamestate):
sum = 0
if matrix[y ][x] == HEAD:
snek = get_snek(x, y , game_state)
if is_bigger(snek, gamestate):
sum += 0
else:
sum += -100
# print(snek)
if (x - 1) >= 0:
sum += matrix[y][x-1]
if matrix[y][x-1] == HEAD :
snek = get_snek(x-1, y, game_state)
if is_bigger(snek, gamestate):
sum += 200
else:
sum += -75
# print(snek)
if (x + 1) < height:
sum += matrix[y][x+1]
if matrix[y][x+1] == HEAD :
snek = get_snek(x+1, y, game_state)
if(is_bigger(snek, gamestate)):
sum += 200
else:
sum += -75
# print(snek)
if (y - 1) >= 0:
sum += matrix[y-1][x]
if matrix[y-1][x] == HEAD :
snek = get_snek(x, y-1, game_state)
if is_bigger(snek, gamestate):
sum += 200
else:
sum += -75
# print(snek)
if (y + 1) < height:
sum += matrix[y+1][x]
if matrix[y+1][x] == HEAD :
snek = get_snek(x, y+1, game_state)
if is_bigger(snek, gamestate):
sum += 200
else:
sum += -75
# print(snek)
if (x-1) >= 0 and (y+1) < height:
sum += matrix[y+1][x-1]
if (x-1) >= 0 and (y-1) > 0:
sum += matrix[y-1][x-1]
if (x+1)< height and (y+1) < height:
sum += matrix[y+1][x+1]
    if (x+1) < height and (y-1) >= 0:
        sum += matrix[y-1][x+1]
return sum + matrix[y][x]
def find_food(game_state, board_matrix ):
minsum = 1000
y = game_state['you']["body"][0]["y"]
x = game_state['you']["body"][0]["x"]
for food in game_state["board"]["food"]:
tot = abs(food['x'] - x)
tot += abs(food['y'] - y)
if (tot < minsum):
goodfood = food
minsum = tot
find_path(game_state, board_matrix,x,y, goodfood["x"], goodfood['y'])
def find_path(game_state, board_matrix, x, y, foodx, foody):
height = game_state["board"]["height"]
grid = Grid(width=height, height=height, matrix=board_matrix)
start = grid.node(x, y)
end = grid.node(foodx, foody)
finder = AStarFinder(diagonal_movement=DiagonalMovement.never)
path, runs = finder.find_path(start, end, grid)
if (len(path) > 0):
pathx = path[1][0]
pathy = path[1][1]
y = game_state['you']["body"][0]["y"]
x = game_state['you']["body"][0]["x"]
# go up
if ((y - 1) == pathy) and (x == pathx):
directions["up"] += 20
print("Pick: UP")
# go down
if ((y + 1) == pathy) and (x == pathx):
directions["down"] += 20
print("Pick: down")
# go left
if ((x - 1) == pathx) and (y == pathy):
directions["left"] += 20
print("Pick: left")
# go right
if ((x + 1) == pathx) and (y == pathy):
directions["right"] += 20
print("Pick: right")
def quad(matrix, game_state):
x =game_state["you"]["body"][0]["x"]
y = game_state["you"]["body"][0]["y"]
height = game_state['board']['height']
quad1 = 0
quad2 = 0
quad3 = 0
quad4 = 0
for i in range(y):
for j in range(x):
if(matrix[j][i]== UNOCCUPIED):
quad1 += 1
for i in range(y):
for j in range(x, height):
if(matrix[j][i]== UNOCCUPIED):
quad2 += 1
for i in range(y, height):
for j in range(x):
if(matrix[j][i]== UNOCCUPIED):
quad3 += 1
for i in range(y, height):
for j in range(x, height):
if(matrix[j][i]== UNOCCUPIED):
quad4 += 1
directions['up'] += (quad1 + quad2)/height
directions['down'] += (quad3 + quad4)/height
directions['left'] += (quad1 + quad3)/height
directions['right'] += (quad2 + quad4)/height
# print(quad1, quad2, quad3, quad4)
def is_bigger(snek, game):
if len(game["you"]["body"]) > snek:
print("length**************")
return True
print("Snake length", snek, "our length ", len(game['you']['body']))
return False
def get_snek(x, y, game_state):
for snek in game_state["board"]["snakes"]:
snake_body = snek['body']
for xy in snake_body[0:]:
if( xy["y"]== y and xy["x"]==x):
return len(snake_body)
def set_game_state(new_game_state):
global game_state
game_state = new_game_state
def get_game_State():
return game_state | python |
#!/usr/bin/python
# @FarPixel & @DavidMaitland
# https://github.com/davidmaitland/GifPro
import os
import time
import pytumblr
import urllib
import uuid
from subprocess import call

import config  # assumed local config.py module that provides the Tumblr credentials used below
frequency = 10 # Loop interval
frames = 20 # Fames to take
delay = 0.2 # Delay between frames
gifDelay = 20 # Used for timing GIF generation
# Tumblr config
tumblrName = config.tumblrName
consumerKey = config.consumerKey
consumerSecret = config.consumerSecret
oauthToken = config.oauthToken
oauthSecret = config.oauthSecret
tags = config.tags
# Authenticate via OAuth to Tumblr
tumblr = pytumblr.TumblrRestClient(consumerKey, consumerSecret, oauthToken, oauthSecret)
# Files config
data = "data/"
tmp = data + "tmp/"
output = data + "output/"
# GoPro
goProUrl = "http://10.5.5.9:8080/live/amba.mp4"
def getGif():
# Remove tmp files
call("rm -f " + tmp + "*", shell=True)
# Get MP4 files from GoPro
print "[+] Talking to GoPro"
for i in range(frames):
num = str(i).zfill(3)
goGoPro(num)
time.sleep(delay)
# Convert MP4 files into single GIF images
print "[+] Converting GoPro files"
for i in range(frames):
num = str(i).zfill(3)
mp4ToGif(num)
# Make the final GIF
print "[+] Making GIF"
filename = makeGif()
# Post
post(filename)
print "Done: " + filename
def mp4ToGif(num):
call("ffmpeg -i " + tmp + num + ".mp4 -ss 00:00:00 -vframes 1 " + tmp + num + ".png", shell=True)
call("convert " + tmp + num + ".png " + tmp + num + ".gif", shell=True)
def makeGif():
uid = str(uuid.uuid4())
filename = output + uid + ".gif"
call("gifsicle --colors 256 --delay=" + str(gifDelay) + " --loop " + tmp + "*.gif >" + filename, shell=True)
return filename
def goGoPro(num):
urllib.urlretrieve(goProUrl, tmp + num + ".mp4")
def post(filename):
try:
tumblr.create_photo(tumblrName, state="published", tags=tags, data=filename)
except:
pass
return True
def status(state):
print state
return True
if __name__ == "__main__":
if not os.path.exists(data):
os.makedirs(data)
os.makedirs(tmp)
os.makedirs(output)
while 1 == 1:
try:
getGif()
except:
print "[!] Failed for some reason"
time.sleep(frequency)
| python |
# Generated by Django 3.0.3 on 2020-03-26 13:50
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('tags', '0001_initial'),
('programs', '0002_auto_20200326_2050'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.AddField(
model_name='tag',
name='created_by',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='tag',
name='program',
field=models.ManyToManyField(to='programs.Program'),
),
]
| python |
"""
A top-level linear programming interface. Currently this interface solves
linear programming problems via the Simplex and Interior-Point methods.
.. versionadded:: 0.15.0
Functions
---------
.. autosummary::
:toctree: generated/
linprog
linprog_verbose_callback
linprog_terse_callback
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from .optimize import OptimizeResult
from ._linprog_ip import _linprog_ip
from ._linprog_simplex import _linprog_simplex
from ._linprog_util import (
_parse_linprog, _presolve, _get_Abc, _postprocess
)
__all__ = ['linprog', 'linprog_verbose_callback', 'linprog_terse_callback']
__docformat__ = "restructuredtext en"
def linprog_verbose_callback(res):
"""
A sample callback function demonstrating the linprog callback interface.
This callback produces detailed output to sys.stdout before each iteration
and after the final iteration of the simplex algorithm.
Parameters
----------
res : A `scipy.optimize.OptimizeResult` consisting of the following fields:
x : 1D array
The independent variable vector which optimizes the linear
programming problem.
fun : float
Value of the objective function.
success : bool
True if the algorithm succeeded in finding an optimal solution.
slack : 1D array
The values of the slack variables. Each slack variable corresponds
to an inequality constraint. If the slack is zero, then the
corresponding constraint is active.
con : 1D array
The (nominally zero) residuals of the equality constraints, that is,
``b - A_eq @ x``
phase : int
The phase of the optimization being executed. In phase 1 a basic
feasible solution is sought and the T has an additional row
representing an alternate objective function.
status : int
An integer representing the exit status of the optimization::
0 : Optimization terminated successfully
1 : Iteration limit reached
2 : Problem appears to be infeasible
3 : Problem appears to be unbounded
4 : Serious numerical difficulties encountered
nit : int
The number of iterations performed.
message : str
A string descriptor of the exit status of the optimization.
"""
x = res['x']
fun = res['fun']
success = res['success']
phase = res['phase']
status = res['status']
nit = res['nit']
message = res['message']
complete = res['complete']
saved_printoptions = np.get_printoptions()
np.set_printoptions(linewidth=500,
formatter={'float': lambda x: "{0: 12.4f}".format(x)})
if status:
print('--------- Simplex Early Exit -------\n'.format(nit))
print('The simplex method exited early with status {0:d}'.format(status))
print(message)
elif complete:
print('--------- Simplex Complete --------\n')
print('Iterations required: {}'.format(nit))
else:
print('--------- Iteration {0:d} ---------\n'.format(nit))
if nit > 0:
if phase == 1:
print('Current Pseudo-Objective Value:')
else:
print('Current Objective Value:')
print('f = ', fun)
print()
print('Current Solution Vector:')
print('x = ', x)
print()
np.set_printoptions(**saved_printoptions)
def linprog_terse_callback(res):
"""
A sample callback function demonstrating the linprog callback interface.
This callback produces brief output to sys.stdout before each iteration
and after the final iteration of the simplex algorithm.
Parameters
----------
res : A `scipy.optimize.OptimizeResult` consisting of the following fields:
x : 1D array
The independent variable vector which optimizes the linear
programming problem.
fun : float
Value of the objective function.
success : bool
True if the algorithm succeeded in finding an optimal solution.
slack : 1D array
The values of the slack variables. Each slack variable corresponds
to an inequality constraint. If the slack is zero, then the
corresponding constraint is active.
con : 1D array
The (nominally zero) residuals of the equality constraints, that is,
``b - A_eq @ x``
phase : int
The phase of the optimization being executed. In phase 1 a basic
feasible solution is sought and the T has an additional row
representing an alternate objective function.
status : int
An integer representing the exit status of the optimization::
0 : Optimization terminated successfully
1 : Iteration limit reached
2 : Problem appears to be infeasible
3 : Problem appears to be unbounded
4 : Serious numerical difficulties encountered
nit : int
The number of iterations performed.
message : str
A string descriptor of the exit status of the optimization.
"""
nit = res['nit']
x = res['x']
if nit == 0:
print("Iter: X:")
print("{0: <5d} ".format(nit), end="")
print(x)
def linprog(c, A_ub=None, b_ub=None, A_eq=None, b_eq=None,
bounds=None, method='simplex', callback=None,
options=None):
"""
Minimize a linear objective function subject to linear
equality and inequality constraints. Linear Programming is intended to
solve the following problem form:
Minimize::
c @ x
Subject to::
A_ub @ x <= b_ub
A_eq @ x == b_eq
lb <= x <= ub
where ``lb = 0`` and ``ub = None`` unless set in ``bounds``.
Parameters
----------
c : 1D array
Coefficients of the linear objective function to be minimized.
A_ub : 2D array, optional
2D array such that ``A_ub @ x`` gives the values of the upper-bound
inequality constraints at ``x``.
b_ub : 1D array, optional
1D array of values representing the upper-bound of each inequality
constraint (row) in ``A_ub``.
A_eq : 2D, optional
2D array such that ``A_eq @ x`` gives the values of the equality
constraints at ``x``.
b_eq : 1D array, optional
1D array of values representing the RHS of each equality constraint
(row) in ``A_eq``.
bounds : sequence, optional
``(min, max)`` pairs for each element in ``x``, defining
the bounds on that parameter. Use None for one of ``min`` or
``max`` when there is no bound in that direction. By default
bounds are ``(0, None)`` (non-negative).
If a sequence containing a single tuple is provided, then ``min`` and
``max`` will be applied to all variables in the problem.
method : str, optional
Type of solver. :ref:`'simplex' <optimize.linprog-simplex>`
and :ref:`'interior-point' <optimize.linprog-interior-point>`
are supported.
callback : callable, optional (simplex only)
If a callback function is provided, it will be called within each
iteration of the simplex algorithm. The callback must require a
`scipy.optimize.OptimizeResult` consisting of the following fields:
x : 1D array
The independent variable vector which optimizes the linear
programming problem.
fun : float
Value of the objective function.
success : bool
True if the algorithm succeeded in finding an optimal solution.
slack : 1D array
The values of the slack variables. Each slack variable
corresponds to an inequality constraint. If the slack is zero,
the corresponding constraint is active.
con : 1D array
The (nominally zero) residuals of the equality constraints
that is, ``b - A_eq @ x``
phase : int
The phase of the optimization being executed. In phase 1 a basic
feasible solution is sought and the T has an additional row
representing an alternate objective function.
status : int
An integer representing the exit status of the optimization::
0 : Optimization terminated successfully
1 : Iteration limit reached
2 : Problem appears to be infeasible
3 : Problem appears to be unbounded
4 : Serious numerical difficulties encountered
nit : int
The number of iterations performed.
message : str
A string descriptor of the exit status of the optimization.
options : dict, optional
A dictionary of solver options. All methods accept the following
generic options:
maxiter : int
Maximum number of iterations to perform.
disp : bool
Set to True to print convergence messages.
For method-specific options, see :func:`show_options('linprog')`.
Returns
-------
res : OptimizeResult
A :class:`scipy.optimize.OptimizeResult` consisting of the fields:
x : 1D array
The independent variable vector which optimizes the linear
programming problem.
fun : float
Value of the objective function.
slack : 1D array
The values of the slack variables. Each slack variable
corresponds to an inequality constraint. If the slack is zero,
then the corresponding constraint is active.
con : 1D array
The (nominally zero) residuals of the equality constraints,
that is, ``b - A_eq @ x``
success : bool
Returns True if the algorithm succeeded in finding an optimal
solution.
status : int
An integer representing the exit status of the optimization::
0 : Optimization terminated successfully
1 : Iteration limit reached
2 : Problem appears to be infeasible
3 : Problem appears to be unbounded
4 : Serious numerical difficulties encountered
nit : int
The number of iterations performed.
message : str
A string descriptor of the exit status of the optimization.
See Also
--------
show_options : Additional options accepted by the solvers
Notes
-----
This section describes the available solvers that can be selected by the
'method' parameter. The default method
is :ref:`Simplex <optimize.linprog-simplex>`.
:ref:`Interior point <optimize.linprog-interior-point>` is also available.
Method *simplex* uses the simplex algorithm (as it relates to linear
programming, NOT the Nelder-Mead simplex) [1]_, [2]_. This algorithm
should be reasonably reliable and fast for small problems.
.. versionadded:: 0.15.0
Method *interior-point* uses the primal-dual path following algorithm
as outlined in [4]_. This algorithm is intended to provide a faster
and more reliable alternative to *simplex*, especially for large,
sparse problems. Note, however, that the solution returned may be slightly
less accurate than that of the simplex method and may not correspond with a
vertex of the polytope defined by the constraints.
Before applying either method a presolve procedure based on [8]_ attempts to
identify trivial infeasibilities, trivial unboundedness, and potential
problem simplifications. Specifically, it checks for:
- rows of zeros in ``A_eq`` or ``A_ub``, representing trivial constraints;
- columns of zeros in ``A_eq`` `and` ``A_ub``, representing unconstrained
variables;
- column singletons in ``A_eq``, representing fixed variables; and
- column singletons in ``A_ub``, representing simple bounds.
If presolve reveals that the problem is unbounded (e.g. an unconstrained
and unbounded variable has negative cost) or infeasible (e.g. a row of
zeros in ``A_eq`` corresponds with a nonzero in ``b_eq``), the solver
terminates with the appropriate status code. Note that presolve terminates
as soon as any sign of unboundedness is detected; consequently, a problem
may be reported as unbounded when in reality the problem is infeasible
(but infeasibility has not been detected yet). Therefore, if the output
message states that unboundedness is detected in presolve and it is
necessary to know whether the problem is actually infeasible, set option
``presolve=False``.
If neither infeasibility nor unboundedness are detected in a single pass
of the presolve check, bounds are tightened where possible and fixed
variables are removed from the problem. Then, linearly dependent rows
of the ``A_eq`` matrix are removed, (unless they represent an
infeasibility) to avoid numerical difficulties in the primary solve
routine. Note that rows that are nearly linearly dependent (within a
prescribed tolerance) may also be removed, which can change the optimal
solution in rare cases. If this is a concern, eliminate redundancy from
your problem formulation and run with option ``rr=False`` or
``presolve=False``.
Several potential improvements can be made here: additional presolve
checks outlined in [8]_ should be implemented, the presolve routine should
be run multiple times (until no further simplifications can be made), and
more of the efficiency improvements from [5]_ should be implemented in the
redundancy removal routines.
After presolve, the problem is transformed to standard form by converting
the (tightened) simple bounds to upper bound constraints, introducing
non-negative slack variables for inequality constraints, and expressing
unbounded variables as the difference between two non-negative variables.
References
----------
.. [1] Dantzig, George B., Linear programming and extensions. Rand
Corporation Research Study Princeton Univ. Press, Princeton, NJ,
1963
.. [2] Hillier, S.H. and Lieberman, G.J. (1995), "Introduction to
Mathematical Programming", McGraw-Hill, Chapter 4.
.. [3] Bland, Robert G. New finite pivoting rules for the simplex method.
Mathematics of Operations Research (2), 1977: pp. 103-107.
.. [4] Andersen, Erling D., and Knud D. Andersen. "The MOSEK interior point
optimizer for linear programming: an implementation of the
homogeneous algorithm." High performance optimization. Springer US,
2000. 197-232.
.. [5] Andersen, Erling D. "Finding all linearly dependent rows in
large-scale linear programming." Optimization Methods and Software
6.3 (1995): 219-227.
.. [6] Freund, Robert M. "Primal-Dual Interior-Point Methods for Linear
Programming based on Newton's Method." Unpublished Course Notes,
March 2004. Available 2/25/2017 at
https://ocw.mit.edu/courses/sloan-school-of-management/15-084j-nonlinear-programming-spring-2004/lecture-notes/lec14_int_pt_mthd.pdf
.. [7] Fourer, Robert. "Solving Linear Programs by Interior-Point Methods."
Unpublished Course Notes, August 26, 2005. Available 2/25/2017 at
http://www.4er.org/CourseNotes/Book%20B/B-III.pdf
.. [8] Andersen, Erling D., and Knud D. Andersen. "Presolving in linear
programming." Mathematical Programming 71.2 (1995): 221-245.
.. [9] Bertsimas, Dimitris, and J. Tsitsiklis. "Introduction to linear
programming." Athena Scientific 1 (1997): 997.
.. [10] Andersen, Erling D., et al. Implementation of interior point
methods for large scale linear programming. HEC/Universite de
Geneve, 1996.
Examples
--------
Consider the following problem:
Minimize::
f = -1x[0] + 4x[1]
Subject to::
-3x[0] + 1x[1] <= 6
1x[0] + 2x[1] <= 4
x[1] >= -3
-inf <= x[0] <= inf
This problem deviates from the standard linear programming problem.
In standard form, linear programming problems assume the variables x are
non-negative. Since the problem variables don't have the standard bounds of
``(0, None)``, the variable bounds must be set using ``bounds`` explicitly.
There are two upper-bound constraints, which can be expressed as
dot(A_ub, x) <= b_ub
The input for this problem is as follows:
>>> c = [-1, 4]
>>> A = [[-3, 1], [1, 2]]
>>> b = [6, 4]
>>> x0_bounds = (None, None)
>>> x1_bounds = (-3, None)
>>> from scipy.optimize import linprog
>>> res = linprog(c, A_ub=A, b_ub=b, bounds=(x0_bounds, x1_bounds),
... options={"disp": True})
Optimization terminated successfully.
Current function value: -22.000000
Iterations: 5 # may vary
>>> print(res)
con: array([], dtype=float64)
fun: -22.0
message: 'Optimization terminated successfully.'
nit: 5 # may vary
slack: array([39., 0.]) # may vary
status: 0
success: True
x: array([10., -3.])
"""
meth = method.lower()
default_tol = 1e-12 if meth == 'simplex' else 1e-9
c, A_ub, b_ub, A_eq, b_eq, bounds, solver_options = _parse_linprog(
c, A_ub, b_ub, A_eq, b_eq, bounds, options)
tol = solver_options.get('tol', default_tol)
iteration = 0
complete = False # will become True if solved in presolve
undo = []
# Keep the original arrays to calculate slack/residuals for original
# problem.
c_o, A_ub_o, b_ub_o, A_eq_o, b_eq_o = c.copy(
), A_ub.copy(), b_ub.copy(), A_eq.copy(), b_eq.copy()
# Solve trivial problem, eliminate variables, tighten bounds, etc...
c0 = 0 # we might get a constant term in the objective
if solver_options.pop('presolve', True):
rr = solver_options.pop('rr', True)
(c, c0, A_ub, b_ub, A_eq, b_eq, bounds, x, undo, complete, status,
message) = _presolve(c, A_ub, b_ub, A_eq, b_eq, bounds, rr, tol)
if not complete:
A, b, c, c0 = _get_Abc(c, c0, A_ub, b_ub, A_eq, b_eq, bounds, undo)
T_o = (c_o, A_ub_o, b_ub_o, A_eq_o, b_eq_o, bounds, undo)
if meth == 'simplex':
x, status, message, iteration = _linprog_simplex(
c, c0=c0, A=A, b=b, callback=callback, _T_o=T_o, **solver_options)
elif meth == 'interior-point':
x, status, message, iteration = _linprog_ip(
c, c0=c0, A=A, b=b, callback=callback, **solver_options)
else:
raise ValueError('Unknown solver %s' % method)
# Eliminate artificial variables, re-introduce presolved variables, etc...
# need modified bounds here to translate variables appropriately
disp = solver_options.get('disp', False)
x, fun, slack, con, status, message = _postprocess(
x, c_o, A_ub_o, b_ub_o, A_eq_o, b_eq_o, bounds,
complete, undo, status, message, tol, iteration, disp)
sol = {
'x': x,
'fun': fun,
'slack': slack,
'con': con,
'status': status,
'message': message,
'nit': iteration,
'success': status == 0}
return OptimizeResult(sol)
| python |
"""
constraint_aggregator.py
Aggregated Constraints from Astroplan as well as our own user-defined constraints.
In our architecture we define the concept of a "Static Constraint" as one that always applies no matter
how far we are scheduling into the future
"Dynamic Constraints" are those that only apply if our total schedule window does not exceed some preset
time in the future
"""
from .constraints import TsoOutageConstraint
from astroplan.constraints import AtNightConstraint, AirmassConstraint
from tso.scheduler.weather_constraint import WeatherConstraint
def create_unmapped_constraint(*values):
return None
def create_air_mass_constraint(values):
return AirmassConstraint(
max=values.get("max"),
boolean_constraint=values.get("boolean_constraint")
)
def create_at_night_constraint(*values):
return AtNightConstraint.twilight_civil()
def create_weather_constraint(values):
    """Initializes the weather constraint
Parameters
----------
values:
contains the start datetime and end datetime required to initialize the weather constraint
Returns:
--------
An initialized weather constraint for the given time values
"""
return WeatherConstraint(
start_time=values.get("start_datetime"),
end_time=values.get("end_datetime"),
cloud_threshold=values.get("cloud_threshold"),
cloud_average_threshold=values.get("cloud_average_threshold"),
rain_threshold=values.get("rain_threshold")
)
def create_tso_outage_constraint(values):
return TsoOutageConstraint(outage_config=values)
constraint_map = {
"AirmassConstraint": create_air_mass_constraint,
"AtNightConstraint": create_at_night_constraint,
"TsoOutageConstraint": create_tso_outage_constraint,
"WeatherConstraint": create_weather_constraint
}
"""Initializes all the necessary constraints as specified by the user
Parameters:
----------
constraint_configuration:
contains all necessary data for constraint initialization
start_datetime:
Datetime object that indicates the beginning of the schedule
end_datetime:
    Datetime object that indicates the finalization of the schedule
no_weather_constraints:
    Boolean that specifies if the weather constraint should be considered or not
"""
def initialize_constraints(constraint_configuration, start_datetime, end_datetime, no_weather_constraints):
if not no_weather_constraints:
if 'WeatherConstraint' in constraint_configuration:
constraint_configuration['WeatherConstraint']['start_datetime'] = start_datetime
constraint_configuration['WeatherConstraint']['end_datetime'] = end_datetime
else:
if 'WeatherConstraint' in constraint_configuration:
del constraint_configuration['WeatherConstraint']
global_constraints = []
for key, value in constraint_configuration.items():
mapped_c = constraint_map.get(key, create_unmapped_constraint)(value)
if mapped_c is not None:
global_constraints.append(mapped_c)
return global_constraints
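

# A minimal usage sketch (illustrative only): the configuration keys mirror constraint_map above,
# and the values shown here are assumptions rather than part of the original module.
#
#     from datetime import datetime, timedelta
#
#     example_config = {
#         "AirmassConstraint": {"max": 2.0, "boolean_constraint": False},
#         "AtNightConstraint": {},
#         "TsoOutageConstraint": {},  # hypothetical outage payload
#     }
#     start = datetime.utcnow()
#     end = start + timedelta(days=1)
#     constraints = initialize_constraints(example_config, start, end, no_weather_constraints=True)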
| python |
import os
import uuid
import json
import mimetypes
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate, login as django_login, logout
from django.core import serializers
from django.core.serializers.json import DjangoJSONEncoder
from cloud.decorators.userRequired import user_required
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, HttpResponseNotFound, JsonResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.utils.crypto import get_random_string
from .tokens import tokenizer
from .forms import LoginForm, MkdirForm, RecoverPasswordForm, RenameForm, SearchForm, ResetForm, UploadForm, GroupShareForm, UserShareForm
from .mailer import send_password_request_email, send_share_email
from .models import Group, GroupShare, ShareUrl, User, UserGroup, UserShare
from .fileManager import FileManager
@user_required
def file_explorer(request):
"""
Create user directory if account created with
python3 manage.py createsuperuser
as the user directory might not have been created
"""
user_directory = settings.MEDIA_ROOT + "/" + request.user.user_id
user_trash = settings.TRASH_ROOT + "/" + request.user.user_id
if not os.path.exists(user_directory):
try:
os.mkdir(user_directory)
except OSError:
messages.error(request, "Error accessing your data.<br/>Contact admin")
logout(request)
return redirect("index")
if not os.path.exists(user_trash):
try:
os.mkdir(user_trash)
except OSError:
messages.error(request, "Error accessing your data.<br/>Contact admin")
logout(request)
return redirect("index")
fm = FileManager(request.user)
mkdir_form = MkdirForm()
rename_form = RenameForm()
upload_form = UploadForm()
usershare_from = UserShareForm()
groupshare_form = GroupShareForm()
if 'p' in dict(request.GET) and len(dict(request.GET)['p'][0]) > 0:
new_path = dict(request.GET)['p'][0].replace("../", "") # No previous directory browsing
fm.update_path(new_path)
mkdir_form.initial['dir_path'] = new_path
upload_form.initial['upload_path'] = new_path
context = {'files': fm.directory_list(), 'uploadForm': upload_form, 'mkdirForm': mkdir_form, 'renameForm': rename_form,
'usershareForm': usershare_from, 'groupshareForm': groupshare_form}
fm.update_context_data(context)
return render(request, 'cloud/fileManager.html', context)
@user_required
def file_move(request):
fm = FileManager(request.user)
if request.method == 'GET':
# Send directory information
mkdir_form = MkdirForm()
if 'p' in dict(request.GET) and len(dict(request.GET)['p'][0]) > 0:
new_path = dict(request.GET)['p'][0].replace("../", "") # No previous directory browsing
fm.update_path(new_path)
mkdir_form.initial['dir_path'] = new_path
context = {'dirs': fm.directory_list(False), 'mkdirForm': mkdir_form}
fm.update_context_data(context)
return render(request, 'cloud/moveExplorer.html', context)
elif request.method == 'POST':
# Move file to new destination
cur_path = request.POST.get("fp", None)
move_path = request.POST.get("np", None)
if cur_path == None or move_path == None:
return JsonResponse({'result': 2, 'message': 'Request Error'})
else:
return fm.move(cur_path.replace("../", ""), move_path.replace("../", ""))
else:
return HttpResponseNotFound("Unknown Request")
@user_required
def trash_explorer(request):
user_directory = settings.MEDIA_ROOT + "/" + request.user.user_id
user_trash = settings.TRASH_ROOT + "/" + request.user.user_id
if not os.path.exists(user_directory):
try:
os.mkdir(user_directory)
except OSError:
messages.error(request, "Error accessing your data.<br/>Contact admin")
logout(request)
return redirect("index")
if not os.path.exists(user_trash):
try:
os.mkdir(user_trash)
except OSError:
messages.error(request, "Error accessing your data.<br/>Contact admin")
logout(request)
return redirect("index")
fm = FileManager(request.user)
context = {'files': fm.trash_list()}
return render(request, 'cloud/trashManager.html', context)
def file_browser(request):
# Todo: file handling, sharing and security
return HttpResponse("File: " + request.GET.get("f"))
def file_delete(request):
if request.method == 'POST':
file_path = request.POST.get("fp", None)
if file_path == None:
return HttpResponseNotFound("Missing file")
else:
file_path = file_path.replace("../", "") # No previous directory browsing
fm = FileManager(request.user)
return fm.delete_item(file_path)
else:
# Get not allowed
return HttpResponseForbidden("Not allowed")
def file_delete_perm(request):
if request.method == 'POST':
file_path = request.POST.get("fp", None)
if file_path == None:
return HttpResponseNotFound("Missing file")
else:
file_path = file_path.replace("../", "") # No previous directory browsing
fm = FileManager(request.user)
return fm.purge_item(file_path)
else:
# Get not allowed
return HttpResponseForbidden("Not allowed")
def file_restore(request):
if request.method == 'POST':
file_path = request.POST.get("fp", None)
if file_path == None:
return HttpResponseNotFound("Missing file")
else:
file_path = file_path.replace("../", "") # No previous directory browsing
fm = FileManager(request.user)
return fm.restore_item(file_path)
else:
# Get not allowed
return HttpResponseForbidden("Not allowed")
def empty_trash(request):
if request.method == 'POST':
fm = FileManager(request.user)
return fm.empty_trash()
else:
# Get not allowed
return HttpResponseForbidden("Not allowed")
def file_details(request):
if request.method == 'POST':
user_rec = None
file_share = request.POST.get("fs")
if file_share == "":
user_rec = request.user
else:
if not ShareUrl.objects.filter(url=file_share).exists():
return HttpResponseNotFound("Missing file")
else:
share_data = get_object_or_404(ShareUrl, url=file_share)
if share_data.is_private and not user_has_access(request.user, file_share):
return HttpResponseForbidden("Access Denied")
else:
user_rec = share_data.owner
fm = FileManager(user_rec)
file_information = {}
file_path = request.POST.get("filepath", None)
        if file_path is None:
return HttpResponseNotFound("Missing file")
else:
file_path = file_path.replace("../", "") # No previous directory browsing
file_information = fm.file_details(file_path)
if bool(file_information): # Not empty
return JsonResponse(file_information)
else:
return HttpResponseNotFound("Missing file")
else:
# Reject get request
return HttpResponseForbidden("Not allowed")
@user_required
def file_search(request):
if request.method == "POST":
search_form = SearchForm(request.POST)
search_form.full_clean()
if search_form.is_valid():
fm = FileManager(request.user)
return fm.file_search(search_form.cleaned_data['search_item'].replace("../", ""))
else:
return JsonResponse({'result': 1})
else:
search_form = SearchForm()
context = {'search_form': search_form}
return render(request, 'cloud/fileSearch.html', context)
def file_rename(request):
if request.method == 'POST':
rename_form = RenameForm(request.POST)
rename_form.full_clean()
if rename_form.is_valid():
fm = FileManager(request.user)
if fm.rename(rename_form.cleaned_data['rename_path'].replace("../", ""), rename_form.cleaned_data['new_name'].replace("../", "")):
return JsonResponse({'result': 0})
else:
return JsonResponse({'result': 1})
else:
# Reject get request
return HttpResponseForbidden("Not allowed")
def file_download(request):
file_share = request.GET.get("fs", None)
    if file_share is None:
fm = FileManager(request.user)
return fm.download_file(request.GET.get("file"))
else:
if not ShareUrl.objects.filter(url=file_share).exists():
return render(request, 'cloud/e404.html', status=404) # 404
else:
share_data = get_object_or_404(ShareUrl, url=file_share)
fm = FileManager(share_data.owner)
is_file = fm.set_share_path(share_data.path)
if is_file == 1:
# Download file
return fm.download_file(share_data.path)
else:
# Download file from shared directory
return fm.download_file(request.GET.get("file"))
def check_quota(request):
file_share = request.POST.get("fs")
if file_share == "":
return JsonResponse({'available': request.user.get_remaining_quota()})
else:
if not ShareUrl.objects.filter(url=file_share).exists():
return JsonResponse({'available': -1}) # 404
else:
share_data = get_object_or_404(ShareUrl, url=file_share)
return JsonResponse({'available': share_data.owner.get_remaining_quota()})
def file_upload(request):
if request.method == 'POST':
upload_form = UploadForm(request.POST, request.FILES)
upload_form.full_clean()
user_files = request.FILES.getlist('user_files')
if upload_form.is_valid():
file_share = upload_form.cleaned_data['share_url']
user_rec = None
if file_share == "":
user_rec = request.user
else:
if not ShareUrl.objects.filter(url=file_share).exists():
return JsonResponse({'result': 1})
else:
share_data = get_object_or_404(ShareUrl, url=file_share)
if share_data.is_private and not user_has_access(request.user, file_share):
return JsonResponse({'result': 1})
else:
user_rec = share_data.owner
fm = FileManager(user_rec)
fm.update_path(upload_form.cleaned_data['upload_path'])
user_db = get_object_or_404(User, pk=user_rec.user_id)
insufficient_count = 0
for file_to_upload in user_files:
user_db = get_object_or_404(User, pk=user_rec.user_id)
if file_to_upload.size <= user_db.get_remaining_quota():
fm.upload_file(file_to_upload)
else:
# Not enough space to upload file
insufficient_count = insufficient_count + 1
# messages.success(request, "Files uploaded successfully")
return JsonResponse({'result': 0, 'insufficient': insufficient_count})
else:
# messages.error(request, "Files could not be uploaded")
return JsonResponse({'result': 1})
else:
# No get allowed
return HttpResponseForbidden("Upload Rejected")
def create_directory(request):
if request.method == 'POST':
mkdir_form = MkdirForm(request.POST)
mkdir_form.full_clean()
if mkdir_form.is_valid():
file_share = mkdir_form.cleaned_data['share_url']
user_rec = None
if file_share == "":
user_rec = request.user
else:
if not ShareUrl.objects.filter(url=file_share).exists():
return JsonResponse({'result': 1})
else:
share_data = get_object_or_404(ShareUrl, url=file_share)
if share_data.is_private and not user_has_access(request.user, file_share):
return JsonResponse({'result': 1})
else:
user_rec = share_data.owner
fm = FileManager(user_rec)
fm.update_path(mkdir_form.cleaned_data['dir_path'])
mkdir_status = fm.create_directory(mkdir_form.cleaned_data['dir_name'])
if mkdir_status:
return JsonResponse({'result': 0})
else:
return JsonResponse({'result': 2})
else:
return JsonResponse({'result': 1})
else:
# No get allowed
return HttpResponseForbidden("Invalid Request")
def group_share(request):
if request.method == 'POST':
if 'lst' not in request.POST and 'del' not in request.POST:
# Share
group_form = GroupShareForm(request.POST)
group_form.full_clean()
if group_form.is_valid():
# Form valid
group_name = group_form.cleaned_data['groupname']
can_edit_check = group_form.cleaned_data['can_edit']
# Check if group available
if Group.objects.filter(name=group_name).exists():
# Share to group
try:
user = get_object_or_404(User, user_id=request.user.pk)
grup = get_object_or_404(Group, name=group_name)
# Check if user is a group member
if UserGroup.objects.filter(group=grup, user=user).exists():
if GroupShare.objects.filter(url__owner=user, group=grup, url__path=request.POST.get("fp", "")).exists():
return JsonResponse({'result': 2})
else:
# Create link
new_url = str(get_random_string(length=12)) # Random share link
while ShareUrl.objects.filter(url=new_url).exists():
# Check if random url has not been used before
new_url = str(get_random_string(length=12)) # Regenerate random share link
group_pub_link = ShareUrl.objects.create(owner=user, path=request.POST.get("fp", ""), url=new_url, can_edit=can_edit_check, is_private=True)
if group_pub_link:
group_shr = GroupShare.objects.create(url=group_pub_link, group=grup)
if not group_shr:
return JsonResponse({'result': 1})
else:
# Email group members
grup_members = UserGroup.objects.filter(group=grup)
for member in grup_members:
if member.user != user:
# Do not email myself
send_share_email(member.user.email, member.user.name, member.user.surname, user.name, user.surname,
user.user_id, request.POST.get("fn", ""))
return JsonResponse({'result': 0}) # Success
else:
return JsonResponse({'result': 1})
else:
return JsonResponse({'result': 3}) # Not a group member
except Exception as ex:
return JsonResponse({'result': 4})
else:
# Group does not exist
return JsonResponse({'result': 1})
else:
return JsonResponse({'result': 4}) # Error
elif 'del' in request.POST:
# Unshare
group_id = request.POST.get("del", None)
if group_id is None:
return JsonResponse({'result': 1}) # Error
else:
try:
grup = get_object_or_404(Group, pk=group_id)
sharer = get_object_or_404(User, user_id=request.user.pk)
groupshare = GroupShare.objects.filter(url__owner=sharer, group=grup, url__path=request.POST.get("fp", "")).values("url")
share_url = ShareUrl.objects.filter(url__in=groupshare)
share_url.delete()
# Removal complete
return JsonResponse({'result': 0})
except Exception as ex:
return JsonResponse({'result': 1}) # Error
else:
# Return share list
owner_urls = ShareUrl.objects.filter(owner=User(user_id=request.user.pk), path=request.POST.get("fp", "")).values("url")
group_share_list = GroupShare.objects.filter(url__in=owner_urls).values("group__pk","group__name","url__can_edit")
#json_data = serializers.serialize('json', group_share_list, fields=('name', 'edit'))
json_data = json.dumps(list(group_share_list), cls=DjangoJSONEncoder)
return HttpResponse(json_data, content_type='application/json')
else:
return HttpResponseForbidden()
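# Sketch (not part of the original module): the generate-check-regenerate
# pattern used in group_share, user_share and public_share could be factored
# into a single helper built from the same get_random_string/ShareUrl calls
# already imported above.
def make_unique_share_url(length=12):
    """Return a random share URL not yet used by any ShareUrl row."""
    new_url = str(get_random_string(length=length))
    while ShareUrl.objects.filter(url=new_url).exists():
        new_url = str(get_random_string(length=length))  # regenerate on collision
    return new_url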
def user_share(request):
if request.method == 'POST':
if 'lst' not in request.POST and 'del' not in request.POST:
# Share
user_form = UserShareForm(request.POST)
user_form.full_clean()
if user_form.is_valid():
# Form valid
user_name = user_form.cleaned_data['username']
can_edit_check = user_form.cleaned_data['can_edit']
# Check if group available
if User.objects.filter(user_id=user_name).exists():
# Share to user
try:
user = get_object_or_404(User, user_id=user_name)
sharer = get_object_or_404(User, user_id=request.user.pk)
if sharer == user:
return JsonResponse({'result': 3}) # Cannot share with yourself
else:
if UserShare.objects.filter(url__owner=sharer, shared_with=user, url__path=request.POST.get("fp", "")).exists():
return JsonResponse({'result': 2})
else:
# Create link
new_url = str(get_random_string(length=12)) # Random share link
while ShareUrl.objects.filter(url=new_url).exists():
# Check if random url has not been used before
new_url = str(get_random_string(length=12)) # Regenerate random share link
user_pub_link = ShareUrl.objects.create(owner=sharer, path=request.POST.get("fp", ""), url=new_url, can_edit=can_edit_check, is_private=True)
if user_pub_link:
user_shr = UserShare.objects.create(url=user_pub_link, shared_with=user)
if not user_shr:
return JsonResponse({'result': 1})
else:
# Email user
send_share_email(user.email, user.name, user.surname, sharer.name, sharer.surname, sharer.user_id, request.POST.get("fn", ""))
return JsonResponse({'result': 0}) # Success
else:
return JsonResponse({'result': 1})
except Exception as ex:
return JsonResponse({'result': 4})
else:
# User does not exist
return JsonResponse({'result': 1})
else:
return JsonResponse({'result': 4}) # Error
elif 'del' in request.POST:
# Unshare
users_id = request.POST.get("del", None)
if users_id is None:
return JsonResponse({'result': 1}) # Error
else:
try:
user = get_object_or_404(User, user_id=users_id)
sharer = get_object_or_404(User, user_id=request.user.pk)
usershare = UserShare.objects.filter(url__owner=sharer, shared_with=user, url__path=request.POST.get("fp", "")).values("url")
share_url = ShareUrl.objects.filter(url__in=usershare)
share_url.delete()
# Removal complete
return JsonResponse({'result': 0})
except Exception as ex:
return JsonResponse({'result': 1}) # Error
else:
# Return share list
owner_urls = ShareUrl.objects.filter(owner=User(user_id=request.user.pk), path=request.POST.get("fp", "")).values("url")
user_share_list = UserShare.objects.filter(url__in=owner_urls).values("shared_with__pk","shared_with__title",
"shared_with__initials","shared_with__name","shared_with__surname","shared_with__email","url__can_edit")
#json_data = serializers.serialize('json', User.objects.filter(user_id__in=user_share_list), fields=('title','initials','name','surname','email'))
json_data = json.dumps(list(user_share_list), cls=DjangoJSONEncoder)
return HttpResponse(json_data, content_type='application/json')
else:
return HttpResponseForbidden()
@user_required
def public_share(request):
if request.method == 'POST':
if 'lst' not in request.POST:
if ShareUrl.objects.filter(owner=User(user_id=request.user.pk), path=request.POST.get("filepath", "")).exists():
# Delete link
try:
share_url = ShareUrl.objects.filter(owner=User(user_id=request.user.pk), path=request.POST.get("filepath", ""))
share_url.delete()
except Exception as del_ex:
return JsonResponse({'result': 2})
return JsonResponse({'result': 1})
else:
# Share
#new_url = str(uuid.uuid4().hex[:16]) # Generate unique link
new_url = str(get_random_string(length=12)) # Random share link
while ShareUrl.objects.filter(url=new_url).exists():
# Check if random url has not been used before
new_url = str(get_random_string(length=12)) # Regenerate random share link
try:
user = get_object_or_404(User, user_id=request.user.pk)
can_public_edit = False
if int(request.POST.get("canedit", 0)) == 1:
can_public_edit = True
else:
can_public_edit = False
new_share = ShareUrl.objects.create(owner=user, path=request.POST.get("filepath", None), url=new_url, can_edit=can_public_edit, is_private=False)
if new_share:
return JsonResponse({'result': 0, 'sharelink': settings.EXTERNAL_URL + 's/' + new_url, 'shareedit': can_public_edit})
else:
return JsonResponse({'result': 2})
except Exception as ex:
return JsonResponse({'result': 2})
else:
# Return share list
if ShareUrl.objects.filter(owner=User(user_id=request.user.pk), path=request.POST.get("filepath", "")).exists():
share_url = ShareUrl.objects.filter(owner=User(user_id=request.user.pk), path=request.POST.get("filepath", "")).values_list("url", "can_edit")
return JsonResponse({'result': 0, 'sharelink': settings.EXTERNAL_URL + 's/' + str(share_url[0][0]), 'shareedit': share_url[0][1]})
else:
return JsonResponse({'result': 1})
else:
return HttpResponseForbidden()
def public_access(request, share_url):
if not ShareUrl.objects.filter(url=share_url).exists():
return render(request, 'cloud/e404.html', status=404) # 404
else:
share_data = get_object_or_404(ShareUrl, url=share_url)
if share_data.is_private:
# Not for public access
return render(request, 'cloud/e404.html', status=404) # 404
else:
# Public access
fm = FileManager(share_data.owner)
is_file = fm.set_share_path(share_data.path)
if is_file == 1:
# File details
context = fm.file_details(share_data.path)
context.update({'fileowner': share_data.owner, 'shareurl': share_url})
return render(request, 'cloud/fileShare.html', context)
else:
# Directory Explorer
mkdir_form = MkdirForm()
upload_form = UploadForm()
mkdir_form.initial['dir_path'] = share_data.path # Default path
                upload_form.initial['upload_path'] = share_data.path # Set default path
if 'p' in dict(request.GET) and len(dict(request.GET)['p'][0]) > 0:
new_path = dict(request.GET)['p'][0].replace("../", "") # No previous directory browsing
fm.update_path(new_path)
mkdir_form.initial['dir_path'] = new_path
upload_form.initial['upload_path'] = new_path
mkdir_form.initial['share_url'] = share_url
upload_form.initial['share_url'] = share_url
context = {'files': fm.directory_list(), 'uploadForm': upload_form, 'mkdirForm': mkdir_form,
'shareurl': share_url, 'canEdit': share_data.can_edit, 'sharelink': settings.EXTERNAL_URL + 's/' + share_url}
fm.update_context_data(context)
return render(request, 'cloud/directoryShare.html', context)
@user_required
def shared_with_me(request):
# User Share
shared_items = UserShare.objects.filter(shared_with=request.user).values("url")
# Group share
user_groups = UserGroup.objects.filter(user=request.user).values("group")
group_items = GroupShare.objects.filter(group__in=user_groups).values("url")
# Combined urls
swm_urls = ShareUrl.objects.filter(url__in=shared_items) | ShareUrl.objects.filter(url__in=group_items)
swm_data = []
mimetypes.init()
for swmurl in swm_urls:
if swmurl.owner == request.user:
# No need to show user their files
continue
file_mime = "unknown"
guessed_mime = mimetypes.guess_type(swmurl.path)[0]
        if guessed_mime is None:
if os.path.isfile(swmurl.path):
file_mime = "unknown"
else:
file_mime = "directory"
else:
file_mime = str(guessed_mime)
is_group = False
if GroupShare.objects.filter(url__url=swmurl.url).exists():
is_group = True
swm_data.append({
'url': swmurl.url,
'owner': swmurl.owner,
'filename': os.path.basename(os.path.normpath(swmurl.path)),
'filetype': file_mime,
'isgroup': is_group,
'canedit': swmurl.can_edit
})
context = {'swm_data': swm_data}
return render(request, 'cloud/sharedBrowser.html', context)
def user_has_access(user, res_url):
# Check if the user has access to a shared resource
if not user.is_authenticated:
# Unauthenticated users not allowed
return False
user_has_access_to_res = False
# Check user share
if UserShare.objects.filter(url__url=res_url, shared_with=user).exists():
user_has_access_to_res = True
# Check group share
if UserGroup.objects.filter(group__in=GroupShare.objects.filter(url__url=res_url).values("group"), user=user).exists():
user_has_access_to_res = True
return user_has_access_to_res
@user_required
def private_access(request, share_url):
if not ShareUrl.objects.filter(url=share_url).exists():
return render(request, 'cloud/e404.html', status=404) # 404
else:
share_data = get_object_or_404(ShareUrl, url=share_url)
if not share_data.is_private:
# Not for private access
return redirect("publicAccess", share_url)
else:
if not user_has_access(request.user, share_url):
return render(request, 'cloud/e403.html', status=403) # 403
else:
fm = FileManager(share_data.owner)
is_file = fm.set_share_path(share_data.path)
if is_file == 1:
# File details
context = fm.file_details(share_data.path)
context.update({'fileowner': share_data.owner, 'shareurl': share_url})
return render(request, 'cloud/fileShare.html', context)
else:
# Directory Explorer
mkdir_form = MkdirForm()
upload_form = UploadForm()
mkdir_form.initial['dir_path'] = share_data.path # Default path
                    upload_form.initial['upload_path'] = share_data.path # Set default path
if 'p' in dict(request.GET) and len(dict(request.GET)['p'][0]) > 0:
new_path = dict(request.GET)['p'][0].replace("../", "") # No previous directory browsing
fm.update_path(new_path)
mkdir_form.initial['dir_path'] = new_path
upload_form.initial['upload_path'] = new_path
mkdir_form.initial['share_url'] = share_url
upload_form.initial['share_url'] = share_url
context = {'files': fm.directory_list(), 'uploadForm': upload_form, 'mkdirForm': mkdir_form,
'shareurl': share_url, 'canEdit': share_data.can_edit, 'sharelink': settings.EXTERNAL_URL + 'swma/' + share_url}
fm.update_context_data(context)
return render(request, 'cloud/directoryShare.html', context)
| python |
import logging
from django import forms
import requests
from .base import BaseAction, BaseActionForm
logger = logging.getLogger('zentral.core.actions.backends.trello')
class TrelloClient(object):
"""Trello API Client"""
API_BASE_URL = "https://api.trello.com/1"
def __init__(self, app_key, token):
super(TrelloClient, self).__init__()
self.common_args = {
"key": app_key,
"token": token
}
def get_board(self, board_name):
url = "%s/members/me/boards" % self.API_BASE_URL
args = self.common_args.copy()
args["fields"] = "name"
r = requests.get(url, data=args)
if not r.ok:
logger.error(r.text)
r.raise_for_status()
existing_boards = r.json()
for existing_board in existing_boards:
if existing_board["name"].lower() == board_name.lower():
return existing_board["id"]
raise ValueError("board not found")
def get_list(self, board_id, list_name):
url = "%s/boards/%s/lists" % (self.API_BASE_URL, board_id)
args = self.common_args.copy()
args["fields"] = "name"
r = requests.get(url, data=args)
if not r.ok:
logger.error(r.text)
r.raise_for_status()
existing_lists = r.json()
for existing_list in existing_lists:
if existing_list["name"].lower() == list_name.lower():
return existing_list["id"]
raise ValueError("list not found")
def get_or_create_label(self, board_id, color, text):
url = "%s/boards/%s/labels" % (self.API_BASE_URL, board_id)
r = requests.get(url, data=self.common_args)
if not r.ok:
logger.error(r.text)
r.raise_for_status()
        existing_labels = r.json()
        for existing_label in existing_labels:
            if existing_label["name"] == text and existing_label["color"] == color:
                return existing_label["id"]
# not found - create label
args = self.common_args.copy()
args["name"] = text
args["color"] = color
r = requests.post(url, data=args)
if not r.ok:
logger.error(r.text)
r.raise_for_status()
new_label = r.json()
return new_label["id"]
def create_card(self, board_name, list_name, name, desc, labels=None):
# labels = [{"name": "bla", "color": "red"},{"color": "green"}
board_id = self.get_board(board_name)
list_id = self.get_list(board_id, list_name)
if labels is None:
labels = []
id_labels = []
for label in labels:
if "name" not in label:
label["name"] = ""
id_labels.append(self.get_or_create_label(board_id, label["color"], label["name"]))
args = self.common_args.copy()
args.update({"name": name,
"due": None,
"idList": list_id,
"desc": desc,
"idLabels": id_labels,
"pos": "top"})
url = "%s/cards" % self.API_BASE_URL
r = requests.post(url, data=args)
if not r.ok:
logger.error(r.text)
r.raise_for_status()
class ActionForm(BaseActionForm):
board = forms.CharField()
list = forms.CharField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in ("board", "list"):
default_value = self.config_d.get("default_{}".format(field_name), None)
if default_value:
self.fields[field_name].initial = default_value
class Action(BaseAction):
action_form_class = ActionForm
def __init__(self, config_d):
super(Action, self).__init__(config_d)
self.client = TrelloClient(config_d["application_key"],
config_d["token"])
self.default_board = config_d.get("default_board", None)
self.default_list = config_d.get("default_list", None)
def trigger(self, event, probe, action_config_d):
action_config_d = action_config_d or {}
board_name = action_config_d.get("board", self.default_board)
if not board_name:
raise ValueError("No board name")
list_name = action_config_d.get("list", self.default_list)
if not list_name:
raise ValueError("No list name")
self.client.create_card(board_name, list_name,
event.get_notification_subject(probe),
event.get_notification_body(probe),
action_config_d.get('labels', []))
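# Minimal usage sketch (not part of zentral): the app key, token, board and
# list names below are placeholders, and create_card performs live calls to
# the Trello API.
if __name__ == "__main__":
    client = TrelloClient("YOUR_APP_KEY", "YOUR_TOKEN")  # hypothetical credentials
    client.create_card(
        board_name="Alerts",       # assumed to already exist in the account
        list_name="Inbox",         # assumed to already exist on the board
        name="Test card",
        desc="Created by the usage sketch above",
        labels=[{"name": "test", "color": "green"}],
    )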
| python |
import os
from unittest.mock import patch
from unittest.mock import MagicMock
import cauldron
from cauldron.cli.commands import save
from cauldron.environ.response import Response
from cauldron.test import support
from cauldron.test.support import scaffolds
class TestSave(scaffolds.ResultsTest):
def test_fails_no_project(self):
""" should fail if there is no open project """
path = self.get_temp_path('save-fail-1')
r = support.run_command('save "{}"'.format(path))
self.assertTrue(r.failed)
self.assertGreater(len(r.errors), 0)
self.assertEqual(r.errors[0].code, 'NO_PROJECT')
@patch('cauldron.cli.commands.save.write_file')
def test_fails_write(self, write_func):
""" should fail when the write function raises an exception """
write_func.side_effect = IOError('Write failed')
support.create_project(self, 'rex')
path = self.get_temp_path('save-fail-2')
r = support.run_command('save "{}"'.format(path))
self.assertTrue(r.failed)
self.assertGreater(len(r.errors), 0)
self.assertEqual(r.errors[0].code, 'WRITE_SAVE_ERROR')
def test_save_directory_success(self):
""" should write a cauldron file """
support.create_project(self, 'triceratops')
path = self.get_temp_path('save-success-1')
r = support.run_command('save "{}"'.format(path))
self.assertFalse(r.failed)
self.assertTrue(os.path.exists(r.data['path']))
project = cauldron.project.get_internal_project()
self.assertTrue(
r.data['path'].endswith('{}.cauldron'.format(project.title))
)
def test_save_file_no_extension_success(self):
""" should write a cauldron file """
support.create_project(self, 'tyrannosaurus')
path = self.get_temp_path('save-success-2', 'project')
r = support.run_command('save "{}"'.format(path))
self.assertFalse(r.failed)
self.assertTrue(os.path.exists(r.data['path']))
self.trace('PATH:', r.data['path'])
self.assertTrue(r.data['path'].endswith('project.cauldron'))
def test_save_file_success(self):
""" should write a cauldron file """
support.create_project(self, 'apatosaurus')
path = self.get_temp_path(
'save-success-3',
'folder',
'project.cauldron'
)
r = support.run_command('save "{}"'.format(path))
self.assertFalse(r.failed)
self.assertTrue(os.path.exists(r.data['path']))
self.assertTrue(r.data['path'].endswith('project.cauldron'))
def test_remote_save_no_project(self):
""" """
response = support.run_remote_command('save')
self.assertTrue(response.failed)
@patch('cauldron.cli.sync.comm.download_file')
def test_remote_download_error(self, download_file: MagicMock):
""" """
download_file.return_value = Response().fail().response
support.create_project(self, 'apophis')
project = cauldron.project.get_internal_project()
support.run_remote_command('open "{}"'.format(project.source_directory))
response = support.run_remote_command('save')
self.assertTrue(response.failed)
@patch('cauldron.cli.sync.comm.download_file')
def test_remote(self, download_file: MagicMock):
""" """
download_file.return_value = Response()
support.create_project(self, 'apophis')
project = cauldron.project.get_internal_project()
support.run_remote_command('open "{}"'.format(project.source_directory))
response = support.run_remote_command('save')
self.assert_has_success_code(response, 'DOWNLOAD_SAVED')
def test_get_default_path_no_project(self):
""" """
path = save.get_default_path()
self.assertTrue(os.path.exists(path))
| python |
import cv2
import numpy as np
import torchvision.datasets as datasets
class CIFAR10Noise(datasets.CIFAR10):
"""CIFAR10 Dataset with noise.
Args:
clip (bool): If True, clips a value between 0 and 1 (default: True).
seed (int): Random seed (default: 0).
This is a subclass of the `CIFAR10` Dataset.
"""
def __init__(self, clip=True, seed=0, **kwargs):
self.clip = clip
self.seed = seed
super(CIFAR10Noise, self).__init__(**kwargs)
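        # Noise is drawn per sample from RandomState(seed * len(self) + index),
        # so every (seed, index) pair is reproducible; the assert below keeps
        # each derived seed inside the 32-bit range RandomState accepts.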
assert (seed + 1) * len(self) - 1 <= 2**32 - 1
def __getitem__(self, index):
img, target = self.data[index], self.targets[index]
noise = self.generate_noise(index)
img = img / 255.
noise = noise / 255.
img = img + noise
img, target = self.postprocess(img, target)
return img, target
def postprocess(self, img, target):
if self.clip:
img = np.clip(img, 0., 1.)
if self.transform is not None:
img = img.astype(np.float32)
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
    def generate_noise(self, index):
        raise NotImplementedError
class CIFAR10AdditiveGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with additive Gaussian noise.
Args:
noise_scale (float): The standard deviation of additive Gaussian noise
(default: 25.).
noise_scale_high (float): The upper bound of the standard deviation of
additive Gaussian noise (default: None, i.e., `noise_scale`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self, noise_scale=25., noise_scale_high=None, **kwargs):
self.noise_scale = noise_scale
self.noise_scale_high = noise_scale_high
super(CIFAR10AdditiveGaussianNoise, self).__init__(**kwargs)
def generate_noise(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
return rng.randn(*self.data[index].shape) * noise_scale
class CIFAR10LocalGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with local Gaussian noise.
Args:
noise_scale (float): The standard deviation of additive Gaussian noise
(default: 25.).
patch_size (int): The height/width of the noise patch (default: 16.).
noise_scale_high (float): The upper bound of the standard deviation of
additive Gaussian noise (default: None, i.e., `noise_scale`).
patch_max_size (int): The maximum height/width of the noise patch
(default: None, i.e., `patch_size`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
noise_scale=25.,
patch_size=16,
noise_scale_high=None,
patch_max_size=None,
**kwargs):
self.noise_scale = noise_scale
self.patch_size = patch_size
self.noise_scale_high = noise_scale_high
self.patch_max_size = patch_max_size
super(CIFAR10LocalGaussianNoise, self).__init__(**kwargs)
def generate_noise(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
patch_shape = (self.data[index].shape[0], self.data[index].shape[1], 1)
patch = np.zeros(patch_shape, dtype=np.uint8)
if self.patch_max_size is None:
patch_width = self.patch_size
patch_height = self.patch_size
else:
patch_width = rng.randint(self.patch_size, self.patch_max_size + 1)
patch_height = rng.randint(self.patch_size,
self.patch_max_size + 1)
x = rng.randint(0, patch_shape[1] - patch_width + 1)
y = rng.randint(0, patch_shape[0] - patch_height + 1)
patch[y:y + patch_height, x:x + patch_width] = 1
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
noise = rng.randn(*self.data[index].shape) * noise_scale
return noise * patch
class CIFAR10UniformNoise(CIFAR10Noise):
"""CIFAR10 Dataset with uniform noise.
Args:
noise_scale (float): The scale of uniform noise (default: 50.).
noise_scale_high (float): The upper bound of the scale of uniform noise
(default: None, i.e., `noise_scale`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self, noise_scale=50., noise_scale_high=None, **kwargs):
self.noise_scale = noise_scale
self.noise_scale_high = noise_scale_high
super(CIFAR10UniformNoise, self).__init__(**kwargs)
def generate_noise(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
return rng.uniform(-1, 1, self.data[index].shape) * noise_scale
class CIFAR10MixtureNoise(CIFAR10Noise):
"""CIFAR10 Dataset with mixture noise.
Args:
noise_scale_list (float list): The values, except for the last one,
indicate the standard deviations of additive Gaussian noises. The
last value indicates the scale of uniform noise (default:
[15., 25., 50.]).
mixture_rate_list (float list): The mixture rates of the noises
(default: [0.7, 0.2, 0.1]).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
noise_scale_list=[15., 25., 50.],
mixture_rate_list=[0.7, 0.2, 0.1],
**kwargs):
self.noise_scale_list = noise_scale_list
self.mixture_rate_list = mixture_rate_list
super(CIFAR10MixtureNoise, self).__init__(**kwargs)
def generate_noise(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
height, width, channel = list(self.data[index].shape)
noise = np.zeros((height * width, channel))
perm = rng.permutation(height * width)
rand = rng.rand(height * width)
cumsum = np.cumsum([0] + self.mixture_rate_list)
for i, noise_scale in enumerate(self.noise_scale_list):
inds = (rand >= cumsum[i]) * (rand < cumsum[i + 1])
if i == len(self.noise_scale_list) - 1:
noise[perm[inds], :] = rng.uniform(
-1, 1, (np.sum(inds), channel)) * noise_scale
else:
noise[perm[inds], :] = rng.randn(np.sum(inds),
channel) * noise_scale
noise = np.reshape(noise, (height, width, channel))
return noise
class CIFAR10BrownGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with Brown Gaussian noise.
Args:
noise_scale (float): The standard deviation of additive Gaussian noise
(default: 25.).
noise_scale_high (float): The upper bound of the standard deviation of
additive Gaussian noise (default: None, i.e., `noise_scale`).
kernel_size (int): The Gaussian kernel size (default: 5).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
noise_scale=25.,
noise_scale_high=None,
kernel_size=5,
**kwargs):
self.noise_scale = noise_scale
self.noise_scale_high = noise_scale_high
self.kernel_size = kernel_size
super(CIFAR10BrownGaussianNoise, self).__init__(**kwargs)
self.kernel = (cv2.getGaussianKernel(kernel_size, 0) *
cv2.getGaussianKernel(kernel_size, 0).transpose())
def generate_noise(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
noise = rng.randn(*self.data[index].shape) * noise_scale
return (cv2.GaussianBlur(noise, (self.kernel_size, self.kernel_size),
0,
borderType=cv2.BORDER_CONSTANT) /
np.sqrt(np.sum(self.kernel**2)))
class CIFAR10AdditiveBrownGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with additive Brown Gaussian noise.
Args:
noise_scale (float): The standard deviation of additive Gaussian noise
(default: 25.).
noise_scale_high (float): The upper bound of the standard deviation of
additive Gaussian noise (default: None, i.e., `noise_scale`).
kernel_size (int): The Gaussian kernel size (default: 5).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
noise_scale=25.,
noise_scale_high=None,
kernel_size=5,
**kwargs):
self.noise_scale = noise_scale
self.noise_scale_high = noise_scale_high
self.kernel_size = kernel_size
super(CIFAR10AdditiveBrownGaussianNoise, self).__init__(**kwargs)
self.kernel = (cv2.getGaussianKernel(kernel_size, 0) *
cv2.getGaussianKernel(kernel_size, 0).transpose())
def generate_noise(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
noise = rng.randn(*self.data[index].shape) * noise_scale
return noise + (cv2.GaussianBlur(noise,
(self.kernel_size, self.kernel_size),
0,
borderType=cv2.BORDER_CONSTANT) /
np.sqrt(np.sum(self.kernel**2)))
class CIFAR10MultiplicativeGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with multiplicative Gaussian noise.
Args:
multi_noise_scale (float): The standard deviation of multiplicative
Gaussian noise (default: 25.).
multi_noise_scale_high (float): The upper bound of the standard
deviation of multiplicative Gaussian noise (default: None, i.e.,
`multi_noise_scale`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
multi_noise_scale=25.,
multi_noise_scale_high=None,
**kwargs):
self.multi_noise_scale = multi_noise_scale
self.multi_noise_scale_high = multi_noise_scale_high
super(CIFAR10MultiplicativeGaussianNoise, self).__init__(**kwargs)
def __getitem__(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
img, target = self.data[index], self.targets[index]
img = img / 255.
if self.multi_noise_scale_high is None:
multi_noise_scale = self.multi_noise_scale
else:
multi_noise_scale = rng.uniform(self.multi_noise_scale,
self.multi_noise_scale_high)
noise = rng.randn(*img.shape) * multi_noise_scale * img / 255.
img = img + noise
img, target = self.postprocess(img, target)
return img, target
class CIFAR10AdditiveMultiplicativeGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with additive and multiplicative Gaussian noise.
Args:
noise_scale (float): The standard deviation of additive Gaussian noise
(default: 25.).
multi_noise_scale (float): The standard deviation of multiplicative
Gaussian noise (default: 25.).
noise_scale_high (float): The upper bound of the standard deviation of
additive Gaussian noise (default: None, i.e., `noise_scale`).
multi_noise_scale_high (float): The upper bound of the standard
deviation of multiplicative Gaussian noise (default: None, i.e.,
`multi_noise_scale`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
noise_scale=25.,
multi_noise_scale=25.,
noise_scale_high=None,
multi_noise_scale_high=None,
**kwargs):
self.noise_scale = noise_scale
self.multi_noise_scale = multi_noise_scale
self.noise_scale_high = noise_scale_high
self.multi_noise_scale_high = multi_noise_scale_high
super(CIFAR10AdditiveMultiplicativeGaussianNoise,
self).__init__(**kwargs)
def __getitem__(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
img, target = self.data[index], self.targets[index]
img = img / 255.
if self.multi_noise_scale_high is None:
multi_noise_scale = self.multi_noise_scale
else:
multi_noise_scale = rng.uniform(self.multi_noise_scale,
self.multi_noise_scale_high)
noise = rng.randn(*img.shape) * multi_noise_scale * img / 255.
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
noise = noise + rng.randn(*img.shape) * noise_scale / 255.
img = img + noise
img, target = self.postprocess(img, target)
return img, target
class CIFAR10PoissonNoise(CIFAR10Noise):
"""CIFAR10 Dataset with Poisson noise.
Args:
noise_lam (float): The total number of events for Poisson noise
(default: 30.).
noise_lam_high (float): The maximum total number of events for Poisson
noise (default: None, i.e., `noise_lam`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self, noise_lam=30., noise_lam_high=None, **kwargs):
self.noise_lam = noise_lam
self.noise_lam_high = noise_lam_high
super(CIFAR10PoissonNoise, self).__init__(**kwargs)
def __getitem__(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
img, target = self.data[index], self.targets[index]
img = img / 255.
if self.noise_lam_high is None:
noise_lam = self.noise_lam
else:
noise_lam = rng.uniform(self.noise_lam, self.noise_lam_high)
img = rng.poisson(noise_lam * img) / noise_lam
img, target = self.postprocess(img, target)
return img, target
class CIFAR10PoissonGaussianNoise(CIFAR10Noise):
"""CIFAR10 Dataset with Poisson-Gaussian noise.
Args:
noise_lam (float): The total number of events for Poisson noise
(default: 30.).
noise_scale (float): The standard deviation of additive Gaussian noise
(default: 25.).
noise_lam_high (float): The maximum total number of events for Poisson
noise (default: None, i.e., `noise_lam`).
noise_scale_high (float): The upper bound of the standard deviation of
additive Gaussian noise (default: None, i.e., `noise_scale`).
This is a subclass of the `CIFAR10Noise` Dataset.
"""
def __init__(self,
noise_lam=30.,
noise_scale=3.,
noise_lam_high=None,
noise_scale_high=None,
**kwargs):
self.noise_lam = noise_lam
self.noise_lam_high = noise_lam_high
self.noise_scale = noise_scale
self.noise_scale_high = noise_scale_high
super(CIFAR10PoissonGaussianNoise, self).__init__(**kwargs)
def __getitem__(self, index):
rng = np.random.RandomState(self.seed * len(self) + index)
img, target = self.data[index], self.targets[index]
img = img / 255.
if self.noise_lam_high is None:
noise_lam = self.noise_lam
else:
noise_lam = rng.uniform(self.noise_lam, self.noise_lam_high)
img = rng.poisson(noise_lam * img) / noise_lam
if self.noise_scale_high is None:
noise_scale = self.noise_scale
else:
noise_scale = rng.uniform(self.noise_scale, self.noise_scale_high)
img = img + rng.randn(*img.shape) * noise_scale / 255.
img, target = self.postprocess(img, target)
return img, target
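# Minimal usage sketch (assumptions: torchvision can download CIFAR-10 into
# ./data, and ToTensor here only reorders the float32 HWC array to a CHW
# tensor without rescaling).
if __name__ == "__main__":
    import torchvision.transforms as transforms

    dataset = CIFAR10AdditiveGaussianNoise(
        root="./data",
        train=True,
        download=True,
        transform=transforms.ToTensor(),
        noise_scale=25.,
        seed=0,
    )
    noisy_img, label = dataset[0]
    print(noisy_img.shape, float(noisy_img.min()), float(noisy_img.max()), label)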
| python |
class ReturnInInitE0101:
def __init__(self, value):
# Should trigger "return-in-init"
return value
| python |
#!/usr/bin/python
help_msg = 'calculate contact order from PDB structure file'
import os, sys, glob
import imp
from Bio.PDB import NeighborSearch, PDBParser, Atom, Residue, Polypeptide
from Bio import PDB
import numpy as np
CWD = os.getcwd()
UTLTS_DIR = CWD[:CWD.index('proteomevis_scripts')]+'/proteomevis_scripts/utlts'
sys.path.append(UTLTS_DIR)
from parse_user_input import help_message, false_or_true
from read_in_file import read_in
from parse_data import organism
from output import writeout, print_next_step
def contact_order(contact_matrix):
CO = 0
for res1, contact_res in enumerate(contact_matrix):
for res2, contact in enumerate(contact_res):
if contact:
CO+= abs(res1-res2)
return CO / (float(len(contact_matrix)*contact_matrix.sum()))
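# Worked example (illustrative only): for a 3-residue contact matrix with a
# single symmetric contact between residues 0 and 2, the loop accumulates
# |0-2| + |2-0| = 4 and the denominator is 3 * 2, so
#   contact_order(np.array([[0, 0, 1],
#                           [0, 0, 0],
#                           [1, 0, 0]]))
# returns ~0.667.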
if __name__ == "__main__":
help_message(help_msg)
extra = ''
method = false_or_true("Calculate contact density like Shakh2006 [default Zhou2008]?")
if false_or_true("Relax selection criterion 2"):
extra += 'pre_output'
contact_defn = ['Bloom', 'Shakh'][method]
d_input = read_in('pdb', 'oln', filename = extra)
d_input1 = read_in('pdb', 'uniprot', filename = extra)
d_output = {}
    module = imp.load_source("run", "../../contact_density/run.py") # normal import doesn't work
    for pdb, oln in d_input.items():
protein_contact = module.ProteinContact(pdb, contact_defn)
CO = contact_order(protein_contact.contact_matrix())
if organism=='protherm':
d_output[d_input1[pdb]] = CO
x_name = 'uniprot'
else:
d_output[oln] = CO
x_name = 'oln'
filename = 'PDB'
if method:
filename+='_shakh'
writeout([x_name, 'contact_order'], d_output, filename = '{0}{1}'.format(filename, extra))
print_next_step()
| python |
from matplotlib import pyplot as plt
import numpy as np
import argparse
def prettyPrint(data):
x = np.linspace(1,len(data[0]),len(data[0]))
y = np.mean(data, axis=0)
print(y)
std = np.std(data,axis=0)
plt.plot(x,y,'k-',label='Mean')
plt.xlabel("Generation")
plt.ylabel("Max fitness")
    plt.fill_between(x, y-std, y+std, color='orange', label='Standard deviation')
plt.legend()
plt.show()
if __name__ == "__main__":
## Parse input
parser = argparse.ArgumentParser(description=('Pretty Print for Neat'))
parser.add_argument('-d', '--directory', type=str, help='Directory Rewards', default='log/learn/')
parser.add_argument('-f', '--file', type=str, help='Rewards', default='rewards.npy')
args = parser.parse_args()
## End Parse Input
prettyPrint(data=np.load(args.directory + args.file))
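# Quick self-test sketch (assumption: the rewards file holds a 2-D array of
# shape (n_runs, n_generations), which is the layout prettyPrint expects):
#   fake = np.random.rand(5, 50).cumsum(axis=1)
#   prettyPrint(data=fake)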
| python |
from typing import Tuple, List, Optional
import mvc
import pygame
import pygame.locals as pg
import time
import terrain
import civ
import cv2
import numpy as np
import image
import sprite
import gaia
class Model(mvc.Model):
def __init__(self) -> None:
self.sprites: List[sprite.Sprite] = []
self.sprites.append(sprite.Button((200, 100), 'Load'))
self.sprites.append(sprite.Button((800, 300), '1 Player'))
self.sprites.append(sprite.Button((800, 400), '2 Players Hot Seat'))
self.sprites.append(sprite.Button((800, 500), '3 Players Hot Seat'))
self.sprites.append(sprite.Button((800, 600), '4 Players Hot Seat'))
def update(self) -> bool:
return False
def find_sprite(self, pos: Tuple[int, int]) -> Optional[sprite.Sprite]:
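        # Hit-test: consider only sprites whose rect contains pos; when several
        # overlap, return the one whose anchor point (spr.pos) is closest.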
d = 100000000
s: Optional[sprite.Sprite] = None
for spr in self.sprites:
r = spr.rect()
if pos[0] >= r[0] and pos[1] >= r[1] and pos[0] < r[2] and pos[1] < r[3]:
dd = (spr.pos[0] - pos[0]) ** 2 + (spr.pos[1] - pos[1]) ** 2
if dd < d:
d = dd
s = spr
return s
def do_action(self, action: str) -> None:
if action == 'Load':
c = gaia.Controller(0)
c.run()
elif action == '1 Player':
c = gaia.Controller(1)
c.run()
elif action == '2 Players Hot Seat':
c = gaia.Controller(2)
c.run()
elif action == '3 Players Hot Seat':
c = gaia.Controller(3)
c.run()
elif action == '4 Players Hot Seat':
c = gaia.Controller(4)
c.run()
else:
raise ValueError('Unrecognized action: ' + action)
class View(mvc.View):
def __init__(self, model: Model) -> None:
self.model = model
super().__init__(model)
def update(self) -> None:
self.screen.fill([130, 180, 200])
for s in self.model.sprites:
s.draw(self.screen)
pygame.display.flip()
class Controller(mvc.Controller):
def __init__(self) -> None:
self.model = Model()
self.view = View(self.model)
super().__init__(self.view)
def update(self) -> None:
for event in pygame.event.get():
if event.type == pg.QUIT:
self.keep_going = False
elif event.type == pg.KEYDOWN:
if event.key == pg.K_ESCAPE:
self.keep_going = False
elif event.type == pygame.MOUSEBUTTONDOWN:
mpos = pygame.mouse.get_pos()
s = self.model.find_sprite(mpos)
if s:
s.on_mouse_down()
elif event.type == pygame.MOUSEBUTTONUP:
mpos = pygame.mouse.get_pos()
s = self.model.find_sprite(mpos)
if s:
s.on_mouse_up()
if s.is_button():
self.model.do_action(s.text) # type: ignore
elif event.type == pygame.MOUSEMOTION:
pass
keys = pygame.key.get_pressed()
c = Controller()
c.run()
| python |
#!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
def GetRGBColor(colorName):
'''
Return the red, green and blue components for a
color as doubles.
'''
rgb = [0.0, 0.0, 0.0] # black
vtk.vtkNamedColors().GetColorRGB(colorName, rgb)
return rgb
# create pipeline
# reader reads slices
v16 = vtk.vtkVolume16Reader()
v16.SetDataDimensions(64, 64)
v16.SetDataByteOrderToLittleEndian()
v16.SetFilePrefix(VTK_DATA_ROOT + "/Data/headsq/quarter")
v16.SetDataSpacing(3.2, 3.2, 1.5)
v16.SetImageRange(30, 50)
v16.SetDataMask(0x7fff)
# create points on edges
edgePoints = vtk.vtkEdgePoints()
edgePoints.SetInputConnection(v16.GetOutputPort())
edgePoints.SetValue(1150)
#
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(edgePoints.GetOutputPort())
mapper.ImmediateModeRenderingOn()
mapper.ScalarVisibilityOff()
head = vtk.vtkActor()
head.SetMapper(mapper)
head.GetProperty().SetColor(GetRGBColor('raw_sienna'))
# Create the RenderWindow, Renderer and Interactor
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(head)
ren1.SetBackground(1, 1, 1)
renWin.SetSize(300, 300)
ren1.SetBackground(GetRGBColor('slate_grey'))
ren1.GetActiveCamera().SetPosition(99.8847, 537.86, 22.4716)
ren1.GetActiveCamera().SetFocalPoint(99.8847, 109.81, 15)
ren1.GetActiveCamera().SetViewAngle(20)
ren1.GetActiveCamera().SetViewUp(0, -1, 0)
ren1.ResetCameraClippingRange()
# render the image
#
renWin.Render()
#iren.Start()
| python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2018-02-01
@author: Irony."[讽刺]
@site: https://pyqt5.com , https://github.com/892768447
@email: [email protected]
@file: PushButtonLine
@description:
'''
from random import randint
import sys
from PyQt5.QtCore import QTimer, QThread, pyqtSignal
from PyQt5.QtGui import QPainter, QColor, QPen
from PyQt5.QtWidgets import QPushButton, QApplication, QWidget, QVBoxLayout
__Author__ = "By: Irony.\"[讽刺]\nQQ: 892768447\nEmail: [email protected]"
__Copyright__ = "Copyright (c) 2018 Irony.\"[讽刺]"
__Version__ = "Version 1.0"
StyleSheet = '''
PushButtonLine {
color: white;
border: none;
min-height: 48px;
background-color: #90caf9;
}
'''
class LoadingThread(QThread):
    valueChanged = pyqtSignal(float) # current value / total value
def __init__(self, *args, **kwargs):
super(LoadingThread, self).__init__(*args, **kwargs)
        self.totalValue = randint(100, 200) # simulated maximum
def run(self):
for i in range(self.totalValue + 1):
self.valueChanged.emit(i / self.totalValue)
QThread.msleep(randint(50, 100))
class PushButtonLine(QPushButton):
lineColor = QColor(0, 150, 136)
def __init__(self, *args, **kwargs):
        self._waitText = kwargs.pop("waitText", "Waiting...")
super(PushButtonLine, self).__init__(*args, **kwargs)
self._text = self.text()
self._percent = 0
self._timer = QTimer(self, timeout=self.update)
self.clicked.connect(self.start)
def paintEvent(self, event):
super(PushButtonLine, self).paintEvent(event)
if not self._timer.isActive():
return
        # draw the progress line
painter = QPainter(self)
pen = QPen(self.lineColor)
pen.setWidth(4)
painter.setPen(pen)
painter.drawLine(0, self.height(), self.width()
* self._percent, self.height())
def start(self):
if hasattr(self, "loadingThread"):
return self.stop()
self.loadingThread = LoadingThread(self)
self.loadingThread.valueChanged.connect(self.setPercent)
self._timer.start(100) # 100ms
self.loadingThread.start()
self.setText(self._waitText)
def stop(self):
self.loadingThread.valueChanged.disconnect(self.setPercent)
self.loadingThread.terminate()
self.loadingThread.deleteLater()
        QThread.msleep(100) # wait briefly for deleteLater to finish
del self.loadingThread
self._percent = 0
self._timer.stop()
self.setText(self._text)
def setPercent(self, v):
self._percent = v
if v == 1:
self.stop()
self.update()
def setLineColor(self, color):
self.lineColor = QColor(color)
return self
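    # Returning self above lets callers chain construction and styling, e.g.
    # PushButtonLine("Click to load").setLineColor("#ef5350") as in Window below.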
class Window(QWidget):
def __init__(self, *args, **kwargs):
super(Window, self).__init__(*args, **kwargs)
layout = QVBoxLayout(self)
        layout.addWidget(PushButtonLine("Click to load"))
        layout.addWidget(PushButtonLine("Click to load").setLineColor("#ef5350"))
        layout.addWidget(PushButtonLine("Click to load").setLineColor("#ffc107"))
if __name__ == "__main__":
app = QApplication(sys.argv)
app.setStyleSheet(StyleSheet)
w = Window()
w.show()
sys.exit(app.exec_())
| python |
import glob
import numpy as np
import pre_processing2 as pre
import cv2
import matplotlib.pyplot as plt
images = []
for imagePath in glob.glob('data/library/train/*'):
images.append(imagePath)
faceList = []
labelList = [0,0,0,0,0,0,0,0,0,0]
index = 0
for path in images:
temp = pre.getFaceGray(path)
temp = cv2.resize(temp,(369,512))
faceList.append(temp)
print "[INFO] Image Loaded: " + str(index+1)
print faceList[-1].shape
plt.subplot2grid((5,3),(index%5,index/5))
plt.imshow(faceList[-1])
index = index + 1
print(labelList)
faceRecognizer = cv2.face.LBPHFaceRecognizer_create(1,8,8,8,123)
faceRecognizer.train(faceList,np.array(labelList))
imagesTest = []
for imagePath in glob.glob('data/library/test/*'):
imagesTest.append(imagePath)
print "[INFO] ========TESTING======="
faceListTest = []
prediction = {}
index = 0
for path in imagesTest:
testSample = pre.getFaceGray(path) #np.array.shape = (256,256)
testSample = cv2.resize(testSample,(369,512))
print "[INFO] Test Image Loaded: " + str(index+1)
prediction[index] = []
predictedLabel, confidence = faceRecognizer.predict(testSample)
plt.subplot2grid((5,3),(index,2))
plt.imshow(testSample,cmap='gray')
plt.title(str(predictedLabel) + " : " + str(confidence))
prediction[index].extend([predictedLabel,confidence])
index = index + 1
plt.tight_layout()
plt.show()
print(prediction)
| python |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/firestore_v1beta1/proto/admin/index.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/cloud/firestore_v1beta1/proto/admin/index.proto',
package='google.firestore.admin.v1beta1',
syntax='proto3',
serialized_pb=_b('\n6google/cloud/firestore_v1beta1/proto/admin/index.proto\x12\x1egoogle.firestore.admin.v1beta1\x1a\x1cgoogle/api/annotations.proto\"\x9c\x01\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12=\n\x04mode\x18\x02 \x01(\x0e\x32/.google.firestore.admin.v1beta1.IndexField.Mode\";\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x02\x12\x0e\n\nDESCENDING\x10\x03\"\xe8\x01\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12:\n\x06\x66ields\x18\x03 \x03(\x0b\x32*.google.firestore.admin.v1beta1.IndexField\x12:\n\x05state\x18\x06 \x01(\x0e\x32+.google.firestore.admin.v1beta1.Index.State\"B\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x03\x12\t\n\x05READY\x10\x02\x12\t\n\x05\x45RROR\x10\x05\x42\xa5\x01\n\"com.google.firestore.admin.v1beta1B\nIndexProtoP\x01ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\xa2\x02\x04GCFS\xaa\x02$Google.Cloud.Firestore.Admin.V1Beta1b\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_INDEXFIELD_MODE = _descriptor.EnumDescriptor(
name='Mode',
full_name='google.firestore.admin.v1beta1.IndexField.Mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MODE_UNSPECIFIED', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ASCENDING', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DESCENDING', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=218,
serialized_end=277,
)
_sym_db.RegisterEnumDescriptor(_INDEXFIELD_MODE)
_INDEX_STATE = _descriptor.EnumDescriptor(
name='State',
full_name='google.firestore.admin.v1beta1.Index.State',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STATE_UNSPECIFIED', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CREATING', index=1, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='READY', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ERROR', index=3, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=446,
serialized_end=512,
)
_sym_db.RegisterEnumDescriptor(_INDEX_STATE)
_INDEXFIELD = _descriptor.Descriptor(
name='IndexField',
full_name='google.firestore.admin.v1beta1.IndexField',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='field_path', full_name='google.firestore.admin.v1beta1.IndexField.field_path', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mode', full_name='google.firestore.admin.v1beta1.IndexField.mode', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_INDEXFIELD_MODE,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=121,
serialized_end=277,
)
_INDEX = _descriptor.Descriptor(
name='Index',
full_name='google.firestore.admin.v1beta1.Index',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.firestore.admin.v1beta1.Index.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='collection_id', full_name='google.firestore.admin.v1beta1.Index.collection_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fields', full_name='google.firestore.admin.v1beta1.Index.fields', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='state', full_name='google.firestore.admin.v1beta1.Index.state', index=3,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_INDEX_STATE,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=280,
serialized_end=512,
)
_INDEXFIELD.fields_by_name['mode'].enum_type = _INDEXFIELD_MODE
_INDEXFIELD_MODE.containing_type = _INDEXFIELD
_INDEX.fields_by_name['fields'].message_type = _INDEXFIELD
_INDEX.fields_by_name['state'].enum_type = _INDEX_STATE
_INDEX_STATE.containing_type = _INDEX
DESCRIPTOR.message_types_by_name['IndexField'] = _INDEXFIELD
DESCRIPTOR.message_types_by_name['Index'] = _INDEX
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
IndexField = _reflection.GeneratedProtocolMessageType('IndexField', (_message.Message,), dict(
DESCRIPTOR = _INDEXFIELD,
__module__ = 'google.cloud.firestore_v1beta1.proto.admin.index_pb2'
,
__doc__ = """A field of an index.
Attributes:
field_path:
The path of the field. Must match the field path specification
described by
[google.firestore.v1beta1.Document.fields][fields]. Special
field path ``__name__`` may be used by itself or at the end of
a path. ``__type__`` may be used only at the end of path.
mode:
The field's mode.
""",
# @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.IndexField)
))
_sym_db.RegisterMessage(IndexField)
Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), dict(
DESCRIPTOR = _INDEX,
__module__ = 'google.cloud.firestore_v1beta1.proto.admin.index_pb2'
,
__doc__ = """An index definition.
Attributes:
name:
The resource name of the index.
collection_id:
The collection ID to which this index applies. Required.
fields:
The fields to index.
state:
The state of the index. The state is read-only. @OutputOnly
""",
# @@protoc_insertion_point(class_scope:google.firestore.admin.v1beta1.Index)
))
_sym_db.RegisterMessage(Index)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\"com.google.firestore.admin.v1beta1B\nIndexProtoP\001ZCgoogle.golang.org/genproto/googleapis/firestore/admin/v1beta1;admin\242\002\004GCFS\252\002$Google.Cloud.Firestore.Admin.V1Beta1'))
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
| python |
'''
Demo for running training or linear models.
'''
import utils
from kahip.kmkahip import run_kmkahip
if __name__ == '__main__':
opt = utils.parse_args()
#adjust the number of parts and the height of the hierarchy
n_cluster_l = [opt.n_clusters]
height_l = [opt.height]
# load dataset
if opt.glove:
dataset = utils.load_glove_data('train').to(utils.device)
queryset = utils.load_glove_data('query').to(utils.device)
neighbors = utils.load_glove_data('answers').to(utils.device)
elif opt.glove_25:
dataset = utils.load_glove_25_data('train').to(utils.device)
queryset = utils.load_glove_25_data('query').to(utils.device)
neighbors = utils.load_glove_25_data('answers').to(utils.device)
elif opt.glove_200:
dataset = utils.load_glove_200_data('train').to(utils.device)
queryset = utils.load_glove_200_data('query').to(utils.device)
neighbors = utils.load_glove_200_data('answers').to(utils.device)
elif opt.sift:
dataset = utils.load_sift_data('train').to(utils.device)
queryset = utils.load_sift_data('query').to(utils.device)
neighbors = utils.load_sift_data('answers').to(utils.device)
elif opt.gist:
dataset = utils.load_gist_data('train').to(utils.device)
queryset = utils.load_gist_data('query').to(utils.device)
neighbors = utils.load_gist_data('answers').to(utils.device)
elif opt.lastfm:
dataset = utils.load_lastfm_data('train').to(utils.device)
queryset = utils.load_lastfm_data('query').to(utils.device)
neighbors = utils.load_lastfm_data('answers').to(utils.device)
else:
dataset = utils.load_data('train').to(utils.device)
queryset = utils.load_data('query').to(utils.device)
neighbors = utils.load_data('answers').to(utils.device)
#specify which action to take at each level, actions can be km, kahip, train, or svm. Lower keys indicate closer to leaf.
#Note that if 'kahip' is included, evaluation must be on training rather than test set, since partitioning was performed on training, but not test, set.
#e.g.: opt.level2action = {0:'km', 1:'train', 3:'train'}
opt.level2action = {0:'train', 1:'train', 2:'train', 3: 'train'}
if opt.height == 2 and opt.n_clusters == 256:
opt.level2action = {0: 'km', 1: 'train'}
for n_cluster in n_cluster_l:
print('n_cluster {}'.format(n_cluster))
opt.n_clusters = n_cluster
opt.n_class = n_cluster
for height in height_l:
run_kmkahip(height, opt, dataset, queryset, neighbors)
| python |
#!/usr/bin/python3
import spidev
import smbus
import adpi
import sys
from time import sleep
RAW_OFFSET = (1 << 23)
RAW_SCALE = (
0.000596040,
0.000298020,
0.000149010,
0.000074500,
0.000037250,
0.000018620,
0.000009310,
0.000004650,
)
TEMP_VREF = 1.17
def v2k(rate, val):
    """Reverse lookup: return the key in `rate` whose value equals `val`."""
for k, v in rate.items():
if v == val:
return k
def single_conversion(dev, ch):
c = dev.adc.channel[ch]
g, _ = dev.read_configuration()
dev.write_configuration(g, c)
_, r = dev.read_mode()
dev.write_mode(dev.adc.mode['single'], r)
rate = v2k(dev.adc.rate, r)
while True:
sleep(2 * 1.0 / float(rate))
if not dev.read_status() & 0x80:
break
raw = dev.read_data()
return raw, g
def get_voltage(dev, ch):
raw, g = single_conversion(dev, ch)
vol = RAW_SCALE[g] * (raw - RAW_OFFSET)
return " Ch {} : {:.1f}".format(ch,vol)
if __name__ == "__main__":
spibus = 0
spics = 0
eeprombus = 1
eepromaddr = 0x57
gpiobus = 1
gpioaddr = 0x27
spi = spidev.SpiDev()
i2c = smbus.SMBus(eeprombus)
    while True:
try:
spi.open(spibus, spics)
spi.mode = 0b11
spi.max_speed_hz = 1000000
ad = adpi.ADPiPro(spi, i2c, eepromaddr, gpioaddr)
print("\r"+get_voltage(ad, "1")+get_voltage(ad, "2")+get_voltage(ad, "3")+get_voltage(ad, "4"), end='')
# print(get_voltage(ad, "2"))
# print(get_voltage(ad, "3"))
# print(get_voltage(ad, "4"))
sleep(0.2)
except (IndexError, ValueError):
sys.exit(2)
finally:
spi.close() | python |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Time: 2020/11/05 17:35:42
# Author: Yingying Li
import yaml
import os
import collections
import numpy as np
class LoadData(object):
def __init__(self, path):
yaml_path = path
file = open(yaml_path, 'r', encoding='utf-8')
content = file.read()
data = yaml.load(content, Loader=yaml.FullLoader)
self.data = data
def parser_yaml(self):
nodes = self.data
graph_dict = collections.OrderedDict()
for node in nodes['Nodes']:
key = str(node['id'])
adj = np.nonzero(node['adj'])[0]
value = [i for i in adj]
graph_dict[key] = value
            node['adj'] = value  # store the neighbor index list back on the node
return graph_dict, nodes['Nodes']
    # Topological sort by successive reduction (Kahn's algorithm): compute the in-degree of every
    # node in the graph, remove the nodes whose in-degree is 0 (an in-degree of 0 means no node
    # points to them), then repeat the check on the remaining nodes.
    def topological_sort(self, graph):
        in_degrees = dict((u, 0) for u in graph)
        for u in graph:  # iterate over the keys (node ids)
            for v in graph[u]:  # the value for each key is its list of downstream nodes
                in_degrees[str(v)] += 1  # add 1 to the in-degree of each downstream node
        # result once the loop finishes, e.g.: ['4', '3', '5', '1', '0', '2', '6']
        Q = [u for u in graph if in_degrees[u] == 0]  # collect the nodes with in-degree 0
        in_degrees_zero = []
        while Q:
            u = Q.pop()  # pops from the end by default
            in_degrees_zero.append(u)  # store the node whose in-degree is 0
            for v in graph[u]:
                in_degrees[str(v)] -= 1  # remove the in-degree-0 node and the edges it points to
                if in_degrees[str(v)] == 0:
                    Q.append(str(v))
return in_degrees_zero
def get_DAG_data(self):
graph_dict, nodes_data = self.parser_yaml()
Data =[]
DAG = self.topological_sort(graph_dict)
for i in DAG:
for node in nodes_data:
if node['id'] == int(i):
Data.append(node)
return Data
## test
root = os.getcwd()
file_path = os.path.join(root, 'test.yaml')
# file_path = os.path.join(root, 'envs/chip_place_gym/envs/test.yaml')
data = LoadData(file_path)
Data = data.get_DAG_data()
print(Data)
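# Illustration (added; not part of the original module): parser_yaml() only relies
# on a top-level 'Nodes' list whose entries carry an integer 'id' and an adjacency
# row 'adj' (non-zero entries mark outgoing edges). The contents below are an
# assumed minimal example of that layout, not the project's real test.yaml.
EXAMPLE_YAML = """\
Nodes:
  - {id: 0, adj: [0, 1, 1]}
  - {id: 1, adj: [0, 0, 1]}
  - {id: 2, adj: [0, 0, 0]}
"""
def _demo_example_yaml():
    """Write the assumed example to a temporary file and run it through LoadData."""
    import tempfile
    with tempfile.NamedTemporaryFile('w', suffix='.yaml', delete=False) as f:
        f.write(EXAMPLE_YAML)
        tmp_path = f.name
    demo = LoadData(tmp_path)
    print(demo.get_DAG_data())  # nodes come back in topological order: 0, 1, 2
    os.remove(tmp_path)
# (call _demo_example_yaml() to try the assumed layout)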
| python |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.decorators import method_decorator
from honeypot.decorators import check_honeypot
from userprofiles.views import RegistrationView
from ..forms import RegistrationMemberForm, RegistrationCommunityForm
class RegistrationView(RegistrationView):
template_name = 'userprofiles/registration.html'
register = RegistrationView.as_view()
class RegistrationMemberView(RegistrationView):
template_name = 'userprofiles/registration_member.html'
form_class = RegistrationMemberForm
@method_decorator(check_honeypot(field_name='Name'))
def dispatch(self, request, *args, **kwargs):
return super(RegistrationMemberView, self).dispatch(
request, *args, **kwargs)
register_member = RegistrationMemberView.as_view()
class RegistrationCommunityView(RegistrationView):
template_name = 'userprofiles/registration_community.html'
form_class = RegistrationCommunityForm
@method_decorator(check_honeypot(field_name='Name'))
def dispatch(self, request, *args, **kwargs):
return super(RegistrationCommunityView, self).dispatch(
request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(RegistrationCommunityView, self).get_context_data(**kwargs)
context['formset_seed'] = self.form_class.SeedInlineFormSet(instance=None)
return context
register_community = RegistrationCommunityView.as_view()
| python |
#!/usr/bin/env python3
#
# Updater script of CVE/CPE database
#
# Copyright (c) 2012-2016 Alexandre Dulaunoy - [email protected]
# Copyright (c) 2014-2016 Pieter-Jan Moreels - [email protected]
# Imports
import os
import sys
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))
import shlex
import subprocess
import argparse
import time
import logging
from lib.Config import Configuration
from lib.PluginManager import PluginManager
import lib.DatabaseLayer as db
runPath = os.path.dirname(os.path.realpath(__file__))
sources = [{'name': "cve",
'updater': "python3 " + os.path.join(runPath, "db_mgmt.py -u")},
{'name': "cpe",
'updater': "python3 " + os.path.join(runPath, "db_mgmt_cpe_dictionary.py")},
{'name': "cpeother",
'updater': "python3 " + os.path.join(runPath, "db_mgmt_cpe_other_dictionary.py")}]
posts = [{'name': "ensureindex",
'updater': "python3 " + os.path.join(runPath, "db_mgmt_create_index.py")}]
argParser = argparse.ArgumentParser(description='Database updater for cve-search')
argParser.add_argument('-v', action='store_true', help='Logging on stdout')
argParser.add_argument('-l', action='store_true', help='Running at regular interval', default=False)
argParser.add_argument('-i', action='store_true', help='Indexing new cves entries in the fulltext indexer', default=False)
argParser.add_argument('-c', action='store_true', help='Enable CPE redis cache', default=False)
argParser.add_argument('-f', action='store_true', help='Drop collections and force initial import', default=False)
argParser.add_argument('-m', action='store_true', help='Minimal import', default=False)
argParser.add_argument('-o', action='store_true', help='Save log output', default=False)
argParser.add_argument('-p', action='store_true', help='Public sources only', default=False)
args = argParser.parse_args()
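# Example invocations (illustrative only; the script filename is assumed here):
#   python3 db_updater.py -v         # one-shot update pass, logging to stdout
#   python3 db_updater.py -f -v      # drop the collections and force a fresh import
#   python3 db_updater.py -l -i -c   # loop hourly, index new CVEs, refresh the CPE redis cache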
if not args.m:
sources.extend([{'name': 'cwe',
'updater': "{} {}".format(sys.executable, os.path.join(runPath, "db_mgmt_cwe.py"))},
{'name': 'capec',
'updater': "{} {}".format(sys.executable, os.path.join(runPath, "db_mgmt_capec.py"))},
{'name': 'redis-cache-cpe',
'updater': "{} {}".format(sys.executable, os.path.join(runPath, "db_cpe_browser.py"))},
{'name': 'via4',
'updater': "{} {}".format(sys.executable, os.path.join(runPath, "db_mgmt_ref.py"))},
])
if not args.v:
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
if args.f and args.l:
print ("Drop collections and running in loop should not be used.")
argParser.print_help()
sys.exit(2)
def nbelement(collection=None):
if collection is None or collection == "cve":
collection = "cves"
return db.getSize(collection)
def dropcollection(collection=None):
if collection is None:
return False
return db.dropCollection(collection)
def log(message=""):
if args.o:
with open(Configuration.getUpdateLogFile(), "a") as log:
            log.write(message + "\n")
if args.v:
print (message)
else:
logging.info(message)
loop = True
if args.f:
log("Dropping metadata")
dropcollection("info")
while (loop):
if args.v:
log("==========================")
log(time.strftime("%a %d %B %Y %H:%M", time.gmtime()))
log("==========================")
if not args.l:
loop = False
newelement = 0
for source in sources:
        if not Configuration.includesFeed(source['name']) and source['name'] != "redis-cache-cpe":
continue
        if args.f and source['name'] != "redis-cache-cpe":
log("Dropping collection: " + source['name'])
dropcollection(collection=source['name'])
log( source['name'] + " dropped")
if source['name'] is "cpeother":
if "cpeother" not in db.getTableNames():
continue
        if source['name'] != "redis-cache-cpe":
log('Starting ' + source['name'])
before = nbelement(collection=source['name'])
if args.f and source['name'] is "cves":
updater = "{} {}".format(sys.executable, os.path.join(runPath, "db_mgmt.py -p"))
subprocess.Popen((shlex.split(updater))).wait()
else:
subprocess.Popen((shlex.split(source['updater']))).wait()
after = nbelement(collection=source['name'])
message = source['name'] + " has " + str(after) + " elements (" + str(after - before) + " update)"
newelement = str(after - before)
log(message)
        elif (args.c is True and source['name'] == "redis-cache-cpe"):
log('Starting ' + source['name'])
subprocess.Popen((shlex.split(source['updater']))).wait()
log(source['name'] + " updated")
for post in posts:
log('Starting ' + post['name'])
subprocess.Popen((shlex.split(post['updater']))).wait()
if args.i and int(newelement) > 0:
subprocess.Popen((shlex.split("python3 " + os.path.join(runPath, "db_fulltext.py -v -l" + newelement)))).wait()
if args.l is not False:
log("Sleeping...")
time.sleep(3600)
log()
if not args.p:
plugManager = PluginManager()
plugManager.loadPlugins()
plugins = plugManager.getPlugins()
if len(plugins) != 0:
for plug in plugins:
log("Starting " + plug.getName() + " plugin")
message = plug.onDatabaseUpdate()
if message: log(message)
| python |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gRPC Python interceptors."""
import collections
import sys
import grpc
class _ServicePipeline(object):
def __init__(self, interceptors):
self.interceptors = tuple(interceptors)
def _continuation(self, thunk, index):
return lambda context: self._intercept_at(thunk, index, context)
def _intercept_at(self, thunk, index, context):
if index < len(self.interceptors):
interceptor = self.interceptors[index]
thunk = self._continuation(thunk, index + 1)
return interceptor.intercept_service(thunk, context)
else:
return thunk(context)
def execute(self, thunk, context):
return self._intercept_at(thunk, 0, context)
def service_pipeline(interceptors):
return _ServicePipeline(interceptors) if interceptors else None
class _ClientCallDetails(
collections.namedtuple(
'_ClientCallDetails',
('method', 'timeout', 'metadata', 'credentials', 'wait_for_ready')),
grpc.ClientCallDetails):
pass
def _unwrap_client_call_details(call_details, default_details):
try:
method = call_details.method
except AttributeError:
method = default_details.method
try:
timeout = call_details.timeout
except AttributeError:
timeout = default_details.timeout
try:
metadata = call_details.metadata
except AttributeError:
metadata = default_details.metadata
try:
credentials = call_details.credentials
except AttributeError:
credentials = default_details.credentials
try:
wait_for_ready = call_details.wait_for_ready
except AttributeError:
wait_for_ready = default_details.wait_for_ready
return method, timeout, metadata, credentials, wait_for_ready
class _FailureOutcome(grpc.RpcError, grpc.Future, grpc.Call):
def __init__(self, exception, traceback):
super(_FailureOutcome, self).__init__()
self._exception = exception
self._traceback = traceback
def initial_metadata(self):
return None
def trailing_metadata(self):
return None
def code(self):
return grpc.StatusCode.INTERNAL
def details(self):
return 'Exception raised while intercepting the RPC'
def cancel(self):
return False
def cancelled(self):
return False
def is_active(self):
return False
def time_remaining(self):
return None
def running(self):
return False
def done(self):
return True
def result(self, ignored_timeout=None):
raise self._exception
def exception(self, ignored_timeout=None):
return self._exception
def traceback(self, ignored_timeout=None):
return self._traceback
def add_callback(self, callback):
return False
def add_done_callback(self, fn):
fn(self)
def __iter__(self):
return self
def __next__(self):
raise self._exception
def next(self):
return self.__next__()
class _UnaryOutcome(grpc.Call, grpc.Future):
def __init__(self, response, call):
self._response = response
self._call = call
def initial_metadata(self):
return self._call.initial_metadata()
def trailing_metadata(self):
return self._call.trailing_metadata()
def code(self):
return self._call.code()
def details(self):
return self._call.details()
def is_active(self):
return self._call.is_active()
def time_remaining(self):
return self._call.time_remaining()
def cancel(self):
return self._call.cancel()
def add_callback(self, callback):
return self._call.add_callback(callback)
def cancelled(self):
return False
def running(self):
return False
def done(self):
return True
def result(self, ignored_timeout=None):
return self._response
def exception(self, ignored_timeout=None):
return None
def traceback(self, ignored_timeout=None):
return None
def add_done_callback(self, fn):
fn(self)
class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
def __init__(self, thunk, method, interceptor):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
response, ignored_call = self._with_call(
request,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready)
return response
def _with_call(self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
client_call_details = _ClientCallDetails(
self._method, timeout, metadata, credentials, wait_for_ready)
def continuation(new_details, request):
new_method, new_timeout, new_metadata, new_credentials, new_wait_for_ready = (
_unwrap_client_call_details(new_details, client_call_details))
try:
response, call = self._thunk(new_method).with_call(
request,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready)
return _UnaryOutcome(response, call)
except grpc.RpcError as rpc_error:
return rpc_error
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
call = self._interceptor.intercept_unary_unary(
continuation, client_call_details, request)
return call.result(), call
def with_call(self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
return self._with_call(
request,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready)
def future(self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
client_call_details = _ClientCallDetails(
self._method, timeout, metadata, credentials, wait_for_ready)
def continuation(new_details, request):
new_method, new_timeout, new_metadata, new_credentials, new_wait_for_ready = (
_unwrap_client_call_details(new_details, client_call_details))
return self._thunk(new_method).future(
request,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready)
try:
return self._interceptor.intercept_unary_unary(
continuation, client_call_details, request)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
def __init__(self, thunk, method, interceptor):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(self,
request,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
client_call_details = _ClientCallDetails(
self._method, timeout, metadata, credentials, wait_for_ready)
def continuation(new_details, request):
new_method, new_timeout, new_metadata, new_credentials, new_wait_for_ready = (
_unwrap_client_call_details(new_details, client_call_details))
return self._thunk(new_method)(
request,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready)
try:
return self._interceptor.intercept_unary_stream(
continuation, client_call_details, request)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
def __init__(self, thunk, method, interceptor):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
response, ignored_call = self._with_call(
request_iterator,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready)
return response
def _with_call(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
client_call_details = _ClientCallDetails(
self._method, timeout, metadata, credentials, wait_for_ready)
def continuation(new_details, request_iterator):
new_method, new_timeout, new_metadata, new_credentials, new_wait_for_ready = (
_unwrap_client_call_details(new_details, client_call_details))
try:
response, call = self._thunk(new_method).with_call(
request_iterator,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready)
return _UnaryOutcome(response, call)
except grpc.RpcError as rpc_error:
return rpc_error
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
call = self._interceptor.intercept_stream_unary(
continuation, client_call_details, request_iterator)
return call.result(), call
def with_call(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
return self._with_call(
request_iterator,
timeout=timeout,
metadata=metadata,
credentials=credentials,
wait_for_ready=wait_for_ready)
def future(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
client_call_details = _ClientCallDetails(
self._method, timeout, metadata, credentials, wait_for_ready)
def continuation(new_details, request_iterator):
new_method, new_timeout, new_metadata, new_credentials, new_wait_for_ready = (
_unwrap_client_call_details(new_details, client_call_details))
return self._thunk(new_method).future(
request_iterator,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready)
try:
return self._interceptor.intercept_stream_unary(
continuation, client_call_details, request_iterator)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
def __init__(self, thunk, method, interceptor):
self._thunk = thunk
self._method = method
self._interceptor = interceptor
def __call__(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None,
wait_for_ready=None):
client_call_details = _ClientCallDetails(
self._method, timeout, metadata, credentials, wait_for_ready)
def continuation(new_details, request_iterator):
new_method, new_timeout, new_metadata, new_credentials, new_wait_for_ready = (
_unwrap_client_call_details(new_details, client_call_details))
return self._thunk(new_method)(
request_iterator,
timeout=new_timeout,
metadata=new_metadata,
credentials=new_credentials,
wait_for_ready=new_wait_for_ready)
try:
return self._interceptor.intercept_stream_stream(
continuation, client_call_details, request_iterator)
except Exception as exception: # pylint:disable=broad-except
return _FailureOutcome(exception, sys.exc_info()[2])
class _Channel(grpc.Channel):
def __init__(self, channel, interceptor):
self._channel = channel
self._interceptor = interceptor
def subscribe(self, callback, try_to_connect=False):
self._channel.subscribe(callback, try_to_connect=try_to_connect)
def unsubscribe(self, callback):
self._channel.unsubscribe(callback)
def unary_unary(self,
method,
request_serializer=None,
response_deserializer=None):
thunk = lambda m: self._channel.unary_unary(m, request_serializer, response_deserializer)
if isinstance(self._interceptor, grpc.UnaryUnaryClientInterceptor):
return _UnaryUnaryMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def unary_stream(self,
method,
request_serializer=None,
response_deserializer=None):
thunk = lambda m: self._channel.unary_stream(m, request_serializer, response_deserializer)
if isinstance(self._interceptor, grpc.UnaryStreamClientInterceptor):
return _UnaryStreamMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def stream_unary(self,
method,
request_serializer=None,
response_deserializer=None):
thunk = lambda m: self._channel.stream_unary(m, request_serializer, response_deserializer)
if isinstance(self._interceptor, grpc.StreamUnaryClientInterceptor):
return _StreamUnaryMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def stream_stream(self,
method,
request_serializer=None,
response_deserializer=None):
thunk = lambda m: self._channel.stream_stream(m, request_serializer, response_deserializer)
if isinstance(self._interceptor, grpc.StreamStreamClientInterceptor):
return _StreamStreamMultiCallable(thunk, method, self._interceptor)
else:
return thunk(method)
def _close(self):
self._channel.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._close()
return False
def close(self):
self._channel.close()
def intercept_channel(channel, *interceptors):
for interceptor in reversed(list(interceptors)):
if not isinstance(interceptor, grpc.UnaryUnaryClientInterceptor) and \
not isinstance(interceptor, grpc.UnaryStreamClientInterceptor) and \
not isinstance(interceptor, grpc.StreamUnaryClientInterceptor) and \
not isinstance(interceptor, grpc.StreamStreamClientInterceptor):
            raise TypeError('interceptor must be '
                            'grpc.UnaryUnaryClientInterceptor or '
                            'grpc.UnaryStreamClientInterceptor or '
                            'grpc.StreamUnaryClientInterceptor or '
                            'grpc.StreamStreamClientInterceptor')
channel = _Channel(channel, interceptor)
return channel
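if __name__ == '__main__':
    # Usage sketch (illustration only, not part of the original module): drives the
    # pipeline above through a trivial client interceptor. The target address is a
    # hypothetical placeholder and no RPC is actually issued here.
    class _PrintingInterceptor(grpc.UnaryUnaryClientInterceptor):
        def intercept_unary_unary(self, continuation, client_call_details,
                                  request):
            print('intercepting call to', client_call_details.method)
            return continuation(client_call_details, request)
    _channel = grpc.insecure_channel('localhost:50051')
    # Stubs built on `_intercepted` route every unary-unary RPC through the
    # _UnaryUnaryMultiCallable wrapper defined in this module.
    _intercepted = intercept_channel(_channel, _PrintingInterceptor())
    _intercepted.close()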
| python |
VERSION = "0.13.3"
| python |
import logging
__all__ = (
"logger", "set_verbosity_level"
)
logging.basicConfig(
stream=None, level=logging.CRITICAL,
format="%(asctime)s - %(name)s (%(levelname)s): %(message)s"
)
logger = logging.getLogger("siliqua")
def set_verbosity_level(verbosity_level=0):
"""
Set the logging verbosity level
:param verbosity_level: Verbosity level as defined in `logging` module
"""
if verbosity_level == 0:
logger.setLevel(logging.ERROR)
elif verbosity_level == 1:
logger.setLevel(logging.WARNING)
elif verbosity_level == 2:
logger.setLevel(logging.INFO)
elif verbosity_level >= 3:
logger.setLevel(logging.DEBUG)
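if __name__ == "__main__":
    # Illustration only (added; not part of the original module): raise the logger
    # to INFO and emit a test message so the configured log format can be inspected.
    set_verbosity_level(2)
    logger.info("siliqua logger set to INFO")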
| python |
#!/usr/bin/python
#
# Filename: prependTimestamps.py
#
# Version: 1.0.1
#
# Author: Joe Gervais (TryCatchHCF)
#
# Summary: Inserts datetimestamps in front of each line of a file. Used to
# add noise to a cloaked file (see cloakify.py) in order to degrade frequency
# analysis attacks against the cloaked payload.
#
# Description:
# Takes current date and randomly subtracts 1011-1104 days to generate a
# starting date. Then starts randomly incrementing the datetimestamp (between
# 0-664 seconds) for each entry in the cloaked file. If the datetimestamp
# reaches the current date, repeats the above steps to avoid generating
# timestamps into the future.
#
# Example:
#
# $ ./prependTimestamps.py cloaked.txt > exfiltrateMe.txt
#
# Remove timestamps before trying to decloak the file
#
# $ cat exfiltrateMe.txt | cut -d" " -f 3- > cloaked.txt
import os, sys, getopt, datetime, random
MIN_DAYS_BACK = 1011
MAX_DAYS_BACK = 1104
MIN_SECONDS_STEP = 0
MAX_SECONDS_STEP = 664
TODAY = datetime.date.today()
START_DATE = TODAY - datetime.timedelta(days=random.randint(MIN_DAYS_BACK, MAX_DAYS_BACK))
STEP = datetime.timedelta(seconds=random.randint(MIN_SECONDS_STEP, MAX_SECONDS_STEP))
T = datetime.time(random.randint(0, 23), random.randint(0, 59), random.randint(0, 59))
def prependTimestamps(cloakedFilename:str):
fakeDate = datetime.datetime.combine(START_DATE, T)
if cloakedFilename:
# Prepend noise generator output to file
with open(cloakedFilename, encoding="utf-8") as file:
cloakedFile = file.readlines()
with open(cloakedFilename, "w", encoding="utf-8") as file:
for line in cloakedFile:
file.write(f"{fakeDate} {line}"),
step = datetime.timedelta(seconds=random.randint(MIN_SECONDS_STEP, MAX_SECONDS_STEP))
fakeDate += step
else:
# Generate sample of noise generator output
for _ in range(20):
print(f"{fakeDate}")
step = datetime.timedelta(seconds=random.randint(MIN_SECONDS_STEP, MAX_SECONDS_STEP))
fakeDate += step
if __name__ == "__main__":
if len(sys.argv) == 2:
prependTimestamps(sys.argv[1])
else:
print("usage: prependTimestamps.py <exfilFilename>")
print()
print("Strip leading timestamps prior to decloaking the cloaked file.")
print()
| python |
"""Actions that X can take"""
from enum import Enum
class MegamanAction(Enum):
"""Enum of possible actions"""
MOVE_RIGHT = 1
MOVE_LEFT = 2
STOP_MOVEMENT = 3
JUMP = 4
SHOOT = 5
CHARGE = 6
DASH = 7
CHANGE_WEAPON = 8
START = 9
| python |
import json
filename = './project_data_files/population_data.json'
with open(filename) as f:
pop_data = json.load(f)
for pop_dict in pop_data:
if pop_dict['Year'] == '2010':
country_name = pop_dict['Country Name']
population = pop_dict['Value']
print(f'{country_name}: {population}')
| python |
# File: sudokuTests.py
# from chapter 11 of _Genetic Algorithms with Python_
#
# Author: Clinton Sheppard <[email protected]>
# Copyright (c) 2016 Clinton Sheppard
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
import datetime
import random
import unittest
import genetic
def get_fitness(genes, validationRules):
try:
firstFailingRule = next(rule for rule in validationRules
if genes[rule.Index] == genes[rule.OtherIndex])
except StopIteration:
fitness = 100
else:
fitness = (1 + index_row(firstFailingRule.OtherIndex)) * 10 \
+ (1 + index_column(firstFailingRule.OtherIndex))
return fitness
def display(candidate, startTime):
timeDiff = datetime.datetime.now() - startTime
for row in range(9):
line = ' | '.join(
' '.join(str(i)
for i in candidate.Genes[row * 9 + i:row * 9 + i + 3])
for i in [0, 3, 6])
print("", line)
if row < 8 and row % 3 == 2:
print(" ----- + ----- + -----")
print(" - = - - = - - = - {}\t{}\n"
.format(candidate.Fitness, timeDiff))
def mutate(genes, validationRules):
selectedRule = next(rule for rule in validationRules
if genes[rule.Index] == genes[rule.OtherIndex])
if selectedRule is None:
return
if index_row(selectedRule.OtherIndex) % 3 == 2 \
and random.randint(0, 10) == 0:
sectionStart = section_start(selectedRule.Index)
current = selectedRule.OtherIndex
while selectedRule.OtherIndex == current:
shuffle_in_place(genes, sectionStart, 80)
selectedRule = next(rule for rule in validationRules
if genes[rule.Index] == genes[rule.OtherIndex])
return
row = index_row(selectedRule.OtherIndex)
start = row * 9
indexA = selectedRule.OtherIndex
indexB = random.randrange(start, len(genes))
genes[indexA], genes[indexB] = genes[indexB], genes[indexA]
def shuffle_in_place(genes, first, last):
while first < last:
index = random.randint(first, last)
genes[first], genes[index] = genes[index], genes[first]
first += 1
class SudokuTests(unittest.TestCase):
def test(self):
geneset = [i for i in range(1, 9 + 1)]
startTime = datetime.datetime.now()
optimalValue = 100
def fnDisplay(candidate):
display(candidate, startTime)
validationRules = build_validation_rules()
def fnGetFitness(genes):
return get_fitness(genes, validationRules)
def fnCreate():
return random.sample(geneset * 9, 81)
def fnMutate(genes):
mutate(genes, validationRules)
best = genetic.get_best(fnGetFitness, None, optimalValue, None,
fnDisplay, fnMutate, fnCreate, maxAge=50)
self.assertEqual(best.Fitness, optimalValue)
def test_benchmark(self):
genetic.Benchmark.run(lambda: self.test())
def build_validation_rules():
rules = []
for index in range(80):
itsRow = index_row(index)
itsColumn = index_column(index)
itsSection = row_column_section(itsRow, itsColumn)
for index2 in range(index + 1, 81):
otherRow = index_row(index2)
otherColumn = index_column(index2)
otherSection = row_column_section(otherRow, otherColumn)
if itsRow == otherRow or \
itsColumn == otherColumn or \
itsSection == otherSection:
rules.append(Rule(index, index2))
rules.sort(key=lambda x: x.OtherIndex * 100 + x.Index)
return rules
def index_row(index):
return int(index / 9)
def index_column(index):
return int(index % 9)
def row_column_section(row, column):
return int(row / 3) * 3 + int(column / 3)
def index_section(index):
return row_column_section(index_row(index), index_column(index))
def section_start(index):
return int((index_row(index) % 9) / 3) * 27 + int(
index_column(index) / 3) * 3
class Rule:
def __init__(self, it, other):
if it > other:
it, other = other, it
self.Index = it
self.OtherIndex = other
def __eq__(self, other):
return self.Index == other.Index and \
self.OtherIndex == other.OtherIndex
def __hash__(self):
return self.Index * 100 + self.OtherIndex
if __name__ == '__main__':
unittest.main()
| python |
'''
Created on Dec 20, 2017
@author: William Tucker
'''
class ParserError(Exception):
pass
| python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
'''
model.py
Andy Freeland and Dan Levy
5 June 2010
Contains functions to handle database queries.
'''
import web
from config import db
def add_object(name):
'''Adds an object with the given name to the objects table in the database.
Also initializes weights for each question in the data table.'''
object_id = db.insert('objects', name=name)
# initialize weights for each question in data
questions = get_questions()
for question in questions:
add_data(object_id, question.id)
return object_id
def add_question(question):
'''Adds a question with the given text to the questions table in the database.
Also initializes weights for each object in the data table.'''
question_id = db.insert('questions', text=question)
# initialize weights for each object in data
objects = get_objects()
for object in objects:
add_data(object.id, question_id)
return question_id
def add_data(object_id, question_id, value=0):
'''Inserts a weight with value=value for a specified object_id and question_id
into data. Defaults to value=0.'''
db.insert('data', object_id=object_id, question_id=question_id, value=value)
def update_data(object_id, question_id, value):
'''Updates the weight for a specified object_id and question_id in data with
the specified value.'''
db.update('data', where='object_id = $object_id AND question_id = $question_id', vars=locals(), value=value)
#def update_weights(object_id, asked_questions):
## Dictionary {question: value}
#for question in asked_questions:
#value = asked_questions[question]
#update_data(object_id, question, value)
def get_objects():
'''Returns an IterBetter of all the objects in database, where each row is a Storage object.'''
return db.select('objects')
def get_data():
'''Returns an IterBetter of all the data in the database, where each row is a Storage object.'''
return db.select('data')
def get_questions():
    '''Returns an IterBetter of all the questions in the database, where each row is a Storage object.'''
return db.select('questions')
def get_value(object_id, question_id):
'''Returns the weight for given object_id question_id from data. If the weight
does not exist, returns None.'''
where = 'object_id=%d AND question_id=%d' %(object_id, question_id)
try:
return db.select('data', vars=locals(), where=where)[0].value
except IndexError:
return None
def get_object_by_name(name):
'''Returns a Storage object containing an object where name=name.'''
try:
return db.select('objects', vars=locals(), where='name=$name')[0]
except IndexError:
return None
def get_object_by_id(id):
'''Returns a Storage object containing an object where id=id.'''
try:
return db.select('objects', vars=locals(), where='id = $id')[0]
except IndexError:
return None
def get_question_by_id(id):
'''Returns a Storage object containing a question where id=id.'''
try:
return db.select('questions', vars=locals(), where='id=$id')[0]
except IndexError:
return None
def get_question_by_text(text):
'''Returns Storage object containing a question where text=text.'''
try:
return db.select('questions', vars=locals(), where='text=$text')[0]
except IndexError:
return None
def get_data_by_question_id(question_id):
    '''Returns an IterBetter of all weights for a particular question_id, where each
    row is a Storage object.'''
try:
return db.select('data', vars=locals(), where='question_id=$question_id')
except IndexError:
return None
def get_data_by_object_id(object_id):
'''Returns an IterBetter of all weights for a particular object_id, where each
row is a Storage object.'''
try:
return db.select('data', vars=locals(), where='object_id=$object_id')
except IndexError:
return None
def get_data_dictionary():
'''Returns the data as a dictionary object, where keys are (object_id, question_id)
tuples, and values are the weights for that pair.'''
d = get_data()
data = {}
for row in d:
data[(row.object_id, row.question_id)] = row.value
return data
def get_num_unknowns(object_tuple, question_id):
'''Returns the number of objects in the object_tuple where the value for the
given question_id is zero, or unknown.'''
assert type(object_tuple) == tuple
where = 'object_id in %s AND question_id=%d AND value =0' %(object_tuple, question_id)
try:
rows = db.select('data', vars=locals(), where=where, what='count(*) AS count')
return rows[0].count
except IndexError:
return 0
def get_num_positives(object_tuple, question_id):
'''Returns the number of objects in the object_tuple where the value for the
given question_id is positive.'''
assert type(object_tuple) == tuple
where = 'object_id IN %s AND question_id=%d AND value >0' %(object_tuple, question_id)
try:
rows = db.select('data', vars=locals(), where=where, what='count(*) AS count')
return rows[0].count
except IndexError:
return 0
def get_num_negatives(object_tuple, question_id):
'''Returns the number of objects in the object_tuple where the value for the
given question_id is negative.'''
assert type(object_tuple) == tuple
where = 'object_id in %s AND question_id=%d AND value <0' %(object_tuple, question_id)
try:
rows = db.select('data', vars=locals(), where=where, what='count(*) AS count')
return rows[0].count
except IndexError:
return 0
def delete_question(question_id):
'''Deletes a question and its weights for a particular question_id.'''
db.delete('questions', where='id=$question_id', vars=locals())
db.delete('data', where='question_id=$question_id', vars=locals())
def delete_object(object_id):
'''Deletes an object and its weights for a particular object_id.'''
db.delete('objects', where='id=$object_id', vars=locals())
db.delete('data', where='object_id=$object_id', vars=locals())
def update_times_played(object_id):
'''Increments the number of times played for a particular object_id.'''
current = db.select('objects', vars=locals(), where='id=$object_id')[0].times_played
    if current is None: current = 0
db.update('objects', where='id = $object_id', vars=locals(), times_played=current+1)
def num_objects():
'''Returns the number of objects in database.'''
return db.query('select COUNT(*) from objects;')
def record_playlog(object_id, asked_questions, right):
'''Records the questions and responses, and outcomes of each game. Allows us
to experiment using different parameters without having to retrain from scratch.'''
db.insert('playlog', object_id=object_id, data=str(asked_questions), right=right)
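if __name__ == '__main__':
    # Usage sketch (illustration only): assumes the objects/questions/data tables
    # from config.db already exist; 'penguin' and the question text are hypothetical
    # examples, not fixtures shipped with the project.
    object_id = add_object('penguin')
    question_id = add_question('Does it fly?')
    update_data(object_id, question_id, -1)  # record a strong "no" for this pair
    print(get_value(object_id, question_id))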
| python |
from ncssl_api_client.api.commands.abstract_command import AbstractCommand
class GetListCommand(AbstractCommand):
pass | python |
import logging
logger = logging.getLogger(__name__)
import io
import os
import re
from collections import defaultdict
from html import unescape
from urllib.parse import urljoin
import chardet
import lxml.html
import numpy as np
import pandas as pd
from bs4 import BeautifulSoup
from py_sec_edgar.settings import CONFIG
from py_sec_edgar.utilities import file_size, uudecode, format_filename
prop = ['filing_url', 'filing_folder', 'cik_directory',
'filing_filepath', 'filing_zip_filepath',
'extracted_filing_directory', 'filing_basename',
'header_directory', 'header_filepath',
'is_downloaded', 'is_loaded',
'is_parsed_header', 'is_processed']
class SecEdgarFiling:
def __init__(self, filing, download=False, load=False, parse_header=False, process_filing=False):
self.is_processed = False
self.is_parsed_header = False
self.is_loaded = False
self.is_downloaded = False
self.sec_filing = filing
self.is_lxml_root = False
self.filing_text = None
self.lxml_root = None
feed_item = dict(filing)
self.filing_url = urljoin(CONFIG.edgar_Archives_url, feed_item['Filename'])
self.filing_folder = os.path.basename(feed_item['Filename']).split('.')[0].replace("-", "")
self.cik_directory = CONFIG.TXT_FILING_DATA_DIR.replace("CIK", str(feed_item['CIK'])).replace("FOLDER", "")
self.filing_filepath = os.path.join(self.cik_directory, os.path.basename(feed_item['Filename']))
self.filing_zip_filepath = os.path.join(self.cik_directory, os.path.basename(feed_item['Filename']).replace('.txt', '.zip'))
self.extracted_filing_directory = CONFIG.TXT_FILING_DATA_DIR.replace("CIK", str(feed_item['CIK'])).replace("FOLDER", self.filing_folder)
self.filing_basename = os.path.basename(self.filing_filepath)
self.header_directory = self.extracted_filing_directory
self.header_filepath = os.path.join(self.header_directory, f"{self.filing_basename.replace('.txt', '')}_FILING_HEADER.csv")
self.check_if_exists(self.header_directory)
        if download:
            self.download()
        if load:
            self.load()
        if parse_header:
            self.parse_header()
        if process_filing:
            self.process_filing()
def check_if_exists(self, path):
if not os.path.exists(path):
os.makedirs(path)
def load(self):
self._load(filing_filepath=self.filing_filepath)
def _load(self, filing_filepath=None, lxml_root=True, file_stats=True):
if self.is_loaded:
return
if not filing_filepath:
filing_filepath = self.filing_filepath
try:
# or codecs.open on Python 2
filing_text = open(filing_filepath, "rb").read()
result = chardet.detect(filing_text)
if result:
self.charenc = result['encoding']
with io.open(filing_filepath, "r", encoding=self.charenc) as f:
self.filing_text = f.read()
self.is_loaded = True
logger.info(f"Filing Loaded")
except:
with io.open(filing_filepath, "rb") as f:
self.filing_text = f.read()
if lxml_root:
lxml_html = lxml.html.fromstring(self.filing_text)
self.lxml_root = lxml_html.getroottree()
self.is_lxml_root = True
logger.info(f"Filing Lxml")
if file_stats:
self.FILE_SIZE = file_size(self.filing_filepath)
self.FILE_SIZE_BYTES = os.stat(self.filing_filepath).st_size
self.ENCODING = self.charenc
def download(self):
logger.info("Downloading Filing..")
self._download(filing_url=self.filing_url, filing_filepath=self.filing_filepath, overwrite_if_exists=True)
def _download(self, filing_url=None, filing_filepath=None, overwrite_if_exists=False):
try:
if not filing_url:
filing_url = self.filing_url
print(filing_url)
if not filing_filepath:
filing_filepath = self.filing_filepath
print(filing_filepath)
if not os.path.exists(filing_filepath) and overwrite_if_exists == True:
self.is_downloaded = True
logger.info(f"Filing Downloaded")
# todo: celery version of download full
# consume_complete_submission_filing_txt.delay(filing_json, filepath_cik)
elif os.path.exists(filing_filepath):
logger.error(f"Filing Already Exists")
self.is_downloaded = True
except Exception as e:
logger.error(f"Couldn't Download File \n\t{e}")
def parse_header(self, save_output=False):
raw_html = self.filing_text
self._parse_header(raw_html, save_output=save_output)
def _parse_header(self, raw_html, save_output=False):
"""parses the heading of an SEC Edgar filing"""
if not raw_html:
self.load()
lxml_html = lxml.html.fromstring(raw_html)
root = lxml_html.getroottree()
data = defaultdict(dict)
valuename = ""
for sec_header_element in root.xpath("//*/sec-header"):
soup = BeautifulSoup(lxml.html.tostring(sec_header_element), 'lxml')
sec_header = re.findall(
r'<(SEC-HEADER|sec-header)>(.*?)</(SEC-HEADER|sec-header)>', soup.prettify(), re.DOTALL)[0][1]
split_header = sec_header.split('\n')
for i, headerItem in enumerate(split_header):
if len(headerItem) > 0:
try:
if "<" in headerItem and ">" in headerItem:
keyname = headerItem
valuename = split_header[i + 1]
data[i] = ["", "", keyname.strip(), valuename]
elif not headerItem.startswith("\t") and headerItem != valuename and "<" not in headerItem:
data[i] = ["", "", headerItem.split(":")[0].split("\t")[0], unescape(headerItem.split(":")[1].lstrip())]
elif headerItem != "" and headerItem != valuename and "<" not in headerItem:
data[i] = headerItem.split(":")[0].split(
"\t") + [unescape(headerItem.split(":")[1].lstrip())]
else:
print(headerItem)
except:
keyname = headerItem.strip()
valuename = headerItem.strip()
print("found problem")
df_header = pd.DataFrame.from_dict(dict(data), orient='index')
        df_header = df_header.replace('', np.nan)
df_header[1] = df_header[1].ffill().bfill().tolist()
df_header = df_header.iloc[:, 1:]
df_header = df_header.dropna()
df_header.columns = ['GROUP', 'KEY', 'VALUE']
print(df_header)
if save_output == True:
df_header.to_csv(self.header_filepath)
self.df_header = df_header
self.is_parsed_header = True
def process_filing(self, save_output=False):
if os.path.exists(self.cik_directory) and not os.path.exists(self.cik_directory + ".zip"):
try:
logger.info("\n\n\n\n\tExtracting Filing Documents:\n")
self._process_filing(self.filing_text, save_output=save_output)
logger.info("\n\n\n\n\tExtraction Completed\n")
except UnicodeDecodeError as E:
logger.error(f"\n\n\n\nError Decoding \n\n{E}")
def _process_filing(self, raw_text, save_output=False):
"""
Given a filepath
:param filepath:
:param output_directory:
:return:
"""
elements_list = [('FILENAME', './/filename'), ('TYPE', './/type'),
('SEQUENCE', './/sequence'), ('DESCRIPTION', './/description')]
xbrl_doc = re.compile(r'<DOCUMENT>(.*?)</DOCUMENT>', re.DOTALL)
xbrl_text = re.compile(r'<(TEXT|text)>(.*?)</(TEXT|text)>', re.MULTILINE | re.DOTALL)
documents = xbrl_doc.findall(raw_text)
filing_documents = {}
for i, document in enumerate(documents, start=1):
uue_filepath = None
filing_document = {}
lxml_html = lxml.html.fromstring(document)
root = lxml_html.getroottree()
for (element, element_path) in elements_list:
try:
filing_document[f"{element}"] = root.xpath(f"{element_path}")[0].text.strip()
except:
filing_document[f"{element}"] = ""
raw_text = xbrl_text.findall(document)
raw_text = raw_text[0][1].replace("<XBRL>", "").replace("</XBRL>", "").strip()
raw_text = raw_text.replace("<XML>", "").replace("</XML>", "").strip()
if raw_text.lower().startswith("begin") or document.lower().startswith("begin"):
uue_filepath = os.path.join(self.filing_folder, filing_document['FILENAME'] + ".uue")
output_filepath = os.path.join(self.filing_folder, uue_filepath.replace(".uue", ""))
output_filename = os.path.basename(output_filepath)
if save_output:
with open(uue_filepath, 'w', encoding=self.charenc) as f:
f.write(raw_text)
uudecode(uue_filepath, out_file=output_filepath)
else:
doc_num = f"{int(filing_document['SEQUENCE'])}".zfill(4)
try:
output_filename = f"{doc_num}-({filing_document['TYPE']}) {filing_document['DESCRIPTION']} {filing_document['FILENAME']}"
except:
output_filename = f"{doc_num}-({filing_document['TYPE']}) {filing_document['FILENAME']}".replace(" ", "_").replace(":", "").replace("__", "_")
output_filename = output_filename.replace(" ", "_").replace(":", "").replace("__", "_")
output_filename = format_filename(output_filename)
output_filepath = os.path.join(self.filing_folder, output_filename)
if save_output:
with open(output_filepath, 'w', encoding=self.charenc) as f:
f.write(raw_text)
filing_document['RELATIVE_FILEPATH'] = os.path.join(os.path.basename(self.filing_folder), output_filepath)
filing_document['DESCRIPTIVE_FILEPATH'] = output_filename
if save_output:
filing_document['FILE_SIZE'] = file_size(output_filepath)
filing_document['FILE_SIZE_BYTES'] = os.stat(output_filepath).st_size
filing_documents[i] = filing_document
if uue_filepath and os.path.exists(uue_filepath):
os.remove(uue_filepath)
df_sec_filing_contents = pd.DataFrame.from_dict(filing_documents, orient='index')
if save_output:
df_sec_filing_contents.to_csv(os.path.join(self.filing_folder, f"{os.path.basename(self.filing_folder)}_FILING_CONTENTS.csv"))
logger.info(df_sec_filing_contents)
self.is_processed = True
self.df_sec_filing_contents = df_sec_filing_contents
def parse_filing(self, raw_text=None):
"""
Parses html file
:param sec_filing['filepath']: html file
:return: dictionary of file_contents including lxml_dict
"""
lxml_dict = {}
lxml_html = lxml.html.fromstring(raw_text)
root = lxml_html.getroottree()
soup = BeautifulSoup(lxml.html.tostring(root), 'lxml')
document_data = {}
document_data['FILEPATH'] = self.filing_filepath
for ii, element in enumerate(root.xpath("//*/body/*")):
lxml_dict[ii] = element
div_check = {}
for ii, element in enumerate(lxml.html.fromstring(soup.prettify()).xpath("//*/div/*")):
div_check[ii] = element
document_data['div_check'] = div_check
document_data['NUMBER_OF_ELEMENTS'] = len(lxml_dict)
return document_data
def __str__(self):
print(f'\nSEC Filing:\n')
for k in prop:
print(f'\t{k}: \t{getattr(self, k)}')
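if __name__ == '__main__':
    # Usage sketch (illustration only): the dict mirrors the two index fields the
    # constructor reads ('CIK' and 'Filename'); the values are hypothetical
    # placeholders, not a real filing, and nothing is downloaded or loaded here.
    example_row = {'CIK': 1234567, 'Filename': 'edgar/data/1234567/0001234567-20-000001.txt'}
    filing = SecEdgarFiling(example_row, download=False, load=False)
    print(filing.filing_url)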
| python |
#!/usr/bin/env python
import copy
import rospy
from geometry_msgs.msg import PoseStamped
from interactive_markers.interactive_marker_server import InteractiveMarkerServer
from interactive_markers.menu_handler import MenuHandler
from traversability_rviz_paths.msg import Path, Paths
from std_msgs.msg import ColorRGBA
from visualization_msgs.msg import (InteractiveMarker,
InteractiveMarkerControl,
Marker)
from nav_msgs.msg import Path as NavPath
import numpy as np
from tf.transformations import quaternion_from_euler
def orientation(v):
roll = np.arctan2(v[1], v[0])
pitch = np.pi * 0.5 - np.arctan2(v[2], np.sqrt(v[0]**2 + v[1]**2))
return quaternion_from_euler(0, pitch, roll, axes='szyz')
def cylinder_between(p1, p2, color_msg, width=0.1):
cylinder = Marker()
cylinder.type = Marker.CYLINDER
cylinder.scale.x = cylinder.scale.y = width
cylinder.color = color_msg
cylinder.scale.z = np.linalg.norm(p1 - p2)
m = (p1 + p2) * 0.5
cylinder.pose.position.x = m[0]
cylinder.pose.position.y = m[1]
cylinder.pose.position.z = m[2]
o = cylinder.pose.orientation
o.x, o.y, o.z, o.w = orientation(p2 - p1)
return cylinder
def sphere_at(p, color_msg, width=0.1):
sphere = Marker()
sphere.type = Marker.SPHERE
sphere.scale.x = sphere.scale.y = sphere.scale.z = width
sphere.color = color_msg
sphere.pose.position.x = p[0]
sphere.pose.position.y = p[1]
sphere.pose.position.z = p[2]
return sphere
def node(pose, delta_z):
p = pose.pose.position
return np.array([p.x, p.y, p.z + delta_z])
def create_marker(path_msg, color_msg, description, path_id, width=0.1, delta_z=0.1):
int_marker = InteractiveMarker()
int_marker.header.frame_id = path_msg.header.frame_id
int_marker.name = str(path_id)
int_marker.description = "Path {0}".format(path_id)
# line_marker = Marker()
# line_marker.type = Marker.LINE_STRIP
# line_marker.scale.x = width
# line_marker.color = color_msg
# line_marker.points = [p.pose.position for p in path_msg.poses]
# for point in line_marker.points:
# point.z += delta_z
control = InteractiveMarkerControl()
control.always_visible = True
control.interaction_mode = InteractiveMarkerControl.MENU
# control.markers.append(line_marker)
points = [node(pose, delta_z) for pose in path_msg.poses]
for p1, p2 in zip(points[:-1], points[1:]):
control.markers.append(cylinder_between(p1, p2, color_msg, width))
for p in points:
control.markers.append(sphere_at(p, color_msg, width))
int_marker.controls.append(copy.deepcopy(control))
menu_handler = MenuHandler()
# put all the information in the main menu
#d = menu_handler.insert("Description")
for line in description:
menu_handler.insert(line)#, parent=d)
return menu_handler, int_marker
def ignore(msg):
pass
def test_msg():
msg = Path()
msg.path.header.frame_id = 'base_link'
msg.path.poses.append(PoseStamped())
msg.path.poses.append(PoseStamped())
msg.path.poses[1].pose.position.y = 1
msg.color = ColorRGBA(1.0, 0.5, 0.0, 0.5)
msg.description = ["A=1"]
return msg
class RvizPathServer(object):
def __init__(self):
super(RvizPathServer, self).__init__()
rospy.init_node("traversability_rviz_paths_node")
self.server = InteractiveMarkerServer("paths")
self.paths = {}
self.delta_z = rospy.get_param('~offset', 0.15)
self.width = rospy.get_param('~width', 0.15)
self.pub = rospy.Publisher("selected_path", NavPath, queue_size=1)
rospy.Subscriber("paths", Paths, self.updatePaths, queue_size=1)
# self.add_marker(test_msg(), 0)
# self.server.applyChanges()
rospy.spin()
def add_marker(self, msg, path_id):
menu, marker = create_marker(path_msg=msg.path, color_msg=msg.color,
description=msg.description, path_id=path_id,
width=self.width,
delta_z=self.delta_z)
self.server.insert(marker, ignore)
menu.insert("FOLLOW", callback=self.goto(path_id))
menu.apply(self.server, marker.name)
self.paths[path_id] = msg.path
def goto(self, path_id):
def f(msg):
rospy.loginfo("Follow path %d", path_id)
self.pub.publish(self.paths[path_id])
return f
def updatePaths(self, msg):
path_msg = NavPath()
path_msg.header.frame_id = 'map'
self.pub.publish(path_msg)
self.server.clear()
for i, m in enumerate(msg.paths):
self.add_marker(m, i)
self.server.applyChanges()
if __name__ == '__main__':
RvizPathServer()
| python |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""An example Keras trainer for the Cora data set using graph regularization.
USAGE:
python graph_keras_mlp_cora.py [flags] train.tfr test.tfr
See https://linqs.soe.ucsc.edu/data for a description of the Cora data set, and
the corresponding graph and training data set.
This example demonstrates the use of sequential, functional, and subclass models
in Keras for graph regularization. Users may change 'base_models' defined in
main() as necessary, to select a subset of the supported Keras base model types.
In all cases, the base model used is a multi-layer perceptron containing two
hidden layers with drop out.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
from absl import logging
import attr
import neural_structured_learning as nsl
import tensorflow as tf
FLAGS = flags.FLAGS
FLAGS.showprefixforinfo = False
flags.DEFINE_integer('train_epochs', None, 'Number of epochs to train.')
flags.DEFINE_integer('eval_steps', None, 'Number of steps to evaluate.')
NBR_FEATURE_PREFIX = 'NL_nbr_'
NBR_WEIGHT_SUFFIX = '_weight'
@attr.s
class HParams(object):
"""Hyper-parameters used for training."""
### dataset parameters
num_classes = attr.ib(default=7)
max_seq_length = attr.ib(default=1433)
### NGM parameters
distance_type = attr.ib(default=nsl.configs.DistanceType.L2)
graph_regularization_multiplier = attr.ib(default=0.1)
num_neighbors = attr.ib(default=1)
### model architecture
num_fc_units = attr.ib(default=[50, 50])
### training parameters
train_epochs = attr.ib(default=10)
batch_size = attr.ib(default=128)
dropout_rate = attr.ib(default=0.5)
### eval parameters
eval_steps = attr.ib(default=None) # Every test instance is evaluated.
def get_hyper_parameters():
"""Returns the hyper-parameters used for training."""
hparams = HParams()
if FLAGS.train_epochs:
hparams.train_epochs = FLAGS.train_epochs
if FLAGS.eval_steps:
hparams.eval_steps = FLAGS.eval_steps
return hparams
def load_dataset(filename):
"""Reads a file in the `.tfrecord` format.
Args:
filename: Name of the file containing `tf.train.Example` objects.
Returns:
An instance of `tf.data.TFRecordDataset` containing the `tf.train.Example`
objects.
"""
return tf.data.TFRecordDataset([filename])
def make_dataset(file_path, training, include_nbr_features, hparams):
"""Returns a `tf.data.Dataset` instance based on data in `file_path`."""
def parse_example(example_proto):
"""Extracts relevant fields from the `example_proto`.
Args:
example_proto: An instance of `tf.train.Example`.
Returns:
A pair whose first value is a dictionary containing relevant features
and whose second value contains the ground truth labels.
"""
# The 'words' feature is a multi-hot, bag-of-words representation of the
# original raw text. A default value is required for examples that don't
# have the feature.
feature_spec = {
'words':
tf.io.FixedLenFeature([hparams.max_seq_length],
tf.int64,
default_value=tf.constant(
0,
dtype=tf.int64,
shape=[hparams.max_seq_length])),
'label':
tf.io.FixedLenFeature((), tf.int64, default_value=-1),
}
if include_nbr_features:
for i in range(hparams.num_neighbors):
nbr_feature_key = '{}{}_{}'.format(NBR_FEATURE_PREFIX, i, 'words')
nbr_weight_key = '{}{}{}'.format(NBR_FEATURE_PREFIX, i,
NBR_WEIGHT_SUFFIX)
nbr_id_key = '{}{}_{}'.format(NBR_FEATURE_PREFIX, i, 'id')
feature_spec[nbr_feature_key] = tf.io.FixedLenFeature(
[hparams.max_seq_length],
tf.int64,
default_value=tf.constant(
0, dtype=tf.int64, shape=[hparams.max_seq_length]))
feature_spec[nbr_weight_key] = tf.io.FixedLenFeature(
[1], tf.float32, default_value=tf.constant([0.0]))
feature_spec[nbr_id_key] = tf.io.FixedLenFeature(
(), tf.string, default_value='')
features = tf.io.parse_single_example(example_proto, feature_spec)
labels = features.pop('label')
return features, labels
# If the dataset is sharded, the following code may be required:
# filenames = tf.data.Dataset.list_files(file_path, shuffle=True)
# dataset = filenames.interleave(load_dataset, cycle_length=1)
dataset = load_dataset(file_path)
if training:
dataset = dataset.shuffle(10000)
dataset = dataset.map(parse_example)
dataset = dataset.batch(hparams.batch_size)
return dataset
def make_mlp_sequential_model(hparams):
"""Creates a sequential multi-layer perceptron model."""
model = tf.keras.Sequential()
model.add(
tf.keras.layers.InputLayer(
input_shape=(hparams.max_seq_length,), name='words'))
  # Input is already multi-hot encoded in the integer format. We cast it to
  # floating point format here.
model.add(
tf.keras.layers.Lambda(lambda x: tf.keras.backend.cast(x, tf.float32)))
for num_units in hparams.num_fc_units:
model.add(tf.keras.layers.Dense(num_units, activation='relu'))
model.add(tf.keras.layers.Dropout(hparams.dropout_rate))
model.add(tf.keras.layers.Dense(hparams.num_classes, activation='softmax'))
return model
def make_mlp_functional_model(hparams):
"""Creates a functional API-based multi-layer perceptron model."""
inputs = tf.keras.Input(
shape=(hparams.max_seq_length,), dtype='int64', name='words')
  # Input is already multi-hot encoded in the integer format. We cast it to
  # floating point format here.
cur_layer = tf.keras.layers.Lambda(
lambda x: tf.keras.backend.cast(x, tf.float32))(
inputs)
for num_units in hparams.num_fc_units:
cur_layer = tf.keras.layers.Dense(num_units, activation='relu')(cur_layer)
# For functional models, by default, Keras ensures that the 'dropout' layer
# is invoked only during training.
cur_layer = tf.keras.layers.Dropout(hparams.dropout_rate)(cur_layer)
outputs = tf.keras.layers.Dense(
hparams.num_classes, activation='softmax')(
cur_layer)
model = tf.keras.Model(inputs, outputs=outputs)
return model
def make_mlp_subclass_model(hparams):
"""Creates a multi-layer perceptron subclass model in Keras."""
class MLP(tf.keras.Model):
"""Subclass model defining a multi-layer perceptron."""
def __init__(self):
super(MLP, self).__init__()
self.cast_to_float_layer = tf.keras.layers.Lambda(
lambda x: tf.keras.backend.cast(x, tf.float32))
self.dense_layers = [
tf.keras.layers.Dense(num_units, activation='relu')
for num_units in hparams.num_fc_units
]
self.dropout_layer = tf.keras.layers.Dropout(hparams.dropout_rate)
self.output_layer = tf.keras.layers.Dense(
hparams.num_classes, activation='softmax')
def call(self, inputs, training=False):
cur_layer = self.cast_to_float_layer(inputs['words'])
for dense_layer in self.dense_layers:
cur_layer = dense_layer(cur_layer)
cur_layer = self.dropout_layer(cur_layer, training=training)
outputs = self.output_layer(cur_layer)
return outputs
return MLP()
def log_metrics(model_desc, eval_metrics):
"""Logs evaluation metrics at `logging.INFO` level.
Args:
model_desc: A description of the model.
eval_metrics: A dictionary mapping metric names to corresponding values. It
must contain the loss and accuracy metrics.
"""
logging.info('\n')
logging.info('Eval accuracy for %s: %s', model_desc, eval_metrics['accuracy'])
logging.info('Eval loss for %s: %s', model_desc, eval_metrics['loss'])
if 'graph_loss' in eval_metrics:
logging.info('Eval graph loss for %s: %s', model_desc,
eval_metrics['graph_loss'])
def train_and_evaluate(model, model_desc, train_dataset, test_dataset, hparams):
"""Compiles, trains, and evaluates a `Keras` model.
Args:
    model: An instance of `tf.keras.Model`.
model_desc: A description of the model.
train_dataset: An instance of `tf.data.Dataset` representing training data.
test_dataset: An instance of `tf.data.Dataset` representing test data.
    hparams: An instance of `HParams`.
"""
model.compile(
optimizer='adam',
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
metrics=['accuracy'])
model.fit(train_dataset, epochs=hparams.train_epochs, verbose=1)
eval_results = dict(
zip(model.metrics_names,
model.evaluate(test_dataset, steps=hparams.eval_steps)))
log_metrics(model_desc, eval_results)
def main(argv):
# Check that the correct number of arguments have been provided. The
# training and test data should contain 'tf.train.Example' objects in the
# TFRecord format.
if len(argv) != 3:
raise app.UsageError('Invalid number of arguments; expected 2, got %d' %
(len(argv) - 1))
hparams = get_hyper_parameters()
train_data_path = argv[1]
test_data_path = argv[2]
# Graph regularization configuration.
graph_reg_config = nsl.configs.make_graph_reg_config(
max_neighbors=hparams.num_neighbors,
multiplier=hparams.graph_regularization_multiplier,
distance_type=hparams.distance_type,
sum_over_axis=-1)
# Create the base MLP models.
base_models = {
'FUNCTIONAL': make_mlp_functional_model(hparams),
'SEQUENTIAL': make_mlp_sequential_model(hparams),
'SUBCLASS': make_mlp_subclass_model(hparams)
}
for base_model_tag, base_model in base_models.items():
logging.info('\n====== %s BASE MODEL TEST BEGIN ======', base_model_tag)
train_dataset = make_dataset(train_data_path, True, False, hparams)
test_dataset = make_dataset(test_data_path, False, False, hparams)
train_and_evaluate(base_model, 'Base MLP model', train_dataset,
test_dataset, hparams)
logging.info('\n====== TRAINING WITH GRAPH REGULARIZATION ======\n')
# Wrap the base MLP model with graph regularization.
graph_reg_model = nsl.keras.GraphRegularization(base_model,
graph_reg_config)
train_dataset = make_dataset(train_data_path, True, True, hparams)
test_dataset = make_dataset(test_data_path, False, False, hparams)
train_and_evaluate(graph_reg_model, 'MLP + graph regularization',
train_dataset, test_dataset, hparams)
logging.info('\n====== %s BASE MODEL TEST END ======', base_model_tag)
if __name__ == '__main__':
tf.compat.v1.enable_v2_behavior()
app.run(main)
| python |
#!/usr/bin/python3
import pandas as pd
from os.path import join as oj
import os
def load_mit_voting(data_dir='.'):
''' Load in 2000-2016 County Presidential Data
Parameters
----------
data_dir : str; path to the data directory containing mit_voting.csv
Returns
-------
data frame
'''
raw = pd.read_csv(oj(data_dir, 'mit_voting.csv'))
return raw
if __name__ == '__main__':
raw = load_mit_voting()
print('loaded mit_voting successfully.')
| python |
import bpy
light = bpy.context.light
physical_light = bpy.context.light.photographer
light.type = 'SPOT'
light.spot_blend = 0.15000000596046448
light.shadow_soft_size = 0.029999999329447746
physical_light.spot_size = 1.1344640254974365
physical_light.light_unit = 'lumen'
physical_light.use_light_temperature = True
physical_light.light_temperature = 2700
physical_light.color = (1.0, 0.3967552185058594, 0.09530746936798096)
physical_light.normalizebycolor = True
physical_light.power = 20.061687469482422
physical_light.advanced_power = 10.0
physical_light.efficacy = 683.0
physical_light.lumen = 540.0
physical_light.candela = 543.5139770507812
physical_light.per_square_meter = True
physical_light.intensity = 10.0
physical_light.light_exposure = 0.0
| python |
"""This module tests the RXGate class."""
from __future__ import annotations
import numpy as np
from bqskit.ir.gates import RXGate
from bqskit.ir.gates import XGate
def test_get_unitary() -> None:
g = RXGate()
u = XGate().get_unitary()
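    # Note (assumption): RX(pi) equals X only up to a global phase of -i, so this
    # check relies on get_distance_from() being insensitive to global phase.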
assert g.get_unitary([np.pi]).get_distance_from(u) < 1e-7
| python |
from urllib.parse import urlparse
from kafka import KafkaProducer
def check(url):
bootstrap_urls = url.split(",")
bootstrap_parsed_urls = (urlparse(u) for u in bootstrap_urls)
bootstrap_nodes = list(
u.hostname + ":" + str(u.port or "9092") for u in bootstrap_parsed_urls
)
try:
KafkaProducer(bootstrap_servers=bootstrap_nodes)
return True
    except Exception:
return False
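# Hedged usage sketch: the address below is a placeholder. urlparse() only yields a
# hostname when the URL carries a scheme (e.g. "kafka://"), and an unreachable
# broker makes KafkaProducer raise, so check() then returns False.
if __name__ == "__main__":
    print(check("kafka://localhost:9092"))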
| python |
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
if MULTI_USER_MODE:
db = DAL('sqlite://storage.sqlite') # if not, use SQLite or other DB
from gluon.tools import *
auth = Auth(
globals(), db) # authentication/authorization
crud = Crud(
globals(), db) # for CRUD helpers using auth
service = Service(
globals()) # for json, xml, jsonrpc, xmlrpc, amfrpc
plugins = PluginManager()
mail = auth.settings.mailer
mail.settings.server = EMAIL_SERVER
mail.settings.sender = EMAIL_SENDER
mail.settings.login = EMAIL_LOGIN
auth.settings.extra_fields['auth_user'] = \
[Field('is_manager', 'boolean', default=False, writable=False)]
auth.define_tables() # creates all needed tables
auth.settings.registration_requires_verification = True
auth.settings.registration_requires_approval = True
auth.settings.reset_password_requires_verification = True
auth.settings.actions_disabled.append('register')
db.define_table('app', Field('name'), Field('owner', db.auth_user))
if not session.authorized and MULTI_USER_MODE:
if auth.user and not request.function == 'user':
session.authorized = True
elif not request.function == 'user':
redirect(URL('default', 'user/login'))
def is_manager():
if not MULTI_USER_MODE:
return True
elif auth.user and (auth.user.id == 1 or auth.user.is_manager):
return True
else:
return False
| python |
import pandas as pd
import numpy as np
import time
from datetime import datetime
import plotly.graph_objects as go
import logging
def historic_reader(path, symbol):
# Historic dfs
data_5m = pd.read_csv(path + 'historic/'+symbol+'-5m-data.csv')
data_1h = pd.read_csv(path + 'historic/'+symbol+'-1h-data.csv')
data_1d = pd.read_csv(path + 'historic/'+symbol+'-1d-data.csv')
# Formatting data
data_1h['MA9'] = data_1h.close.rolling(window=9, min_periods=1).mean()
data_1h['MA20'] = data_1h.close.rolling(window=20, min_periods=1).mean()
data_1h['MA100'] = data_1h.close.rolling(window=100, min_periods=1).mean()
data_1h.reset_index(inplace=True)
data_1h['Date_time'] = pd.to_datetime(data_1h['timestamp'])
data_1h.set_index(data_1h['timestamp'], inplace=True)
data_5m['MA9'] = data_5m.close.rolling(window=9, min_periods=1).mean()
data_5m['MA20'] = data_5m.close.rolling(window=20, min_periods=1).mean()
data_5m['MA100'] = data_5m.close.rolling(window=100, min_periods=1).mean()
data_5m.reset_index(inplace=True)
data_5m['Date_time'] = pd.to_datetime(data_5m['timestamp'])
data_5m.set_index(data_5m['timestamp'], inplace=True)
data_1d['MA9'] = data_1d.close.rolling(window=9, min_periods=1).mean()
data_1d['MA20'] = data_1d.close.rolling(window=20, min_periods=1).mean()
data_1d['MA100'] = data_1d.close.rolling(window=100, min_periods=1).mean()
data_1d.reset_index(inplace=True)
data_1d['Date_time'] = pd.to_datetime(data_1d['timestamp'])
data_1d.set_index(data_1d['timestamp'], inplace=True)
# Trends df
trends_df = pd.read_csv(path + 'trendlines_' + symbol + '_py.csv', sep='\t')
    # Entered positions (filled orders) are plotted later by c_plotter via filled_df.
return data_5m, data_1h, data_1d, trends_df
def c_plotter(base_df, data_5m, data_1h, data_1d, trends_df, filled_df = [], trend_percen = 0.02):
logger = logging.getLogger('root')
    # 5m chart: plot only the last 3 days
    # 1h chart: plot only the last 12 months
    # 1d chart: plot all available data
logger.info(f"Generating plot for {base_df} tf")
if base_df == '5m':
delta = '3 day'
end_date = data_5m.iloc[-1]['Date_time']
start_date = end_date - pd.Timedelta(delta)
elif base_df == '1h':
delta = '365 day'
end_date = data_1h.iloc[-1]['Date_time']
start_date = end_date - pd.Timedelta(delta)
elif base_df == '1d':
delta = 0
end_date = data_1d.iloc[-1]['Date_time']
start_date = data_1d.iloc[0]['Date_time']
# Masking dataframes
mask = (data_1h['Date_time'] > start_date) & (data_1h['Date_time'] <= end_date)
sub_df1h = data_1h.loc[mask]
mask2 = (data_5m['Date_time'] > start_date) & (data_5m['Date_time'] <= end_date)
sub_df5m = data_5m.loc[mask2]
mask3 = (data_1d['Date_time'] > start_date) & (data_1d['Date_time'] <= end_date)
sub_df1d = data_1d.loc[mask3]
#mask4 = (base_df['Date_time'] > start_date) & (base_df['Date_time'] <= end_date)
#sub_df = base_df.loc[mask4]
if base_df == '5m':
sub_df = sub_df5m
elif base_df == '1h':
sub_df = sub_df1h
elif base_df == '1d':
sub_df = sub_df1d
# 5 min MAs
MA_5min = [go.Scatter(x=sub_df5m.timestamp, y=sub_df5m.MA9, line=dict(color='blue', width=1.5, dash='dot'), name='MA9 5m',yaxis='y1'),
go.Scatter(x=sub_df5m.timestamp, y=sub_df5m.MA20, line=dict(color='darkorange', width=1.5, dash='dot'),name='MA20 5m', yaxis='y1'),
go.Scatter(x=sub_df5m.timestamp, y=sub_df5m.MA100, line=dict(color='darkred', width=1.5, dash='dot'),name='MA100 5m', yaxis='y1')
]
# 1h MAs
MA_1h = [
go.Scatter(x=sub_df1h.timestamp, y=sub_df1h.MA9, line=dict(color='blue', width=1.5), name='MA9 1h', yaxis='y1'),
go.Scatter(x=sub_df1h.timestamp, y=sub_df1h.MA20, line=dict(color='darkorange', width=1.5), name='MA20 1h',yaxis='y1'),
go.Scatter(x=sub_df1h.timestamp, y=sub_df1h.MA100, line=dict(color='darkred', width=1.5), name='MA100 1h',yaxis='y1'),
]
# 1d MAs
MA_1d = [
go.Scatter(x=sub_df1d.timestamp, y=sub_df1d.MA9, line=dict(color='blue', width=1.5, dash='dash'), name='MA9 1d',yaxis='y1'),
go.Scatter(x=sub_df1d.timestamp, y=sub_df1d.MA20, line=dict(color='darkorange', width=1.5, dash='dash'), name='MA20 1d', yaxis='y1'),
go.Scatter(x=sub_df1d.timestamp, y=sub_df1d.MA100, line=dict(color='darkred', width=1.5, dash='dash'), name='MA100 1d', yaxis='y1')
]
if base_df == '1d':
#not plotting 5min MA
MA_list = MA_1h + MA_1d
else:
MA_list = MA_5min + MA_1h + MA_1d
start = pd.Timestamp('2020-11-03')
end = pd.Timestamp('2021-12-25')
t_ts = np.linspace(start.value, end.value, 100)
t = pd.to_datetime(t_ts)
t_df = pd.DataFrame(t)
t_df.columns = ['Date_time']
t_df['timestamp'] = t_df.Date_time.values.astype(np.int64) // 10 ** 9
t_df.set_index(t_df['timestamp'], inplace=True)
xx = np.asarray(t)
xxx = []
for x in xx:
xxx.append(time.mktime(datetime.utcfromtimestamp(x.tolist() / 1e9).timetuple()))
trends_plot = []
for i, row in trends_df.iterrows():
if 'up' in row['trend_name']:
color = 'red'
else:
color = 'green'
yy = row['slope'] * np.array(xxx) + row['interc']
t_df[row['trend_name']] = yy
trends_plot.append(go.Scatter(x=t_df['Date_time'], y=t_df[row['trend_name']], line=dict(color=color, width=1.5), name=row['trend_name']))
data = [go.Candlestick(x=sub_df.timestamp,
open=sub_df.open,
high=sub_df.high,
low=sub_df.low,
close=sub_df.close,
name='XBTUSD', yaxis='y1'),
] + MA_list + trends_plot
layout = go.Layout(
xaxis=dict(
rangeslider=dict(
visible=False
)
)
)
fig = go.FigureWidget(data=data, layout=layout)
    fig.update_layout(xaxis_range=[sub_df.timestamp.iloc[0], sub_df.timestamp.iloc[-1]])
if len(filled_df) != 0:
filled_df['Date_time'] = pd.to_datetime(filled_df['timestamp'])
# Plotting entered positions
arrow_list = []
for i, row in filled_df.iterrows():
if row['side'] == 'Sell':
color = 'red'
else:
color = 'green'
arrow = dict(
x=row['Date_time'],
y=row['avgPx'],
xref="x", yref="y",
text=row['orderQty'],
showarrow=True,
axref="x", ayref='y',
ax=row['Date_time'],
ay=row['avgPx']-0.1*np.nanmax(sub_df['close']),
arrowhead=3,
arrowwidth=1.5,
arrowcolor=color, )
arrow_list.append(arrow)
fig.update_layout(annotations = arrow_list)
if base_df == '1d':
fig.update_layout(yaxis_range=[0, 1.10*np.nanmax(data_1d['close'])])
return fig
#def wallet_plotter():
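# Hedged usage sketch (path, symbol and timeframe are placeholders; the CSV files
# read by historic_reader() must already exist on disk):
#   data_5m, data_1h, data_1d, trends = historic_reader('./data/', 'XBTUSD')
#   fig = c_plotter('1h', data_5m, data_1h, data_1d, trends)
#   fig.show()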
| python |
from collections import OrderedDict
from bs4 import BeautifulSoup
HEADER = """package cpu
// Generated from: http://www.pastraiser.com/cpu/gameboy/gameboy_opcodes.html"""
def main():
with open('opcodes.html', 'r') as f:
soup = BeautifulSoup(f, 'html.parser')
tables = soup.find_all('table')
standard = generate(tables[0])
prefix = generate(tables[1])
print(HEADER)
output('mnemonics', standard)
print()
output('prefixMnemonics', prefix)
def generate(table):
mnemonics = OrderedDict()
opcode = 0
for row in table.find_all('tr')[1:]:
for cell in row.find_all('td')[1:]:
if len(cell.contents) > 1:
mnemonics[opcode] = cell.contents[0]
opcode += 1
return mnemonics
def output(name, mnemonics):
print('var', name, '= map[byte]string{')
for opcode, mnemonic in mnemonics.items():
print('\t0x{:02x}: "{}",'.format(opcode, mnemonic))
print('}')
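# For reference, the emitted Go source has this shape (actual opcode/mnemonic pairs
# come from the parsed HTML table; the single entry below is illustrative):
#   var mnemonics = map[byte]string{
#       0x00: "NOP",
#   }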
if __name__ == '__main__':
main()
| python |
from flask import Flask
from flask.helpers import send_from_directory
from waitress import serve
app = Flask(__name__, static_folder="build", static_url_path="/")
@app.route("/")
def index():
return send_from_directory(app.static_folder, "index.html")
@app.route("/api")
def hello_world():
return {"data": "hello esowc!"}
def run_server():
print("Starting CliMetLab server.")
print("Running on http://127.0.0.1:8080 (Press CTRL+C to quit)")
serve(app, host="127.0.0.1", port=8080)
if __name__ == "__main__":
run_server()
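# Hedged usage note: with the server running, the API route can be exercised with
#   curl http://127.0.0.1:8080/api
# which returns {"data": "hello esowc!"} as JSON.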
| python |
from typing import Union
from dimod import BinaryQuadraticModel, ConstrainedQuadraticModel
from omniqubo.transpiler import TranspilerAbs
from ..sympyopt import SympyOpt
class DimodToSympyopt(TranspilerAbs):
"""Transpiler for transforming dimod models into SymptOpt model
Transpiler can transform any BinaryQuadraticModel and
ConstrainedQuadraticModel.
"""
def transpile(self, model: Union[BinaryQuadraticModel, ConstrainedQuadraticModel]) -> SympyOpt:
"""Transpile dimod model into SympyOpt model
:param model: model to be transpiled
:return: equivalent SympyOpt model
"""
raise NotImplementedError()
def can_transpile(self, _: Union[BinaryQuadraticModel, ConstrainedQuadraticModel]) -> bool:
"""Check if model can be transpiled
Transpiler can transform any BinaryQuadraticModel and
ConstrainedQuadraticModel.
        :param _: model to be checked
:return: flag denoting if model can be transpiled
"""
return True
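# Hedged usage sketch (the module uses a relative import, so run it as part of the
# package; the BQM below is an arbitrary toy model):
#   bqm = BinaryQuadraticModel({"x": 1.0}, {("x", "y"): -2.0}, 0.0, "BINARY")
#   DimodToSympyopt().can_transpile(bqm)   # -> True; transpile() is not implemented yet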
| python |
from rapidfuzz import fuzz
from dvha.tools.roi_name_manager import clean_name
class ROINamePredictor:
def __init__(self, roi_map, weight_simple=1., weight_partial=0.6, threshold=0):
"""
:param roi_map: ROI map object
:type roi_map: DatabaseROIs
"""
self.roi_map = roi_map
norm_weight = weight_partial + weight_simple
self.weight = {'simple': 2 * weight_simple / norm_weight,
'partial': 2 * weight_partial / norm_weight}
self.threshold = threshold
def get_best_roi_match(self, roi, physician, return_score=False):
physician_variations = self.roi_map.get_all_variations_of_physician(physician)
fuzz_scores = self.get_combined_fuzz_scores(roi, physician_variations)
if fuzz_scores:
predicted_variation, score = fuzz_scores[0][1], fuzz_scores[0][0]
prediction = self.roi_map.get_physician_roi(physician, predicted_variation)
if score > self.threshold:
if return_score:
return prediction, score
return prediction
def get_combined_fuzz_score(self, a, b, mode='geom_mean'):
a, b = clean_name(a), clean_name(b)
simple = float(fuzz.ratio(a, b) * self.weight['simple'])
partial = float(fuzz.partial_ratio(a, b) * self.weight['partial'])
return self.combine_scores(simple, partial, mode=mode)
@staticmethod
def combine_scores(score_1, score_2, mode='average'):
if mode == 'geom_mean':
return (score_1 * score_2) ** 0.5
elif mode == 'product':
return score_1 * score_2 / 100.
else: # average
return (score_1 + score_2) / 2.
def get_combined_fuzz_scores(self, string, list_of_strings):
scores = [self.get_combined_fuzz_score(string, string_b) for string_b in list_of_strings]
if scores:
order_index = sorted(range(len(scores)), key=lambda k: scores[k])
return [(scores[i], list_of_strings[i]) for i in order_index[::-1]]
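# Hedged illustration of the score-combination modes used above; the two inputs are
# arbitrary example scores, not results from a real ROI comparison.
if __name__ == '__main__':
    print(ROINamePredictor.combine_scores(80.0, 50.0, mode='average'))    # 65.0
    print(ROINamePredictor.combine_scores(80.0, 50.0, mode='geom_mean'))  # ~63.25
    print(ROINamePredictor.combine_scores(80.0, 50.0, mode='product'))    # 40.0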
| python |
import hou
import AttributeName as an
def findPointAttrType(node_relative_path, attr_name):
attr_type = "none"
point_attrs = an.point(node_relative_path)
if attr_name in point_attrs:
if attr_name in an.pointFloat(node_relative_path):
attr_type = "f"
elif attr_name in an.pointInt(node_relative_path):
attr_type = "i"
elif attr_name in an.pointString(node_relative_path):
attr_type = "s"
elif attr_name in an.pointVector2(node_relative_path):
attr_type = "u"
elif attr_name in an.pointVector(node_relative_path):
attr_type = "v"
elif attr_name in an.pointVector4(node_relative_path):
attr_type = "p"
return attr_type
def findPrimAttrType(node_relative_path, attr_name):
attr_type = "none"
prim_attrs = an.prim(node_relative_path)
if attr_name in prim_attrs:
if attr_name in an.primFloat(node_relative_path):
attr_type = "f"
elif attr_name in an.primInt(node_relative_path):
attr_type = "i"
elif attr_name in an.primString(node_relative_path):
attr_type = "s"
elif attr_name in an.primVector2(node_relative_path):
attr_type = "u"
elif attr_name in an.primVector(node_relative_path):
attr_type = "v"
    return attr_type
| python |
EPSG_List = [
# ('3819 : HD1909', '3819'),
# ('3821 : TWD67', '3821'),
# ('3824 : TWD97', '3824'),
# ('3889 : IGRS', '3889'),
# ('3906 : MGI 1901', '3906'),
# ('4001 : Unknown datum based upon the Airy 1830 ellipsoid', '4001'),
# ('4002 : Unknown datum based upon the Airy Modified 1849 ellipsoid', '4002'),
# ('4003 : Unknown datum based upon the Australian National Spheroid', '4003'),
# ('4004 : Unknown datum based upon the Bessel 1841 ellipsoid', '4004'),
# ('4005 : Unknown datum based upon the Bessel Modified ellipsoid', '4005'),
# ('4006 : Unknown datum based upon the Bessel Namibia ellipsoid', '4006'),
# ('4007 : Unknown datum based upon the Clarke 1858 ellipsoid', '4007'),
# ('4008 : Unknown datum based upon the Clarke 1866 ellipsoid', '4008'),
# ('4009 : Unknown datum based upon the Clarke 1866 Michigan ellipsoid', '4009'),
# ('4010 : Unknown datum based upon the Clarke 1880 (Benoit) ellipsoid', '4010'),
# ('4011 : Unknown datum based upon the Clarke 1880 (IGN) ellipsoid', '4011'),
# ('4012 : Unknown datum based upon the Clarke 1880 (RGS) ellipsoid', '4012'),
# ('4013 : Unknown datum based upon the Clarke 1880 (Arc) ellipsoid', '4013'),
# ('4014 : Unknown datum based upon the Clarke 1880 (SGA 1922) ellipsoid', '4014'),
# ('4015 : Unknown datum based upon the Everest 1830 (1937 Adjustment) ellipsoid', '4015'),
# ('4016 : Unknown datum based upon the Everest 1830 (1967 Definition) ellipsoid', '4016'),
# ('4018 : Unknown datum based upon the Everest 1830 Modified ellipsoid', '4018'),
# ('4019 : Unknown datum based upon the GRS 1980 ellipsoid', '4019'),
# ('4020 : Unknown datum based upon the Helmert 1906 ellipsoid', '4020'),
# ('4021 : Unknown datum based upon the Indonesian National Spheroid', '4021'),
# ('4022 : Unknown datum based upon the International 1924 ellipsoid', '4022'),
# ('4023 : MOLDREF99', '4023'),
# ('4024 : Unknown datum based upon the Krassowsky 1940 ellipsoid', '4024'),
# ('4025 : Unknown datum based upon the NWL 9D ellipsoid', '4025'),
# ('4027 : Unknown datum based upon the Plessis 1817 ellipsoid', '4027'),
# ('4028 : Unknown datum based upon the Struve 1860 ellipsoid', '4028'),
# ('4029 : Unknown datum based upon the War Office ellipsoid', '4029'),
# ('4030 : Unknown datum based upon the WGS 84 ellipsoid', '4030'),
# ('4031 : Unknown datum based upon the GEM 10C ellipsoid', '4031'),
# ('4032 : Unknown datum based upon the OSU86F ellipsoid', '4032'),
# ('4033 : Unknown datum based upon the OSU91A ellipsoid', '4033'),
# ('4034 : Unknown datum based upon the Clarke 1880 ellipsoid', '4034'),
# ('4035 : Unknown datum based upon the Authalic Sphere', '4035'),
# ('4036 : Unknown datum based upon the GRS 1967 ellipsoid', '4036'),
# ('4041 : Unknown datum based upon the Average Terrestrial System 1977 ellipsoid', '4041'),
# ('4042 : Unknown datum based upon the Everest (1830 Definition) ellipsoid', '4042'),
# ('4043 : Unknown datum based upon the WGS 72 ellipsoid', '4043'),
# ('4044 : Unknown datum based upon the Everest 1830 (1962 Definition) ellipsoid', '4044'),
# ('4045 : Unknown datum based upon the Everest 1830 (1975 Definition) ellipsoid', '4045'),
# ('4046 : RGRDC 2005', '4046'),
# ('4047 : Unspecified datum based upon the GRS 1980 Authalic Sphere', '4047'),
# ('4052 : Unspecified datum based upon the Clarke 1866 Authalic Sphere', '4052'),
# ('4053 : Unspecified datum based upon the International 1924 Authalic Sphere', '4053'),
# ('4054 : Unspecified datum based upon the Hughes 1980 ellipsoid', '4054'),
# ('4055 : Popular Visualisation CRS', '4055'),
# ('4075 : SREF98', '4075'),
# ('4081 : REGCAN95', '4081'),
# ('4120 : Greek', '4120'),
# ('4121 : GGRS87', '4121'),
# ('4122 : ATS77', '4122'),
# ('4123 : KKJ', '4123'),
# ('4124 : RT90', '4124'),
# ('4125 : Samboja', '4125'),
# ('4126 : LKS94 (ETRS89)', '4126'),
# ('4127 : Tete', '4127'),
# ('4128 : Madzansua', '4128'),
# ('4129 : Observatario', '4129'),
# ('4130 : Moznet', '4130'),
# ('4131 : Indian 1960', '4131'),
# ('4132 : FD58', '4132'),
# ('4133 : EST92', '4133'),
# ('4134 : PSD93', '4134'),
# ('4135 : Old Hawaiian', '4135'),
# ('4136 : St. Lawrence Island', '4136'),
# ('4137 : St. Paul Island', '4137'),
# ('4138 : St. George Island', '4138'),
# ('4139 : Puerto Rico', '4139'),
# ('4140 : NAD83(CSRS98)', '4140'),
# ('4141 : Israel 1993', '4141'),
# ('4142 : Locodjo 1965', '4142'),
# ('4143 : Abidjan 1987', '4143'),
# ('4144 : Kalianpur 1937', '4144'),
# ('4145 : Kalianpur 1962', '4145'),
# ('4146 : Kalianpur 1975', '4146'),
# ('4147 : Hanoi 1972', '4147'),
# ('4148 : Hartebeesthoek94', '4148'),
# ('4149 : CH1903', '4149'),
# ('4150 : CH1903+', '4150'),
# ('4151 : CHTRF95', '4151'),
# ('4152 : NAD83(HARN)', '4152'),
# ('4153 : Rassadiran', '4153'),
# ('4154 : ED50(ED77)', '4154'),
# ('4155 : Dabola 1981', '4155'),
# ('4156 : S-JTSK', '4156'),
# ('4157 : Mount Dillon', '4157'),
# ('4158 : Naparima 1955', '4158'),
# ('4159 : ELD79', '4159'),
# ('4160 : Chos Malal 1914', '4160'),
# ('4161 : Pampa del Castillo', '4161'),
# ('4162 : Korean 1985', '4162'),
# ('4163 : Yemen NGN96', '4163'),
# ('4164 : South Yemen', '4164'),
# ('4165 : Bissau', '4165'),
# ('4166 : Korean 1995', '4166'),
# ('4167 : NZGD2000', '4167'),
# ('4168 : Accra', '4168'),
# ('4169 : American Samoa 1962', '4169'),
# ('4170 : SIRGAS 1995', '4170'),
# ('4171 : RGF93', '4171'),
# ('4172 : POSGAR', '4172'),
# ('4173 : IRENET95', '4173'),
# ('4174 : Sierra Leone 1924', '4174'),
# ('4175 : Sierra Leone 1968', '4175'),
# ('4176 : Australian Antarctic', '4176'),
# ('4178 : Pulkovo 1942(83)', '4178'),
# ('4179 : Pulkovo 1942(58)', '4179'),
# ('4180 : EST97', '4180'),
# ('4181 : Luxembourg 1930', '4181'),
# ('4182 : Azores Occidental 1939', '4182'),
# ('4183 : Azores Central 1948', '4183'),
# ('4184 : Azores Oriental 1940', '4184'),
# ('4185 : Madeira 1936', '4185'),
# ('4188 : OSNI 1952', '4188'),
# ('4189 : REGVEN', '4189'),
# ('4190 : POSGAR 98', '4190'),
# ('4191 : Albanian 1987', '4191'),
# ('4192 : Douala 1948', '4192'),
# ('4193 : Manoca 1962', '4193'),
# ('4194 : Qornoq 1927', '4194'),
# ('4195 : Scoresbysund 1952', '4195'),
# ('4196 : Ammassalik 1958', '4196'),
# ('4197 : Garoua', '4197'),
# ('4198 : Kousseri', '4198'),
# ('4199 : Egypt 1930', '4199'),
# ('4200 : Pulkovo 1995', '4200'),
# ('4201 : Adindan', '4201'),
# ('4202 : AGD66', '4202'),
# ('4203 : AGD84', '4203'),
# ('4204 : Ain el Abd', '4204'),
# ('4205 : Afgooye', '4205'),
# ('4206 : Agadez', '4206'),
# ('4207 : Lisbon', '4207'),
# ('4208 : Aratu', '4208'),
# ('4209 : Arc 1950', '4209'),
# ('4210 : Arc 1960', '4210'),
# ('4211 : Batavia', '4211'),
# ('4212 : Barbados 1938', '4212'),
# ('4213 : Beduaram', '4213'),
# ('4214 : Beijing 1954', '4214'),
# ('4215 : Belge 1950', '4215'),
# ('4216 : Bermuda 1957', '4216'),
# ('4218 : Bogota 1975', '4218'),
# ('4219 : Bukit Rimpah', '4219'),
# ('4220 : Camacupa', '4220'),
# ('4221 : Campo Inchauspe', '4221'),
# ('4222 : Cape', '4222'),
# ('4223 : Carthage', '4223'),
# ('4224 : Chua', '4224'),
# ('4225 : Corrego Alegre 1970-72', '4225'),
# ("4226 : Cote d'Ivoire", '4226'),
# ('4227 : Deir ez Zor', '4227'),
# ('4228 : Douala', '4228'),
# ('4229 : Egypt 1907', '4229'),
# ('4230 : ED50', '4230'),
# ('4231 : ED87', '4231'),
# ('4232 : Fahud', '4232'),
# ('4233 : Gandajika 1970', '4233'),
# ('4234 : Garoua', '4234'),
# ('4235 : Guyane Francaise', '4235'),
# ('4236 : Hu Tzu Shan 1950', '4236'),
# ('4237 : HD72', '4237'),
# ('4238 : ID74', '4238'),
# ('4239 : Indian 1954', '4239'),
# ('4240 : Indian 1975', '4240'),
# ('4241 : Jamaica 1875', '4241'),
# ('4242 : JAD69', '4242'),
# ('4243 : Kalianpur 1880', '4243'),
# ('4244 : Kandawala', '4244'),
# ('4245 : Kertau 1968', '4245'),
# ('4246 : KOC', '4246'),
# ('4247 : La Canoa', '4247'),
# ('4248 : PSAD56', '4248'),
# ('4249 : Lake', '4249'),
# ('4250 : Leigon', '4250'),
# ('4251 : Liberia 1964', '4251'),
# ('4252 : Lome', '4252'),
# ('4253 : Luzon 1911', '4253'),
# ('4254 : Hito XVIII 1963', '4254'),
# ('4255 : Herat North', '4255'),
# ('4256 : Mahe 1971', '4256'),
# ('4257 : Makassar', '4257'),
# ('4258 : ETRS89', '4258'),
# ('4259 : Malongo 1987', '4259'),
# ('4260 : Manoca', '4260'),
# ('4261 : Merchich', '4261'),
# ('4262 : Massawa', '4262'),
# ('4263 : Minna', '4263'),
# ('4264 : Mhast', '4264'),
# ('4265 : Monte Mario', '4265'),
# ("4266 : M'poraloko", '4266'),
# ('4267 : NAD27', '4267'),
# ('4268 : NAD27 Michigan', '4268'),
# ('4269 : NAD83', '4269'),
# ('4270 : Nahrwan 1967', '4270'),
# ('4271 : Naparima 1972', '4271'),
# ('4272 : NZGD49', '4272'),
# ('4273 : NGO 1948', '4273'),
# ('4274 : Datum 73', '4274'),
# ('4275 : NTF', '4275'),
# ('4276 : NSWC 9Z-2', '4276'),
# ('4277 : OSGB 1936', '4277'),
# ('4278 : OSGB70', '4278'),
# ('4279 : OS(SN)80', '4279'),
# ('4280 : Padang', '4280'),
# ('4281 : Palestine 1923', '4281'),
# ('4282 : Pointe Noire', '4282'),
# ('4283 : GDA94', '4283'),
# ('4284 : Pulkovo 1942', '4284'),
# ('4285 : Qatar 1974', '4285'),
# ('4286 : Qatar 1948', '4286'),
# ('4287 : Qornoq', '4287'),
# ('4288 : Loma Quintana', '4288'),
# ('4289 : Amersfoort', '4289'),
# ('4291 : SAD69', '4291'),
# ('4292 : Sapper Hill 1943', '4292'),
# ('4293 : Schwarzeck', '4293'),
# ('4294 : Segora', '4294'),
# ('4295 : Serindung', '4295'),
# ('4296 : Sudan', '4296'),
# ('4297 : Tananarive', '4297'),
# ('4298 : Timbalai 1948', '4298'),
# ('4299 : TM65', '4299'),
# ('4300 : TM75', '4300'),
# ('4301 : Tokyo', '4301'),
# ('4302 : Trinidad 1903', '4302'),
# ('4303 : TC(1948)', '4303'),
# ('4304 : Voirol 1875', '4304'),
# ('4306 : Bern 1938', '4306'),
# ('4307 : Nord Sahara 1959', '4307'),
# ('4308 : RT38', '4308'),
# ('4309 : Yacare', '4309'),
# ('4310 : Yoff', '4310'),
# ('4311 : Zanderij', '4311'),
# ('4312 : MGI', '4312'),
# ('4313 : Belge 1972', '4313'),
# ('4314 : DHDN', '4314'),
# ('4315 : Conakry 1905', '4315'),
# ('4316 : Dealul Piscului 1930', '4316'),
# ('4317 : Dealul Piscului 1970', '4317'),
# ('4318 : NGN', '4318'),
# ('4319 : KUDAMS', '4319'),
# ('4322 : WGS 72', '4322'),
# ('4324 : WGS 72BE', '4324'),
# ('4326 : WGS 84', '4326'),
# ('4463 : RGSPM06', '4463'),
# ('4470 : RGM04', '4470'),
# ('4475 : Cadastre 1997', '4475'),
# ('4483 : Mexico ITRF92', '4483'),
# ('4490 : China Geodetic Coordinate System 2000', '4490'),
# ('4555 : New Beijing', '4555'),
# ('4558 : RRAF 1991', '4558'),
# ('4600 : Anguilla 1957', '4600'),
# ('4601 : Antigua 1943', '4601'),
# ('4602 : Dominica 1945', '4602'),
# ('4603 : Grenada 1953', '4603'),
# ('4604 : Montserrat 1958', '4604'),
# ('4605 : St. Kitts 1955', '4605'),
# ('4606 : St. Lucia 1955', '4606'),
# ('4607 : St. Vincent 1945', '4607'),
# ('4608 : NAD27(76)', '4608'),
# ('4609 : NAD27(CGQ77)', '4609'),
# ('4610 : Xian 1980', '4610'),
# ('4611 : Hong Kong 1980', '4611'),
# ('4612 : JGD2000', '4612'),
# ('4613 : Segara', '4613'),
# ('4614 : QND95', '4614'),
# ('4615 : Porto Santo', '4615'),
# ('4616 : Selvagem Grande', '4616'),
# ('4617 : NAD83(CSRS)', '4617'),
# ('4618 : SAD69', '4618'),
# ('4619 : SWEREF99', '4619'),
# ('4620 : Point 58', '4620'),
# ('4621 : Fort Marigot', '4621'),
# ('4622 : Guadeloupe 1948', '4622'),
# ('4623 : CSG67', '4623'),
# ('4624 : RGFG95', '4624'),
# ('4625 : Martinique 1938', '4625'),
# ('4626 : Reunion 1947', '4626'),
# ('4627 : RGR92', '4627'),
# ('4628 : Tahiti 52', '4628'),
# ('4629 : Tahaa 54', '4629'),
# ('4630 : IGN72 Nuku Hiva', '4630'),
# ('4631 : K0 1949', '4631'),
# ('4632 : Combani 1950', '4632'),
# ('4633 : IGN56 Lifou', '4633'),
# ('4634 : IGN72 Grand Terre', '4634'),
# ('4635 : ST87 Ouvea', '4635'),
# ('4636 : Petrels 1972', '4636'),
# ('4637 : Perroud 1950', '4637'),
# ('4638 : Saint Pierre et Miquelon 1950', '4638'),
# ('4639 : MOP78', '4639'),
# ('4640 : RRAF 1991', '4640'),
# ('4641 : IGN53 Mare', '4641'),
# ('4642 : ST84 Ile des Pins', '4642'),
# ('4643 : ST71 Belep', '4643'),
# ('4644 : NEA74 Noumea', '4644'),
# ('4645 : RGNC 1991', '4645'),
# ('4646 : Grand Comoros', '4646'),
# ('4657 : Reykjavik 1900', '4657'),
# ('4658 : Hjorsey 1955', '4658'),
# ('4659 : ISN93', '4659'),
# ('4660 : Helle 1954', '4660'),
# ('4661 : LKS92', '4661'),
# ('4662 : IGN72 Grande Terre', '4662'),
# ('4663 : Porto Santo 1995', '4663'),
# ('4664 : Azores Oriental 1995', '4664'),
# ('4665 : Azores Central 1995', '4665'),
# ('4666 : Lisbon 1890', '4666'),
# ('4667 : IKBD-92', '4667'),
# ('4668 : ED79', '4668'),
# ('4669 : LKS94', '4669'),
# ('4670 : IGM95', '4670'),
# ('4671 : Voirol 1879', '4671'),
# ('4672 : Chatham Islands 1971', '4672'),
# ('4673 : Chatham Islands 1979', '4673'),
# ('4674 : SIRGAS 2000', '4674'),
# ('4675 : Guam 1963', '4675'),
# ('4676 : Vientiane 1982', '4676'),
# ('4677 : Lao 1993', '4677'),
# ('4678 : Lao 1997', '4678'),
# ('4679 : Jouik 1961', '4679'),
# ('4680 : Nouakchott 1965', '4680'),
# ('4681 : Mauritania 1999', '4681'),
# ('4682 : Gulshan 303', '4682'),
# ('4683 : PRS92', '4683'),
# ('4684 : Gan 1970', '4684'),
# ('4685 : Gandajika', '4685'),
# ('4686 : MAGNA-SIRGAS', '4686'),
# ('4687 : RGPF', '4687'),
# ('4688 : Fatu Iva 72', '4688'),
# ('4689 : IGN63 Hiva Oa', '4689'),
# ('4690 : Tahiti 79', '4690'),
# ('4691 : Moorea 87', '4691'),
# ('4692 : Maupiti 83', '4692'),
# ('4693 : Nakhl-e Ghanem', '4693'),
# ('4694 : POSGAR 94', '4694'),
# ('4695 : Katanga 1955', '4695'),
# ('4696 : Kasai 1953', '4696'),
# ('4697 : IGC 1962 6th Parallel South', '4697'),
# ('4698 : IGN 1962 Kerguelen', '4698'),
# ('4699 : Le Pouce 1934', '4699'),
# ('4700 : IGN Astro 1960', '4700'),
# ('4701 : IGCB 1955', '4701'),
# ('4702 : Mauritania 1999', '4702'),
# ('4703 : Mhast 1951', '4703'),
# ('4704 : Mhast (onshore)', '4704'),
# ('4705 : Mhast (offshore)', '4705'),
# ('4706 : Egypt Gulf of Suez S-650 TL', '4706'),
# ('4707 : Tern Island 1961', '4707'),
# ('4708 : Cocos Islands 1965', '4708'),
# ('4709 : Iwo Jima 1945', '4709'),
# ('4710 : St. Helena 1971', '4710'),
# ('4711 : Marcus Island 1952', '4711'),
# ('4712 : Ascension Island 1958', '4712'),
# ('4713 : Ayabelle Lighthouse', '4713'),
# ('4714 : Bellevue', '4714'),
# ('4715 : Camp Area Astro', '4715'),
# ('4716 : Phoenix Islands 1966', '4716'),
# ('4717 : Cape Canaveral', '4717'),
# ('4718 : Solomon 1968', '4718'),
# ('4719 : Easter Island 1967', '4719'),
# ('4720 : Fiji 1986', '4720'),
# ('4721 : Fiji 1956', '4721'),
# ('4722 : South Georgia 1968', '4722'),
# ('4723 : GCGD59', '4723'),
# ('4724 : Diego Garcia 1969', '4724'),
# ('4725 : Johnston Island 1961', '4725'),
# ('4726 : SIGD61', '4726'),
# ('4727 : Midway 1961', '4727'),
# ('4728 : Pico de las Nieves 1984', '4728'),
# ('4729 : Pitcairn 1967', '4729'),
# ('4730 : Santo 1965', '4730'),
# ('4731 : Viti Levu 1916', '4731'),
# ('4732 : Marshall Islands 1960', '4732'),
# ('4733 : Wake Island 1952', '4733'),
# ('4734 : Tristan 1968', '4734'),
# ('4735 : Kusaie 1951', '4735'),
# ('4736 : Deception Island', '4736'),
# ('4737 : Korea 2000', '4737'),
# ('4738 : Hong Kong 1963', '4738'),
# ('4739 : Hong Kong 1963(67)', '4739'),
# ('4740 : PZ-90', '4740'),
# ('4741 : FD54', '4741'),
# ('4742 : GDM2000', '4742'),
# ('4743 : Karbala 1979', '4743'),
# ('4744 : Nahrwan 1934', '4744'),
# ('4745 : RD/83', '4745'),
# ('4746 : PD/83', '4746'),
# ('4747 : GR96', '4747'),
# ('4748 : Vanua Levu 1915', '4748'),
# ('4749 : RGNC91-93', '4749'),
# ('4750 : ST87 Ouvea', '4750'),
# ('4751 : Kertau (RSO)', '4751'),
# ('4752 : Viti Levu 1912', '4752'),
# ('4753 : fk89', '4753'),
# ('4754 : LGD2006', '4754'),
# ('4755 : DGN95', '4755'),
# ('4756 : VN-2000', '4756'),
# ('4757 : SVY21', '4757'),
# ('4758 : JAD2001', '4758'),
# ('4759 : NAD83(NSRS2007)', '4759'),
# ('4760 : WGS 66', '4760'),
# ('4761 : HTRS96', '4761'),
# ('4762 : BDA2000', '4762'),
# ('4763 : Pitcairn 2006', '4763'),
# ('4764 : RSRGD2000', '4764'),
# ('4765 : Slovenia 1996', '4765'),
# ('4801 : Bern 1898 (Bern)', '4801'),
# ('4802 : Bogota 1975 (Bogota)', '4802'),
# ('4803 : Lisbon (Lisbon)', '4803'),
# ('4804 : Makassar (Jakarta)', '4804'),
# ('4805 : MGI (Ferro)', '4805'),
# ('4806 : Monte Mario (Rome)', '4806'),
# ('4807 : NTF (Paris)', '4807'),
# ('4808 : Padang (Jakarta)', '4808'),
# ('4809 : Belge 1950 (Brussels)', '4809'),
# ('4810 : Tananarive (Paris)', '4810'),
# ('4811 : Voirol 1875 (Paris)', '4811'),
# ('4813 : Batavia (Jakarta)', '4813'),
# ('4814 : RT38 (Stockholm)', '4814'),
# ('4815 : Greek (Athens)', '4815'),
# ('4816 : Carthage (Paris)', '4816'),
# ('4817 : NGO 1948 (Oslo)', '4817'),
# ('4818 : S-JTSK (Ferro)', '4818'),
# ('4819 : Nord Sahara 1959 (Paris)', '4819'),
# ('4820 : Segara (Jakarta)', '4820'),
# ('4821 : Voirol 1879 (Paris)', '4821'),
# ('4823 : Sao Tome', '4823'),
# ('4824 : Principe', '4824'),
# ('4901 : ATF (Paris)', '4901'),
# ('4902 : NDG (Paris)', '4902'),
# ('4903 : Madrid 1870 (Madrid)', '4903'),
# ('4904 : Lisbon 1890 (Lisbon)', '4904'),
# ('5013 : PTRA08', '5013'),
# ('5132 : Tokyo 1892', '5132'),
# ('5228 : S-JTSK/05', '5228'),
# ('5229 : S-JTSK/05 (Ferro)', '5229'),
# ('5233 : SLD99', '5233'),
# ('5246 : GDBD2009', '5246'),
# ('5252 : TUREF', '5252'),
# ('5264 : DRUKREF 03', '5264'),
# ('5324 : ISN2004', '5324'),
# ('5340 : POSGAR 2007', '5340'),
# ('5354 : MARGEN', '5354'),
# ('5360 : SIRGAS-Chile', '5360'),
# ('5365 : CR05', '5365'),
# ('5371 : MACARIO SOLIS', '5371'),
# ('5373 : Peru96', '5373'),
# ('5381 : SIRGAS-ROU98', '5381'),
# ('5393 : SIRGAS_ES2007.8', '5393'),
# ('5451 : Ocotepeque 1935', '5451'),
# ('5464 : Sibun Gorge 1922', '5464'),
# ('5467 : Panama-Colon 1911', '5467'),
# ('5489 : RGAF09', '5489'),
# ('5524 : Corrego Alegre 1961', '5524'),
# ('5527 : SAD69(96)', '5527'),
# ('5546 : PNG94', '5546'),
# ('5561 : UCS-2000', '5561'),
# ('5593 : FEH2010', '5593'),
# ('5681 : DB_REF', '5681'),
# ('5886 : TGD2005', '5886'),
# ('6135 : CIGD11', '6135'),
# ('6207 : Nepal 1981', '6207'),
# ('6311 : CGRS93', '6311'),
# ('6318 : NAD83(2011)', '6318'),
# ('6322 : NAD83(PA11)', '6322'),
# ('6325 : NAD83(MA11)', '6325'),
# ('6365 : Mexico ITRF2008', '6365'),
# ('6668 : JGD2011', '6668'),
# ('6706 : RDN2008', '6706'),
# ('6783 : NAD83(CORS96)', '6783'),
# ('6881 : Aden 1925', '6881'),
# ('6882 : Bekaa Valley 1920', '6882'),
# ('6883 : Bioko', '6883'),
# ('6892 : South East Island 1943', '6892'),
# ('6894 : Gambia', '6894'),
# ('6980 : IGD05', '6980'),
# ('6983 : IG05 Intermediate CRS', '6983'),
# ('6987 : IGD05/12', '6987'),
# ('6990 : IG05/12 Intermediate CRS', '6990'),
# ('7035 : RGSPM06 (lon-lat)', '7035'),
# ('7037 : RGR92 (lon-lat)', '7037'),
# ('7039 : RGM04 (lon-lat)', '7039'),
# ('7041 : RGFG95 (lon-lat)', '7041'),
# ('7073 : RGTAAF07', '7073'),
# ('7084 : RGF93 (lon-lat)', '7084'),
# ('7086 : RGAF09 (lon-lat)', '7086'),
# ('7088 : RGTAAF07 (lon-lat)', '7088'),
# ('7133 : RGTAAF07 (lon-lat)', '7133'),
# ('7136 : IGD05', '7136'),
# ('7139 : IGD05/12', '7139'),
# ('7373 : ONGD14', '7373'),
('2000 : Anguilla 1957 / British West Indies Grid', '2000'),
('2001 : Antigua 1943 / British West Indies Grid', '2001'),
('2002 : Dominica 1945 / British West Indies Grid', '2002'),
('2003 : Grenada 1953 / British West Indies Grid', '2003'),
('2004 : Montserrat 1958 / British West Indies Grid', '2004'),
('2005 : St. Kitts 1955 / British West Indies Grid', '2005'),
('2006 : St. Lucia 1955 / British West Indies Grid', '2006'),
('2007 : St. Vincent 45 / British West Indies Grid', '2007'),
('2008 : NAD27(CGQ77) / SCoPQ zone 2', '2008'),
('2009 : NAD27(CGQ77) / SCoPQ zone 3', '2009'),
('2010 : NAD27(CGQ77) / SCoPQ zone 4', '2010'),
('2011 : NAD27(CGQ77) / SCoPQ zone 5', '2011'),
('2012 : NAD27(CGQ77) / SCoPQ zone 6', '2012'),
('2013 : NAD27(CGQ77) / SCoPQ zone 7', '2013'),
('2014 : NAD27(CGQ77) / SCoPQ zone 8', '2014'),
('2015 : NAD27(CGQ77) / SCoPQ zone 9', '2015'),
('2016 : NAD27(CGQ77) / SCoPQ zone 10', '2016'),
('2017 : NAD27(76) / MTM zone 8', '2017'),
('2018 : NAD27(76) / MTM zone 9', '2018'),
('2019 : NAD27(76) / MTM zone 10', '2019'),
('2020 : NAD27(76) / MTM zone 11', '2020'),
('2021 : NAD27(76) / MTM zone 12', '2021'),
('2022 : NAD27(76) / MTM zone 13', '2022'),
('2023 : NAD27(76) / MTM zone 14', '2023'),
('2024 : NAD27(76) / MTM zone 15', '2024'),
('2025 : NAD27(76) / MTM zone 16', '2025'),
('2026 : NAD27(76) / MTM zone 17', '2026'),
('2027 : NAD27(76) / UTM zone 15N', '2027'),
('2028 : NAD27(76) / UTM zone 16N', '2028'),
('2029 : NAD27(76) / UTM zone 17N', '2029'),
('2030 : NAD27(76) / UTM zone 18N', '2030'),
('2031 : NAD27(CGQ77) / UTM zone 17N', '2031'),
('2032 : NAD27(CGQ77) / UTM zone 18N', '2032'),
('2033 : NAD27(CGQ77) / UTM zone 19N', '2033'),
('2034 : NAD27(CGQ77) / UTM zone 20N', '2034'),
('2035 : NAD27(CGQ77) / UTM zone 21N', '2035'),
('2036 : NAD83(CSRS98) / New Brunswick Stereo', '2036'),
('2037 : NAD83(CSRS98) / UTM zone 19N', '2037'),
('2038 : NAD83(CSRS98) / UTM zone 20N', '2038'),
('2039 : Israel 1993 / Israeli TM Grid', '2039'),
('2040 : Locodjo 1965 / UTM zone 30N', '2040'),
('2041 : Abidjan 1987 / UTM zone 30N', '2041'),
('2042 : Locodjo 1965 / UTM zone 29N', '2042'),
('2043 : Abidjan 1987 / UTM zone 29N', '2043'),
('2044 : Hanoi 1972 / Gauss-Kruger zone 18', '2044'),
('2045 : Hanoi 1972 / Gauss-Kruger zone 19', '2045'),
('2046 : Hartebeesthoek94 / Lo15', '2046'),
('2047 : Hartebeesthoek94 / Lo17', '2047'),
('2048 : Hartebeesthoek94 / Lo19', '2048'),
('2049 : Hartebeesthoek94 / Lo21', '2049'),
('2050 : Hartebeesthoek94 / Lo23', '2050'),
('2051 : Hartebeesthoek94 / Lo25', '2051'),
('2052 : Hartebeesthoek94 / Lo27', '2052'),
('2053 : Hartebeesthoek94 / Lo29', '2053'),
('2054 : Hartebeesthoek94 / Lo31', '2054'),
('2055 : Hartebeesthoek94 / Lo33', '2055'),
('2056 : CH1903+ / LV95', '2056'),
('2057 : Rassadiran / Nakhl e Taqi', '2057'),
('2058 : ED50(ED77) / UTM zone 38N', '2058'),
('2059 : ED50(ED77) / UTM zone 39N', '2059'),
('2060 : ED50(ED77) / UTM zone 40N', '2060'),
('2061 : ED50(ED77) / UTM zone 41N', '2061'),
('2062 : Madrid 1870 (Madrid) / Spain', '2062'),
('2063 : Dabola 1981 / UTM zone 28N', '2063'),
('2064 : Dabola 1981 / UTM zone 29N', '2064'),
('2065 : S-JTSK (Ferro) / Krovak', '2065'),
('2066 : Mount Dillon / Tobago Grid', '2066'),
('2067 : Naparima 1955 / UTM zone 20N', '2067'),
('2068 : ELD79 / Libya zone 5', '2068'),
('2069 : ELD79 / Libya zone 6', '2069'),
('2070 : ELD79 / Libya zone 7', '2070'),
('2071 : ELD79 / Libya zone 8', '2071'),
('2072 : ELD79 / Libya zone 9', '2072'),
('2073 : ELD79 / Libya zone 10', '2073'),
('2074 : ELD79 / Libya zone 11', '2074'),
('2075 : ELD79 / Libya zone 12', '2075'),
('2076 : ELD79 / Libya zone 13', '2076'),
('2077 : ELD79 / UTM zone 32N', '2077'),
('2078 : ELD79 / UTM zone 33N', '2078'),
('2079 : ELD79 / UTM zone 34N', '2079'),
('2080 : ELD79 / UTM zone 35N', '2080'),
('2081 : Chos Malal 1914 / Argentina 2', '2081'),
('2082 : Pampa del Castillo / Argentina 2', '2082'),
('2083 : Hito XVIII 1963 / Argentina 2', '2083'),
('2084 : Hito XVIII 1963 / UTM zone 19S', '2084'),
('2085 : NAD27 / Cuba Norte', '2085'),
('2086 : NAD27 / Cuba Sur', '2086'),
('2087 : ELD79 / TM 12 NE', '2087'),
('2088 : Carthage / TM 11 NE', '2088'),
('2089 : Yemen NGN96 / UTM zone 38N', '2089'),
('2090 : Yemen NGN96 / UTM zone 39N', '2090'),
('2091 : South Yemen / Gauss Kruger zone 8', '2091'),
('2092 : South Yemen / Gauss Kruger zone 9', '2092'),
('2093 : Hanoi 1972 / GK 106 NE', '2093'),
('2094 : WGS 72BE / TM 106 NE', '2094'),
('2095 : Bissau / UTM zone 28N', '2095'),
('2096 : Korean 1985 / East Belt', '2096'),
('2097 : Korean 1985 / Central Belt', '2097'),
('2098 : Korean 1985 / West Belt', '2098'),
('2099 : Qatar 1948 / Qatar Grid', '2099'),
('2100 : GGRS87 / Greek Grid', '2100'),
('2101 : Lake / Maracaibo Grid M1', '2101'),
('2102 : Lake / Maracaibo Grid', '2102'),
('2103 : Lake / Maracaibo Grid M3', '2103'),
('2104 : Lake / Maracaibo La Rosa Grid', '2104'),
('2105 : NZGD2000 / Mount Eden 2000', '2105'),
('2106 : NZGD2000 / Bay of Plenty 2000', '2106'),
('2107 : NZGD2000 / Poverty Bay 2000', '2107'),
('2108 : NZGD2000 / Hawkes Bay 2000', '2108'),
('2109 : NZGD2000 / Taranaki 2000', '2109'),
('2110 : NZGD2000 / Tuhirangi 2000', '2110'),
('2111 : NZGD2000 / Wanganui 2000', '2111'),
('2112 : NZGD2000 / Wairarapa 2000', '2112'),
('2113 : NZGD2000 / Wellington 2000', '2113'),
('2114 : NZGD2000 / Collingwood 2000', '2114'),
('2115 : NZGD2000 / Nelson 2000', '2115'),
('2116 : NZGD2000 / Karamea 2000', '2116'),
('2117 : NZGD2000 / Buller 2000', '2117'),
('2118 : NZGD2000 / Grey 2000', '2118'),
('2119 : NZGD2000 / Amuri 2000', '2119'),
('2120 : NZGD2000 / Marlborough 2000', '2120'),
('2121 : NZGD2000 / Hokitika 2000', '2121'),
('2122 : NZGD2000 / Okarito 2000', '2122'),
('2123 : NZGD2000 / Jacksons Bay 2000', '2123'),
('2124 : NZGD2000 / Mount Pleasant 2000', '2124'),
('2125 : NZGD2000 / Gawler 2000', '2125'),
('2126 : NZGD2000 / Timaru 2000', '2126'),
('2127 : NZGD2000 / Lindis Peak 2000', '2127'),
('2128 : NZGD2000 / Mount Nicholas 2000', '2128'),
('2129 : NZGD2000 / Mount York 2000', '2129'),
('2130 : NZGD2000 / Observation Point 2000', '2130'),
('2131 : NZGD2000 / North Taieri 2000', '2131'),
('2132 : NZGD2000 / Bluff 2000', '2132'),
('2133 : NZGD2000 / UTM zone 58S', '2133'),
('2134 : NZGD2000 / UTM zone 59S', '2134'),
('2135 : NZGD2000 / UTM zone 60S', '2135'),
('2136 : Accra / Ghana National Grid', '2136'),
('2137 : Accra / TM 1 NW', '2137'),
('2138 : NAD27(CGQ77) / Quebec Lambert', '2138'),
('2139 : NAD83(CSRS98) / SCoPQ zone 2', '2139'),
('2140 : NAD83(CSRS98) / MTM zone 3', '2140'),
('2141 : NAD83(CSRS98) / MTM zone 4', '2141'),
('2142 : NAD83(CSRS98) / MTM zone 5', '2142'),
('2143 : NAD83(CSRS98) / MTM zone 6', '2143'),
('2144 : NAD83(CSRS98) / MTM zone 7', '2144'),
('2145 : NAD83(CSRS98) / MTM zone 8', '2145'),
('2146 : NAD83(CSRS98) / MTM zone 9', '2146'),
('2147 : NAD83(CSRS98) / MTM zone 10', '2147'),
('2148 : NAD83(CSRS98) / UTM zone 21N', '2148'),
('2149 : NAD83(CSRS98) / UTM zone 18N', '2149'),
('2150 : NAD83(CSRS98) / UTM zone 17N', '2150'),
('2151 : NAD83(CSRS98) / UTM zone 13N', '2151'),
('2152 : NAD83(CSRS98) / UTM zone 12N', '2152'),
('2153 : NAD83(CSRS98) / UTM zone 11N', '2153'),
('2154 : RGF93 / Lambert-93', '2154'),
('2155 : American Samoa 1962 / American Samoa Lambert', '2155'),
('2156 : NAD83(HARN) / UTM zone 59S', '2156'),
('2157 : IRENET95 / Irish Transverse Mercator', '2157'),
('2158 : IRENET95 / UTM zone 29N', '2158'),
('2159 : Sierra Leone 1924 / New Colony Grid', '2159'),
('2160 : Sierra Leone 1924 / New War Office Grid', '2160'),
('2161 : Sierra Leone 1968 / UTM zone 28N', '2161'),
('2162 : Sierra Leone 1968 / UTM zone 29N', '2162'),
('2163 : US National Atlas Equal Area', '2163'),
('2164 : Locodjo 1965 / TM 5 NW', '2164'),
('2165 : Abidjan 1987 / TM 5 NW', '2165'),
('2166 : Pulkovo 1942(83) / Gauss Kruger zone 3', '2166'),
('2167 : Pulkovo 1942(83) / Gauss Kruger zone 4', '2167'),
('2168 : Pulkovo 1942(83) / Gauss Kruger zone 5', '2168'),
('2169 : Luxembourg 1930 / Gauss', '2169'),
('2170 : MGI / Slovenia Grid', '2170'),
('2171 : Pulkovo 1942(58) / Poland zone I', '2171'),
('2172 : Pulkovo 1942(58) / Poland zone II', '2172'),
('2173 : Pulkovo 1942(58) / Poland zone III', '2173'),
('2174 : Pulkovo 1942(58) / Poland zone IV', '2174'),
('2175 : Pulkovo 1942(58) / Poland zone V', '2175'),
('2176 : ETRS89 / Poland CS2000 zone 5', '2176'),
('2177 : ETRS89 / Poland CS2000 zone 6', '2177'),
('2178 : ETRS89 / Poland CS2000 zone 7', '2178'),
('2179 : ETRS89 / Poland CS2000 zone 8', '2179'),
('2180 : ETRS89 / Poland CS92', '2180'),
('2188 : Azores Occidental 1939 / UTM zone 25N', '2188'),
('2189 : Azores Central 1948 / UTM zone 26N', '2189'),
('2190 : Azores Oriental 1940 / UTM zone 26N', '2190'),
('2191 : Madeira 1936 / UTM zone 28N', '2191'),
('2192 : ED50 / France EuroLambert', '2192'),
('2193 : NZGD2000 / New Zealand Transverse Mercator 2000', '2193'),
('2194 : American Samoa 1962 / American Samoa Lambert', '2194'),
('2195 : NAD83(HARN) / UTM zone 2S', '2195'),
('2196 : ETRS89 / Kp2000 Jutland', '2196'),
('2197 : ETRS89 / Kp2000 Zealand', '2197'),
('2198 : ETRS89 / Kp2000 Bornholm', '2198'),
('2199 : Albanian 1987 / Gauss Kruger zone 4', '2199'),
('2200 : ATS77 / New Brunswick Stereographic (ATS77)', '2200'),
('2201 : REGVEN / UTM zone 18N', '2201'),
('2202 : REGVEN / UTM zone 19N', '2202'),
('2203 : REGVEN / UTM zone 20N', '2203'),
('2204 : NAD27 / Tennessee', '2204'),
('2205 : NAD83 / Kentucky North', '2205'),
('2206 : ED50 / 3-degree Gauss-Kruger zone 9', '2206'),
('2207 : ED50 / 3-degree Gauss-Kruger zone 10', '2207'),
('2208 : ED50 / 3-degree Gauss-Kruger zone 11', '2208'),
('2209 : ED50 / 3-degree Gauss-Kruger zone 12', '2209'),
('2210 : ED50 / 3-degree Gauss-Kruger zone 13', '2210'),
('2211 : ED50 / 3-degree Gauss-Kruger zone 14', '2211'),
('2212 : ED50 / 3-degree Gauss-Kruger zone 15', '2212'),
('2213 : ETRS89 / TM 30 NE', '2213'),
('2214 : Douala 1948 / AOF west', '2214'),
('2215 : Manoca 1962 / UTM zone 32N', '2215'),
('2216 : Qornoq 1927 / UTM zone 22N', '2216'),
('2217 : Qornoq 1927 / UTM zone 23N', '2217'),
('2218 : Scoresbysund 1952 / Greenland zone 5 east', '2218'),
('2219 : ATS77 / UTM zone 19N', '2219'),
('2220 : ATS77 / UTM zone 20N', '2220'),
('2221 : Scoresbysund 1952 / Greenland zone 6 east', '2221'),
('2222 : NAD83 / Arizona East (ft)', '2222'),
('2223 : NAD83 / Arizona Central (ft)', '2223'),
('2224 : NAD83 / Arizona West (ft)', '2224'),
('2225 : NAD83 / California zone 1 (ftUS)', '2225'),
('2226 : NAD83 / California zone 2 (ftUS)', '2226'),
('2227 : NAD83 / California zone 3 (ftUS)', '2227'),
('2228 : NAD83 / California zone 4 (ftUS)', '2228'),
('2229 : NAD83 / California zone 5 (ftUS)', '2229'),
('2230 : NAD83 / California zone 6 (ftUS)', '2230'),
('2231 : NAD83 / Colorado North (ftUS)', '2231'),
('2232 : NAD83 / Colorado Central (ftUS)', '2232'),
('2233 : NAD83 / Colorado South (ftUS)', '2233'),
('2234 : NAD83 / Connecticut (ftUS)', '2234'),
('2235 : NAD83 / Delaware (ftUS)', '2235'),
('2236 : NAD83 / Florida East (ftUS)', '2236'),
('2237 : NAD83 / Florida West (ftUS)', '2237'),
('2238 : NAD83 / Florida North (ftUS)', '2238'),
('2239 : NAD83 / Georgia East (ftUS)', '2239'),
('2240 : NAD83 / Georgia West (ftUS)', '2240'),
('2241 : NAD83 / Idaho East (ftUS)', '2241'),
('2242 : NAD83 / Idaho Central (ftUS)', '2242'),
('2243 : NAD83 / Idaho West (ftUS)', '2243'),
('2244 : NAD83 / Indiana East (ftUS)', '2244'),
('2245 : NAD83 / Indiana West (ftUS)', '2245'),
('2246 : NAD83 / Kentucky North (ftUS)', '2246'),
('2247 : NAD83 / Kentucky South (ftUS)', '2247'),
('2248 : NAD83 / Maryland (ftUS)', '2248'),
('2249 : NAD83 / Massachusetts Mainland (ftUS)', '2249'),
('2250 : NAD83 / Massachusetts Island (ftUS)', '2250'),
('2251 : NAD83 / Michigan North (ft)', '2251'),
('2252 : NAD83 / Michigan Central (ft)', '2252'),
('2253 : NAD83 / Michigan South (ft)', '2253'),
('2254 : NAD83 / Mississippi East (ftUS)', '2254'),
('2255 : NAD83 / Mississippi West (ftUS)', '2255'),
('2256 : NAD83 / Montana (ft)', '2256'),
('2257 : NAD83 / New Mexico East (ftUS)', '2257'),
('2258 : NAD83 / New Mexico Central (ftUS)', '2258'),
('2259 : NAD83 / New Mexico West (ftUS)', '2259'),
('2260 : NAD83 / New York East (ftUS)', '2260'),
('2261 : NAD83 / New York Central (ftUS)', '2261'),
('2262 : NAD83 / New York West (ftUS)', '2262'),
('2263 : NAD83 / New York Long Island (ftUS)', '2263'),
('2264 : NAD83 / North Carolina (ftUS)', '2264'),
('2265 : NAD83 / North Dakota North (ft)', '2265'),
('2266 : NAD83 / North Dakota South (ft)', '2266'),
('2267 : NAD83 / Oklahoma North (ftUS)', '2267'),
('2268 : NAD83 / Oklahoma South (ftUS)', '2268'),
('2269 : NAD83 / Oregon North (ft)', '2269'),
('2270 : NAD83 / Oregon South (ft)', '2270'),
('2271 : NAD83 / Pennsylvania North (ftUS)', '2271'),
('2272 : NAD83 / Pennsylvania South (ftUS)', '2272'),
('2273 : NAD83 / South Carolina (ft)', '2273'),
('2274 : NAD83 / Tennessee (ftUS)', '2274'),
('2275 : NAD83 / Texas North (ftUS)', '2275'),
('2276 : NAD83 / Texas North Central (ftUS)', '2276'),
('2277 : NAD83 / Texas Central (ftUS)', '2277'),
('2278 : NAD83 / Texas South Central (ftUS)', '2278'),
('2279 : NAD83 / Texas South (ftUS)', '2279'),
('2280 : NAD83 / Utah North (ft)', '2280'),
('2281 : NAD83 / Utah Central (ft)', '2281'),
('2282 : NAD83 / Utah South (ft)', '2282'),
('2283 : NAD83 / Virginia North (ftUS)', '2283'),
('2284 : NAD83 / Virginia South (ftUS)', '2284'),
('2285 : NAD83 / Washington North (ftUS)', '2285'),
('2286 : NAD83 / Washington South (ftUS)', '2286'),
('2287 : NAD83 / Wisconsin North (ftUS)', '2287'),
('2288 : NAD83 / Wisconsin Central (ftUS)', '2288'),
('2289 : NAD83 / Wisconsin South (ftUS)', '2289'),
('2290 : ATS77 / Prince Edward Isl. Stereographic (ATS77)', '2290'),
('2291 : NAD83(CSRS98) / Prince Edward Isl. Stereographic (NAD83)', '2291'),
('2292 : NAD83(CSRS98) / Prince Edward Isl. Stereographic (NAD83)', '2292'),
('2294 : ATS77 / MTM Nova Scotia zone 4', '2294'),
('2295 : ATS77 / MTM Nova Scotia zone 5', '2295'),
('2296 : Ammassalik 1958 / Greenland zone 7 east', '2296'),
('2297 : Qornoq 1927 / Greenland zone 1 east', '2297'),
('2298 : Qornoq 1927 / Greenland zone 2 east', '2298'),
('2299 : Qornoq 1927 / Greenland zone 2 west', '2299'),
('2300 : Qornoq 1927 / Greenland zone 3 east', '2300'),
('2301 : Qornoq 1927 / Greenland zone 3 west', '2301'),
('2302 : Qornoq 1927 / Greenland zone 4 east', '2302'),
('2303 : Qornoq 1927 / Greenland zone 4 west', '2303'),
('2304 : Qornoq 1927 / Greenland zone 5 west', '2304'),
('2305 : Qornoq 1927 / Greenland zone 6 west', '2305'),
('2306 : Qornoq 1927 / Greenland zone 7 west', '2306'),
('2307 : Qornoq 1927 / Greenland zone 8 east', '2307'),
('2308 : Batavia / TM 109 SE', '2308'),
('2309 : WGS 84 / TM 116 SE', '2309'),
('2310 : WGS 84 / TM 132 SE', '2310'),
('2311 : WGS 84 / TM 6 NE', '2311'),
('2312 : Garoua / UTM zone 33N', '2312'),
('2313 : Kousseri / UTM zone 33N', '2313'),
('2314 : Trinidad 1903 / Trinidad Grid (ftCla)', '2314'),
('2315 : Campo Inchauspe / UTM zone 19S', '2315'),
('2316 : Campo Inchauspe / UTM zone 20S', '2316'),
('2317 : PSAD56 / ICN Regional', '2317'),
('2318 : Ain el Abd / Aramco Lambert', '2318'),
('2319 : ED50 / TM27', '2319'),
('2320 : ED50 / TM30', '2320'),
('2321 : ED50 / TM33', '2321'),
('2322 : ED50 / TM36', '2322'),
('2323 : ED50 / TM39', '2323'),
('2324 : ED50 / TM42', '2324'),
('2325 : ED50 / TM45', '2325'),
('2326 : Hong Kong 1980 Grid System', '2326'),
('2327 : Xian 1980 / Gauss-Kruger zone 13', '2327'),
('2328 : Xian 1980 / Gauss-Kruger zone 14', '2328'),
('2329 : Xian 1980 / Gauss-Kruger zone 15', '2329'),
('2330 : Xian 1980 / Gauss-Kruger zone 16', '2330'),
('2331 : Xian 1980 / Gauss-Kruger zone 17', '2331'),
('2332 : Xian 1980 / Gauss-Kruger zone 18', '2332'),
('2333 : Xian 1980 / Gauss-Kruger zone 19', '2333'),
('2334 : Xian 1980 / Gauss-Kruger zone 20', '2334'),
('2335 : Xian 1980 / Gauss-Kruger zone 21', '2335'),
('2336 : Xian 1980 / Gauss-Kruger zone 22', '2336'),
('2337 : Xian 1980 / Gauss-Kruger zone 23', '2337'),
('2338 : Xian 1980 / Gauss-Kruger CM 75E', '2338'),
('2339 : Xian 1980 / Gauss-Kruger CM 81E', '2339'),
('2340 : Xian 1980 / Gauss-Kruger CM 87E', '2340'),
('2341 : Xian 1980 / Gauss-Kruger CM 93E', '2341'),
('2342 : Xian 1980 / Gauss-Kruger CM 99E', '2342'),
('2343 : Xian 1980 / Gauss-Kruger CM 105E', '2343'),
('2344 : Xian 1980 / Gauss-Kruger CM 111E', '2344'),
('2345 : Xian 1980 / Gauss-Kruger CM 117E', '2345'),
('2346 : Xian 1980 / Gauss-Kruger CM 123E', '2346'),
('2347 : Xian 1980 / Gauss-Kruger CM 129E', '2347'),
('2348 : Xian 1980 / Gauss-Kruger CM 135E', '2348'),
('2349 : Xian 1980 / 3-degree Gauss-Kruger zone 25', '2349'),
('2350 : Xian 1980 / 3-degree Gauss-Kruger zone 26', '2350'),
('2351 : Xian 1980 / 3-degree Gauss-Kruger zone 27', '2351'),
('2352 : Xian 1980 / 3-degree Gauss-Kruger zone 28', '2352'),
('2353 : Xian 1980 / 3-degree Gauss-Kruger zone 29', '2353'),
('2354 : Xian 1980 / 3-degree Gauss-Kruger zone 30', '2354'),
('2355 : Xian 1980 / 3-degree Gauss-Kruger zone 31', '2355'),
('2356 : Xian 1980 / 3-degree Gauss-Kruger zone 32', '2356'),
('2357 : Xian 1980 / 3-degree Gauss-Kruger zone 33', '2357'),
('2358 : Xian 1980 / 3-degree Gauss-Kruger zone 34', '2358'),
('2359 : Xian 1980 / 3-degree Gauss-Kruger zone 35', '2359'),
('2360 : Xian 1980 / 3-degree Gauss-Kruger zone 36', '2360'),
('2361 : Xian 1980 / 3-degree Gauss-Kruger zone 37', '2361'),
('2362 : Xian 1980 / 3-degree Gauss-Kruger zone 38', '2362'),
('2363 : Xian 1980 / 3-degree Gauss-Kruger zone 39', '2363'),
('2364 : Xian 1980 / 3-degree Gauss-Kruger zone 40', '2364'),
('2365 : Xian 1980 / 3-degree Gauss-Kruger zone 41', '2365'),
('2366 : Xian 1980 / 3-degree Gauss-Kruger zone 42', '2366'),
('2367 : Xian 1980 / 3-degree Gauss-Kruger zone 43', '2367'),
('2368 : Xian 1980 / 3-degree Gauss-Kruger zone 44', '2368'),
('2369 : Xian 1980 / 3-degree Gauss-Kruger zone 45', '2369'),
('2370 : Xian 1980 / 3-degree Gauss-Kruger CM 75E', '2370'),
('2371 : Xian 1980 / 3-degree Gauss-Kruger CM 78E', '2371'),
('2372 : Xian 1980 / 3-degree Gauss-Kruger CM 81E', '2372'),
('2373 : Xian 1980 / 3-degree Gauss-Kruger CM 84E', '2373'),
('2374 : Xian 1980 / 3-degree Gauss-Kruger CM 87E', '2374'),
('2375 : Xian 1980 / 3-degree Gauss-Kruger CM 90E', '2375'),
('2376 : Xian 1980 / 3-degree Gauss-Kruger CM 93E', '2376'),
('2377 : Xian 1980 / 3-degree Gauss-Kruger CM 96E', '2377'),
('2378 : Xian 1980 / 3-degree Gauss-Kruger CM 99E', '2378'),
('2379 : Xian 1980 / 3-degree Gauss-Kruger CM 102E', '2379'),
('2380 : Xian 1980 / 3-degree Gauss-Kruger CM 105E', '2380'),
('2381 : Xian 1980 / 3-degree Gauss-Kruger CM 108E', '2381'),
('2382 : Xian 1980 / 3-degree Gauss-Kruger CM 111E', '2382'),
('2383 : Xian 1980 / 3-degree Gauss-Kruger CM 114E', '2383'),
('2384 : Xian 1980 / 3-degree Gauss-Kruger CM 117E', '2384'),
('2385 : Xian 1980 / 3-degree Gauss-Kruger CM 120E', '2385'),
('2386 : Xian 1980 / 3-degree Gauss-Kruger CM 123E', '2386'),
('2387 : Xian 1980 / 3-degree Gauss-Kruger CM 126E', '2387'),
('2388 : Xian 1980 / 3-degree Gauss-Kruger CM 129E', '2388'),
('2389 : Xian 1980 / 3-degree Gauss-Kruger CM 132E', '2389'),
('2390 : Xian 1980 / 3-degree Gauss-Kruger CM 135E', '2390'),
('2391 : KKJ / Finland zone 1', '2391'),
('2392 : KKJ / Finland zone 2', '2392'),
('2393 : KKJ / Finland Uniform Coordinate System', '2393'),
('2394 : KKJ / Finland zone 4', '2394'),
('2395 : South Yemen / Gauss-Kruger zone 8', '2395'),
('2396 : South Yemen / Gauss-Kruger zone 9', '2396'),
('2397 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 3', '2397'),
('2398 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 4', '2398'),
('2399 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 5', '2399'),
('2400 : RT90 2.5 gon W', '2400'),
('2401 : Beijing 1954 / 3-degree Gauss-Kruger zone 25', '2401'),
('2402 : Beijing 1954 / 3-degree Gauss-Kruger zone 26', '2402'),
('2403 : Beijing 1954 / 3-degree Gauss-Kruger zone 27', '2403'),
('2404 : Beijing 1954 / 3-degree Gauss-Kruger zone 28', '2404'),
('2405 : Beijing 1954 / 3-degree Gauss-Kruger zone 29', '2405'),
('2406 : Beijing 1954 / 3-degree Gauss-Kruger zone 30', '2406'),
('2407 : Beijing 1954 / 3-degree Gauss-Kruger zone 31', '2407'),
('2408 : Beijing 1954 / 3-degree Gauss-Kruger zone 32', '2408'),
('2409 : Beijing 1954 / 3-degree Gauss-Kruger zone 33', '2409'),
('2410 : Beijing 1954 / 3-degree Gauss-Kruger zone 34', '2410'),
('2411 : Beijing 1954 / 3-degree Gauss-Kruger zone 35', '2411'),
('2412 : Beijing 1954 / 3-degree Gauss-Kruger zone 36', '2412'),
('2413 : Beijing 1954 / 3-degree Gauss-Kruger zone 37', '2413'),
('2414 : Beijing 1954 / 3-degree Gauss-Kruger zone 38', '2414'),
('2415 : Beijing 1954 / 3-degree Gauss-Kruger zone 39', '2415'),
('2416 : Beijing 1954 / 3-degree Gauss-Kruger zone 40', '2416'),
('2417 : Beijing 1954 / 3-degree Gauss-Kruger zone 41', '2417'),
('2418 : Beijing 1954 / 3-degree Gauss-Kruger zone 42', '2418'),
('2419 : Beijing 1954 / 3-degree Gauss-Kruger zone 43', '2419'),
('2420 : Beijing 1954 / 3-degree Gauss-Kruger zone 44', '2420'),
('2421 : Beijing 1954 / 3-degree Gauss-Kruger zone 45', '2421'),
('2422 : Beijing 1954 / 3-degree Gauss-Kruger CM 75E', '2422'),
('2423 : Beijing 1954 / 3-degree Gauss-Kruger CM 78E', '2423'),
('2424 : Beijing 1954 / 3-degree Gauss-Kruger CM 81E', '2424'),
('2425 : Beijing 1954 / 3-degree Gauss-Kruger CM 84E', '2425'),
('2426 : Beijing 1954 / 3-degree Gauss-Kruger CM 87E', '2426'),
('2427 : Beijing 1954 / 3-degree Gauss-Kruger CM 90E', '2427'),
('2428 : Beijing 1954 / 3-degree Gauss-Kruger CM 93E', '2428'),
('2429 : Beijing 1954 / 3-degree Gauss-Kruger CM 96E', '2429'),
('2430 : Beijing 1954 / 3-degree Gauss-Kruger CM 99E', '2430'),
('2431 : Beijing 1954 / 3-degree Gauss-Kruger CM 102E', '2431'),
('2432 : Beijing 1954 / 3-degree Gauss-Kruger CM 105E', '2432'),
('2433 : Beijing 1954 / 3-degree Gauss-Kruger CM 108E', '2433'),
('2434 : Beijing 1954 / 3-degree Gauss-Kruger CM 111E', '2434'),
('2435 : Beijing 1954 / 3-degree Gauss-Kruger CM 114E', '2435'),
('2436 : Beijing 1954 / 3-degree Gauss-Kruger CM 117E', '2436'),
('2437 : Beijing 1954 / 3-degree Gauss-Kruger CM 120E', '2437'),
('2438 : Beijing 1954 / 3-degree Gauss-Kruger CM 123E', '2438'),
('2439 : Beijing 1954 / 3-degree Gauss-Kruger CM 126E', '2439'),
('2440 : Beijing 1954 / 3-degree Gauss-Kruger CM 129E', '2440'),
('2441 : Beijing 1954 / 3-degree Gauss-Kruger CM 132E', '2441'),
('2442 : Beijing 1954 / 3-degree Gauss-Kruger CM 135E', '2442'),
('2443 : JGD2000 / Japan Plane Rectangular CS I', '2443'),
('2444 : JGD2000 / Japan Plane Rectangular CS II', '2444'),
('2445 : JGD2000 / Japan Plane Rectangular CS III', '2445'),
('2446 : JGD2000 / Japan Plane Rectangular CS IV', '2446'),
('2447 : JGD2000 / Japan Plane Rectangular CS V', '2447'),
('2448 : JGD2000 / Japan Plane Rectangular CS VI', '2448'),
('2449 : JGD2000 / Japan Plane Rectangular CS VII', '2449'),
('2450 : JGD2000 / Japan Plane Rectangular CS VIII', '2450'),
('2451 : JGD2000 / Japan Plane Rectangular CS IX', '2451'),
('2452 : JGD2000 / Japan Plane Rectangular CS X', '2452'),
('2453 : JGD2000 / Japan Plane Rectangular CS XI', '2453'),
('2454 : JGD2000 / Japan Plane Rectangular CS XII', '2454'),
('2455 : JGD2000 / Japan Plane Rectangular CS XIII', '2455'),
('2456 : JGD2000 / Japan Plane Rectangular CS XIV', '2456'),
('2457 : JGD2000 / Japan Plane Rectangular CS XV', '2457'),
('2458 : JGD2000 / Japan Plane Rectangular CS XVI', '2458'),
('2459 : JGD2000 / Japan Plane Rectangular CS XVII', '2459'),
('2460 : JGD2000 / Japan Plane Rectangular CS XVIII', '2460'),
('2461 : JGD2000 / Japan Plane Rectangular CS XIX', '2461'),
('2462 : Albanian 1987 / Gauss-Kruger zone 4', '2462'),
('2463 : Pulkovo 1995 / Gauss-Kruger CM 21E', '2463'),
('2464 : Pulkovo 1995 / Gauss-Kruger CM 27E', '2464'),
('2465 : Pulkovo 1995 / Gauss-Kruger CM 33E', '2465'),
('2466 : Pulkovo 1995 / Gauss-Kruger CM 39E', '2466'),
('2467 : Pulkovo 1995 / Gauss-Kruger CM 45E', '2467'),
('2468 : Pulkovo 1995 / Gauss-Kruger CM 51E', '2468'),
('2469 : Pulkovo 1995 / Gauss-Kruger CM 57E', '2469'),
('2470 : Pulkovo 1995 / Gauss-Kruger CM 63E', '2470'),
('2471 : Pulkovo 1995 / Gauss-Kruger CM 69E', '2471'),
('2472 : Pulkovo 1995 / Gauss-Kruger CM 75E', '2472'),
('2473 : Pulkovo 1995 / Gauss-Kruger CM 81E', '2473'),
('2474 : Pulkovo 1995 / Gauss-Kruger CM 87E', '2474'),
('2475 : Pulkovo 1995 / Gauss-Kruger CM 93E', '2475'),
('2476 : Pulkovo 1995 / Gauss-Kruger CM 99E', '2476'),
('2477 : Pulkovo 1995 / Gauss-Kruger CM 105E', '2477'),
('2478 : Pulkovo 1995 / Gauss-Kruger CM 111E', '2478'),
('2479 : Pulkovo 1995 / Gauss-Kruger CM 117E', '2479'),
('2480 : Pulkovo 1995 / Gauss-Kruger CM 123E', '2480'),
('2481 : Pulkovo 1995 / Gauss-Kruger CM 129E', '2481'),
('2482 : Pulkovo 1995 / Gauss-Kruger CM 135E', '2482'),
('2483 : Pulkovo 1995 / Gauss-Kruger CM 141E', '2483'),
('2484 : Pulkovo 1995 / Gauss-Kruger CM 147E', '2484'),
('2485 : Pulkovo 1995 / Gauss-Kruger CM 153E', '2485'),
('2486 : Pulkovo 1995 / Gauss-Kruger CM 159E', '2486'),
('2487 : Pulkovo 1995 / Gauss-Kruger CM 165E', '2487'),
('2488 : Pulkovo 1995 / Gauss-Kruger CM 171E', '2488'),
('2489 : Pulkovo 1995 / Gauss-Kruger CM 177E', '2489'),
('2490 : Pulkovo 1995 / Gauss-Kruger CM 177W', '2490'),
('2491 : Pulkovo 1995 / Gauss-Kruger CM 171W', '2491'),
('2492 : Pulkovo 1942 / Gauss-Kruger CM 9E', '2492'),
('2493 : Pulkovo 1942 / Gauss-Kruger CM 15E', '2493'),
('2494 : Pulkovo 1942 / Gauss-Kruger CM 21E', '2494'),
('2495 : Pulkovo 1942 / Gauss-Kruger CM 27E', '2495'),
('2496 : Pulkovo 1942 / Gauss-Kruger CM 33E', '2496'),
('2497 : Pulkovo 1942 / Gauss-Kruger CM 39E', '2497'),
('2498 : Pulkovo 1942 / Gauss-Kruger CM 45E', '2498'),
('2499 : Pulkovo 1942 / Gauss-Kruger CM 51E', '2499'),
('2500 : Pulkovo 1942 / Gauss-Kruger CM 57E', '2500'),
('2501 : Pulkovo 1942 / Gauss-Kruger CM 63E', '2501'),
('2502 : Pulkovo 1942 / Gauss-Kruger CM 69E', '2502'),
('2503 : Pulkovo 1942 / Gauss-Kruger CM 75E', '2503'),
('2504 : Pulkovo 1942 / Gauss-Kruger CM 81E', '2504'),
('2505 : Pulkovo 1942 / Gauss-Kruger CM 87E', '2505'),
('2506 : Pulkovo 1942 / Gauss-Kruger CM 93E', '2506'),
('2507 : Pulkovo 1942 / Gauss-Kruger CM 99E', '2507'),
('2508 : Pulkovo 1942 / Gauss-Kruger CM 105E', '2508'),
('2509 : Pulkovo 1942 / Gauss-Kruger CM 111E', '2509'),
('2510 : Pulkovo 1942 / Gauss-Kruger CM 117E', '2510'),
('2511 : Pulkovo 1942 / Gauss-Kruger CM 123E', '2511'),
('2512 : Pulkovo 1942 / Gauss-Kruger CM 129E', '2512'),
('2513 : Pulkovo 1942 / Gauss-Kruger CM 135E', '2513'),
('2514 : Pulkovo 1942 / Gauss-Kruger CM 141E', '2514'),
('2515 : Pulkovo 1942 / Gauss-Kruger CM 147E', '2515'),
('2516 : Pulkovo 1942 / Gauss-Kruger CM 153E', '2516'),
('2517 : Pulkovo 1942 / Gauss-Kruger CM 159E', '2517'),
('2518 : Pulkovo 1942 / Gauss-Kruger CM 165E', '2518'),
('2519 : Pulkovo 1942 / Gauss-Kruger CM 171E', '2519'),
('2520 : Pulkovo 1942 / Gauss-Kruger CM 177E', '2520'),
('2521 : Pulkovo 1942 / Gauss-Kruger CM 177W', '2521'),
('2522 : Pulkovo 1942 / Gauss-Kruger CM 171W', '2522'),
('2523 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 7', '2523'),
('2524 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 8', '2524'),
('2525 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 9', '2525'),
('2526 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 10', '2526'),
('2527 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 11', '2527'),
('2528 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 12', '2528'),
('2529 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 13', '2529'),
('2530 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 14', '2530'),
('2531 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 15', '2531'),
('2532 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 16', '2532'),
('2533 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 17', '2533'),
('2534 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 18', '2534'),
('2535 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 19', '2535'),
('2536 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 20', '2536'),
('2537 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 21', '2537'),
('2538 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 22', '2538'),
('2539 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 23', '2539'),
('2540 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 24', '2540'),
('2541 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 25', '2541'),
('2542 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 26', '2542'),
('2543 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 27', '2543'),
('2544 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 28', '2544'),
('2545 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 29', '2545'),
('2546 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 30', '2546'),
('2547 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 31', '2547'),
('2548 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 32', '2548'),
('2549 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 33', '2549'),
('2550 : Samboja / UTM zone 50S', '2550'),
('2551 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 34', '2551'),
('2552 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 35', '2552'),
('2553 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 36', '2553'),
('2554 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 37', '2554'),
('2555 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 38', '2555'),
('2556 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 39', '2556'),
('2557 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 40', '2557'),
('2558 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 41', '2558'),
('2559 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 42', '2559'),
('2560 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 43', '2560'),
('2561 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 44', '2561'),
('2562 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 45', '2562'),
('2563 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 46', '2563'),
('2564 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 47', '2564'),
('2565 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 48', '2565'),
('2566 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 49', '2566'),
('2567 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 50', '2567'),
('2568 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 51', '2568'),
('2569 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 52', '2569'),
('2570 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 53', '2570'),
('2571 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 54', '2571'),
('2572 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 55', '2572'),
('2573 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 56', '2573'),
('2574 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 57', '2574'),
('2575 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 58', '2575'),
('2576 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 59', '2576'),
('2577 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 60', '2577'),
('2578 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 61', '2578'),
('2579 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 62', '2579'),
('2580 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 63', '2580'),
('2581 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 64', '2581'),
('2582 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 21E', '2582'),
('2583 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 24E', '2583'),
('2584 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 27E', '2584'),
('2585 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 30E', '2585'),
('2586 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 33E', '2586'),
('2587 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 36E', '2587'),
('2588 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 39E', '2588'),
('2589 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 42E', '2589'),
('2590 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 45E', '2590'),
('2591 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 48E', '2591'),
('2592 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 51E', '2592'),
('2593 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 54E', '2593'),
('2594 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 57E', '2594'),
('2595 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 60E', '2595'),
('2596 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 63E', '2596'),
('2597 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 66E', '2597'),
('2598 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 69E', '2598'),
('2599 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 72E', '2599'),
('2600 : Lietuvos Koordinoei Sistema 1994', '2600'),
('2601 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 75E', '2601'),
('2602 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 78E', '2602'),
('2603 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 81E', '2603'),
('2604 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 84E', '2604'),
('2605 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 87E', '2605'),
('2606 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 90E', '2606'),
('2607 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 93E', '2607'),
('2608 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 96E', '2608'),
('2609 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 99E', '2609'),
('2610 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 102E', '2610'),
('2611 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 105E', '2611'),
('2612 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 108E', '2612'),
('2613 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 111E', '2613'),
('2614 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 114E', '2614'),
('2615 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 117E', '2615'),
('2616 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 120E', '2616'),
('2617 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 123E', '2617'),
('2618 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 126E', '2618'),
('2619 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 129E', '2619'),
('2620 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 132E', '2620'),
('2621 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 135E', '2621'),
('2622 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 138E', '2622'),
('2623 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 141E', '2623'),
('2624 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 144E', '2624'),
('2625 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 147E', '2625'),
('2626 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 150E', '2626'),
('2627 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 153E', '2627'),
('2628 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 156E', '2628'),
('2629 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 159E', '2629'),
('2630 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 162E', '2630'),
('2631 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 165E', '2631'),
('2632 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 168E', '2632'),
('2633 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 171E', '2633'),
('2634 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 174E', '2634'),
('2635 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 177E', '2635'),
('2636 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 180E', '2636'),
('2637 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 177W', '2637'),
('2638 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 174W', '2638'),
('2639 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 171W', '2639'),
('2640 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 168W', '2640'),
('2641 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 7', '2641'),
('2642 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 8', '2642'),
('2643 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 9', '2643'),
('2644 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 10', '2644'),
('2645 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 11', '2645'),
('2646 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 12', '2646'),
('2647 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 13', '2647'),
('2648 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 14', '2648'),
('2649 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 15', '2649'),
('2650 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 16', '2650'),
('2651 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 17', '2651'),
('2652 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 18', '2652'),
('2653 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 19', '2653'),
('2654 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 20', '2654'),
('2655 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 21', '2655'),
('2656 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 22', '2656'),
('2657 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 23', '2657'),
('2658 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 24', '2658'),
('2659 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 25', '2659'),
('2660 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 26', '2660'),
('2661 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 27', '2661'),
('2662 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 28', '2662'),
('2663 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 29', '2663'),
('2664 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 30', '2664'),
('2665 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 31', '2665'),
('2666 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 32', '2666'),
('2667 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 33', '2667'),
('2668 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 34', '2668'),
('2669 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 35', '2669'),
('2670 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 36', '2670'),
('2671 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 37', '2671'),
('2672 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 38', '2672'),
('2673 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 39', '2673'),
('2674 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 40', '2674'),
('2675 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 41', '2675'),
('2676 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 42', '2676'),
('2677 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 43', '2677'),
('2678 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 44', '2678'),
('2679 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 45', '2679'),
('2680 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 46', '2680'),
('2681 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 47', '2681'),
('2682 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 48', '2682'),
('2683 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 49', '2683'),
('2684 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 50', '2684'),
('2685 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 51', '2685'),
('2686 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 52', '2686'),
('2687 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 53', '2687'),
('2688 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 54', '2688'),
('2689 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 55', '2689'),
('2690 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 56', '2690'),
('2691 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 57', '2691'),
('2692 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 58', '2692'),
('2693 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 59', '2693'),
('2694 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 60', '2694'),
('2695 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 61', '2695'),
('2696 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 62', '2696'),
('2697 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 63', '2697'),
('2698 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 64', '2698'),
('2699 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 21E', '2699'),
('2700 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 24E', '2700'),
('2701 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 27E', '2701'),
('2702 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 30E', '2702'),
('2703 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 33E', '2703'),
('2704 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 36E', '2704'),
('2705 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 39E', '2705'),
('2706 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 42E', '2706'),
('2707 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 45E', '2707'),
('2708 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 48E', '2708'),
('2709 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 51E', '2709'),
('2710 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 54E', '2710'),
('2711 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 57E', '2711'),
('2712 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 60E', '2712'),
('2713 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 63E', '2713'),
('2714 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 66E', '2714'),
('2715 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 69E', '2715'),
('2716 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 72E', '2716'),
('2717 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 75E', '2717'),
('2718 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 78E', '2718'),
('2719 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 81E', '2719'),
('2720 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 84E', '2720'),
('2721 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 87E', '2721'),
('2722 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 90E', '2722'),
('2723 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 93E', '2723'),
('2724 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 96E', '2724'),
('2725 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 99E', '2725'),
('2726 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 102E', '2726'),
('2727 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 105E', '2727'),
('2728 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 108E', '2728'),
('2729 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 111E', '2729'),
('2730 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 114E', '2730'),
('2731 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 117E', '2731'),
('2732 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 120E', '2732'),
('2733 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 123E', '2733'),
('2734 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 126E', '2734'),
('2735 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 129E', '2735'),
('2736 : Tete / UTM zone 36S', '2736'),
('2737 : Tete / UTM zone 37S', '2737'),
('2738 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 132E', '2738'),
('2739 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 135E', '2739'),
('2740 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 138E', '2740'),
('2741 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 141E', '2741'),
('2742 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 144E', '2742'),
('2743 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 147E', '2743'),
('2744 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 150E', '2744'),
('2745 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 153E', '2745'),
('2746 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 156E', '2746'),
('2747 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 159E', '2747'),
('2748 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 162E', '2748'),
('2749 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 165E', '2749'),
('2750 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 168E', '2750'),
('2751 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 171E', '2751'),
('2752 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 174E', '2752'),
('2753 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 177E', '2753'),
('2754 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 180E', '2754'),
('2755 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 177W', '2755'),
('2756 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 174W', '2756'),
('2757 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 171W', '2757'),
('2758 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 168W', '2758'),
('2759 : NAD83(HARN) / Alabama East', '2759'),
('2760 : NAD83(HARN) / Alabama West', '2760'),
('2761 : NAD83(HARN) / Arizona East', '2761'),
('2762 : NAD83(HARN) / Arizona Central', '2762'),
('2763 : NAD83(HARN) / Arizona West', '2763'),
('2764 : NAD83(HARN) / Arkansas North', '2764'),
('2765 : NAD83(HARN) / Arkansas South', '2765'),
('2766 : NAD83(HARN) / California zone 1', '2766'),
('2767 : NAD83(HARN) / California zone 2', '2767'),
('2768 : NAD83(HARN) / California zone 3', '2768'),
('2769 : NAD83(HARN) / California zone 4', '2769'),
('2770 : NAD83(HARN) / California zone 5', '2770'),
('2771 : NAD83(HARN) / California zone 6', '2771'),
('2772 : NAD83(HARN) / Colorado North', '2772'),
('2773 : NAD83(HARN) / Colorado Central', '2773'),
('2774 : NAD83(HARN) / Colorado South', '2774'),
('2775 : NAD83(HARN) / Connecticut', '2775'),
('2776 : NAD83(HARN) / Delaware', '2776'),
('2777 : NAD83(HARN) / Florida East', '2777'),
('2778 : NAD83(HARN) / Florida West', '2778'),
('2779 : NAD83(HARN) / Florida North', '2779'),
('2780 : NAD83(HARN) / Georgia East', '2780'),
('2781 : NAD83(HARN) / Georgia West', '2781'),
('2782 : NAD83(HARN) / Hawaii zone 1', '2782'),
('2783 : NAD83(HARN) / Hawaii zone 2', '2783'),
('2784 : NAD83(HARN) / Hawaii zone 3', '2784'),
('2785 : NAD83(HARN) / Hawaii zone 4', '2785'),
('2786 : NAD83(HARN) / Hawaii zone 5', '2786'),
('2787 : NAD83(HARN) / Idaho East', '2787'),
('2788 : NAD83(HARN) / Idaho Central', '2788'),
('2789 : NAD83(HARN) / Idaho West', '2789'),
('2790 : NAD83(HARN) / Illinois East', '2790'),
('2791 : NAD83(HARN) / Illinois West', '2791'),
('2792 : NAD83(HARN) / Indiana East', '2792'),
('2793 : NAD83(HARN) / Indiana West', '2793'),
('2794 : NAD83(HARN) / Iowa North', '2794'),
('2795 : NAD83(HARN) / Iowa South', '2795'),
('2796 : NAD83(HARN) / Kansas North', '2796'),
('2797 : NAD83(HARN) / Kansas South', '2797'),
('2798 : NAD83(HARN) / Kentucky North', '2798'),
('2799 : NAD83(HARN) / Kentucky South', '2799'),
('2800 : NAD83(HARN) / Louisiana North', '2800'),
('2801 : NAD83(HARN) / Louisiana South', '2801'),
('2802 : NAD83(HARN) / Maine East', '2802'),
('2803 : NAD83(HARN) / Maine West', '2803'),
('2804 : NAD83(HARN) / Maryland', '2804'),
('2805 : NAD83(HARN) / Massachusetts Mainland', '2805'),
('2806 : NAD83(HARN) / Massachusetts Island', '2806'),
('2807 : NAD83(HARN) / Michigan North', '2807'),
('2808 : NAD83(HARN) / Michigan Central', '2808'),
('2809 : NAD83(HARN) / Michigan South', '2809'),
('2810 : NAD83(HARN) / Minnesota North', '2810'),
('2811 : NAD83(HARN) / Minnesota Central', '2811'),
('2812 : NAD83(HARN) / Minnesota South', '2812'),
('2813 : NAD83(HARN) / Mississippi East', '2813'),
('2814 : NAD83(HARN) / Mississippi West', '2814'),
('2815 : NAD83(HARN) / Missouri East', '2815'),
('2816 : NAD83(HARN) / Missouri Central', '2816'),
('2817 : NAD83(HARN) / Missouri West', '2817'),
('2818 : NAD83(HARN) / Montana', '2818'),
('2819 : NAD83(HARN) / Nebraska', '2819'),
('2820 : NAD83(HARN) / Nevada East', '2820'),
('2821 : NAD83(HARN) / Nevada Central', '2821'),
('2822 : NAD83(HARN) / Nevada West', '2822'),
('2823 : NAD83(HARN) / New Hampshire', '2823'),
('2824 : NAD83(HARN) / New Jersey', '2824'),
('2825 : NAD83(HARN) / New Mexico East', '2825'),
('2826 : NAD83(HARN) / New Mexico Central', '2826'),
('2827 : NAD83(HARN) / New Mexico West', '2827'),
('2828 : NAD83(HARN) / New York East', '2828'),
('2829 : NAD83(HARN) / New York Central', '2829'),
('2830 : NAD83(HARN) / New York West', '2830'),
('2831 : NAD83(HARN) / New York Long Island', '2831'),
('2832 : NAD83(HARN) / North Dakota North', '2832'),
('2833 : NAD83(HARN) / North Dakota South', '2833'),
('2834 : NAD83(HARN) / Ohio North', '2834'),
('2835 : NAD83(HARN) / Ohio South', '2835'),
('2836 : NAD83(HARN) / Oklahoma North', '2836'),
('2837 : NAD83(HARN) / Oklahoma South', '2837'),
('2838 : NAD83(HARN) / Oregon North', '2838'),
('2839 : NAD83(HARN) / Oregon South', '2839'),
('2840 : NAD83(HARN) / Rhode Island', '2840'),
('2841 : NAD83(HARN) / South Dakota North', '2841'),
('2842 : NAD83(HARN) / South Dakota South', '2842'),
('2843 : NAD83(HARN) / Tennessee', '2843'),
('2844 : NAD83(HARN) / Texas North', '2844'),
('2845 : NAD83(HARN) / Texas North Central', '2845'),
('2846 : NAD83(HARN) / Texas Central', '2846'),
('2847 : NAD83(HARN) / Texas South Central', '2847'),
('2848 : NAD83(HARN) / Texas South', '2848'),
('2849 : NAD83(HARN) / Utah North', '2849'),
('2850 : NAD83(HARN) / Utah Central', '2850'),
('2851 : NAD83(HARN) / Utah South', '2851'),
('2852 : NAD83(HARN) / Vermont', '2852'),
('2853 : NAD83(HARN) / Virginia North', '2853'),
('2854 : NAD83(HARN) / Virginia South', '2854'),
('2855 : NAD83(HARN) / Washington North', '2855'),
('2856 : NAD83(HARN) / Washington South', '2856'),
('2857 : NAD83(HARN) / West Virginia North', '2857'),
('2858 : NAD83(HARN) / West Virginia South', '2858'),
('2859 : NAD83(HARN) / Wisconsin North', '2859'),
('2860 : NAD83(HARN) / Wisconsin Central', '2860'),
('2861 : NAD83(HARN) / Wisconsin South', '2861'),
('2862 : NAD83(HARN) / Wyoming East', '2862'),
('2863 : NAD83(HARN) / Wyoming East Central', '2863'),
('2864 : NAD83(HARN) / Wyoming West Central', '2864'),
('2865 : NAD83(HARN) / Wyoming West', '2865'),
('2866 : NAD83(HARN) / Puerto Rico and Virgin Is.', '2866'),
('2867 : NAD83(HARN) / Arizona East (ft)', '2867'),
('2868 : NAD83(HARN) / Arizona Central (ft)', '2868'),
('2869 : NAD83(HARN) / Arizona West (ft)', '2869'),
('2870 : NAD83(HARN) / California zone 1 (ftUS)', '2870'),
('2871 : NAD83(HARN) / California zone 2 (ftUS)', '2871'),
('2872 : NAD83(HARN) / California zone 3 (ftUS)', '2872'),
('2873 : NAD83(HARN) / California zone 4 (ftUS)', '2873'),
('2874 : NAD83(HARN) / California zone 5 (ftUS)', '2874'),
('2875 : NAD83(HARN) / California zone 6 (ftUS)', '2875'),
('2876 : NAD83(HARN) / Colorado North (ftUS)', '2876'),
('2877 : NAD83(HARN) / Colorado Central (ftUS)', '2877'),
('2878 : NAD83(HARN) / Colorado South (ftUS)', '2878'),
('2879 : NAD83(HARN) / Connecticut (ftUS)', '2879'),
('2880 : NAD83(HARN) / Delaware (ftUS)', '2880'),
('2881 : NAD83(HARN) / Florida East (ftUS)', '2881'),
('2882 : NAD83(HARN) / Florida West (ftUS)', '2882'),
('2883 : NAD83(HARN) / Florida North (ftUS)', '2883'),
('2884 : NAD83(HARN) / Georgia East (ftUS)', '2884'),
('2885 : NAD83(HARN) / Georgia West (ftUS)', '2885'),
('2886 : NAD83(HARN) / Idaho East (ftUS)', '2886'),
('2887 : NAD83(HARN) / Idaho Central (ftUS)', '2887'),
('2888 : NAD83(HARN) / Idaho West (ftUS)', '2888'),
('2889 : NAD83(HARN) / Indiana East (ftUS)', '2889'),
('2890 : NAD83(HARN) / Indiana West (ftUS)', '2890'),
('2891 : NAD83(HARN) / Kentucky North (ftUS)', '2891'),
('2892 : NAD83(HARN) / Kentucky South (ftUS)', '2892'),
('2893 : NAD83(HARN) / Maryland (ftUS)', '2893'),
('2894 : NAD83(HARN) / Massachusetts Mainland (ftUS)', '2894'),
('2895 : NAD83(HARN) / Massachusetts Island (ftUS)', '2895'),
('2896 : NAD83(HARN) / Michigan North (ft)', '2896'),
('2897 : NAD83(HARN) / Michigan Central (ft)', '2897'),
('2898 : NAD83(HARN) / Michigan South (ft)', '2898'),
('2899 : NAD83(HARN) / Mississippi East (ftUS)', '2899'),
('2900 : NAD83(HARN) / Mississippi West (ftUS)', '2900'),
('2901 : NAD83(HARN) / Montana (ft)', '2901'),
('2902 : NAD83(HARN) / New Mexico East (ftUS)', '2902'),
('2903 : NAD83(HARN) / New Mexico Central (ftUS)', '2903'),
('2904 : NAD83(HARN) / New Mexico West (ftUS)', '2904'),
('2905 : NAD83(HARN) / New York East (ftUS)', '2905'),
('2906 : NAD83(HARN) / New York Central (ftUS)', '2906'),
('2907 : NAD83(HARN) / New York West (ftUS)', '2907'),
('2908 : NAD83(HARN) / New York Long Island (ftUS)', '2908'),
('2909 : NAD83(HARN) / North Dakota North (ft)', '2909'),
('2910 : NAD83(HARN) / North Dakota South (ft)', '2910'),
('2911 : NAD83(HARN) / Oklahoma North (ftUS)', '2911'),
('2912 : NAD83(HARN) / Oklahoma South (ftUS)', '2912'),
('2913 : NAD83(HARN) / Oregon North (ft)', '2913'),
('2914 : NAD83(HARN) / Oregon South (ft)', '2914'),
('2915 : NAD83(HARN) / Tennessee (ftUS)', '2915'),
('2916 : NAD83(HARN) / Texas North (ftUS)', '2916'),
('2917 : NAD83(HARN) / Texas North Central (ftUS)', '2917'),
('2918 : NAD83(HARN) / Texas Central (ftUS)', '2918'),
('2919 : NAD83(HARN) / Texas South Central (ftUS)', '2919'),
('2920 : NAD83(HARN) / Texas South (ftUS)', '2920'),
('2921 : NAD83(HARN) / Utah North (ft)', '2921'),
('2922 : NAD83(HARN) / Utah Central (ft)', '2922'),
('2923 : NAD83(HARN) / Utah South (ft)', '2923'),
('2924 : NAD83(HARN) / Virginia North (ftUS)', '2924'),
('2925 : NAD83(HARN) / Virginia South (ftUS)', '2925'),
('2926 : NAD83(HARN) / Washington North (ftUS)', '2926'),
('2927 : NAD83(HARN) / Washington South (ftUS)', '2927'),
('2928 : NAD83(HARN) / Wisconsin North (ftUS)', '2928'),
('2929 : NAD83(HARN) / Wisconsin Central (ftUS)', '2929'),
('2930 : NAD83(HARN) / Wisconsin South (ftUS)', '2930'),
('2931 : Beduaram / TM 13 NE', '2931'),
('2932 : QND95 / Qatar National Grid', '2932'),
('2933 : Segara / UTM zone 50S', '2933'),
('2934 : Segara (Jakarta) / NEIEZ', '2934'),
('2935 : Pulkovo 1942 / CS63 zone A1', '2935'),
('2936 : Pulkovo 1942 / CS63 zone A2', '2936'),
('2937 : Pulkovo 1942 / CS63 zone A3', '2937'),
('2938 : Pulkovo 1942 / CS63 zone A4', '2938'),
('2939 : Pulkovo 1942 / CS63 zone K2', '2939'),
('2940 : Pulkovo 1942 / CS63 zone K3', '2940'),
('2941 : Pulkovo 1942 / CS63 zone K4', '2941'),
('2942 : Porto Santo / UTM zone 28N', '2942'),
('2943 : Selvagem Grande / UTM zone 28N', '2943'),
('2944 : NAD83(CSRS) / SCoPQ zone 2', '2944'),
('2945 : NAD83(CSRS) / MTM zone 3', '2945'),
('2946 : NAD83(CSRS) / MTM zone 4', '2946'),
('2947 : NAD83(CSRS) / MTM zone 5', '2947'),
('2948 : NAD83(CSRS) / MTM zone 6', '2948'),
('2949 : NAD83(CSRS) / MTM zone 7', '2949'),
('2950 : NAD83(CSRS) / MTM zone 8', '2950'),
('2951 : NAD83(CSRS) / MTM zone 9', '2951'),
('2952 : NAD83(CSRS) / MTM zone 10', '2952'),
('2953 : NAD83(CSRS) / New Brunswick Stereographic', '2953'),
('2954 : NAD83(CSRS) / Prince Edward Isl. Stereographic (NAD83)', '2954'),
('2955 : NAD83(CSRS) / UTM zone 11N', '2955'),
('2956 : NAD83(CSRS) / UTM zone 12N', '2956'),
('2957 : NAD83(CSRS) / UTM zone 13N', '2957'),
('2958 : NAD83(CSRS) / UTM zone 17N', '2958'),
('2959 : NAD83(CSRS) / UTM zone 18N', '2959'),
('2960 : NAD83(CSRS) / UTM zone 19N', '2960'),
('2961 : NAD83(CSRS) / UTM zone 20N', '2961'),
('2962 : NAD83(CSRS) / UTM zone 21N', '2962'),
('2963 : Lisbon 1890 (Lisbon) / Portugal Bonne', '2963'),
('2964 : NAD27 / Alaska Albers', '2964'),
('2965 : NAD83 / Indiana East (ftUS)', '2965'),
('2966 : NAD83 / Indiana West (ftUS)', '2966'),
('2967 : NAD83(HARN) / Indiana East (ftUS)', '2967'),
('2968 : NAD83(HARN) / Indiana West (ftUS)', '2968'),
('2969 : Fort Marigot / UTM zone 20N', '2969'),
('2970 : Guadeloupe 1948 / UTM zone 20N', '2970'),
('2971 : CSG67 / UTM zone 22N', '2971'),
('2972 : RGFG95 / UTM zone 22N', '2972'),
('2973 : Martinique 1938 / UTM zone 20N', '2973'),
('2975 : RGR92 / UTM zone 40S', '2975'),
('2976 : Tahiti 52 / UTM zone 6S', '2976'),
('2977 : Tahaa 54 / UTM zone 5S', '2977'),
('2978 : IGN72 Nuku Hiva / UTM zone 7S', '2978'),
('2979 : K0 1949 / UTM zone 42S', '2979'),
('2980 : Combani 1950 / UTM zone 38S', '2980'),
('2981 : IGN56 Lifou / UTM zone 58S', '2981'),
('2982 : IGN72 Grand Terre / UTM zone 58S', '2982'),
('2983 : ST87 Ouvea / UTM zone 58S', '2983'),
('2984 : RGNC 1991 / Lambert New Caledonia', '2984'),
('2985 : Petrels 1972 / Terre Adelie Polar Stereographic', '2985'),
('2986 : Perroud 1950 / Terre Adelie Polar Stereographic', '2986'),
('2987 : Saint Pierre et Miquelon 1950 / UTM zone 21N', '2987'),
('2988 : MOP78 / UTM zone 1S', '2988'),
('2989 : RRAF 1991 / UTM zone 20N', '2989'),
('2990 : Reunion 1947 / TM Reunion', '2990'),
('2991 : NAD83 / Oregon LCC (m)', '2991'),
('2992 : NAD83 / Oregon GIC Lambert (ft)', '2992'),
('2993 : NAD83(HARN) / Oregon LCC (m)', '2993'),
('2994 : NAD83(HARN) / Oregon GIC Lambert (ft)', '2994'),
('2995 : IGN53 Mare / UTM zone 58S', '2995'),
('2996 : ST84 Ile des Pins / UTM zone 58S', '2996'),
('2997 : ST71 Belep / UTM zone 58S', '2997'),
('2998 : NEA74 Noumea / UTM zone 58S', '2998'),
('2999 : Grand Comoros / UTM zone 38S', '2999'),
('3000 : Segara / NEIEZ', '3000'),
('3001 : Batavia / NEIEZ', '3001'),
('3002 : Makassar / NEIEZ', '3002'),
('3003 : Monte Mario / Italy zone 1', '3003'),
('3004 : Monte Mario / Italy zone 2', '3004'),
('3005 : NAD83 / BC Albers', '3005'),
('3006 : SWEREF99 TM', '3006'),
('3007 : SWEREF99 12 00', '3007'),
('3008 : SWEREF99 13 30', '3008'),
('3009 : SWEREF99 15 00', '3009'),
('3010 : SWEREF99 16 30', '3010'),
('3011 : SWEREF99 18 00', '3011'),
('3012 : SWEREF99 14 15', '3012'),
('3013 : SWEREF99 15 45', '3013'),
('3014 : SWEREF99 17 15', '3014'),
('3015 : SWEREF99 18 45', '3015'),
('3016 : SWEREF99 20 15', '3016'),
('3017 : SWEREF99 21 45', '3017'),
('3018 : SWEREF99 23 15', '3018'),
('3019 : RT90 7.5 gon V', '3019'),
('3020 : RT90 5 gon V', '3020'),
('3021 : RT90 2.5 gon V', '3021'),
('3022 : RT90 0 gon', '3022'),
('3023 : RT90 2.5 gon O', '3023'),
('3024 : RT90 5 gon O', '3024'),
('3025 : RT38 7.5 gon V', '3025'),
('3026 : RT38 5 gon V', '3026'),
('3027 : RT38 2.5 gon V', '3027'),
('3028 : RT38 0 gon', '3028'),
('3029 : RT38 2.5 gon O', '3029'),
('3030 : RT38 5 gon O', '3030'),
('3031 : WGS 84 / Antarctic Polar Stereographic', '3031'),
('3032 : WGS 84 / Australian Antarctic Polar Stereographic', '3032'),
('3033 : WGS 84 / Australian Antarctic Lambert', '3033'),
('3034 : ETRS89 / LCC Europe', '3034'),
('3035 : ETRS89 / LAEA Europe', '3035'),
('3036 : Moznet / UTM zone 36S', '3036'),
('3037 : Moznet / UTM zone 37S', '3037'),
('3038 : ETRS89 / TM26', '3038'),
('3039 : ETRS89 / TM27', '3039'),
('3040 : ETRS89 / UTM zone 28N (N-E)', '3040'),
('3041 : ETRS89 / UTM zone 29N (N-E)', '3041'),
('3042 : ETRS89 / UTM zone 30N (N-E)', '3042'),
('3043 : ETRS89 / UTM zone 31N (N-E)', '3043'),
('3044 : ETRS89 / UTM zone 32N (N-E)', '3044'),
('3045 : ETRS89 / UTM zone 33N (N-E)', '3045'),
('3046 : ETRS89 / UTM zone 34N (N-E)', '3046'),
('3047 : ETRS89 / UTM zone 35N (N-E)', '3047'),
('3048 : ETRS89 / UTM zone 36N (N-E)', '3048'),
('3049 : ETRS89 / UTM zone 37N (N-E)', '3049'),
('3050 : ETRS89 / TM38', '3050'),
('3051 : ETRS89 / TM39', '3051'),
('3052 : Reykjavik 1900 / Lambert 1900', '3052'),
('3053 : Hjorsey 1955 / Lambert 1955', '3053'),
('3054 : Hjorsey 1955 / UTM zone 26N', '3054'),
('3055 : Hjorsey 1955 / UTM zone 27N', '3055'),
('3056 : Hjorsey 1955 / UTM zone 28N', '3056'),
('3057 : ISN93 / Lambert 1993', '3057'),
('3058 : Helle 1954 / Jan Mayen Grid', '3058'),
('3059 : LKS92 / Latvia TM', '3059'),
('3060 : IGN72 Grande Terre / UTM zone 58S', '3060'),
('3061 : Porto Santo 1995 / UTM zone 28N', '3061'),
('3062 : Azores Oriental 1995 / UTM zone 26N', '3062'),
('3063 : Azores Central 1995 / UTM zone 26N', '3063'),
('3064 : IGM95 / UTM zone 32N', '3064'),
('3065 : IGM95 / UTM zone 33N', '3065'),
('3066 : ED50 / Jordan TM', '3066'),
('3067 : ETRS89 / TM35FIN(E,N)', '3067'),
('3068 : DHDN / Soldner Berlin', '3068'),
('3069 : NAD27 / Wisconsin Transverse Mercator', '3069'),
('3070 : NAD83 / Wisconsin Transverse Mercator', '3070'),
('3071 : NAD83(HARN) / Wisconsin Transverse Mercator', '3071'),
('3072 : NAD83 / Maine CS2000 East', '3072'),
('3073 : NAD83 / Maine CS2000 Central', '3073'),
('3074 : NAD83 / Maine CS2000 West', '3074'),
('3075 : NAD83(HARN) / Maine CS2000 East', '3075'),
('3076 : NAD83(HARN) / Maine CS2000 Central', '3076'),
('3077 : NAD83(HARN) / Maine CS2000 West', '3077'),
('3078 : NAD83 / Michigan Oblique Mercator', '3078'),
('3079 : NAD83(HARN) / Michigan Oblique Mercator', '3079'),
('3080 : NAD27 / Shackleford', '3080'),
('3081 : NAD83 / Texas State Mapping System', '3081'),
('3082 : NAD83 / Texas Centric Lambert Conformal', '3082'),
('3083 : NAD83 / Texas Centric Albers Equal Area', '3083'),
('3084 : NAD83(HARN) / Texas Centric Lambert Conformal', '3084'),
('3085 : NAD83(HARN) / Texas Centric Albers Equal Area', '3085'),
('3086 : NAD83 / Florida GDL Albers', '3086'),
('3087 : NAD83(HARN) / Florida GDL Albers', '3087'),
('3088 : NAD83 / Kentucky Single Zone', '3088'),
('3089 : NAD83 / Kentucky Single Zone (ftUS)', '3089'),
('3090 : NAD83(HARN) / Kentucky Single Zone', '3090'),
('3091 : NAD83(HARN) / Kentucky Single Zone (ftUS)', '3091'),
('3092 : Tokyo / UTM zone 51N', '3092'),
('3093 : Tokyo / UTM zone 52N', '3093'),
('3094 : Tokyo / UTM zone 53N', '3094'),
('3095 : Tokyo / UTM zone 54N', '3095'),
('3096 : Tokyo / UTM zone 55N', '3096'),
('3097 : JGD2000 / UTM zone 51N', '3097'),
('3098 : JGD2000 / UTM zone 52N', '3098'),
('3099 : JGD2000 / UTM zone 53N', '3099'),
('3100 : JGD2000 / UTM zone 54N', '3100'),
('3101 : JGD2000 / UTM zone 55N', '3101'),
('3102 : American Samoa 1962 / American Samoa Lambert', '3102'),
('3103 : Mauritania 1999 / UTM zone 28N', '3103'),
('3104 : Mauritania 1999 / UTM zone 29N', '3104'),
('3105 : Mauritania 1999 / UTM zone 30N', '3105'),
('3106 : Gulshan 303 / Bangladesh Transverse Mercator', '3106'),
('3107 : GDA94 / SA Lambert', '3107'),
('3108 : ETRS89 / Guernsey Grid', '3108'),
('3109 : ETRS89 / Jersey Transverse Mercator', '3109'),
('3110 : AGD66 / Vicgrid66', '3110'),
('3111 : GDA94 / Vicgrid94', '3111'),
('3112 : GDA94 / Geoscience Australia Lambert', '3112'),
('3113 : GDA94 / BCSG02', '3113'),
('3114 : MAGNA-SIRGAS / Colombia Far West zone', '3114'),
('3115 : MAGNA-SIRGAS / Colombia West zone', '3115'),
('3116 : MAGNA-SIRGAS / Colombia Bogota zone', '3116'),
('3117 : MAGNA-SIRGAS / Colombia East Central zone', '3117'),
('3118 : MAGNA-SIRGAS / Colombia East zone', '3118'),
('3119 : Douala 1948 / AEF west', '3119'),
('3120 : Pulkovo 1942(58) / Poland zone I', '3120'),
('3121 : PRS92 / Philippines zone 1', '3121'),
('3122 : PRS92 / Philippines zone 2', '3122'),
('3123 : PRS92 / Philippines zone 3', '3123'),
('3124 : PRS92 / Philippines zone 4', '3124'),
('3125 : PRS92 / Philippines zone 5', '3125'),
('3126 : ETRS89 / ETRS-GK19FIN', '3126'),
('3127 : ETRS89 / ETRS-GK20FIN', '3127'),
('3128 : ETRS89 / ETRS-GK21FIN', '3128'),
('3129 : ETRS89 / ETRS-GK22FIN', '3129'),
('3130 : ETRS89 / ETRS-GK23FIN', '3130'),
('3131 : ETRS89 / ETRS-GK24FIN', '3131'),
('3132 : ETRS89 / ETRS-GK25FIN', '3132'),
('3133 : ETRS89 / ETRS-GK26FIN', '3133'),
('3134 : ETRS89 / ETRS-GK27FIN', '3134'),
('3135 : ETRS89 / ETRS-GK28FIN', '3135'),
('3136 : ETRS89 / ETRS-GK29FIN', '3136'),
('3137 : ETRS89 / ETRS-GK30FIN', '3137'),
('3138 : ETRS89 / ETRS-GK31FIN', '3138'),
('3139 : Vanua Levu 1915 / Vanua Levu Grid', '3139'),
('3140 : Viti Levu 1912 / Viti Levu Grid', '3140'),
('3141 : Fiji 1956 / UTM zone 60S', '3141'),
('3142 : Fiji 1956 / UTM zone 1S', '3142'),
('3143 : Fiji 1986 / Fiji Map Grid', '3143'),
('3144 : FD54 / Faroe Lambert', '3144'),
('3145 : ETRS89 / Faroe Lambert', '3145'),
('3146 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 6', '3146'),
('3147 : Pulkovo 1942 / 3-degree Gauss-Kruger CM 18E', '3147'),
('3148 : Indian 1960 / UTM zone 48N', '3148'),
('3149 : Indian 1960 / UTM zone 49N', '3149'),
('3150 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 6', '3150'),
('3151 : Pulkovo 1995 / 3-degree Gauss-Kruger CM 18E', '3151'),
('3152 : ST74', '3152'),
('3153 : NAD83(CSRS) / BC Albers', '3153'),
('3154 : NAD83(CSRS) / UTM zone 7N', '3154'),
('3155 : NAD83(CSRS) / UTM zone 8N', '3155'),
('3156 : NAD83(CSRS) / UTM zone 9N', '3156'),
('3157 : NAD83(CSRS) / UTM zone 10N', '3157'),
('3158 : NAD83(CSRS) / UTM zone 14N', '3158'),
('3159 : NAD83(CSRS) / UTM zone 15N', '3159'),
('3160 : NAD83(CSRS) / UTM zone 16N', '3160'),
('3161 : NAD83 / Ontario MNR Lambert', '3161'),
('3162 : NAD83(CSRS) / Ontario MNR Lambert', '3162'),
('3163 : RGNC91-93 / Lambert New Caledonia', '3163'),
('3164 : ST87 Ouvea / UTM zone 58S', '3164'),
('3165 : NEA74 Noumea / Noumea Lambert', '3165'),
('3166 : NEA74 Noumea / Noumea Lambert 2', '3166'),
('3167 : Kertau (RSO) / RSO Malaya (ch)', '3167'),
('3168 : Kertau (RSO) / RSO Malaya (m)', '3168'),
('3169 : RGNC91-93 / UTM zone 57S', '3169'),
('3170 : RGNC91-93 / UTM zone 58S', '3170'),
('3171 : RGNC91-93 / UTM zone 59S', '3171'),
('3172 : IGN53 Mare / UTM zone 59S', '3172'),
('3173 : fk89 / Faroe Lambert FK89', '3173'),
('3174 : NAD83 / Great Lakes Albers', '3174'),
('3175 : NAD83 / Great Lakes and St Lawrence Albers', '3175'),
('3176 : Indian 1960 / TM 106 NE', '3176'),
('3177 : LGD2006 / Libya TM', '3177'),
('3178 : GR96 / UTM zone 18N', '3178'),
('3179 : GR96 / UTM zone 19N', '3179'),
('3180 : GR96 / UTM zone 20N', '3180'),
('3181 : GR96 / UTM zone 21N', '3181'),
('3182 : GR96 / UTM zone 22N', '3182'),
('3183 : GR96 / UTM zone 23N', '3183'),
('3184 : GR96 / UTM zone 24N', '3184'),
('3185 : GR96 / UTM zone 25N', '3185'),
('3186 : GR96 / UTM zone 26N', '3186'),
('3187 : GR96 / UTM zone 27N', '3187'),
('3188 : GR96 / UTM zone 28N', '3188'),
('3189 : GR96 / UTM zone 29N', '3189'),
('3190 : LGD2006 / Libya TM zone 5', '3190'),
('3191 : LGD2006 / Libya TM zone 6', '3191'),
('3192 : LGD2006 / Libya TM zone 7', '3192'),
('3193 : LGD2006 / Libya TM zone 8', '3193'),
('3194 : LGD2006 / Libya TM zone 9', '3194'),
('3195 : LGD2006 / Libya TM zone 10', '3195'),
('3196 : LGD2006 / Libya TM zone 11', '3196'),
('3197 : LGD2006 / Libya TM zone 12', '3197'),
('3198 : LGD2006 / Libya TM zone 13', '3198'),
('3199 : LGD2006 / UTM zone 32N', '3199'),
('3200 : FD58 / Iraq zone', '3200'),
('3201 : LGD2006 / UTM zone 33N', '3201'),
('3202 : LGD2006 / UTM zone 34N', '3202'),
('3203 : LGD2006 / UTM zone 35N', '3203'),
('3204 : WGS 84 / SCAR IMW SP19-20', '3204'),
('3205 : WGS 84 / SCAR IMW SP21-22', '3205'),
('3206 : WGS 84 / SCAR IMW SP23-24', '3206'),
('3207 : WGS 84 / SCAR IMW SQ01-02', '3207'),
('3208 : WGS 84 / SCAR IMW SQ19-20', '3208'),
('3209 : WGS 84 / SCAR IMW SQ21-22', '3209'),
('3210 : WGS 84 / SCAR IMW SQ37-38', '3210'),
('3211 : WGS 84 / SCAR IMW SQ39-40', '3211'),
('3212 : WGS 84 / SCAR IMW SQ41-42', '3212'),
('3213 : WGS 84 / SCAR IMW SQ43-44', '3213'),
('3214 : WGS 84 / SCAR IMW SQ45-46', '3214'),
('3215 : WGS 84 / SCAR IMW SQ47-48', '3215'),
('3216 : WGS 84 / SCAR IMW SQ49-50', '3216'),
('3217 : WGS 84 / SCAR IMW SQ51-52', '3217'),
('3218 : WGS 84 / SCAR IMW SQ53-54', '3218'),
('3219 : WGS 84 / SCAR IMW SQ55-56', '3219'),
('3220 : WGS 84 / SCAR IMW SQ57-58', '3220'),
('3221 : WGS 84 / SCAR IMW SR13-14', '3221'),
('3222 : WGS 84 / SCAR IMW SR15-16', '3222'),
('3223 : WGS 84 / SCAR IMW SR17-18', '3223'),
('3224 : WGS 84 / SCAR IMW SR19-20', '3224'),
('3225 : WGS 84 / SCAR IMW SR27-28', '3225'),
('3226 : WGS 84 / SCAR IMW SR29-30', '3226'),
('3227 : WGS 84 / SCAR IMW SR31-32', '3227'),
('3228 : WGS 84 / SCAR IMW SR33-34', '3228'),
('3229 : WGS 84 / SCAR IMW SR35-36', '3229'),
('3230 : WGS 84 / SCAR IMW SR37-38', '3230'),
('3231 : WGS 84 / SCAR IMW SR39-40', '3231'),
('3232 : WGS 84 / SCAR IMW SR41-42', '3232'),
('3233 : WGS 84 / SCAR IMW SR43-44', '3233'),
('3234 : WGS 84 / SCAR IMW SR45-46', '3234'),
('3235 : WGS 84 / SCAR IMW SR47-48', '3235'),
('3236 : WGS 84 / SCAR IMW SR49-50', '3236'),
('3237 : WGS 84 / SCAR IMW SR51-52', '3237'),
('3238 : WGS 84 / SCAR IMW SR53-54', '3238'),
('3239 : WGS 84 / SCAR IMW SR55-56', '3239'),
('3240 : WGS 84 / SCAR IMW SR57-58', '3240'),
('3241 : WGS 84 / SCAR IMW SR59-60', '3241'),
('3242 : WGS 84 / SCAR IMW SS04-06', '3242'),
('3243 : WGS 84 / SCAR IMW SS07-09', '3243'),
('3244 : WGS 84 / SCAR IMW SS10-12', '3244'),
('3245 : WGS 84 / SCAR IMW SS13-15', '3245'),
('3246 : WGS 84 / SCAR IMW SS16-18', '3246'),
('3247 : WGS 84 / SCAR IMW SS19-21', '3247'),
('3248 : WGS 84 / SCAR IMW SS25-27', '3248'),
('3249 : WGS 84 / SCAR IMW SS28-30', '3249'),
('3250 : WGS 84 / SCAR IMW SS31-33', '3250'),
('3251 : WGS 84 / SCAR IMW SS34-36', '3251'),
('3252 : WGS 84 / SCAR IMW SS37-39', '3252'),
('3253 : WGS 84 / SCAR IMW SS40-42', '3253'),
('3254 : WGS 84 / SCAR IMW SS43-45', '3254'),
('3255 : WGS 84 / SCAR IMW SS46-48', '3255'),
('3256 : WGS 84 / SCAR IMW SS49-51', '3256'),
('3257 : WGS 84 / SCAR IMW SS52-54', '3257'),
('3258 : WGS 84 / SCAR IMW SS55-57', '3258'),
('3259 : WGS 84 / SCAR IMW SS58-60', '3259'),
('3260 : WGS 84 / SCAR IMW ST01-04', '3260'),
('3261 : WGS 84 / SCAR IMW ST05-08', '3261'),
('3262 : WGS 84 / SCAR IMW ST09-12', '3262'),
('3263 : WGS 84 / SCAR IMW ST13-16', '3263'),
('3264 : WGS 84 / SCAR IMW ST17-20', '3264'),
('3265 : WGS 84 / SCAR IMW ST21-24', '3265'),
('3266 : WGS 84 / SCAR IMW ST25-28', '3266'),
('3267 : WGS 84 / SCAR IMW ST29-32', '3267'),
('3268 : WGS 84 / SCAR IMW ST33-36', '3268'),
('3269 : WGS 84 / SCAR IMW ST37-40', '3269'),
('3270 : WGS 84 / SCAR IMW ST41-44', '3270'),
('3271 : WGS 84 / SCAR IMW ST45-48', '3271'),
('3272 : WGS 84 / SCAR IMW ST49-52', '3272'),
('3273 : WGS 84 / SCAR IMW ST53-56', '3273'),
('3274 : WGS 84 / SCAR IMW ST57-60', '3274'),
('3275 : WGS 84 / SCAR IMW SU01-05', '3275'),
('3276 : WGS 84 / SCAR IMW SU06-10', '3276'),
('3277 : WGS 84 / SCAR IMW SU11-15', '3277'),
('3278 : WGS 84 / SCAR IMW SU16-20', '3278'),
('3279 : WGS 84 / SCAR IMW SU21-25', '3279'),
('3280 : WGS 84 / SCAR IMW SU26-30', '3280'),
('3281 : WGS 84 / SCAR IMW SU31-35', '3281'),
('3282 : WGS 84 / SCAR IMW SU36-40', '3282'),
('3283 : WGS 84 / SCAR IMW SU41-45', '3283'),
('3284 : WGS 84 / SCAR IMW SU46-50', '3284'),
('3285 : WGS 84 / SCAR IMW SU51-55', '3285'),
('3286 : WGS 84 / SCAR IMW SU56-60', '3286'),
('3287 : WGS 84 / SCAR IMW SV01-10', '3287'),
('3288 : WGS 84 / SCAR IMW SV11-20', '3288'),
('3289 : WGS 84 / SCAR IMW SV21-30', '3289'),
('3290 : WGS 84 / SCAR IMW SV31-40', '3290'),
('3291 : WGS 84 / SCAR IMW SV41-50', '3291'),
('3292 : WGS 84 / SCAR IMW SV51-60', '3292'),
('3293 : WGS 84 / SCAR IMW SW01-60', '3293'),
('3294 : WGS 84 / USGS Transantarctic Mountains', '3294'),
('3295 : Guam 1963 / Yap Islands', '3295'),
('3296 : RGPF / UTM zone 5S', '3296'),
('3297 : RGPF / UTM zone 6S', '3297'),
('3298 : RGPF / UTM zone 7S', '3298'),
('3299 : RGPF / UTM zone 8S', '3299'),
('3300 : Estonian Coordinate System of 1992', '3300'),
('3301 : Estonian Coordinate System of 1997', '3301'),
('3302 : IGN63 Hiva Oa / UTM zone 7S', '3302'),
('3303 : Fatu Iva 72 / UTM zone 7S', '3303'),
('3304 : Tahiti 79 / UTM zone 6S', '3304'),
('3305 : Moorea 87 / UTM zone 6S', '3305'),
('3306 : Maupiti 83 / UTM zone 5S', '3306'),
('3307 : Nakhl-e Ghanem / UTM zone 39N', '3307'),
('3308 : GDA94 / NSW Lambert', '3308'),
('3309 : NAD27 / California Albers', '3309'),
('3310 : NAD83 / California Albers', '3310'),
('3311 : NAD83(HARN) / California Albers', '3311'),
('3312 : CSG67 / UTM zone 21N', '3312'),
('3313 : RGFG95 / UTM zone 21N', '3313'),
('3314 : Katanga 1955 / Katanga Lambert', '3314'),
('3315 : Katanga 1955 / Katanga TM', '3315'),
('3316 : Kasai 1953 / Congo TM zone 22', '3316'),
('3317 : Kasai 1953 / Congo TM zone 24', '3317'),
('3318 : IGC 1962 / Congo TM zone 12', '3318'),
('3319 : IGC 1962 / Congo TM zone 14', '3319'),
('3320 : IGC 1962 / Congo TM zone 16', '3320'),
('3321 : IGC 1962 / Congo TM zone 18', '3321'),
('3322 : IGC 1962 / Congo TM zone 20', '3322'),
('3323 : IGC 1962 / Congo TM zone 22', '3323'),
('3324 : IGC 1962 / Congo TM zone 24', '3324'),
('3325 : IGC 1962 / Congo TM zone 26', '3325'),
('3326 : IGC 1962 / Congo TM zone 28', '3326'),
('3327 : IGC 1962 / Congo TM zone 30', '3327'),
('3328 : Pulkovo 1942(58) / GUGiK-80', '3328'),
('3329 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 5', '3329'),
('3330 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 6', '3330'),
('3331 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 7', '3331'),
('3332 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 8', '3332'),
('3333 : Pulkovo 1942(58) / Gauss-Kruger zone 3', '3333'),
('3334 : Pulkovo 1942(58) / Gauss-Kruger zone 4', '3334'),
('3335 : Pulkovo 1942(58) / Gauss-Kruger zone 5', '3335'),
('3336 : IGN 1962 Kerguelen / UTM zone 42S', '3336'),
('3337 : Le Pouce 1934 / Mauritius Grid', '3337'),
('3338 : NAD83 / Alaska Albers', '3338'),
('3339 : IGCB 1955 / Congo TM zone 12', '3339'),
('3340 : IGCB 1955 / Congo TM zone 14', '3340'),
('3341 : IGCB 1955 / Congo TM zone 16', '3341'),
('3342 : IGCB 1955 / UTM zone 33S', '3342'),
('3343 : Mauritania 1999 / UTM zone 28N', '3343'),
('3344 : Mauritania 1999 / UTM zone 29N', '3344'),
('3345 : Mauritania 1999 / UTM zone 30N', '3345'),
('3346 : LKS94 / Lithuania TM', '3346'),
('3347 : NAD83 / Statistics Canada Lambert', '3347'),
('3348 : NAD83(CSRS) / Statistics Canada Lambert', '3348'),
('3349 : WGS 84 / PDC Mercator', '3349'),
('3350 : Pulkovo 1942 / CS63 zone C0', '3350'),
('3351 : Pulkovo 1942 / CS63 zone C1', '3351'),
('3352 : Pulkovo 1942 / CS63 zone C2', '3352'),
('3353 : Mhast (onshore) / UTM zone 32S', '3353'),
('3354 : Mhast (offshore) / UTM zone 32S', '3354'),
('3355 : Egypt Gulf of Suez S-650 TL / Red Belt', '3355'),
('3356 : Grand Cayman 1959 / UTM zone 17N', '3356'),
('3357 : Little Cayman 1961 / UTM zone 17N', '3357'),
('3358 : NAD83(HARN) / North Carolina', '3358'),
('3359 : NAD83(HARN) / North Carolina (ftUS)', '3359'),
('3360 : NAD83(HARN) / South Carolina', '3360'),
('3361 : NAD83(HARN) / South Carolina (ft)', '3361'),
('3362 : NAD83(HARN) / Pennsylvania North', '3362'),
('3363 : NAD83(HARN) / Pennsylvania North (ftUS)', '3363'),
('3364 : NAD83(HARN) / Pennsylvania South', '3364'),
('3365 : NAD83(HARN) / Pennsylvania South (ftUS)', '3365'),
('3366 : Hong Kong 1963 Grid System', '3366'),
('3367 : IGN Astro 1960 / UTM zone 28N', '3367'),
('3368 : IGN Astro 1960 / UTM zone 29N', '3368'),
('3369 : IGN Astro 1960 / UTM zone 30N', '3369'),
('3370 : NAD27 / UTM zone 59N', '3370'),
('3371 : NAD27 / UTM zone 60N', '3371'),
('3372 : NAD83 / UTM zone 59N', '3372'),
('3373 : NAD83 / UTM zone 60N', '3373'),
('3374 : FD54 / UTM zone 29N', '3374'),
('3375 : GDM2000 / Peninsula RSO', '3375'),
('3376 : GDM2000 / East Malaysia BRSO', '3376'),
('3377 : GDM2000 / Johor Grid', '3377'),
('3378 : GDM2000 / Sembilan and Melaka Grid', '3378'),
('3379 : GDM2000 / Pahang Grid', '3379'),
('3380 : GDM2000 / Selangor Grid', '3380'),
('3381 : GDM2000 / Terengganu Grid', '3381'),
('3382 : GDM2000 / Pinang Grid', '3382'),
('3383 : GDM2000 / Kedah and Perlis Grid', '3383'),
('3384 : GDM2000 / Perak Grid', '3384'),
('3385 : GDM2000 / Kelantan Grid', '3385'),
('3386 : KKJ / Finland zone 0', '3386'),
('3387 : KKJ / Finland zone 5', '3387'),
('3388 : Pulkovo 1942 / Caspian Sea Mercator', '3388'),
('3389 : Pulkovo 1942 / 3-degree Gauss-Kruger zone 60', '3389'),
('3390 : Pulkovo 1995 / 3-degree Gauss-Kruger zone 60', '3390'),
('3391 : Karbala 1979 / UTM zone 37N', '3391'),
('3392 : Karbala 1979 / UTM zone 38N', '3392'),
('3393 : Karbala 1979 / UTM zone 39N', '3393'),
('3394 : Nahrwan 1934 / Iraq zone', '3394'),
('3395 : WGS 84 / World Mercator', '3395'),
('3396 : PD/83 / 3-degree Gauss-Kruger zone 3', '3396'),
('3397 : PD/83 / 3-degree Gauss-Kruger zone 4', '3397'),
('3398 : RD/83 / 3-degree Gauss-Kruger zone 4', '3398'),
('3399 : RD/83 / 3-degree Gauss-Kruger zone 5', '3399'),
('3400 : NAD83 / Alberta 10-TM (Forest)', '3400'),
('3401 : NAD83 / Alberta 10-TM (Resource)', '3401'),
('3402 : NAD83(CSRS) / Alberta 10-TM (Forest)', '3402'),
('3403 : NAD83(CSRS) / Alberta 10-TM (Resource)', '3403'),
('3404 : NAD83(HARN) / North Carolina (ftUS)', '3404'),
('3405 : VN-2000 / UTM zone 48N', '3405'),
('3406 : VN-2000 / UTM zone 49N', '3406'),
('3407 : Hong Kong 1963 Grid System', '3407'),
('3408 : NSIDC EASE-Grid North', '3408'),
('3409 : NSIDC EASE-Grid South', '3409'),
('3410 : NSIDC EASE-Grid Global', '3410'),
('3411 : NSIDC Sea Ice Polar Stereographic North', '3411'),
('3412 : NSIDC Sea Ice Polar Stereographic South', '3412'),
('3413 : WGS 84 / NSIDC Sea Ice Polar Stereographic North', '3413'),
('3414 : SVY21 / Singapore TM', '3414'),
('3415 : WGS 72BE / South China Sea Lambert', '3415'),
('3416 : ETRS89 / Austria Lambert', '3416'),
('3417 : NAD83 / Iowa North (ftUS)', '3417'),
('3418 : NAD83 / Iowa South (ftUS)', '3418'),
('3419 : NAD83 / Kansas North (ftUS)', '3419'),
('3420 : NAD83 / Kansas South (ftUS)', '3420'),
('3421 : NAD83 / Nevada East (ftUS)', '3421'),
('3422 : NAD83 / Nevada Central (ftUS)', '3422'),
('3423 : NAD83 / Nevada West (ftUS)', '3423'),
('3424 : NAD83 / New Jersey (ftUS)', '3424'),
('3425 : NAD83(HARN) / Iowa North (ftUS)', '3425'),
('3426 : NAD83(HARN) / Iowa South (ftUS)', '3426'),
('3427 : NAD83(HARN) / Kansas North (ftUS)', '3427'),
('3428 : NAD83(HARN) / Kansas South (ftUS)', '3428'),
('3429 : NAD83(HARN) / Nevada East (ftUS)', '3429'),
('3430 : NAD83(HARN) / Nevada Central (ftUS)', '3430'),
('3431 : NAD83(HARN) / Nevada West (ftUS)', '3431'),
('3432 : NAD83(HARN) / New Jersey (ftUS)', '3432'),
('3433 : NAD83 / Arkansas North (ftUS)', '3433'),
('3434 : NAD83 / Arkansas South (ftUS)', '3434'),
('3435 : NAD83 / Illinois East (ftUS)', '3435'),
('3436 : NAD83 / Illinois West (ftUS)', '3436'),
('3437 : NAD83 / New Hampshire (ftUS)', '3437'),
('3438 : NAD83 / Rhode Island (ftUS)', '3438'),
('3439 : PSD93 / UTM zone 39N', '3439'),
('3440 : PSD93 / UTM zone 40N', '3440'),
('3441 : NAD83(HARN) / Arkansas North (ftUS)', '3441'),
('3442 : NAD83(HARN) / Arkansas South (ftUS)', '3442'),
('3443 : NAD83(HARN) / Illinois East (ftUS)', '3443'),
('3444 : NAD83(HARN) / Illinois West (ftUS)', '3444'),
('3445 : NAD83(HARN) / New Hampshire (ftUS)', '3445'),
('3446 : NAD83(HARN) / Rhode Island (ftUS)', '3446'),
('3447 : ETRS89 / Belgian Lambert 2005', '3447'),
('3448 : JAD2001 / Jamaica Metric Grid', '3448'),
('3449 : JAD2001 / UTM zone 17N', '3449'),
('3450 : JAD2001 / UTM zone 18N', '3450'),
('3451 : NAD83 / Louisiana North (ftUS)', '3451'),
('3452 : NAD83 / Louisiana South (ftUS)', '3452'),
('3453 : NAD83 / Louisiana Offshore (ftUS)', '3453'),
('3454 : NAD83 / South Dakota North (ftUS)', '3454'),
('3455 : NAD83 / South Dakota South (ftUS)', '3455'),
('3456 : NAD83(HARN) / Louisiana North (ftUS)', '3456'),
('3457 : NAD83(HARN) / Louisiana South (ftUS)', '3457'),
('3458 : NAD83(HARN) / South Dakota North (ftUS)', '3458'),
('3459 : NAD83(HARN) / South Dakota South (ftUS)', '3459'),
('3460 : Fiji 1986 / Fiji Map Grid', '3460'),
('3461 : Dabola 1981 / UTM zone 28N', '3461'),
('3462 : Dabola 1981 / UTM zone 29N', '3462'),
('3463 : NAD83 / Maine CS2000 Central', '3463'),
('3464 : NAD83(HARN) / Maine CS2000 Central', '3464'),
('3465 : NAD83(NSRS2007) / Alabama East', '3465'),
('3466 : NAD83(NSRS2007) / Alabama West', '3466'),
('3467 : NAD83(NSRS2007) / Alaska Albers', '3467'),
('3468 : NAD83(NSRS2007) / Alaska zone 1', '3468'),
('3469 : NAD83(NSRS2007) / Alaska zone 2', '3469'),
('3470 : NAD83(NSRS2007) / Alaska zone 3', '3470'),
('3471 : NAD83(NSRS2007) / Alaska zone 4', '3471'),
('3472 : NAD83(NSRS2007) / Alaska zone 5', '3472'),
('3473 : NAD83(NSRS2007) / Alaska zone 6', '3473'),
('3474 : NAD83(NSRS2007) / Alaska zone 7', '3474'),
('3475 : NAD83(NSRS2007) / Alaska zone 8', '3475'),
('3476 : NAD83(NSRS2007) / Alaska zone 9', '3476'),
('3477 : NAD83(NSRS2007) / Alaska zone 10', '3477'),
('3478 : NAD83(NSRS2007) / Arizona Central', '3478'),
('3479 : NAD83(NSRS2007) / Arizona Central (ft)', '3479'),
('3480 : NAD83(NSRS2007) / Arizona East', '3480'),
('3481 : NAD83(NSRS2007) / Arizona East (ft)', '3481'),
('3482 : NAD83(NSRS2007) / Arizona West', '3482'),
('3483 : NAD83(NSRS2007) / Arizona West (ft)', '3483'),
('3484 : NAD83(NSRS2007) / Arkansas North', '3484'),
('3485 : NAD83(NSRS2007) / Arkansas North (ftUS)', '3485'),
('3486 : NAD83(NSRS2007) / Arkansas South', '3486'),
('3487 : NAD83(NSRS2007) / Arkansas South (ftUS)', '3487'),
('3488 : NAD83(NSRS2007) / California Albers', '3488'),
('3489 : NAD83(NSRS2007) / California zone 1', '3489'),
('3490 : NAD83(NSRS2007) / California zone 1 (ftUS)', '3490'),
('3491 : NAD83(NSRS2007) / California zone 2', '3491'),
('3492 : NAD83(NSRS2007) / California zone 2 (ftUS)', '3492'),
('3493 : NAD83(NSRS2007) / California zone 3', '3493'),
('3494 : NAD83(NSRS2007) / California zone 3 (ftUS)', '3494'),
('3495 : NAD83(NSRS2007) / California zone 4', '3495'),
('3496 : NAD83(NSRS2007) / California zone 4 (ftUS)', '3496'),
('3497 : NAD83(NSRS2007) / California zone 5', '3497'),
('3498 : NAD83(NSRS2007) / California zone 5 (ftUS)', '3498'),
('3499 : NAD83(NSRS2007) / California zone 6', '3499'),
('3500 : NAD83(NSRS2007) / California zone 6 (ftUS)', '3500'),
('3501 : NAD83(NSRS2007) / Colorado Central', '3501'),
('3502 : NAD83(NSRS2007) / Colorado Central (ftUS)', '3502'),
('3503 : NAD83(NSRS2007) / Colorado North', '3503'),
('3504 : NAD83(NSRS2007) / Colorado North (ftUS)', '3504'),
('3505 : NAD83(NSRS2007) / Colorado South', '3505'),
('3506 : NAD83(NSRS2007) / Colorado South (ftUS)', '3506'),
('3507 : NAD83(NSRS2007) / Connecticut', '3507'),
('3508 : NAD83(NSRS2007) / Connecticut (ftUS)', '3508'),
('3509 : NAD83(NSRS2007) / Delaware', '3509'),
('3510 : NAD83(NSRS2007) / Delaware (ftUS)', '3510'),
('3511 : NAD83(NSRS2007) / Florida East', '3511'),
('3512 : NAD83(NSRS2007) / Florida East (ftUS)', '3512'),
('3513 : NAD83(NSRS2007) / Florida GDL Albers', '3513'),
('3514 : NAD83(NSRS2007) / Florida North', '3514'),
('3515 : NAD83(NSRS2007) / Florida North (ftUS)', '3515'),
('3516 : NAD83(NSRS2007) / Florida West', '3516'),
('3517 : NAD83(NSRS2007) / Florida West (ftUS)', '3517'),
('3518 : NAD83(NSRS2007) / Georgia East', '3518'),
('3519 : NAD83(NSRS2007) / Georgia East (ftUS)', '3519'),
('3520 : NAD83(NSRS2007) / Georgia West', '3520'),
('3521 : NAD83(NSRS2007) / Georgia West (ftUS)', '3521'),
('3522 : NAD83(NSRS2007) / Idaho Central', '3522'),
('3523 : NAD83(NSRS2007) / Idaho Central (ftUS)', '3523'),
('3524 : NAD83(NSRS2007) / Idaho East', '3524'),
('3525 : NAD83(NSRS2007) / Idaho East (ftUS)', '3525'),
('3526 : NAD83(NSRS2007) / Idaho West', '3526'),
('3527 : NAD83(NSRS2007) / Idaho West (ftUS)', '3527'),
('3528 : NAD83(NSRS2007) / Illinois East', '3528'),
('3529 : NAD83(NSRS2007) / Illinois East (ftUS)', '3529'),
('3530 : NAD83(NSRS2007) / Illinois West', '3530'),
('3531 : NAD83(NSRS2007) / Illinois West (ftUS)', '3531'),
('3532 : NAD83(NSRS2007) / Indiana East', '3532'),
('3533 : NAD83(NSRS2007) / Indiana East (ftUS)', '3533'),
('3534 : NAD83(NSRS2007) / Indiana West', '3534'),
('3535 : NAD83(NSRS2007) / Indiana West (ftUS)', '3535'),
('3536 : NAD83(NSRS2007) / Iowa North', '3536'),
('3537 : NAD83(NSRS2007) / Iowa North (ftUS)', '3537'),
('3538 : NAD83(NSRS2007) / Iowa South', '3538'),
('3539 : NAD83(NSRS2007) / Iowa South (ftUS)', '3539'),
('3540 : NAD83(NSRS2007) / Kansas North', '3540'),
('3541 : NAD83(NSRS2007) / Kansas North (ftUS)', '3541'),
('3542 : NAD83(NSRS2007) / Kansas South', '3542'),
('3543 : NAD83(NSRS2007) / Kansas South (ftUS)', '3543'),
('3544 : NAD83(NSRS2007) / Kentucky North', '3544'),
('3545 : NAD83(NSRS2007) / Kentucky North (ftUS)', '3545'),
('3546 : NAD83(NSRS2007) / Kentucky Single Zone', '3546'),
('3547 : NAD83(NSRS2007) / Kentucky Single Zone (ftUS)', '3547'),
('3548 : NAD83(NSRS2007) / Kentucky South', '3548'),
('3549 : NAD83(NSRS2007) / Kentucky South (ftUS)', '3549'),
('3550 : NAD83(NSRS2007) / Louisiana North', '3550'),
('3551 : NAD83(NSRS2007) / Louisiana North (ftUS)', '3551'),
('3552 : NAD83(NSRS2007) / Louisiana South', '3552'),
('3553 : NAD83(NSRS2007) / Louisiana South (ftUS)', '3553'),
('3554 : NAD83(NSRS2007) / Maine CS2000 Central', '3554'),
('3555 : NAD83(NSRS2007) / Maine CS2000 East', '3555'),
('3556 : NAD83(NSRS2007) / Maine CS2000 West', '3556'),
('3557 : NAD83(NSRS2007) / Maine East', '3557'),
('3558 : NAD83(NSRS2007) / Maine West', '3558'),
('3559 : NAD83(NSRS2007) / Maryland', '3559'),
('3560 : NAD83 / Utah North (ftUS)', '3560'),
('3561 : Old Hawaiian / Hawaii zone 1', '3561'),
('3562 : Old Hawaiian / Hawaii zone 2', '3562'),
('3563 : Old Hawaiian / Hawaii zone 3', '3563'),
('3564 : Old Hawaiian / Hawaii zone 4', '3564'),
('3565 : Old Hawaiian / Hawaii zone 5', '3565'),
('3566 : NAD83 / Utah Central (ftUS)', '3566'),
('3567 : NAD83 / Utah South (ftUS)', '3567'),
('3568 : NAD83(HARN) / Utah North (ftUS)', '3568'),
('3569 : NAD83(HARN) / Utah Central (ftUS)', '3569'),
('3570 : NAD83(HARN) / Utah South (ftUS)', '3570'),
('3571 : WGS 84 / North Pole LAEA Bering Sea', '3571'),
('3572 : WGS 84 / North Pole LAEA Alaska', '3572'),
('3573 : WGS 84 / North Pole LAEA Canada', '3573'),
('3574 : WGS 84 / North Pole LAEA Atlantic', '3574'),
('3575 : WGS 84 / North Pole LAEA Europe', '3575'),
('3576 : WGS 84 / North Pole LAEA Russia', '3576'),
('3577 : GDA94 / Australian Albers', '3577'),
('3578 : NAD83 / Yukon Albers', '3578'),
('3579 : NAD83(CSRS) / Yukon Albers', '3579'),
('3580 : NAD83 / NWT Lambert', '3580'),
('3581 : NAD83(CSRS) / NWT Lambert', '3581'),
('3582 : NAD83(NSRS2007) / Maryland (ftUS)', '3582'),
('3583 : NAD83(NSRS2007) / Massachusetts Island', '3583'),
('3584 : NAD83(NSRS2007) / Massachusetts Island (ftUS)', '3584'),
('3585 : NAD83(NSRS2007) / Massachusetts Mainland', '3585'),
('3586 : NAD83(NSRS2007) / Massachusetts Mainland (ftUS)', '3586'),
('3587 : NAD83(NSRS2007) / Michigan Central', '3587'),
('3588 : NAD83(NSRS2007) / Michigan Central (ft)', '3588'),
('3589 : NAD83(NSRS2007) / Michigan North', '3589'),
('3590 : NAD83(NSRS2007) / Michigan North (ft)', '3590'),
('3591 : NAD83(NSRS2007) / Michigan Oblique Mercator', '3591'),
('3592 : NAD83(NSRS2007) / Michigan South', '3592'),
('3593 : NAD83(NSRS2007) / Michigan South (ft)', '3593'),
('3594 : NAD83(NSRS2007) / Minnesota Central', '3594'),
('3595 : NAD83(NSRS2007) / Minnesota North', '3595'),
('3596 : NAD83(NSRS2007) / Minnesota South', '3596'),
('3597 : NAD83(NSRS2007) / Mississippi East', '3597'),
('3598 : NAD83(NSRS2007) / Mississippi East (ftUS)', '3598'),
('3599 : NAD83(NSRS2007) / Mississippi West', '3599'),
('3600 : NAD83(NSRS2007) / Mississippi West (ftUS)', '3600'),
('3601 : NAD83(NSRS2007) / Missouri Central', '3601'),
('3602 : NAD83(NSRS2007) / Missouri East', '3602'),
('3603 : NAD83(NSRS2007) / Missouri West', '3603'),
('3604 : NAD83(NSRS2007) / Montana', '3604'),
('3605 : NAD83(NSRS2007) / Montana (ft)', '3605'),
('3606 : NAD83(NSRS2007) / Nebraska', '3606'),
('3607 : NAD83(NSRS2007) / Nevada Central', '3607'),
('3608 : NAD83(NSRS2007) / Nevada Central (ftUS)', '3608'),
('3609 : NAD83(NSRS2007) / Nevada East', '3609'),
('3610 : NAD83(NSRS2007) / Nevada East (ftUS)', '3610'),
('3611 : NAD83(NSRS2007) / Nevada West', '3611'),
('3612 : NAD83(NSRS2007) / Nevada West (ftUS)', '3612'),
('3613 : NAD83(NSRS2007) / New Hampshire', '3613'),
('3614 : NAD83(NSRS2007) / New Hampshire (ftUS)', '3614'),
('3615 : NAD83(NSRS2007) / New Jersey', '3615'),
('3616 : NAD83(NSRS2007) / New Jersey (ftUS)', '3616'),
('3617 : NAD83(NSRS2007) / New Mexico Central', '3617'),
('3618 : NAD83(NSRS2007) / New Mexico Central (ftUS)', '3618'),
('3619 : NAD83(NSRS2007) / New Mexico East', '3619'),
('3620 : NAD83(NSRS2007) / New Mexico East (ftUS)', '3620'),
('3621 : NAD83(NSRS2007) / New Mexico West', '3621'),
('3622 : NAD83(NSRS2007) / New Mexico West (ftUS)', '3622'),
('3623 : NAD83(NSRS2007) / New York Central', '3623'),
('3624 : NAD83(NSRS2007) / New York Central (ftUS)', '3624'),
('3625 : NAD83(NSRS2007) / New York East', '3625'),
('3626 : NAD83(NSRS2007) / New York East (ftUS)', '3626'),
('3627 : NAD83(NSRS2007) / New York Long Island', '3627'),
('3628 : NAD83(NSRS2007) / New York Long Island (ftUS)', '3628'),
('3629 : NAD83(NSRS2007) / New York West', '3629'),
('3630 : NAD83(NSRS2007) / New York West (ftUS)', '3630'),
('3631 : NAD83(NSRS2007) / North Carolina', '3631'),
('3632 : NAD83(NSRS2007) / North Carolina (ftUS)', '3632'),
('3633 : NAD83(NSRS2007) / North Dakota North', '3633'),
('3634 : NAD83(NSRS2007) / North Dakota North (ft)', '3634'),
('3635 : NAD83(NSRS2007) / North Dakota South', '3635'),
('3636 : NAD83(NSRS2007) / North Dakota South (ft)', '3636'),
('3637 : NAD83(NSRS2007) / Ohio North', '3637'),
('3638 : NAD83(NSRS2007) / Ohio South', '3638'),
('3639 : NAD83(NSRS2007) / Oklahoma North', '3639'),
('3640 : NAD83(NSRS2007) / Oklahoma North (ftUS)', '3640'),
('3641 : NAD83(NSRS2007) / Oklahoma South', '3641'),
('3642 : NAD83(NSRS2007) / Oklahoma South (ftUS)', '3642'),
('3643 : NAD83(NSRS2007) / Oregon LCC (m)', '3643'),
('3644 : NAD83(NSRS2007) / Oregon GIC Lambert (ft)', '3644'),
('3645 : NAD83(NSRS2007) / Oregon North', '3645'),
('3646 : NAD83(NSRS2007) / Oregon North (ft)', '3646'),
('3647 : NAD83(NSRS2007) / Oregon South', '3647'),
('3648 : NAD83(NSRS2007) / Oregon South (ft)', '3648'),
('3649 : NAD83(NSRS2007) / Pennsylvania North', '3649'),
('3650 : NAD83(NSRS2007) / Pennsylvania North (ftUS)', '3650'),
('3651 : NAD83(NSRS2007) / Pennsylvania South', '3651'),
('3652 : NAD83(NSRS2007) / Pennsylvania South (ftUS)', '3652'),
('3653 : NAD83(NSRS2007) / Rhode Island', '3653'),
('3654 : NAD83(NSRS2007) / Rhode Island (ftUS)', '3654'),
('3655 : NAD83(NSRS2007) / South Carolina', '3655'),
('3656 : NAD83(NSRS2007) / South Carolina (ft)', '3656'),
('3657 : NAD83(NSRS2007) / South Dakota North', '3657'),
('3658 : NAD83(NSRS2007) / South Dakota North (ftUS)', '3658'),
('3659 : NAD83(NSRS2007) / South Dakota South', '3659'),
('3660 : NAD83(NSRS2007) / South Dakota South (ftUS)', '3660'),
('3661 : NAD83(NSRS2007) / Tennessee', '3661'),
('3662 : NAD83(NSRS2007) / Tennessee (ftUS)', '3662'),
('3663 : NAD83(NSRS2007) / Texas Central', '3663'),
('3664 : NAD83(NSRS2007) / Texas Central (ftUS)', '3664'),
('3665 : NAD83(NSRS2007) / Texas Centric Albers Equal Area', '3665'),
('3666 : NAD83(NSRS2007) / Texas Centric Lambert Conformal', '3666'),
('3667 : NAD83(NSRS2007) / Texas North', '3667'),
('3668 : NAD83(NSRS2007) / Texas North (ftUS)', '3668'),
('3669 : NAD83(NSRS2007) / Texas North Central', '3669'),
('3670 : NAD83(NSRS2007) / Texas North Central (ftUS)', '3670'),
('3671 : NAD83(NSRS2007) / Texas South', '3671'),
('3672 : NAD83(NSRS2007) / Texas South (ftUS)', '3672'),
('3673 : NAD83(NSRS2007) / Texas South Central', '3673'),
('3674 : NAD83(NSRS2007) / Texas South Central (ftUS)', '3674'),
('3675 : NAD83(NSRS2007) / Utah Central', '3675'),
('3676 : NAD83(NSRS2007) / Utah Central (ft)', '3676'),
('3677 : NAD83(NSRS2007) / Utah Central (ftUS)', '3677'),
('3678 : NAD83(NSRS2007) / Utah North', '3678'),
('3679 : NAD83(NSRS2007) / Utah North (ft)', '3679'),
('3680 : NAD83(NSRS2007) / Utah North (ftUS)', '3680'),
('3681 : NAD83(NSRS2007) / Utah South', '3681'),
('3682 : NAD83(NSRS2007) / Utah South (ft)', '3682'),
('3683 : NAD83(NSRS2007) / Utah South (ftUS)', '3683'),
('3684 : NAD83(NSRS2007) / Vermont', '3684'),
('3685 : NAD83(NSRS2007) / Virginia North', '3685'),
('3686 : NAD83(NSRS2007) / Virginia North (ftUS)', '3686'),
('3687 : NAD83(NSRS2007) / Virginia South', '3687'),
('3688 : NAD83(NSRS2007) / Virginia South (ftUS)', '3688'),
('3689 : NAD83(NSRS2007) / Washington North', '3689'),
('3690 : NAD83(NSRS2007) / Washington North (ftUS)', '3690'),
('3691 : NAD83(NSRS2007) / Washington South', '3691'),
('3692 : NAD83(NSRS2007) / Washington South (ftUS)', '3692'),
('3693 : NAD83(NSRS2007) / West Virginia North', '3693'),
('3694 : NAD83(NSRS2007) / West Virginia South', '3694'),
('3695 : NAD83(NSRS2007) / Wisconsin Central', '3695'),
('3696 : NAD83(NSRS2007) / Wisconsin Central (ftUS)', '3696'),
('3697 : NAD83(NSRS2007) / Wisconsin North', '3697'),
('3698 : NAD83(NSRS2007) / Wisconsin North (ftUS)', '3698'),
('3699 : NAD83(NSRS2007) / Wisconsin South', '3699'),
('3700 : NAD83(NSRS2007) / Wisconsin South (ftUS)', '3700'),
('3701 : NAD83(NSRS2007) / Wisconsin Transverse Mercator', '3701'),
('3702 : NAD83(NSRS2007) / Wyoming East', '3702'),
('3703 : NAD83(NSRS2007) / Wyoming East Central', '3703'),
('3704 : NAD83(NSRS2007) / Wyoming West Central', '3704'),
('3705 : NAD83(NSRS2007) / Wyoming West', '3705'),
('3706 : NAD83(NSRS2007) / UTM zone 59N', '3706'),
('3707 : NAD83(NSRS2007) / UTM zone 60N', '3707'),
('3708 : NAD83(NSRS2007) / UTM zone 1N', '3708'),
('3709 : NAD83(NSRS2007) / UTM zone 2N', '3709'),
('3710 : NAD83(NSRS2007) / UTM zone 3N', '3710'),
('3711 : NAD83(NSRS2007) / UTM zone 4N', '3711'),
('3712 : NAD83(NSRS2007) / UTM zone 5N', '3712'),
('3713 : NAD83(NSRS2007) / UTM zone 6N', '3713'),
('3714 : NAD83(NSRS2007) / UTM zone 7N', '3714'),
('3715 : NAD83(NSRS2007) / UTM zone 8N', '3715'),
('3716 : NAD83(NSRS2007) / UTM zone 9N', '3716'),
('3717 : NAD83(NSRS2007) / UTM zone 10N', '3717'),
('3718 : NAD83(NSRS2007) / UTM zone 11N', '3718'),
('3719 : NAD83(NSRS2007) / UTM zone 12N', '3719'),
('3720 : NAD83(NSRS2007) / UTM zone 13N', '3720'),
('3721 : NAD83(NSRS2007) / UTM zone 14N', '3721'),
('3722 : NAD83(NSRS2007) / UTM zone 15N', '3722'),
('3723 : NAD83(NSRS2007) / UTM zone 16N', '3723'),
('3724 : NAD83(NSRS2007) / UTM zone 17N', '3724'),
('3725 : NAD83(NSRS2007) / UTM zone 18N', '3725'),
('3726 : NAD83(NSRS2007) / UTM zone 19N', '3726'),
('3727 : Reunion 1947 / TM Reunion', '3727'),
('3728 : NAD83(NSRS2007) / Ohio North (ftUS)', '3728'),
('3729 : NAD83(NSRS2007) / Ohio South (ftUS)', '3729'),
('3730 : NAD83(NSRS2007) / Wyoming East (ftUS)', '3730'),
('3731 : NAD83(NSRS2007) / Wyoming East Central (ftUS)', '3731'),
('3732 : NAD83(NSRS2007) / Wyoming West Central (ftUS)', '3732'),
('3733 : NAD83(NSRS2007) / Wyoming West (ftUS)', '3733'),
('3734 : NAD83 / Ohio North (ftUS)', '3734'),
('3735 : NAD83 / Ohio South (ftUS)', '3735'),
('3736 : NAD83 / Wyoming East (ftUS)', '3736'),
('3737 : NAD83 / Wyoming East Central (ftUS)', '3737'),
('3738 : NAD83 / Wyoming West Central (ftUS)', '3738'),
('3739 : NAD83 / Wyoming West (ftUS)', '3739'),
('3740 : NAD83(HARN) / UTM zone 10N', '3740'),
('3741 : NAD83(HARN) / UTM zone 11N', '3741'),
('3742 : NAD83(HARN) / UTM zone 12N', '3742'),
('3743 : NAD83(HARN) / UTM zone 13N', '3743'),
('3744 : NAD83(HARN) / UTM zone 14N', '3744'),
('3745 : NAD83(HARN) / UTM zone 15N', '3745'),
('3746 : NAD83(HARN) / UTM zone 16N', '3746'),
('3747 : NAD83(HARN) / UTM zone 17N', '3747'),
('3748 : NAD83(HARN) / UTM zone 18N', '3748'),
('3749 : NAD83(HARN) / UTM zone 19N', '3749'),
('3750 : NAD83(HARN) / UTM zone 4N', '3750'),
('3751 : NAD83(HARN) / UTM zone 5N', '3751'),
('3752 : WGS 84 / Mercator 41', '3752'),
('3753 : NAD83(HARN) / Ohio North (ftUS)', '3753'),
('3754 : NAD83(HARN) / Ohio South (ftUS)', '3754'),
('3755 : NAD83(HARN) / Wyoming East (ftUS)', '3755'),
('3756 : NAD83(HARN) / Wyoming East Central (ftUS)', '3756'),
('3757 : NAD83(HARN) / Wyoming West Central (ftUS)', '3757'),
('3758 : NAD83(HARN) / Wyoming West (ftUS)', '3758'),
('3759 : NAD83 / Hawaii zone 3 (ftUS)', '3759'),
('3760 : NAD83(HARN) / Hawaii zone 3 (ftUS)', '3760'),
('3761 : NAD83(CSRS) / UTM zone 22N', '3761'),
('3762 : WGS 84 / South Georgia Lambert', '3762'),
('3763 : ETRS89 / Portugal TM06', '3763'),
('3764 : NZGD2000 / Chatham Island Circuit 2000', '3764'),
('3765 : HTRS96 / Croatia TM', '3765'),
('3766 : HTRS96 / Croatia LCC', '3766'),
('3767 : HTRS96 / UTM zone 33N', '3767'),
('3768 : HTRS96 / UTM zone 34N', '3768'),
('3769 : Bermuda 1957 / UTM zone 20N', '3769'),
('3770 : BDA2000 / Bermuda 2000 National Grid', '3770'),
('3771 : NAD27 / Alberta 3TM ref merid 111 W', '3771'),
('3772 : NAD27 / Alberta 3TM ref merid 114 W', '3772'),
('3773 : NAD27 / Alberta 3TM ref merid 117 W', '3773'),
('3774 : NAD27 / Alberta 3TM ref merid 120 W', '3774'),
('3775 : NAD83 / Alberta 3TM ref merid 111 W', '3775'),
('3776 : NAD83 / Alberta 3TM ref merid 114 W', '3776'),
('3777 : NAD83 / Alberta 3TM ref merid 117 W', '3777'),
('3778 : NAD83 / Alberta 3TM ref merid 120 W', '3778'),
('3779 : NAD83(CSRS) / Alberta 3TM ref merid 111 W', '3779'),
('3780 : NAD83(CSRS) / Alberta 3TM ref merid 114 W', '3780'),
('3781 : NAD83(CSRS) / Alberta 3TM ref merid 117 W', '3781'),
('3782 : NAD83(CSRS) / Alberta 3TM ref merid 120 W', '3782'),
('3783 : Pitcairn 2006 / Pitcairn TM 2006', '3783'),
('3784 : Pitcairn 1967 / UTM zone 9S', '3784'),
('3785 : Popular Visualisation CRS / Mercator', '3785'),
('3786 : World Equidistant Cylindrical (Sphere)', '3786'),
('3787 : MGI / Slovene National Grid', '3787'),
('3788 : NZGD2000 / Auckland Islands TM 2000', '3788'),
('3789 : NZGD2000 / Campbell Island TM 2000', '3789'),
('3790 : NZGD2000 / Antipodes Islands TM 2000', '3790'),
('3791 : NZGD2000 / Raoul Island TM 2000', '3791'),
('3793 : NZGD2000 / Chatham Islands TM 2000', '3793'),
('3794 : Slovenia 1996 / Slovene National Grid', '3794'),
('3795 : NAD27 / Cuba Norte', '3795'),
('3796 : NAD27 / Cuba Sur', '3796'),
('3797 : NAD27 / MTQ Lambert', '3797'),
('3798 : NAD83 / MTQ Lambert', '3798'),
('3799 : NAD83(CSRS) / MTQ Lambert', '3799'),
('3800 : NAD27 / Alberta 3TM ref merid 120 W', '3800'),
('3801 : NAD83 / Alberta 3TM ref merid 120 W', '3801'),
('3802 : NAD83(CSRS) / Alberta 3TM ref merid 120 W', '3802'),
('3812 : ETRS89 / Belgian Lambert 2008', '3812'),
('3814 : NAD83 / Mississippi TM', '3814'),
('3815 : NAD83(HARN) / Mississippi TM', '3815'),
('3816 : NAD83(NSRS2007) / Mississippi TM', '3816'),
('3825 : TWD97 / TM2 zone 119', '3825'),
('3826 : TWD97 / TM2 zone 121', '3826'),
('3827 : TWD67 / TM2 zone 119', '3827'),
('3828 : TWD67 / TM2 zone 121', '3828'),
('3829 : Hu Tzu Shan 1950 / UTM zone 51N', '3829'),
('3832 : WGS 84 / PDC Mercator', '3832'),
('3833 : Pulkovo 1942(58) / Gauss-Kruger zone 2', '3833'),
('3834 : Pulkovo 1942(83) / Gauss-Kruger zone 2', '3834'),
('3835 : Pulkovo 1942(83) / Gauss-Kruger zone 3', '3835'),
('3836 : Pulkovo 1942(83) / Gauss-Kruger zone 4', '3836'),
('3837 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 3', '3837'),
('3838 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 4', '3838'),
('3839 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 9', '3839'),
('3840 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 10', '3840'),
('3841 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 6', '3841'),
('3842 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 7', '3842'),
('3843 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 8', '3843'),
('3844 : Pulkovo 1942(58) / Stereo70', '3844'),
('3845 : SWEREF99 / RT90 7.5 gon V emulation', '3845'),
('3846 : SWEREF99 / RT90 5 gon V emulation', '3846'),
('3847 : SWEREF99 / RT90 2.5 gon V emulation', '3847'),
('3848 : SWEREF99 / RT90 0 gon emulation', '3848'),
('3849 : SWEREF99 / RT90 2.5 gon O emulation', '3849'),
('3850 : SWEREF99 / RT90 5 gon O emulation', '3850'),
('3851 : NZGD2000 / NZCS2000', '3851'),
('3852 : RSRGD2000 / DGLC2000', '3852'),
('3854 : County ST74', '3854'),
('3857 : WGS 84 / Pseudo-Mercator', '3857'),
('3873 : ETRS89 / GK19FIN', '3873'),
('3874 : ETRS89 / GK20FIN', '3874'),
('3875 : ETRS89 / GK21FIN', '3875'),
('3876 : ETRS89 / GK22FIN', '3876'),
('3877 : ETRS89 / GK23FIN', '3877'),
('3878 : ETRS89 / GK24FIN', '3878'),
('3879 : ETRS89 / GK25FIN', '3879'),
('3880 : ETRS89 / GK26FIN', '3880'),
('3881 : ETRS89 / GK27FIN', '3881'),
('3882 : ETRS89 / GK28FIN', '3882'),
('3883 : ETRS89 / GK29FIN', '3883'),
('3884 : ETRS89 / GK30FIN', '3884'),
('3885 : ETRS89 / GK31FIN', '3885'),
('3890 : IGRS / UTM zone 37N', '3890'),
('3891 : IGRS / UTM zone 38N', '3891'),
('3892 : IGRS / UTM zone 39N', '3892'),
('3893 : ED50 / Iraq National Grid', '3893'),
('3907 : MGI 1901 / Balkans zone 5', '3907'),
('3908 : MGI 1901 / Balkans zone 6', '3908'),
('3909 : MGI 1901 / Balkans zone 7', '3909'),
('3910 : MGI 1901 / Balkans zone 8', '3910'),
('3911 : MGI 1901 / Slovenia Grid', '3911'),
('3912 : MGI 1901 / Slovene National Grid', '3912'),
('3920 : Puerto Rico / UTM zone 20N', '3920'),
('3942 : RGF93 / CC42', '3942'),
('3943 : RGF93 / CC43', '3943'),
('3944 : RGF93 / CC44', '3944'),
('3945 : RGF93 / CC45', '3945'),
('3946 : RGF93 / CC46', '3946'),
('3947 : RGF93 / CC47', '3947'),
('3948 : RGF93 / CC48', '3948'),
('3949 : RGF93 / CC49', '3949'),
('3950 : RGF93 / CC50', '3950'),
('3968 : NAD83 / Virginia Lambert', '3968'),
('3969 : NAD83(HARN) / Virginia Lambert', '3969'),
('3970 : NAD83(NSRS2007) / Virginia Lambert', '3970'),
('3973 : WGS 84 / NSIDC EASE-Grid North', '3973'),
('3974 : WGS 84 / NSIDC EASE-Grid South', '3974'),
('3975 : WGS 84 / NSIDC EASE-Grid Global', '3975'),
('3976 : WGS 84 / NSIDC Sea Ice Polar Stereographic South', '3976'),
('3978 : NAD83 / Canada Atlas Lambert', '3978'),
('3979 : NAD83(CSRS) / Canada Atlas Lambert', '3979'),
('3985 : Katanga 1955 / Katanga Lambert', '3985'),
('3986 : Katanga 1955 / Katanga Gauss zone A', '3986'),
('3987 : Katanga 1955 / Katanga Gauss zone B', '3987'),
('3988 : Katanga 1955 / Katanga Gauss zone C', '3988'),
('3989 : Katanga 1955 / Katanga Gauss zone D', '3989'),
('3991 : Puerto Rico State Plane CS of 1927', '3991'),
('3992 : Puerto Rico / St. Croix', '3992'),
('3993 : Guam 1963 / Guam SPCS', '3993'),
('3994 : WGS 84 / Mercator 41', '3994'),
('3995 : WGS 84 / Arctic Polar Stereographic', '3995'),
('3996 : WGS 84 / IBCAO Polar Stereographic', '3996'),
('3997 : WGS 84 / Dubai Local TM', '3997'),
('4026 : MOLDREF99 / Moldova TM', '4026'),
('4037 : WGS 84 / TMzn35N', '4037'),
('4038 : WGS 84 / TMzn36N', '4038'),
('4048 : RGRDC 2005 / Congo TM zone 12', '4048'),
('4049 : RGRDC 2005 / Congo TM zone 14', '4049'),
('4050 : RGRDC 2005 / Congo TM zone 16', '4050'),
('4051 : RGRDC 2005 / Congo TM zone 18', '4051'),
('4056 : RGRDC 2005 / Congo TM zone 20', '4056'),
('4057 : RGRDC 2005 / Congo TM zone 22', '4057'),
('4058 : RGRDC 2005 / Congo TM zone 24', '4058'),
('4059 : RGRDC 2005 / Congo TM zone 26', '4059'),
('4060 : RGRDC 2005 / Congo TM zone 28', '4060'),
('4061 : RGRDC 2005 / UTM zone 33S', '4061'),
('4062 : RGRDC 2005 / UTM zone 34S', '4062'),
('4063 : RGRDC 2005 / UTM zone 35S', '4063'),
('4071 : Chua / UTM zone 23S', '4071'),
('4082 : REGCAN95 / UTM zone 27N', '4082'),
('4083 : REGCAN95 / UTM zone 28N', '4083'),
('4087 : WGS 84 / World Equidistant Cylindrical', '4087'),
('4088 : World Equidistant Cylindrical (Sphere)', '4088'),
('4093 : ETRS89 / DKTM1', '4093'),
('4094 : ETRS89 / DKTM2', '4094'),
('4095 : ETRS89 / DKTM3', '4095'),
('4096 : ETRS89 / DKTM4', '4096'),
('4217 : NAD83 / BLM 59N (ftUS)', '4217'),
('4390 : Kertau 1968 / Johor Grid', '4390'),
('4391 : Kertau 1968 / Sembilan and Melaka Grid', '4391'),
('4392 : Kertau 1968 / Pahang Grid', '4392'),
('4393 : Kertau 1968 / Selangor Grid', '4393'),
('4394 : Kertau 1968 / Terengganu Grid', '4394'),
('4395 : Kertau 1968 / Pinang Grid', '4395'),
('4396 : Kertau 1968 / Kedah and Perlis Grid', '4396'),
('4397 : Kertau 1968 / Perak Revised Grid', '4397'),
('4398 : Kertau 1968 / Kelantan Grid', '4398'),
('4399 : NAD27 / BLM 59N (ftUS)', '4399'),
('4400 : NAD27 / BLM 60N (ftUS)', '4400'),
('4401 : NAD27 / BLM 1N (ftUS)', '4401'),
('4402 : NAD27 / BLM 2N (ftUS)', '4402'),
('4403 : NAD27 / BLM 3N (ftUS)', '4403'),
('4404 : NAD27 / BLM 4N (ftUS)', '4404'),
('4405 : NAD27 / BLM 5N (ftUS)', '4405'),
('4406 : NAD27 / BLM 6N (ftUS)', '4406'),
('4407 : NAD27 / BLM 7N (ftUS)', '4407'),
('4408 : NAD27 / BLM 8N (ftUS)', '4408'),
('4409 : NAD27 / BLM 9N (ftUS)', '4409'),
('4410 : NAD27 / BLM 10N (ftUS)', '4410'),
('4411 : NAD27 / BLM 11N (ftUS)', '4411'),
('4412 : NAD27 / BLM 12N (ftUS)', '4412'),
('4413 : NAD27 / BLM 13N (ftUS)', '4413'),
('4414 : NAD83(HARN) / Guam Map Grid', '4414'),
('4415 : Katanga 1955 / Katanga Lambert', '4415'),
('4417 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 7', '4417'),
('4418 : NAD27 / BLM 18N (ftUS)', '4418'),
('4419 : NAD27 / BLM 19N (ftUS)', '4419'),
('4420 : NAD83 / BLM 60N (ftUS)', '4420'),
('4421 : NAD83 / BLM 1N (ftUS)', '4421'),
('4422 : NAD83 / BLM 2N (ftUS)', '4422'),
('4423 : NAD83 / BLM 3N (ftUS)', '4423'),
('4424 : NAD83 / BLM 4N (ftUS)', '4424'),
('4425 : NAD83 / BLM 5N (ftUS)', '4425'),
('4426 : NAD83 / BLM 6N (ftUS)', '4426'),
('4427 : NAD83 / BLM 7N (ftUS)', '4427'),
('4428 : NAD83 / BLM 8N (ftUS)', '4428'),
('4429 : NAD83 / BLM 9N (ftUS)', '4429'),
('4430 : NAD83 / BLM 10N (ftUS)', '4430'),
('4431 : NAD83 / BLM 11N (ftUS)', '4431'),
('4432 : NAD83 / BLM 12N (ftUS)', '4432'),
('4433 : NAD83 / BLM 13N (ftUS)', '4433'),
('4434 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 8', '4434'),
('4437 : NAD83(NSRS2007) / Puerto Rico and Virgin Is.', '4437'),
('4438 : NAD83 / BLM 18N (ftUS)', '4438'),
('4439 : NAD83 / BLM 19N (ftUS)', '4439'),
('4455 : NAD27 / Pennsylvania South', '4455'),
('4456 : NAD27 / New York Long Island', '4456'),
('4457 : NAD83 / South Dakota North (ftUS)', '4457'),
('4462 : WGS 84 / Australian Centre for Remote Sensing Lambert', '4462'),
('4467 : RGSPM06 / UTM zone 21N', '4467'),
('4471 : RGM04 / UTM zone 38S', '4471'),
('4474 : Cadastre 1997 / UTM zone 38S', '4474'),
('4484 : Mexico ITRF92 / UTM zone 11N', '4484'),
('4485 : Mexico ITRF92 / UTM zone 12N', '4485'),
('4486 : Mexico ITRF92 / UTM zone 13N', '4486'),
('4487 : Mexico ITRF92 / UTM zone 14N', '4487'),
('4488 : Mexico ITRF92 / UTM zone 15N', '4488'),
('4489 : Mexico ITRF92 / UTM zone 16N', '4489'),
('4491 : CGCS2000 / Gauss-Kruger zone 13', '4491'),
('4492 : CGCS2000 / Gauss-Kruger zone 14', '4492'),
('4493 : CGCS2000 / Gauss-Kruger zone 15', '4493'),
('4494 : CGCS2000 / Gauss-Kruger zone 16', '4494'),
('4495 : CGCS2000 / Gauss-Kruger zone 17', '4495'),
('4496 : CGCS2000 / Gauss-Kruger zone 18', '4496'),
('4497 : CGCS2000 / Gauss-Kruger zone 19', '4497'),
('4498 : CGCS2000 / Gauss-Kruger zone 20', '4498'),
('4499 : CGCS2000 / Gauss-Kruger zone 21', '4499'),
('4500 : CGCS2000 / Gauss-Kruger zone 22', '4500'),
('4501 : CGCS2000 / Gauss-Kruger zone 23', '4501'),
('4502 : CGCS2000 / Gauss-Kruger CM 75E', '4502'),
('4503 : CGCS2000 / Gauss-Kruger CM 81E', '4503'),
('4504 : CGCS2000 / Gauss-Kruger CM 87E', '4504'),
('4505 : CGCS2000 / Gauss-Kruger CM 93E', '4505'),
('4506 : CGCS2000 / Gauss-Kruger CM 99E', '4506'),
('4507 : CGCS2000 / Gauss-Kruger CM 105E', '4507'),
('4508 : CGCS2000 / Gauss-Kruger CM 111E', '4508'),
('4509 : CGCS2000 / Gauss-Kruger CM 117E', '4509'),
('4510 : CGCS2000 / Gauss-Kruger CM 123E', '4510'),
('4511 : CGCS2000 / Gauss-Kruger CM 129E', '4511'),
('4512 : CGCS2000 / Gauss-Kruger CM 135E', '4512'),
('4513 : CGCS2000 / 3-degree Gauss-Kruger zone 25', '4513'),
('4514 : CGCS2000 / 3-degree Gauss-Kruger zone 26', '4514'),
('4515 : CGCS2000 / 3-degree Gauss-Kruger zone 27', '4515'),
('4516 : CGCS2000 / 3-degree Gauss-Kruger zone 28', '4516'),
('4517 : CGCS2000 / 3-degree Gauss-Kruger zone 29', '4517'),
('4518 : CGCS2000 / 3-degree Gauss-Kruger zone 30', '4518'),
('4519 : CGCS2000 / 3-degree Gauss-Kruger zone 31', '4519'),
('4520 : CGCS2000 / 3-degree Gauss-Kruger zone 32', '4520'),
('4521 : CGCS2000 / 3-degree Gauss-Kruger zone 33', '4521'),
('4522 : CGCS2000 / 3-degree Gauss-Kruger zone 34', '4522'),
('4523 : CGCS2000 / 3-degree Gauss-Kruger zone 35', '4523'),
('4524 : CGCS2000 / 3-degree Gauss-Kruger zone 36', '4524'),
('4525 : CGCS2000 / 3-degree Gauss-Kruger zone 37', '4525'),
('4526 : CGCS2000 / 3-degree Gauss-Kruger zone 38', '4526'),
('4527 : CGCS2000 / 3-degree Gauss-Kruger zone 39', '4527'),
('4528 : CGCS2000 / 3-degree Gauss-Kruger zone 40', '4528'),
('4529 : CGCS2000 / 3-degree Gauss-Kruger zone 41', '4529'),
('4530 : CGCS2000 / 3-degree Gauss-Kruger zone 42', '4530'),
('4531 : CGCS2000 / 3-degree Gauss-Kruger zone 43', '4531'),
('4532 : CGCS2000 / 3-degree Gauss-Kruger zone 44', '4532'),
('4533 : CGCS2000 / 3-degree Gauss-Kruger zone 45', '4533'),
('4534 : CGCS2000 / 3-degree Gauss-Kruger CM 75E', '4534'),
('4535 : CGCS2000 / 3-degree Gauss-Kruger CM 78E', '4535'),
('4536 : CGCS2000 / 3-degree Gauss-Kruger CM 81E', '4536'),
('4537 : CGCS2000 / 3-degree Gauss-Kruger CM 84E', '4537'),
('4538 : CGCS2000 / 3-degree Gauss-Kruger CM 87E', '4538'),
('4539 : CGCS2000 / 3-degree Gauss-Kruger CM 90E', '4539'),
('4540 : CGCS2000 / 3-degree Gauss-Kruger CM 93E', '4540'),
('4541 : CGCS2000 / 3-degree Gauss-Kruger CM 96E', '4541'),
('4542 : CGCS2000 / 3-degree Gauss-Kruger CM 99E', '4542'),
('4543 : CGCS2000 / 3-degree Gauss-Kruger CM 102E', '4543'),
('4544 : CGCS2000 / 3-degree Gauss-Kruger CM 105E', '4544'),
('4545 : CGCS2000 / 3-degree Gauss-Kruger CM 108E', '4545'),
('4546 : CGCS2000 / 3-degree Gauss-Kruger CM 111E', '4546'),
('4547 : CGCS2000 / 3-degree Gauss-Kruger CM 114E', '4547'),
('4548 : CGCS2000 / 3-degree Gauss-Kruger CM 117E', '4548'),
('4549 : CGCS2000 / 3-degree Gauss-Kruger CM 120E', '4549'),
('4550 : CGCS2000 / 3-degree Gauss-Kruger CM 123E', '4550'),
('4551 : CGCS2000 / 3-degree Gauss-Kruger CM 126E', '4551'),
('4552 : CGCS2000 / 3-degree Gauss-Kruger CM 129E', '4552'),
('4553 : CGCS2000 / 3-degree Gauss-Kruger CM 132E', '4553'),
('4554 : CGCS2000 / 3-degree Gauss-Kruger CM 135E', '4554'),
('4559 : RRAF 1991 / UTM zone 20N', '4559'),
('4568 : New Beijing / Gauss-Kruger zone 13', '4568'),
('4569 : New Beijing / Gauss-Kruger zone 14', '4569'),
('4570 : New Beijing / Gauss-Kruger zone 15', '4570'),
('4571 : New Beijing / Gauss-Kruger zone 16', '4571'),
('4572 : New Beijing / Gauss-Kruger zone 17', '4572'),
('4573 : New Beijing / Gauss-Kruger zone 18', '4573'),
('4574 : New Beijing / Gauss-Kruger zone 19', '4574'),
('4575 : New Beijing / Gauss-Kruger zone 20', '4575'),
('4576 : New Beijing / Gauss-Kruger zone 21', '4576'),
('4577 : New Beijing / Gauss-Kruger zone 22', '4577'),
('4578 : New Beijing / Gauss-Kruger zone 23', '4578'),
('4579 : New Beijing / Gauss-Kruger CM 75E', '4579'),
('4580 : New Beijing / Gauss-Kruger CM 81E', '4580'),
('4581 : New Beijing / Gauss-Kruger CM 87E', '4581'),
('4582 : New Beijing / Gauss-Kruger CM 93E', '4582'),
('4583 : New Beijing / Gauss-Kruger CM 99E', '4583'),
('4584 : New Beijing / Gauss-Kruger CM 105E', '4584'),
('4585 : New Beijing / Gauss-Kruger CM 111E', '4585'),
('4586 : New Beijing / Gauss-Kruger CM 117E', '4586'),
('4587 : New Beijing / Gauss-Kruger CM 123E', '4587'),
('4588 : New Beijing / Gauss-Kruger CM 129E', '4588'),
('4589 : New Beijing / Gauss-Kruger CM 135E', '4589'),
('4647 : ETRS89 / UTM zone 32N (zE-N)', '4647'),
('4652 : New Beijing / 3-degree Gauss-Kruger zone 25', '4652'),
('4653 : New Beijing / 3-degree Gauss-Kruger zone 26', '4653'),
('4654 : New Beijing / 3-degree Gauss-Kruger zone 27', '4654'),
('4655 : New Beijing / 3-degree Gauss-Kruger zone 28', '4655'),
('4656 : New Beijing / 3-degree Gauss-Kruger zone 29', '4656'),
('4766 : New Beijing / 3-degree Gauss-Kruger zone 30', '4766'),
('4767 : New Beijing / 3-degree Gauss-Kruger zone 31', '4767'),
('4768 : New Beijing / 3-degree Gauss-Kruger zone 32', '4768'),
('4769 : New Beijing / 3-degree Gauss-Kruger zone 33', '4769'),
('4770 : New Beijing / 3-degree Gauss-Kruger zone 34', '4770'),
('4771 : New Beijing / 3-degree Gauss-Kruger zone 35', '4771'),
('4772 : New Beijing / 3-degree Gauss-Kruger zone 36', '4772'),
('4773 : New Beijing / 3-degree Gauss-Kruger zone 37', '4773'),
('4774 : New Beijing / 3-degree Gauss-Kruger zone 38', '4774'),
('4775 : New Beijing / 3-degree Gauss-Kruger zone 39', '4775'),
('4776 : New Beijing / 3-degree Gauss-Kruger zone 40', '4776'),
('4777 : New Beijing / 3-degree Gauss-Kruger zone 41', '4777'),
('4778 : New Beijing / 3-degree Gauss-Kruger zone 42', '4778'),
('4779 : New Beijing / 3-degree Gauss-Kruger zone 43', '4779'),
('4780 : New Beijing / 3-degree Gauss-Kruger zone 44', '4780'),
('4781 : New Beijing / 3-degree Gauss-Kruger zone 45', '4781'),
('4782 : New Beijing / 3-degree Gauss-Kruger CM 75E', '4782'),
('4783 : New Beijing / 3-degree Gauss-Kruger CM 78E', '4783'),
('4784 : New Beijing / 3-degree Gauss-Kruger CM 81E', '4784'),
('4785 : New Beijing / 3-degree Gauss-Kruger CM 84E', '4785'),
('4786 : New Beijing / 3-degree Gauss-Kruger CM 87E', '4786'),
('4787 : New Beijing / 3-degree Gauss-Kruger CM 90E', '4787'),
('4788 : New Beijing / 3-degree Gauss-Kruger CM 93E', '4788'),
('4789 : New Beijing / 3-degree Gauss-Kruger CM 96E', '4789'),
('4790 : New Beijing / 3-degree Gauss-Kruger CM 99E', '4790'),
('4791 : New Beijing / 3-degree Gauss-Kruger CM 102E', '4791'),
('4792 : New Beijing / 3-degree Gauss-Kruger CM 105E', '4792'),
('4793 : New Beijing / 3-degree Gauss-Kruger CM 108E', '4793'),
('4794 : New Beijing / 3-degree Gauss-Kruger CM 111E', '4794'),
('4795 : New Beijing / 3-degree Gauss-Kruger CM 114E', '4795'),
('4796 : New Beijing / 3-degree Gauss-Kruger CM 117E', '4796'),
('4797 : New Beijing / 3-degree Gauss-Kruger CM 120E', '4797'),
('4798 : New Beijing / 3-degree Gauss-Kruger CM 123E', '4798'),
('4799 : New Beijing / 3-degree Gauss-Kruger CM 126E', '4799'),
('4800 : New Beijing / 3-degree Gauss-Kruger CM 129E', '4800'),
('4812 : New Beijing / 3-degree Gauss-Kruger CM 132E', '4812'),
('4822 : New Beijing / 3-degree Gauss-Kruger CM 135E', '4822'),
('4826 : WGS 84 / Cape Verde National', '4826'),
('4839 : ETRS89 / LCC Germany (N-E)', '4839'),
('4855 : ETRS89 / NTM zone 5', '4855'),
('4856 : ETRS89 / NTM zone 6', '4856'),
('4857 : ETRS89 / NTM zone 7', '4857'),
('4858 : ETRS89 / NTM zone 8', '4858'),
('4859 : ETRS89 / NTM zone 9', '4859'),
('4860 : ETRS89 / NTM zone 10', '4860'),
('4861 : ETRS89 / NTM zone 11', '4861'),
('4862 : ETRS89 / NTM zone 12', '4862'),
('4863 : ETRS89 / NTM zone 13', '4863'),
('4864 : ETRS89 / NTM zone 14', '4864'),
('4865 : ETRS89 / NTM zone 15', '4865'),
('4866 : ETRS89 / NTM zone 16', '4866'),
('4867 : ETRS89 / NTM zone 17', '4867'),
('4868 : ETRS89 / NTM zone 18', '4868'),
('4869 : ETRS89 / NTM zone 19', '4869'),
('4870 : ETRS89 / NTM zone 20', '4870'),
('4871 : ETRS89 / NTM zone 21', '4871'),
('4872 : ETRS89 / NTM zone 22', '4872'),
('4873 : ETRS89 / NTM zone 23', '4873'),
('4874 : ETRS89 / NTM zone 24', '4874'),
('4875 : ETRS89 / NTM zone 25', '4875'),
('4876 : ETRS89 / NTM zone 26', '4876'),
('4877 : ETRS89 / NTM zone 27', '4877'),
('4878 : ETRS89 / NTM zone 28', '4878'),
('4879 : ETRS89 / NTM zone 29', '4879'),
('4880 : ETRS89 / NTM zone 30', '4880'),
('5014 : PTRA08 / UTM zone 25N', '5014'),
('5015 : PTRA08 / UTM zone 26N', '5015'),
('5016 : PTRA08 / UTM zone 28N', '5016'),
('5017 : Lisbon 1890 / Portugal Bonne New', '5017'),
('5018 : Lisbon / Portuguese Grid New', '5018'),
('5041 : WGS 84 / UPS North (E,N)', '5041'),
('5042 : WGS 84 / UPS South (E,N)', '5042'),
('5048 : ETRS89 / TM35FIN(N,E)', '5048'),
('5069 : NAD27 / Conus Albers', '5069'),
('5070 : NAD83 / Conus Albers', '5070'),
('5071 : NAD83(HARN) / Conus Albers', '5071'),
('5072 : NAD83(NSRS2007) / Conus Albers', '5072'),
('5105 : ETRS89 / NTM zone 5', '5105'),
('5106 : ETRS89 / NTM zone 6', '5106'),
('5107 : ETRS89 / NTM zone 7', '5107'),
('5108 : ETRS89 / NTM zone 8', '5108'),
('5109 : ETRS89 / NTM zone 9', '5109'),
('5110 : ETRS89 / NTM zone 10', '5110'),
('5111 : ETRS89 / NTM zone 11', '5111'),
('5112 : ETRS89 / NTM zone 12', '5112'),
('5113 : ETRS89 / NTM zone 13', '5113'),
('5114 : ETRS89 / NTM zone 14', '5114'),
('5115 : ETRS89 / NTM zone 15', '5115'),
('5116 : ETRS89 / NTM zone 16', '5116'),
('5117 : ETRS89 / NTM zone 17', '5117'),
('5118 : ETRS89 / NTM zone 18', '5118'),
('5119 : ETRS89 / NTM zone 19', '5119'),
('5120 : ETRS89 / NTM zone 20', '5120'),
('5121 : ETRS89 / NTM zone 21', '5121'),
('5122 : ETRS89 / NTM zone 22', '5122'),
('5123 : ETRS89 / NTM zone 23', '5123'),
('5124 : ETRS89 / NTM zone 24', '5124'),
('5125 : ETRS89 / NTM zone 25', '5125'),
('5126 : ETRS89 / NTM zone 26', '5126'),
('5127 : ETRS89 / NTM zone 27', '5127'),
('5128 : ETRS89 / NTM zone 28', '5128'),
('5129 : ETRS89 / NTM zone 29', '5129'),
('5130 : ETRS89 / NTM zone 30', '5130'),
('5167 : Korean 1985 / East Sea Belt', '5167'),
('5168 : Korean 1985 / Central Belt Jeju', '5168'),
('5169 : Tokyo 1892 / Korea West Belt', '5169'),
('5170 : Tokyo 1892 / Korea Central Belt', '5170'),
('5171 : Tokyo 1892 / Korea East Belt', '5171'),
('5172 : Tokyo 1892 / Korea East Sea Belt', '5172'),
('5173 : Korean 1985 / Modified West Belt', '5173'),
('5174 : Korean 1985 / Modified Central Belt', '5174'),
('5175 : Korean 1985 / Modified Central Belt Jeju', '5175'),
('5176 : Korean 1985 / Modified East Belt', '5176'),
('5177 : Korean 1985 / Modified East Sea Belt', '5177'),
('5178 : Korean 1985 / Unified CS', '5178'),
('5179 : Korea 2000 / Unified CS', '5179'),
('5180 : Korea 2000 / West Belt', '5180'),
('5181 : Korea 2000 / Central Belt', '5181'),
('5182 : Korea 2000 / Central Belt Jeju', '5182'),
('5183 : Korea 2000 / East Belt', '5183'),
('5184 : Korea 2000 / East Sea Belt', '5184'),
('5185 : Korea 2000 / West Belt 2010', '5185'),
('5186 : Korea 2000 / Central Belt 2010', '5186'),
('5187 : Korea 2000 / East Belt 2010', '5187'),
('5188 : Korea 2000 / East Sea Belt 2010', '5188'),
('5221 : S-JTSK (Ferro) / Krovak East North', '5221'),
('5223 : WGS 84 / Gabon TM', '5223'),
('5224 : S-JTSK/05 (Ferro) / Modified Krovak', '5224'),
('5225 : S-JTSK/05 (Ferro) / Modified Krovak East North', '5225'),
('5234 : Kandawala / Sri Lanka Grid', '5234'),
('5235 : SLD99 / Sri Lanka Grid 1999', '5235'),
('5243 : ETRS89 / LCC Germany (E-N)', '5243'),
('5247 : GDBD2009 / Brunei BRSO', '5247'),
('5253 : TUREF / TM27', '5253'),
('5254 : TUREF / TM30', '5254'),
('5255 : TUREF / TM33', '5255'),
('5256 : TUREF / TM36', '5256'),
('5257 : TUREF / TM39', '5257'),
('5258 : TUREF / TM42', '5258'),
('5259 : TUREF / TM45', '5259'),
('5266 : DRUKREF 03 / Bhutan National Grid', '5266'),
('5269 : TUREF / 3-degree Gauss-Kruger zone 9', '5269'),
('5270 : TUREF / 3-degree Gauss-Kruger zone 10', '5270'),
('5271 : TUREF / 3-degree Gauss-Kruger zone 11', '5271'),
('5272 : TUREF / 3-degree Gauss-Kruger zone 12', '5272'),
('5273 : TUREF / 3-degree Gauss-Kruger zone 13', '5273'),
('5274 : TUREF / 3-degree Gauss-Kruger zone 14', '5274'),
('5275 : TUREF / 3-degree Gauss-Kruger zone 15', '5275'),
('5292 : DRUKREF 03 / Bumthang TM', '5292'),
('5293 : DRUKREF 03 / Chhukha TM', '5293'),
('5294 : DRUKREF 03 / Dagana TM', '5294'),
('5295 : DRUKREF 03 / Gasa TM', '5295'),
('5296 : DRUKREF 03 / Ha TM', '5296'),
('5297 : DRUKREF 03 / Lhuentse TM', '5297'),
('5298 : DRUKREF 03 / Mongar TM', '5298'),
('5299 : DRUKREF 03 / Paro TM', '5299'),
('5300 : DRUKREF 03 / Pemagatshel TM', '5300'),
('5301 : DRUKREF 03 / Punakha TM', '5301'),
('5302 : DRUKREF 03 / Samdrup Jongkhar TM', '5302'),
('5303 : DRUKREF 03 / Samtse TM', '5303'),
('5304 : DRUKREF 03 / Sarpang TM', '5304'),
('5305 : DRUKREF 03 / Thimphu TM', '5305'),
('5306 : DRUKREF 03 / Trashigang TM', '5306'),
('5307 : DRUKREF 03 / Trongsa TM', '5307'),
('5308 : DRUKREF 03 / Tsirang TM', '5308'),
('5309 : DRUKREF 03 / Wangdue Phodrang TM', '5309'),
('5310 : DRUKREF 03 / Yangtse TM', '5310'),
('5311 : DRUKREF 03 / Zhemgang TM', '5311'),
('5316 : ETRS89 / Faroe TM', '5316'),
('5320 : NAD83 / Teranet Ontario Lambert', '5320'),
('5321 : NAD83(CSRS) / Teranet Ontario Lambert', '5321'),
('5325 : ISN2004 / Lambert 2004', '5325'),
('5329 : Segara (Jakarta) / NEIEZ', '5329'),
('5330 : Batavia (Jakarta) / NEIEZ', '5330'),
('5331 : Makassar (Jakarta) / NEIEZ', '5331'),
('5337 : Aratu / UTM zone 25S', '5337'),
('5343 : POSGAR 2007 / Argentina 1', '5343'),
('5344 : POSGAR 2007 / Argentina 2', '5344'),
('5345 : POSGAR 2007 / Argentina 3', '5345'),
('5346 : POSGAR 2007 / Argentina 4', '5346'),
('5347 : POSGAR 2007 / Argentina 5', '5347'),
('5348 : POSGAR 2007 / Argentina 6', '5348'),
('5349 : POSGAR 2007 / Argentina 7', '5349'),
('5355 : MARGEN / UTM zone 20S', '5355'),
('5356 : MARGEN / UTM zone 19S', '5356'),
('5357 : MARGEN / UTM zone 21S', '5357'),
('5361 : SIRGAS-Chile / UTM zone 19S', '5361'),
('5362 : SIRGAS-Chile / UTM zone 18S', '5362'),
('5367 : CR05 / CRTM05', '5367'),
('5382 : SIRGAS-ROU98 / UTM zone 21S', '5382'),
('5383 : SIRGAS-ROU98 / UTM zone 22S', '5383'),
('5387 : Peru96 / UTM zone 18S', '5387'),
('5388 : Peru96 / UTM zone 17S', '5388'),
('5389 : Peru96 / UTM zone 19S', '5389'),
('5396 : SIRGAS 2000 / UTM zone 26S', '5396'),
('5456 : Ocotepeque 1935 / Costa Rica Norte', '5456'),
('5457 : Ocotepeque 1935 / Costa Rica Sur', '5457'),
('5458 : Ocotepeque 1935 / Guatemala Norte', '5458'),
('5459 : Ocotepeque 1935 / Guatemala Sur', '5459'),
('5460 : Ocotepeque 1935 / El Salvador Lambert', '5460'),
('5461 : Ocotepeque 1935 / Nicaragua Norte', '5461'),
('5462 : Ocotepeque 1935 / Nicaragua Sur', '5462'),
('5463 : SAD69 / UTM zone 17N', '5463'),
('5466 : Sibun Gorge 1922 / Colony Grid', '5466'),
('5469 : Panama-Colon 1911 / Panama Lambert', '5469'),
('5472 : Panama-Colon 1911 / Panama Polyconic', '5472'),
('5479 : RSRGD2000 / MSLC2000', '5479'),
('5480 : RSRGD2000 / BCLC2000', '5480'),
('5481 : RSRGD2000 / PCLC2000', '5481'),
('5482 : RSRGD2000 / RSPS2000', '5482'),
('5490 : RGAF09 / UTM zone 20N', '5490'),
('5513 : S-JTSK / Krovak', '5513'),
('5514 : S-JTSK / Krovak East North', '5514'),
('5515 : S-JTSK/05 / Modified Krovak', '5515'),
('5516 : S-JTSK/05 / Modified Krovak East North', '5516'),
('5518 : CI1971 / Chatham Islands Map Grid', '5518'),
('5519 : CI1979 / Chatham Islands Map Grid', '5519'),
('5520 : DHDN / 3-degree Gauss-Kruger zone 1', '5520'),
('5523 : WGS 84 / Gabon TM 2011', '5523'),
('5530 : SAD69(96) / Brazil Polyconic', '5530'),
('5531 : SAD69(96) / UTM zone 21S', '5531'),
('5532 : SAD69(96) / UTM zone 22S', '5532'),
('5533 : SAD69(96) / UTM zone 23S', '5533'),
('5534 : SAD69(96) / UTM zone 24S', '5534'),
('5535 : SAD69(96) / UTM zone 25S', '5535'),
('5536 : Corrego Alegre 1961 / UTM zone 21S', '5536'),
('5537 : Corrego Alegre 1961 / UTM zone 22S', '5537'),
('5538 : Corrego Alegre 1961 / UTM zone 23S', '5538'),
('5539 : Corrego Alegre 1961 / UTM zone 24S', '5539'),
('5550 : PNG94 / PNGMG94 zone 54', '5550'),
('5551 : PNG94 / PNGMG94 zone 55', '5551'),
('5552 : PNG94 / PNGMG94 zone 56', '5552'),
('5559 : Ocotepeque 1935 / Guatemala Norte', '5559'),
('5562 : UCS-2000 / Gauss-Kruger zone 4', '5562'),
('5563 : UCS-2000 / Gauss-Kruger zone 5', '5563'),
('5564 : UCS-2000 / Gauss-Kruger zone 6', '5564'),
('5565 : UCS-2000 / Gauss-Kruger zone 7', '5565'),
('5566 : UCS-2000 / Gauss-Kruger CM 21E', '5566'),
('5567 : UCS-2000 / Gauss-Kruger CM 27E', '5567'),
('5568 : UCS-2000 / Gauss-Kruger CM 33E', '5568'),
('5569 : UCS-2000 / Gauss-Kruger CM 39E', '5569'),
('5570 : UCS-2000 / 3-degree Gauss-Kruger zone 7', '5570'),
('5571 : UCS-2000 / 3-degree Gauss-Kruger zone 8', '5571'),
('5572 : UCS-2000 / 3-degree Gauss-Kruger zone 9', '5572'),
('5573 : UCS-2000 / 3-degree Gauss-Kruger zone 10', '5573'),
('5574 : UCS-2000 / 3-degree Gauss-Kruger zone 11', '5574'),
('5575 : UCS-2000 / 3-degree Gauss-Kruger zone 12', '5575'),
('5576 : UCS-2000 / 3-degree Gauss-Kruger zone 13', '5576'),
('5577 : UCS-2000 / 3-degree Gauss-Kruger CM 21E', '5577'),
('5578 : UCS-2000 / 3-degree Gauss-Kruger CM 24E', '5578'),
('5579 : UCS-2000 / 3-degree Gauss-Kruger CM 27E', '5579'),
('5580 : UCS-2000 / 3-degree Gauss-Kruger CM 30E', '5580'),
('5581 : UCS-2000 / 3-degree Gauss-Kruger CM 33E', '5581'),
('5582 : UCS-2000 / 3-degree Gauss-Kruger CM 36E', '5582'),
('5583 : UCS-2000 / 3-degree Gauss-Kruger CM 39E', '5583'),
('5588 : NAD27 / New Brunswick Stereographic (NAD27)', '5588'),
('5589 : Sibun Gorge 1922 / Colony Grid', '5589'),
('5596 : FEH2010 / Fehmarnbelt TM', '5596'),
('5623 : NAD27 / Michigan East', '5623'),
('5624 : NAD27 / Michigan Old Central', '5624'),
('5625 : NAD27 / Michigan West', '5625'),
('5627 : ED50 / TM 6 NE', '5627'),
('5629 : Moznet / UTM zone 38S', '5629'),
('5631 : Pulkovo 1942(58) / Gauss-Kruger zone 2 (E-N)', '5631'),
('5632 : PTRA08 / LCC Europe', '5632'),
('5633 : PTRA08 / LAEA Europe', '5633'),
('5634 : REGCAN95 / LCC Europe', '5634'),
('5635 : REGCAN95 / LAEA Europe', '5635'),
('5636 : TUREF / LAEA Europe', '5636'),
('5637 : TUREF / LCC Europe', '5637'),
('5638 : ISN2004 / LAEA Europe', '5638'),
('5639 : ISN2004 / LCC Europe', '5639'),
('5641 : SIRGAS 2000 / Brazil Mercator', '5641'),
('5643 : ED50 / SPBA LCC', '5643'),
('5644 : RGR92 / UTM zone 39S', '5644'),
('5646 : NAD83 / Vermont (ftUS)', '5646'),
('5649 : ETRS89 / UTM zone 31N (zE-N)', '5649'),
('5650 : ETRS89 / UTM zone 33N (zE-N)', '5650'),
('5651 : ETRS89 / UTM zone 31N (N-zE)', '5651'),
('5652 : ETRS89 / UTM zone 32N (N-zE)', '5652'),
('5653 : ETRS89 / UTM zone 33N (N-zE)', '5653'),
('5654 : NAD83(HARN) / Vermont (ftUS)', '5654'),
('5655 : NAD83(NSRS2007) / Vermont (ftUS)', '5655'),
('5659 : Monte Mario / TM Emilia-Romagna', '5659'),
('5663 : Pulkovo 1942(58) / Gauss-Kruger zone 3 (E-N)', '5663'),
('5664 : Pulkovo 1942(83) / Gauss-Kruger zone 2 (E-N)', '5664'),
('5665 : Pulkovo 1942(83) / Gauss-Kruger zone 3 (E-N)', '5665'),
('5666 : PD/83 / 3-degree Gauss-Kruger zone 3 (E-N)', '5666'),
('5667 : PD/83 / 3-degree Gauss-Kruger zone 4 (E-N)', '5667'),
('5668 : RD/83 / 3-degree Gauss-Kruger zone 4 (E-N)', '5668'),
('5669 : RD/83 / 3-degree Gauss-Kruger zone 5 (E-N)', '5669'),
('5670 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 3 (E-N)', '5670'),
('5671 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 4 (E-N)', '5671'),
('5672 : Pulkovo 1942(58) / 3-degree Gauss-Kruger zone 5 (E-N)', '5672'),
('5673 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 3 (E-N)', '5673'),
('5674 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 4 (E-N)', '5674'),
('5675 : Pulkovo 1942(83) / 3-degree Gauss-Kruger zone 5 (E-N)', '5675'),
('5676 : DHDN / 3-degree Gauss-Kruger zone 2 (E-N)', '5676'),
('5677 : DHDN / 3-degree Gauss-Kruger zone 3 (E-N)', '5677'),
('5678 : DHDN / 3-degree Gauss-Kruger zone 4 (E-N)', '5678'),
('5679 : DHDN / 3-degree Gauss-Kruger zone 5 (E-N)', '5679'),
('5680 : DHDN / 3-degree Gauss-Kruger zone 1 (E-N)', '5680'),
('5682 : DB_REF / 3-degree Gauss-Kruger zone 2 (E-N)', '5682'),
('5683 : DB_REF / 3-degree Gauss-Kruger zone 3 (E-N)', '5683'),
('5684 : DB_REF / 3-degree Gauss-Kruger zone 4 (E-N)', '5684'),
('5685 : DB_REF / 3-degree Gauss-Kruger zone 5 (E-N)', '5685'),
('5700 : NZGD2000 / UTM zone 1S', '5700'),
('5819 : EPSG topocentric example A', '5819'),
('5820 : EPSG topocentric example B', '5820'),
('5821 : EPSG vertical perspective example', '5821'),
('5825 : AGD66 / ACT Standard Grid', '5825'),
('5836 : Yemen NGN96 / UTM zone 37N', '5836'),
('5837 : Yemen NGN96 / UTM zone 40N', '5837'),
('5839 : Peru96 / UTM zone 17S', '5839'),
('5842 : WGS 84 / TM 12 SE', '5842'),
('5844 : RGRDC 2005 / Congo TM zone 30', '5844'),
('5858 : SAD69(96) / UTM zone 22S', '5858'),
('5875 : SAD69(96) / UTM zone 18S', '5875'),
('5876 : SAD69(96) / UTM zone 19S', '5876'),
('5877 : SAD69(96) / UTM zone 20S', '5877'),
('5879 : Cadastre 1997 / UTM zone 38S', '5879'),
('5880 : SIRGAS 2000 / Brazil Polyconic', '5880'),
('5887 : TGD2005 / Tonga Map Grid', '5887'),
('5890 : JAXA Snow Depth Polar Stereographic North', '5890'),
('5921 : WGS 84 / EPSG Arctic Regional zone A1', '5921'),
('5922 : WGS 84 / EPSG Arctic Regional zone A2', '5922'),
('5923 : WGS 84 / EPSG Arctic Regional zone A3', '5923'),
('5924 : WGS 84 / EPSG Arctic Regional zone A4', '5924'),
('5925 : WGS 84 / EPSG Arctic Regional zone A5', '5925'),
('5926 : WGS 84 / EPSG Arctic Regional zone B1', '5926'),
('5927 : WGS 84 / EPSG Arctic Regional zone B2', '5927'),
('5928 : WGS 84 / EPSG Arctic Regional zone B3', '5928'),
('5929 : WGS 84 / EPSG Arctic Regional zone B4', '5929'),
('5930 : WGS 84 / EPSG Arctic Regional zone B5', '5930'),
('5931 : WGS 84 / EPSG Arctic Regional zone C1', '5931'),
('5932 : WGS 84 / EPSG Arctic Regional zone C2', '5932'),
('5933 : WGS 84 / EPSG Arctic Regional zone C3', '5933'),
('5934 : WGS 84 / EPSG Arctic Regional zone C4', '5934'),
('5935 : WGS 84 / EPSG Arctic Regional zone C5', '5935'),
('5936 : WGS 84 / EPSG Alaska Polar Stereographic', '5936'),
('5937 : WGS 84 / EPSG Canada Polar Stereographic', '5937'),
('5938 : WGS 84 / EPSG Greenland Polar Stereographic', '5938'),
('5939 : WGS 84 / EPSG Norway Polar Stereographic', '5939'),
('5940 : WGS 84 / EPSG Russia Polar Stereographic', '5940'),
('6050 : GR96 / EPSG Arctic zone 1-25', '6050'),
('6051 : GR96 / EPSG Arctic zone 2-18', '6051'),
('6052 : GR96 / EPSG Arctic zone 2-20', '6052'),
('6053 : GR96 / EPSG Arctic zone 3-29', '6053'),
('6054 : GR96 / EPSG Arctic zone 3-31', '6054'),
('6055 : GR96 / EPSG Arctic zone 3-33', '6055'),
('6056 : GR96 / EPSG Arctic zone 4-20', '6056'),
('6057 : GR96 / EPSG Arctic zone 4-22', '6057'),
('6058 : GR96 / EPSG Arctic zone 4-24', '6058'),
('6059 : GR96 / EPSG Arctic zone 5-41', '6059'),
('6060 : GR96 / EPSG Arctic zone 5-43', '6060'),
('6061 : GR96 / EPSG Arctic zone 5-45', '6061'),
('6062 : GR96 / EPSG Arctic zone 6-26', '6062'),
('6063 : GR96 / EPSG Arctic zone 6-28', '6063'),
('6064 : GR96 / EPSG Arctic zone 6-30', '6064'),
('6065 : GR96 / EPSG Arctic zone 7-11', '6065'),
('6066 : GR96 / EPSG Arctic zone 7-13', '6066'),
('6067 : GR96 / EPSG Arctic zone 8-20', '6067'),
('6068 : GR96 / EPSG Arctic zone 8-22', '6068'),
('6069 : ETRS89 / EPSG Arctic zone 2-22', '6069'),
('6070 : ETRS89 / EPSG Arctic zone 3-11', '6070'),
('6071 : ETRS89 / EPSG Arctic zone 4-26', '6071'),
('6072 : ETRS89 / EPSG Arctic zone 4-28', '6072'),
('6073 : ETRS89 / EPSG Arctic zone 5-11', '6073'),
('6074 : ETRS89 / EPSG Arctic zone 5-13', '6074'),
('6075 : WGS 84 / EPSG Arctic zone 2-24', '6075'),
('6076 : WGS 84 / EPSG Arctic zone 2-26', '6076'),
('6077 : WGS 84 / EPSG Arctic zone 3-13', '6077'),
('6078 : WGS 84 / EPSG Arctic zone 3-15', '6078'),
('6079 : WGS 84 / EPSG Arctic zone 3-17', '6079'),
('6080 : WGS 84 / EPSG Arctic zone 3-19', '6080'),
('6081 : WGS 84 / EPSG Arctic zone 4-30', '6081'),
('6082 : WGS 84 / EPSG Arctic zone 4-32', '6082'),
('6083 : WGS 84 / EPSG Arctic zone 4-34', '6083'),
('6084 : WGS 84 / EPSG Arctic zone 4-36', '6084'),
('6085 : WGS 84 / EPSG Arctic zone 4-38', '6085'),
('6086 : WGS 84 / EPSG Arctic zone 4-40', '6086'),
('6087 : WGS 84 / EPSG Arctic zone 5-15', '6087'),
('6088 : WGS 84 / EPSG Arctic zone 5-17', '6088'),
('6089 : WGS 84 / EPSG Arctic zone 5-19', '6089'),
('6090 : WGS 84 / EPSG Arctic zone 5-21', '6090'),
('6091 : WGS 84 / EPSG Arctic zone 5-23', '6091'),
('6092 : WGS 84 / EPSG Arctic zone 5-25', '6092'),
('6093 : WGS 84 / EPSG Arctic zone 5-27', '6093'),
('6094 : NAD83(NSRS2007) / EPSG Arctic zone 5-29', '6094'),
('6095 : NAD83(NSRS2007) / EPSG Arctic zone 5-31', '6095'),
('6096 : NAD83(NSRS2007) / EPSG Arctic zone 6-14', '6096'),
('6097 : NAD83(NSRS2007) / EPSG Arctic zone 6-16', '6097'),
('6098 : NAD83(CSRS) / EPSG Arctic zone 1-23', '6098'),
('6099 : NAD83(CSRS) / EPSG Arctic zone 2-14', '6099'),
('6100 : NAD83(CSRS) / EPSG Arctic zone 2-16', '6100'),
('6101 : NAD83(CSRS) / EPSG Arctic zone 3-25', '6101'),
('6102 : NAD83(CSRS) / EPSG Arctic zone 3-27', '6102'),
('6103 : NAD83(CSRS) / EPSG Arctic zone 3-29', '6103'),
('6104 : NAD83(CSRS) / EPSG Arctic zone 4-14', '6104'),
('6105 : NAD83(CSRS) / EPSG Arctic zone 4-16', '6105'),
('6106 : NAD83(CSRS) / EPSG Arctic zone 4-18', '6106'),
('6107 : NAD83(CSRS) / EPSG Arctic zone 5-33', '6107'),
('6108 : NAD83(CSRS) / EPSG Arctic zone 5-35', '6108'),
('6109 : NAD83(CSRS) / EPSG Arctic zone 5-37', '6109'),
('6110 : NAD83(CSRS) / EPSG Arctic zone 5-39', '6110'),
('6111 : NAD83(CSRS) / EPSG Arctic zone 6-18', '6111'),
('6112 : NAD83(CSRS) / EPSG Arctic zone 6-20', '6112'),
('6113 : NAD83(CSRS) / EPSG Arctic zone 6-22', '6113'),
('6114 : NAD83(CSRS) / EPSG Arctic zone 6-24', '6114'),
('6115 : WGS 84 / EPSG Arctic zone 1-27', '6115'),
('6116 : WGS 84 / EPSG Arctic zone 1-29', '6116'),
('6117 : WGS 84 / EPSG Arctic zone 1-31', '6117'),
('6118 : WGS 84 / EPSG Arctic zone 1-21', '6118'),
('6119 : WGS 84 / EPSG Arctic zone 2-28', '6119'),
('6120 : WGS 84 / EPSG Arctic zone 2-10', '6120'),
('6121 : WGS 84 / EPSG Arctic zone 2-12', '6121'),
('6122 : WGS 84 / EPSG Arctic zone 3-21', '6122'),
('6123 : WGS 84 / EPSG Arctic zone 3-23', '6123'),
('6124 : WGS 84 / EPSG Arctic zone 4-12', '6124'),
('6125 : ETRS89 / EPSG Arctic zone 5-47', '6125'),
('6128 : Grand Cayman National Grid 1959', '6128'),
('6129 : Sister Islands National Grid 1961', '6129'),
('6141 : Cayman Islands National Grid 2011', '6141'),
('6200 : NAD27 / Michigan North', '6200'),
('6201 : NAD27 / Michigan Central', '6201'),
('6202 : NAD27 / Michigan South', '6202'),
('6204 : Macedonia State Coordinate System', '6204'),
('6210 : SIRGAS 2000 / UTM zone 23N', '6210'),
('6211 : SIRGAS 2000 / UTM zone 24N', '6211'),
('6244 : MAGNA-SIRGAS / Arauca urban grid', '6244'),
('6245 : MAGNA-SIRGAS / Armenia urban grid', '6245'),
('6246 : MAGNA-SIRGAS / Barranquilla urban grid', '6246'),
('6247 : MAGNA-SIRGAS / Bogota urban grid', '6247'),
('6248 : MAGNA-SIRGAS / Bucaramanga urban grid', '6248'),
('6249 : MAGNA-SIRGAS / Cali urban grid', '6249'),
('6250 : MAGNA-SIRGAS / Cartagena urban grid', '6250'),
('6251 : MAGNA-SIRGAS / Cucuta urban grid', '6251'),
('6252 : MAGNA-SIRGAS / Florencia urban grid', '6252'),
('6253 : MAGNA-SIRGAS / Ibague urban grid', '6253'),
('6254 : MAGNA-SIRGAS / Inirida urban grid', '6254'),
('6255 : MAGNA-SIRGAS / Leticia urban grid', '6255'),
('6256 : MAGNA-SIRGAS / Manizales urban grid', '6256'),
('6257 : MAGNA-SIRGAS / Medellin urban grid', '6257'),
('6258 : MAGNA-SIRGAS / Mitu urban grid', '6258'),
('6259 : MAGNA-SIRGAS / Mocoa urban grid', '6259'),
('6260 : MAGNA-SIRGAS / Monteria urban grid', '6260'),
('6261 : MAGNA-SIRGAS / Neiva urban grid', '6261'),
('6262 : MAGNA-SIRGAS / Pasto urban grid', '6262'),
('6263 : MAGNA-SIRGAS / Pereira urban grid', '6263'),
('6264 : MAGNA-SIRGAS / Popayan urban grid', '6264'),
('6265 : MAGNA-SIRGAS / Puerto Carreno urban grid', '6265'),
('6266 : MAGNA-SIRGAS / Quibdo urban grid', '6266'),
('6267 : MAGNA-SIRGAS / Riohacha urban grid', '6267'),
('6268 : MAGNA-SIRGAS / San Andres urban grid', '6268'),
('6269 : MAGNA-SIRGAS / San Jose del Guaviare urban grid', '6269'),
('6270 : MAGNA-SIRGAS / Santa Marta urban grid', '6270'),
('6271 : MAGNA-SIRGAS / Sucre urban grid', '6271'),
('6272 : MAGNA-SIRGAS / Tunja urban grid', '6272'),
('6273 : MAGNA-SIRGAS / Valledupar urban grid', '6273'),
('6274 : MAGNA-SIRGAS / Villavicencio urban grid', '6274'),
('6275 : MAGNA-SIRGAS / Yopal urban grid', '6275'),
('6307 : NAD83(CORS96) / Puerto Rico and Virgin Is.', '6307'),
('6312 : CGRS93 / Cyprus Local Transverse Mercator', '6312'),
('6316 : Macedonia State Coordinate System zone 7', '6316'),
('6328 : NAD83(2011) / UTM zone 59N', '6328'),
('6329 : NAD83(2011) / UTM zone 60N', '6329'),
('6330 : NAD83(2011) / UTM zone 1N', '6330'),
('6331 : NAD83(2011) / UTM zone 2N', '6331'),
('6332 : NAD83(2011) / UTM zone 3N', '6332'),
('6333 : NAD83(2011) / UTM zone 4N', '6333'),
('6334 : NAD83(2011) / UTM zone 5N', '6334'),
('6335 : NAD83(2011) / UTM zone 6N', '6335'),
('6336 : NAD83(2011) / UTM zone 7N', '6336'),
('6337 : NAD83(2011) / UTM zone 8N', '6337'),
('6338 : NAD83(2011) / UTM zone 9N', '6338'),
('6339 : NAD83(2011) / UTM zone 10N', '6339'),
('6340 : NAD83(2011) / UTM zone 11N', '6340'),
('6341 : NAD83(2011) / UTM zone 12N', '6341'),
('6342 : NAD83(2011) / UTM zone 13N', '6342'),
('6343 : NAD83(2011) / UTM zone 14N', '6343'),
('6344 : NAD83(2011) / UTM zone 15N', '6344'),
('6345 : NAD83(2011) / UTM zone 16N', '6345'),
('6346 : NAD83(2011) / UTM zone 17N', '6346'),
('6347 : NAD83(2011) / UTM zone 18N', '6347'),
('6348 : NAD83(2011) / UTM zone 19N', '6348'),
('6350 : NAD83(2011) / Conus Albers', '6350'),
('6351 : NAD83(2011) / EPSG Arctic zone 5-29', '6351'),
('6352 : NAD83(2011) / EPSG Arctic zone 5-31', '6352'),
('6353 : NAD83(2011) / EPSG Arctic zone 6-14', '6353'),
('6354 : NAD83(2011) / EPSG Arctic zone 6-16', '6354'),
('6355 : NAD83(2011) / Alabama East', '6355'),
('6356 : NAD83(2011) / Alabama West', '6356'),
('6362 : Mexico ITRF92 / LCC', '6362'),
('6366 : Mexico ITRF2008 / UTM zone 11N', '6366'),
('6367 : Mexico ITRF2008 / UTM zone 12N', '6367'),
('6368 : Mexico ITRF2008 / UTM zone 13N', '6368'),
('6369 : Mexico ITRF2008 / UTM zone 14N', '6369'),
('6370 : Mexico ITRF2008 / UTM zone 15N', '6370'),
('6371 : Mexico ITRF2008 / UTM zone 16N', '6371'),
('6372 : Mexico ITRF2008 / LCC', '6372'),
('6381 : UCS-2000 / Ukraine TM zone 7', '6381'),
('6382 : UCS-2000 / Ukraine TM zone 8', '6382'),
('6383 : UCS-2000 / Ukraine TM zone 9', '6383'),
('6384 : UCS-2000 / Ukraine TM zone 10', '6384'),
('6385 : UCS-2000 / Ukraine TM zone 11', '6385'),
('6386 : UCS-2000 / Ukraine TM zone 12', '6386'),
('6387 : UCS-2000 / Ukraine TM zone 13', '6387'),
('6391 : Cayman Islands National Grid 2011', '6391'),
('6393 : NAD83(2011) / Alaska Albers', '6393'),
('6394 : NAD83(2011) / Alaska zone 1', '6394'),
('6395 : NAD83(2011) / Alaska zone 2', '6395'),
('6396 : NAD83(2011) / Alaska zone 3', '6396'),
('6397 : NAD83(2011) / Alaska zone 4', '6397'),
('6398 : NAD83(2011) / Alaska zone 5', '6398'),
('6399 : NAD83(2011) / Alaska zone 6', '6399'),
('6400 : NAD83(2011) / Alaska zone 7', '6400'),
('6401 : NAD83(2011) / Alaska zone 8', '6401'),
('6402 : NAD83(2011) / Alaska zone 9', '6402'),
('6403 : NAD83(2011) / Alaska zone 10', '6403'),
('6404 : NAD83(2011) / Arizona Central', '6404'),
('6405 : NAD83(2011) / Arizona Central (ft)', '6405'),
('6406 : NAD83(2011) / Arizona East', '6406'),
('6407 : NAD83(2011) / Arizona East (ft)', '6407'),
('6408 : NAD83(2011) / Arizona West', '6408'),
('6409 : NAD83(2011) / Arizona West (ft)', '6409'),
('6410 : NAD83(2011) / Arkansas North', '6410'),
('6411 : NAD83(2011) / Arkansas North (ftUS)', '6411'),
('6412 : NAD83(2011) / Arkansas South', '6412'),
('6413 : NAD83(2011) / Arkansas South (ftUS)', '6413'),
('6414 : NAD83(2011) / California Albers', '6414'),
('6415 : NAD83(2011) / California zone 1', '6415'),
('6416 : NAD83(2011) / California zone 1 (ftUS)', '6416'),
('6417 : NAD83(2011) / California zone 2', '6417'),
('6418 : NAD83(2011) / California zone 2 (ftUS)', '6418'),
('6419 : NAD83(2011) / California zone 3', '6419'),
('6420 : NAD83(2011) / California zone 3 (ftUS)', '6420'),
('6421 : NAD83(2011) / California zone 4', '6421'),
('6422 : NAD83(2011) / California zone 4 (ftUS)', '6422'),
('6423 : NAD83(2011) / California zone 5', '6423'),
('6424 : NAD83(2011) / California zone 5 (ftUS)', '6424'),
('6425 : NAD83(2011) / California zone 6', '6425'),
('6426 : NAD83(2011) / California zone 6 (ftUS)', '6426'),
('6427 : NAD83(2011) / Colorado Central', '6427'),
('6428 : NAD83(2011) / Colorado Central (ftUS)', '6428'),
('6429 : NAD83(2011) / Colorado North', '6429'),
('6430 : NAD83(2011) / Colorado North (ftUS)', '6430'),
('6431 : NAD83(2011) / Colorado South', '6431'),
('6432 : NAD83(2011) / Colorado South (ftUS)', '6432'),
('6433 : NAD83(2011) / Connecticut', '6433'),
('6434 : NAD83(2011) / Connecticut (ftUS)', '6434'),
('6435 : NAD83(2011) / Delaware', '6435'),
('6436 : NAD83(2011) / Delaware (ftUS)', '6436'),
('6437 : NAD83(2011) / Florida East', '6437'),
('6438 : NAD83(2011) / Florida East (ftUS)', '6438'),
('6439 : NAD83(2011) / Florida GDL Albers', '6439'),
('6440 : NAD83(2011) / Florida North', '6440'),
('6441 : NAD83(2011) / Florida North (ftUS)', '6441'),
('6442 : NAD83(2011) / Florida West', '6442'),
('6443 : NAD83(2011) / Florida West (ftUS)', '6443'),
('6444 : NAD83(2011) / Georgia East', '6444'),
('6445 : NAD83(2011) / Georgia East (ftUS)', '6445'),
('6446 : NAD83(2011) / Georgia West', '6446'),
('6447 : NAD83(2011) / Georgia West (ftUS)', '6447'),
('6448 : NAD83(2011) / Idaho Central', '6448'),
('6449 : NAD83(2011) / Idaho Central (ftUS)', '6449'),
('6450 : NAD83(2011) / Idaho East', '6450'),
('6451 : NAD83(2011) / Idaho East (ftUS)', '6451'),
('6452 : NAD83(2011) / Idaho West', '6452'),
('6453 : NAD83(2011) / Idaho West (ftUS)', '6453'),
('6454 : NAD83(2011) / Illinois East', '6454'),
('6455 : NAD83(2011) / Illinois East (ftUS)', '6455'),
('6456 : NAD83(2011) / Illinois West', '6456'),
('6457 : NAD83(2011) / Illinois West (ftUS)', '6457'),
('6458 : NAD83(2011) / Indiana East', '6458'),
('6459 : NAD83(2011) / Indiana East (ftUS)', '6459'),
('6460 : NAD83(2011) / Indiana West', '6460'),
('6461 : NAD83(2011) / Indiana West (ftUS)', '6461'),
('6462 : NAD83(2011) / Iowa North', '6462'),
('6463 : NAD83(2011) / Iowa North (ftUS)', '6463'),
('6464 : NAD83(2011) / Iowa South', '6464'),
('6465 : NAD83(2011) / Iowa South (ftUS)', '6465'),
('6466 : NAD83(2011) / Kansas North', '6466'),
('6467 : NAD83(2011) / Kansas North (ftUS)', '6467'),
('6468 : NAD83(2011) / Kansas South', '6468'),
('6469 : NAD83(2011) / Kansas South (ftUS)', '6469'),
('6470 : NAD83(2011) / Kentucky North', '6470'),
('6471 : NAD83(2011) / Kentucky North (ftUS)', '6471'),
('6472 : NAD83(2011) / Kentucky Single Zone', '6472'),
('6473 : NAD83(2011) / Kentucky Single Zone (ftUS)', '6473'),
('6474 : NAD83(2011) / Kentucky South', '6474'),
('6475 : NAD83(2011) / Kentucky South (ftUS)', '6475'),
('6476 : NAD83(2011) / Louisiana North', '6476'),
('6477 : NAD83(2011) / Louisiana North (ftUS)', '6477'),
('6478 : NAD83(2011) / Louisiana South', '6478'),
('6479 : NAD83(2011) / Louisiana South (ftUS)', '6479'),
('6480 : NAD83(2011) / Maine CS2000 Central', '6480'),
('6481 : NAD83(2011) / Maine CS2000 East', '6481'),
('6482 : NAD83(2011) / Maine CS2000 West', '6482'),
('6483 : NAD83(2011) / Maine East', '6483'),
('6484 : NAD83(2011) / Maine East (ftUS)', '6484'),
('6485 : NAD83(2011) / Maine West', '6485'),
('6486 : NAD83(2011) / Maine West (ftUS)', '6486'),
('6487 : NAD83(2011) / Maryland', '6487'),
('6488 : NAD83(2011) / Maryland (ftUS)', '6488'),
('6489 : NAD83(2011) / Massachusetts Island', '6489'),
('6490 : NAD83(2011) / Massachusetts Island (ftUS)', '6490'),
('6491 : NAD83(2011) / Massachusetts Mainland', '6491'),
('6492 : NAD83(2011) / Massachusetts Mainland (ftUS)', '6492'),
('6493 : NAD83(2011) / Michigan Central', '6493'),
('6494 : NAD83(2011) / Michigan Central (ft)', '6494'),
('6495 : NAD83(2011) / Michigan North', '6495'),
('6496 : NAD83(2011) / Michigan North (ft)', '6496'),
('6497 : NAD83(2011) / Michigan Oblique Mercator', '6497'),
('6498 : NAD83(2011) / Michigan South', '6498'),
('6499 : NAD83(2011) / Michigan South (ft)', '6499'),
('6500 : NAD83(2011) / Minnesota Central', '6500'),
('6501 : NAD83(2011) / Minnesota Central (ftUS)', '6501'),
('6502 : NAD83(2011) / Minnesota North', '6502'),
('6503 : NAD83(2011) / Minnesota North (ftUS)', '6503'),
('6504 : NAD83(2011) / Minnesota South', '6504'),
('6505 : NAD83(2011) / Minnesota South (ftUS)', '6505'),
('6506 : NAD83(2011) / Mississippi East', '6506'),
('6507 : NAD83(2011) / Mississippi East (ftUS)', '6507'),
('6508 : NAD83(2011) / Mississippi TM', '6508'),
('6509 : NAD83(2011) / Mississippi West', '6509'),
('6510 : NAD83(2011) / Mississippi West (ftUS)', '6510'),
('6511 : NAD83(2011) / Missouri Central', '6511'),
('6512 : NAD83(2011) / Missouri East', '6512'),
('6513 : NAD83(2011) / Missouri West', '6513'),
('6514 : NAD83(2011) / Montana', '6514'),
('6515 : NAD83(2011) / Montana (ft)', '6515'),
('6516 : NAD83(2011) / Nebraska', '6516'),
('6517 : NAD83(2011) / Nebraska (ftUS)', '6517'),
('6518 : NAD83(2011) / Nevada Central', '6518'),
('6519 : NAD83(2011) / Nevada Central (ftUS)', '6519'),
('6520 : NAD83(2011) / Nevada East', '6520'),
('6521 : NAD83(2011) / Nevada East (ftUS)', '6521'),
('6522 : NAD83(2011) / Nevada West', '6522'),
('6523 : NAD83(2011) / Nevada West (ftUS)', '6523'),
('6524 : NAD83(2011) / New Hampshire', '6524'),
('6525 : NAD83(2011) / New Hampshire (ftUS)', '6525'),
('6526 : NAD83(2011) / New Jersey', '6526'),
('6527 : NAD83(2011) / New Jersey (ftUS)', '6527'),
('6528 : NAD83(2011) / New Mexico Central', '6528'),
('6529 : NAD83(2011) / New Mexico Central (ftUS)', '6529'),
('6530 : NAD83(2011) / New Mexico East', '6530'),
('6531 : NAD83(2011) / New Mexico East (ftUS)', '6531'),
('6532 : NAD83(2011) / New Mexico West', '6532'),
('6533 : NAD83(2011) / New Mexico West (ftUS)', '6533'),
('6534 : NAD83(2011) / New York Central', '6534'),
('6535 : NAD83(2011) / New York Central (ftUS)', '6535'),
('6536 : NAD83(2011) / New York East', '6536'),
('6537 : NAD83(2011) / New York East (ftUS)', '6537'),
('6538 : NAD83(2011) / New York Long Island', '6538'),
('6539 : NAD83(2011) / New York Long Island (ftUS)', '6539'),
('6540 : NAD83(2011) / New York West', '6540'),
('6541 : NAD83(2011) / New York West (ftUS)', '6541'),
('6542 : NAD83(2011) / North Carolina', '6542'),
('6543 : NAD83(2011) / North Carolina (ftUS)', '6543'),
('6544 : NAD83(2011) / North Dakota North', '6544'),
('6545 : NAD83(2011) / North Dakota North (ft)', '6545'),
('6546 : NAD83(2011) / North Dakota South', '6546'),
('6547 : NAD83(2011) / North Dakota South (ft)', '6547'),
('6548 : NAD83(2011) / Ohio North', '6548'),
('6549 : NAD83(2011) / Ohio North (ftUS)', '6549'),
('6550 : NAD83(2011) / Ohio South', '6550'),
('6551 : NAD83(2011) / Ohio South (ftUS)', '6551'),
('6552 : NAD83(2011) / Oklahoma North', '6552'),
('6553 : NAD83(2011) / Oklahoma North (ftUS)', '6553'),
('6554 : NAD83(2011) / Oklahoma South', '6554'),
('6555 : NAD83(2011) / Oklahoma South (ftUS)', '6555'),
('6556 : NAD83(2011) / Oregon LCC (m)', '6556'),
('6557 : NAD83(2011) / Oregon GIC Lambert (ft)', '6557'),
('6558 : NAD83(2011) / Oregon North', '6558'),
('6559 : NAD83(2011) / Oregon North (ft)', '6559'),
('6560 : NAD83(2011) / Oregon South', '6560'),
('6561 : NAD83(2011) / Oregon South (ft)', '6561'),
('6562 : NAD83(2011) / Pennsylvania North', '6562'),
('6563 : NAD83(2011) / Pennsylvania North (ftUS)', '6563'),
('6564 : NAD83(2011) / Pennsylvania South', '6564'),
('6565 : NAD83(2011) / Pennsylvania South (ftUS)', '6565'),
('6566 : NAD83(2011) / Puerto Rico and Virgin Is.', '6566'),
('6567 : NAD83(2011) / Rhode Island', '6567'),
('6568 : NAD83(2011) / Rhode Island (ftUS)', '6568'),
('6569 : NAD83(2011) / South Carolina', '6569'),
('6570 : NAD83(2011) / South Carolina (ft)', '6570'),
('6571 : NAD83(2011) / South Dakota North', '6571'),
('6572 : NAD83(2011) / South Dakota North (ftUS)', '6572'),
('6573 : NAD83(2011) / South Dakota South', '6573'),
('6574 : NAD83(2011) / South Dakota South (ftUS)', '6574'),
('6575 : NAD83(2011) / Tennessee', '6575'),
('6576 : NAD83(2011) / Tennessee (ftUS)', '6576'),
('6577 : NAD83(2011) / Texas Central', '6577'),
('6578 : NAD83(2011) / Texas Central (ftUS)', '6578'),
('6579 : NAD83(2011) / Texas Centric Albers Equal Area', '6579'),
('6580 : NAD83(2011) / Texas Centric Lambert Conformal', '6580'),
('6581 : NAD83(2011) / Texas North', '6581'),
('6582 : NAD83(2011) / Texas North (ftUS)', '6582'),
('6583 : NAD83(2011) / Texas North Central', '6583'),
('6584 : NAD83(2011) / Texas North Central (ftUS)', '6584'),
('6585 : NAD83(2011) / Texas South', '6585'),
('6586 : NAD83(2011) / Texas South (ftUS)', '6586'),
('6587 : NAD83(2011) / Texas South Central', '6587'),
('6588 : NAD83(2011) / Texas South Central (ftUS)', '6588'),
('6589 : NAD83(2011) / Vermont', '6589'),
('6590 : NAD83(2011) / Vermont (ftUS)', '6590'),
('6591 : NAD83(2011) / Virginia Lambert', '6591'),
('6592 : NAD83(2011) / Virginia North', '6592'),
('6593 : NAD83(2011) / Virginia North (ftUS)', '6593'),
('6594 : NAD83(2011) / Virginia South', '6594'),
('6595 : NAD83(2011) / Virginia South (ftUS)', '6595'),
('6596 : NAD83(2011) / Washington North', '6596'),
('6597 : NAD83(2011) / Washington North (ftUS)', '6597'),
('6598 : NAD83(2011) / Washington South', '6598'),
('6599 : NAD83(2011) / Washington South (ftUS)', '6599'),
('6600 : NAD83(2011) / West Virginia North', '6600'),
('6601 : NAD83(2011) / West Virginia North (ftUS)', '6601'),
('6602 : NAD83(2011) / West Virginia South', '6602'),
('6603 : NAD83(2011) / West Virginia South (ftUS)', '6603'),
('6604 : NAD83(2011) / Wisconsin Central', '6604'),
('6605 : NAD83(2011) / Wisconsin Central (ftUS)', '6605'),
('6606 : NAD83(2011) / Wisconsin North', '6606'),
('6607 : NAD83(2011) / Wisconsin North (ftUS)', '6607'),
('6608 : NAD83(2011) / Wisconsin South', '6608'),
('6609 : NAD83(2011) / Wisconsin South (ftUS)', '6609'),
('6610 : NAD83(2011) / Wisconsin Transverse Mercator', '6610'),
('6611 : NAD83(2011) / Wyoming East', '6611'),
('6612 : NAD83(2011) / Wyoming East (ftUS)', '6612'),
('6613 : NAD83(2011) / Wyoming East Central', '6613'),
('6614 : NAD83(2011) / Wyoming East Central (ftUS)', '6614'),
('6615 : NAD83(2011) / Wyoming West', '6615'),
('6616 : NAD83(2011) / Wyoming West (ftUS)', '6616'),
('6617 : NAD83(2011) / Wyoming West Central', '6617'),
('6618 : NAD83(2011) / Wyoming West Central (ftUS)', '6618'),
('6619 : NAD83(2011) / Utah Central', '6619'),
('6620 : NAD83(2011) / Utah North', '6620'),
('6621 : NAD83(2011) / Utah South', '6621'),
('6622 : NAD83(CSRS) / Quebec Lambert', '6622'),
('6623 : NAD83 / Quebec Albers', '6623'),
('6624 : NAD83(CSRS) / Quebec Albers', '6624'),
('6625 : NAD83(2011) / Utah Central (ftUS)', '6625'),
('6626 : NAD83(2011) / Utah North (ftUS)', '6626'),
('6627 : NAD83(2011) / Utah South (ftUS)', '6627'),
('6628 : NAD83(PA11) / Hawaii zone 1', '6628'),
('6629 : NAD83(PA11) / Hawaii zone 2', '6629'),
('6630 : NAD83(PA11) / Hawaii zone 3', '6630'),
('6631 : NAD83(PA11) / Hawaii zone 4', '6631'),
('6632 : NAD83(PA11) / Hawaii zone 5', '6632'),
('6633 : NAD83(PA11) / Hawaii zone 3 (ftUS)', '6633'),
('6634 : NAD83(PA11) / UTM zone 4N', '6634'),
('6635 : NAD83(PA11) / UTM zone 5N', '6635'),
('6636 : NAD83(PA11) / UTM zone 2S', '6636'),
('6637 : NAD83(MA11) / Guam Map Grid', '6637'),
('6646 : Karbala 1979 / Iraq National Grid', '6646'),
('6669 : JGD2011 / Japan Plane Rectangular CS I', '6669'),
('6670 : JGD2011 / Japan Plane Rectangular CS II', '6670'),
('6671 : JGD2011 / Japan Plane Rectangular CS III', '6671'),
('6672 : JGD2011 / Japan Plane Rectangular CS IV', '6672'),
('6673 : JGD2011 / Japan Plane Rectangular CS V', '6673'),
('6674 : JGD2011 / Japan Plane Rectangular CS VI', '6674'),
('6675 : JGD2011 / Japan Plane Rectangular CS VII', '6675'),
('6676 : JGD2011 / Japan Plane Rectangular CS VIII', '6676'),
('6677 : JGD2011 / Japan Plane Rectangular CS IX', '6677'),
('6678 : JGD2011 / Japan Plane Rectangular CS X', '6678'),
('6679 : JGD2011 / Japan Plane Rectangular CS XI', '6679'),
('6680 : JGD2011 / Japan Plane Rectangular CS XII', '6680'),
('6681 : JGD2011 / Japan Plane Rectangular CS XIII', '6681'),
('6682 : JGD2011 / Japan Plane Rectangular CS XIV', '6682'),
('6683 : JGD2011 / Japan Plane Rectangular CS XV', '6683'),
('6684 : JGD2011 / Japan Plane Rectangular CS XVI', '6684'),
('6685 : JGD2011 / Japan Plane Rectangular CS XVII', '6685'),
('6686 : JGD2011 / Japan Plane Rectangular CS XVIII', '6686'),
('6687 : JGD2011 / Japan Plane Rectangular CS XIX', '6687'),
('6688 : JGD2011 / UTM zone 51N', '6688'),
('6689 : JGD2011 / UTM zone 52N', '6689'),
('6690 : JGD2011 / UTM zone 53N', '6690'),
('6691 : JGD2011 / UTM zone 54N', '6691'),
('6692 : JGD2011 / UTM zone 55N', '6692'),
('6703 : WGS 84 / TM 60 SW', '6703'),
('6707 : RDN2008 / TM32', '6707'),
('6708 : RDN2008 / TM33', '6708'),
('6709 : RDN2008 / TM34', '6709'),
('6720 : WGS 84 / CIG92', '6720'),
('6721 : GDA94 / CIG94', '6721'),
('6722 : WGS 84 / CKIG92', '6722'),
('6723 : GDA94 / CKIG94', '6723'),
('6732 : GDA94 / MGA zone 41', '6732'),
('6733 : GDA94 / MGA zone 42', '6733'),
('6734 : GDA94 / MGA zone 43', '6734'),
('6735 : GDA94 / MGA zone 44', '6735'),
('6736 : GDA94 / MGA zone 46', '6736'),
('6737 : GDA94 / MGA zone 47', '6737'),
('6738 : GDA94 / MGA zone 59', '6738'),
('6784 : NAD83(CORS96) / Oregon Baker zone (m)', '6784'),
('6785 : NAD83(CORS96) / Oregon Baker zone (ft)', '6785'),
('6786 : NAD83(2011) / Oregon Baker zone (m)', '6786'),
('6787 : NAD83(2011) / Oregon Baker zone (ft)', '6787'),
('6788 : NAD83(CORS96) / Oregon Bend-Klamath Falls zone (m)', '6788'),
('6789 : NAD83(CORS96) / Oregon Bend-Klamath Falls zone (ft)', '6789'),
('6790 : NAD83(2011) / Oregon Bend-Klamath Falls zone (m)', '6790'),
('6791 : NAD83(2011) / Oregon Bend-Klamath Falls zone (ft)', '6791'),
('6792 : NAD83(CORS96) / Oregon Bend-Redmond-Prineville zone (m)', '6792'),
('6793 : NAD83(CORS96) / Oregon Bend-Redmond-Prineville zone (ft)', '6793'),
('6794 : NAD83(2011) / Oregon Bend-Redmond-Prineville zone (m)', '6794'),
('6795 : NAD83(2011) / Oregon Bend-Redmond-Prineville zone (ft)', '6795'),
('6796 : NAD83(CORS96) / Oregon Bend-Burns zone (m)', '6796'),
('6797 : NAD83(CORS96) / Oregon Bend-Burns zone (ft)', '6797'),
('6798 : NAD83(2011) / Oregon Bend-Burns zone (m)', '6798'),
('6799 : NAD83(2011) / Oregon Bend-Burns zone (ft)', '6799'),
('6800 : NAD83(CORS96) / Oregon Canyonville-Grants Pass zone (m)', '6800'),
('6801 : NAD83(CORS96) / Oregon Canyonville-Grants Pass zone (ft)', '6801'),
('6802 : NAD83(2011) / Oregon Canyonville-Grants Pass zone (m)', '6802'),
('6803 : NAD83(2011) / Oregon Canyonville-Grants Pass zone (ft)', '6803'),
('6804 : NAD83(CORS96) / Oregon Columbia River East zone (m)', '6804'),
('6805 : NAD83(CORS96) / Oregon Columbia River East zone (ft)', '6805'),
('6806 : NAD83(2011) / Oregon Columbia River East zone (m)', '6806'),
('6807 : NAD83(2011) / Oregon Columbia River East zone (ft)', '6807'),
('6808 : NAD83(CORS96) / Oregon Columbia River West zone (m)', '6808'),
('6809 : NAD83(CORS96) / Oregon Columbia River West zone (ft)', '6809'),
('6810 : NAD83(2011) / Oregon Columbia River West zone (m)', '6810'),
('6811 : NAD83(2011) / Oregon Columbia River West zone (ft)', '6811'),
('6812 : NAD83(CORS96) / Oregon Cottage Grove-Canyonville zone (m)', '6812'),
('6813 : NAD83(CORS96) / Oregon Cottage Grove-Canyonville zone (ft)', '6813'),
('6814 : NAD83(2011) / Oregon Cottage Grove-Canyonville zone (m)', '6814'),
('6815 : NAD83(2011) / Oregon Cottage Grove-Canyonville zone (ft)', '6815'),
('6816 : NAD83(CORS96) / Oregon Dufur-Madras zone (m)', '6816'),
('6817 : NAD83(CORS96) / Oregon Dufur-Madras zone (ft)', '6817'),
('6818 : NAD83(2011) / Oregon Dufur-Madras zone (m)', '6818'),
('6819 : NAD83(2011) / Oregon Dufur-Madras zone (ft)', '6819'),
('6820 : NAD83(CORS96) / Oregon Eugene zone (m)', '6820'),
('6821 : NAD83(CORS96) / Oregon Eugene zone (ft)', '6821'),
('6822 : NAD83(2011) / Oregon Eugene zone (m)', '6822'),
('6823 : NAD83(2011) / Oregon Eugene zone (ft)', '6823'),
('6824 : NAD83(CORS96) / Oregon Grants Pass-Ashland zone (m)', '6824'),
('6825 : NAD83(CORS96) / Oregon Grants Pass-Ashland zone (ft)', '6825'),
('6826 : NAD83(2011) / Oregon Grants Pass-Ashland zone (m)', '6826'),
('6827 : NAD83(2011) / Oregon Grants Pass-Ashland zone (ft)', '6827'),
('6828 : NAD83(CORS96) / Oregon Gresham-Warm Springs zone (m)', '6828'),
('6829 : NAD83(CORS96) / Oregon Gresham-Warm Springs zone (ft)', '6829'),
('6830 : NAD83(2011) / Oregon Gresham-Warm Springs zone (m)', '6830'),
('6831 : NAD83(2011) / Oregon Gresham-Warm Springs zone (ft)', '6831'),
('6832 : NAD83(CORS96) / Oregon La Grande zone (m)', '6832'),
('6833 : NAD83(CORS96) / Oregon La Grande zone (ft)', '6833'),
('6834 : NAD83(2011) / Oregon La Grande zone (m)', '6834'),
('6835 : NAD83(2011) / Oregon La Grande zone (ft)', '6835'),
('6836 : NAD83(CORS96) / Oregon Ontario zone (m)', '6836'),
('6837 : NAD83(CORS96) / Oregon Ontario zone (ft)', '6837'),
('6838 : NAD83(2011) / Oregon Ontario zone (m)', '6838'),
('6839 : NAD83(2011) / Oregon Ontario zone (ft)', '6839'),
('6840 : NAD83(CORS96) / Oregon Coast zone (m)', '6840'),
('6841 : NAD83(CORS96) / Oregon Coast zone (ft)', '6841'),
('6842 : NAD83(2011) / Oregon Coast zone (m)', '6842'),
('6843 : NAD83(2011) / Oregon Coast zone (ft)', '6843'),
('6844 : NAD83(CORS96) / Oregon Pendleton zone (m)', '6844'),
('6845 : NAD83(CORS96) / Oregon Pendleton zone (ft)', '6845'),
('6846 : NAD83(2011) / Oregon Pendleton zone (m)', '6846'),
('6847 : NAD83(2011) / Oregon Pendleton zone (ft)', '6847'),
('6848 : NAD83(CORS96) / Oregon Pendleton-La Grande zone (m)', '6848'),
('6849 : NAD83(CORS96) / Oregon Pendleton-La Grande zone (ft)', '6849'),
('6850 : NAD83(2011) / Oregon Pendleton-La Grande zone (m)', '6850'),
('6851 : NAD83(2011) / Oregon Pendleton-La Grande zone (ft)', '6851'),
('6852 : NAD83(CORS96) / Oregon Portland zone (m)', '6852'),
('6853 : NAD83(CORS96) / Oregon Portland zone (ft)', '6853'),
('6854 : NAD83(2011) / Oregon Portland zone (m)', '6854'),
('6855 : NAD83(2011) / Oregon Portland zone (ft)', '6855'),
('6856 : NAD83(CORS96) / Oregon Salem zone (m)', '6856'),
('6857 : NAD83(CORS96) / Oregon Salem zone (ft)', '6857'),
('6858 : NAD83(2011) / Oregon Salem zone (m)', '6858'),
('6859 : NAD83(2011) / Oregon Salem zone (ft)', '6859'),
('6860 : NAD83(CORS96) / Oregon Santiam Pass zone (m)', '6860'),
('6861 : NAD83(CORS96) / Oregon Santiam Pass zone (ft)', '6861'),
('6862 : NAD83(2011) / Oregon Santiam Pass zone (m)', '6862'),
('6863 : NAD83(2011) / Oregon Santiam Pass (ft)', '6863'),
('6867 : NAD83(CORS96) / Oregon LCC (m)', '6867'),
('6868 : NAD83(CORS96) / Oregon GIC Lambert (ft)', '6868'),
('6870 : ETRS89 / Albania TM 2010', '6870'),
('6875 : RDN2008 / Italy zone', '6875'),
('6876 : RDN2008 / Zone 12', '6876'),
('6879 : NAD83(2011) / Wisconsin Central', '6879'),
('6880 : NAD83(2011) / Nebraska (ftUS)', '6880'),
('6884 : NAD83(CORS96) / Oregon North', '6884'),
('6885 : NAD83(CORS96) / Oregon North (ft)', '6885'),
('6886 : NAD83(CORS96) / Oregon South', '6886'),
('6887 : NAD83(CORS96) / Oregon South (ft)', '6887'),
('6915 : South East Island 1943 / UTM zone 40N', '6915'),
('6922 : NAD83 / Kansas LCC', '6922'),
('6923 : NAD83 / Kansas LCC (ftUS)', '6923'),
('6924 : NAD83(2011) / Kansas LCC', '6924'),
('6925 : NAD83(2011) / Kansas LCC (ftUS)', '6925'),
('6931 : WGS 84 / NSIDC EASE-Grid 2.0 North', '6931'),
('6932 : WGS 84 / NSIDC EASE-Grid 2.0 South', '6932'),
('6933 : WGS 84 / NSIDC EASE-Grid 2.0 Global', '6933'),
('6956 : VN-2000 / TM-3 zone 481', '6956'),
('6957 : VN-2000 / TM-3 zone 482', '6957'),
('6958 : VN-2000 / TM-3 zone 491', '6958'),
('6959 : VN-2000 / TM-3 Da Nang zone', '6959'),
('6962 : ETRS89 / Albania LCC 2010', '6962'),
('6966 : NAD27 / Michigan North', '6966'),
('6984 : Israeli Grid 05', '6984'),
('6991 : Israeli Grid 05/12', '6991'),
('6996 : NAD83(2011) / San Francisco CS13', '6996'),
('6997 : NAD83(2011) / San Francisco CS13 (ftUS)', '6997'),
('7005 : Nahrwan 1934 / UTM zone 37N', '7005'),
('7006 : Nahrwan 1934 / UTM zone 38N', '7006'),
('7007 : Nahrwan 1934 / UTM zone 39N', '7007'),
('7057 : NAD83(2011) / IaRCS zone 1', '7057'),
('7058 : NAD83(2011) / IaRCS zone 2', '7058'),
('7059 : NAD83(2011) / IaRCS zone 3', '7059'),
('7060 : NAD83(2011) / IaRCS zone 4', '7060'),
('7061 : NAD83(2011) / IaRCS zone 5', '7061'),
('7062 : NAD83(2011) / IaRCS zone 6', '7062'),
('7063 : NAD83(2011) / IaRCS zone 7', '7063'),
('7064 : NAD83(2011) / IaRCS zone 8', '7064'),
('7065 : NAD83(2011) / IaRCS zone 9', '7065'),
('7066 : NAD83(2011) / IaRCS zone 10', '7066'),
('7067 : NAD83(2011) / IaRCS zone 11', '7067'),
('7068 : NAD83(2011) / IaRCS zone 12', '7068'),
('7069 : NAD83(2011) / IaRCS zone 13', '7069'),
('7070 : NAD83(2011) / IaRCS zone 14', '7070'),
('7074 : RGTAAF07 / UTM zone 37S', '7074'),
('7075 : RGTAAF07 / UTM zone 38S', '7075'),
('7076 : RGTAAF07 / UTM zone 39S', '7076'),
('7077 : RGTAAF07 / UTM zone 40S', '7077'),
('7078 : RGTAAF07 / UTM zone 41S', '7078'),
('7079 : RGTAAF07 / UTM zone 42S', '7079'),
('7080 : RGTAAF07 / UTM zone 43S', '7080'),
('7081 : RGTAAF07 / UTM zone 44S', '7081'),
('7082 : RGTAAF07 / Terre Adelie Polar Stereographic', '7082'),
('7109 : NAD83(2011) / RMTCRS St Mary (m)', '7109'),
('7110 : NAD83(2011) / RMTCRS Blackfeet (m)', '7110'),
('7111 : NAD83(2011) / RMTCRS Milk River (m)', '7111'),
('7112 : NAD83(2011) / RMTCRS Fort Belknap (m)', '7112'),
('7113 : NAD83(2011) / RMTCRS Fort Peck Assiniboine (m)', '7113'),
('7114 : NAD83(2011) / RMTCRS Fort Peck Sioux (m)', '7114'),
('7115 : NAD83(2011) / RMTCRS Crow (m)', '7115'),
('7116 : NAD83(2011) / RMTCRS Bobcat (m)', '7116'),
('7117 : NAD83(2011) / RMTCRS Billings (m)', '7117'),
('7118 : NAD83(2011) / RMTCRS Wind River (m)', '7118'),
('7119 : NAD83(2011) / RMTCRS St Mary (ft)', '7119'),
('7120 : NAD83(2011) / RMTCRS Blackfeet (ft)', '7120'),
('7121 : NAD83(2011) / RMTCRS Milk River (ft)', '7121'),
('7122 : NAD83(2011) / RMTCRS Fort Belknap (ft)', '7122'),
('7123 : NAD83(2011) / RMTCRS Fort Peck Assiniboine (ft)', '7123'),
('7124 : NAD83(2011) / RMTCRS Fort Peck Sioux (ft)', '7124'),
('7125 : NAD83(2011) / RMTCRS Crow (ft)', '7125'),
('7126 : NAD83(2011) / RMTCRS Bobcat (ft)', '7126'),
('7127 : NAD83(2011) / RMTCRS Billings (ft)', '7127'),
('7128 : NAD83(2011) / RMTCRS Wind River (ftUS)', '7128'),
('7131 : NAD83(2011) / San Francisco CS13', '7131'),
('7132 : NAD83(2011) / San Francisco CS13 (ftUS)', '7132'),
('7142 : Palestine 1923 / Palestine Grid modified', '7142'),
('7257 : NAD83(2011) / InGCS Adams (m)', '7257'),
('7258 : NAD83(2011) / InGCS Adams (ftUS)', '7258'),
('7259 : NAD83(2011) / InGCS Allen (m)', '7259'),
('7260 : NAD83(2011) / InGCS Allen (ftUS)', '7260'),
('7261 : NAD83(2011) / InGCS Bartholomew (m)', '7261'),
('7262 : NAD83(2011) / InGCS Bartholomew (ftUS)', '7262'),
('7263 : NAD83(2011) / InGCS Benton (m)', '7263'),
('7264 : NAD83(2011) / InGCS Benton (ftUS)', '7264'),
('7265 : NAD83(2011) / InGCS Blackford-Delaware (m)', '7265'),
('7266 : NAD83(2011) / InGCS Blackford-Delaware (ftUS)', '7266'),
('7267 : NAD83(2011) / InGCS Boone-Hendricks (m)', '7267'),
('7268 : NAD83(2011) / InGCS Boone-Hendricks (ftUS)', '7268'),
('7269 : NAD83(2011) / InGCS Brown (m)', '7269'),
('7270 : NAD83(2011) / InGCS Brown (ftUS)', '7270'),
('7271 : NAD83(2011) / InGCS Carroll (m)', '7271'),
('7272 : NAD83(2011) / InGCS Carroll (ftUS)', '7272'),
('7273 : NAD83(2011) / InGCS Cass (m)', '7273'),
('7274 : NAD83(2011) / InGCS Cass (ftUS)', '7274'),
('7275 : NAD83(2011) / InGCS Clark-Floyd-Scott (m)', '7275'),
('7276 : NAD83(2011) / InGCS Clark-Floyd-Scott (ftUS)', '7276'),
('7277 : NAD83(2011) / InGCS Clay (m)', '7277'),
('7278 : NAD83(2011) / InGCS Clay (ftUS)', '7278'),
('7279 : NAD83(2011) / InGCS Clinton (m)', '7279'),
('7280 : NAD83(2011) / InGCS Clinton (ftUS)', '7280'),
('7281 : NAD83(2011) / InGCS Crawford-Lawrence-Orange (m)', '7281'),
('7282 : NAD83(2011) / InGCS Crawford-Lawrence-Orange (ftUS)', '7282'),
('7283 : NAD83(2011) / InGCS Daviess-Greene (m)', '7283'),
('7284 : NAD83(2011) / InGCS Daviess-Greene (ftUS)', '7284'),
('7285 : NAD83(2011) / InGCS Dearborn-Ohio-Switzerland (m)', '7285'),
('7286 : NAD83(2011) / InGCS Dearborn-Ohio-Switzerland (ftUS)', '7286'),
('7287 : NAD83(2011) / InGCS Decatur-Rush (m)', '7287'),
('7288 : NAD83(2011) / InGCS Decatur-Rush (ftUS)', '7288'),
('7289 : NAD83(2011) / InGCS DeKalb (m)', '7289'),
('7290 : NAD83(2011) / InGCS DeKalb (ftUS)', '7290'),
('7291 : NAD83(2011) / InGCS Dubois-Martin (m)', '7291'),
('7292 : NAD83(2011) / InGCS Dubois-Martin (ftUS)', '7292'),
('7293 : NAD83(2011) / InGCS Elkhart-Kosciusko-Wabash (m)', '7293'),
('7294 : NAD83(2011) / InGCS Elkhart-Kosciusko-Wabash (ftUS)', '7294'),
('7295 : NAD83(2011) / InGCS Fayette-Franklin-Union (m)', '7295'),
('7296 : NAD83(2011) / InGCS Fayette-Franklin-Union (ftUS)', '7296'),
('7297 : NAD83(2011) / InGCS Fountain-Warren (m)', '7297'),
('7298 : NAD83(2011) / InGCS Fountain-Warren (ftUS)', '7298'),
('7299 : NAD83(2011) / InGCS Fulton-Marshall-St. Joseph (m)', '7299'),
('7300 : NAD83(2011) / InGCS Fulton-Marshall-St. Joseph (ftUS)', '7300'),
('7301 : NAD83(2011) / InGCS Gibson (m)', '7301'),
('7302 : NAD83(2011) / InGCS Gibson (ftUS)', '7302'),
('7303 : NAD83(2011) / InGCS Grant (m)', '7303'),
('7304 : NAD83(2011) / InGCS Grant (ftUS)', '7304'),
('7305 : NAD83(2011) / InGCS Hamilton-Tipton (m)', '7305'),
('7306 : NAD83(2011) / InGCS Hamilton-Tipton (ftUS)', '7306'),
('7307 : NAD83(2011) / InGCS Hancock-Madison (m)', '7307'),
('7308 : NAD83(2011) / InGCS Hancock-Madison (ftUS)', '7308'),
('7309 : NAD83(2011) / InGCS Harrison-Washington (m)', '7309'),
('7310 : NAD83(2011) / InGCS Harrison-Washington (ftUS)', '7310'),
('7311 : NAD83(2011) / InGCS Henry (m)', '7311'),
('7312 : NAD83(2011) / InGCS Henry (ftUS)', '7312'),
('7313 : NAD83(2011) / InGCS Howard-Miami (m)', '7313'),
('7314 : NAD83(2011) / InGCS Howard-Miami (ftUS)', '7314'),
('7315 : NAD83(2011) / InGCS Huntington-Whitley (m)', '7315'),
('7316 : NAD83(2011) / InGCS Huntington-Whitley (ftUS)', '7316'),
('7317 : NAD83(2011) / InGCS Jackson (m)', '7317'),
('7318 : NAD83(2011) / InGCS Jackson (ftUS)', '7318'),
('7319 : NAD83(2011) / InGCS Jasper-Porter (m)', '7319'),
('7320 : NAD83(2011) / InGCS Jasper-Porter (ftUS)', '7320'),
('7321 : NAD83(2011) / InGCS Jay (m)', '7321'),
('7322 : NAD83(2011) / InGCS Jay (ftUS)', '7322'),
('7323 : NAD83(2011) / InGCS Jefferson (m)', '7323'),
('7324 : NAD83(2011) / InGCS Jefferson (ftUS)', '7324'),
('7325 : NAD83(2011) / InGCS Jennings (m)', '7325'),
('7326 : NAD83(2011) / InGCS Jennings (ftUS)', '7326'),
('7327 : NAD83(2011) / InGCS Johnson-Marion (m)', '7327'),
('7328 : NAD83(2011) / InGCS Johnson-Marion (ftUS)', '7328'),
('7329 : NAD83(2011) / InGCS Knox (m)', '7329'),
('7330 : NAD83(2011) / InGCS Knox (ftUS)', '7330'),
('7331 : NAD83(2011) / InGCS LaGrange-Noble (m)', '7331'),
('7332 : NAD83(2011) / InGCS LaGrange-Noble (ftUS)', '7332'),
('7333 : NAD83(2011) / InGCS Lake-Newton (m)', '7333'),
('7334 : NAD83(2011) / InGCS Lake-Newton (ftUS)', '7334'),
('7335 : NAD83(2011) / InGCS LaPorte-Pulaski-Starke (m)', '7335'),
('7336 : NAD83(2011) / InGCS LaPorte-Pulaski-Starke (ftUS)', '7336'),
('7337 : NAD83(2011) / InGCS Monroe-Morgan (m)', '7337'),
('7338 : NAD83(2011) / InGCS Monroe-Morgan (ftUS)', '7338'),
('7339 : NAD83(2011) / InGCS Montgomery-Putnam (m)', '7339'),
('7340 : NAD83(2011) / InGCS Montgomery-Putnam (ftUS)', '7340'),
('7341 : NAD83(2011) / InGCS Owen (m)', '7341'),
('7342 : NAD83(2011) / InGCS Owen (ftUS)', '7342'),
('7343 : NAD83(2011) / InGCS Parke-Vermillion (m)', '7343'),
('7344 : NAD83(2011) / InGCS Parke-Vermillion (ftUS)', '7344'),
('7345 : NAD83(2011) / InGCS Perry (m)', '7345'),
('7346 : NAD83(2011) / InGCS Perry (ftUS)', '7346'),
('7347 : NAD83(2011) / InGCS Pike-Warrick (m)', '7347'),
('7348 : NAD83(2011) / InGCS Pike-Warrick (ftUS)', '7348'),
('7349 : NAD83(2011) / InGCS Posey (m)', '7349'),
('7350 : NAD83(2011) / InGCS Posey (ftUS)', '7350'),
('7351 : NAD83(2011) / InGCS Randolph-Wayne (m)', '7351'),
('7352 : NAD83(2011) / InGCS Randolph-Wayne (ftUS)', '7352'),
('7353 : NAD83(2011) / InGCS Ripley (m)', '7353'),
('7354 : NAD83(2011) / InGCS Ripley (ftUS)', '7354'),
('7355 : NAD83(2011) / InGCS Shelby (m)', '7355'),
('7356 : NAD83(2011) / InGCS Shelby (ftUS)', '7356'),
('7357 : NAD83(2011) / InGCS Spencer (m)', '7357'),
('7358 : NAD83(2011) / InGCS Spencer (ftUS)', '7358'),
('7359 : NAD83(2011) / InGCS Steuben (m)', '7359'),
('7360 : NAD83(2011) / InGCS Steuben (ftUS)', '7360'),
('7361 : NAD83(2011) / InGCS Sullivan (m)', '7361'),
('7362 : NAD83(2011) / InGCS Sullivan (ftUS)', '7362'),
('7363 : NAD83(2011) / InGCS Tippecanoe-White (m)', '7363'),
('7364 : NAD83(2011) / InGCS Tippecanoe-White (ftUS)', '7364'),
('7365 : NAD83(2011) / InGCS Vanderburgh (m)', '7365'),
('7366 : NAD83(2011) / InGCS Vanderburgh (ftUS)', '7366'),
('7367 : NAD83(2011) / InGCS Vigo (m)', '7367'),
('7368 : NAD83(2011) / InGCS Vigo (ftUS)', '7368'),
('7369 : NAD83(2011) / InGCS Wells (m)', '7369'),
('7370 : NAD83(2011) / InGCS Wells (ftUS)', '7370'),
('7374 : ONGD14 / UTM zone 39N', '7374'),
('7375 : ONGD14 / UTM zone 40N', '7375'),
('7376 : ONGD14 / UTM zone 41N', '7376'),
('7528 : NAD83(2011) / WISCRS Adams and Juneau (m)', '7528'),
('7529 : NAD83(2011) / WISCRS Ashland (m)', '7529'),
('7530 : NAD83(2011) / WISCRS Barron (m)', '7530'),
('7531 : NAD83(2011) / WISCRS Bayfield (m)', '7531'),
('7532 : NAD83(2011) / WISCRS Brown (m)', '7532'),
('7533 : NAD83(2011) / WISCRS Buffalo (m)', '7533'),
('7534 : NAD83(2011) / WISCRS Burnett (m)', '7534'),
('7535 : NAD83(2011) / WISCRS Calumet, Fond du Lac, Outagamie and Winnebago (m)', '7535'),
('7536 : NAD83(2011) / WISCRS Chippewa (m)', '7536'),
('7537 : NAD83(2011) / WISCRS Clark (m)', '7537'),
('7538 : NAD83(2011) / WISCRS Columbia (m)', '7538'),
('7539 : NAD83(2011) / WISCRS Crawford (m)', '7539'),
('7540 : NAD83(2011) / WISCRS Dane (m)', '7540'),
('7541 : NAD83(2011) / WISCRS Dodge and Jefferson (m)', '7541'),
('7542 : NAD83(2011) / WISCRS Door (m)', '7542'),
('7543 : NAD83(2011) / WISCRS Douglas (m)', '7543'),
('7544 : NAD83(2011) / WISCRS Dunn (m)', '7544'),
('7545 : NAD83(2011) / WISCRS Eau Claire (m)', '7545'),
('7546 : NAD83(2011) / WISCRS Florence (m)', '7546'),
('7547 : NAD83(2011) / WISCRS Forest (m)', '7547'),
('7548 : NAD83(2011) / WISCRS Grant (m)', '7548'),
('7549 : NAD83(2011) / WISCRS Green and Lafayette (m)', '7549'),
('7550 : NAD83(2011) / WISCRS Green Lake and Marquette (m)', '7550'),
('7551 : NAD83(2011) / WISCRS Iowa (m)', '7551'),
('7552 : NAD83(2011) / WISCRS Iron (m)', '7552'),
('7553 : NAD83(2011) / WISCRS Jackson (m)', '7553'),
('7554 : NAD83(2011) / WISCRS Kenosha, Milwaukee, Ozaukee and Racine (m)', '7554'),
('7555 : NAD83(2011) / WISCRS Kewaunee, Manitowoc and Sheboygan (m)', '7555'),
('7556 : NAD83(2011) / WISCRS La Crosse (m)', '7556'),
('7557 : NAD83(2011) / WISCRS Langlade (m)', '7557'),
('7558 : NAD83(2011) / WISCRS Lincoln (m)', '7558'),
('7559 : NAD83(2011) / WISCRS Marathon (m)', '7559'),
('7560 : NAD83(2011) / WISCRS Marinette (m)', '7560'),
('7561 : NAD83(2011) / WISCRS Menominee (m)', '7561'),
('7562 : NAD83(2011) / WISCRS Monroe (m)', '7562'),
('7563 : NAD83(2011) / WISCRS Oconto (m)', '7563'),
('7564 : NAD83(2011) / WISCRS Oneida (m)', '7564'),
('7565 : NAD83(2011) / WISCRS Pepin and Pierce (m)', '7565'),
('7566 : NAD83(2011) / WISCRS Polk (m)', '7566'),
('7567 : NAD83(2011) / WISCRS Portage (m)', '7567'),
('7568 : NAD83(2011) / WISCRS Price (m)', '7568'),
('7569 : NAD83(2011) / WISCRS Richland (m)', '7569'),
('7570 : NAD83(2011) / WISCRS Rock (m)', '7570'),
('7571 : NAD83(2011) / WISCRS Rusk (m)', '7571'),
('7572 : NAD83(2011) / WISCRS Sauk (m)', '7572'),
('7573 : NAD83(2011) / WISCRS Sawyer (m)', '7573'),
('7574 : NAD83(2011) / WISCRS Shawano (m)', '7574'),
('7575 : NAD83(2011) / WISCRS St. Croix (m)', '7575'),
('7576 : NAD83(2011) / WISCRS Taylor (m)', '7576'),
('7577 : NAD83(2011) / WISCRS Trempealeau (m)', '7577'),
('7578 : NAD83(2011) / WISCRS Vernon (m)', '7578'),
('7579 : NAD83(2011) / WISCRS Vilas (m)', '7579'),
('7580 : NAD83(2011) / WISCRS Walworth (m)', '7580'),
('7581 : NAD83(2011) / WISCRS Washburn (m)', '7581'),
('7582 : NAD83(2011) / WISCRS Washington (m)', '7582'),
('7583 : NAD83(2011) / WISCRS Waukesha (m)', '7583'),
('7584 : NAD83(2011) / WISCRS Waupaca (m)', '7584'),
('7585 : NAD83(2011) / WISCRS Waushara (m)', '7585'),
('7586 : NAD83(2011) / WISCRS Wood (m)', '7586'),
('7587 : NAD83(2011) / WISCRS Adams and Juneau (ftUS)', '7587'),
('7588 : NAD83(2011) / WISCRS Ashland (ftUS)', '7588'),
('7589 : NAD83(2011) / WISCRS Barron (ftUS)', '7589'),
('7590 : NAD83(2011) / WISCRS Bayfield (ftUS)', '7590'),
('7591 : NAD83(2011) / WISCRS Brown (ftUS)', '7591'),
('7592 : NAD83(2011) / WISCRS Buffalo (ftUS)', '7592'),
('7593 : NAD83(2011) / WISCRS Burnett (ftUS)', '7593'),
('7594 : NAD83(2011) / WISCRS Calumet, Fond du Lac, Outagamie and Winnebago (ftUS)', '7594'),
('7595 : NAD83(2011) / WISCRS Chippewa (ftUS)', '7595'),
('7596 : NAD83(2011) / WISCRS Clark (ftUS)', '7596'),
('7597 : NAD83(2011) / WISCRS Columbia (ftUS)', '7597'),
('7598 : NAD83(2011) / WISCRS Crawford (ftUS)', '7598'),
('7599 : NAD83(2011) / WISCRS Dane (ftUS)', '7599'),
('7600 : NAD83(2011) / WISCRS Dodge and Jefferson (ftUS)', '7600'),
('7601 : NAD83(2011) / WISCRS Door (ftUS)', '7601'),
('7602 : NAD83(2011) / WISCRS Douglas (ftUS)', '7602'),
('7603 : NAD83(2011) / WISCRS Dunn (ftUS)', '7603'),
('7604 : NAD83(2011) / WISCRS Eau Claire (ftUS)', '7604'),
('7605 : NAD83(2011) / WISCRS Florence (ftUS)', '7605'),
('7606 : NAD83(2011) / WISCRS Forest (ftUS)', '7606'),
('7607 : NAD83(2011) / WISCRS Grant (ftUS)', '7607'),
('7608 : NAD83(2011) / WISCRS Green and Lafayette (ftUS)', '7608'),
('7609 : NAD83(2011) / WISCRS Green Lake and Marquette (ftUS)', '7609'),
('7610 : NAD83(2011) / WISCRS Iowa (ftUS)', '7610'),
('7611 : NAD83(2011) / WISCRS Iron (ftUS)', '7611'),
('7612 : NAD83(2011) / WISCRS Jackson (ftUS)', '7612'),
('7613 : NAD83(2011) / WISCRS Kenosha, Milwaukee, Ozaukee and Racine (ftUS)', '7613'),
('7614 : NAD83(2011) / WISCRS Kewaunee, Manitowoc and Sheboygan (ftUS)', '7614'),
('7615 : NAD83(2011) / WISCRS La Crosse (ftUS)', '7615'),
('7616 : NAD83(2011) / WISCRS Langlade (ftUS)', '7616'),
('7617 : NAD83(2011) / WISCRS Lincoln (ftUS)', '7617'),
('7618 : NAD83(2011) / WISCRS Marathon (ftUS)', '7618'),
('7619 : NAD83(2011) / WISCRS Marinette (ftUS)', '7619'),
('7620 : NAD83(2011) / WISCRS Menominee (ftUS)', '7620'),
('7621 : NAD83(2011) / WISCRS Monroe (ftUS)', '7621'),
('7622 : NAD83(2011) / WISCRS Oconto (ftUS)', '7622'),
('7623 : NAD83(2011) / WISCRS Oneida (ftUS)', '7623'),
('7624 : NAD83(2011) / WISCRS Pepin and Pierce (ftUS)', '7624'),
('7625 : NAD83(2011) / WISCRS Polk (ftUS)', '7625'),
('7626 : NAD83(2011) / WISCRS Portage (ftUS)', '7626'),
('7627 : NAD83(2011) / WISCRS Price (ftUS)', '7627'),
('7628 : NAD83(2011) / WISCRS Richland (ftUS)', '7628'),
('7629 : NAD83(2011) / WISCRS Rock (ftUS)', '7629'),
('7630 : NAD83(2011) / WISCRS Rusk (ftUS)', '7630'),
('7631 : NAD83(2011) / WISCRS Sauk (ftUS)', '7631'),
('7632 : NAD83(2011) / WISCRS Sawyer (ftUS)', '7632'),
('7633 : NAD83(2011) / WISCRS Shawano (ftUS)', '7633'),
('7634 : NAD83(2011) / WISCRS St. Croix (ftUS)', '7634'),
('7635 : NAD83(2011) / WISCRS Taylor (ftUS)', '7635'),
('7636 : NAD83(2011) / WISCRS Trempealeau (ftUS)', '7636'),
('7637 : NAD83(2011) / WISCRS Vernon (ftUS)', '7637'),
('7638 : NAD83(2011) / WISCRS Vilas (ftUS)', '7638'),
('7639 : NAD83(2011) / WISCRS Walworth (ftUS)', '7639'),
('7640 : NAD83(2011) / WISCRS Washburn (ftUS)', '7640'),
('7641 : NAD83(2011) / WISCRS Washington (ftUS)', '7641'),
('7642 : NAD83(2011) / WISCRS Waukesha (ftUS)', '7642'),
('7643 : NAD83(2011) / WISCRS Waupaca (ftUS)', '7643'),
('7644 : NAD83(2011) / WISCRS Waushara (ftUS)', '7644'),
('7645 : NAD83(2011) / WISCRS Wood (ftUS)', '7645'),
('20004 : Pulkovo 1995 / Gauss-Kruger zone 4', '20004'),
('20005 : Pulkovo 1995 / Gauss-Kruger zone 5', '20005'),
('20006 : Pulkovo 1995 / Gauss-Kruger zone 6', '20006'),
('20007 : Pulkovo 1995 / Gauss-Kruger zone 7', '20007'),
('20008 : Pulkovo 1995 / Gauss-Kruger zone 8', '20008'),
('20009 : Pulkovo 1995 / Gauss-Kruger zone 9', '20009'),
('20010 : Pulkovo 1995 / Gauss-Kruger zone 10', '20010'),
('20011 : Pulkovo 1995 / Gauss-Kruger zone 11', '20011'),
('20012 : Pulkovo 1995 / Gauss-Kruger zone 12', '20012'),
('20013 : Pulkovo 1995 / Gauss-Kruger zone 13', '20013'),
('20014 : Pulkovo 1995 / Gauss-Kruger zone 14', '20014'),
('20015 : Pulkovo 1995 / Gauss-Kruger zone 15', '20015'),
('20016 : Pulkovo 1995 / Gauss-Kruger zone 16', '20016'),
('20017 : Pulkovo 1995 / Gauss-Kruger zone 17', '20017'),
('20018 : Pulkovo 1995 / Gauss-Kruger zone 18', '20018'),
('20019 : Pulkovo 1995 / Gauss-Kruger zone 19', '20019'),
('20020 : Pulkovo 1995 / Gauss-Kruger zone 20', '20020'),
('20021 : Pulkovo 1995 / Gauss-Kruger zone 21', '20021'),
('20022 : Pulkovo 1995 / Gauss-Kruger zone 22', '20022'),
('20023 : Pulkovo 1995 / Gauss-Kruger zone 23', '20023'),
('20024 : Pulkovo 1995 / Gauss-Kruger zone 24', '20024'),
('20025 : Pulkovo 1995 / Gauss-Kruger zone 25', '20025'),
('20026 : Pulkovo 1995 / Gauss-Kruger zone 26', '20026'),
('20027 : Pulkovo 1995 / Gauss-Kruger zone 27', '20027'),
('20028 : Pulkovo 1995 / Gauss-Kruger zone 28', '20028'),
('20029 : Pulkovo 1995 / Gauss-Kruger zone 29', '20029'),
('20030 : Pulkovo 1995 / Gauss-Kruger zone 30', '20030'),
('20031 : Pulkovo 1995 / Gauss-Kruger zone 31', '20031'),
('20032 : Pulkovo 1995 / Gauss-Kruger zone 32', '20032'),
('20064 : Pulkovo 1995 / Gauss-Kruger 4N', '20064'),
('20065 : Pulkovo 1995 / Gauss-Kruger 5N', '20065'),
('20066 : Pulkovo 1995 / Gauss-Kruger 6N', '20066'),
('20067 : Pulkovo 1995 / Gauss-Kruger 7N', '20067'),
('20068 : Pulkovo 1995 / Gauss-Kruger 8N', '20068'),
('20069 : Pulkovo 1995 / Gauss-Kruger 9N', '20069'),
('20070 : Pulkovo 1995 / Gauss-Kruger 10N', '20070'),
('20071 : Pulkovo 1995 / Gauss-Kruger 11N', '20071'),
('20072 : Pulkovo 1995 / Gauss-Kruger 12N', '20072'),
('20073 : Pulkovo 1995 / Gauss-Kruger 13N', '20073'),
('20074 : Pulkovo 1995 / Gauss-Kruger 14N', '20074'),
('20075 : Pulkovo 1995 / Gauss-Kruger 15N', '20075'),
('20076 : Pulkovo 1995 / Gauss-Kruger 16N', '20076'),
('20077 : Pulkovo 1995 / Gauss-Kruger 17N', '20077'),
('20078 : Pulkovo 1995 / Gauss-Kruger 18N', '20078'),
('20079 : Pulkovo 1995 / Gauss-Kruger 19N', '20079'),
('20080 : Pulkovo 1995 / Gauss-Kruger 20N', '20080'),
('20081 : Pulkovo 1995 / Gauss-Kruger 21N', '20081'),
('20082 : Pulkovo 1995 / Gauss-Kruger 22N', '20082'),
('20083 : Pulkovo 1995 / Gauss-Kruger 23N', '20083'),
('20084 : Pulkovo 1995 / Gauss-Kruger 24N', '20084'),
('20085 : Pulkovo 1995 / Gauss-Kruger 25N', '20085'),
('20086 : Pulkovo 1995 / Gauss-Kruger 26N', '20086'),
('20087 : Pulkovo 1995 / Gauss-Kruger 27N', '20087'),
('20088 : Pulkovo 1995 / Gauss-Kruger 28N', '20088'),
('20089 : Pulkovo 1995 / Gauss-Kruger 29N', '20089'),
('20090 : Pulkovo 1995 / Gauss-Kruger 30N', '20090'),
('20091 : Pulkovo 1995 / Gauss-Kruger 31N', '20091'),
('20092 : Pulkovo 1995 / Gauss-Kruger 32N', '20092'),
('20135 : Adindan / UTM zone 35N', '20135'),
('20136 : Adindan / UTM zone 36N', '20136'),
('20137 : Adindan / UTM zone 37N', '20137'),
('20138 : Adindan / UTM zone 38N', '20138'),
('20248 : AGD66 / AMG zone 48', '20248'),
('20249 : AGD66 / AMG zone 49', '20249'),
('20250 : AGD66 / AMG zone 50', '20250'),
('20251 : AGD66 / AMG zone 51', '20251'),
('20252 : AGD66 / AMG zone 52', '20252'),
('20253 : AGD66 / AMG zone 53', '20253'),
('20254 : AGD66 / AMG zone 54', '20254'),
('20255 : AGD66 / AMG zone 55', '20255'),
('20256 : AGD66 / AMG zone 56', '20256'),
('20257 : AGD66 / AMG zone 57', '20257'),
('20258 : AGD66 / AMG zone 58', '20258'),
('20348 : AGD84 / AMG zone 48', '20348'),
('20349 : AGD84 / AMG zone 49', '20349'),
('20350 : AGD84 / AMG zone 50', '20350'),
('20351 : AGD84 / AMG zone 51', '20351'),
('20352 : AGD84 / AMG zone 52', '20352'),
('20353 : AGD84 / AMG zone 53', '20353'),
('20354 : AGD84 / AMG zone 54', '20354'),
('20355 : AGD84 / AMG zone 55', '20355'),
('20356 : AGD84 / AMG zone 56', '20356'),
('20357 : AGD84 / AMG zone 57', '20357'),
('20358 : AGD84 / AMG zone 58', '20358'),
('20436 : Ain el Abd / UTM zone 36N', '20436'),
('20437 : Ain el Abd / UTM zone 37N', '20437'),
('20438 : Ain el Abd / UTM zone 38N', '20438'),
('20439 : Ain el Abd / UTM zone 39N', '20439'),
('20440 : Ain el Abd / UTM zone 40N', '20440'),
('20499 : Ain el Abd / Bahrain Grid', '20499'),
('20538 : Afgooye / UTM zone 38N', '20538'),
('20539 : Afgooye / UTM zone 39N', '20539'),
('20790 : Lisbon (Lisbon) / Portuguese National Grid', '20790'),
('20791 : Lisbon (Lisbon) / Portuguese Grid', '20791'),
('20822 : Aratu / UTM zone 22S', '20822'),
('20823 : Aratu / UTM zone 23S', '20823'),
('20824 : Aratu / UTM zone 24S', '20824'),
('20934 : Arc 1950 / UTM zone 34S', '20934'),
('20935 : Arc 1950 / UTM zone 35S', '20935'),
('20936 : Arc 1950 / UTM zone 36S', '20936'),
('21035 : Arc 1960 / UTM zone 35S', '21035'),
('21036 : Arc 1960 / UTM zone 36S', '21036'),
('21037 : Arc 1960 / UTM zone 37S', '21037'),
('21095 : Arc 1960 / UTM zone 35N', '21095'),
('21096 : Arc 1960 / UTM zone 36N', '21096'),
('21097 : Arc 1960 / UTM zone 37N', '21097'),
('21100 : Batavia (Jakarta) / NEIEZ', '21100'),
('21148 : Batavia / UTM zone 48S', '21148'),
('21149 : Batavia / UTM zone 49S', '21149'),
('21150 : Batavia / UTM zone 50S', '21150'),
('21291 : Barbados 1938 / British West Indies Grid', '21291'),
('21292 : Barbados 1938 / Barbados National Grid', '21292'),
('21413 : Beijing 1954 / Gauss-Kruger zone 13', '21413'),
('21414 : Beijing 1954 / Gauss-Kruger zone 14', '21414'),
('21415 : Beijing 1954 / Gauss-Kruger zone 15', '21415'),
('21416 : Beijing 1954 / Gauss-Kruger zone 16', '21416'),
('21417 : Beijing 1954 / Gauss-Kruger zone 17', '21417'),
('21418 : Beijing 1954 / Gauss-Kruger zone 18', '21418'),
('21419 : Beijing 1954 / Gauss-Kruger zone 19', '21419'),
('21420 : Beijing 1954 / Gauss-Kruger zone 20', '21420'),
('21421 : Beijing 1954 / Gauss-Kruger zone 21', '21421'),
('21422 : Beijing 1954 / Gauss-Kruger zone 22', '21422'),
('21423 : Beijing 1954 / Gauss-Kruger zone 23', '21423'),
('21453 : Beijing 1954 / Gauss-Kruger CM 75E', '21453'),
('21454 : Beijing 1954 / Gauss-Kruger CM 81E', '21454'),
('21455 : Beijing 1954 / Gauss-Kruger CM 87E', '21455'),
('21456 : Beijing 1954 / Gauss-Kruger CM 93E', '21456'),
('21457 : Beijing 1954 / Gauss-Kruger CM 99E', '21457'),
('21458 : Beijing 1954 / Gauss-Kruger CM 105E', '21458'),
('21459 : Beijing 1954 / Gauss-Kruger CM 111E', '21459'),
('21460 : Beijing 1954 / Gauss-Kruger CM 117E', '21460'),
('21461 : Beijing 1954 / Gauss-Kruger CM 123E', '21461'),
('21462 : Beijing 1954 / Gauss-Kruger CM 129E', '21462'),
('21463 : Beijing 1954 / Gauss-Kruger CM 135E', '21463'),
('21473 : Beijing 1954 / Gauss-Kruger 13N', '21473'),
('21474 : Beijing 1954 / Gauss-Kruger 14N', '21474'),
('21475 : Beijing 1954 / Gauss-Kruger 15N', '21475'),
('21476 : Beijing 1954 / Gauss-Kruger 16N', '21476'),
('21477 : Beijing 1954 / Gauss-Kruger 17N', '21477'),
('21478 : Beijing 1954 / Gauss-Kruger 18N', '21478'),
('21479 : Beijing 1954 / Gauss-Kruger 19N', '21479'),
('21480 : Beijing 1954 / Gauss-Kruger 20N', '21480'),
('21481 : Beijing 1954 / Gauss-Kruger 21N', '21481'),
('21482 : Beijing 1954 / Gauss-Kruger 22N', '21482'),
('21483 : Beijing 1954 / Gauss-Kruger 23N', '21483'),
('21500 : Belge 1950 (Brussels) / Belge Lambert 50', '21500'),
('21780 : Bern 1898 (Bern) / LV03C', '21780'),
('21781 : CH1903 / LV03', '21781'),
('21782 : CH1903 / LV03C-G', '21782'),
('21817 : Bogota 1975 / UTM zone 17N', '21817'),
('21818 : Bogota 1975 / UTM zone 18N', '21818'),
('21891 : Bogota 1975 / Colombia West zone', '21891'),
('21892 : Bogota 1975 / Colombia Bogota zone', '21892'),
('21893 : Bogota 1975 / Colombia East Central zone', '21893'),
('21894 : Bogota 1975 / Colombia East', '21894'),
('21896 : Bogota 1975 / Colombia West zone', '21896'),
('21897 : Bogota 1975 / Colombia Bogota zone', '21897'),
('21898 : Bogota 1975 / Colombia East Central zone', '21898'),
('21899 : Bogota 1975 / Colombia East', '21899'),
('22032 : Camacupa / UTM zone 32S', '22032'),
('22033 : Camacupa / UTM zone 33S', '22033'),
('22091 : Camacupa / TM 11.30 SE', '22091'),
('22092 : Camacupa / TM 12 SE', '22092'),
('22171 : POSGAR 98 / Argentina 1', '22171'),
('22172 : POSGAR 98 / Argentina 2', '22172'),
('22173 : POSGAR 98 / Argentina 3', '22173'),
('22174 : POSGAR 98 / Argentina 4', '22174'),
('22175 : POSGAR 98 / Argentina 5', '22175'),
('22176 : POSGAR 98 / Argentina 6', '22176'),
('22177 : POSGAR 98 / Argentina 7', '22177'),
('22181 : POSGAR 94 / Argentina 1', '22181'),
('22182 : POSGAR 94 / Argentina 2', '22182'),
('22183 : POSGAR 94 / Argentina 3', '22183'),
('22184 : POSGAR 94 / Argentina 4', '22184'),
('22185 : POSGAR 94 / Argentina 5', '22185'),
('22186 : POSGAR 94 / Argentina 6', '22186'),
('22187 : POSGAR 94 / Argentina 7', '22187'),
('22191 : Campo Inchauspe / Argentina 1', '22191'),
('22192 : Campo Inchauspe / Argentina 2', '22192'),
('22193 : Campo Inchauspe / Argentina 3', '22193'),
('22194 : Campo Inchauspe / Argentina 4', '22194'),
('22195 : Campo Inchauspe / Argentina 5', '22195'),
('22196 : Campo Inchauspe / Argentina 6', '22196'),
('22197 : Campo Inchauspe / Argentina 7', '22197'),
('22234 : Cape / UTM zone 34S', '22234'),
('22235 : Cape / UTM zone 35S', '22235'),
('22236 : Cape / UTM zone 36S', '22236'),
('22275 : Cape / Lo15', '22275'),
('22277 : Cape / Lo17', '22277'),
('22279 : Cape / Lo19', '22279'),
('22281 : Cape / Lo21', '22281'),
('22283 : Cape / Lo23', '22283'),
('22285 : Cape / Lo25', '22285'),
('22287 : Cape / Lo27', '22287'),
('22289 : Cape / Lo29', '22289'),
('22291 : Cape / Lo31', '22291'),
('22293 : Cape / Lo33', '22293'),
('22300 : Carthage (Paris) / Tunisia Mining Grid', '22300'),
('22332 : Carthage / UTM zone 32N', '22332'),
('22391 : Carthage / Nord Tunisie', '22391'),
('22392 : Carthage / Sud Tunisie', '22392'),
('22521 : Corrego Alegre 1970-72 / UTM zone 21S', '22521'),
('22522 : Corrego Alegre 1970-72 / UTM zone 22S', '22522'),
('22523 : Corrego Alegre 1970-72 / UTM zone 23S', '22523'),
('22524 : Corrego Alegre 1970-72 / UTM zone 24S', '22524'),
('22525 : Corrego Alegre 1970-72 / UTM zone 25S', '22525'),
('22700 : Deir ez Zor / Levant Zone', '22700'),
('22770 : Deir ez Zor / Syria Lambert', '22770'),
('22780 : Deir ez Zor / Levant Stereographic', '22780'),
('22832 : Douala / UTM zone 32N', '22832'),
('22991 : Egypt 1907 / Blue Belt', '22991'),
('22992 : Egypt 1907 / Red Belt', '22992'),
('22993 : Egypt 1907 / Purple Belt', '22993'),
('22994 : Egypt 1907 / Extended Purple Belt', '22994'),
('23028 : ED50 / UTM zone 28N', '23028'),
('23029 : ED50 / UTM zone 29N', '23029'),
('23030 : ED50 / UTM zone 30N', '23030'),
('23031 : ED50 / UTM zone 31N', '23031'),
('23032 : ED50 / UTM zone 32N', '23032'),
('23033 : ED50 / UTM zone 33N', '23033'),
('23034 : ED50 / UTM zone 34N', '23034'),
('23035 : ED50 / UTM zone 35N', '23035'),
('23036 : ED50 / UTM zone 36N', '23036'),
('23037 : ED50 / UTM zone 37N', '23037'),
('23038 : ED50 / UTM zone 38N', '23038'),
('23090 : ED50 / TM 0 N', '23090'),
('23095 : ED50 / TM 5 NE', '23095'),
('23239 : Fahud / UTM zone 39N', '23239'),
('23240 : Fahud / UTM zone 40N', '23240'),
('23433 : Garoua / UTM zone 33N', '23433'),
('23700 : HD72 / EOV', '23700'),
('23830 : DGN95 / Indonesia TM-3 zone 46.2', '23830'),
('23831 : DGN95 / Indonesia TM-3 zone 47.1', '23831'),
('23832 : DGN95 / Indonesia TM-3 zone 47.2', '23832'),
('23833 : DGN95 / Indonesia TM-3 zone 48.1', '23833'),
('23834 : DGN95 / Indonesia TM-3 zone 48.2', '23834'),
('23835 : DGN95 / Indonesia TM-3 zone 49.1', '23835'),
('23836 : DGN95 / Indonesia TM-3 zone 49.2', '23836'),
('23837 : DGN95 / Indonesia TM-3 zone 50.1', '23837'),
('23838 : DGN95 / Indonesia TM-3 zone 50.2', '23838'),
('23839 : DGN95 / Indonesia TM-3 zone 51.1', '23839'),
('23840 : DGN95 / Indonesia TM-3 zone 51.2', '23840'),
('23841 : DGN95 / Indonesia TM-3 zone 52.1', '23841'),
('23842 : DGN95 / Indonesia TM-3 zone 52.2', '23842'),
('23843 : DGN95 / Indonesia TM-3 zone 53.1', '23843'),
('23844 : DGN95 / Indonesia TM-3 zone 53.2', '23844'),
('23845 : DGN95 / Indonesia TM-3 zone 54.1', '23845'),
('23846 : ID74 / UTM zone 46N', '23846'),
('23847 : ID74 / UTM zone 47N', '23847'),
('23848 : ID74 / UTM zone 48N', '23848'),
('23849 : ID74 / UTM zone 49N', '23849'),
('23850 : ID74 / UTM zone 50N', '23850'),
('23851 : ID74 / UTM zone 51N', '23851'),
('23852 : ID74 / UTM zone 52N', '23852'),
('23853 : ID74 / UTM zone 53N', '23853'),
('23866 : DGN95 / UTM zone 46N', '23866'),
('23867 : DGN95 / UTM zone 47N', '23867'),
('23868 : DGN95 / UTM zone 48N', '23868'),
('23869 : DGN95 / UTM zone 49N', '23869'),
('23870 : DGN95 / UTM zone 50N', '23870'),
('23871 : DGN95 / UTM zone 51N', '23871'),
('23872 : DGN95 / UTM zone 52N', '23872'),
('23877 : DGN95 / UTM zone 47S', '23877'),
('23878 : DGN95 / UTM zone 48S', '23878'),
('23879 : DGN95 / UTM zone 49S', '23879'),
('23880 : DGN95 / UTM zone 50S', '23880'),
('23881 : DGN95 / UTM zone 51S', '23881'),
('23882 : DGN95 / UTM zone 52S', '23882'),
('23883 : DGN95 / UTM zone 53S', '23883'),
('23884 : DGN95 / UTM zone 54S', '23884'),
('23886 : ID74 / UTM zone 46S', '23886'),
('23887 : ID74 / UTM zone 47S', '23887'),
('23888 : ID74 / UTM zone 48S', '23888'),
('23889 : ID74 / UTM zone 49S', '23889'),
('23890 : ID74 / UTM zone 50S', '23890'),
('23891 : ID74 / UTM zone 51S', '23891'),
('23892 : ID74 / UTM zone 52S', '23892'),
('23893 : ID74 / UTM zone 53S', '23893'),
('23894 : ID74 / UTM zone 54S', '23894'),
('23946 : Indian 1954 / UTM zone 46N', '23946'),
('23947 : Indian 1954 / UTM zone 47N', '23947'),
('23948 : Indian 1954 / UTM zone 48N', '23948'),
('24047 : Indian 1975 / UTM zone 47N', '24047'),
('24048 : Indian 1975 / UTM zone 48N', '24048'),
('24100 : Jamaica 1875 / Jamaica (Old Grid)', '24100'),
('24200 : JAD69 / Jamaica National Grid', '24200'),
('24305 : Kalianpur 1937 / UTM zone 45N', '24305'),
('24306 : Kalianpur 1937 / UTM zone 46N', '24306'),
('24311 : Kalianpur 1962 / UTM zone 41N', '24311'),
('24312 : Kalianpur 1962 / UTM zone 42N', '24312'),
('24313 : Kalianpur 1962 / UTM zone 43N', '24313'),
('24342 : Kalianpur 1975 / UTM zone 42N', '24342'),
('24343 : Kalianpur 1975 / UTM zone 43N', '24343'),
('24344 : Kalianpur 1975 / UTM zone 44N', '24344'),
('24345 : Kalianpur 1975 / UTM zone 45N', '24345'),
('24346 : Kalianpur 1975 / UTM zone 46N', '24346'),
('24347 : Kalianpur 1975 / UTM zone 47N', '24347'),
('24370 : Kalianpur 1880 / India zone 0', '24370'),
('24371 : Kalianpur 1880 / India zone I', '24371'),
('24372 : Kalianpur 1880 / India zone IIa', '24372'),
('24373 : Kalianpur 1880 / India zone IIIa', '24373'),
('24374 : Kalianpur 1880 / India zone IVa', '24374'),
('24375 : Kalianpur 1937 / India zone IIb', '24375'),
('24376 : Kalianpur 1962 / India zone I', '24376'),
('24377 : Kalianpur 1962 / India zone IIa', '24377'),
('24378 : Kalianpur 1975 / India zone I', '24378'),
('24379 : Kalianpur 1975 / India zone IIa', '24379'),
('24380 : Kalianpur 1975 / India zone IIb', '24380'),
('24381 : Kalianpur 1975 / India zone IIIa', '24381'),
('24382 : Kalianpur 1880 / India zone IIb', '24382'),
('24383 : Kalianpur 1975 / India zone IVa', '24383'),
('24500 : Kertau 1968 / Singapore Grid', '24500'),
('24547 : Kertau 1968 / UTM zone 47N', '24547'),
('24548 : Kertau 1968 / UTM zone 48N', '24548'),
('24571 : Kertau / R.S.O. Malaya (ch)', '24571'),
('24600 : KOC Lambert', '24600'),
('24718 : La Canoa / UTM zone 18N', '24718'),
('24719 : La Canoa / UTM zone 19N', '24719'),
('24720 : La Canoa / UTM zone 20N', '24720'),
('24817 : PSAD56 / UTM zone 17N', '24817'),
('24818 : PSAD56 / UTM zone 18N', '24818'),
('24819 : PSAD56 / UTM zone 19N', '24819'),
('24820 : PSAD56 / UTM zone 20N', '24820'),
('24821 : PSAD56 / UTM zone 21N', '24821'),
('24877 : PSAD56 / UTM zone 17S', '24877'),
('24878 : PSAD56 / UTM zone 18S', '24878'),
('24879 : PSAD56 / UTM zone 19S', '24879'),
('24880 : PSAD56 / UTM zone 20S', '24880'),
('24881 : PSAD56 / UTM zone 21S', '24881'),
('24882 : PSAD56 / UTM zone 22S', '24882'),
('24891 : PSAD56 / Peru west zone', '24891'),
('24892 : PSAD56 / Peru central zone', '24892'),
('24893 : PSAD56 / Peru east zone', '24893'),
('25000 : Leigon / Ghana Metre Grid', '25000'),
('25231 : Lome / UTM zone 31N', '25231'),
('25391 : Luzon 1911 / Philippines zone I', '25391'),
('25392 : Luzon 1911 / Philippines zone II', '25392'),
('25393 : Luzon 1911 / Philippines zone III', '25393'),
('25394 : Luzon 1911 / Philippines zone IV', '25394'),
('25395 : Luzon 1911 / Philippines zone V', '25395'),
('25700 : Makassar (Jakarta) / NEIEZ', '25700'),
('25828 : ETRS89 / UTM zone 28N', '25828'),
('25829 : ETRS89 / UTM zone 29N', '25829'),
('25830 : ETRS89 / UTM zone 30N', '25830'),
('25831 : ETRS89 / UTM zone 31N', '25831'),
('25832 : ETRS89 / UTM zone 32N', '25832'),
('25833 : ETRS89 / UTM zone 33N', '25833'),
('25834 : ETRS89 / UTM zone 34N', '25834'),
('25835 : ETRS89 / UTM zone 35N', '25835'),
('25836 : ETRS89 / UTM zone 36N', '25836'),
('25837 : ETRS89 / UTM zone 37N', '25837'),
('25838 : ETRS89 / UTM zone 38N', '25838'),
('25884 : ETRS89 / TM Baltic93', '25884'),
('25932 : Malongo 1987 / UTM zone 32S', '25932'),
('26191 : Merchich / Nord Maroc', '26191'),
('26192 : Merchich / Sud Maroc', '26192'),
('26193 : Merchich / Sahara', '26193'),
('26194 : Merchich / Sahara Nord', '26194'),
('26195 : Merchich / Sahara Sud', '26195'),
('26237 : Massawa / UTM zone 37N', '26237'),
('26331 : Minna / UTM zone 31N', '26331'),
('26332 : Minna / UTM zone 32N', '26332'),
('26391 : Minna / Nigeria West Belt', '26391'),
('26392 : Minna / Nigeria Mid Belt', '26392'),
('26393 : Minna / Nigeria East Belt', '26393'),
('26432 : Mhast / UTM zone 32S', '26432'),
('26591 : Monte Mario (Rome) / Italy zone 1', '26591'),
('26592 : Monte Mario (Rome) / Italy zone 2', '26592'),
("26632 : M'poraloko / UTM zone 32N", '26632'),
("26692 : M'poraloko / UTM zone 32S", '26692'),
('26701 : NAD27 / UTM zone 1N', '26701'),
('26702 : NAD27 / UTM zone 2N', '26702'),
('26703 : NAD27 / UTM zone 3N', '26703'),
('26704 : NAD27 / UTM zone 4N', '26704'),
('26705 : NAD27 / UTM zone 5N', '26705'),
('26706 : NAD27 / UTM zone 6N', '26706'),
('26707 : NAD27 / UTM zone 7N', '26707'),
('26708 : NAD27 / UTM zone 8N', '26708'),
('26709 : NAD27 / UTM zone 9N', '26709'),
('26710 : NAD27 / UTM zone 10N', '26710'),
('26711 : NAD27 / UTM zone 11N', '26711'),
('26712 : NAD27 / UTM zone 12N', '26712'),
('26713 : NAD27 / UTM zone 13N', '26713'),
('26714 : NAD27 / UTM zone 14N', '26714'),
('26715 : NAD27 / UTM zone 15N', '26715'),
('26716 : NAD27 / UTM zone 16N', '26716'),
('26717 : NAD27 / UTM zone 17N', '26717'),
('26718 : NAD27 / UTM zone 18N', '26718'),
('26719 : NAD27 / UTM zone 19N', '26719'),
('26720 : NAD27 / UTM zone 20N', '26720'),
('26721 : NAD27 / UTM zone 21N', '26721'),
('26722 : NAD27 / UTM zone 22N', '26722'),
('26729 : NAD27 / Alabama East', '26729'),
('26730 : NAD27 / Alabama West', '26730'),
('26731 : NAD27 / Alaska zone 1', '26731'),
('26732 : NAD27 / Alaska zone 2', '26732'),
('26733 : NAD27 / Alaska zone 3', '26733'),
('26734 : NAD27 / Alaska zone 4', '26734'),
('26735 : NAD27 / Alaska zone 5', '26735'),
('26736 : NAD27 / Alaska zone 6', '26736'),
('26737 : NAD27 / Alaska zone 7', '26737'),
('26738 : NAD27 / Alaska zone 8', '26738'),
('26739 : NAD27 / Alaska zone 9', '26739'),
('26740 : NAD27 / Alaska zone 10', '26740'),
('26741 : NAD27 / California zone I', '26741'),
('26742 : NAD27 / California zone II', '26742'),
('26743 : NAD27 / California zone III', '26743'),
('26744 : NAD27 / California zone IV', '26744'),
('26745 : NAD27 / California zone V', '26745'),
('26746 : NAD27 / California zone VI', '26746'),
('26747 : NAD27 / California zone VII', '26747'),
('26748 : NAD27 / Arizona East', '26748'),
('26749 : NAD27 / Arizona Central', '26749'),
('26750 : NAD27 / Arizona West', '26750'),
('26751 : NAD27 / Arkansas North', '26751'),
('26752 : NAD27 / Arkansas South', '26752'),
('26753 : NAD27 / Colorado North', '26753'),
('26754 : NAD27 / Colorado Central', '26754'),
('26755 : NAD27 / Colorado South', '26755'),
('26756 : NAD27 / Connecticut', '26756'),
('26757 : NAD27 / Delaware', '26757'),
('26758 : NAD27 / Florida East', '26758'),
('26759 : NAD27 / Florida West', '26759'),
('26760 : NAD27 / Florida North', '26760'),
('26766 : NAD27 / Georgia East', '26766'),
('26767 : NAD27 / Georgia West', '26767'),
('26768 : NAD27 / Idaho East', '26768'),
('26769 : NAD27 / Idaho Central', '26769'),
('26770 : NAD27 / Idaho West', '26770'),
('26771 : NAD27 / Illinois East', '26771'),
('26772 : NAD27 / Illinois West', '26772'),
('26773 : NAD27 / Indiana East', '26773'),
('26774 : NAD27 / Indiana West', '26774'),
('26775 : NAD27 / Iowa North', '26775'),
('26776 : NAD27 / Iowa South', '26776'),
('26777 : NAD27 / Kansas North', '26777'),
('26778 : NAD27 / Kansas South', '26778'),
('26779 : NAD27 / Kentucky North', '26779'),
('26780 : NAD27 / Kentucky South', '26780'),
('26781 : NAD27 / Louisiana North', '26781'),
('26782 : NAD27 / Louisiana South', '26782'),
('26783 : NAD27 / Maine East', '26783'),
('26784 : NAD27 / Maine West', '26784'),
('26785 : NAD27 / Maryland', '26785'),
('26786 : NAD27 / Massachusetts Mainland', '26786'),
('26787 : NAD27 / Massachusetts Island', '26787'),
('26791 : NAD27 / Minnesota North', '26791'),
('26792 : NAD27 / Minnesota Central', '26792'),
('26793 : NAD27 / Minnesota South', '26793'),
('26794 : NAD27 / Mississippi East', '26794'),
('26795 : NAD27 / Mississippi West', '26795'),
('26796 : NAD27 / Missouri East', '26796'),
('26797 : NAD27 / Missouri Central', '26797'),
('26798 : NAD27 / Missouri West', '26798'),
('26799 : NAD27 / California zone VII', '26799'),
('26801 : NAD Michigan / Michigan East', '26801'),
('26802 : NAD Michigan / Michigan Old Central', '26802'),
('26803 : NAD Michigan / Michigan West', '26803'),
('26811 : NAD Michigan / Michigan North', '26811'),
('26812 : NAD Michigan / Michigan Central', '26812'),
('26813 : NAD Michigan / Michigan South', '26813'),
('26814 : NAD83 / Maine East (ftUS)', '26814'),
('26815 : NAD83 / Maine West (ftUS)', '26815'),
('26819 : NAD83 / Minnesota North (ftUS)', '26819'),
('26820 : NAD83 / Minnesota Central (ftUS)', '26820'),
('26821 : NAD83 / Minnesota South (ftUS)', '26821'),
('26822 : NAD83 / Nebraska (ftUS)', '26822'),
('26823 : NAD83 / West Virginia North (ftUS)', '26823'),
('26824 : NAD83 / West Virginia South (ftUS)', '26824'),
('26825 : NAD83(HARN) / Maine East (ftUS)', '26825'),
('26826 : NAD83(HARN) / Maine West (ftUS)', '26826'),
('26830 : NAD83(HARN) / Minnesota North (ftUS)', '26830'),
('26831 : NAD83(HARN) / Minnesota Central (ftUS)', '26831'),
('26832 : NAD83(HARN) / Minnesota South (ftUS)', '26832'),
('26833 : NAD83(HARN) / Nebraska (ftUS)', '26833'),
('26834 : NAD83(HARN) / West Virginia North (ftUS)', '26834'),
('26835 : NAD83(HARN) / West Virginia South (ftUS)', '26835'),
('26836 : NAD83(NSRS2007) / Maine East (ftUS)', '26836'),
('26837 : NAD83(NSRS2007) / Maine West (ftUS)', '26837'),
('26841 : NAD83(NSRS2007) / Minnesota North (ftUS)', '26841'),
('26842 : NAD83(NSRS2007) / Minnesota Central (ftUS)', '26842'),
('26843 : NAD83(NSRS2007) / Minnesota South (ftUS)', '26843'),
('26844 : NAD83(NSRS2007) / Nebraska (ftUS)', '26844'),
('26845 : NAD83(NSRS2007) / West Virginia North (ftUS)', '26845'),
('26846 : NAD83(NSRS2007) / West Virginia South (ftUS)', '26846'),
('26847 : NAD83 / Maine East (ftUS)', '26847'),
('26848 : NAD83 / Maine West (ftUS)', '26848'),
('26849 : NAD83 / Minnesota North (ftUS)', '26849'),
('26850 : NAD83 / Minnesota Central (ftUS)', '26850'),
('26851 : NAD83 / Minnesota South (ftUS)', '26851'),
('26852 : NAD83 / Nebraska (ftUS)', '26852'),
('26853 : NAD83 / West Virginia North (ftUS)', '26853'),
('26854 : NAD83 / West Virginia South (ftUS)', '26854'),
('26855 : NAD83(HARN) / Maine East (ftUS)', '26855'),
('26856 : NAD83(HARN) / Maine West (ftUS)', '26856'),
('26857 : NAD83(HARN) / Minnesota North (ftUS)', '26857'),
('26858 : NAD83(HARN) / Minnesota Central (ftUS)', '26858'),
('26859 : NAD83(HARN) / Minnesota South (ftUS)', '26859'),
('26860 : NAD83(HARN) / Nebraska (ftUS)', '26860'),
('26861 : NAD83(HARN) / West Virginia North (ftUS)', '26861'),
('26862 : NAD83(HARN) / West Virginia South (ftUS)', '26862'),
('26863 : NAD83(NSRS2007) / Maine East (ftUS)', '26863'),
('26864 : NAD83(NSRS2007) / Maine West (ftUS)', '26864'),
('26865 : NAD83(NSRS2007) / Minnesota North (ftUS)', '26865'),
('26866 : NAD83(NSRS2007) / Minnesota Central (ftUS)', '26866'),
('26867 : NAD83(NSRS2007) / Minnesota South (ftUS)', '26867'),
('26868 : NAD83(NSRS2007) / Nebraska (ftUS)', '26868'),
('26869 : NAD83(NSRS2007) / West Virginia North (ftUS)', '26869'),
('26870 : NAD83(NSRS2007) / West Virginia South (ftUS)', '26870'),
('26891 : NAD83(CSRS) / MTM zone 11', '26891'),
('26892 : NAD83(CSRS) / MTM zone 12', '26892'),
('26893 : NAD83(CSRS) / MTM zone 13', '26893'),
('26894 : NAD83(CSRS) / MTM zone 14', '26894'),
('26895 : NAD83(CSRS) / MTM zone 15', '26895'),
('26896 : NAD83(CSRS) / MTM zone 16', '26896'),
('26897 : NAD83(CSRS) / MTM zone 17', '26897'),
('26898 : NAD83(CSRS) / MTM zone 1', '26898'),
('26899 : NAD83(CSRS) / MTM zone 2', '26899'),
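# NAD83-based systems: UTM zones (26901-26923) followed by US State Plane zones (26929-26998).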
('26901 : NAD83 / UTM zone 1N', '26901'),
('26902 : NAD83 / UTM zone 2N', '26902'),
('26903 : NAD83 / UTM zone 3N', '26903'),
('26904 : NAD83 / UTM zone 4N', '26904'),
('26905 : NAD83 / UTM zone 5N', '26905'),
('26906 : NAD83 / UTM zone 6N', '26906'),
('26907 : NAD83 / UTM zone 7N', '26907'),
('26908 : NAD83 / UTM zone 8N', '26908'),
('26909 : NAD83 / UTM zone 9N', '26909'),
('26910 : NAD83 / UTM zone 10N', '26910'),
('26911 : NAD83 / UTM zone 11N', '26911'),
('26912 : NAD83 / UTM zone 12N', '26912'),
('26913 : NAD83 / UTM zone 13N', '26913'),
('26914 : NAD83 / UTM zone 14N', '26914'),
('26915 : NAD83 / UTM zone 15N', '26915'),
('26916 : NAD83 / UTM zone 16N', '26916'),
('26917 : NAD83 / UTM zone 17N', '26917'),
('26918 : NAD83 / UTM zone 18N', '26918'),
('26919 : NAD83 / UTM zone 19N', '26919'),
('26920 : NAD83 / UTM zone 20N', '26920'),
('26921 : NAD83 / UTM zone 21N', '26921'),
('26922 : NAD83 / UTM zone 22N', '26922'),
('26923 : NAD83 / UTM zone 23N', '26923'),
('26929 : NAD83 / Alabama East', '26929'),
('26930 : NAD83 / Alabama West', '26930'),
('26931 : NAD83 / Alaska zone 1', '26931'),
('26932 : NAD83 / Alaska zone 2', '26932'),
('26933 : NAD83 / Alaska zone 3', '26933'),
('26934 : NAD83 / Alaska zone 4', '26934'),
('26935 : NAD83 / Alaska zone 5', '26935'),
('26936 : NAD83 / Alaska zone 6', '26936'),
('26937 : NAD83 / Alaska zone 7', '26937'),
('26938 : NAD83 / Alaska zone 8', '26938'),
('26939 : NAD83 / Alaska zone 9', '26939'),
('26940 : NAD83 / Alaska zone 10', '26940'),
('26941 : NAD83 / California zone 1', '26941'),
('26942 : NAD83 / California zone 2', '26942'),
('26943 : NAD83 / California zone 3', '26943'),
('26944 : NAD83 / California zone 4', '26944'),
('26945 : NAD83 / California zone 5', '26945'),
('26946 : NAD83 / California zone 6', '26946'),
('26948 : NAD83 / Arizona East', '26948'),
('26949 : NAD83 / Arizona Central', '26949'),
('26950 : NAD83 / Arizona West', '26950'),
('26951 : NAD83 / Arkansas North', '26951'),
('26952 : NAD83 / Arkansas South', '26952'),
('26953 : NAD83 / Colorado North', '26953'),
('26954 : NAD83 / Colorado Central', '26954'),
('26955 : NAD83 / Colorado South', '26955'),
('26956 : NAD83 / Connecticut', '26956'),
('26957 : NAD83 / Delaware', '26957'),
('26958 : NAD83 / Florida East', '26958'),
('26959 : NAD83 / Florida West', '26959'),
('26960 : NAD83 / Florida North', '26960'),
('26961 : NAD83 / Hawaii zone 1', '26961'),
('26962 : NAD83 / Hawaii zone 2', '26962'),
('26963 : NAD83 / Hawaii zone 3', '26963'),
('26964 : NAD83 / Hawaii zone 4', '26964'),
('26965 : NAD83 / Hawaii zone 5', '26965'),
('26966 : NAD83 / Georgia East', '26966'),
('26967 : NAD83 / Georgia West', '26967'),
('26968 : NAD83 / Idaho East', '26968'),
('26969 : NAD83 / Idaho Central', '26969'),
('26970 : NAD83 / Idaho West', '26970'),
('26971 : NAD83 / Illinois East', '26971'),
('26972 : NAD83 / Illinois West', '26972'),
('26973 : NAD83 / Indiana East', '26973'),
('26974 : NAD83 / Indiana West', '26974'),
('26975 : NAD83 / Iowa North', '26975'),
('26976 : NAD83 / Iowa South', '26976'),
('26977 : NAD83 / Kansas North', '26977'),
('26978 : NAD83 / Kansas South', '26978'),
('26979 : NAD83 / Kentucky North', '26979'),
('26980 : NAD83 / Kentucky South', '26980'),
('26981 : NAD83 / Louisiana North', '26981'),
('26982 : NAD83 / Louisiana South', '26982'),
('26983 : NAD83 / Maine East', '26983'),
('26984 : NAD83 / Maine West', '26984'),
('26985 : NAD83 / Maryland', '26985'),
('26986 : NAD83 / Massachusetts Mainland', '26986'),
('26987 : NAD83 / Massachusetts Island', '26987'),
('26988 : NAD83 / Michigan North', '26988'),
('26989 : NAD83 / Michigan Central', '26989'),
('26990 : NAD83 / Michigan South', '26990'),
('26991 : NAD83 / Minnesota North', '26991'),
('26992 : NAD83 / Minnesota Central', '26992'),
('26993 : NAD83 / Minnesota South', '26993'),
('26994 : NAD83 / Mississippi East', '26994'),
('26995 : NAD83 / Mississippi West', '26995'),
('26996 : NAD83 / Missouri East', '26996'),
('26997 : NAD83 / Missouri Central', '26997'),
('26998 : NAD83 / Missouri West', '26998'),
('27037 : Nahrwan 1967 / UTM zone 37N', '27037'),
('27038 : Nahrwan 1967 / UTM zone 38N', '27038'),
('27039 : Nahrwan 1967 / UTM zone 39N', '27039'),
('27040 : Nahrwan 1967 / UTM zone 40N', '27040'),
('27120 : Naparima 1972 / UTM zone 20N', '27120'),
('27200 : NZGD49 / New Zealand Map Grid', '27200'),
('27205 : NZGD49 / Mount Eden Circuit', '27205'),
('27206 : NZGD49 / Bay of Plenty Circuit', '27206'),
('27207 : NZGD49 / Poverty Bay Circuit', '27207'),
('27208 : NZGD49 / Hawkes Bay Circuit', '27208'),
('27209 : NZGD49 / Taranaki Circuit', '27209'),
('27210 : NZGD49 / Tuhirangi Circuit', '27210'),
('27211 : NZGD49 / Wanganui Circuit', '27211'),
('27212 : NZGD49 / Wairarapa Circuit', '27212'),
('27213 : NZGD49 / Wellington Circuit', '27213'),
('27214 : NZGD49 / Collingwood Circuit', '27214'),
('27215 : NZGD49 / Nelson Circuit', '27215'),
('27216 : NZGD49 / Karamea Circuit', '27216'),
('27217 : NZGD49 / Buller Circuit', '27217'),
('27218 : NZGD49 / Grey Circuit', '27218'),
('27219 : NZGD49 / Amuri Circuit', '27219'),
('27220 : NZGD49 / Marlborough Circuit', '27220'),
('27221 : NZGD49 / Hokitika Circuit', '27221'),
('27222 : NZGD49 / Okarito Circuit', '27222'),
('27223 : NZGD49 / Jacksons Bay Circuit', '27223'),
('27224 : NZGD49 / Mount Pleasant Circuit', '27224'),
('27225 : NZGD49 / Gawler Circuit', '27225'),
('27226 : NZGD49 / Timaru Circuit', '27226'),
('27227 : NZGD49 / Lindis Peak Circuit', '27227'),
('27228 : NZGD49 / Mount Nicholas Circuit', '27228'),
('27229 : NZGD49 / Mount York Circuit', '27229'),
('27230 : NZGD49 / Observation Point Circuit', '27230'),
('27231 : NZGD49 / North Taieri Circuit', '27231'),
('27232 : NZGD49 / Bluff Circuit', '27232'),
('27258 : NZGD49 / UTM zone 58S', '27258'),
('27259 : NZGD49 / UTM zone 59S', '27259'),
('27260 : NZGD49 / UTM zone 60S', '27260'),
('27291 : NZGD49 / North Island Grid', '27291'),
('27292 : NZGD49 / South Island Grid', '27292'),
('27391 : NGO 1948 (Oslo) / NGO zone I', '27391'),
('27392 : NGO 1948 (Oslo) / NGO zone II', '27392'),
('27393 : NGO 1948 (Oslo) / NGO zone III', '27393'),
('27394 : NGO 1948 (Oslo) / NGO zone IV', '27394'),
('27395 : NGO 1948 (Oslo) / NGO zone V', '27395'),
('27396 : NGO 1948 (Oslo) / NGO zone VI', '27396'),
('27397 : NGO 1948 (Oslo) / NGO zone VII', '27397'),
('27398 : NGO 1948 (Oslo) / NGO zone VIII', '27398'),
('27429 : Datum 73 / UTM zone 29N', '27429'),
('27492 : Datum 73 / Modified Portuguese Grid', '27492'),
('27493 : Datum 73 / Modified Portuguese Grid', '27493'),
('27500 : ATF (Paris) / Nord de Guerre', '27500'),
('27561 : NTF (Paris) / Lambert Nord France', '27561'),
('27562 : NTF (Paris) / Lambert Centre France', '27562'),
('27563 : NTF (Paris) / Lambert Sud France', '27563'),
('27564 : NTF (Paris) / Lambert Corse', '27564'),
('27571 : NTF (Paris) / Lambert zone I', '27571'),
('27572 : NTF (Paris) / Lambert zone II', '27572'),
('27573 : NTF (Paris) / Lambert zone III', '27573'),
('27574 : NTF (Paris) / Lambert zone IV', '27574'),
('27581 : NTF (Paris) / France I', '27581'),
('27582 : NTF (Paris) / France II', '27582'),
('27583 : NTF (Paris) / France III', '27583'),
('27584 : NTF (Paris) / France IV', '27584'),
('27591 : NTF (Paris) / Nord France', '27591'),
('27592 : NTF (Paris) / Centre France', '27592'),
('27593 : NTF (Paris) / Sud France', '27593'),
('27594 : NTF (Paris) / Corse', '27594'),
('27700 : OSGB 1936 / British National Grid', '27700'),
('28191 : Palestine 1923 / Palestine Grid', '28191'),
('28192 : Palestine 1923 / Palestine Belt', '28192'),
('28193 : Palestine 1923 / Israeli CS Grid', '28193'),
('28232 : Pointe Noire / UTM zone 32S', '28232'),
('28348 : GDA94 / MGA zone 48', '28348'),
('28349 : GDA94 / MGA zone 49', '28349'),
('28350 : GDA94 / MGA zone 50', '28350'),
('28351 : GDA94 / MGA zone 51', '28351'),
('28352 : GDA94 / MGA zone 52', '28352'),
('28353 : GDA94 / MGA zone 53', '28353'),
('28354 : GDA94 / MGA zone 54', '28354'),
('28355 : GDA94 / MGA zone 55', '28355'),
('28356 : GDA94 / MGA zone 56', '28356'),
('28357 : GDA94 / MGA zone 57', '28357'),
('28358 : GDA94 / MGA zone 58', '28358'),
('28402 : Pulkovo 1942 / Gauss-Kruger zone 2', '28402'),
('28403 : Pulkovo 1942 / Gauss-Kruger zone 3', '28403'),
('28404 : Pulkovo 1942 / Gauss-Kruger zone 4', '28404'),
('28405 : Pulkovo 1942 / Gauss-Kruger zone 5', '28405'),
('28406 : Pulkovo 1942 / Gauss-Kruger zone 6', '28406'),
('28407 : Pulkovo 1942 / Gauss-Kruger zone 7', '28407'),
('28408 : Pulkovo 1942 / Gauss-Kruger zone 8', '28408'),
('28409 : Pulkovo 1942 / Gauss-Kruger zone 9', '28409'),
('28410 : Pulkovo 1942 / Gauss-Kruger zone 10', '28410'),
('28411 : Pulkovo 1942 / Gauss-Kruger zone 11', '28411'),
('28412 : Pulkovo 1942 / Gauss-Kruger zone 12', '28412'),
('28413 : Pulkovo 1942 / Gauss-Kruger zone 13', '28413'),
('28414 : Pulkovo 1942 / Gauss-Kruger zone 14', '28414'),
('28415 : Pulkovo 1942 / Gauss-Kruger zone 15', '28415'),
('28416 : Pulkovo 1942 / Gauss-Kruger zone 16', '28416'),
('28417 : Pulkovo 1942 / Gauss-Kruger zone 17', '28417'),
('28418 : Pulkovo 1942 / Gauss-Kruger zone 18', '28418'),
('28419 : Pulkovo 1942 / Gauss-Kruger zone 19', '28419'),
('28420 : Pulkovo 1942 / Gauss-Kruger zone 20', '28420'),
('28421 : Pulkovo 1942 / Gauss-Kruger zone 21', '28421'),
('28422 : Pulkovo 1942 / Gauss-Kruger zone 22', '28422'),
('28423 : Pulkovo 1942 / Gauss-Kruger zone 23', '28423'),
('28424 : Pulkovo 1942 / Gauss-Kruger zone 24', '28424'),
('28425 : Pulkovo 1942 / Gauss-Kruger zone 25', '28425'),
('28426 : Pulkovo 1942 / Gauss-Kruger zone 26', '28426'),
('28427 : Pulkovo 1942 / Gauss-Kruger zone 27', '28427'),
('28428 : Pulkovo 1942 / Gauss-Kruger zone 28', '28428'),
('28429 : Pulkovo 1942 / Gauss-Kruger zone 29', '28429'),
('28430 : Pulkovo 1942 / Gauss-Kruger zone 30', '28430'),
('28431 : Pulkovo 1942 / Gauss-Kruger zone 31', '28431'),
('28432 : Pulkovo 1942 / Gauss-Kruger zone 32', '28432'),
('28462 : Pulkovo 1942 / Gauss-Kruger 2N', '28462'),
('28463 : Pulkovo 1942 / Gauss-Kruger 3N', '28463'),
('28464 : Pulkovo 1942 / Gauss-Kruger 4N', '28464'),
('28465 : Pulkovo 1942 / Gauss-Kruger 5N', '28465'),
('28466 : Pulkovo 1942 / Gauss-Kruger 6N', '28466'),
('28467 : Pulkovo 1942 / Gauss-Kruger 7N', '28467'),
('28468 : Pulkovo 1942 / Gauss-Kruger 8N', '28468'),
('28469 : Pulkovo 1942 / Gauss-Kruger 9N', '28469'),
('28470 : Pulkovo 1942 / Gauss-Kruger 10N', '28470'),
('28471 : Pulkovo 1942 / Gauss-Kruger 11N', '28471'),
('28472 : Pulkovo 1942 / Gauss-Kruger 12N', '28472'),
('28473 : Pulkovo 1942 / Gauss-Kruger 13N', '28473'),
('28474 : Pulkovo 1942 / Gauss-Kruger 14N', '28474'),
('28475 : Pulkovo 1942 / Gauss-Kruger 15N', '28475'),
('28476 : Pulkovo 1942 / Gauss-Kruger 16N', '28476'),
('28477 : Pulkovo 1942 / Gauss-Kruger 17N', '28477'),
('28478 : Pulkovo 1942 / Gauss-Kruger 18N', '28478'),
('28479 : Pulkovo 1942 / Gauss-Kruger 19N', '28479'),
('28480 : Pulkovo 1942 / Gauss-Kruger 20N', '28480'),
('28481 : Pulkovo 1942 / Gauss-Kruger 21N', '28481'),
('28482 : Pulkovo 1942 / Gauss-Kruger 22N', '28482'),
('28483 : Pulkovo 1942 / Gauss-Kruger 23N', '28483'),
('28484 : Pulkovo 1942 / Gauss-Kruger 24N', '28484'),
('28485 : Pulkovo 1942 / Gauss-Kruger 25N', '28485'),
('28486 : Pulkovo 1942 / Gauss-Kruger 26N', '28486'),
('28487 : Pulkovo 1942 / Gauss-Kruger 27N', '28487'),
('28488 : Pulkovo 1942 / Gauss-Kruger 28N', '28488'),
('28489 : Pulkovo 1942 / Gauss-Kruger 29N', '28489'),
('28490 : Pulkovo 1942 / Gauss-Kruger 30N', '28490'),
('28491 : Pulkovo 1942 / Gauss-Kruger 31N', '28491'),
('28492 : Pulkovo 1942 / Gauss-Kruger 32N', '28492'),
('28600 : Qatar 1974 / Qatar National Grid', '28600'),
('28991 : Amersfoort / RD Old', '28991'),
('28992 : Amersfoort / RD New', '28992'),
('29100 : SAD69 / Brazil Polyconic', '29100'),
('29101 : SAD69 / Brazil Polyconic', '29101'),
('29118 : SAD69 / UTM zone 18N', '29118'),
('29119 : SAD69 / UTM zone 19N', '29119'),
('29120 : SAD69 / UTM zone 20N', '29120'),
('29121 : SAD69 / UTM zone 21N', '29121'),
('29122 : SAD69 / UTM zone 22N', '29122'),
('29168 : SAD69 / UTM zone 18N', '29168'),
('29169 : SAD69 / UTM zone 19N', '29169'),
('29170 : SAD69 / UTM zone 20N', '29170'),
('29171 : SAD69 / UTM zone 21N', '29171'),
('29172 : SAD69 / UTM zone 22N', '29172'),
('29177 : SAD69 / UTM zone 17S', '29177'),
('29178 : SAD69 / UTM zone 18S', '29178'),
('29179 : SAD69 / UTM zone 19S', '29179'),
('29180 : SAD69 / UTM zone 20S', '29180'),
('29181 : SAD69 / UTM zone 21S', '29181'),
('29182 : SAD69 / UTM zone 22S', '29182'),
('29183 : SAD69 / UTM zone 23S', '29183'),
('29184 : SAD69 / UTM zone 24S', '29184'),
('29185 : SAD69 / UTM zone 25S', '29185'),
('29187 : SAD69 / UTM zone 17S', '29187'),
('29188 : SAD69 / UTM zone 18S', '29188'),
('29189 : SAD69 / UTM zone 19S', '29189'),
('29190 : SAD69 / UTM zone 20S', '29190'),
('29191 : SAD69 / UTM zone 21S', '29191'),
('29192 : SAD69 / UTM zone 22S', '29192'),
('29193 : SAD69 / UTM zone 23S', '29193'),
('29194 : SAD69 / UTM zone 24S', '29194'),
('29195 : SAD69 / UTM zone 25S', '29195'),
('29220 : Sapper Hill 1943 / UTM zone 20S', '29220'),
('29221 : Sapper Hill 1943 / UTM zone 21S', '29221'),
('29333 : Schwarzeck / UTM zone 33S', '29333'),
('29371 : Schwarzeck / Lo22/11', '29371'),
('29373 : Schwarzeck / Lo22/13', '29373'),
('29375 : Schwarzeck / Lo22/15', '29375'),
('29377 : Schwarzeck / Lo22/17', '29377'),
('29379 : Schwarzeck / Lo22/19', '29379'),
('29381 : Schwarzeck / Lo22/21', '29381'),
('29383 : Schwarzeck / Lo22/23', '29383'),
('29385 : Schwarzeck / Lo22/25', '29385'),
('29635 : Sudan / UTM zone 35N', '29635'),
('29636 : Sudan / UTM zone 36N', '29636'),
('29700 : Tananarive (Paris) / Laborde Grid', '29700'),
('29701 : Tananarive (Paris) / Laborde Grid', '29701'),
('29702 : Tananarive (Paris) / Laborde Grid approximation', '29702'),
('29738 : Tananarive / UTM zone 38S', '29738'),
('29739 : Tananarive / UTM zone 39S', '29739'),
('29849 : Timbalai 1948 / UTM zone 49N', '29849'),
('29850 : Timbalai 1948 / UTM zone 50N', '29850'),
('29871 : Timbalai 1948 / RSO Borneo (ch)', '29871'),
('29872 : Timbalai 1948 / RSO Borneo (ft)', '29872'),
('29873 : Timbalai 1948 / RSO Borneo (m)', '29873'),
('29900 : TM65 / Irish National Grid', '29900'),
('29901 : OSNI 1952 / Irish National Grid', '29901'),
('29902 : TM65 / Irish Grid', '29902'),
('29903 : TM75 / Irish Grid', '29903'),
('30161 : Tokyo / Japan Plane Rectangular CS I', '30161'),
('30162 : Tokyo / Japan Plane Rectangular CS II', '30162'),
('30163 : Tokyo / Japan Plane Rectangular CS III', '30163'),
('30164 : Tokyo / Japan Plane Rectangular CS IV', '30164'),
('30165 : Tokyo / Japan Plane Rectangular CS V', '30165'),
('30166 : Tokyo / Japan Plane Rectangular CS VI', '30166'),
('30167 : Tokyo / Japan Plane Rectangular CS VII', '30167'),
('30168 : Tokyo / Japan Plane Rectangular CS VIII', '30168'),
('30169 : Tokyo / Japan Plane Rectangular CS IX', '30169'),
('30170 : Tokyo / Japan Plane Rectangular CS X', '30170'),
('30171 : Tokyo / Japan Plane Rectangular CS XI', '30171'),
('30172 : Tokyo / Japan Plane Rectangular CS XII', '30172'),
('30173 : Tokyo / Japan Plane Rectangular CS XIII', '30173'),
('30174 : Tokyo / Japan Plane Rectangular CS XIV', '30174'),
('30175 : Tokyo / Japan Plane Rectangular CS XV', '30175'),
('30176 : Tokyo / Japan Plane Rectangular CS XVI', '30176'),
('30177 : Tokyo / Japan Plane Rectangular CS XVII', '30177'),
('30178 : Tokyo / Japan Plane Rectangular CS XVIII', '30178'),
('30179 : Tokyo / Japan Plane Rectangular CS XIX', '30179'),
('30200 : Trinidad 1903 / Trinidad Grid', '30200'),
('30339 : TC(1948) / UTM zone 39N', '30339'),
('30340 : TC(1948) / UTM zone 40N', '30340'),
('30491 : Voirol 1875 / Nord Algerie (ancienne)', '30491'),
('30492 : Voirol 1875 / Sud Algerie (ancienne)', '30492'),
('30493 : Voirol 1879 / Nord Algerie (ancienne)', '30493'),
('30494 : Voirol 1879 / Sud Algerie (ancienne)', '30494'),
('30729 : Nord Sahara 1959 / UTM zone 29N', '30729'),
('30730 : Nord Sahara 1959 / UTM zone 30N', '30730'),
('30731 : Nord Sahara 1959 / UTM zone 31N', '30731'),
('30732 : Nord Sahara 1959 / UTM zone 32N', '30732'),
('30791 : Nord Sahara 1959 / Nord Algerie', '30791'),
('30792 : Nord Sahara 1959 / Sud Algerie', '30792'),
('30800 : RT38 2.5 gon W', '30800'),
('31028 : Yoff / UTM zone 28N', '31028'),
('31121 : Zanderij / UTM zone 21N', '31121'),
('31154 : Zanderij / TM 54 NW', '31154'),
('31170 : Zanderij / Suriname Old TM', '31170'),
('31171 : Zanderij / Suriname TM', '31171'),
('31251 : MGI (Ferro) / Austria GK West Zone', '31251'),
('31252 : MGI (Ferro) / Austria GK Central Zone', '31252'),
('31253 : MGI (Ferro) / Austria GK East Zone', '31253'),
('31254 : MGI / Austria GK West', '31254'),
('31255 : MGI / Austria GK Central', '31255'),
('31256 : MGI / Austria GK East', '31256'),
('31257 : MGI / Austria GK M28', '31257'),
('31258 : MGI / Austria GK M31', '31258'),
('31259 : MGI / Austria GK M34', '31259'),
('31265 : MGI / 3-degree Gauss zone 5', '31265'),
('31266 : MGI / 3-degree Gauss zone 6', '31266'),
('31267 : MGI / 3-degree Gauss zone 7', '31267'),
('31268 : MGI / 3-degree Gauss zone 8', '31268'),
('31275 : MGI / Balkans zone 5', '31275'),
('31276 : MGI / Balkans zone 6', '31276'),
('31277 : MGI / Balkans zone 7', '31277'),
('31278 : MGI / Balkans zone 8', '31278'),
('31279 : MGI / Balkans zone 8', '31279'),
('31281 : MGI (Ferro) / Austria West Zone', '31281'),
('31282 : MGI (Ferro) / Austria Central Zone', '31282'),
('31283 : MGI (Ferro) / Austria East Zone', '31283'),
('31284 : MGI / Austria M28', '31284'),
('31285 : MGI / Austria M31', '31285'),
('31286 : MGI / Austria M34', '31286'),
('31287 : MGI / Austria Lambert', '31287'),
('31288 : MGI (Ferro) / M28', '31288'),
('31289 : MGI (Ferro) / M31', '31289'),
('31290 : MGI (Ferro) / M34', '31290'),
('31291 : MGI (Ferro) / Austria West Zone', '31291'),
('31292 : MGI (Ferro) / Austria Central Zone', '31292'),
('31293 : MGI (Ferro) / Austria East Zone', '31293'),
('31294 : MGI / M28', '31294'),
('31295 : MGI / M31', '31295'),
('31296 : MGI / M34', '31296'),
('31297 : MGI / Austria Lambert', '31297'),
('31300 : Belge 1972 / Belge Lambert 72', '31300'),
('31370 : Belge 1972 / Belgian Lambert 72', '31370'),
('31461 : DHDN / 3-degree Gauss zone 1', '31461'),
('31462 : DHDN / 3-degree Gauss zone 2', '31462'),
('31463 : DHDN / 3-degree Gauss zone 3', '31463'),
('31464 : DHDN / 3-degree Gauss zone 4', '31464'),
('31465 : DHDN / 3-degree Gauss zone 5', '31465'),
('31466 : DHDN / 3-degree Gauss-Kruger zone 2', '31466'),
('31467 : DHDN / 3-degree Gauss-Kruger zone 3', '31467'),
('31468 : DHDN / 3-degree Gauss-Kruger zone 4', '31468'),
('31469 : DHDN / 3-degree Gauss-Kruger zone 5', '31469'),
('31528 : Conakry 1905 / UTM zone 28N', '31528'),
('31529 : Conakry 1905 / UTM zone 29N', '31529'),
('31600 : Dealul Piscului 1930 / Stereo 33', '31600'),
('31700 : Dealul Piscului 1970/ Stereo 70', '31700'),
('31838 : NGN / UTM zone 38N', '31838'),
('31839 : NGN / UTM zone 39N', '31839'),
('31900 : KUDAMS / KTM', '31900'),
('31901 : KUDAMS / KTM', '31901'),
('31965 : SIRGAS 2000 / UTM zone 11N', '31965'),
('31966 : SIRGAS 2000 / UTM zone 12N', '31966'),
('31967 : SIRGAS 2000 / UTM zone 13N', '31967'),
('31968 : SIRGAS 2000 / UTM zone 14N', '31968'),
('31969 : SIRGAS 2000 / UTM zone 15N', '31969'),
('31970 : SIRGAS 2000 / UTM zone 16N', '31970'),
('31971 : SIRGAS 2000 / UTM zone 17N', '31971'),
('31972 : SIRGAS 2000 / UTM zone 18N', '31972'),
('31973 : SIRGAS 2000 / UTM zone 19N', '31973'),
('31974 : SIRGAS 2000 / UTM zone 20N', '31974'),
('31975 : SIRGAS 2000 / UTM zone 21N', '31975'),
('31976 : SIRGAS 2000 / UTM zone 22N', '31976'),
('31977 : SIRGAS 2000 / UTM zone 17S', '31977'),
('31978 : SIRGAS 2000 / UTM zone 18S', '31978'),
('31979 : SIRGAS 2000 / UTM zone 19S', '31979'),
('31980 : SIRGAS 2000 / UTM zone 20S', '31980'),
('31981 : SIRGAS 2000 / UTM zone 21S', '31981'),
('31982 : SIRGAS 2000 / UTM zone 22S', '31982'),
('31983 : SIRGAS 2000 / UTM zone 23S', '31983'),
('31984 : SIRGAS 2000 / UTM zone 24S', '31984'),
('31985 : SIRGAS 2000 / UTM zone 25S', '31985'),
('31986 : SIRGAS 1995 / UTM zone 17N', '31986'),
('31987 : SIRGAS 1995 / UTM zone 18N', '31987'),
('31988 : SIRGAS 1995 / UTM zone 19N', '31988'),
('31989 : SIRGAS 1995 / UTM zone 20N', '31989'),
('31990 : SIRGAS 1995 / UTM zone 21N', '31990'),
('31991 : SIRGAS 1995 / UTM zone 22N', '31991'),
('31992 : SIRGAS 1995 / UTM zone 17S', '31992'),
('31993 : SIRGAS 1995 / UTM zone 18S', '31993'),
('31994 : SIRGAS 1995 / UTM zone 19S', '31994'),
('31995 : SIRGAS 1995 / UTM zone 20S', '31995'),
('31996 : SIRGAS 1995 / UTM zone 21S', '31996'),
('31997 : SIRGAS 1995 / UTM zone 22S', '31997'),
('31998 : SIRGAS 1995 / UTM zone 23S', '31998'),
('31999 : SIRGAS 1995 / UTM zone 24S', '31999'),
('32000 : SIRGAS 1995 / UTM zone 25S', '32000'),
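# NAD27 and NAD83 State Plane zones in the 32xxx range (32001-32199).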
('32001 : NAD27 / Montana North', '32001'),
('32002 : NAD27 / Montana Central', '32002'),
('32003 : NAD27 / Montana South', '32003'),
('32005 : NAD27 / Nebraska North', '32005'),
('32006 : NAD27 / Nebraska South', '32006'),
('32007 : NAD27 / Nevada East', '32007'),
('32008 : NAD27 / Nevada Central', '32008'),
('32009 : NAD27 / Nevada West', '32009'),
('32010 : NAD27 / New Hampshire', '32010'),
('32011 : NAD27 / New Jersey', '32011'),
('32012 : NAD27 / New Mexico East', '32012'),
('32013 : NAD27 / New Mexico Central', '32013'),
('32014 : NAD27 / New Mexico West', '32014'),
('32015 : NAD27 / New York East', '32015'),
('32016 : NAD27 / New York Central', '32016'),
('32017 : NAD27 / New York West', '32017'),
('32018 : NAD27 / New York Long Island', '32018'),
('32019 : NAD27 / North Carolina', '32019'),
('32020 : NAD27 / North Dakota North', '32020'),
('32021 : NAD27 / North Dakota South', '32021'),
('32022 : NAD27 / Ohio North', '32022'),
('32023 : NAD27 / Ohio South', '32023'),
('32024 : NAD27 / Oklahoma North', '32024'),
('32025 : NAD27 / Oklahoma South', '32025'),
('32026 : NAD27 / Oregon North', '32026'),
('32027 : NAD27 / Oregon South', '32027'),
('32028 : NAD27 / Pennsylvania North', '32028'),
('32029 : NAD27 / Pennsylvania South', '32029'),
('32030 : NAD27 / Rhode Island', '32030'),
('32031 : NAD27 / South Carolina North', '32031'),
('32033 : NAD27 / South Carolina South', '32033'),
('32034 : NAD27 / South Dakota North', '32034'),
('32035 : NAD27 / South Dakota South', '32035'),
('32036 : NAD27 / Tennessee', '32036'),
('32037 : NAD27 / Texas North', '32037'),
('32038 : NAD27 / Texas North Central', '32038'),
('32039 : NAD27 / Texas Central', '32039'),
('32040 : NAD27 / Texas South Central', '32040'),
('32041 : NAD27 / Texas South', '32041'),
('32042 : NAD27 / Utah North', '32042'),
('32043 : NAD27 / Utah Central', '32043'),
('32044 : NAD27 / Utah South', '32044'),
('32045 : NAD27 / Vermont', '32045'),
('32046 : NAD27 / Virginia North', '32046'),
('32047 : NAD27 / Virginia South', '32047'),
('32048 : NAD27 / Washington North', '32048'),
('32049 : NAD27 / Washington South', '32049'),
('32050 : NAD27 / West Virginia North', '32050'),
('32051 : NAD27 / West Virginia South', '32051'),
('32052 : NAD27 / Wisconsin North', '32052'),
('32053 : NAD27 / Wisconsin Central', '32053'),
('32054 : NAD27 / Wisconsin South', '32054'),
('32055 : NAD27 / Wyoming East', '32055'),
('32056 : NAD27 / Wyoming East Central', '32056'),
('32057 : NAD27 / Wyoming West Central', '32057'),
('32058 : NAD27 / Wyoming West', '32058'),
('32061 : NAD27 / Guatemala Norte', '32061'),
('32062 : NAD27 / Guatemala Sur', '32062'),
('32064 : NAD27 / BLM 14N (ftUS)', '32064'),
('32065 : NAD27 / BLM 15N (ftUS)', '32065'),
('32066 : NAD27 / BLM 16N (ftUS)', '32066'),
('32067 : NAD27 / BLM 17N (ftUS)', '32067'),
('32074 : NAD27 / BLM 14N (feet)', '32074'),
('32075 : NAD27 / BLM 15N (feet)', '32075'),
('32076 : NAD27 / BLM 16N (feet)', '32076'),
('32077 : NAD27 / BLM 17N (feet)', '32077'),
('32081 : NAD27 / MTM zone 1', '32081'),
('32082 : NAD27 / MTM zone 2', '32082'),
('32083 : NAD27 / MTM zone 3', '32083'),
('32084 : NAD27 / MTM zone 4', '32084'),
('32085 : NAD27 / MTM zone 5', '32085'),
('32086 : NAD27 / MTM zone 6', '32086'),
('32098 : NAD27 / Quebec Lambert', '32098'),
('32099 : NAD27 / Louisiana Offshore', '32099'),
('32100 : NAD83 / Montana', '32100'),
('32104 : NAD83 / Nebraska', '32104'),
('32107 : NAD83 / Nevada East', '32107'),
('32108 : NAD83 / Nevada Central', '32108'),
('32109 : NAD83 / Nevada West', '32109'),
('32110 : NAD83 / New Hampshire', '32110'),
('32111 : NAD83 / New Jersey', '32111'),
('32112 : NAD83 / New Mexico East', '32112'),
('32113 : NAD83 / New Mexico Central', '32113'),
('32114 : NAD83 / New Mexico West', '32114'),
('32115 : NAD83 / New York East', '32115'),
('32116 : NAD83 / New York Central', '32116'),
('32117 : NAD83 / New York West', '32117'),
('32118 : NAD83 / New York Long Island', '32118'),
('32119 : NAD83 / North Carolina', '32119'),
('32120 : NAD83 / North Dakota North', '32120'),
('32121 : NAD83 / North Dakota South', '32121'),
('32122 : NAD83 / Ohio North', '32122'),
('32123 : NAD83 / Ohio South', '32123'),
('32124 : NAD83 / Oklahoma North', '32124'),
('32125 : NAD83 / Oklahoma South', '32125'),
('32126 : NAD83 / Oregon North', '32126'),
('32127 : NAD83 / Oregon South', '32127'),
('32128 : NAD83 / Pennsylvania North', '32128'),
('32129 : NAD83 / Pennsylvania South', '32129'),
('32130 : NAD83 / Rhode Island', '32130'),
('32133 : NAD83 / South Carolina', '32133'),
('32134 : NAD83 / South Dakota North', '32134'),
('32135 : NAD83 / South Dakota South', '32135'),
('32136 : NAD83 / Tennessee', '32136'),
('32137 : NAD83 / Texas North', '32137'),
('32138 : NAD83 / Texas North Central', '32138'),
('32139 : NAD83 / Texas Central', '32139'),
('32140 : NAD83 / Texas South Central', '32140'),
('32141 : NAD83 / Texas South', '32141'),
('32142 : NAD83 / Utah North', '32142'),
('32143 : NAD83 / Utah Central', '32143'),
('32144 : NAD83 / Utah South', '32144'),
('32145 : NAD83 / Vermont', '32145'),
('32146 : NAD83 / Virginia North', '32146'),
('32147 : NAD83 / Virginia South', '32147'),
('32148 : NAD83 / Washington North', '32148'),
('32149 : NAD83 / Washington South', '32149'),
('32150 : NAD83 / West Virginia North', '32150'),
('32151 : NAD83 / West Virginia South', '32151'),
('32152 : NAD83 / Wisconsin North', '32152'),
('32153 : NAD83 / Wisconsin Central', '32153'),
('32154 : NAD83 / Wisconsin South', '32154'),
('32155 : NAD83 / Wyoming East', '32155'),
('32156 : NAD83 / Wyoming East Central', '32156'),
('32157 : NAD83 / Wyoming West Central', '32157'),
('32158 : NAD83 / Wyoming West', '32158'),
('32161 : NAD83 / Puerto Rico & Virgin Is.', '32161'),
('32164 : NAD83 / BLM 14N (ftUS)', '32164'),
('32165 : NAD83 / BLM 15N (ftUS)', '32165'),
('32166 : NAD83 / BLM 16N (ftUS)', '32166'),
('32167 : NAD83 / BLM 17N (ftUS)', '32167'),
('32180 : NAD83 / SCoPQ zone 2', '32180'),
('32181 : NAD83 / MTM zone 1', '32181'),
('32182 : NAD83 / MTM zone 2', '32182'),
('32183 : NAD83 / MTM zone 3', '32183'),
('32184 : NAD83 / MTM zone 4', '32184'),
('32185 : NAD83 / MTM zone 5', '32185'),
('32186 : NAD83 / MTM zone 6', '32186'),
('32187 : NAD83 / MTM zone 7', '32187'),
('32188 : NAD83 / MTM zone 8', '32188'),
('32189 : NAD83 / MTM zone 9', '32189'),
('32190 : NAD83 / MTM zone 10', '32190'),
('32191 : NAD83 / MTM zone 11', '32191'),
('32192 : NAD83 / MTM zone 12', '32192'),
('32193 : NAD83 / MTM zone 13', '32193'),
('32194 : NAD83 / MTM zone 14', '32194'),
('32195 : NAD83 / MTM zone 15', '32195'),
('32196 : NAD83 / MTM zone 16', '32196'),
('32197 : NAD83 / MTM zone 17', '32197'),
('32198 : NAD83 / Quebec Lambert', '32198'),
('32199 : NAD83 / Louisiana Offshore', '32199'),
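# WGS 72 and WGS 72BE: UTM zones, northern hemisphere then southern hemisphere (32201-32560).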
('32201 : WGS 72 / UTM zone 1N', '32201'),
('32202 : WGS 72 / UTM zone 2N', '32202'),
('32203 : WGS 72 / UTM zone 3N', '32203'),
('32204 : WGS 72 / UTM zone 4N', '32204'),
('32205 : WGS 72 / UTM zone 5N', '32205'),
('32206 : WGS 72 / UTM zone 6N', '32206'),
('32207 : WGS 72 / UTM zone 7N', '32207'),
('32208 : WGS 72 / UTM zone 8N', '32208'),
('32209 : WGS 72 / UTM zone 9N', '32209'),
('32210 : WGS 72 / UTM zone 10N', '32210'),
('32211 : WGS 72 / UTM zone 11N', '32211'),
('32212 : WGS 72 / UTM zone 12N', '32212'),
('32213 : WGS 72 / UTM zone 13N', '32213'),
('32214 : WGS 72 / UTM zone 14N', '32214'),
('32215 : WGS 72 / UTM zone 15N', '32215'),
('32216 : WGS 72 / UTM zone 16N', '32216'),
('32217 : WGS 72 / UTM zone 17N', '32217'),
('32218 : WGS 72 / UTM zone 18N', '32218'),
('32219 : WGS 72 / UTM zone 19N', '32219'),
('32220 : WGS 72 / UTM zone 20N', '32220'),
('32221 : WGS 72 / UTM zone 21N', '32221'),
('32222 : WGS 72 / UTM zone 22N', '32222'),
('32223 : WGS 72 / UTM zone 23N', '32223'),
('32224 : WGS 72 / UTM zone 24N', '32224'),
('32225 : WGS 72 / UTM zone 25N', '32225'),
('32226 : WGS 72 / UTM zone 26N', '32226'),
('32227 : WGS 72 / UTM zone 27N', '32227'),
('32228 : WGS 72 / UTM zone 28N', '32228'),
('32229 : WGS 72 / UTM zone 29N', '32229'),
('32230 : WGS 72 / UTM zone 30N', '32230'),
('32231 : WGS 72 / UTM zone 31N', '32231'),
('32232 : WGS 72 / UTM zone 32N', '32232'),
('32233 : WGS 72 / UTM zone 33N', '32233'),
('32234 : WGS 72 / UTM zone 34N', '32234'),
('32235 : WGS 72 / UTM zone 35N', '32235'),
('32236 : WGS 72 / UTM zone 36N', '32236'),
('32237 : WGS 72 / UTM zone 37N', '32237'),
('32238 : WGS 72 / UTM zone 38N', '32238'),
('32239 : WGS 72 / UTM zone 39N', '32239'),
('32240 : WGS 72 / UTM zone 40N', '32240'),
('32241 : WGS 72 / UTM zone 41N', '32241'),
('32242 : WGS 72 / UTM zone 42N', '32242'),
('32243 : WGS 72 / UTM zone 43N', '32243'),
('32244 : WGS 72 / UTM zone 44N', '32244'),
('32245 : WGS 72 / UTM zone 45N', '32245'),
('32246 : WGS 72 / UTM zone 46N', '32246'),
('32247 : WGS 72 / UTM zone 47N', '32247'),
('32248 : WGS 72 / UTM zone 48N', '32248'),
('32249 : WGS 72 / UTM zone 49N', '32249'),
('32250 : WGS 72 / UTM zone 50N', '32250'),
('32251 : WGS 72 / UTM zone 51N', '32251'),
('32252 : WGS 72 / UTM zone 52N', '32252'),
('32253 : WGS 72 / UTM zone 53N', '32253'),
('32254 : WGS 72 / UTM zone 54N', '32254'),
('32255 : WGS 72 / UTM zone 55N', '32255'),
('32256 : WGS 72 / UTM zone 56N', '32256'),
('32257 : WGS 72 / UTM zone 57N', '32257'),
('32258 : WGS 72 / UTM zone 58N', '32258'),
('32259 : WGS 72 / UTM zone 59N', '32259'),
('32260 : WGS 72 / UTM zone 60N', '32260'),
('32301 : WGS 72 / UTM zone 1S', '32301'),
('32302 : WGS 72 / UTM zone 2S', '32302'),
('32303 : WGS 72 / UTM zone 3S', '32303'),
('32304 : WGS 72 / UTM zone 4S', '32304'),
('32305 : WGS 72 / UTM zone 5S', '32305'),
('32306 : WGS 72 / UTM zone 6S', '32306'),
('32307 : WGS 72 / UTM zone 7S', '32307'),
('32308 : WGS 72 / UTM zone 8S', '32308'),
('32309 : WGS 72 / UTM zone 9S', '32309'),
('32310 : WGS 72 / UTM zone 10S', '32310'),
('32311 : WGS 72 / UTM zone 11S', '32311'),
('32312 : WGS 72 / UTM zone 12S', '32312'),
('32313 : WGS 72 / UTM zone 13S', '32313'),
('32314 : WGS 72 / UTM zone 14S', '32314'),
('32315 : WGS 72 / UTM zone 15S', '32315'),
('32316 : WGS 72 / UTM zone 16S', '32316'),
('32317 : WGS 72 / UTM zone 17S', '32317'),
('32318 : WGS 72 / UTM zone 18S', '32318'),
('32319 : WGS 72 / UTM zone 19S', '32319'),
('32320 : WGS 72 / UTM zone 20S', '32320'),
('32321 : WGS 72 / UTM zone 21S', '32321'),
('32322 : WGS 72 / UTM zone 22S', '32322'),
('32323 : WGS 72 / UTM zone 23S', '32323'),
('32324 : WGS 72 / UTM zone 24S', '32324'),
('32325 : WGS 72 / UTM zone 25S', '32325'),
('32326 : WGS 72 / UTM zone 26S', '32326'),
('32327 : WGS 72 / UTM zone 27S', '32327'),
('32328 : WGS 72 / UTM zone 28S', '32328'),
('32329 : WGS 72 / UTM zone 29S', '32329'),
('32330 : WGS 72 / UTM zone 30S', '32330'),
('32331 : WGS 72 / UTM zone 31S', '32331'),
('32332 : WGS 72 / UTM zone 32S', '32332'),
('32333 : WGS 72 / UTM zone 33S', '32333'),
('32334 : WGS 72 / UTM zone 34S', '32334'),
('32335 : WGS 72 / UTM zone 35S', '32335'),
('32336 : WGS 72 / UTM zone 36S', '32336'),
('32337 : WGS 72 / UTM zone 37S', '32337'),
('32338 : WGS 72 / UTM zone 38S', '32338'),
('32339 : WGS 72 / UTM zone 39S', '32339'),
('32340 : WGS 72 / UTM zone 40S', '32340'),
('32341 : WGS 72 / UTM zone 41S', '32341'),
('32342 : WGS 72 / UTM zone 42S', '32342'),
('32343 : WGS 72 / UTM zone 43S', '32343'),
('32344 : WGS 72 / UTM zone 44S', '32344'),
('32345 : WGS 72 / UTM zone 45S', '32345'),
('32346 : WGS 72 / UTM zone 46S', '32346'),
('32347 : WGS 72 / UTM zone 47S', '32347'),
('32348 : WGS 72 / UTM zone 48S', '32348'),
('32349 : WGS 72 / UTM zone 49S', '32349'),
('32350 : WGS 72 / UTM zone 50S', '32350'),
('32351 : WGS 72 / UTM zone 51S', '32351'),
('32352 : WGS 72 / UTM zone 52S', '32352'),
('32353 : WGS 72 / UTM zone 53S', '32353'),
('32354 : WGS 72 / UTM zone 54S', '32354'),
('32355 : WGS 72 / UTM zone 55S', '32355'),
('32356 : WGS 72 / UTM zone 56S', '32356'),
('32357 : WGS 72 / UTM zone 57S', '32357'),
('32358 : WGS 72 / UTM zone 58S', '32358'),
('32359 : WGS 72 / UTM zone 59S', '32359'),
('32360 : WGS 72 / UTM zone 60S', '32360'),
('32401 : WGS 72BE / UTM zone 1N', '32401'),
('32402 : WGS 72BE / UTM zone 2N', '32402'),
('32403 : WGS 72BE / UTM zone 3N', '32403'),
('32404 : WGS 72BE / UTM zone 4N', '32404'),
('32405 : WGS 72BE / UTM zone 5N', '32405'),
('32406 : WGS 72BE / UTM zone 6N', '32406'),
('32407 : WGS 72BE / UTM zone 7N', '32407'),
('32408 : WGS 72BE / UTM zone 8N', '32408'),
('32409 : WGS 72BE / UTM zone 9N', '32409'),
('32410 : WGS 72BE / UTM zone 10N', '32410'),
('32411 : WGS 72BE / UTM zone 11N', '32411'),
('32412 : WGS 72BE / UTM zone 12N', '32412'),
('32413 : WGS 72BE / UTM zone 13N', '32413'),
('32414 : WGS 72BE / UTM zone 14N', '32414'),
('32415 : WGS 72BE / UTM zone 15N', '32415'),
('32416 : WGS 72BE / UTM zone 16N', '32416'),
('32417 : WGS 72BE / UTM zone 17N', '32417'),
('32418 : WGS 72BE / UTM zone 18N', '32418'),
('32419 : WGS 72BE / UTM zone 19N', '32419'),
('32420 : WGS 72BE / UTM zone 20N', '32420'),
('32421 : WGS 72BE / UTM zone 21N', '32421'),
('32422 : WGS 72BE / UTM zone 22N', '32422'),
('32423 : WGS 72BE / UTM zone 23N', '32423'),
('32424 : WGS 72BE / UTM zone 24N', '32424'),
('32425 : WGS 72BE / UTM zone 25N', '32425'),
('32426 : WGS 72BE / UTM zone 26N', '32426'),
('32427 : WGS 72BE / UTM zone 27N', '32427'),
('32428 : WGS 72BE / UTM zone 28N', '32428'),
('32429 : WGS 72BE / UTM zone 29N', '32429'),
('32430 : WGS 72BE / UTM zone 30N', '32430'),
('32431 : WGS 72BE / UTM zone 31N', '32431'),
('32432 : WGS 72BE / UTM zone 32N', '32432'),
('32433 : WGS 72BE / UTM zone 33N', '32433'),
('32434 : WGS 72BE / UTM zone 34N', '32434'),
('32435 : WGS 72BE / UTM zone 35N', '32435'),
('32436 : WGS 72BE / UTM zone 36N', '32436'),
('32437 : WGS 72BE / UTM zone 37N', '32437'),
('32438 : WGS 72BE / UTM zone 38N', '32438'),
('32439 : WGS 72BE / UTM zone 39N', '32439'),
('32440 : WGS 72BE / UTM zone 40N', '32440'),
('32441 : WGS 72BE / UTM zone 41N', '32441'),
('32442 : WGS 72BE / UTM zone 42N', '32442'),
('32443 : WGS 72BE / UTM zone 43N', '32443'),
('32444 : WGS 72BE / UTM zone 44N', '32444'),
('32445 : WGS 72BE / UTM zone 45N', '32445'),
('32446 : WGS 72BE / UTM zone 46N', '32446'),
('32447 : WGS 72BE / UTM zone 47N', '32447'),
('32448 : WGS 72BE / UTM zone 48N', '32448'),
('32449 : WGS 72BE / UTM zone 49N', '32449'),
('32450 : WGS 72BE / UTM zone 50N', '32450'),
('32451 : WGS 72BE / UTM zone 51N', '32451'),
('32452 : WGS 72BE / UTM zone 52N', '32452'),
('32453 : WGS 72BE / UTM zone 53N', '32453'),
('32454 : WGS 72BE / UTM zone 54N', '32454'),
('32455 : WGS 72BE / UTM zone 55N', '32455'),
('32456 : WGS 72BE / UTM zone 56N', '32456'),
('32457 : WGS 72BE / UTM zone 57N', '32457'),
('32458 : WGS 72BE / UTM zone 58N', '32458'),
('32459 : WGS 72BE / UTM zone 59N', '32459'),
('32460 : WGS 72BE / UTM zone 60N', '32460'),
('32501 : WGS 72BE / UTM zone 1S', '32501'),
('32502 : WGS 72BE / UTM zone 2S', '32502'),
('32503 : WGS 72BE / UTM zone 3S', '32503'),
('32504 : WGS 72BE / UTM zone 4S', '32504'),
('32505 : WGS 72BE / UTM zone 5S', '32505'),
('32506 : WGS 72BE / UTM zone 6S', '32506'),
('32507 : WGS 72BE / UTM zone 7S', '32507'),
('32508 : WGS 72BE / UTM zone 8S', '32508'),
('32509 : WGS 72BE / UTM zone 9S', '32509'),
('32510 : WGS 72BE / UTM zone 10S', '32510'),
('32511 : WGS 72BE / UTM zone 11S', '32511'),
('32512 : WGS 72BE / UTM zone 12S', '32512'),
('32513 : WGS 72BE / UTM zone 13S', '32513'),
('32514 : WGS 72BE / UTM zone 14S', '32514'),
('32515 : WGS 72BE / UTM zone 15S', '32515'),
('32516 : WGS 72BE / UTM zone 16S', '32516'),
('32517 : WGS 72BE / UTM zone 17S', '32517'),
('32518 : WGS 72BE / UTM zone 18S', '32518'),
('32519 : WGS 72BE / UTM zone 19S', '32519'),
('32520 : WGS 72BE / UTM zone 20S', '32520'),
('32521 : WGS 72BE / UTM zone 21S', '32521'),
('32522 : WGS 72BE / UTM zone 22S', '32522'),
('32523 : WGS 72BE / UTM zone 23S', '32523'),
('32524 : WGS 72BE / UTM zone 24S', '32524'),
('32525 : WGS 72BE / UTM zone 25S', '32525'),
('32526 : WGS 72BE / UTM zone 26S', '32526'),
('32527 : WGS 72BE / UTM zone 27S', '32527'),
('32528 : WGS 72BE / UTM zone 28S', '32528'),
('32529 : WGS 72BE / UTM zone 29S', '32529'),
('32530 : WGS 72BE / UTM zone 30S', '32530'),
('32531 : WGS 72BE / UTM zone 31S', '32531'),
('32532 : WGS 72BE / UTM zone 32S', '32532'),
('32533 : WGS 72BE / UTM zone 33S', '32533'),
('32534 : WGS 72BE / UTM zone 34S', '32534'),
('32535 : WGS 72BE / UTM zone 35S', '32535'),
('32536 : WGS 72BE / UTM zone 36S', '32536'),
('32537 : WGS 72BE / UTM zone 37S', '32537'),
('32538 : WGS 72BE / UTM zone 38S', '32538'),
('32539 : WGS 72BE / UTM zone 39S', '32539'),
('32540 : WGS 72BE / UTM zone 40S', '32540'),
('32541 : WGS 72BE / UTM zone 41S', '32541'),
('32542 : WGS 72BE / UTM zone 42S', '32542'),
('32543 : WGS 72BE / UTM zone 43S', '32543'),
('32544 : WGS 72BE / UTM zone 44S', '32544'),
('32545 : WGS 72BE / UTM zone 45S', '32545'),
('32546 : WGS 72BE / UTM zone 46S', '32546'),
('32547 : WGS 72BE / UTM zone 47S', '32547'),
('32548 : WGS 72BE / UTM zone 48S', '32548'),
('32549 : WGS 72BE / UTM zone 49S', '32549'),
('32550 : WGS 72BE / UTM zone 50S', '32550'),
('32551 : WGS 72BE / UTM zone 51S', '32551'),
('32552 : WGS 72BE / UTM zone 52S', '32552'),
('32553 : WGS 72BE / UTM zone 53S', '32553'),
('32554 : WGS 72BE / UTM zone 54S', '32554'),
('32555 : WGS 72BE / UTM zone 55S', '32555'),
('32556 : WGS 72BE / UTM zone 56S', '32556'),
('32557 : WGS 72BE / UTM zone 57S', '32557'),
('32558 : WGS 72BE / UTM zone 58S', '32558'),
('32559 : WGS 72BE / UTM zone 59S', '32559'),
('32560 : WGS 72BE / UTM zone 60S', '32560'),
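# WGS 84: UTM zones (north and south), UPS, and related global systems (32600-32766).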
('32600 : WGS 84 / UTM grid system (northern hemisphere)', '32600'),
('32601 : WGS 84 / UTM zone 1N', '32601'),
('32602 : WGS 84 / UTM zone 2N', '32602'),
('32603 : WGS 84 / UTM zone 3N', '32603'),
('32604 : WGS 84 / UTM zone 4N', '32604'),
('32605 : WGS 84 / UTM zone 5N', '32605'),
('32606 : WGS 84 / UTM zone 6N', '32606'),
('32607 : WGS 84 / UTM zone 7N', '32607'),
('32608 : WGS 84 / UTM zone 8N', '32608'),
('32609 : WGS 84 / UTM zone 9N', '32609'),
('32610 : WGS 84 / UTM zone 10N', '32610'),
('32611 : WGS 84 / UTM zone 11N', '32611'),
('32612 : WGS 84 / UTM zone 12N', '32612'),
('32613 : WGS 84 / UTM zone 13N', '32613'),
('32614 : WGS 84 / UTM zone 14N', '32614'),
('32615 : WGS 84 / UTM zone 15N', '32615'),
('32616 : WGS 84 / UTM zone 16N', '32616'),
('32617 : WGS 84 / UTM zone 17N', '32617'),
('32618 : WGS 84 / UTM zone 18N', '32618'),
('32619 : WGS 84 / UTM zone 19N', '32619'),
('32620 : WGS 84 / UTM zone 20N', '32620'),
('32621 : WGS 84 / UTM zone 21N', '32621'),
('32622 : WGS 84 / UTM zone 22N', '32622'),
('32623 : WGS 84 / UTM zone 23N', '32623'),
('32624 : WGS 84 / UTM zone 24N', '32624'),
('32625 : WGS 84 / UTM zone 25N', '32625'),
('32626 : WGS 84 / UTM zone 26N', '32626'),
('32627 : WGS 84 / UTM zone 27N', '32627'),
('32628 : WGS 84 / UTM zone 28N', '32628'),
('32629 : WGS 84 / UTM zone 29N', '32629'),
('32630 : WGS 84 / UTM zone 30N', '32630'),
('32631 : WGS 84 / UTM zone 31N', '32631'),
('32632 : WGS 84 / UTM zone 32N', '32632'),
('32633 : WGS 84 / UTM zone 33N', '32633'),
('32634 : WGS 84 / UTM zone 34N', '32634'),
('32635 : WGS 84 / UTM zone 35N', '32635'),
('32636 : WGS 84 / UTM zone 36N', '32636'),
('32637 : WGS 84 / UTM zone 37N', '32637'),
('32638 : WGS 84 / UTM zone 38N', '32638'),
('32639 : WGS 84 / UTM zone 39N', '32639'),
('32640 : WGS 84 / UTM zone 40N', '32640'),
('32641 : WGS 84 / UTM zone 41N', '32641'),
('32642 : WGS 84 / UTM zone 42N', '32642'),
('32643 : WGS 84 / UTM zone 43N', '32643'),
('32644 : WGS 84 / UTM zone 44N', '32644'),
('32645 : WGS 84 / UTM zone 45N', '32645'),
('32646 : WGS 84 / UTM zone 46N', '32646'),
('32647 : WGS 84 / UTM zone 47N', '32647'),
('32648 : WGS 84 / UTM zone 48N', '32648'),
('32649 : WGS 84 / UTM zone 49N', '32649'),
('32650 : WGS 84 / UTM zone 50N', '32650'),
('32651 : WGS 84 / UTM zone 51N', '32651'),
('32652 : WGS 84 / UTM zone 52N', '32652'),
('32653 : WGS 84 / UTM zone 53N', '32653'),
('32654 : WGS 84 / UTM zone 54N', '32654'),
('32655 : WGS 84 / UTM zone 55N', '32655'),
('32656 : WGS 84 / UTM zone 56N', '32656'),
('32657 : WGS 84 / UTM zone 57N', '32657'),
('32658 : WGS 84 / UTM zone 58N', '32658'),
('32659 : WGS 84 / UTM zone 59N', '32659'),
('32660 : WGS 84 / UTM zone 60N', '32660'),
('32661 : WGS 84 / UPS North (N,E)', '32661'),
('32662 : WGS 84 / Plate Carree', '32662'),
('32663 : WGS 84 / World Equidistant Cylindrical', '32663'),
('32664 : WGS 84 / BLM 14N (ftUS)', '32664'),
('32665 : WGS 84 / BLM 15N (ftUS)', '32665'),
('32666 : WGS 84 / BLM 16N (ftUS)', '32666'),
('32667 : WGS 84 / BLM 17N (ftUS)', '32667'),
('32700 : WGS 84 / UTM grid system (southern hemisphere)', '32700'),
('32701 : WGS 84 / UTM zone 1S', '32701'),
('32702 : WGS 84 / UTM zone 2S', '32702'),
('32703 : WGS 84 / UTM zone 3S', '32703'),
('32704 : WGS 84 / UTM zone 4S', '32704'),
('32705 : WGS 84 / UTM zone 5S', '32705'),
('32706 : WGS 84 / UTM zone 6S', '32706'),
('32707 : WGS 84 / UTM zone 7S', '32707'),
('32708 : WGS 84 / UTM zone 8S', '32708'),
('32709 : WGS 84 / UTM zone 9S', '32709'),
('32710 : WGS 84 / UTM zone 10S', '32710'),
('32711 : WGS 84 / UTM zone 11S', '32711'),
('32712 : WGS 84 / UTM zone 12S', '32712'),
('32713 : WGS 84 / UTM zone 13S', '32713'),
('32714 : WGS 84 / UTM zone 14S', '32714'),
('32715 : WGS 84 / UTM zone 15S', '32715'),
('32716 : WGS 84 / UTM zone 16S', '32716'),
('32717 : WGS 84 / UTM zone 17S', '32717'),
('32718 : WGS 84 / UTM zone 18S', '32718'),
('32719 : WGS 84 / UTM zone 19S', '32719'),
('32720 : WGS 84 / UTM zone 20S', '32720'),
('32721 : WGS 84 / UTM zone 21S', '32721'),
('32722 : WGS 84 / UTM zone 22S', '32722'),
('32723 : WGS 84 / UTM zone 23S', '32723'),
('32724 : WGS 84 / UTM zone 24S', '32724'),
('32725 : WGS 84 / UTM zone 25S', '32725'),
('32726 : WGS 84 / UTM zone 26S', '32726'),
('32727 : WGS 84 / UTM zone 27S', '32727'),
('32728 : WGS 84 / UTM zone 28S', '32728'),
('32729 : WGS 84 / UTM zone 29S', '32729'),
('32730 : WGS 84 / UTM zone 30S', '32730'),
('32731 : WGS 84 / UTM zone 31S', '32731'),
('32732 : WGS 84 / UTM zone 32S', '32732'),
('32733 : WGS 84 / UTM zone 33S', '32733'),
('32734 : WGS 84 / UTM zone 34S', '32734'),
('32735 : WGS 84 / UTM zone 35S', '32735'),
('32736 : WGS 84 / UTM zone 36S', '32736'),
('32737 : WGS 84 / UTM zone 37S', '32737'),
('32738 : WGS 84 / UTM zone 38S', '32738'),
('32739 : WGS 84 / UTM zone 39S', '32739'),
('32740 : WGS 84 / UTM zone 40S', '32740'),
('32741 : WGS 84 / UTM zone 41S', '32741'),
('32742 : WGS 84 / UTM zone 42S', '32742'),
('32743 : WGS 84 / UTM zone 43S', '32743'),
('32744 : WGS 84 / UTM zone 44S', '32744'),
('32745 : WGS 84 / UTM zone 45S', '32745'),
('32746 : WGS 84 / UTM zone 46S', '32746'),
('32747 : WGS 84 / UTM zone 47S', '32747'),
('32748 : WGS 84 / UTM zone 48S', '32748'),
('32749 : WGS 84 / UTM zone 49S', '32749'),
('32750 : WGS 84 / UTM zone 50S', '32750'),
('32751 : WGS 84 / UTM zone 51S', '32751'),
('32752 : WGS 84 / UTM zone 52S', '32752'),
('32753 : WGS 84 / UTM zone 53S', '32753'),
('32754 : WGS 84 / UTM zone 54S', '32754'),
('32755 : WGS 84 / UTM zone 55S', '32755'),
('32756 : WGS 84 / UTM zone 56S', '32756'),
('32757 : WGS 84 / UTM zone 57S', '32757'),
('32758 : WGS 84 / UTM zone 58S', '32758'),
('32759 : WGS 84 / UTM zone 59S', '32759'),
('32760 : WGS 84 / UTM zone 60S', '32760'),
('32761 : WGS 84 / UPS South (N,E)', '32761'),
('32766 : WGS 84 / TM 36 SE', '32766'),
] | python |
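

# A minimal usage sketch, not part of the original module: each entry in the
# choices list above pairs a display label ("<EPSG code> : <CRS name>") with the
# EPSG code as a string, so a code-to-name lookup table can be derived from it.
# The name EPSG_CODE_CHOICES and the helper below are illustrative assumptions
# about how the list might be consumed, not an API defined by this source.
def build_epsg_lookup(choices):
    """Map EPSG code strings to their human-readable CRS names."""
    lookup = {}
    for label, code in choices:
        # Labels have the form "32630 : WGS 84 / UTM zone 30N"; keep only the name part.
        lookup[code] = label.split(" : ", 1)[-1]
    return lookup


# Example (assuming the list above is bound to EPSG_CODE_CHOICES):
#     names = build_epsg_lookup(EPSG_CODE_CHOICES)
#     names["32630"]  # -> "WGS 84 / UTM zone 30N"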