| id | text | dataset_id |
| --- | --- | --- |
| stringlengths 1-8 | stringlengths 6-1.05M | stringclasses 1 value |
/Homevee_Dev-0.0.0.0-py3-none-any.whl/Homevee/Manager/gateway.py | import json
import requests
from Homevee.Item.Gateway import *
from Homevee.Item.Status import *
def add_edit_gateway(username, type, user, password, change_pw, ip, port, gateway_type, db):
if not user.has_permission("admin"):
return Status(type=STATUS_NO_ADMIN).get_dict()
param_array = {'name': type, 'ip': ip, 'port': port, 'key1': user, 'type': gateway_type}
with db:
cur = db.cursor()
if change_pw == "true":
param_array['key2'] = password
Database.update("UPDATE OR IGNORE 'GATEWAYS' SET IP = :ip, PORT = :port, KEY1 = :key1, KEY2 = :key2 TYPE = :type, WHERE NAME = :name;",
param_array)
Database.insert("INSERT OR IGNORE INTO 'GATEWAYS' (NAME, IP, PORT, KEY1, KEY2, TYPE) VALUES (:name, :ip, :port, :key1, :key2, :type);",
param_array)
else:
Database.update("UPDATE OR IGNORE 'GATEWAYS' SET IP = :ip, PORT = :port, KEY1 = :key1 WHERE NAME = :name;",
param_array)
Database.insert("INSERT OR IGNORE INTO 'GATEWAYS' (NAME, IP, PORT, KEY1, TYPE) VALUES (:name, :ip, :port, :key1, :type);",
param_array)
return Status(type=STATUS_OK).get_dict()
def delete_gateway(user, key, db):
if not user.has_permission("admin"):
return Status(type=STATUS_NO_ADMIN).get_dict()
gateway = Item.load_from_db(Gateway, key)
    try:
        if gateway.delete():
            return Status(type=STATUS_OK).get_dict()
        return Status(type=STATUS_ERROR).get_dict()
    except Exception:
        return Status(type=STATUS_ERROR).get_dict()
'''
with db:
cur = db.cursor()
Database.delete("DELETE FROM GATEWAYS WHERE NAME == :key", {'key': key})
cur.close()
        #Query successful?
if True:
return Status(type=STATUS_OK).get_dict()
else:
return Status(type=STATUS_ERROR).get_dict()
'''
def get_gateways(user, db):
if not user.has_permission("admin"):
return Status(type=STATUS_NO_ADMIN).get_dict()
results = Database.select_all("SELECT * FROM GATEWAYS", {}, db)
gateways = []
for gateway in results:
gateways.append({'name': gateway['NAME'], 'ip': gateway['IP'], 'port': gateway['PORT'],
'key1': gateway['KEY1'], 'type': gateway['TYPE']})
return {'gateways': gateways, 'gatewaytypesleft': get_gateway_types(user.username, db)}
def get_gateway_types(user, db):
if not user.has_permission("admin"):
return Status(type=STATUS_NO_ADMIN).get_dict()
gateway_types = [
{'key': Z_WAVE_GATEWAY, 'type': 'user'},
{'key': FUNKSTECKDOSEN_CONTROLLER, 'type': 'url'},
{'key': MAX_CUBE, 'type': 'url'},
{'key': MQTT_BROKER, 'type': 'user'},
{'key': PHILIPS_HUE_BRIDGE, 'type': 'apikey'},
{'key': MIYO_CUBE, 'type': 'apikey'},
{'key': RADEMACHER_HOMEPILOT, 'type': 'url'},
]
with db:
cur = db.cursor()
cur.execute("SELECT NAME FROM GATEWAYS")
for gateway in cur.fetchall():
name = gateway['NAME']
            # remove gateway types that are already configured for this name
            gateway_types = [gt for gt in gateway_types if gt['key'] != name]
cur.close()
return gateway_types
def connect_gateway(user, type, ip, db):
if not user.has_permission("admin"):
return {'result': 'noadmin'}
if type == PHILIPS_HUE_BRIDGE:
while True:
#print("try connecting...")
data = {"devicetype": "Homevee#system"}
response = requests.post("http://" + ip + "/api", data=json.dumps(data))
result = response.text
#print("result: "+result)
result = json.loads(result)
result = result[0]
if "error" in result:
if result['error']['type'] == 101:
                    return {'result': 'error', 'msg': 'Please press the button on your Philips Hue Gateway.'}
elif "success" in result:
                # the bridge-generated username acts as the API key; avoid shadowing the user object
                api_key = result['success']['username']
                add_edit_gateway(user.username, type, api_key, "", "", ip, "80", "apikey", db)
return Status(type=STATUS_OK).get_dict()
elif type == MIYO_CUBE:
while True:
#print("try connecting...")
response = requests.get("http://" + ip + "/api/link")
result = response.text
#print("result: "+result)
result = json.loads(result)
if "error" in result:
if result['errorLoc'] == "NOTIFY_ERROR_LINKINACTIVE":
                    return {'result': 'error', 'msg': 'Please press the button on the back of your MIYO Cube.'}
elif "success" in result:
                # the cube-generated username acts as the API key; avoid shadowing the user object
                api_key = result['success']['username']
                add_edit_gateway(user.username, type, api_key, "", "", ip, "80", "apikey", db)
return Status(type=STATUS_OK).get_dict()
else:
return Status(type=STATUS_ERROR).get_dict() | PypiClean |
/genie.libs.sdk-23.8.1-py3-none-any.whl/genie/libs/sdk/apis/ios/utils.py |
# Python
import logging
import re
# Genie
from genie.libs.sdk.apis.utils import get_config_dict
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.utils.timeout import Timeout
from genie.libs.parser.iosxe.ping import Ping
# unicon
from unicon.eal.dialogs import Dialog, Statement
from unicon.core.errors import SubCommandFailure
log = logging.getLogger(__name__)
def delete_local_file(device, path, file):
""" Delete local file
Args:
device (`obj`): Device object
path (`str`): directory
file (`str`): file name
Returns:
None
"""
try:
device.execute("delete {path}{file}".format(path=path, file=file))
except SubCommandFailure as e:
raise SubCommandFailure(
"Could not delete file {file} from device "
"{device}".format(file=file, device=device.name)
)
def get_config_from_file(device, disk, filename):
""" Get configuration from a file in disk
Args:
device ('obj'): Device object
disk ('str'): Disk name
filename ('str'): File name
Raises:
SubCommandFailure
Returns:
Dictionary: Configuration
"""
try:
config = device.execute(
"more {disk}{filename}".format(disk=disk, filename=filename)
)
except SubCommandFailure as e:
log.error(
"Could not retrieve configuration information from "
"file {filename}".format(filename=filename)
)
return None
return get_config_dict(config)
def start_packet_capture(
device, capture_name=None, interface=None, capture_command=None
):
"""Start packet capture
Args:
device (`obj`): Device object
capture_name (`str`): Packet capture name
interface (`str`): Interface to capture the packets on
capture_command (`str`): Monitor command
Returns:
None
Raises:
pyATS Results
"""
log.info("Start the packet capture")
log.info("Clear packet buffer")
# Making sure packet buffers are empty the next
# time when capturing packets
try:
device.execute("no monitor capture {cn}".format(cn=capture_name))
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed in no monitor capture, Error: {}".format(str(e))
) from e
    # Use the user-provided command if one was given
    monitor_command = capture_command
    if not capture_command:
        # No command provided, so build the default monitor capture command
monitor_command = (
"monitor capture {capture_name} interface {interface} "
"out match any".format(
capture_name=capture_name, interface=interface
)
)
# Send capture command
device.execute(monitor_command)
# Start capture of packets
# TODO: Karim - What if it fails
try:
device.execute(
"monitor capture {capture_name} start".format(
capture_name=capture_name
)
)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed in monitor capture, Error: {}".format(str(e))
) from e
def stop_packet_capture(device, capture_name):
"""Stop the packet capture
Args:
device (`obj`): Device object
capture_name (`str`): Packet capture name
Returns:
None
Raises:
pyATS Results
"""
log.info("Stop the packet capture")
try:
device.execute(
"monitor capture {capture_name} stop".format(
capture_name=capture_name
)
)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed in monitor capture, Error: {}".format(str(e))
) from e
def export_packet_capture(device, testbed, filename, capture_name, protocol='tftp',
path='', username='', password=''):
"""Export the packet capture to a pcap file
Args:
device (`obj`): Device object
testbed (`obj`): Testbed object
filename (`str`): Filename to save
capture_name (`str`): Packet capture name
protocol (`str`): Protocol name
path (`str`): Path to export
username (`str`): user name
password (`str`): password
Returns:
pcap_file_name or None
Raises:
pyATS Results
"""
if protocol in testbed.servers and "server" in testbed.servers[protocol]:
execution_server = testbed.servers[protocol]["server"]
else:
raise Exception("{pro} server is missing from the testbed yaml file".format(pro=protocol))
pcap_file_name = filename.replace(".", "_") + ".pcap"
credential = ''
if username and password:
credential = '{}:{}@'.format(username, password)
export_to = '{pro}://{credential}{server}/{path}/{pcap_file_name}'.format(
pro=protocol, path=path,
credential=credential,
server=execution_server,
pcap_file_name=pcap_file_name)
cmd = "monitor capture {capture_name} export {export_to}".format(
capture_name=capture_name, export_to=export_to)
log.info("Export the capture to {p}".format(p=pcap_file_name))
try:
out = device.execute(cmd, error_pattern=["Failed to Export"])
except SubCommandFailure:
log.error("Invalid command: Failed to Export packet capture")
return None
except Exception as e:
log.error("Failed to export pcap file: {e}".format(e=e))
return None
# Making sure packet buffers are empty the next
# time when capturing packets
clear_packet_buffer(device, capture_name)
return pcap_file_name
def clear_packet_buffer(device, capture_name):
"""Clear packet buffer
Args:
device (`obj`): Device object
Returns:
None
Raises:
pyATS Results
"""
# Making sure packet buffers are empty the next
# time when capturing packets
try:
device.execute(
"no monitor capture {capture_name}".format(
capture_name=capture_name
)
)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed in no monitor capture, Error: {}".format(str(e))
) from e
def ping_interface_success_rate(device, ips, success_rate, **kwargs):
""" Ping interfaces and verify success rate
Args:
device (`obj`): Device object
ips (`list`): IP list to ping
ips (`str`): Single IP address to ping
success_rate (`int`): Ping success rate
Returns:
None
"""
if isinstance(ips, str):
log.info("Pinging '{ip}'".format(ip=ips))
try:
out = device.ping(addr=ips, **kwargs)
except Exception as e:
raise Exception("Failed in ping, Error: {}".format(str(e))) from e
p = re.compile(r"Success +rate +is +(?P<rate>\d+) +percent.*")
m = p.search(out)
if not m or int(m.groupdict()["rate"]) < int(success_rate):
raise Exception(
"Ping success rate was lower than '{s}'".format(s=success_rate)
)
else:
for ip in ips:
log.info("Pinging '{ip}'".format(ip=ip))
try:
out = device.ping(addr=ip, **kwargs)
except Exception as e:
raise Exception(
"Failed in ping, Error: {}".format(str(e))
) from e
p = re.compile(r"Success +rate +is +(?P<rate>\d+) +percent.*")
m = p.search(out)
if not m or int(m.groupdict()["rate"]) < int(success_rate):
raise Exception(
"Ping success rate was lower than '{s}'".format(
s=success_rate
)
)
def change_hostname(device, name):
""" Change the hostname on device
Args:
device('obj'): device to change hostname on
name('str'): name to change hostname to
Returns:
N/A
"""
log.info('Changing hostname to "{}".'.format(name))
device.state_machine.hostname = name
try:
device.configure("hostname {}".format(name))
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed in changing hostname on device"
" {device}, Error: {e}".format(device=device.name, e=str(e))
) from e
def save_running_config_configuration(device):
"""Save configuration on the device after configure action
Args:
device (`obj`): Device object
Returns:
None
"""
try:
device.execute("write memory")
except Exception as e:
raise Exception("{}".format(e))
def set_clock(device, datetime):
""" Set clock date and time on device
Args:
device ('obj') : Device object to update clock
datetime ('str') : Date and time value
ex.)
datetime = '23:55:00 20 Dec 2019'
Returns:
None
"""
try:
device.execute("clock set {}".format(datetime))
except Exception as e:
raise Exception("{}".format(e))
def scp(device,
local_path,
remote_path,
remote_device,
remote_user=None,
remote_pass=None,
remote_via=None,
vrf=None):
""" copy files from local device to remote device via scp
Args:
device (`obj`) : Device object (local device)
local_path (`str`): path with file on local device
remote_device (`str`): remote device name
remote_path (`str`): path with/without file on remote device
remote_user (`str`): use given username to scp
Default to None
remote_pass (`str`): use given password to scp
Default to None
remote_via (`str`) : specify connection to get ip
Default to None
vrf (`str`): use vrf where scp find route to remote device
Default to None
Returns:
result (`bool`): True if scp successfully done
"""
# convert from device name to device object
remote_device = device.testbed.devices[remote_device]
# set credential for remote device
username, password = remote_device.api.get_username_password()
if remote_user:
username = remote_user
if remote_pass:
password = remote_pass
# find ip for remote server from testbed yaml
if remote_via:
remote_device_ip = str(remote_device.connections[remote_via]['ip'])
else:
remote_device_ip = str(
remote_device.connections[remote_device.via]['ip'])
# complete remote_path with credential and ip
if remote_path[-1] != '/':
remote_path += '/'
remote_path = "scp://{id}@{ip}/{rp}".format(id=username,
ip=remote_device_ip,
rp=remote_path)
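    # Unicon dialog statements that auto-answer the interactive copy prompts:
    # remote host, username and filename accept the defaults, and the password is supplied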
s1 = Statement(pattern=r".*Address or name of remote host",
action="sendline()",
args=None,
loop_continue=True,
continue_timer=False)
s2 = Statement(pattern=r".*Destination username",
action="sendline()",
args=None,
loop_continue=True,
continue_timer=False)
s3 = Statement(pattern=r".*Destination filename",
action="sendline()",
args=None,
loop_continue=True,
continue_timer=False)
s4 = Statement(pattern=r".*Password:",
action="sendline({pw})".format(pw=password),
args=None,
loop_continue=True,
continue_timer=False)
dialog = Dialog([s1, s2, s3, s4])
try:
if vrf:
out = device.execute("copy {lp} {rp} vrf {vrf}".format(
lp=local_path, rp=remote_path, vrf=vrf),
reply=dialog)
else:
out = device.execute("copy {lp} {rp}".format(lp=local_path,
rp=remote_path),
reply=dialog)
except Exception as e:
log.warn("Failed to copy from {lp} to {rp} via scp: {e}".format(
lp=local_path, rp=remote_path, e=e))
return False
# return True/False depending on result
return 'bytes copied in' in out
def delete_files(device, locations, filenames):
""" Delete local file
Args:
device (`obj`): Device object
locations (`list`): list of locations
ex.) bootflash:/core/
filenames (`list`): file name. regular expression is supported
Returns:
deleted_files (`list`): list of deleted files
"""
deleted_files = []
# loop by given locations
for location in locations:
log.info('Checking {location}...'.format(location=location))
parsed = device.parse('dir {location}'.format(location=location))
# loop by given filenames
for filename in filenames:
# find target files by using Dq with regex
matched_files = parsed.q.contains_key_value(
'files', filename, value_regex=True).get_values('files')
log.debug('Matched files to delete: {matched_files}'.format(
matched_files=matched_files))
# delete files which were found
for file in matched_files:
if location[-1] != '/':
location += '/'
device.execute('delete /force {location}{file}'.format(
location=location, file=file))
# build up list for return
deleted_files.append('{location}{file}'.format(
location=location, file=file))
return deleted_files
def verify_ping(
device, address, expected_max_success_rate=100, expected_min_success_rate=1,
count=None, source=None, max_time=60, check_interval=10,
):
"""Verify ping
Args:
device ('obj'): Device object
address ('str'): Address value
expected_max_success_rate (int): Expected maximum success rate (default: 100)
expected_min_success_rate (int): Expected minimum success rate (default: 1)
count ('int'): Count value for ping command
source ('str'): Source IP address, default: None
        max_time (`int`): Max time, default: 60
check_interval (`int`): Check interval, default: 10
"""
p = re.compile(r"Success +rate +is +(?P<rate>\d+) +percent.*")
timeout = Timeout(max_time, check_interval)
while timeout.iterate():
if address and count and source:
cmd = 'ping {address} source {source} repeat {count}'.format(
address=address,
source=source,
count=count)
elif address and count:
cmd = 'ping {address} repeat {count}'.format(
address=address,
count=count)
elif address and source:
cmd = 'ping {address} source {source}'.format(
address=address,
source=source)
elif address:
cmd = 'ping {address}'.format(address=address)
else:
log.info('Need to pass address as argument')
return False
try:
out = device.execute(cmd)
except SubCommandFailure as e:
timeout.sleep()
continue
        m = p.search(out)
        rate = int(m.groupdict().get('rate', 0)) if m else 0
if expected_max_success_rate >= rate >= expected_min_success_rate:
return True
timeout.sleep()
return False
def get_md5_hash_of_file(device, file, timeout=180):
""" Return the MD5 hash of a given file.
Args:
device (obj): Device to execute on
file (str): File to calculate the MD5 on
timeout (int, optional): Max time in seconds allowed for calculation.
Defaults to 180.
Returns:
MD5 hash (str), or None if something went wrong
"""
# verify /md5 bootflash:test_file.bin
# ....................................
# ....................................Done!
# verify /md5 (bootflash:test1.bin) = 2c9bf2c64bee6fb22277fc89bd1c8ff0
try:
output = device.execute('verify /md5 {}'.format(file), timeout=timeout)
m = re.search(r' = (\S+)', output)
if m:
hash_value = m.group(1)
return hash_value
else:
log.error('Could not find MD5 hash in output')
except Exception as e:
log.warning(e)
return None
def ping(device,
address,
ttl=None,
timeout=None,
tos=None,
dscp=None,
size=None,
count=None,
source=None,
rapid=False,
do_not_fragment=False,
validate=False,
vrf=None,
command=None,
output=None):
""" execute ping and parse ping result and return structure data
Args:
device ('obj'): Device object
address ('str'): Address value
tos ('int'): type of service value
dscp (`str`): DSCP value
size ('str'): data bytes expected
ttl ('int'): Not supported
timeout ('int'): timeout interval
count ('int'): repeat count
source ('str'): source address or interface, default: None
rapid ('bool'): Not supported
do_not_fragment ('bool'): enable do not fragment bit in IP header, default: False
validate (`bool`): validate reply data, default: False
vrf ('str'): VRF name
command (`str`): ping command. This will ignore all other arguments
output (`str`): ping command output. no parser call involved
Returns:
Boolean
Raises:
None
"""
try:
obj = Ping(device=device)
return obj.parse(addr=address,
vrf=vrf,
tos=tos,
dscp=dscp,
size=size,
ttl=ttl,
timeout=timeout,
count=count,
source=source,
rapid=rapid,
do_not_fragment=do_not_fragment,
validate=validate,
command=command,
output=output)
except SchemaEmptyParserError:
log.info('parsed_output was empty')
return {}
except Exception as e:
log.warning(e)
return {} | PypiClean |
/gaeframework-2.0.10.tar.gz/gaeframework-2.0.10/google_appengine/lib/django_1_2/django/contrib/sessions/backends/base.py | import base64
import os
import random
import sys
import time
from datetime import datetime, timedelta
try:
import cPickle as pickle
except ImportError:
import pickle
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.utils.hashcompat import md5_constructor
# Use the system (hardware-based) random number generator if it exists.
if hasattr(random, 'SystemRandom'):
randrange = random.SystemRandom().randrange
else:
randrange = random.randrange
MAX_SESSION_KEY = 18446744073709551616L # 2 << 63
class CreateError(Exception):
"""
Used internally as a consistent exception type to catch from save (see the
docstring for SessionBase.save() for details).
"""
pass
class SessionBase(object):
"""
Base class for all Session classes.
"""
TEST_COOKIE_NAME = 'testcookie'
TEST_COOKIE_VALUE = 'worked'
def __init__(self, session_key=None):
self._session_key = session_key
self.accessed = False
self.modified = False
def __contains__(self, key):
return key in self._session
def __getitem__(self, key):
return self._session[key]
def __setitem__(self, key, value):
self._session[key] = value
self.modified = True
def __delitem__(self, key):
del self._session[key]
self.modified = True
def keys(self):
return self._session.keys()
def items(self):
return self._session.items()
def get(self, key, default=None):
return self._session.get(key, default)
def pop(self, key, *args):
self.modified = self.modified or key in self._session
return self._session.pop(key, *args)
def setdefault(self, key, value):
if key in self._session:
return self._session[key]
else:
self.modified = True
self._session[key] = value
return value
def set_test_cookie(self):
self[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE
def test_cookie_worked(self):
return self.get(self.TEST_COOKIE_NAME) == self.TEST_COOKIE_VALUE
def delete_test_cookie(self):
del self[self.TEST_COOKIE_NAME]
def encode(self, session_dict):
"Returns the given session dictionary pickled and encoded as a string."
pickled = pickle.dumps(session_dict, pickle.HIGHEST_PROTOCOL)
pickled_md5 = md5_constructor(pickled + settings.SECRET_KEY).hexdigest()
return base64.encodestring(pickled + pickled_md5)
def decode(self, session_data):
encoded_data = base64.decodestring(session_data)
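        # The last 32 characters are the hex MD5 digest appended by encode();
        # everything before it is the pickled session payload.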
pickled, tamper_check = encoded_data[:-32], encoded_data[-32:]
if md5_constructor(pickled + settings.SECRET_KEY).hexdigest() != tamper_check:
raise SuspiciousOperation("User tampered with session cookie.")
try:
return pickle.loads(pickled)
# Unpickling can cause a variety of exceptions. If something happens,
# just return an empty dictionary (an empty session).
except:
return {}
def update(self, dict_):
self._session.update(dict_)
self.modified = True
def has_key(self, key):
return self._session.has_key(key)
def values(self):
return self._session.values()
def iterkeys(self):
return self._session.iterkeys()
def itervalues(self):
return self._session.itervalues()
def iteritems(self):
return self._session.iteritems()
def clear(self):
# To avoid unnecessary persistent storage accesses, we set up the
# internals directly (loading data wastes time, since we are going to
# set it to an empty dict anyway).
self._session_cache = {}
self.accessed = True
self.modified = True
def _get_new_session_key(self):
"Returns session key that isn't being used."
# The random module is seeded when this Apache child is created.
# Use settings.SECRET_KEY as added salt.
try:
pid = os.getpid()
except AttributeError:
# No getpid() in Jython, for example
pid = 1
while 1:
session_key = md5_constructor("%s%s%s%s"
% (randrange(0, MAX_SESSION_KEY), pid, time.time(),
settings.SECRET_KEY)).hexdigest()
if not self.exists(session_key):
break
return session_key
def _get_session_key(self):
if self._session_key:
return self._session_key
else:
self._session_key = self._get_new_session_key()
return self._session_key
def _set_session_key(self, session_key):
self._session_key = session_key
session_key = property(_get_session_key, _set_session_key)
def _get_session(self, no_load=False):
"""
Lazily loads session from storage (unless "no_load" is True, when only
an empty dict is stored) and stores it in the current instance.
"""
self.accessed = True
try:
return self._session_cache
except AttributeError:
if self._session_key is None or no_load:
self._session_cache = {}
else:
self._session_cache = self.load()
return self._session_cache
_session = property(_get_session)
def get_expiry_age(self):
"""Get the number of seconds until the session expires."""
expiry = self.get('_session_expiry')
if not expiry: # Checks both None and 0 cases
return settings.SESSION_COOKIE_AGE
if not isinstance(expiry, datetime):
return expiry
delta = expiry - datetime.now()
return delta.days * 86400 + delta.seconds
def get_expiry_date(self):
"""Get session the expiry date (as a datetime object)."""
expiry = self.get('_session_expiry')
if isinstance(expiry, datetime):
return expiry
if not expiry: # Checks both None and 0 cases
expiry = settings.SESSION_COOKIE_AGE
return datetime.now() + timedelta(seconds=expiry)
def set_expiry(self, value):
"""
Sets a custom expiration for the session. ``value`` can be an integer,
a Python ``datetime`` or ``timedelta`` object or ``None``.
If ``value`` is an integer, the session will expire after that many
seconds of inactivity. If set to ``0`` then the session will expire on
browser close.
If ``value`` is a ``datetime`` or ``timedelta`` object, the session
will expire at that specific future time.
If ``value`` is ``None``, the session uses the global session expiry
policy.
"""
if value is None:
# Remove any custom expiration for this session.
try:
del self['_session_expiry']
except KeyError:
pass
return
if isinstance(value, timedelta):
value = datetime.now() + value
self['_session_expiry'] = value
def get_expire_at_browser_close(self):
"""
Returns ``True`` if the session is set to expire when the browser
closes, and ``False`` if there's an expiry date. Use
``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry
date/age, if there is one.
"""
if self.get('_session_expiry') is None:
return settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
return self.get('_session_expiry') == 0
def flush(self):
"""
Removes the current session data from the database and regenerates the
key.
"""
self.clear()
self.delete()
self.create()
def cycle_key(self):
"""
Creates a new session key, whilst retaining the current session data.
"""
data = self._session_cache
key = self.session_key
self.create()
self._session_cache = data
self.delete(key)
# Methods that child classes must implement.
def exists(self, session_key):
"""
Returns True if the given session_key already exists.
"""
raise NotImplementedError
def create(self):
"""
Creates a new session instance. Guaranteed to create a new object with
a unique key and will have saved the result once (with empty data)
before the method returns.
"""
raise NotImplementedError
def save(self, must_create=False):
"""
Saves the session data. If 'must_create' is True, a new session object
is created (otherwise a CreateError exception is raised). Otherwise,
save() can update an existing object with the same key.
"""
raise NotImplementedError
def delete(self, session_key=None):
"""
Deletes the session data under this key. If the key is None, the
current session key value is used.
"""
raise NotImplementedError
def load(self):
"""
Loads the session data and returns a dictionary.
"""
raise NotImplementedError | PypiClean |
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/ZhimaCreditPayafteruseCreditagreementSignModel.py | import json
from alipay.aop.api.constant.ParamConstants import *
class ZhimaCreditPayafteruseCreditagreementSignModel(object):
def __init__(self):
self._cancel_back_link = None
self._category_id = None
self._external_logon_id = None
self._extra_param = None
self._out_agreement_no = None
self._product_code = None
self._return_back_link = None
self._zm_service_id = None
@property
def cancel_back_link(self):
return self._cancel_back_link
@cancel_back_link.setter
def cancel_back_link(self, value):
self._cancel_back_link = value
@property
def category_id(self):
return self._category_id
@category_id.setter
def category_id(self, value):
self._category_id = value
@property
def external_logon_id(self):
return self._external_logon_id
@external_logon_id.setter
def external_logon_id(self, value):
self._external_logon_id = value
@property
def extra_param(self):
return self._extra_param
@extra_param.setter
def extra_param(self, value):
self._extra_param = value
@property
def out_agreement_no(self):
return self._out_agreement_no
@out_agreement_no.setter
def out_agreement_no(self, value):
self._out_agreement_no = value
@property
def product_code(self):
return self._product_code
@product_code.setter
def product_code(self, value):
self._product_code = value
@property
def return_back_link(self):
return self._return_back_link
@return_back_link.setter
def return_back_link(self, value):
self._return_back_link = value
@property
def zm_service_id(self):
return self._zm_service_id
@zm_service_id.setter
def zm_service_id(self, value):
self._zm_service_id = value
def to_alipay_dict(self):
params = dict()
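        # Serialize each populated field, delegating to nested objects that expose to_alipay_dict()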
if self.cancel_back_link:
if hasattr(self.cancel_back_link, 'to_alipay_dict'):
params['cancel_back_link'] = self.cancel_back_link.to_alipay_dict()
else:
params['cancel_back_link'] = self.cancel_back_link
if self.category_id:
if hasattr(self.category_id, 'to_alipay_dict'):
params['category_id'] = self.category_id.to_alipay_dict()
else:
params['category_id'] = self.category_id
if self.external_logon_id:
if hasattr(self.external_logon_id, 'to_alipay_dict'):
params['external_logon_id'] = self.external_logon_id.to_alipay_dict()
else:
params['external_logon_id'] = self.external_logon_id
if self.extra_param:
if hasattr(self.extra_param, 'to_alipay_dict'):
params['extra_param'] = self.extra_param.to_alipay_dict()
else:
params['extra_param'] = self.extra_param
if self.out_agreement_no:
if hasattr(self.out_agreement_no, 'to_alipay_dict'):
params['out_agreement_no'] = self.out_agreement_no.to_alipay_dict()
else:
params['out_agreement_no'] = self.out_agreement_no
if self.product_code:
if hasattr(self.product_code, 'to_alipay_dict'):
params['product_code'] = self.product_code.to_alipay_dict()
else:
params['product_code'] = self.product_code
if self.return_back_link:
if hasattr(self.return_back_link, 'to_alipay_dict'):
params['return_back_link'] = self.return_back_link.to_alipay_dict()
else:
params['return_back_link'] = self.return_back_link
if self.zm_service_id:
if hasattr(self.zm_service_id, 'to_alipay_dict'):
params['zm_service_id'] = self.zm_service_id.to_alipay_dict()
else:
params['zm_service_id'] = self.zm_service_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = ZhimaCreditPayafteruseCreditagreementSignModel()
if 'cancel_back_link' in d:
o.cancel_back_link = d['cancel_back_link']
if 'category_id' in d:
o.category_id = d['category_id']
if 'external_logon_id' in d:
o.external_logon_id = d['external_logon_id']
if 'extra_param' in d:
o.extra_param = d['extra_param']
if 'out_agreement_no' in d:
o.out_agreement_no = d['out_agreement_no']
if 'product_code' in d:
o.product_code = d['product_code']
if 'return_back_link' in d:
o.return_back_link = d['return_back_link']
if 'zm_service_id' in d:
o.zm_service_id = d['zm_service_id']
return o | PypiClean |
/mmcif.utils-0.26.tar.gz/mmcif.utils-0.26/README.md | ## mmCIF Utility Classes
### Introduction
This module contains a collection of utility classes used by the Protein Data Bank
to manage structure and chemical reference definition mmCIF data objects.
### Installation
Download the library source software from the project repository:
```bash
git clone https://github.com/rcsb/py-mmcif_utils.git
```
Optionally, run the test suite (Python 2.7 or 3.7) using:
```bash
python setup.py test
```
Installation is via the program [pip](https://pypi.python.org/pypi/pip).
```bash
pip install -e .
```
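For orientation, the snippet below is a minimal, hedged sketch of loading an mmCIF file with the base py-mmcif package's I/O adapter, which produces the data containers these utility classes operate on; the file name is a placeholder, and the snippet illustrates the underlying data model rather than this package's own API.

```python
from mmcif.io.IoAdapterCore import IoAdapterCore

# Read an mmCIF file into a list of DataContainer objects (base py-mmcif API)
io_adapter = IoAdapterCore()
containers = io_adapter.readFile("example.cif")  # placeholder path

# Inspect the first container and one of its categories, if present
container = containers[0]
atom_site = container.getObj("atom_site")
if atom_site is not None:
    print(container.getName(), atom_site.getRowCount())
```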
| PypiClean |
/panel-1.2.2.tar.gz/panel-1.2.2/doc/explanation/api/examples/outliers_declarative.md | # Simple Outlier App - Declarative API
This app is a complement to the simple app demonstrated in the [Getting Started > Build an app](../../../getting_started/build_app.md) tutorial, which used the reactive API.
The reactive API approach is very flexible, but it ties your domain-specific code (the parts about sine waves) with your widget display code. That's fine for small, quick projects or projects dominated by visualization code, but what about large-scale, long-lived projects, where the code is used in many different contexts over time, such as in large batch runs, one-off command-line usage, notebooks, and deployed dashboards? For larger projects like that, it's important to be able to separate the parts of the code that are about the underlying domain (i.e. application or research area) from those that are tied to specific display technologies (such as Jupyter notebooks or web servers).
For such usages, Panel supports objects declared with the separate [Param](http://param.pyviz.org) library, which provides a GUI-independent way of capturing and declaring the parameters of your objects (and dependencies between your code and those parameters), in a way that's independent of any particular application or dashboard technology. For instance, the app in [Getting Started > Build an app](../../../getting_started/build_app.md) can be captured in an object that declares the ranges and values of all parameters, as well as how to generate the plot, independently of the Panel library or any other way of interacting with the object. First, we'll copy the initial steps:
```{pyodide}
import panel as pn
import hvplot.pandas
import numpy as np
import param
import pandas as pd
pn.extension()
csv_file = 'https://raw.githubusercontent.com/holoviz/panel/main/examples/assets/occupancy.csv'
data = pd.read_csv(csv_file, parse_dates=['date'], index_col='date')
data.tail()
```
```{pyodide}
def view_hvplot(avg, highlight):
return avg.hvplot(height=300, width=400, legend=False) * highlight.hvplot.scatter(
color="orange", padding=0.1, legend=False
)
def find_outliers(variable="Temperature", window=30, sigma=10, view_fn=view_hvplot):
avg = data[variable].rolling(window=window).mean()
residual = data[variable] - avg
std = residual.rolling(window=window).std()
outliers = np.abs(residual) > std * sigma
return view_fn(avg, avg[outliers])
```
Now, let's implement the declarative API approach:
```{pyodide}
class RoomOccupancy(param.Parameterized):
variable = param.Selector(default="Temperature", objects=list(data.columns))
window = param.Integer(default=30, bounds=(1, 60))
sigma = param.Number(default=10, bounds=(0, 20))
def view(self):
return find_outliers(self.variable, self.window, self.sigma, view_fn=view_hvplot)
obj = RoomOccupancy()
obj
```
The `RoomOccupancy` class and the `obj` instance have no dependency on Panel, Jupyter, or any other GUI or web toolkit; they simply declare facts about a certain domain (such as that smoothing requires window and sigma parameters, and that window is an integer greater than 0 and sigma is a positive real number). This information is then enough for Panel to create an editable and viewable representation for this object without having to specify anything that depends on the domain-specific details encapsulated in `obj`:
```{pyodide}
pn.Column(obj.param, obj.view)
```
To support a particular domain, you can create hierarchies of such classes encapsulating all the parameters and functionality you need across different families of objects, with both parameters and code inheriting across the classes as appropriate, all without any dependency on a particular GUI library or even the presence of a GUI at all. This approach makes it practical to maintain a large codebase, all fully displayable and editable with Panel, in a way that can be maintained and adapted over time. See the [Attractors Panel app](https://examples.pyviz.org/attractors/attractors_panel.html) ([source](https://github.com/holoviz-topics/examples/tree/main/attractors)) for a more complex illustration of this approach, and the Panel codebase itself for the ultimate demonstration of using Param throughout a codebase!
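For example (a minimal sketch; the subclass and its `floor` parameter are invented purely for illustration), parameters declared on `RoomOccupancy` are inherited by subclasses, which can add their own while reusing the same `view` method:

```{pyodide}
class UpstairsOccupancy(RoomOccupancy):
    # Inherits variable, window, and sigma (with their bounds) from RoomOccupancy
    floor = param.Selector(default="upstairs", objects=["upstairs", "downstairs"])

upstairs = UpstairsOccupancy(window=45)
pn.Column(upstairs.param, upstairs.view)
```

Panel renders the inherited and newly declared parameters together, so each class in such a hierarchy stays viewable and editable without any extra GUI code.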
| PypiClean |
/medicine-utils-0.1.7.tar.gz/medicine-utils-0.1.7/medicine_utils/static/medicine_utils/ace/mode-python.js | define("ace/mode/python_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text_highlight_rules").TextHighlightRules,s=function(){var e="and|as|assert|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|not|or|pass|print|raise|return|try|while|with|yield|async|await|nonlocal",t="True|False|None|NotImplemented|Ellipsis|__debug__",n="abs|divmod|input|open|staticmethod|all|enumerate|int|ord|str|any|eval|isinstance|pow|sum|basestring|execfile|issubclass|print|super|binfile|bin|iter|property|tuple|bool|filter|len|range|type|bytearray|float|list|raw_input|unichr|callable|format|locals|reduce|unicode|chr|frozenset|long|reload|vars|classmethod|getattr|map|repr|xrange|cmp|globals|max|reversed|zip|compile|hasattr|memoryview|round|__import__|complex|hash|min|apply|delattr|help|next|setattr|set|buffer|dict|hex|object|slice|coerce|dir|id|oct|sorted|intern|ascii|breakpoint|bytes",r=this.createKeywordMapper({"invalid.deprecated":"debugger","support.function":n,"variable.language":"self|cls","constant.language":t,keyword:e},"identifier"),i="[uU]?",s="[rR]",o="[fF]",u="(?:[rR][fF]|[fF][rR])",a="(?:(?:[1-9]\\d*)|(?:0))",f="(?:0[oO]?[0-7]+)",l="(?:0[xX][\\dA-Fa-f]+)",c="(?:0[bB][01]+)",h="(?:"+a+"|"+f+"|"+l+"|"+c+")",p="(?:[eE][+-]?\\d+)",d="(?:\\.\\d+)",v="(?:\\d+)",m="(?:(?:"+v+"?"+d+")|(?:"+v+"\\.))",g="(?:(?:"+m+"|"+v+")"+p+")",y="(?:"+g+"|"+m+")",b="\\\\(x[0-9A-Fa-f]{2}|[0-7]{3}|[\\\\abfnrtv'\"]|U[0-9A-Fa-f]{8}|u[0-9A-Fa-f]{4})";this.$rules={start:[{token:"comment",regex:"#.*$"},{token:"string",regex:i+'"{3}',next:"qqstring3"},{token:"string",regex:i+'"(?=.)',next:"qqstring"},{token:"string",regex:i+"'{3}",next:"qstring3"},{token:"string",regex:i+"'(?=.)",next:"qstring"},{token:"string",regex:s+'"{3}',next:"rawqqstring3"},{token:"string",regex:s+'"(?=.)',next:"rawqqstring"},{token:"string",regex:s+"'{3}",next:"rawqstring3"},{token:"string",regex:s+"'(?=.)",next:"rawqstring"},{token:"string",regex:o+'"{3}',next:"fqqstring3"},{token:"string",regex:o+'"(?=.)',next:"fqqstring"},{token:"string",regex:o+"'{3}",next:"fqstring3"},{token:"string",regex:o+"'(?=.)",next:"fqstring"},{token:"string",regex:u+'"{3}',next:"rfqqstring3"},{token:"string",regex:u+'"(?=.)',next:"rfqqstring"},{token:"string",regex:u+"'{3}",next:"rfqstring3"},{token:"string",regex:u+"'(?=.)",next:"rfqstring"},{token:"keyword.operator",regex:"\\+|\\-|\\*|\\*\\*|\\/|\\/\\/|%|@|<<|>>|&|\\||\\^|~|<|>|<=|=>|==|!=|<>|="},{token:"punctuation",regex:",|:|;|\\->|\\+=|\\-=|\\*=|\\/=|\\/\\/=|%=|@=|&=|\\|=|^=|>>=|<<=|\\*\\*="},{token:"paren.lparen",regex:"[\\[\\(\\{]"},{token:"paren.rparen",regex:"[\\]\\)\\}]"},{token:"text",regex:"\\s+"},{include:"constants"}],qqstring3:[{token:"constant.language.escape",regex:b},{token:"string",regex:'"{3}',next:"start"},{defaultToken:"string"}],qstring3:[{token:"constant.language.escape",regex:b},{token:"string",regex:"'{3}",next:"start"},{defaultToken:"string"}],qqstring:[{token:"constant.language.escape",regex:b},{token:"string",regex:"\\\\$",next:"qqstring"},{token:"string",regex:'"|$',next:"start"},{defaultToken:"string"}],qstring:[{token:"constant.language.escape",regex:b},{token:"string",regex:"\\\\$",next:"qstring"},{token:"string",regex:"'|$",next:"start"},{defaultToken:"string"}],rawqqstring3:[{token:"string",regex:'"{3}',next:"start"},{defaultToken:"string"}
],rawqstring3:[{token:"string",regex:"'{3}",next:"start"},{defaultToken:"string"}],rawqqstring:[{token:"string",regex:"\\\\$",next:"rawqqstring"},{token:"string",regex:'"|$',next:"start"},{defaultToken:"string"}],rawqstring:[{token:"string",regex:"\\\\$",next:"rawqstring"},{token:"string",regex:"'|$",next:"start"},{defaultToken:"string"}],fqqstring3:[{token:"constant.language.escape",regex:b},{token:"string",regex:'"{3}',next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],fqstring3:[{token:"constant.language.escape",regex:b},{token:"string",regex:"'{3}",next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],fqqstring:[{token:"constant.language.escape",regex:b},{token:"string",regex:"\\\\$",next:"fqqstring"},{token:"string",regex:'"|$',next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],fqstring:[{token:"constant.language.escape",regex:b},{token:"string",regex:"'|$",next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],rfqqstring3:[{token:"string",regex:'"{3}',next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],rfqstring3:[{token:"string",regex:"'{3}",next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],rfqqstring:[{token:"string",regex:"\\\\$",next:"rfqqstring"},{token:"string",regex:'"|$',next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],rfqstring:[{token:"string",regex:"'|$",next:"start"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"},{defaultToken:"string"}],fqstringParRules:[{token:"paren.lparen",regex:"[\\[\\(]"},{token:"paren.rparen",regex:"[\\]\\)]"},{token:"string",regex:"\\s+"},{token:"string",regex:"'(.)*'"},{token:"string",regex:'"(.)*"'},{token:"function.support",regex:"(!s|!r|!a)"},{include:"constants"},{token:"paren.rparen",regex:"}",next:"pop"},{token:"paren.lparen",regex:"{",push:"fqstringParRules"}],constants:[{token:"constant.numeric",regex:"(?:"+y+"|\\d+)[jJ]\\b"},{token:"constant.numeric",regex:y},{token:"constant.numeric",regex:h+"[lL]\\b"},{token:"constant.numeric",regex:h+"\\b"},{token:["punctuation","function.support"],regex:"(\\.)([a-zA-Z_]+)\\b"},{token:r,regex:"[a-zA-Z_$][a-zA-Z0-9_$]*\\b"}]},this.normalizeRules()};r.inherits(s,i),t.PythonHighlightRules=s}),define("ace/mode/folding/pythonic",["require","exports","module","ace/lib/oop","ace/mode/folding/fold_mode"],function(e,t,n){"use strict";var r=e("../../lib/oop"),i=e("./fold_mode").FoldMode,s=t.FoldMode=function(e){this.foldingStartMarker=new RegExp("([\\[{])(?:\\s*)$|("+e+")(?:\\s*)(?:#.*)?$")};r.inherits(s,i),function(){this.getFoldWidgetRange=function(e,t,n){var r=e.getLine(n),i=r.match(this.foldingStartMarker);if(i)return i[1]?this.openingBracketBlock(e,i[1],n,i.index):i[2]?this.indentationBlock(e,n,i.index+i[2].length):this.indentationBlock(e,n)}}.call(s.prototype)}),define("ace/mode/python",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/python_highlight_rules","ace/mode/folding/pythonic","ace/range"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text").Mode,s=e("./python_highlight_rules").PythonHighlightRules,o=e("./folding/pythonic").FoldMode,u=e("../range").Range,a=function(){this.HighlightRules=s,this.foldingRules=new 
o("\\:"),this.$behaviour=this.$defaultBehaviour};r.inherits(a,i),function(){this.lineCommentStart="#",this.getNextLineIndent=function(e,t,n){var r=this.$getIndent(t),i=this.getTokenizer().getLineTokens(t,e),s=i.tokens;if(s.length&&s[s.length-1].type=="comment")return r;if(e=="start"){var o=t.match(/^.*[\{\(\[:]\s*$/);o&&(r+=n)}return r};var e={pass:1,"return":1,raise:1,"break":1,"continue":1};this.checkOutdent=function(t,n,r){if(r!=="\r\n"&&r!=="\r"&&r!=="\n")return!1;var i=this.getTokenizer().getLineTokens(n.trim(),t).tokens;if(!i)return!1;do var s=i.pop();while(s&&(s.type=="comment"||s.type=="text"&&s.value.match(/^\s+$/)));return s?s.type=="keyword"&&e[s.value]:!1},this.autoOutdent=function(e,t,n){n+=1;var r=this.$getIndent(t.getLine(n)),i=t.getTabString();r.slice(-i.length)==i&&t.remove(new u(n,r.length-i.length,n,r.length))},this.$id="ace/mode/python"}.call(a.prototype),t.Mode=a}); (function() {
window.require(["ace/mode/python"], function(m) {
if (typeof module == "object" && typeof exports == "object" && module) {
module.exports = m;
}
});
})(); | PypiClean |
/jarviscore_nightly-0.1.1.203-py3-none-any.whl/jarviscore/socket.py | import traceback
import socket
from time import sleep
from datetime import datetime
from .log import Log
# from .bridge import Bridge
from threading import Thread
from .message import RawMessage
from .messageparser import parse_line
class Socket():
# bridge: Bridge
socket: socket.socket
buffer: str
active: bool
ready: bool
channel_list: list
log: Log
last_ping: datetime
# def __init__(self, bridge: Bridge = None):
def __init__(self, channel, nick: str, token: str, id = None, verbose=False):
self.name = f"Thread - {channel.channel_name} Socket"
self.nick = nick
self.token = token
self.active = True
self.verbose = verbose
self.buffer = ""
self.id = id
self.stream_id = None
self.socket = None
self.channel = channel
self.log = Log(f"{channel.channel_name} Socket")
self.__connect()
def __connect(self):
self.log.log("Engageing Socket. Standby...")
self.buffer = ""
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect(("irc.chat.twitch.tv", 6667))
self._send_raw(f"PASS {self.token}")
self._send_raw(f"NICK {self.nick}")
self._send_raw("CAP REQ :twitch.tv/membership")
self._send_raw("CAP REQ :twitch.tv/tags")
self._send_raw("CAP REQ :twitch.tv/commands")
self._send_raw(f"JOIN #{self.channel.channel_name} ")
self.last_ping = datetime.now()
self.log.info("Socket engaged.")
pass
def disconnect(self):
if self.verbose:
print("departing channels")
try:
self._send_raw(f"PART #{self.channel.channel_name} ")
except Exception as e:
self.log.exception(f"Suppressing a caught an exception in `Socket.disconnect()` [Parting channel]. Details below\n{type(e)}: {traceback.format_exc()}")
try:
self.socket.close()
except Exception as e:
self.log.exception(f"Suppressing a caught an exception in `Socket.disconnect()` [closing socket]. Details below\n{type(e)}: {traceback.format_exc()}")
def reconnect(self):
if self.verbose:
print("Reconnect detected!")
self.disconnect()
if self.verbose:
print("Waiting to reconnect.")
sleep(10)
self.__connect()
def connect_to_channel(self, channel: str):
self._send_raw("JOIN #"+ channel.lower() +" ")
# self.channel_list.append(channel)
if self.verbose:
print(f"connecting to '{channel}'")
def disconnect_from_channel(self, channel: str):
self._send_raw("PART #"+ channel.lower() +" ")
# self.__clearchannel(channel)
if self.verbose:
print(f"departing from '{channel}'")
def __clearchannel(self, channel: str):
counter = 0
for chn in self.channel_list:
if chn.name == channel:
self.channel_list.pop(counter)
return
counter += 1
def listenToStream(self):
try:
self.__process_stream_data()
except Exception as e:
self.log.exception(f"Suppressing a caught an exception in `Socket.listenToStream()`, will attempt to reconnect to recover the connection and continue without raising. Details below\n{type(e)}: {traceback.format_exc()}")
self.reconnect()
self.log.info("Reconnect attempt executed.")
def run(self):
while self.active:
self.__process_stream_data()
try:
self.socket.close()
except Exception as e:
self.log.exception(f"Suppressing a caught an exception while attempting to close the socket in `Socket.run()`. Details below\n{type(e)}: {traceback.format_exc()}")
def close(self):
if self.verbose:
print(f"({self.name}) Closing Socket")
self.active = False
self.socket.close()
def _send_raw(self, message: str):
try:
self.socket.send((f"{message}\r\n").encode('utf-8'))
if self.verbose:
if message[:4] == "PASS":
print(f"({self.name}) < PASS ****")
else:
print(f"({self.name}) < {message}")
except OSError:
self.log.error(f"Socket is closed and must be reopened to send the message '{message}'")
def __process_stream_data(self):
try:
self.buffer = self.buffer + self.socket.recv(1024).decode()
except ConnectionAbortedError:
self.log.info("Socket connection has Closed")
except UnicodeDecodeError:
self.log.warn(f"Unicode Decode error detected, possible issue with the buffer.\nBuffer: [{self.buffer}]\n\nRegenerating buffer...")
self.buffer = ""
self.log.info("Buffer regeneration completed.")
except OSError:
self.log.warn("OSError detected, socket issue identitfied. Attempting to recover socket.")
self.reconnect()
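        # Split complete lines off the buffer; keep the trailing partial line for the next read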
temp = self.buffer.split("\n")
self.buffer = temp.pop()
for line in temp:
if ("PING :tmi.twitch.tv" in line): # Keep Alive Mechanism
self._send_raw("PONG :tmi.twitch.tv")
self.last_ping = datetime.now()
self.on_socket_data(line)
def health_check(self):
if (datetime.now() - self.last_ping).seconds > 420:
self.log.error(f"Extended delay detected between pings, regenerating Thread.[PING timeout: {(datetime.now() - self.last_ping).seconds} sec]")
return False
# self.reconnect()
return True
def set_stream_id(self, new_id):
self.stream_id = new_id
def on_socket_data(self, line: str):
self.on_data(parse_line(line))
def on_data(self, data):
if self.verbose:
print(f" >", data.line)
self.channel.on_raw(data)
if data.inner == "Message":
self.channel.on_message(data)
elif data.inner == "Join":
self.channel.on_join(data)
elif data.inner == "Mode":
self.channel.on_mode(data)
elif data.inner == "Names":
self.channel.on_names(data)
elif data.inner == "Part":
self.channel.on_part(data)
elif data.inner == "ClearChat":
self.channel.on_clearchat(data)
elif data.inner == "ClearMessage":
self.channel.on_clearmessage(data)
elif data.inner == "HostTarget":
self.channel.on_hosttarget(data)
elif data.inner == "Notice":
self.channel.on_notice(data)
elif data.inner == "Reconnect":
self.channel.on_reconnect(data)
elif data.inner == "RoomState":
self.channel.on_roomstate(data)
elif data.inner == "UserState":
self.channel.on_userstate(data)
elif data.inner == "GlobalUserState":
self.channel.on_globaluserstate(data)
elif data.inner == "UserNotice":
self.channel.on_usernotice(data)
elif data.inner == "RitualUserNotice":
self.channel.on_ritual_usernotice(data)
elif data.inner == "BitBadgeUpgradeUserNotice":
self.channel.on_bitbadgeupgrade_usernotice(data)
elif data.inner == "RaidUserNotice":
self.channel.on_raid_usernotice(data)
elif data.inner == "Whisper":
if data.channel == self.channel.channel_name:
self.log.whisper (data.message_text, data.display_name)
self.channel.on_whisper(data)
elif data.inner == "SubscriberUserNotice":
if data.display_name.lower() != self.nick.lower():
self.channel.on_subscriber_usernotice(data)
elif data.inner == "GiftedSubscriberUserNotice":
if data.display_name.lower() != self.nick.lower():
self.channel.on_giftedsubscriber_usernotice(data)
elif data.inner == "PrivateMessage":
if data.display_name.lower() != self.nick.lower():
self.log.chat(data.message_text, data.channel, data.display_name, jid=self.id, sid=self.stream_id)
self.channel.on_privmessage(data)
elif data.inner == "CommandMessage":
if data.display_name.lower() != self.nick.lower():
self.log.chat(f"[CMD] {data.message_text}",data.channel, data.display_name, jid=self.id, sid=self.stream_id)
self.channel.on_command(data)
def join(self, channel: str):
send = f"JOIN #{channel.lower()}"
self._send_raw(send)
def send_message(self, channel: str, message: str):
send = f"PRIVMSG #{channel.lower()} :{message}"
self._send_raw(send)
self.log.sent(message, channel.lower(), jid=self.id, sid=self.stream_id)
def send_action_message(self, channel: str, message: str):
send = f"PRIVMSG #{channel.lower()} :/me {message}"
self._send_raw(send)
self.log.sent(f"ACTION: {message}", channel.lower(), jid=self.id, sid=self.stream_id)
def send_whisper(self, user: str, message: str):
# send = f"PRIVMSG #{user.lower()} :/w {user.lower()} {message}"
send = f"PRIVMSG #{self.nick.lower()} :/w {user.lower()} {message}"
self._send_raw(send)
self.log.sent_whisper(message, user.lower())
def timeout_user(self, user: str, channel: str, timeout=1):
send = f"PRIVMSG #{channel} :/timeout {user} {timeout}"
self._send_raw(send)
self.log.sent(f"TIMEOUT: Timed out user {user} for {timeout} seconds in {channel}'s stream chat", channel.lower(), jid=self.id, sid=self.stream_id)
self.log.info(f"TIMEOUT: Timed out user {user} for {timeout} seconds in {channel}'s stream chat")
def clear_message(self, channel: str, message_id: str):
send = f"PRIVMSG #{channel} :/delete {message_id}"
self._send_raw(send)
self.log.sent(f"CLEAR MESSAGE: Deleted a message (ID: {message_id}) from {channel}'s stream chat", channel.lower(), jid=self.id, sid=self.stream_id)
self.log.info(f"CLEAR MESSAGE: Deleted a message (ID: {message_id}) from {channel}'s stream chat")
def ban_user(self, user: str, channel: str):
send = f"PRIVMSG #{channel} :/ban {user}"
self._send_raw(send)
self.log.sent(f"BAN: Banned user {user} from {channel}'s stream chat", channel.lower(), jid=self.id, sid=self.stream_id)
self.log.info(f"BAN: Banned user {user} from {channel}'s stream chat") | PypiClean |
/hydra-base-0.1.7.tar.gz/hydra-base-0.1.7/hydra_base/util/dataset_util.py |
# (c) Copyright 2013 to 2017 University of Manchester
#
# HydraPlatform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HydraPlatform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with HydraPlatform. If not, see <http://www.gnu.org/licenses/>
#
import logging
from decimal import Decimal
from functools import reduce
from operator import mul
from ..exceptions import HydraError, ValidationError
import numpy as np
import pandas as pd
import re
from .hydra_dateutil import get_datetime
log = logging.getLogger(__name__)
def array_dim(arr):
"""Return the size of a multidimansional array.
"""
dim = []
while True:
try:
dim.append(len(arr))
arr = arr[0]
except TypeError:
return dim
def check_array_struct(array):
"""
Check to ensure arrays are symmetrical, for example:
[[1, 2, 3], [1, 2]] is invalid
"""
#If a list is transformed into a numpy array and the sub elements
#of this array are still lists, then numpy failed to fully convert
#the list, meaning it is not symmetrical.
try:
arr = np.array(array)
except:
raise HydraError("Array %s is not valid."%(array,))
if type(arr[0]) is list:
raise HydraError("Array %s is not valid."%(array,))
def arr_to_vector(arr):
"""Reshape a multidimensional array to a vector.
"""
dim = array_dim(arr)
tmp_arr = []
for n in range(len(dim) - 1):
for inner in arr:
for i in inner:
tmp_arr.append(i)
arr = tmp_arr
tmp_arr = []
return arr
def vector_to_arr(vec, dim):
"""Reshape a vector to a multidimensional array with dimensions 'dim'.
"""
if len(dim) <= 1:
return vec
array = vec
while len(dim) > 1:
i = 0
outer_array = []
for m in range(reduce(mul, dim[0:-1])):
inner_array = []
for n in range(dim[-1]):
inner_array.append(array[i])
i += 1
outer_array.append(inner_array)
array = outer_array
dim = dim[0:-1]
return array
def _get_val(val, full=False):
"""
Get the value(s) of a dataset as a single value or as 1-d list of
values. In the special case of timeseries, when a check is for time-based
criteria, you can return the entire timeseries.
"""
try:
val = val.strip()
except:
pass
logging.debug("%s, type=%s", val, type(val))
if isinstance(val, float):
return val
if isinstance(val, int):
return val
if isinstance(val, np.ndarray):
return list(val)
try:
val = float(val)
return val
except:
pass
try:
val = int(val)
return val
except:
pass
if type(val) == pd.DataFrame:
if full:
return val
newval = []
values = val.values
for v in values:
newv = _get_val(v)
if type(newv) == list:
newval.extend(newv)
else:
newval.append(newv)
val = newval
elif type(val) == dict:
if full:
return val
newval = []
for v in val.values():
newv = _get_val(v)
if type(newv) == list:
newval.extend(newv)
else:
newval.append(newv)
val = newval
elif type(val) == list or type(val) == np.ndarray:
newval = []
for arr_val in val:
v = _get_val(arr_val)
newval.append(v)
val = newval
return val
def get_restriction_as_dict(restriction_xml):
"""
turn:
::
<restrictions>
<restriction>
<type>MAXLEN</type>
<value>3</value>
</restriction>
<restriction>
<type>VALUERANGE</type>
<value><item>1</item><item>10</item></value>
</restriction>
</restrictions>
into:
::
{
'MAXLEN' : 3,
'VALUERANGE' : [1, 10]
}
"""
restriction_dict = {}
if restriction_xml is None:
return restriction_dict
if restriction_xml.find('restriction') is not None:
restrictions = restriction_xml.findall('restriction')
for restriction in restrictions:
restriction_type = restriction.find('type').text
restriction_val = restriction.find('value')
val = None
if restriction_val is not None:
if restriction_val.text.strip() != "":
val = _get_val(restriction_val.text)
else:
items = restriction_val.findall('item')
val = []
for item in items:
val.append(_get_val(item.text))
restriction_dict[restriction_type] = val
return restriction_dict
def validate_ENUM(in_value, restriction):
"""
Test to ensure that the given value is contained in the provided list.
the value parameter must be either a single value or a 1-dimensional list.
All the values in this list must satisfy the ENUM
"""
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_ENUM(subval, restriction)
else:
if value not in restriction:
raise ValidationError("ENUM : %s"%(restriction))
def validate_BOOL(in_value, restriction):
"""
Validation for the generic Boolean type consisting of arbitrary
'true' and 'false' values.
"""
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_BOOL(subval, restriction)
else:
if value not in restriction:
raise ValidationError("BOOL")
def validate_BOOLYN(in_value, restriction):
"""
Restriction is not used here. It is just present to be
in line with all the other validation functions
"""
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_BOOLYN(subval, restriction)
else:
if value not in ('Y', 'N'):
raise ValidationError("BOOLYN")
def validate_BOOL10(value, restriction):
value = _get_val(value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_BOOL10(subval, restriction)
else:
if value not in (1, 0):
raise ValidationError("BOOL10")
def validate_NUMPLACES(in_value, restriction):
"""
the value parameter must be either a single value or a 1-dimensional list.
All the values in this list must satisfy the condition
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_NUMPLACES(subval, restriction)
else:
restriction = int(restriction) # Just in case..
dec_val = Decimal(str(value))
num_places = dec_val.as_tuple().exponent * -1 #exponent returns a negative num
if restriction != num_places:
raise ValidationError("NUMPLACES: %s"%(restriction))
def validate_VALUERANGE(in_value, restriction):
"""
Test to ensure that a value sits between a lower and upper bound.
Parameters: A Decimal value and a tuple, containing a lower and upper bound,
both as Decimal values.
"""
if len(restriction) != 2:
raise ValidationError("Template ERROR: Only two values can be specified in a date range.")
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_VALUERANGE(subval, restriction)
else:
min_val = Decimal(restriction[0])
max_val = Decimal(restriction[1])
val = Decimal(value)
if val < min_val or val > max_val:
raise ValidationError("VALUERANGE: %s, %s"%(min_val, max_val))
def validate_DATERANGE(value, restriction):
"""
Test to ensure that the times in a timeseries fall between a lower and upper bound
Parameters: A timeseries in the form [(datetime, val), (datetime, val)..]
and a tuple containing the lower and upper bound as datetime objects.
"""
if len(restriction) != 2:
raise ValidationError("Template ERROR: Only two values can be specified in a date range.")
if type(value) == pd.DataFrame:
dates = [get_datetime(v) for v in list(value.index)]
else:
dates = value
if type(dates) is list:
for date in dates:
validate_DATERANGE(date, restriction)
return
min_date = get_datetime(restriction[0])
max_date = get_datetime(restriction[1])
if value < min_date or value > max_date:
raise ValidationError("DATERANGE: %s <%s> %s"%(min_date,value,max_date))
def validate_MAXLEN(value, restriction):
"""
Test to ensure that a list has the prescribed length.
Parameters: A list and an integer, which defines the required length of
the list.
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
else:
return
if len(value) > restriction:
raise ValidationError("MAXLEN: %s"%(restriction))
def validate_NOTNULL(value, restriction):
"""
Restriction is not used here. It is just present to be
in line with all the other validation functions
"""
    if value is None or str(value).lower() == 'null':
raise ValidationError("NOTNULL")
def validate_ISNULL(value, restriction):
"""
Restriction is not used here. It is just present to be
in line with all the other validation functions
"""
    if value is not None and str(value).lower() != 'null':
raise ValidationError("ISNULL")
def validate_EQUALTO(in_value, restriction):
"""
Test to ensure that a value is equal to a prescribed value.
Parameter: Two values, which will be compared for equality.
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_EQUALTO(subval, restriction)
else:
if value != restriction:
raise ValidationError("EQUALTO: %s"%(restriction))
def validate_NOTEQUALTO(in_value, restriction):
"""
Test to ensure that a value is NOT equal to a prescribed value.
Parameter: Two values, which will be compared for non-equality.
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_NOTEQUALTO(subval, restriction)
else:
if value == restriction:
raise ValidationError("NOTEQUALTO: %s"%(restriction))
def validate_LESSTHAN(in_value, restriction):
"""
Test to ensure that a value is less than a prescribed value.
Parameter: Two values, which will be compared for the difference..
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_LESSTHAN(subval, restriction)
else:
try:
if value >= restriction:
raise ValidationError("LESSTHAN: %s"%(restriction))
except TypeError:
# Incompatible types for comparison.
raise ValidationError("LESSTHAN: Incompatible types %s"%(restriction))
def validate_LESSTHANEQ(value, restriction):
"""
Test to ensure that a value is less than or equal to a prescribed value.
Parameter: Two values, which will be compared for the difference..
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_LESSTHANEQ(subval, restriction)
else:
try:
if value > restriction:
raise ValidationError("LESSTHANEQ: %s" % (restriction))
except TypeError:
# Incompatible types for comparison.
raise ValidationError("LESSTHANEQ: Incompatible types %s"%(restriction))
def validate_GREATERTHAN(in_value, restriction):
"""
Test to ensure that a value is greater than a prescribed value.
Parameter: Two values, which will be compared for the difference..
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_GREATERTHAN(subval, restriction)
else:
try:
if value <= restriction:
raise ValidationError("GREATERTHAN: %s" % (restriction))
except TypeError:
# Incompatible types for comparison.
raise ValidationError("GREATERTHAN: Incompatible types %s"%(restriction))
def validate_GREATERTHANEQ(value, restriction):
"""
Test to ensure that a value is greater than or equal to a prescribed value.
Parameter: Two values, which will be compared for the difference..
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_GREATERTHANEQ(subval, restriction)
else:
try:
if value < restriction:
raise ValidationError("GREATERTHANEQ: %s" % (restriction))
except TypeError:
# Incompatible types for comparison.
raise ValidationError("GREATERTHANEQ: Incompatible types %s"%(restriction))
def validate_MULTIPLEOF(in_value, restriction):
"""
Test to ensure that a value is a multiple of a specified restriction value.
Parameters: Numeric value and an integer
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value)
if type(value) is list:
for subval in value:
if type(subval) is tuple:
subval = subval[1]
validate_MULTIPLEOF(subval, restriction)
else:
try:
if value % restriction != 0:
raise ValidationError("MULTIPLEOF: %s" % (restriction))
except TypeError:
# Incompatible types for comparison.
raise ValidationError("MULTIPLEOF: Incompatible types %s"%(restriction))
def validate_SUMTO(in_value, restriction):
"""
Test to ensure the values of a list sum to a specified value:
Parameters: a list of numeric values and a target to which the values
in the list must sum
"""
#Sometimes restriction values can accidentally be put in the template <item>100</items>,
#Making them a list, not a number. Rather than blowing up, just get value 1 from the list.
if type(restriction) is list:
restriction = restriction[0]
value = _get_val(in_value, full=True)
if len(value) == 0:
return
flat_list = _flatten_value(value)
try:
sum(flat_list)
except:
raise ValidationError("List cannot be summed: %s"%(flat_list,))
if sum(flat_list) != restriction:
raise ValidationError("SUMTO: %s"%(restriction))
def validate_INCREASING(in_value, restriction):
"""
Test to ensure the values in a list are increasing.
Parameters: a list of values and None. The none is there simply
to conform with the rest of the validation routines.
"""
flat_list = _flatten_value(in_value)
previous = None
for a in flat_list:
if previous is None:
previous = a
continue
try:
if a < previous:
raise ValidationError("INCREASING")
except TypeError:
raise ValueError("INCREASING: Incompatible types")
previous = a
def validate_DECREASING(in_value,restriction):
"""
Test to ensure the values in a list are decreasing.
Parameters: a list of values and None. The none is there simply
to conform with the rest of the validation routines.
"""
flat_list = _flatten_value(in_value)
previous = None
for a in flat_list:
if previous is None:
previous = a
continue
try:
if a > previous:
raise ValidationError("DECREASING")
except TypeError:
raise ValueError("DECREASING: Incompatible types")
previous = a
def validate_EQUALTIMESTEPS(value, restriction):
"""
Ensure that the timesteps in a timeseries are equal. If a restriction
is provided, they must be equal to the specified restriction.
Value is a pandas dataframe.
"""
if len(value) == 0:
return
if type(value) == pd.DataFrame:
if str(value.index[0]).startswith('9999'):
tmp_val = value.to_json().replace('9999', '1900')
value = pd.read_json(tmp_val)
#If the timeseries is not datetime-based, check for a consistent timestep
if type(value.index) == pd.Int64Index:
timesteps = list(value.index)
timestep = timesteps[1] - timesteps[0]
for i, t in enumerate(timesteps[1:]):
                if t - timesteps[i] != timestep:
raise ValidationError("Timesteps not equal: %s"%(list(value.index)))
if not hasattr(value.index, 'inferred_freq'):
raise ValidationError("Timesteps not equal: %s"%(list(value.index),))
if restriction is None:
if value.index.inferred_freq is None:
raise ValidationError("Timesteps not equal: %s"%(list(value.index),))
else:
if value.index.inferred_freq != restriction:
raise ValidationError("Timesteps not equal: %s"%(list(value.index),))
validation_func_map = dict(
ENUM = validate_ENUM,
BOOL = validate_BOOL,
BOOLYN = validate_BOOLYN,
BOOL10 = validate_BOOL10,
NUMPLACES = validate_NUMPLACES,
VALUERANGE = validate_VALUERANGE,
DATERANGE = validate_DATERANGE,
MAXLEN = validate_MAXLEN,
EQUALTO = validate_EQUALTO,
NOTEQUALTO = validate_NOTEQUALTO,
LESSTHAN = validate_LESSTHAN,
LESSTHANEQ = validate_LESSTHANEQ,
GREATERTHAN = validate_GREATERTHAN,
GREATERTHANEQ = validate_GREATERTHANEQ,
MULTIPLEOF = validate_MULTIPLEOF,
SUMTO = validate_SUMTO,
INCREASING = validate_INCREASING,
DECREASING = validate_DECREASING,
EQUALTIMESTEPS = validate_EQUALTIMESTEPS,
NOTNULL = validate_NOTNULL,
ISNULL = validate_ISNULL,
)
def validate_value(restriction_dict, inval):
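    """
    Validate a single value against a dictionary of restrictions, e.g.
    validate_value({'VALUERANGE': [1, 10]}, 5). Each key in restriction_dict
    must name a validator registered in validation_func_map; a HydraError is
    raised if any restriction is violated.
    """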
if len(restriction_dict) == 0:
return
try:
for restriction_type, restriction in restriction_dict.items():
func = validation_func_map.get(restriction_type)
if func is None:
raise Exception("Validation type {} does not exist".format(restriction_type,))
func(inval, restriction)
except ValidationError as e:
log.exception(e)
err_val = re.sub('\s+', ' ', str(inval)).strip()
if len(err_val) > 60:
err_val = "{}...".format(err_val[:60])
raise HydraError("Validation error ({}). Val {} does not conform with rule {}".format(restriction_type, err_val, e.args[0]))
except Exception as e:
log.exception(e)
raise HydraError("An error occurred in validation. ({})".format(e))
def _flatten_value(value):
"""
1: Turn a multi-dimensional array into a 1-dimensional array
2: Turn a timeseries of values into a single 1-dimensional array
"""
if type(value) == pd.DataFrame:
value = value.values.tolist()
if type(value) != list:
raise ValidationError("Value %s cannot be processed."%(value))
if len(value) == 0:
return
flat_list = _flatten_list(value)
return flat_list
def _flatten_list(l):
flat_list = []
for item in l:
if type(item) is list:
flat_list.extend(_flatten_list(item))
else:
flat_list.append(item)
return flat_list
if __name__ == '__main__':
pass | PypiClean |
/azureml_core-1.53.0-py3-none-any.whl/azureml/_history/utils/_hdi_wasb_utils.py |
import re
import subprocess
from datetime import datetime
import logging
from azureml._vendor.azure_storage.blob import ContainerSasPermissions
from azureml._vendor.azure_storage.blob import generate_container_sas
module_logger = logging.getLogger(__name__)
WASB_REGEX = r'wasbs?://(.*)@(.*)\.blob\.(core.windows.net|core.chinacloudapi.cn|core.usgovcloudapi.net)$'
WASB_MATCHES = ['.blob.core.windows.net', '.blob.core.chinacloudapi.cn', 'blob.core.usgovcloudapi.net']
def get_wasb_container_url():
get_wasb_url_cmd = ["hdfs", "getconf", "-confKey", "fs.defaultFS"]
return subprocess.check_output(get_wasb_url_cmd).strip().decode('utf-8')
def get_url_components(uri):
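    # Illustrative example, derived from WASB_REGEX above:
    #   "wasbs://mycontainer@myaccount.blob.core.windows.net"
    #   -> ("mycontainer", "myaccount", "core.windows.net")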
wasb_regex = WASB_REGEX
match = re.search(wasb_regex, uri)
# Extract storage account name and container name from the above URL
storage_container_name = match.group(1)
storage_account_name = match.group(2)
endpoint_suffix = match.group(3)
return storage_container_name, storage_account_name, endpoint_suffix
def get_regular_container_path(wasb_container_uri):
module_logger.debug("Remapping wasb to https: {0}".format(wasb_container_uri))
storage_container_name, storage_account_name, endpoint_suffix = get_url_components(wasb_container_uri)
res = "https://{0}.blob.{2}/{1}".format(
storage_account_name, storage_container_name, endpoint_suffix)
module_logger.debug("Mapped to {0}".format(res))
return res
def get_container_sas(wasb_container_url=None, request_session=None):
if (wasb_container_url is None):
# Get the entire wasb container URL
wasb_container_url = get_wasb_container_url()
module_logger.debug(
"Generating container-level Read SAS for {0}".format(wasb_container_url))
if all(x not in wasb_container_url for x in WASB_MATCHES):
module_logger.debug(
"Outputs Error: Currently - Only default wasb file systems are supported to generate SAS URLs for Outputs")
# TODO: log error or something - better handling
return ""
# Extract storage account, container and endpoint suffix from the above URL
storage_container_name, storage_account_name, endpoint_suffix = get_url_components(wasb_container_url)
# Get Encrypted Key #
ACCOUNT_KEY_CONF_FMT = "fs.azure.account.key.{StorageAccountName}.blob.{EndPointSuffix}"
get_hdfs_encrypted_key_cmd = ["hdfs", "getconf", "-confKey",
ACCOUNT_KEY_CONF_FMT.format(StorageAccountName=storage_account_name,
EndPointSuffix=endpoint_suffix)]
encrypted_key = subprocess.check_output(
get_hdfs_encrypted_key_cmd).strip().decode('utf-8')
# Get Decrypted Key #
get_hdfs_decrypted_key_cmd = ["/usr/lib/hdinsight-common/scripts/decrypt.sh",
"{encrypted_key}".format(encrypted_key=encrypted_key)]
storage_account_key = subprocess.check_output(get_hdfs_decrypted_key_cmd)
# Create a block blob service instance
permissions = ContainerSasPermissions(read=True, list=True)
container_sas = generate_container_sas(
account_name=storage_account_name,
container_name=storage_container_name,
account_key=storage_account_key,
permission=permissions,
expiry=datetime.max)
return "?{0}".format(container_sas) | PypiClean |
/collective.idashboard-1.2.6a1.zip/collective.idashboard-1.2.6a1/collective/idashboard/docs/README.txt | Introduction
============
Pimp your Plone dashboard!
This is a Plone Product that makes your user dashboard behave similarly to the iGoogle dashboard.
Specifically, it adds the following functionality:
- Drag and drop portlets within and between rows
- Ajax enabled inline portlet editing and saving
- Ajax removal/deletion of portlets with themable confirmation modal dialog.
- Toggle show/hide portlets
Dependencies
============
**collective.js.jquery**
This product requires JQuery 1.3.2 which is currently not part of the latest Plone Release.
The JQuery 1.3.2 source code is bundled in this released but the xml and zcml directives for registering it has been disabled.
Instead you should use collective.js.jquery. See the included buildout.cfg.
WARNING: Installing collective.js.jquery without enabling JQuery 1.3.2 (as
bundled in collective.idashboard, or through collective.js.jquery) will not
enable *any* of the Ajax features.
Install
=======
collective.idashboard uses overrides.zcml, so make sure you add the following to your buildout.cfg::

    [instance]
    zcml = collective.idashboard-overrides
When adding a new Plone site, choose both the JQuery and the collective.idashboard profiles.
Acknowledgements
================
This product is a rewrite of a product initially written by me for Upfront Systems. http://www.upfrontsystems.co.za
Thanks goes out to Roche Compaan ([email protected]) for his help and assistance.
TODO
====
- Install collective.js.jquery through setuphandlers
- Add new icons
- Add sticky minimise/maximise
- Add css class change for minimise button.
- Move header button images out to css.
| PypiClean |
/MaterialDjango-0.2.5.tar.gz/MaterialDjango-0.2.5/bower_components/prism/components/prism-puppet.js | (function (Prism) {
Prism.languages.puppet = {
'heredoc': [
// Matches the content of a quoted heredoc string (subject to interpolation)
{
pattern: /(@\("([^"\r\n\/):]+)"(?:\/[nrts$uL]*)?\).*(?:\r?\n|\r))(?:.*(?:\r?\n|\r))*?[ \t]*\|?[ \t]*-?[ \t]*\2/,
lookbehind: true,
alias: 'string',
inside: {
// Matches the end tag
'punctuation': /(?=\S).*\S(?= *$)/
// See interpolation below
}
},
// Matches the content of an unquoted heredoc string (no interpolation)
{
pattern: /(@\(([^"\r\n\/):]+)(?:\/[nrts$uL]*)?\).*(?:\r?\n|\r))(?:.*(?:\r?\n|\r))*?[ \t]*\|?[ \t]*-?[ \t]*\2/,
lookbehind: true,
alias: 'string',
inside: {
// Matches the end tag
'punctuation': /(?=\S).*\S(?= *$)/
}
},
// Matches the start tag of heredoc strings
{
pattern: /@\("?(?:[^"\r\n\/):]+)"?(?:\/[nrts$uL]*)?\)/,
alias: 'string',
inside: {
'punctuation': {
pattern: /(\().+?(?=\))/,
lookbehind: true
}
}
}
],
'multiline-comment': {
pattern: /(^|[^\\])\/\*[\s\S]*?\*\//,
lookbehind: true,
alias: 'comment'
},
'regex': {
// Must be prefixed with the keyword "node" or a non-word char
pattern: /((?:\bnode\s+|[~=\(\[\{,]\s*|[=+]>\s*|^\s*))\/(?:[^\/\\]|\\[\s\S])+\/(?:[imx]+\b|\B)/,
lookbehind: true,
inside: {
// Extended regexes must have the x flag. They can contain single-line comments.
'extended-regex': {
pattern: /^\/(?:[^\/\\]|\\[\s\S])+\/[im]*x[im]*$/,
inside: {
'comment': /#.*/
}
}
}
},
'comment': {
pattern: /(^|[^\\])#.*/,
lookbehind: true
},
'string': {
// Allow for one nested level of double quotes inside interpolation
pattern: /(["'])(?:\$\{(?:[^'"}]|(["'])(?:(?!\2)[^\\]|\\[\s\S])*\2)+\}|(?!\1)[^\\]|\\[\s\S])*\1/,
inside: {
'double-quoted': {
pattern: /^"[\s\S]*"$/,
inside: {
// See interpolation below
}
}
}
},
'variable': {
pattern: /\$(?:::)?\w+(?:::\w+)*/,
inside: {
'punctuation': /::/
}
},
'attr-name': /(?:\w+|\*)(?=\s*=>)/,
'function': [
{
pattern: /(\.)(?!\d)\w+/,
lookbehind: true
},
/\b(?:contain|debug|err|fail|include|info|notice|realize|require|tag|warning)\b|\b(?!\d)\w+(?=\()/
],
'number': /\b(?:0x[a-f\d]+|\d+(?:\.\d+)?(?:e-?\d+)?)\b/i,
'boolean': /\b(?:true|false)\b/,
// Includes words reserved for future use
'keyword': /\b(?:application|attr|case|class|consumes|default|define|else|elsif|function|if|import|inherits|node|private|produces|type|undef|unless)\b/,
'datatype': {
pattern: /\b(?:Any|Array|Boolean|Callable|Catalogentry|Class|Collection|Data|Default|Enum|Float|Hash|Integer|NotUndef|Numeric|Optional|Pattern|Regexp|Resource|Runtime|Scalar|String|Struct|Tuple|Type|Undef|Variant)\b/,
alias: 'symbol'
},
'operator': /=[=~>]?|![=~]?|<(?:<\|?|[=~|-])?|>[>=]?|->?|~>|\|>?>?|[*\/%+?]|\b(?:and|in|or)\b/,
'punctuation': /[\[\]{}().,;]|:+/
};
var interpolation = [
{
// Allow for one nested level of braces inside interpolation
pattern: /(^|[^\\])\$\{(?:[^'"{}]|\{[^}]*\}|(["'])(?:(?!\2)[^\\]|\\[\s\S])*\2)+\}/,
lookbehind: true,
inside: {
'short-variable': {
// Negative look-ahead prevent wrong highlighting of functions
pattern: /(^\$\{)(?!\w+\()(?:::)?\w+(?:::\w+)*/,
lookbehind: true,
alias: 'variable',
inside: {
'punctuation': /::/
}
},
'delimiter': {
pattern: /^\$/,
alias: 'variable'
},
rest: Prism.util.clone(Prism.languages.puppet)
}
},
{
pattern: /(^|[^\\])\$(?:::)?\w+(?:::\w+)*/,
lookbehind: true,
alias: 'variable',
inside: {
'punctuation': /::/
}
}
];
Prism.languages.puppet['heredoc'][0].inside.interpolation = interpolation;
Prism.languages.puppet['string'].inside['double-quoted'].inside.interpolation = interpolation;
}(Prism)); | PypiClean |
/gears-jsx-0.1.tar.gz/gears-jsx-0.1/gears_jsx/node_modules/react-tools/node_modules/commoner/node_modules/recast/node_modules/ast-types/lib/types.js | var assert = require("assert");
var Ap = Array.prototype;
var slice = Ap.slice;
var map = Ap.map;
var each = Ap.forEach;
var Op = Object.prototype;
var objToStr = Op.toString;
var funObjStr = objToStr.call(function(){});
var strObjStr = objToStr.call("");
var hasOwn = Op.hasOwnProperty;
// A type is an object with a .check method that takes a value and returns
// true or false according to whether the value matches the type.
function Type(check, name) {
var self = this;
assert.ok(self instanceof Type, self);
// Unfortunately we can't elegantly reuse isFunction and isString,
// here, because this code is executed while defining those types.
assert.strictEqual(objToStr.call(check), funObjStr,
check + " is not a function");
// The `name` parameter can be either a function or a string.
var nameObjStr = objToStr.call(name);
assert.ok(nameObjStr === funObjStr ||
nameObjStr === strObjStr,
name + " is neither a function nor a string");
Object.defineProperties(self, {
name: { value: name },
check: {
value: function(value, deep) {
var result = check.call(self, value, deep);
if (!result && deep && objToStr.call(deep) === funObjStr)
deep(self, value);
return result;
}
}
});
}
var Tp = Type.prototype;
// Throughout this file we use Object.defineProperty to prevent
// redefinition of exported properties.
Object.defineProperty(exports, "Type", { value: Type });
// Like .check, except that failure triggers an AssertionError.
Tp.assert = function(value, deep) {
if (!this.check(value, deep)) {
var str = shallowStringify(value);
assert.ok(false, str + " does not match type " + this);
return false;
}
return true;
};
function shallowStringify(value) {
if (isObject.check(value))
return "{" + Object.keys(value).map(function(key) {
return key + ": " + value[key];
}).join(", ") + "}";
if (isArray.check(value))
return "[" + value.map(shallowStringify).join(", ") + "]";
return JSON.stringify(value);
}
Tp.toString = function() {
var name = this.name;
if (isString.check(name))
return name;
if (isFunction.check(name))
return name.call(this) + "";
return name + " type";
};
var builtInTypes = {};
Object.defineProperty(exports, "builtInTypes", {
enumerable: true,
value: builtInTypes
});
function defBuiltInType(example, name) {
var objStr = objToStr.call(example);
Object.defineProperty(builtInTypes, name, {
enumerable: true,
value: new Type(function(value) {
return objToStr.call(value) === objStr;
}, name)
});
return builtInTypes[name];
}
// These types check the underlying [[Class]] attribute of the given
// value, rather than using the problematic typeof operator. Note however
// that no subtyping is considered; so, for instance, isObject.check
// returns false for [], /./, new Date, and null.
var isString = defBuiltInType("", "string");
var isFunction = defBuiltInType(function(){}, "function");
var isArray = defBuiltInType([], "array");
var isObject = defBuiltInType({}, "object");
var isRegExp = defBuiltInType(/./, "RegExp");
var isDate = defBuiltInType(new Date, "Date");
var isNumber = defBuiltInType(3, "number");
var isBoolean = defBuiltInType(true, "boolean");
var isNull = defBuiltInType(null, "null");
var isUndefined = defBuiltInType(void 0, "undefined");
// There are a number of idiomatic ways of expressing types, so this
// function serves to coerce them all to actual Type objects. Note that
// providing the name argument is not necessary in most cases.
function toType(from, name) {
// The toType function should of course be idempotent.
if (from instanceof Type)
return from;
// The Def type is used as a helper for constructing compound
// interface types for AST nodes.
if (from instanceof Def)
return from.type;
// Support [ElemType] syntax.
if (isArray.check(from))
return Type.fromArray(from);
// Support { someField: FieldType, ... } syntax.
if (isObject.check(from))
return Type.fromObject(from);
// If isFunction.check(from), assume that from is a binary predicate
// function we can use to define the type.
if (isFunction.check(from))
return new Type(from, name);
// As a last resort, toType returns a type that matches any value that
// is === from. This is primarily useful for literal values like
// toType(null), but it has the additional advantage of allowing
// toType to be a total function.
return new Type(function(value) {
return value === from;
}, isUndefined.check(name) ? function() {
return from + "";
} : name);
}
// Returns a type that matches the given value iff any of type1, type2,
// etc. match the value.
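// For example, Type.or(isString, isNumber) matches any string or number value.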
Type.or = function(/* type1, type2, ... */) {
var types = [];
var len = arguments.length;
for (var i = 0; i < len; ++i)
types.push(toType(arguments[i]));
return new Type(function(value, deep) {
for (var i = 0; i < len; ++i)
if (types[i].check(value, deep))
return true;
return false;
}, function() {
return types.join(" | ");
});
};
Type.fromArray = function(arr) {
assert.ok(isArray.check(arr));
assert.strictEqual(
arr.length, 1,
"only one element type is permitted for typed arrays");
return toType(arr[0]).arrayOf();
};
Tp.arrayOf = function() {
var elemType = this;
return new Type(function(value, deep) {
return isArray.check(value) && value.every(function(elem) {
return elemType.check(elem, deep);
});
}, function() {
return "[" + elemType + "]";
});
};
Type.fromObject = function(obj) {
var fields = Object.keys(obj).map(function(name) {
return new Field(name, obj[name]);
});
return new Type(function(value, deep) {
return isObject.check(value) && fields.every(function(field) {
return field.type.check(value[field.name], deep);
});
}, function() {
return "{ " + fields.join(", ") + " }";
});
};
function Field(name, type, defaultFn, hidden) {
var self = this;
assert.ok(self instanceof Field);
isString.assert(name);
type = toType(type);
var properties = {
name: { value: name },
type: { value: type },
hidden: { value: !!hidden }
};
if (isFunction.check(defaultFn)) {
properties.defaultFn = { value: defaultFn };
}
Object.defineProperties(self, properties);
}
var Fp = Field.prototype;
Fp.toString = function() {
return JSON.stringify(this.name) + ": " + this.type;
};
Fp.getValue = function(obj) {
var value = obj[this.name];
if (!isUndefined.check(value))
return value;
if (this.defaultFn)
value = this.defaultFn.call(obj);
return value;
};
// Define a type whose name is registered in a namespace (the defCache) so
// that future definitions will return the same type given the same name.
// In particular, this system allows for circular and forward definitions.
// The Def object d returned from Type.def may be used to configure the
// type d.type by calling methods such as d.bases, d.build, and d.field.
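// Illustrative example: Type.def("MyNode").bases("Node").build("name").field("name", isString);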
Type.def = function(typeName) {
isString.assert(typeName);
return hasOwn.call(defCache, typeName)
? defCache[typeName]
: defCache[typeName] = new Def(typeName);
};
// In order to return the same Def instance every time Type.def is called
// with a particular name, those instances need to be stored in a cache.
var defCache = {};
function Def(typeName) {
var self = this;
assert.ok(self instanceof Def);
Object.defineProperties(self, {
typeName: { value: typeName },
baseNames: { value: [] },
ownFields: { value: {} },
// These two are populated during finalization.
allSupertypes: { value: {} }, // Includes own typeName.
allFields: { value: {} }, // Includes inherited fields.
type: {
value: new Type(function(value, deep) {
return self.check(value, deep);
}, typeName)
}
});
}
Def.fromValue = function(value) {
if (isObject.check(value) &&
hasOwn.call(value, "type") &&
hasOwn.call(defCache, value.type))
{
var vDef = defCache[value.type];
assert.strictEqual(vDef.finalized, true);
return vDef;
}
};
var Dp = Def.prototype;
Dp.isSupertypeOf = function(that) {
if (that instanceof Def) {
assert.strictEqual(this.finalized, true);
assert.strictEqual(that.finalized, true);
return hasOwn.call(that.allSupertypes, this.typeName);
} else {
assert.ok(false, that + " is not a Def");
}
};
Dp.checkAllFields = function(value, deep) {
var allFields = this.allFields;
assert.strictEqual(this.finalized, true);
function checkFieldByName(name) {
var field = allFields[name];
var type = field.type;
var child = field.getValue(value);
return type.check(child, deep);
}
return isObject.check(value)
&& Object.keys(allFields).every(checkFieldByName);
};
Dp.check = function(value, deep) {
assert.strictEqual(
this.finalized, true,
"prematurely checking unfinalized type " + this.typeName);
// A Def type can only match an object value.
if (!isObject.check(value))
return false;
var vDef = Def.fromValue(value);
if (!vDef) {
// If we couldn't infer the Def associated with the given value,
// and we expected it to be a SourceLocation or a Position, it was
// probably just missing a "type" field (because Esprima does not
// assign a type property to such nodes). Be optimistic and let
// this.checkAllFields make the final decision.
if (this.typeName === "SourceLocation" ||
this.typeName === "Position") {
return this.checkAllFields(value, deep);
}
// Calling this.checkAllFields for any other type of node is both
// bad for performance and way too forgiving.
return false;
}
// If checking deeply and vDef === this, then we only need to call
// checkAllFields once. Calling checkAllFields is too strict when deep
// is false, because then we only care about this.isSupertypeOf(vDef).
if (deep && vDef === this)
return this.checkAllFields(value, deep);
// In most cases we rely exclusively on isSupertypeOf to make O(1)
// subtyping determinations. This suffices in most situations outside
// of unit tests, since interface conformance is checked whenever new
// instances are created using builder functions.
if (!this.isSupertypeOf(vDef))
return false;
// The exception is when deep is true; then, we recursively check all
// fields.
if (!deep)
return true;
// Use the more specific Def (vDef) to perform the deep check, but
// shallow-check fields defined by the less specific Def (this).
return vDef.checkAllFields(value, deep)
&& this.checkAllFields(value, false);
};
Dp.bases = function() {
var bases = this.baseNames;
assert.strictEqual(this.finalized, false);
each.call(arguments, function(baseName) {
isString.assert(baseName);
// This indexOf lookup may be O(n), but the typical number of base
// names is very small, and indexOf is a native Array method.
if (bases.indexOf(baseName) < 0)
bases.push(baseName);
});
return this; // For chaining.
};
// False by default until .build(...) is called on an instance.
Object.defineProperty(Dp, "buildable", { value: false });
var builders = {};
Object.defineProperty(exports, "builders", {
value: builders
});
// This object is used as prototype for any node created by a builder.
var nodePrototype = {};
// Call this function to define a new method to be shared by all AST
// nodes. The replaced method (if any) is returned for easy wrapping.
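// Illustrative example: exports.defineMethod("describe", function() { return this.type + " node"; });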
Object.defineProperty(exports, "defineMethod", {
value: function(name, func) {
var old = nodePrototype[name];
// Pass undefined as func to delete nodePrototype[name].
if (isUndefined.check(func)) {
delete nodePrototype[name];
} else {
isFunction.assert(func);
Object.defineProperty(nodePrototype, name, {
enumerable: true, // For discoverability.
configurable: true, // For delete proto[name].
value: func
});
}
return old;
}
});
// Calling the .build method of a Def simultaneously marks the type as
// buildable (by defining builders[getBuilderName(typeName)]) and
// specifies the order of arguments that should be passed to the builder
// function to create an instance of the type.
Dp.build = function(/* param1, param2, ... */) {
var self = this;
var buildParams = slice.call(arguments);
var typeName = self.typeName;
assert.strictEqual(self.finalized, false);
isString.arrayOf().assert(buildParams);
// Every buildable type will have its "type" field filled in
// automatically. This includes types that are not subtypes of Node,
// like SourceLocation, but that seems harmless (TODO?).
self.field("type", typeName, function() { return typeName });
// Override Dp.buildable for this Def instance.
Object.defineProperty(self, "buildable", { value: true });
Object.defineProperty(builders, getBuilderName(typeName), {
enumerable: true,
value: function() {
var args = arguments;
var argc = args.length;
var built = Object.create(nodePrototype);
assert.ok(
self.finalized,
"attempting to instantiate unfinalized type " + typeName);
function add(param, i) {
if (hasOwn.call(built, param))
return;
var all = self.allFields;
assert.ok(hasOwn.call(all, param), param);
var field = all[param];
var type = field.type;
var value;
if (isNumber.check(i) && i < argc) {
value = args[i];
} else if (field.defaultFn) {
// Expose the partially-built object to the default
// function as its `this` object.
value = field.defaultFn.call(built);
} else {
var message = "no value or default function given for field " +
JSON.stringify(param) + " of " + typeName + "(" +
buildParams.map(function(name) {
return all[name];
}).join(", ") + ")";
assert.ok(false, message);
}
assert.ok(
type.check(value),
shallowStringify(value) +
" does not match field " + field +
" of type " + typeName);
// TODO Could attach getters and setters here to enforce
// dynamic type safety.
built[param] = value;
}
buildParams.forEach(function(param, i) {
add(param, i);
});
Object.keys(self.allFields).forEach(function(param) {
add(param); // Use the default value.
});
// Make sure that the "type" field was filled automatically.
assert.strictEqual(built.type, typeName);
return built;
}
});
return self; // For chaining.
};
function getBuilderName(typeName) {
return typeName.replace(/^[A-Z]+/, function(upperCasePrefix) {
var len = upperCasePrefix.length;
switch (len) {
case 0: return "";
// If there's only one initial capital letter, just lower-case it.
case 1: return upperCasePrefix.toLowerCase();
default:
// If there's more than one initial capital letter, lower-case
// all but the last one, so that XMLDefaultDeclaration (for
// example) becomes xmlDefaultDeclaration.
return upperCasePrefix.slice(
0, len - 1).toLowerCase() +
upperCasePrefix.charAt(len - 1);
}
});
}
// The reason fields are specified using .field(...) instead of an object
// literal syntax is somewhat subtle: the object literal syntax would
// support only one key and one value, but with .field(...) we can pass
// any number of arguments to specify the field.
Dp.field = function(name, type, defaultFn, hidden) {
assert.strictEqual(this.finalized, false);
this.ownFields[name] = new Field(name, type, defaultFn, hidden);
return this; // For chaining.
};
var namedTypes = {};
Object.defineProperty(exports, "namedTypes", {
value: namedTypes
});
// Get the value of an object property, taking object.type and default
// functions into account.
Object.defineProperty(exports, "getFieldValue", {
value: function(object, fieldName) {
var d = Def.fromValue(object);
if (d) {
var field = d.allFields[fieldName];
if (field) {
return field.getValue(object);
}
}
return object[fieldName];
}
});
// Iterate over all defined fields of an object, including those missing
// or undefined, passing each field name and effective value (as returned
// by getFieldValue) to the callback. If the object has no corresponding
// Def, the callback will never be called.
Object.defineProperty(exports, "eachField", {
value: function(object, callback, context) {
var d = Def.fromValue(object);
if (d) {
var all = d.allFields;
Object.keys(all).forEach(function(name) {
var field = all[name];
if (!field.hidden) {
callback.call(this, name, field.getValue(object));
}
}, context);
} else {
assert.strictEqual(
"type" in object, false,
"did not recognize object of type " + JSON.stringify(object.type)
);
// If we could not infer a Def type for this object, just
// iterate over its keys in the normal way.
Object.keys(object).forEach(function(name) {
callback.call(this, name, object[name]);
}, context);
}
}
});
// Similar to eachField, except that iteration stops as soon as the
// callback returns a truthy value. Like Array.prototype.some, the final
// result is either true or false to indicates whether the callback
// returned true for any element or not.
Object.defineProperty(exports, "someField", {
value: function(object, callback, context) {
var d = Def.fromValue(object);
if (d) {
var all = d.allFields;
return Object.keys(all).some(function(name) {
var field = all[name];
if (!field.hidden) {
var value = field.getValue(object);
return callback.call(this, name, value);
}
}, context);
}
assert.strictEqual(
"type" in object, false,
"did not recognize object of type " + JSON.stringify(object.type)
);
// If we could not infer a Def type for this object, just iterate
// over its keys in the normal way.
return Object.keys(object).some(function(name) {
return callback.call(this, name, object[name]);
}, context);
}
});
// This property will be overridden as true by individual Def instances
// when they are finalized.
Object.defineProperty(Dp, "finalized", { value: false });
Dp.finalize = function() {
// It's not an error to finalize a type more than once, but only the
// first call to .finalize does anything.
if (!this.finalized) {
var allFields = this.allFields;
var allSupertypes = this.allSupertypes;
this.baseNames.forEach(function(name) {
var def = defCache[name];
def.finalize();
extend(allFields, def.allFields);
extend(allSupertypes, def.allSupertypes);
});
// TODO Warn if fields are overridden with incompatible types.
extend(allFields, this.ownFields);
allSupertypes[this.typeName] = this;
// Types are exported only once they have been finalized.
Object.defineProperty(namedTypes, this.typeName, {
enumerable: true,
value: this.type
});
Object.defineProperty(this, "finalized", { value: true });
}
};
function extend(into, from) {
Object.keys(from).forEach(function(name) {
into[name] = from[name];
});
return into;
};
Object.defineProperty(exports, "finalize", {
// This function should be called at the end of any complete file of
// type definitions. It declares that everything defined so far is
// complete and needs no further modification, and defines all
// finalized types as properties of exports.namedTypes.
value: function() {
Object.keys(defCache).forEach(function(name) {
defCache[name].finalize();
});
}
}); | PypiClean |
/CLAchievements-0.1.0.tar.gz/CLAchievements-0.1.0/doc/plugin.rst | .. _plugin:
Write your own achievement
==========================
Achievement without persistent data
-----------------------------------
Suppose you want to create an achievement ``Foo`` awarded when the user successfully runs a command on a file :file:`foo`. Let's write this achievement.
Meta-information
""""""""""""""""
First, we need to define a class and its meta-information: any achievement is a subclass of :class:`~clachievements.achievements.__init__.Achievement`. Two attributes are compulsory:
* :attr:`~clachievements.achievements.__init__.Achievement.title`: if ``None``, your class is an abstract achievement, meant to be subclassed; if a string, your achievement is an *actual* achievement. See the :class:`class documentation <clachievements.achievements.__init__.Achievement>` for other attributes;
* `description`: your achievement must have a description. The first non-empty line of your class docstring is used, unless :attr:`~clachievements.achievements.__init__.Achievement._description` is defined, in which case it is used instead.
See :class:`the class documentation <clachievements.achievements.__init__.Achievement>` to get more information about other attributes.
.. code-block:: python
from clachievements.achievements import Achievement
from clachievements.testutils import test_lock, test_unlock
class Foo(Achievement):
"""Successfully run a command on file `foo`."""
title = "Foo"
Unlocking the achievement
"""""""""""""""""""""""""
Great: you have an achievement. But it is never unlocked, which would be frustrating for the user.
An achievement is a :ref:`context manager <typecontextmanager>`: its :meth:`~contextmanager.__enter__` and :meth:`~contextmanager.__exit__` methods are called before and after the actual system call. They can be used to test the command line, the environment before and after the command, etc.
Here, we test that:
* ``foo`` is a positional argument;
* the command did not fail.
If so, we call :meth:`~clachievements.achievements.__init__.Achievement.unlock()` to unlock the achievement. It ensures that the achievement is marked as unlocked, and it displays a pop-up to notify the user. There is no need to worry about parallel calls to your achievement unlocking it at the same time: that case is handled within the :meth:`~clachievements.achievements.__init__.Achievement.unlock()` method itself.
.. code-block:: python
from clachievements.achievements import Achievement
from clachievements.testutils import test_lock, test_unlock
class Foo(Achievement):
"""Successfully run a command on file `foo`."""
title = "Foo"
def __exit__(self, exc_type, exc_value, traceback):
if "foo" in self.command.positional:
if isinstance(exc_value, SystemExit):
if exc_value.code == 0:
self.unlock()
.. _testing:
Testing
"""""""
If we stop here, the achievement will work, but the unit tests will fail. An achievement *must* define a test that unlocks the achievement.
Each achievement must define a static or class method, :pep:`decorated <318>` with :func:`~clachievements.testutils.test_unlock`. This method must yield strings, which are shell commands that unlock the achievement. To be wrapped by CLAchievements, system calls must use string substitution: ``"foo bar"`` will call the ``foo`` binary, *not wrapped* by CLAchievements, whereas ``"{bin.foo} bar"`` will call the ``foo`` binary, wrapped by CLAchievements.
When performing tests, each test method is run inside an empty temporary directory, which will be deleted afterward.
.. code-block:: python
from clachievements.achievements import Achievement
from clachievements.testutils import test_lock, test_unlock
class Foo(Achievement):
"""Successfully run a command on file `foo`."""
title = "Foo"
def __exit__(self, exc_type, exc_value, traceback):
if "foo" in self.command.positional:
if isinstance(exc_value, SystemExit):
if exc_value.code == 0:
self.unlock()
@staticmethod
@test_unlock
def test_touch():
yield "{bin.touch} foo"
@staticmethod
@test_lock
def test_ls():
yield "{bin.ls} foo"
Achievement with persistent data
--------------------------------
Now, we want a new achievement ``FooBar`` to be triggered when 50 successful commands have been run on a file :file:`foo`. Let's do this.
To do this, we have to store the number of successful commands. A class is defined to ease this process: :class:`~clachievements.achievements.__init__.SimplePersistentDataAchievement`. It is wrong (see below), but it works for simple cases.
When using this class, a row is created in the CLAchievements database with this achievement name.
* The first time this achievement is created, this row is filled with the content of attribute :attr:`~clachievements.achievements.__init__.SimplePersistentDataAchievement.default_data`.
* When accessing to :attr:`~clachievements.achievements.__init__.SimplePersistentDataAchievement.data`, data is read from the database.
* When assigning a value to :attr:`~clachievements.achievements.__init__.SimplePersistentDataAchievement.data`, data is written to the database.
Any :mod:`picklable <pickle>` data can be stored using this method.
This is simple, but this is not robust to concurrent access: if an integrity error occurs when assigning a value to :attr:`~clachievements.achievements.__init__.SimplePersistentDataAchievement.data`, it is silently ignored.
With this example achievement, if I run this argument 50 times in parallel, about 30 of the assignments are ignored. If I were to design a life critical application, this would be a big issues. But this is only a game: it does not work perfectly, but it is so much simpler to implement!
.. code-block:: python
from clachievements.achievements import SimplePersistentDataAchievement
from clachievements.testutils import test_lock, test_unlock
class FooBar(SimplePersistentDataAchievement):
"""Successfully run 50 command on file `foo`."""
title = "FooBar"
default_data = 0
def __exit__(self, exc_type, exc_value, traceback):
if "foo" in self.command.positional:
if isinstance(exc_value, SystemExit):
if exc_value.code == 0:
self.data += 1
if self.data >= 50:
self.unlock()
@staticmethod
@test_lock
def test_touch():
for _ in range(49):
yield "{bin.touch} foo"
@staticmethod
@test_unlock
def test_ls_touch():
for _ in range(25):
yield "{bin.touch} foo"
yield "{bin.ls} foo"
More
----
Suppose this error-prone persistent data management does not suit you. Just write your own: within the achievement, the :class:`sqlite3 database connection <sqlite3.Connection>` is available as :attr:`self.database.conn`. Do whatever you want with it (without breaking other plugin databases)!
In this case, to be sure not to mess with tables of CLA core or other plugins, use the tables named (case insensitive) ``achievement_YourPluginName`` or ``achievement_YourPluginName_*``.
Methods :meth:`~clachievements.achievements.__init__.Achievement.first` and :meth:`~clachievements.achievements.__init__.Achievement.last` can be used to initialize or clean up the achievement: the first one is called the first time the achievement is ever loaded (so it can be used to create tables in the database), while the last one is called when the achievement has just been unlocked (so it can be used to clean things up). Both these methods are meant to be subclassed, and are expected to call the parent implementation (e.g. ``super().first(...)``) at the beginning of their code.
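As an illustration only, here is a minimal sketch of an achievement that manages its own table through the raw connection. The achievement name ``FooCount``, the table layout, the threshold of 10 and the exact signature used for ``first`` are assumptions made for this example; only ``self.database.conn``, ``title``, ``__exit__``, ``self.command.positional`` and :meth:`~clachievements.achievements.__init__.Achievement.unlock` come from the interfaces described above. Remember that a real achievement must also define test methods, as explained in the Testing section.

.. code-block:: python

    from clachievements.achievements import Achievement

    class FooCount(Achievement):
        """Hypothetical achievement: run 10 commands mentioning the file `foo`."""
        title = "FooCount"

        def first(self, *args, **kwargs):
            # Signature assumed: simply forward everything to the parent implementation.
            super().first(*args, **kwargs)
            # Private table, named after the plugin as recommended above.
            self.database.conn.execute(
                "CREATE TABLE IF NOT EXISTS achievement_FooCount (count INTEGER)"
                )
            self.database.conn.execute(
                "INSERT INTO achievement_FooCount (count) VALUES (0)"
                )
            self.database.conn.commit()

        def __exit__(self, exc_type, exc_value, traceback):
            if "foo" not in self.command.positional:
                return
            conn = self.database.conn
            conn.execute("UPDATE achievement_FooCount SET count = count + 1")
            conn.commit()
            count = conn.execute("SELECT count FROM achievement_FooCount").fetchone()[0]
            if count >= 10:
                self.unlock()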
| PypiClean |
/pydbproperties-0.3.tar.gz/pydbproperties-0.3/pydbproperties.py |
from __future__ import print_function
import re
import sys
__doc__ = """
author: José Roberto Meza Cabrera
mail: [email protected]
With this script, you can store properties in a
MySQL table, you can change the name of the table, and
list the properties to an output stream or file
stream, etc.
"""
try:
import myquerybuilder
except:
print('MySql Simple Query Builder module not found')
print('pip install MysqlSimpleQueryBuilder')
sys.exit(1)
pass
NULL = ('', None, (), [], {})
class pydbproperties():
"""
    A Python implementation of pyjavaproperties for databases
"""
def __init__(self):
# Dictionary of properties.
self._props = {}
self.bspacere = re.compile(r'\\(?!\s$)')
        self.othercharre = re.compile(r'(?<!\\)(\s*\=)|(?<!\\)(\s*\:)')
        # Separator regex that does not account for escaping; used by __parse().
        self.othercharre2 = re.compile(r'(\s*\=)|(\s*\:)')
# Dictionary mapping keys from property
# Dictionary to pristine dictionary
self._keymap = {}
# This is used for dumping the properties to a file
# using the 'store' method
self._origprops = {}
self._keyorder = []
# Connection to DB
self._conn = None
# Table name for properties
self._table_name = 'pydbproperties'
# auto store and auto load atributes
self._auto_load = False
self._auto_store = False
pass
def set_auto_load(self, boolean):
""" Set True for working directly with the database """
self._auto_load = boolean
pass
def set_auto_store(self, boolean):
""" Set True for working directly with the database """
self._auto_store = boolean
pass
def get_db_connector(self):
return self._conn
def _load(self):
if self._auto_load and self._conn is not None:
self.load()
pass
pass
def _store(self):
if self._auto_store and self._conn is not None:
self.store()
pass
pass
def get_property(self, key):
""" Return a property for the given key """
self._load()
return self._props.get(key, '')
def set_property(self, key, value):
""" Set the property for the given key """
if type(key) is str and type(value) is str:
if len(key) != 0:
self.process_pair(key, value)
self._store()
else:
raise ValueError("key can't be null!")
else:
raise TypeError('both key and value should be strings!')
pass
def process_pair(self, key, value):
""" Process a (key, value) pair """
oldkey = key
oldvalue = value
# Create key intelligently
keyparts = self.bspacere.split(key)
# print keyparts
strippable = False
lastpart = keyparts[-1]
if lastpart.find('\\ ') != -1:
keyparts[-1] = lastpart.replace('\\', '')
# If no backspace is found at the end, but empty
# space is found, strip it
elif lastpart and lastpart[-1] == ' ':
strippable = True
key = ''.join(keyparts)
if strippable:
key = key.strip()
oldkey = oldkey.strip()
# oldvalue = self.unescape(oldvalue)
# value = self.unescape(value)
# Patch from N B @ ActiveState
curlies = re.compile("{.+?}")
found = curlies.findall(value)
for f in found:
srcKey = f[1:-1]
if srcKey in self._props:
value = value.replace(f, self._props[srcKey], 1)
self._props[key] = value.strip()
# Check if an entry exists in pristine keys
if key in self._keymap:
oldkey = self._keymap.get(key)
self._origprops[oldkey] = oldvalue.strip()
else:
self._origprops[oldkey] = oldvalue.strip()
# Store entry in keymap
self._keymap[key] = oldkey
if key not in self._keyorder:
self._keyorder.append(key)
pass
def escape(self, value):
# Java escapes the '=' and ':' in the value
# string with backslashes in the store method.
# So let us do the same.
newvalue = value.replace(':', '\:')
newvalue = newvalue.replace('=', '\=')
return newvalue
def unescape(self, value):
# Reverse of escape
newvalue = value.replace('\:', ':')
newvalue = newvalue.replace('\=', '=')
return newvalue
def list(self, out=sys.stdout):
""" Prints a listing of the properties to the
stream 'out' which defaults to the standard output """
self._load()
if out == sys.stdout or type(out) is file:
out.write('-- listing properties --\n')
for key, value in self._props.items():
out.write(''.join((key, '=', value, '\n')))
pass
pass
else:
raise TypeError('Argument should be a file or sys.stdout object!')
pass
def get_property_dict(self):
"""
Returns property dict
"""
self._load()
return self._props
def store(self):
"""
Stores the dict to a database
"""
try:
# Create the table, and be happy without errors
self.create_table()
for prop in self._keyorder:
if prop in self._origprops:
val = self._origprops[prop]
self._conn.ping()
if prop == self._conn.one(('key',), self.get_table_name(),
{'key': prop}):
# if prop == self._conn.query('update `my_table` set
# `key`='key5' where `key`='key0')
# Update
self._conn.update(self.get_table_name(),
{'value': val}, {'key': prop})
else:
# Insert
self._conn.insert(self.get_table_name(),
{'key': prop,
# 'value': self.escape(val)})
'value': val})
except:
raise
pass
def load(self):
"""
Load properties from database
"""
try:
# Create the table, and be happy without errors
self.create_table()
except:
pass
# self._props = {}
# self._keyorder = []
# self._origprops = {}
if self._conn is None:
raise ValueError('Connection not initialized')
attr = ('key', 'value')
if self._table_name in NULL:
raise ValueError('Table name can\'t be null')
self._conn.ping()
properties_dict = self._conn.select(attr, self._table_name)
properties_list = [b.get('key') + '=' + b.get('value') +
'\n' for b in properties_dict]
self.__parse(properties_list)
pass
def __parse(self, lines):
""" Parse a list of lines and create
an internal property dictionary """
# Every line in the file must consist of either a comment
# or a key-value pair. A key-value pair is a line consisting
# of a key which is a combination of non-white space characters
# The separator character between key-value pairs is a '=',
# ':' or a whitespace character not including the newline.
# If the '=' or ':' characters are found, in the line, even
# keys containing whitespace chars are allowed.
# A line with only a key according to the rules above is also
# fine. In such case, the value is considered as the empty string.
# In order to include characters '=' or ':' in a key or value,
# they have to be properly escaped using the backslash character.
# Some examples of valid key-value pairs:
#
# key value
# key=value
# key:value
# key value1,value2,value3
# key value1,value2,value3 \
# value4, value5
# key
# This key= this value
# key = value1 value2 value3
# Any line that starts with a '#' or '!' is considerered a comment
# and skipped. Also any trailing or preceding whitespaces
# are removed from the key/value.
# This is a line parser. It parses the
# contents like by line.
lineno = 0
i = iter(lines)
for line in i:
lineno += 1
line = line.strip()
# Skip null lines
if not line:
continue
# Skip lines which are comments
if line[0] in ('#', '!'):
continue
# Some flags
# escaped = False
# Position of first separation char
sepidx = -1
# A flag for performing wspace re check
# flag = 0
# Check for valid space separation
# First obtain the max index to which we
# can search.
m = self.othercharre.search(line)
if m:
first, last = m.span()
start, end = 0, first
# flag = 1
wspacere = re.compile(r'(?<![\\\=\:])(\s)')
else:
if self.othercharre2.search(line):
# Check if either '=' or ':' is present
# in the line. If they are then it means
# they are preceded by a backslash.
# This means, we need to modify the
# wspacere a bit, not to look for
# : or = characters.
wspacere = re.compile(r'(?<![\\])(\s)')
start, end = 0, len(line)
m2 = wspacere.search(line, start, end)
if m2:
# print 'Space match=>',line
# Means we need to split by space.
first, last = m2.span()
sepidx = first
elif m:
# print 'Other match=>',line
# No matching wspace char found, need
# to split by either '=' or ':'
first, last = m.span()
sepidx = last - 1
# print line[sepidx]
pass
# If the last character is a backslash
# it has to be preceded by a space in which
# case the next line is read as part of the
# same property
while line[-1] == '\\':
# Read next line
nextline = i.next()
nextline = nextline.strip()
lineno += 1
# This line will become part of the value
line = line[:-1] + nextline
pass
# Now split to key,value according to separation char
if sepidx != -1:
key, value = line[:sepidx], line[sepidx+1:]
else:
key, value = line, ''
self._keyorder.append(key)
self.process_pair(key, value)
pass
pass
def set_table_name(self, table_name):
"""
Sets table name
"""
if table_name not in NULL:
self._table_name = table_name
return
raise ValueError('Table name can\'t be null')
def get_table_name(self):
"""
Returns table name
"""
return self._table_name
def get_property_names(self):
""" Return an iterator over all the keys of the property
dictionary, i.e the names of the properties """
self._load()
return self._props.keys()
def remove_property(self, property):
"""
Remove a property
if property is None: remove all properties
"""
if self._auto_store and self._conn is not None:
self.remove_property_db(property)
if property is None:
self._props = {}
self._keyorder = []
self._keymap = {}
pass
else:
try:
self._props.pop(property)
self._keyorder.remove(property)
self._keymap.pop(property)
except:
pass
pass
def remove_property_db(self, prop):
"""
Remove a property directly from a database
if property is None: remove all properties directly from a database
"""
if prop is None:
value = None
else:
value = {'key': prop}
pass
self._conn.ping()
self._conn.delete(self.get_table_name(), value)
pass
def __getitem__(self, name):
""" To support direct dictionary like access """
return self.get_property(name)
def __setitem__(self, name, value):
""" To support direct dictionary like access """
self.set_property(name, value)
pass
def conn(self, **kwargs):
"""
        Instantiate a connection to the database
"""
try:
self._conn = myquerybuilder.QueryBuilder(**kwargs)
except:
print('An error has occurred\n')
raise
pass
def create_table(self):
"""
        Create a table. If you don't use the set_table_name() method, the
        table name defaults to 'pydbproperties'.
"""
def validate_table():
"""
            This is a helper for the create_table() method; it returns
            True if the table definition is correct.
"""
try:
aux = self._conn.query("describe " +
self._table_name).fetchall()
key = aux[0]
value = aux[1]
if len(aux) != 2 or\
key['Field'] != 'key' or \
value['Field'] != 'value' or\
not key['Type'].lower().startswith('varchar') or\
not value['Type'].lower().startswith('longtext') or\
not key['Null'].upper() == 'NO' or\
not value['Null'].upper() == 'YES':
return False
return True
except:
return False
query = """
create table {0} ( `key` varchar(30) not null,
`value` longtext null, primary key (`key`));
""".format(self.get_table_name())
try:
self._conn.ping()
self._conn.query(query)
except:
pass
return validate_table()
        pass
/IFEM-to-VT-2.0.2.tar.gz/IFEM-to-VT-2.0.2/ifem_to_vt/geometry.py
from abc import ABC, abstractmethod
from collections import OrderedDict
from io import StringIO
import lrspline as lr
import numpy as np
from singledispatchmethod import singledispatchmethod
import splipy.io
from splipy import SplineObject, BSplineBasis
import treelog as log
from typing import Tuple, Any, Union, IO, Dict, Hashable, List
from .typing import Array2D, BoundingBox, PatchID, Shape
from . import config
from .util import (
prod, flatten_2d, ensure_ncomps,
subdivide_face, subdivide_linear, subdivide_volume,
structured_cells,
)
# Abstract superclasses
# ----------------------------------------------------------------------
class CellType:
num_nodes: int
num_pardim: int
structured: bool
class Quad(CellType):
num_nodes = 4
num_pardim = 2
structured = True
class Hex(CellType):
num_nodes = 8
num_pardim = 3
structured = True
# Abstract superclasses
# ----------------------------------------------------------------------
class Patch(ABC):
key: PatchID
@property
@abstractmethod
def num_physdim(self) -> int:
"""Number of physical dimensions."""
pass
@property
@abstractmethod
def num_pardim(self) -> int:
"""Number of parametric dimensions."""
pass
@property
@abstractmethod
def num_nodes(self) -> int:
"""Number of nodes."""
pass
@property
@abstractmethod
def num_cells(self) -> int:
"""Number of cells."""
pass
@property
@abstractmethod
def bounding_box(self) -> BoundingBox:
"""Hashable bounding box."""
pass
@abstractmethod
def tesselate(self) -> 'UnstructuredPatch':
"""Convert to a suitable discrete representation.
Currently an UnstructuredPatch.
"""
pass
@abstractmethod
def tesselate_field(self, coeffs: Array2D, cells: bool = False) -> Array2D:
"""Convert a nodal or cell field to the same representation as
returned by tesselate.
"""
pass
@abstractmethod
def ensure_ncomps(self, ncomps: int, allow_scalar: bool = True):
pass
class Tesselator(ABC):
def __init__(self, patch: Patch):
self.source_patch = patch
@abstractmethod
def tesselate(self, patch: Patch) -> Patch:
pass
@abstractmethod
def tesselate_field(self, patch: Patch, coeffs: Array2D, cells: bool = False) -> Array2D:
pass
# Unstructured and structured support
# ----------------------------------------------------------------------
class UnstructuredPatch(Patch):
"""A patch that represents an unstructured collection of nodes and
cells. This is the lowest common grid form: all other grids
    should be convertible to it.
"""
nodes: Array2D
celltype: CellType
_cells: Array2D
def __init__(self, key: PatchID, nodes: Array2D, cells: Array2D, celltype: CellType):
assert nodes.ndim == cells.ndim == 2
self.key = key
self.nodes = nodes
self._cells = cells
self.celltype = celltype
assert cells.shape[-1] == celltype.num_nodes
@property
def cells(self) -> Array2D:
return self._cells
@classmethod
def from_lagrangian(cls, key: PatchID, data: Union[bytes, str]) -> 'UnstructuredPatch':
if isinstance(data, bytes):
data = data.decode()
assert isinstance(data, str)
assert data.startswith('# LAGRANGIAN')
all_lines = data.split('\n')
specs, lines = all_lines[0][12:].split(), iter(all_lines[1:])
# Decode nodes, elements, type
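        # Illustrative header line (added annotation, not from the original
        # source), matching the specs parsed below:
        #   "# LAGRANGIAN nodes=125 elements=64 type=hexahedron"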
assert specs[0].startswith('nodes=')
nnodes = int(specs[0].split('=')[-1])
assert specs[1].startswith('elements=')
ncells = int(specs[1].split('=')[-1])
assert specs[2].startswith('type=')
celltype = specs[2].split('=')[-1]
if celltype not in ('hexahedron',):
raise ValueError("Unknown cell type: {}".format(celltype))
# Read nodes and cells
nodes = np.zeros((nnodes, 3))
for i in range(nnodes):
nodes[i] = list(map(float, next(lines).split()))
cells = np.zeros((ncells, 8), dtype=np.int32)
for i in range(ncells):
cells[i] = list(map(int, next(lines).split()))
cells[:,6], cells[:,7] = np.array(cells[:,7]), np.array(cells[:,6])
cells[:,2], cells[:,3] = np.array(cells[:,3]), np.array(cells[:,2])
return cls(key, nodes, cells, celltype=Hex())
@property
def num_physdim(self) -> int:
return self.nodes.shape[-1]
@property
def num_pardim(self) -> int:
return self.celltype.num_pardim
@property
def bounding_box(self) -> BoundingBox:
return tuple(
(np.min(self.nodes[:,i]), np.max(self.nodes[:,i]))
for i in range(self.num_physdim)
)
@property
def num_nodes(self) -> int:
return len(self.nodes)
@property
def num_cells(self) -> int:
return len(self.cells)
def tesselate(self) -> 'UnstructuredPatch':
return self
def tesselate_field(self, coeffs: Array2D, cells: bool = False) -> Array2D:
if cells:
return coeffs.reshape((self.num_cells, -1))
return coeffs.reshape((self.num_nodes, -1))
def ensure_ncomps(self, ncomps: int, allow_scalar: bool = True):
self.nodes = ensure_ncomps(self.nodes, ncomps, allow_scalar)
class StructuredPatch(UnstructuredPatch):
    """A patch that represents a structured collection of nodes and
    cells. It is interchangeable with UnstructuredPatch.
"""
shape: Shape
def __init__(self, key: PatchID, nodes: Array2D, shape: Shape, celltype: CellType):
self.key = key
self.nodes = nodes
self.celltype = celltype
self.shape = shape
assert celltype.structured
assert len(shape) == celltype.num_pardim
assert prod(k+1 for k in shape) == len(nodes)
@property
def cells(self) -> Array2D:
return structured_cells(self.shape, self.num_pardim)
@property
def num_cells(self) -> int:
return prod(self.shape)
# LRSpline support
# ----------------------------------------------------------------------
class LRPatch(Patch):
def __init__(self, key: PatchID, obj: Union[bytes, str, lr.LRSplineObject]):
if isinstance(obj, bytes):
obj = obj.decode()
if isinstance(obj, str):
if obj.startswith('# LRSPLINE SURFACE'):
obj = lr.LRSplineSurface(obj)
elif obj.startswith('# LRSPLINE VOLUME'):
obj = lr.LRSplineVolume(obj)
assert isinstance(obj, lr.LRSplineObject)
self.obj = obj
self.key = key
@property
def num_physdim(self) -> int:
return self.obj.dimension
@property
def num_pardim(self) -> int:
return self.obj.pardim
@property
def bounding_box(self) -> BoundingBox:
return tuple(
(np.min(self.obj.controlpoints[:,i]), np.max(self.obj.controlpoints[:,i]))
for i in range(self.num_physdim)
)
@property
def num_nodes(self) -> int:
return len(self.obj)
@property
def num_cells(self) -> int:
return len(self.obj.elements)
def tesselate(self) -> UnstructuredPatch:
tess = LRTesselator(self)
return tess.tesselate(self)
def tesselate_field(self, coeffs: Array2D, cells: bool = False) -> Array2D:
tess = LRTesselator(self)
return tess.tesselate_field(self, coeffs, cells=cells)
def ensure_ncomps(self, ncomps: int, allow_scalar: bool = True):
self.obj.controlpoints = ensure_ncomps(self.obj.controlpoints, ncomps, allow_scalar)
class LRTesselator(Tesselator):
def __init__(self, patch: LRPatch):
super().__init__(patch)
nodes: Dict[Tuple[float, ...], int] = dict()
cells: List[List[int]] = []
subdivider = subdivide_face if patch.obj.pardim == 2 else subdivide_volume
for el in patch.obj.elements:
subdivider(el, nodes, cells, config.nvis)
self.nodes = np.array(list(nodes))
self.cells = np.array(cells, dtype=int)
@singledispatchmethod
def tesselate(self, patch: Patch) -> UnstructuredPatch:
raise NotImplementedError
@tesselate.register(LRPatch)
def _1(self, patch: LRPatch) -> UnstructuredPatch:
spline = patch.obj
nodes = np.array([spline(*node) for node in self.nodes], dtype=float)
celltype = Hex() if patch.num_pardim == 3 else Quad()
return UnstructuredPatch((*patch.key, 'tesselated'), nodes, self.cells, celltype=celltype)
@singledispatchmethod
def tesselate_field(self, patch: Patch, coeffs: Array2D, cells: bool = False) -> Array2D:
raise NotImplementedError
@tesselate_field.register(LRPatch)
def _2(self, patch: LRPatch, coeffs: Array2D, cells: bool = False) -> Array2D:
spline = patch.obj
if not cells:
# Create a new patch with substituted control points, and
# evaluate it at the predetermined knot values.
newspline = spline.clone()
newspline.controlpoints = coeffs.reshape((len(spline), -1))
return np.array([newspline(*node) for node in self.nodes], dtype=float)
# For every cell center, check which cell it belongs to in the
# reference spline, then use that coefficient.
coeffs = flatten_2d(coeffs)
cell_centers = [np.mean(self.nodes[c,:], axis=0) for c in self.cells]
return np.array([coeffs[spline.element_at(*c).id, :] for c in cell_centers])
# Splipy support
# ----------------------------------------------------------------------
class G2Object(splipy.io.G2):
"""G2 reader subclass to allow reading from a stream."""
def __init__(self, fstream: IO, mode: str):
self.fstream = fstream
self.onlywrite = mode == 'w'
super(G2Object, self).__init__('')
def __enter__(self) -> 'G2Object':
return self
class SplinePatch(Patch):
"""A representation of a Splipy SplineObject."""
def __init__(self, key: PatchID, obj: Union[bytes, str, SplineObject]):
if isinstance(obj, bytes):
obj = obj.decode()
if isinstance(obj, str):
g2data = StringIO(obj)
with G2Object(g2data, 'r') as g:
obj = g.read()[0]
assert isinstance(obj, SplineObject)
self.obj = obj
self.key = key
@property
def num_physdim(self) -> int:
return self.obj.dimension
@property
def num_pardim(self) -> int:
return self.obj.pardim
@property
def bounding_box(self) -> BoundingBox:
return tuple(
(np.min(self.obj.controlpoints[...,i]), np.max(self.obj.controlpoints[...,i]))
for i in range(self.num_physdim)
)
@property
def num_nodes(self) -> int:
return len(self.obj)
@property
def num_cells(self) -> int:
return prod(len(k) - 1 for k in self.obj.knots())
def tesselate(self) -> UnstructuredPatch:
tess = TensorTesselator(self)
return tess.tesselate(self)
def tesselate_field(self, coeffs: Array2D, cells: bool = False) -> Array2D:
tess = TensorTesselator(self)
return tess.tesselate_field(self, coeffs, cells=cells)
def ensure_ncomps(self, ncomps: int, allow_scalar: bool = True):
if allow_scalar and self.obj.dimension == 1:
return
self.obj.set_dimension(ncomps)
class TensorTesselator(Tesselator):
def __init__(self, patch: SplinePatch):
super().__init__(patch)
knots = patch.obj.knots()
self.knots = list(subdivide_linear(kts, config.nvis) for kts in knots)
@singledispatchmethod
def tesselate(self, patch: Patch) -> Patch:
raise NotImplementedError
@tesselate.register(SplinePatch)
def _1(self, patch: SplinePatch) -> UnstructuredPatch:
nodes = flatten_2d(patch.obj(*self.knots))
celltype = Hex() if patch.num_pardim == 3 else Quad()
cellshape = tuple(len(kts) - 1 for kts in self.knots)
return StructuredPatch((*patch.key, 'tesselated'), nodes, cellshape, celltype=celltype)
@singledispatchmethod
def tesselate_field(self, patch: Patch, coeffs: Array2D, cells: bool = False) -> Array2D:
raise NotImplementedError
@tesselate_field.register(SplinePatch)
def _2(self, patch: SplinePatch, coeffs: Array2D, cells: bool = False) -> Array2D:
spline = patch.obj
if not cells:
# Create a new patch with substituted control points, and
# evaluate it at the predetermined knot values.
coeffs = splipy.utils.reshape(coeffs, spline.shape, order='F')
if spline.rational:
coeffs = np.concatenate((coeffs, spline.controlpoints[..., -1, np.newaxis]), axis=-1)
newspline = SplineObject(spline.bases, coeffs, spline.rational, raw=True)
knots = self.knots
else:
# Create a piecewise constant spline object, and evaluate
# it in cell centers.
bases = [BSplineBasis(1, kts) for kts in spline.knots()]
shape = tuple(b.num_functions() for b in bases)
coeffs = splipy.utils.reshape(coeffs, shape, order='F')
newspline = SplineObject(bases, coeffs, False, raw=True)
knots = [[(a+b)/2 for a, b in zip(t[:-1], t[1:])] for t in self.knots]
return flatten_2d(newspline(*knots))
# GeometryManager
# ----------------------------------------------------------------------
class GeometryManager:
patch_keys: Dict[PatchID, int]
bounding_boxes: Dict[BoundingBox, int]
def __init__(self):
self.patch_keys = dict()
self.bounding_boxes = dict()
def update(self, patch: Patch):
if patch.key not in self.patch_keys:
patchid = len(self.patch_keys)
log.debug(f"New unique patch detected, assigned ID {patchid}")
self.patch_keys[patch.key] = patchid
else:
patchid = self.patch_keys[patch.key]
self.bounding_boxes[patch.bounding_box] = patchid
return patchid
def global_id(self, patch: Patch):
try:
return self.bounding_boxes[patch.bounding_box]
except KeyError:
log.error("Unable to find corresponding geometry patch")
            return None
/fabric-credmgr-client-1.6.0b3.tar.gz/fabric-credmgr-client-1.6.0b3/docs/Status400BadRequestErrors.md
# Status400BadRequestErrors
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**message** | **str** | | [optional] [default to 'Bad Request']
**details** | **str** | | [optional]
**type** | **str** | | [optional] [default to 'error']
**size** | **int** | | [optional] [default to 1]
**status** | **int** | | [optional] [default to 400]
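
A minimal construction sketch (not part of the generated documentation; the import path and the field values below are assumptions, not taken from this page):

```python
# Hypothetical example -- the module path is assumed, not documented here.
from fabric_credmgr_client.models import Status400BadRequestErrors

error = Status400BadRequestErrors(
    message='Bad Request',                 # default value
    details='Token missing from request',  # illustrative detail text
    type='error',
    size=1,
    status=400,
)
print(error.status)  # 400
```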
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
/Organise-2.0.0.tar.gz/Organise-2.0.0/README.md
[![Downloads](https://pepy.tech/badge/organise)](https://pepy.tech/project/organise) [![Downloads](https://pepy.tech/badge/organise/month)](https://pepy.tech/project/organise/month) [![Downloads](https://pepy.tech/badge/organise/week)](https://pepy.tech/project/organise/week)
# Auto Organise
A little tool that helps you organise your directory into meaningful subdirectories.
## Installation
```
git clone https://github.com/mayankkushal/auto-organise.git
cd auto-organise
python setup.py install
```
or
`pip install Organise`
## Usage
`organise`
### Help
`organise --help`
For comprehensive details [Read the docs](https://mayankkushal.github.io/auto-organise/)
<br/><br/>
Version 2.0.0
* Added support for organising only selected file types
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/migrate/v20191001/assessment.py
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['Assessment']
class Assessment(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
assessment_name: Optional[pulumi.Input[str]] = None,
e_tag: Optional[pulumi.Input[str]] = None,
group_name: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['AssessmentPropertiesArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
An assessment created for a group in the Migration project.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] assessment_name: Unique name of an assessment within a project.
:param pulumi.Input[str] e_tag: For optimistic concurrency control.
:param pulumi.Input[str] group_name: Unique name of a group within a project.
:param pulumi.Input[str] project_name: Name of the Azure Migrate project.
:param pulumi.Input[pulumi.InputType['AssessmentPropertiesArgs']] properties: Properties of the assessment.
:param pulumi.Input[str] resource_group_name: Name of the Azure Resource Group that project is part of.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if assessment_name is None and not opts.urn:
raise TypeError("Missing required property 'assessment_name'")
__props__['assessment_name'] = assessment_name
__props__['e_tag'] = e_tag
if group_name is None and not opts.urn:
raise TypeError("Missing required property 'group_name'")
__props__['group_name'] = group_name
if project_name is None and not opts.urn:
raise TypeError("Missing required property 'project_name'")
__props__['project_name'] = project_name
if properties is None and not opts.urn:
raise TypeError("Missing required property 'properties'")
__props__['properties'] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:migrate:Assessment"), pulumi.Alias(type_="azure-nextgen:migrate/latest:Assessment")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Assessment, __self__).__init__(
'azure-nextgen:migrate/v20191001:Assessment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Assessment':
"""
Get an existing Assessment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return Assessment(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="eTag")
def e_tag(self) -> pulumi.Output[Optional[str]]:
"""
For optimistic concurrency control.
"""
return pulumi.get(self, "e_tag")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Unique name of an assessment.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.AssessmentPropertiesResponse']:
"""
Properties of the assessment.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Type of the object = [Microsoft.Migrate/assessmentProjects/groups/assessments].
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
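
# Usage sketch (illustrative annotation, not part of the generated SDK; the
# resource names below are placeholders):
#
#   assessment = Assessment(
#       "myAssessment",
#       assessment_name="myAssessment",
#       group_name="myGroup",
#       project_name="myProject",
#       resource_group_name="myResourceGroup",
#       properties=AssessmentPropertiesArgs(...),  # fields defined in ._inputs
#   )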
/batchcompute-cli-1.7.7.tar.gz/batchcompute-cli-1.7.7/src/batchcompute_cli/cli.py
from terminal import red, blue, bold, magenta, white
from .externals.command import Command
from . import const,i18n_util,cli_template,cli_oss,cli_pbs,cli_apps,cli_jobs,cli_clusters,cli_images,cli_project
from .action import login, config, info,events, instance_type,log_fetch,checker,disk_type,resource_type
import sys
import os
####################
## for debug
# from batchcompute.utils.log import get_logger
# logger = get_logger('batchcompute.test', level='DEBUG', file_name='batchcompute_python_sdk.LOG')
#####################
COMMAND = const.COMMAND
CMD = const.CMD
IMG_ID = 'img-xxxxxx'
INS_TYPE = 'ecs.sn1ne.large'
VERSION = const.VERSION
IS_GOD = const.IS_GOD
# SPLITER = '\n --%s' + ('-' * 40)
MSG=i18n_util.msg()
class Cli:
def __help(self):
self.program.print_help()
def __init__(self):
self.program = Command(COMMAND, version=VERSION,
title=bold(magenta('AliCloud BatchCompute CLI')),
usage="Usage: %s <command> [option]" % COMMAND,
func=self.__help,
help_footer=white(' type "%s [command] -h" for more' % CMD))
# login
cmd_login = Command('login',
description=MSG['login']['description'],
func=login.login,
spliter='\n -----%s---------------------------' % blue('config'),
arguments=['region', 'accessKeyId', 'accessKeySecret'],
usage='''Usage: %s login <region> [accessKeyId] [accessKeySecret] [option]
Examples:
1. %s login cn-qingdao kywj6si2hkdfy9 las****bc=
2. %s login cn-qingdao ''' % (COMMAND, CMD, CMD))
self.program.action(cmd_login)
# logout
cmd_logout = Command('logout',
description=MSG['logout']['description'],
func=login.logout)
self.program.action(cmd_logout)
# set config
cmd_config = Command('config', alias=['me','set'], description=MSG['config']['description'],
detail=MSG['config']['detail'],
usage='''Usage: %s <command> config|me|set [option]
Examples:
1. %s config # show configurations
2. %s set -r cn-qingdao -o oss://my-bucket/bcscli/ -l zh_CN
3. %s set -i %s -t %s
4. %s set --vpc_cidr_block 192.168.0.0/16''' % (COMMAND, CMD, CMD,CMD ,IMG_ID, INS_TYPE, CMD),
func=config.all)
cmd_config.option('-r,--region [region]', MSG['config']['option']['region'])
cmd_config.option('-o,--osspath [osspath]', MSG['config']['option']['osspath'] )
cmd_config.option('-l,--locale [locale]', MSG['config']['option']['locale'])
cmd_config.option('-i,--image [imageId]', MSG['config']['option']['image'])
cmd_config.option('-t,--type [instance_type]', MSG['config']['option']['type'])
#cmd_config.option('-c,--cluster [clusterId]', MSG['config']['option']['cluster'])
cmd_config.option('-s,--ssl [ssl]', MSG['config']['option']['ssl'], visible=IS_GOD)
cmd_config.option('-v,--version [version]', MSG['config']['option']['version'], visible=IS_GOD)
cmd_config.option('-u,--qsubuser [qsubuser]', MSG['config']['option']['qsubuser'], visible=IS_GOD)
cmd_config.option('--vpc_cidr_block [vpc_cidr_block]', MSG['config']['option']['vpc_cidr_block'])
cmd_config.option('-g,--god [god]', MSG['config']['option']['god'], visible=IS_GOD)
cmd_config.option('--networktype [networktype]', MSG['config']['option']['networktype'])
self.program.action(cmd_config)
# info
cmd_info = Command('info',alias=['about'], description=MSG['info']['description'],
visible=IS_GOD,
func=info.info)
self.program.action(cmd_info)
# resource type
cmd_resource_type = Command('resource_type', alias=['rt', 'r'],
description=MSG['resource_type']['description'],
usage='''Usage: %s resource_type|rt|r [option]
Examples:
1. %s r ''' % (COMMAND, CMD),
spliter='\n -----%s---------------------------' % blue('query, show'),
func=resource_type.list)
cmd_resource_type.option('--show_json', MSG['resource_type']['option']['show_json'], visible=IS_GOD)
self.program.action(cmd_resource_type)
# instance type
cmd_instance_type = Command('instance_type', alias=['it', 't'],
description=MSG['instance_type']['description'],
usage='''Usage: %s instance_type|it|t [option]
Examples:
1. %s it ''' % (COMMAND, CMD),
func=instance_type.list)
cmd_instance_type.option('--show_json', MSG['instance_type']['option']['show_json'], visible=IS_GOD)
self.program.action(cmd_instance_type)
# disk type
cmd_disk_type = Command('disk_type', alias=['d'],
description=MSG['disk_type']['description'],
usage='''Usage: %s disk_type|d [option]
Examples:
1. %s d ''' % (COMMAND, CMD),
func=disk_type.list)
cmd_disk_type.option('--show_json', MSG['disk_type']['option']['show_json'], visible=IS_GOD)
self.program.action(cmd_disk_type)
# events
cmd_events = Command('event', alias=['e'],
description=MSG['event']['description'],
usage='''Usage: %s event|e [option]
Examples:
1. %s e ''' % (COMMAND, CMD),
func=events.list)
cmd_events.option('--show_json', MSG['event']['option']['show_json'], visible=IS_GOD)
self.program.action(cmd_events)
# images
cmd_images = cli_images.images()
self.program.action(cmd_images)
# apps
cmd_apps = cli_apps.apps()
self.program.action(cmd_apps)
####################################
####################################
########################################
# clusters
cmd_clusters = cli_clusters.clusters()
self.program.action(cmd_clusters)
# jobs
cmd_job = cli_jobs.jobs()
self.program.action(cmd_job)
# log
cmd_log = Command('log',alias=['l'],
arguments=['jobId', 'taskName', 'instanceId'],
description=MSG['log']['description'],
usage='''Usage:
%s log <jobId> [taskName] [instanceId] [options]
Examples:
1. %s log <jobId> # show logs for all instances in a job
2. %s log <jobId> -e # show stderr log only
3. %s log <jobId> -d /path/to/save/logs/ # download logs for all instances in a job
4. %s log <jobId> <taskName> -d /path/to/save/logs/ # download logs for all instances in a task
5. %s log <jobId> <taskName> <intanceId> -d /path/to/save/logs/ # download logs for a instance
6. %s log <No.> <No.> <intanceId> -d /path/to/save/logs/ # use <No.> instead of jobId and taskName''' % (
COMMAND, CMD, CMD, CMD, CMD, CMD, CMD),
func=log_fetch.fetch)
cmd_log.option('-d, --dir_path [dir_path]', MSG['log']['option']['dir_path'])
cmd_log.option('-e, --stderr', MSG['log']['option']['stderr'])
cmd_log.option('-o, --stdout', MSG['log']['option']['stdout'])
cmd_log.option('-m, --log_mapping', MSG['log']['option']['log_mapping'])
self.program.action(cmd_log)
##################################################
# create image
cmd_create_image = cli_images.create()
self.program.action(cmd_create_image)
# delete image
cmd_del_image = cli_images.delete()
self.program.action(cmd_del_image)
##################################################
# create cluster
cmd_create_cluster = cli_clusters.create()
self.program.action(cmd_create_cluster)
# delete cluster
cmd_del_cluster = cli_clusters.delete()
self.program.action(cmd_del_cluster)
# update cluster
cmd_update_cluster = cli_clusters.update()
self.program.action(cmd_update_cluster)
######################
# recreate cluster instance
cmd_recreate_cluster_instance = cli_clusters.recreate_cluster_instance()
self.program.action(cmd_recreate_cluster_instance)
# delete cluster instance
cmd_delete_cluster_instance = cli_clusters.delete_cluster_instance()
self.program.action(cmd_delete_cluster_instance)
#################
cmd_create_app = cli_apps.create()
self.program.action(cmd_create_app)
cmd_update_app = cli_apps.update()
self.program.action(cmd_update_app)
cmd_del_app = cli_apps.delete()
self.program.action(cmd_del_app)
######################
# create job
cmd_create_job = cli_jobs.create()
self.program.action(cmd_create_job)
# restart job
cmd_restart_job = cli_jobs.restart()
self.program.action(cmd_restart_job)
# stop job
cmd_stop_job = cli_jobs.stop()
self.program.action(cmd_stop_job)
# delete job
cmd_del_job = cli_jobs.delete()
self.program.action(cmd_del_job)
# update job
cmd_update_job = cli_jobs.update()
self.program.action(cmd_update_job)
########################################
##############################################
##############################################
# submit job
cmd_submit_job = cli_jobs.submit()
self.program.action(cmd_submit_job)
self.cmd_asub = cli_apps.asub()
self.program.action(self.cmd_asub)
##############
def cmd_oss_print_help():
cmd_oss.print_help()
# oss
cmd_oss = Command('oss', alias=['o'],
description=MSG['oss']['description'],
func=cmd_oss_print_help,
#spliter='\n -----%s----------------' % blue('sub-cmd')
)
self.program.action(cmd_oss)
# sub command for oss
cli_oss.init(cmd_oss)
# check debug
cmd_check = Command('check', alias=['ch'],
arguments=['jobId'],
description=MSG['check']['description'],
usage='''Usage: %s check|ch <jobId|No.> [option]
Examples:
1. %s check job-0000000056D7FE9A0000368000000661
2. %s ch 2 # use No. instead of jobId''' % (
COMMAND, CMD, CMD),
func=checker.check)
self.program.action(cmd_check)
##############################################
#################### template ###################
cmd_template_gen = cli_template.gen()
self.program.action(cmd_template_gen)
#### project ##########################
def cmd_project_print_help():
cmd_project.print_help()
# project
cmd_project = Command('project', alias=['p'],
visible=IS_GOD,
description=MSG['project']['description'],
func=cmd_project_print_help)
self.program.action(cmd_project)
# sub command for project
cli_project.init(cmd_project)
##########################################
##############################################
# qsub
cmd_qsub = cli_pbs.qsub()
self.program.action(cmd_qsub)
# qstat
cmd_qstat = cli_pbs.qstat()
self.program.action(cmd_qstat)
# qdel
cmd_qdel = cli_pbs.qdel()
self.program.action(cmd_qdel)
# # qhost
cmd_qhost = cli_pbs.qhost()
self.program.action(cmd_qhost)
# hack: append options
def append_options(self,arr=None):
arr = arr or sys.argv
if len(arr) > 1:
if arr[1]=='asub':
## for asub
if len(arr)>2 and not arr[2].startswith('-'):
app_name = arr[2]
cli_apps.asub_app_options(app_name, self.cmd_asub)
else:
self.cmd_asub.option("--show_json", MSG['asub']['option']['show_json'])
##############################################
def go(self, arr=None):
if os.getenv('DEBUG'):
self.append_options(arr)
self.program.parse(arr)
else:
try:
self.append_options(arr)
self.program.parse(arr)
except Exception as e:
msg = format(e)
print(red('\n ERROR: %s\n' % msg))
if '()' in msg and 'argument' in msg:
print(red(' add "-h" for more information\n'))
def main():
try:
Cli().go()
except KeyboardInterrupt:
print('\nKeyboardInterrupt')
if __name__ == '__main__':
    main()
/sas_log_viewer-0.2.0-py3-none-any.whl/sas_log_viewer-0.2.0.data/data/share/jupyter/labextensions/sas_log_viewer/static/lib_index_js.f1abfa74696980b7b1f3.js
(self["webpackChunksas_log_viewer"] = self["webpackChunksas_log_viewer"] || []).push([["lib_index_js"],{
/***/ "./lib/iconImport.js":
/*!***************************!*\
!*** ./lib/iconImport.js ***!
\***************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "sasLogIcon": () => (/* binding */ sasLogIcon)
/* harmony export */ });
/* harmony import */ var _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @jupyterlab/ui-components */ "webpack/sharing/consume/default/@jupyterlab/ui-components");
/* harmony import */ var _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _style_icons_Jupyter_Log_svg__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../style/icons/Jupyter_Log.svg */ "./style/icons/Jupyter_Log.svg?966e");
const sasLogIcon = new _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_0__.LabIcon({
name: 'custom-ui-components:sasLog',
svgstr: _style_icons_Jupyter_Log_svg__WEBPACK_IMPORTED_MODULE_1__.default
});
/***/ }),
/***/ "./lib/index.js":
/*!**********************!*\
!*** ./lib/index.js ***!
\**********************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "ISASLogExtension": () => (/* binding */ ISASLogExtension),
/* harmony export */ "saslogExtension": () => (/* binding */ saslogExtension),
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _jupyterlab_application__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @jupyterlab/application */ "webpack/sharing/consume/default/@jupyterlab/application");
/* harmony import */ var _jupyterlab_application__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_application__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @jupyterlab/apputils */ "webpack/sharing/consume/default/@jupyterlab/apputils");
/* harmony import */ var _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__);
/* harmony import */ var _jupyterlab_mainmenu__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @jupyterlab/mainmenu */ "webpack/sharing/consume/default/@jupyterlab/mainmenu");
/* harmony import */ var _jupyterlab_mainmenu__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_mainmenu__WEBPACK_IMPORTED_MODULE_2__);
/* harmony import */ var _jupyterlab_notebook__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @jupyterlab/notebook */ "webpack/sharing/consume/default/@jupyterlab/notebook");
/* harmony import */ var _jupyterlab_notebook__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_notebook__WEBPACK_IMPORTED_MODULE_3__);
/* harmony import */ var _lumino_algorithm__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! @lumino/algorithm */ "webpack/sharing/consume/default/@lumino/algorithm");
/* harmony import */ var _lumino_algorithm__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(_lumino_algorithm__WEBPACK_IMPORTED_MODULE_4__);
/* harmony import */ var _lumino_coreutils__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! @lumino/coreutils */ "webpack/sharing/consume/default/@lumino/coreutils");
/* harmony import */ var _lumino_coreutils__WEBPACK_IMPORTED_MODULE_5___default = /*#__PURE__*/__webpack_require__.n(_lumino_coreutils__WEBPACK_IMPORTED_MODULE_5__);
/* harmony import */ var _lumino_disposable__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! @lumino/disposable */ "webpack/sharing/consume/default/@lumino/disposable");
/* harmony import */ var _lumino_disposable__WEBPACK_IMPORTED_MODULE_6___default = /*#__PURE__*/__webpack_require__.n(_lumino_disposable__WEBPACK_IMPORTED_MODULE_6__);
/* harmony import */ var _lumino_properties__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! @lumino/properties */ "webpack/sharing/consume/default/@lumino/properties");
/* harmony import */ var _lumino_properties__WEBPACK_IMPORTED_MODULE_7___default = /*#__PURE__*/__webpack_require__.n(_lumino_properties__WEBPACK_IMPORTED_MODULE_7__);
/* harmony import */ var _widget__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./widget */ "./lib/widget.js");
/**
* IDs of the commands added by this extension.
*/
var CommandIDs;
(function (CommandIDs) {
CommandIDs.newSASlog = 'saslog:new';
})(CommandIDs || (CommandIDs = {}));
/**
* The token identifying the JupyterLab plugin.
*/
const ISASLogExtension = new _lumino_coreutils__WEBPACK_IMPORTED_MODULE_5__.Token('jupyter.extensions.saslog');
const saslogProp = new _lumino_properties__WEBPACK_IMPORTED_MODULE_7__.AttachedProperty({
create: () => '',
name: 'SASLogTarget'
});
class saslogExtension {
/**
*
*/
constructor(commands) {
this.commands = commands;
}
/**
* Create a new extension object.
*/
createNew(nb, context) {
// Add buttons to toolbar
const buttons = [];
let insertionPoint = -1;
(0,_lumino_algorithm__WEBPACK_IMPORTED_MODULE_4__.find)(nb.toolbar.children(), (tbb, index) => {
if (tbb.hasClass('jp-Notebook-toolbarCellType')) {
insertionPoint = index;
return true;
}
return false;
});
let i = 1;
for (const id of [CommandIDs.newSASlog]) {
const button = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.CommandToolbarButton({ id, commands: this.commands });
button.addClass('jp-saslog-nbtoolbarbutton');
if (insertionPoint >= 0) {
nb.toolbar.insertItem(insertionPoint + i++, this.commands.label(id), button);
}
else {
nb.toolbar.insertAfter('cellType', this.commands.label(id), button);
}
buttons.push(button);
}
return new _lumino_disposable__WEBPACK_IMPORTED_MODULE_6__.DisposableDelegate(() => {
// Cleanup extension here
for (const btn of buttons) {
btn.dispose();
}
});
}
}
/**
* Add the main file view commands to the application's command registry.
*/
function addCommands(app, tracker, saslogTracker, palette, menu) {
const { commands, shell } = app;
/**
* Whether there is an active SAS notebook
*/
function hasKernel() {
var _a, _b, _c;
return (tracker.currentWidget !== null &&
((_c = (_b = (_a = tracker.currentWidget.context.sessionContext) === null || _a === void 0 ? void 0 : _a.session) === null || _b === void 0 ? void 0 : _b.kernel) !== null && _c !== void 0 ? _c : null) !== null &&
tracker.currentWidget.sessionContext.prevKernelName == 'sas');
}
commands.addCommand(CommandIDs.newSASlog, {
label: 'Show SAS Log',
caption: 'Show the SAS log for the associated notebook',
iconClass: 'jp-Icon jp-Icon-16 jp-saslogIcon',
isEnabled: hasKernel,
execute: args => {
var _a, _b, _c;
let notebook;
if (args.path) {
notebook = (_a = tracker.find(nb => nb.context.path === args.path)) !== null && _a !== void 0 ? _a : null;
}
else {
notebook = tracker.currentWidget;
}
if (!notebook) {
return;
}
const widget = new _widget__WEBPACK_IMPORTED_MODULE_8__.SASLogView((_c = (_b = notebook.context.sessionContext) === null || _b === void 0 ? void 0 : _b.session) === null || _c === void 0 ? void 0 : _c.kernel);
widget.title.label = `SAS Log: ${notebook.title.label}`;
notebook.title.changed.connect(() => {
widget.title.label = `SAS Log: ${notebook.title.label}`;
});
const outer = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.MainAreaWidget({ content: widget });
saslogProp.set(widget, notebook.context.path);
notebook.context.pathChanged.connect((_, path) => {
saslogProp.set(widget, path);
saslogTracker.save(outer);
});
saslogTracker.add(outer);
notebook.context.sessionContext.kernelChanged.connect((_, args) => {
widget.model.kernel = args.newValue;
});
shell.add(outer, 'main', { mode: 'split-right' });
if (args['activate'] !== false) {
shell.activateById(outer.id);
}
notebook.disposed.connect(() => {
outer.close();
});
}
});
palette === null || palette === void 0 ? void 0 : palette.addItem({
command: CommandIDs.newSASlog,
category: 'Kernel'
});
menu === null || menu === void 0 ? void 0 : menu.kernelMenu.addGroup([{ command: CommandIDs.newSASlog }]);
}
/**
* Initialization data for the jupyterlab-saslog extension.
*/
const extension = {
id: 'sas-log-viewer:plugin',
autoStart: true,
requires: [_jupyterlab_notebook__WEBPACK_IMPORTED_MODULE_3__.INotebookTracker],
optional: [_jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.ICommandPalette, _jupyterlab_mainmenu__WEBPACK_IMPORTED_MODULE_2__.IMainMenu, _jupyterlab_application__WEBPACK_IMPORTED_MODULE_0__.ILayoutRestorer],
provides: ISASLogExtension,
activate: async (app, tracker, palette, mainMenu, restorer) => {
console.log('SAS log_viewer extension is activated!');
const { commands, docRegistry } = app;
const extension = new saslogExtension(commands);
docRegistry.addWidgetExtension('Notebook', extension);
// Recreate views from layout restorer
const saslogTracker = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.WidgetTracker({
namespace: 'saslog'
});
if (restorer) {
void restorer.restore(saslogTracker, {
command: CommandIDs.newSASlog,
args: widget => ({
path: saslogProp.get(widget.content),
activate: false
}),
name: widget => saslogProp.get(widget.content),
when: tracker.restored
});
}
addCommands(app, tracker, saslogTracker, palette, mainMenu);
function refreshNewCommand() {
commands.notifyCommandChanged(CommandIDs.newSASlog);
}
// Update the command registry when the notebook state changes.
tracker.currentChanged.connect(refreshNewCommand);
let prevWidget = tracker.currentWidget;
if (prevWidget) {
prevWidget.context.sessionContext.kernelChanged.connect(refreshNewCommand);
}
tracker.currentChanged.connect(tracker => {
if (prevWidget) {
prevWidget.context.sessionContext.kernelChanged.disconnect(refreshNewCommand);
}
prevWidget = tracker.currentWidget;
if (prevWidget) {
prevWidget.context.sessionContext.kernelChanged.connect(refreshNewCommand);
}
});
return extension;
}
};
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (extension);
/***/ }),
/***/ "./lib/model.js":
/*!**********************!*\
!*** ./lib/model.js ***!
\**********************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "ThreadIterator": () => (/* binding */ ThreadIterator),
/* harmony export */ "SASLogModel": () => (/* binding */ SASLogModel)
/* harmony export */ });
/* harmony import */ var _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @jupyterlab/apputils */ "webpack/sharing/consume/default/@jupyterlab/apputils");
/* harmony import */ var _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_0__);
const showLogCommand = '%showLog';
const executeRequestMsgType = 'execute_request';
const statusMsgType = 'status';
const idleExecutionState = 'idle';
function isHeader(candidate) {
return candidate.msg_id !== undefined;
}
class ThreadIterator {
constructor(threads, collapsed) {
this._threads = threads;
this._collapsed = collapsed;
this._index = -1;
this._child = null;
}
iter() {
return this;
}
next() {
if (this._child) {
const next = this._child.next();
if (next !== undefined) {
return next;
}
this._child = null;
}
// Move to next thread
++this._index;
if (this._index >= this._threads.length) {
return undefined;
}
const entry = this._threads[this._index];
if (entry.children.length > 0 &&
!this._collapsed[entry.args.msg.header.msg_id]) {
// Iterate over children after this
this._child = new ThreadIterator(entry.children, this._collapsed);
}
return { args: entry.args, hasChildren: entry.children.length > 0 };
}
clone() {
const r = new ThreadIterator(this._threads, this._collapsed);
r._index = this._index;
if (this._child) {
r._child = this._child.clone();
}
return r;
}
}
/**
* Model for a SAS Log.
*/
class SASLogModel extends _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_0__.VDomModel {
constructor(kernel) {
super();
this.requestStarted = '';
this.logRequestStarted = '';
this.logStreamObj = {};
this._log = [];
this._kernel = null;
this._messages = {};
this._childLUT = {};
this._roots = [];
this.kernel = kernel !== null && kernel !== void 0 ? kernel : null;
}
clear() {
this._log.splice(0, this._log.length);
this._messages = {};
this._childLUT = {};
this._roots = [];
this.logStreamObj = {};
this.stateChanged.emit(void 0);
}
get kernel() {
return this._kernel;
}
set kernel(value) {
if (this._kernel) {
this._kernel.anyMessage.disconnect(this.onMessage, this);
}
this._kernel = value;
if (this._kernel) {
this._kernel.anyMessage.connect(this.onMessage, this);
}
}
get log() {
return this._log;
}
get tree() {
return this._roots.map(rootId => {
return this.getThread(rootId, false);
});
}
depth(args) {
if (args === null) {
return -1;
}
let depth = 0;
while ((args = this._findParent(args))) {
++depth;
}
return depth;
}
getThread(msgId, ancestors = true) {
const args = this._messages[msgId];
if (ancestors) {
// Work up to root, then work downwards
let root = args;
let candidate;
while ((candidate = this._findParent(root))) {
root = candidate;
}
return this.getThread(root.msg.header.msg_id, false);
}
const childMessages = this._childLUT[msgId] || [];
const childThreads = childMessages.map(childId => {
return this.getThread(childId, false);
});
const thread = {
args: this._messages[msgId],
children: childThreads
};
return thread;
}
onMessage(sender, args) {
var _a, _b, _c, _d, _e;
const { msg } = args;
console.log(`%c ${msg.header.msg_type}`, 'color: purple; font-weight: bold; font-size: 14px;');
console.log(msg.content);
this._log.push(args);
this._messages[msg.header.msg_id] = args;
const parent = this._findParent(args);
if (parent === null) {
this._roots.push(msg.header.msg_id);
}
else {
const header = parent.msg.header;
this._childLUT[header.msg_id] = this._childLUT[header.msg_id] || [];
this._childLUT[header.msg_id].push(msg.header.msg_id);
}
// Check if execute_request has started
// @ts-ignore
if (msg.header.msg_type === executeRequestMsgType && msg.content.code !== showLogCommand && !this.requestStarted) {
this.requestStarted = msg.header.msg_id;
}
// Check if execute_request has ended
const execRequestHasEnded = this.requestStarted &&
// @ts-ignore
((_a = msg.parent_header) === null || _a === void 0 ? void 0 : _a.msg_id) === this.requestStarted &&
// @ts-ignore
msg.content.execution_state === idleExecutionState && msg.header.msg_type === statusMsgType;
// If execute_request has finished run %showLog command
if (execRequestHasEnded) {
console.log('%c --------- showLog start -------', 'color: red; font-weight: bold;');
// Fetch the log
(_b = this.kernel) === null || _b === void 0 ? void 0 : _b.requestExecute({ code: showLogCommand }, true);
}
// @ts-ignore
const isLogRequest = msg.header.msg_type === executeRequestMsgType && ((_c = msg.content) === null || _c === void 0 ? void 0 : _c.code) === showLogCommand;
// If it's %showLog execute_request
if (isLogRequest) {
this.requestStarted = ''; // reset initial execute_request has started flag
let id = msg.header.msg_id; // get msg_id
this.logRequestStarted = id; // set logRequestStartedId so we can start tracking log streams that will come in future
this.logStreamObj[id] = []; // create array in logStreamObj under msg_id key - this key is later used to identify stream msg.parent_header id
}
// If we have log request stared and msg_type is stream save it in logStreamsObj under parent_header msg_id key
if (this.logRequestStarted && msg.header.msg_type === 'stream') {
// @ts-ignore
let id = (_d = msg.parent_header) === null || _d === void 0 ? void 0 : _d.msg_id;
// @ts-ignore
this.logStreamObj[id].push(msg.content.text);
}
// Check if %showLog has ended
const logRequestHasEnded = this.logRequestStarted &&
// @ts-ignore
((_e = msg.parent_header) === null || _e === void 0 ? void 0 : _e.msg_id) === this.logRequestStarted &&
// @ts-ignore
msg.content.execution_state === idleExecutionState && msg.header.msg_type === statusMsgType;
// If status is Idle and logRequestsStarted this menas that %showLog command has finished
// and logRequestStarted has to be cleared
if (logRequestHasEnded && this.logRequestStarted) {
console.log('%c --------- showLog end -------', 'color: red; font-weight: bold;');
this.logRequestStarted = '';
}
this.stateChanged.emit(undefined);
}
_findParent(args) {
if (isHeader(args.msg.parent_header)) {
return this._messages[args.msg.parent_header.msg_id] || null;
}
return null;
}
}
/***/ }),
/***/ "./lib/widget.js":
/*!***********************!*\
!*** ./lib/widget.js ***!
\***********************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "MessageLogView": () => (/* binding */ MessageLogView),
/* harmony export */ "SASLogView": () => (/* binding */ SASLogView)
/* harmony export */ });
/* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! react */ "webpack/sharing/consume/default/react");
/* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @jupyterlab/apputils */ "webpack/sharing/consume/default/@jupyterlab/apputils");
/* harmony import */ var _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__);
/* harmony import */ var _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! @jupyterlab/ui-components */ "webpack/sharing/consume/default/@jupyterlab/ui-components");
/* harmony import */ var _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(_jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__);
/* harmony import */ var _lumino_coreutils__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @lumino/coreutils */ "webpack/sharing/consume/default/@lumino/coreutils");
/* harmony import */ var _lumino_coreutils__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_lumino_coreutils__WEBPACK_IMPORTED_MODULE_3__);
/* harmony import */ var _lumino_widgets__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! @lumino/widgets */ "webpack/sharing/consume/default/@lumino/widgets");
/* harmony import */ var _lumino_widgets__WEBPACK_IMPORTED_MODULE_4___default = /*#__PURE__*/__webpack_require__.n(_lumino_widgets__WEBPACK_IMPORTED_MODULE_4__);
/* harmony import */ var ansi_to_html__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ansi-to-html */ "webpack/sharing/consume/default/ansi-to-html/ansi-to-html");
/* harmony import */ var ansi_to_html__WEBPACK_IMPORTED_MODULE_5___default = /*#__PURE__*/__webpack_require__.n(ansi_to_html__WEBPACK_IMPORTED_MODULE_5__);
/* harmony import */ var _model__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./model */ "./lib/model.js");
/* harmony import */ var _iconImport__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./iconImport */ "./lib/iconImport.js");
/* harmony import */ var _style_index_css__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ../style/index.css */ "./style/index.css");
const convert = new (ansi_to_html__WEBPACK_IMPORTED_MODULE_5___default())();
/**
* The main view for the SAS log viewer.
*/
class MessageLogView extends _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.VDomRenderer {
constructor(model) {
super(model);
this.collapsedKeys = {};
this.id = `saslog-messagelog-${_lumino_coreutils__WEBPACK_IMPORTED_MODULE_3__.UUID.uuid4()}`;
this.addClass('jp-saslog-messagelog');
}
/**
* Render the extension discovery view using the virtual DOM.
*/
render() {
const logStreamObj = this.model.logStreamObj;
return (react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", null, Object.keys(logStreamObj).map((key) => {
const collapsed = this.collapsedKeys[key];
const collapserIcon = collapsed ? _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__.caretRightIcon : _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__.caretDownIcon;
return react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", { key: key, className: `collapsible ${collapsed ? 'collapsed' : ''}` },
react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", { className: 'log-header' },
react__WEBPACK_IMPORTED_MODULE_0__.createElement("button", { className: `collapser`, onClick: () => {
this.collapsedKeys[key] = !this.collapsedKeys[key];
this.update();
} },
react__WEBPACK_IMPORTED_MODULE_0__.createElement(collapserIcon.react, { className: 'kspy-collapser-icon' })),
react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", { className: 'log-id' }, key)),
react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", { className: 'log-container' }, logStreamObj[key].map((stream, i) => react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", { key: `${key}-${i}` },
react__WEBPACK_IMPORTED_MODULE_0__.createElement("div", { dangerouslySetInnerHTML: { __html: convert.toHtml(stream) } })))));
})));
}
collapseAll() {
for (const key in this.model.logStreamObj) {
this.collapsedKeys[key] = true;
}
this.update();
}
expandAll() {
this.collapsedKeys = {};
this.update();
}
}
/**
* The main view for the SAS Log viewer.
*/
class SASLogView extends _lumino_widgets__WEBPACK_IMPORTED_MODULE_4__.Widget {
constructor(kernel) {
super();
this._model = new _model__WEBPACK_IMPORTED_MODULE_7__.SASLogModel(kernel);
this.addClass('jp-saslog-view');
this.id = `saslog-${_lumino_coreutils__WEBPACK_IMPORTED_MODULE_3__.UUID.uuid4()}`;
this.title.label = 'SAS Log';
this.title.closable = true;
this.title.icon = _iconImport__WEBPACK_IMPORTED_MODULE_8__.sasLogIcon;
const layout = (this.layout = new _lumino_widgets__WEBPACK_IMPORTED_MODULE_4__.BoxLayout());
this._toolbar = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.Toolbar();
this._toolbar.addClass('jp-saslog-toolbar');
this._messagelog = new MessageLogView(this._model);
layout.addWidget(this._toolbar);
layout.addWidget(this._messagelog);
_lumino_widgets__WEBPACK_IMPORTED_MODULE_4__.BoxLayout.setStretch(this._toolbar, 0);
_lumino_widgets__WEBPACK_IMPORTED_MODULE_4__.BoxLayout.setStretch(this._messagelog, 1);
this.collapseAllButton = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.ToolbarButton({
onClick: () => {
this._messagelog.collapseAll();
},
className: 'jp-saslog-collapseAll',
icon: _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__.caretRightIcon,
tooltip: 'Collapse all threads'
});
this._toolbar.addItem('collapse-all', this.collapseAllButton);
this.expandAllButton = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.ToolbarButton({
onClick: () => {
this._messagelog.expandAll();
},
className: 'jp-saslog-expandAll',
icon: _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__.caretDownIcon,
tooltip: 'Expand all threads'
});
this._toolbar.addItem('expand-all', this.expandAllButton);
this.clearAllButton = new _jupyterlab_apputils__WEBPACK_IMPORTED_MODULE_1__.ToolbarButton({
onClick: () => {
this._model.clear();
},
className: 'jp-saslog-clearAll',
icon: _jupyterlab_ui_components__WEBPACK_IMPORTED_MODULE_2__.closeIcon,
tooltip: 'Clear all threads'
});
this._toolbar.addItem('clear-all', this.clearAllButton);
}
/**
* Handle `'activate-request'` messages.
*/
onActivateRequest(msg) {
if (!this.node.contains(document.activeElement)) {
this.collapseAllButton.node.focus();
}
}
get model() {
return this._model;
}
}
/***/ }),
/***/ "./node_modules/css-loader/dist/cjs.js!./style/base.css":
/*!**************************************************************!*\
!*** ./node_modules/css-loader/dist/cjs.js!./style/base.css ***!
\**************************************************************/
/***/ ((module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../node_modules/css-loader/dist/runtime/cssWithMappingToString.js */ "./node_modules/css-loader/dist/runtime/cssWithMappingToString.js");
/* harmony import */ var _node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../node_modules/css-loader/dist/runtime/api.js */ "./node_modules/css-loader/dist/runtime/api.js");
/* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__);
/* harmony import */ var _node_modules_css_loader_dist_runtime_getUrl_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../node_modules/css-loader/dist/runtime/getUrl.js */ "./node_modules/css-loader/dist/runtime/getUrl.js");
/* harmony import */ var _node_modules_css_loader_dist_runtime_getUrl_js__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_getUrl_js__WEBPACK_IMPORTED_MODULE_2__);
/* harmony import */ var _icons_Jupyter_Log_svg__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./icons/Jupyter_Log.svg */ "./style/icons/Jupyter_Log.svg?7594");
/* harmony import */ var _icons_Jupyter_Log_svg__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_icons_Jupyter_Log_svg__WEBPACK_IMPORTED_MODULE_3__);
// Imports
var ___CSS_LOADER_EXPORT___ = _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default()((_node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0___default()));
var ___CSS_LOADER_URL_REPLACEMENT_0___ = _node_modules_css_loader_dist_runtime_getUrl_js__WEBPACK_IMPORTED_MODULE_2___default()((_icons_Jupyter_Log_svg__WEBPACK_IMPORTED_MODULE_3___default()));
// Module
___CSS_LOADER_EXPORT___.push([module.id, ".jp-saslog-view .jp-saslog-toolbar {\n border-bottom: var(--jp-border-width) solid var(--jp-toolbar-border-color);\n padding: 2px;\n min-height: var(--jp-private-notebook-panel-toolbar-height);\n box-shadow: var(--jp-toolbar-box-shadow);\n background: var(--jp-toolbar-background);\n z-index: 1;\n }\n \n .jp-saslog-view .jp-saslog-messagelog {\n padding: 8px;\n overflow: auto;\n white-space: nowrap;\n font-family: var(--jp-code-font-family);\n font-size: var(--jp-code-font-size);\n line-height: var(--jp-code-line-height);\n color: var(--jp-content-font-color1);\n background-color: var(--jp-layout-color0);\n display: grid;\n grid-template-columns: max-content auto;\n grid-column-gap: 12px;\n grid-row-gap: 2px;\n align-content: start;\n }\n \n .jp-saslog-view .jp-saslog-logheader {\n font-family: var(--jp-ui-font-family);\n font-size: var(--jp-ui-font-size1);\n line-height: 1;\n }\n \n .jp-saslog-view .jp-saslog-divider {\n grid-column-end: span 2;\n border-bottom: var(--jp-border-width) solid var(--jp-border-color2);\n padding-top: 2px;\n margin-bottom: 3px;\n }\n \n .jp-saslog-view .jp-saslog-divider.jp-saslog-logheader {\n border-bottom: var(--jp-border-width) solid var(--jp-border-color0);\n }\n \n button.jp-saslog-threadcollapser {\n background-color: transparent;\n border: none;\n }\n \n .jp-saslogIcon {\n background-image: var(--jp-icon-json);\n }\n\n .jp-saslogIcon {\n background-image: url(" + ___CSS_LOADER_URL_REPLACEMENT_0___ + ");\n }\n \n .jp-saslog-nbtoolbarbutton .jp-ToolbarButtonComponent-label {\n display: none;\n }\n \n .kspy-collapser-icon {\n padding: 0;\n }\n \n .kspy-collapser-icon svg {\n vertical-align: middle;\n }\n ", "",{"version":3,"sources":["webpack://./style/base.css"],"names":[],"mappings":"AAAA;IACI,0EAA0E;IAC1E,YAAY;IACZ,2DAA2D;IAC3D,wCAAwC;IACxC,wCAAwC;IACxC,UAAU;EACZ;;EAEA;IACE,YAAY;IACZ,cAAc;IACd,mBAAmB;IACnB,uCAAuC;IACvC,mCAAmC;IACnC,uCAAuC;IACvC,oCAAoC;IACpC,yCAAyC;IACzC,aAAa;IACb,uCAAuC;IACvC,qBAAqB;IACrB,iBAAiB;IACjB,oBAAoB;EACtB;;EAEA;IACE,qCAAqC;IACrC,kCAAkC;IAClC,cAAc;EAChB;;EAEA;IACE,uBAAuB;IACvB,mEAAmE;IACnE,gBAAgB;IAChB,kBAAkB;EACpB;;EAEA;IACE,mEAAmE;EACrE;;EAEA;IACE,6BAA6B;IAC7B,YAAY;EACd;;EAEA;IACE,qCAAqC;EACvC;;EAEA;IACE,yDAAqD;EACvD;;EAEA;IACE,aAAa;EACf;;EAEA;IACE,UAAU;EACZ;;EAEA;IACE,sBAAsB;EACxB","sourcesContent":[".jp-saslog-view .jp-saslog-toolbar {\n border-bottom: var(--jp-border-width) solid var(--jp-toolbar-border-color);\n padding: 2px;\n min-height: var(--jp-private-notebook-panel-toolbar-height);\n box-shadow: var(--jp-toolbar-box-shadow);\n background: var(--jp-toolbar-background);\n z-index: 1;\n }\n \n .jp-saslog-view .jp-saslog-messagelog {\n padding: 8px;\n overflow: auto;\n white-space: nowrap;\n font-family: var(--jp-code-font-family);\n font-size: var(--jp-code-font-size);\n line-height: var(--jp-code-line-height);\n color: var(--jp-content-font-color1);\n background-color: var(--jp-layout-color0);\n display: grid;\n grid-template-columns: max-content auto;\n grid-column-gap: 12px;\n grid-row-gap: 2px;\n align-content: start;\n }\n \n .jp-saslog-view .jp-saslog-logheader {\n font-family: var(--jp-ui-font-family);\n font-size: var(--jp-ui-font-size1);\n line-height: 1;\n }\n \n .jp-saslog-view .jp-saslog-divider {\n grid-column-end: span 2;\n border-bottom: var(--jp-border-width) solid var(--jp-border-color2);\n padding-top: 2px;\n margin-bottom: 3px;\n }\n \n .jp-saslog-view .jp-saslog-divider.jp-saslog-logheader {\n border-bottom: var(--jp-border-width) solid 
var(--jp-border-color0);\n }\n \n button.jp-saslog-threadcollapser {\n background-color: transparent;\n border: none;\n }\n \n .jp-saslogIcon {\n background-image: var(--jp-icon-json);\n }\n\n .jp-saslogIcon {\n background-image: url(../style/icons/Jupyter_Log.svg);\n }\n \n .jp-saslog-nbtoolbarbutton .jp-ToolbarButtonComponent-label {\n display: none;\n }\n \n .kspy-collapser-icon {\n padding: 0;\n }\n \n .kspy-collapser-icon svg {\n vertical-align: middle;\n }\n "],"sourceRoot":""}]);
// Exports
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (___CSS_LOADER_EXPORT___);
/***/ }),
/***/ "./node_modules/css-loader/dist/cjs.js!./style/index.css":
/*!***************************************************************!*\
!*** ./node_modules/css-loader/dist/cjs.js!./style/index.css ***!
\***************************************************************/
/***/ ((module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../node_modules/css-loader/dist/runtime/cssWithMappingToString.js */ "./node_modules/css-loader/dist/runtime/cssWithMappingToString.js");
/* harmony import */ var _node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../node_modules/css-loader/dist/runtime/api.js */ "./node_modules/css-loader/dist/runtime/api.js");
/* harmony import */ var _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1__);
/* harmony import */ var _node_modules_css_loader_dist_cjs_js_base_css__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! -!../node_modules/css-loader/dist/cjs.js!./base.css */ "./node_modules/css-loader/dist/cjs.js!./style/base.css");
// Imports
var ___CSS_LOADER_EXPORT___ = _node_modules_css_loader_dist_runtime_api_js__WEBPACK_IMPORTED_MODULE_1___default()((_node_modules_css_loader_dist_runtime_cssWithMappingToString_js__WEBPACK_IMPORTED_MODULE_0___default()));
___CSS_LOADER_EXPORT___.i(_node_modules_css_loader_dist_cjs_js_base_css__WEBPACK_IMPORTED_MODULE_2__.default);
// Module
___CSS_LOADER_EXPORT___.push([module.id, "\n", "",{"version":3,"sources":[],"names":[],"mappings":"","sourceRoot":""}]);
// Exports
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (___CSS_LOADER_EXPORT___);
/***/ }),
/***/ "./style/icons/Jupyter_Log.svg?966e":
/*!*************************************!*\
!*** ./style/icons/Jupyter_Log.svg ***!
\*************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!-- Generator: Adobe Illustrator 24.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->\n<svg version=\"1.1\" id=\"Log\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" x=\"0px\" y=\"0px\"\n\t viewBox=\"0 0 16 16\" style=\"enable-background:new 0 0 16 16;\" xml:space=\"preserve\">\n<style type=\"text/css\">\n\t.st0{fill:none;}\n\t.st1{fill:#5D5D5D;}\n</style>\n<rect id=\"svgToolTipPane\" y=\"1\" class=\"st0\" width=\"14\" height=\"14\"/>\n<path id=\"iconColor\" class=\"st1\" d=\"M13,1H3C2.5,1,2,1.5,2,2v12c0,0.6,0.5,1,1,1h10c0.6,0,1-0.4,1-1V2C14,1.5,13.6,1,13,1z M13,14H3\n\tV3h10V14z M12,8H4V7h8V8z M12,5H4v1h8V5z M12,9H4v1h8V9z M12,11H4v1h8V11z\"/>\n</svg>\n");
/***/ }),
/***/ "./style/index.css":
/*!*************************!*\
!*** ./style/index.css ***!
\*************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _node_modules_style_loader_dist_runtime_injectStylesIntoStyleTag_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! !../node_modules/style-loader/dist/runtime/injectStylesIntoStyleTag.js */ "./node_modules/style-loader/dist/runtime/injectStylesIntoStyleTag.js");
/* harmony import */ var _node_modules_style_loader_dist_runtime_injectStylesIntoStyleTag_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_node_modules_style_loader_dist_runtime_injectStylesIntoStyleTag_js__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _node_modules_css_loader_dist_cjs_js_index_css__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! !!../node_modules/css-loader/dist/cjs.js!./index.css */ "./node_modules/css-loader/dist/cjs.js!./style/index.css");
var options = {};
options.insert = "head";
options.singleton = false;
var update = _node_modules_style_loader_dist_runtime_injectStylesIntoStyleTag_js__WEBPACK_IMPORTED_MODULE_0___default()(_node_modules_css_loader_dist_cjs_js_index_css__WEBPACK_IMPORTED_MODULE_1__.default, options);
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_node_modules_css_loader_dist_cjs_js_index_css__WEBPACK_IMPORTED_MODULE_1__.default.locals || {});
/***/ }),
/***/ "./style/icons/Jupyter_Log.svg?7594":
/*!*************************************!*\
!*** ./style/icons/Jupyter_Log.svg ***!
\*************************************/
/***/ ((module) => {
module.exports = "data:image/svg+xml,%3C!-- Generator: Adobe Illustrator 24.0.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) --%3E %3Csvg version='1.1' id='Log' xmlns='http://www.w3.org/2000/svg' xmlns:xlink='http://www.w3.org/1999/xlink' x='0px' y='0px' viewBox='0 0 16 16' style='enable-background:new 0 0 16 16;' xml:space='preserve'%3E %3Cstyle type='text/css'%3E .st0%7Bfill:none;%7D .st1%7Bfill:%235D5D5D;%7D %3C/style%3E %3Crect id='svgToolTipPane' y='1' class='st0' width='14' height='14'/%3E %3Cpath id='iconColor' class='st1' d='M13,1H3C2.5,1,2,1.5,2,2v12c0,0.6,0.5,1,1,1h10c0.6,0,1-0.4,1-1V2C14,1.5,13.6,1,13,1z M13,14H3 V3h10V14z M12,8H4V7h8V8z M12,5H4v1h8V5z M12,9H4v1h8V9z M12,11H4v1h8V11z'/%3E %3C/svg%3E"
/***/ })
}]);
//# sourceMappingURL=lib_index_js.f1abfa74696980b7b1f3.js.map | PypiClean |
/symphony_bdk_python-2.6.1-py3-none-any.whl/symphony/bdk/gen/group_model/status.py | import re # noqa: F401
import sys # noqa: F401
from typing import List, Union
from symphony.bdk.gen.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from symphony.bdk.gen.exceptions import ApiAttributeError
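# Illustrative usage sketch: Status wraps a single enum-like string value. The
# import path follows this package's layout and the literals come from the
# allowed_values mapping defined in the class below; construction details may
# vary with the generated runtime.
#
#   from symphony.bdk.gen.group_model.status import Status
#
#   active = Status("ACTIVE")           # value passed positionally
#   deleted = Status(value="DELETED")   # or as the 'value' keyword
#   assert active.value == "ACTIVE"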
class Status(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('value',): {
'ACTIVE': "ACTIVE",
'DELETED': "DELETED",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a group_model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'value': (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {}
read_only_vars = set()
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""Status - a group_model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str): Status flag to distinguish between active and deleted objects., must be one of ["ACTIVE", "DELETED", ] # noqa: E501
Keyword Args:
value (str): Status flag to distinguish between active and deleted objects., must be one of ["ACTIVE", "DELETED", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the group_model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value: str = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
"""Status - a group_model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str): Status flag to distinguish between active and deleted objects., must be one of ["ACTIVE", "DELETED", ] # noqa: E501
Keyword Args:
value (str): Status flag to distinguish between active and deleted objects., must be one of ["ACTIVE", "DELETED", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the group_model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
# required up here when default value is not given
_path_to_item = kwargs.pop('_path_to_item', ())
self = super(OpenApiModel, cls).__new__(cls)
if 'value' in kwargs:
value = kwargs.pop('value')
elif args:
args = list(args)
value = args.pop(0)
else:
raise ApiTypeError(
"value is required, but not passed in args or kwargs and doesn't have default",
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.value = value
if kwargs:
raise ApiTypeError(
"Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
kwargs,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
return self | PypiClean |
/django_outbox_base_layout-1.2.tar.gz/django_outbox_base_layout-1.2/outbox_base_layout/static/bower_components/jquery/src/queue.js | define([
"./core",
"./data/var/data_priv",
"./deferred",
"./callbacks"
], function( jQuery, data_priv ) {
jQuery.extend({
queue: function( elem, type, data ) {
var queue;
if ( elem ) {
type = ( type || "fx" ) + "queue";
queue = data_priv.get( elem, type );
// Speed up dequeue by getting out quickly if this is just a lookup
if ( data ) {
if ( !queue || jQuery.isArray( data ) ) {
queue = data_priv.access( elem, type, jQuery.makeArray(data) );
} else {
queue.push( data );
}
}
return queue || [];
}
},
dequeue: function( elem, type ) {
type = type || "fx";
var queue = jQuery.queue( elem, type ),
startLength = queue.length,
fn = queue.shift(),
hooks = jQuery._queueHooks( elem, type ),
next = function() {
jQuery.dequeue( elem, type );
};
// If the fx queue is dequeued, always remove the progress sentinel
if ( fn === "inprogress" ) {
fn = queue.shift();
startLength--;
}
if ( fn ) {
// Add a progress sentinel to prevent the fx queue from being
// automatically dequeued
if ( type === "fx" ) {
queue.unshift( "inprogress" );
}
// Clear up the last queue stop function
delete hooks.stop;
fn.call( elem, next, hooks );
}
if ( !startLength && hooks ) {
hooks.empty.fire();
}
},
// Not public - generate a queueHooks object, or return the current one
_queueHooks: function( elem, type ) {
var key = type + "queueHooks";
return data_priv.get( elem, key ) || data_priv.access( elem, key, {
empty: jQuery.Callbacks("once memory").add(function() {
data_priv.remove( elem, [ type + "queue", key ] );
})
});
}
});
jQuery.fn.extend({
queue: function( type, data ) {
var setter = 2;
if ( typeof type !== "string" ) {
data = type;
type = "fx";
setter--;
}
if ( arguments.length < setter ) {
return jQuery.queue( this[0], type );
}
return data === undefined ?
this :
this.each(function() {
var queue = jQuery.queue( this, type, data );
// Ensure a hooks for this queue
jQuery._queueHooks( this, type );
if ( type === "fx" && queue[0] !== "inprogress" ) {
jQuery.dequeue( this, type );
}
});
},
dequeue: function( type ) {
return this.each(function() {
jQuery.dequeue( this, type );
});
},
clearQueue: function( type ) {
return this.queue( type || "fx", [] );
},
// Get a promise resolved when queues of a certain type
// are emptied (fx is the type by default)
promise: function( type, obj ) {
var tmp,
count = 1,
defer = jQuery.Deferred(),
elements = this,
i = this.length,
resolve = function() {
if ( !( --count ) ) {
defer.resolveWith( elements, [ elements ] );
}
};
if ( typeof type !== "string" ) {
obj = type;
type = undefined;
}
type = type || "fx";
while ( i-- ) {
tmp = data_priv.get( elements[ i ], type + "queueHooks" );
if ( tmp && tmp.empty ) {
count++;
tmp.empty.add( resolve );
}
}
resolve();
return defer.promise( obj );
}
});
return jQuery;
}); | PypiClean |
/pyrsl-2.1.1-py3-none-any.whl/rsl/runtime.py | import sys
import os
import stat
import string
import subprocess
import datetime
import logging
import re
import difflib
import getpass
from functools import partial
import rsl.version
import xtuml
try:
from future_builtins import filter
except ImportError:
pass
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
logger = logging.getLogger(__name__)
class RuntimeException(Exception):
pass
class Info(object):
'''
Helper class for providing access to the built-in
substitution variables "${info.date}" et.al.
'''
arch_file_path = ''
arch_file_line = 0
def __init__(self, metamodel):
self.metamodel = metamodel
@property
def arch_file_name(self):
return os.path.basename(self.arch_file_path)
@property
def arch_folder_path(self):
return os.path.dirname(self.arch_file_path)
@property
def date(self):
now = datetime.datetime.now()
now = datetime.datetime.ctime(now)
return now
@property
def unique_num(self):
return next(self.metamodel.id_generator)
@property
def user_id(self):
return getpass.getuser()
@property
def interpreter_version(self):
return rsl.version.complete_string
@property
def interpreter_platform(self):
return os.name
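# Illustrative sketch: the properties above back the built-in "${info.date}" style
# substitution variables mentioned in the docstring. At the Python level they are
# plain attributes on an Info instance created with the same metamodel as the
# Runtime:
#
#   info = Info(metamodel)
#   info.date                 # e.g. 'Mon Jan  1 12:00:00 2024'
#   info.user_id              # name of the current user
#   info.interpreter_version  # pyrsl version string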
class MetaFragment(type):
cache = dict()
attributes = list()
class Fragment(xtuml.Class):
__metaclass__ = MetaFragment
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
xtuml.Class.__init__(self)
def __str__(self):
return str(self.__dict__)
__repr__ = __str__
class Runtime(object):
bridges = dict()
string_formatters = dict()
def __init__(self, metamodel, emit=None, force=False, diff=None):
self.metamodel = metamodel
self.emit = emit
self.force_emit = force
self.diff = diff
self.functions = dict()
self.buffer = StringIO()
self.include_cache = dict()
self.info = Info(metamodel)
def format_string(self, expr, fmt):
def swap_rc_with_cr(formats):
'''
The removal of whitespace should occur after the capitalization
has taken place in the case of the CR or RC combination.
'''
for i in range(len(formats) - 1):
if formats[i].lower() == 'r' and formats[i+1].lower() == 'c':
formats[i], formats[i+1] = formats[i+1], formats[i]
def apply_formats(s, formats):
for formatter in formats:
try:
f = self.string_formatters[formatter.lower()]
except KeyError:
raise RuntimeException('%s is not a valid string formatter' % formatter)
s = f(s)
return s
s = '%s' % expr
swap_rc_with_cr(fmt)
s = apply_formats(s, [f for f in fmt if f[0] == 't'])
s = apply_formats(s, [f for f in fmt if f[0] != 't'])
return s
@staticmethod
def parse_keyword(expr, keyword):
regexp = re.compile(keyword + ":([^\n]*)")
result = regexp.search(expr)
if result:
return result.groups()[0].strip()
else:
return ''
def define_function(self, name, fn):
self.functions[name] = fn
def invoke_function(self, name, args):
if name in self.functions:
fn = self.functions[name]
elif name in self.bridges:
fn = self.bridges[name]
else:
raise RuntimeException("Function '%s' is undefined" % name)
previous_buffer = self.buffer
self.buffer = StringIO()
d = fn(*args)
return_values = dict({'body': self.buffer.getvalue()})
self.buffer.close()
self.buffer = previous_buffer
for key, value in d.items():
if key.lower().startswith("attr_"):
key = key.split("_", 1)[1]
return_values[key] = value
return Fragment(**return_values)
def invoke_print(self, value, prefix='INFO'):
sys.stdout.write("%s: %d: %s: %s\n" % (self.info.arch_file_name,
self.info.arch_file_line,
prefix,
value))
@staticmethod
def invoke_exit(exit_code):
sys.exit(exit_code)
@staticmethod
def cast_to_set(value):
if not isinstance(value, xtuml.QuerySet):
return xtuml.QuerySet([value])
else:
return value
def buffer_literal(self, literal):
if literal.endswith('\\' * 3):
self.buffer.write(literal[:-2])
elif literal.endswith('\\' * 2):
self.buffer.write(literal[:-1])
self.buffer.write('\n')
elif literal.endswith('\\'):
self.buffer.write(literal[:-1])
elif literal.endswith('\n'):
self.buffer.write(literal)
else:
self.buffer.write(literal)
self.buffer.write('\n')
def append_diff(self, filename, org, buf):
org = org.splitlines(1)
buf = buf.splitlines(1)
fromfile = filename
tofile = filename
if os.path.exists(filename):
fromdate = os.path.getctime(filename)
fromdate = str(datetime.datetime.fromtimestamp(fromdate))
todate = str(datetime.datetime.now())
else:
fromdate = ''
todate = ''
diff = difflib.unified_diff(org, buf, fromfile, tofile, fromdate, todate)
with open(self.diff, 'a') as f:
f.write(''.join(diff))
def emit_buffer(self, filename):
org = ''
buf = self.buffer.getvalue()
self.clear_buffer()
if buf and not buf.endswith('\n'):
buf += '\n'
filename = os.path.normpath(filename)
if os.path.exists(filename):
with open(filename, 'r') as f:
org = f.read()
if self.emit == 'never':
do_write = False
elif self.emit == 'change' and org == buf:
do_write = False
else:
do_write = True
if self.diff:
self.append_diff(filename, org, buf)
if do_write and self.force_emit and os.path.exists(filename):
st = os.stat(filename)
os.chmod(filename, st.st_mode | stat.S_IWRITE)
if do_write:
dirname = os.path.dirname(filename)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
if os.path.exists(filename):
self.invoke_print("File '%s' REPLACED" % filename)
else:
self.invoke_print("File '%s' CREATED" % filename)
with open(filename, 'w+') as f:
f.write(buf)
def clear_buffer(self):
self.buffer.close()
self.buffer = StringIO()
def new(self, key_letter):
return self.metamodel.new(key_letter)
def chain(self, inst):
return xtuml.navigate_many(inst)
def select_any_from(self, key_letter, where_cond):
return self.metamodel.select_any(key_letter, where_cond)
def select_many_from(self, key_letter, where_cond, order_by):
return self.metamodel.select_many(key_letter, where_cond, order_by)
@staticmethod
def select_many_in(inst_set, where_cond, order_by):
s = filter(where_cond, inst_set)
if order_by:
s = order_by(s)
return xtuml.QuerySet(s)
@staticmethod
def select_any_in(inst_set, where_cond):
for inst in iter(inst_set):
if where_cond(inst):
return inst
@staticmethod
def select_one_in(inst_set, where_cond):
inst_set = xtuml.QuerySet(inst_set)
cardinality = Runtime.cardinality(inst_set)
if cardinality > 1:
raise RuntimeException('select one from a set with cardinality %d' %
cardinality)
return Runtime.select_any_in(inst_set, where_cond)
@staticmethod
def cardinality(arg):
if Runtime.is_set(arg):
return len(arg)
if Runtime.is_instance(arg):
return 1
return 0
@staticmethod
def empty(arg):
return Runtime.cardinality(arg) == 0
@staticmethod
def not_empty(arg):
return Runtime.cardinality(arg) != 0
@staticmethod
def first(inst, inst_set):
if Runtime.is_instance(inst) and Runtime.is_set(inst_set):
return inst == inst_set.first
@staticmethod
def not_first(inst, inst_set):
if Runtime.is_instance(inst) and Runtime.is_set(inst_set):
return inst != inst_set.first
@staticmethod
def last(inst, inst_set):
if Runtime.is_instance(inst) and Runtime.is_set(inst_set):
return inst == inst_set.last
@staticmethod
def not_last(inst, inst_set):
if Runtime.is_instance(inst) and Runtime.is_set(inst_set):
return inst != inst_set.last
@staticmethod
def is_set(inst):
return isinstance(inst, xtuml.QuerySet)
@staticmethod
def is_instance(inst):
return isinstance(inst, xtuml.Class)
    def assert_type(self, expected_type, value):
        value_type = self.type_name(type(value))
        if expected_type.name.upper() != value_type.upper():
            raise RuntimeException('expected type %s, not %s' %
                                   (expected_type.name, value_type))
        if not expected_type.kind:
            return
        value_kind = self.type_kind(value)
        if value_kind and expected_type.kind.upper() != value_kind.upper():
            raise RuntimeException('expected kind %s, not %s' %
                                   (expected_type.kind, value_kind))
def type_name(self, ty):
if issubclass(ty, bool): return 'boolean'
elif issubclass(ty, int): return 'integer'
elif issubclass(ty, float): return 'real'
elif issubclass(ty, str): return 'string'
elif issubclass(ty, Fragment): return 'frag_ref'
elif issubclass(ty, xtuml.Class): return 'inst_ref'
elif issubclass(ty, type(None)): return 'inst_ref'
elif issubclass(ty, xtuml.QuerySet): return 'inst_ref_set'
elif issubclass(ty, type(self.metamodel.id_generator.peek())): return 'unique_id'
else: raise RuntimeException("Unknown type '%s'" % ty.__name__)
def type_kind(self, value):
if isinstance(value, xtuml.QuerySet):
value = value.first
if isinstance(value, xtuml.Class):
return value.__metaclass__.kind
class Bridge(object):
'''
Decorator for adding bridges to the Runtime class.
'''
cls = None
name = None
def __init__(self, name, cls=None):
self.name = name
self.cls = cls
def __call__(self, f):
cls = self.cls or Runtime
name = self.name or f.__name__
def wrapper(*args):
res = {}
rc = f(*args) or {}
for key, value in rc.items():
res['attr_%s' % key] = value
return res
cls.bridges[name] = wrapper
return f
bridge = Bridge
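# Illustrative sketch: registering a custom bridge. The bridge name and return key
# below are invented for the example; any key returned by the wrapped function is
# exposed as an attribute on the Fragment returned by Runtime.invoke_function.
#
#   @bridge('STRING_REVERSE')
#   def string_reverse(value):
#       return {'result': value[::-1]}
#
#   # rt = Runtime(metamodel)
#   # rt.invoke_function('STRING_REVERSE', ['hello']).result  ->  'olleh'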
@bridge('GET_ENV_VAR')
def get_env_var(name):
if name in os.environ:
result = os.environ[name]
success = True
else:
result = ''
success = False
return {'success': success,
'result': result}
@bridge('PUT_ENV_VAR')
def put_env_var(value, name):
os.environ[name] = value
return {'success': name in os.environ}
@bridge('SHELL_COMMAND')
def shell_command(cmd):
return {'result': subprocess.call(cmd, shell=True)}
@bridge('FILE_READ')
def file_read(filename):
try:
with open(filename, 'r') as f:
result = f.read()
success = True
except:
success = False
result = ''
return {'success': success,
'result': result}
@bridge('FILE_WRITE')
def file_write(contents, filename):
try:
with open(filename, 'w') as f:
f.write('%s\n' % contents)
success = True
except:
success = False
return {'success': success}
@bridge('STRING_TO_INTEGER')
def string_to_integer(value):
try:
return {'result': int(value.strip())}
except:
raise RuntimeException('Unable to convert the string "%s" to an integer' % value)
@bridge('STRING_TO_REAL')
def string_to_real(value):
try:
return {'result': float(value.strip())}
except:
raise RuntimeException('Unable to convert the string "%s" to a real' % value)
@bridge('INTEGER_TO_STRING')
def integer_to_string(value):
return {'result': str(value)}
@bridge('REAL_TO_STRING')
def real_to_string(value):
return {'result': str(value)}
@bridge('BOOLEAN_TO_STRING')
def boolean_to_string(value):
return {'result': str(value).upper()}
class StringFormatter(object):
'''
Decorator for adding string formatters to the Runtime class.
'''
cls = None
name = None
def __init__(self, name, cls=None):
self.name = name
self.cls = cls
def __call__(self, f):
cls = self.cls or Runtime
name = self.name or f.__name__
cls.string_formatters[name] = f
return f
string_formatter = StringFormatter
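# Illustrative sketch: formatters registered this way are looked up by name in
# Runtime.format_string and applied in sequence. The 'snake' name below is invented
# for the example; the built-in formatters follow.
#
#   @string_formatter('snake')
#   def snake_case(value):
#       return value.strip().lower().replace(' ', '_')
#
#   # rt.format_string('Some Name', ['u', '_'])  ->  'SOME_NAME'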
@string_formatter('o')
def camelcase(value):
'''
Make the first word all lower case, make the first character of each
following word capitalized and all other characters of the words lower
case. Characters other than a-Z a-z 0-9 are ignored.
'''
whitespace_regexp = re.compile(r'\s+')
nonword_regexp = re.compile(r'[^\w]')
value = value.replace('_', ' ')
value = value.title()
value = re.sub(nonword_regexp, '', value)
value = re.sub(whitespace_regexp, '', value)
if value:
value = value[0].lower() + value[1:]
return value
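# Worked examples for the formatter above:
#
#   camelcase('Shape Model')     ->  'shapeModel'
#   camelcase('shape_model')     ->  'shapeModel'
#   camelcase('SHAPE model 2D')  ->  'shapeModel2D'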
@string_formatter('u')
def uppercase(value):
'Make all characters in value upper case'
return value.upper()
@string_formatter('l')
def lowercase(value):
'Make all characters in value lower case'
return value.lower()
@string_formatter('c')
def capitalize(value):
'''
Make the first character of each word in value capitalized and all other
characters of a word lower case.
'''
return string.capwords(value)
@string_formatter('_')
def underscore(value):
'Change all white space characters in value to underscore characters'
regexp = re.compile(r'\s+')
return re.sub(regexp, '_', value)
@string_formatter('r')
def remove_whitespace(value):
'Remove all white space characters in value'
regexp = re.compile(r'\s+')
return re.sub(regexp, '', value)
@string_formatter('t')
def default_user_translator(value):
'''
Default user supplied translate format function. No translation is made.
Generally, this rule is overridden by a user.
'''
return value
@string_formatter('tnosplat')
def remove_splat(value):
'''
Removes *'s (splats). This can be used to remove the * character found
in polymorphic events expressed in the BridgePoint meta model.
'''
return value.replace('*', '')
@string_formatter('t2tick')
def escape_single_quote(value):
'Replace all occurrences of a single quote with two single quotes'
return value.replace("'", "''")
@string_formatter('tnonl')
def linebreak_to_space(value):
'Replace all occurrences of a line break with a white space'
return value.replace('\n', ' ')
@string_formatter('tu2d')
def underscore_to_dash(value):
'Replace all occurrences of an underscore with a dash'
return value.replace('_', '-')
@string_formatter('td2u')
def dash_to_underscore(value):
'Replace all occurrences of a dash with an underscore'
return value.replace('-', '_')
@string_formatter('tstrsep_')
def remove_underscore_suffix(value):
'Remove all characters following an underscore'
return value.split('_', 1)[0]
@string_formatter('t_strsep')
def remove_underscore_prefix(value):
'Remove all characters preceding an underscore'
try:
return value.split('_', 1)[1]
except IndexError:
return ''
@string_formatter('txmlclean')
def xml_clean(value):
'Replace reserved XML characters with XML entities'
return (value.replace("&", "&")
.replace("<", "<")
.replace(">", ">"))
@string_formatter('txmlquot')
def xml_quot(value):
'Add quotes to a string intended to be used as an xml attribute'
if "'" in value:
return '"%s"' % value
else:
return "'%s'" % value
@string_formatter('txmlname')
def xml_name(value):
'Replace illegal characters in an XML name with an underscore'
regexp = re.compile(r'(^[^\w_])|[^\w_.-]')
return re.sub(regexp, '_', value)
class NavigationParser(StringFormatter):
'''
Decorator for adding navigation formatters to the Runtime class.
'''
regexp = re.compile(r"(\s*->\s*([\w]+)\[[Rr](\d+)(?:\.\'([^\']+)\')?\]\s*)")
def parse_string(self, f, value):
result = self.regexp.search(value)
if result:
return f(result) or ''
else:
return ''
def __call__(self, f):
f = partial(self.parse_string, f)
return string_formatter.__call__(self, f)
navigation_parser = NavigationParser
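# Illustrative sketch: the tcf_* formatters below pick apart a textual navigation
# chain. Given the (invented) value "->DOG[R1.'owned by']->OWNER[R2]":
#
#   tcf_kl      ->  'DOG'           (first key letter)
#   tcf_rel     ->  '1'             (first association number)
#   tcf_phrase  ->  'owned by'      (first phrase, empty when absent)
#   tcf_rest    ->  "->OWNER[R2]"   (chain with the first step removed)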
@navigation_parser('tcf_kl')
def first_key_letter(result):
'Get the first key letter in a navigation'
return result.group(2)
@navigation_parser('tcf_rel')
def first_association_id(result):
'Get the first association id in a navigation'
return result.group(3)
@navigation_parser('tcf_phrase')
def first_phrase(result):
'Get the first phrase in a navigation'
return result.group(4)
@navigation_parser('tcf_rest')
def remove_first_navigation_step(result):
'Remove the first step in a navigation'
return result.string[result.end():]
class BackwardNavigationParser(NavigationParser):
'''
Decorator for adding navigation formatters to the Runtime class.
The parsing is done backwards, i.e. from right to left.
'''
regexp = re.compile(r"(\s*->\s*([\w]+)\[[Rr](\d+)(?:\.\'([^\']+)\')?\]\s*)$")
backward_navigation_parser = BackwardNavigationParser
@backward_navigation_parser('tcb_kl')
def last_key_letter(result):
'Get the last key letter in a navigation'
return result.group(2)
@backward_navigation_parser('tcb_rel')
def last_association_id(result):
'Get the last association id in a navigation'
return result.group(3)
@backward_navigation_parser('tcb_phrase')
def last_phrase(result):
'Get the last phrase in a navigation'
return result.group(4)
@backward_navigation_parser('tcb_rest')
def remove_last_navigation_step(result):
'Remove the last step in a navigation'
return result.string[:result.start(1)] | PypiClean |
/gears-babel-0.1.tar.gz/gears-babel-0.1/gears_babel/node_modules/babel/node_modules/chokidar/node_modules/readdirp/node_modules/minimatch/node_modules/sigmund/bench.js |
// sparseFE10 and sigmund are usually pretty close, which is to be expected,
// since they are essentially the same algorithm, except that sigmund handles
// regular expression objects properly.
var http = require('http')
var util = require('util')
var sigmund = require('./sigmund.js')
var sreq, sres, creq, cres, test
http.createServer(function (q, s) {
sreq = q
sres = s
sres.end('ok')
this.close(function () { setTimeout(function () {
start()
}, 200) })
}).listen(1337, function () {
creq = http.get({ port: 1337 })
creq.on('response', function (s) { cres = s })
})
function start () {
test = [sreq, sres, creq, cres]
// test = sreq
// sreq.sres = sres
// sreq.creq = creq
// sreq.cres = cres
for (var i in exports.compare) {
console.log(i)
var hash = exports.compare[i]()
console.log(hash)
console.log(hash.length)
console.log('')
}
require('bench').runMain()
}
function customWs (obj, md, d) {
d = d || 0
var to = typeof obj
if (to === 'undefined' || to === 'function' || to === null) return ''
if (d > md || !obj || to !== 'object') return ('' + obj).replace(/[\n ]+/g, '')
if (Array.isArray(obj)) {
return obj.map(function (i, _, __) {
return customWs(i, md, d + 1)
}).reduce(function (a, b) { return a + b }, '')
}
var keys = Object.keys(obj)
return keys.map(function (k, _, __) {
return k + ':' + customWs(obj[k], md, d + 1)
}).reduce(function (a, b) { return a + b }, '')
}
function custom (obj, md, d) {
d = d || 0
var to = typeof obj
if (to === 'undefined' || to === 'function' || to === null) return ''
if (d > md || !obj || to !== 'object') return '' + obj
if (Array.isArray(obj)) {
return obj.map(function (i, _, __) {
return custom(i, md, d + 1)
}).reduce(function (a, b) { return a + b }, '')
}
var keys = Object.keys(obj)
return keys.map(function (k, _, __) {
return k + ':' + custom(obj[k], md, d + 1)
}).reduce(function (a, b) { return a + b }, '')
}
function sparseFE2 (obj, maxDepth) {
var seen = []
var soFar = ''
function ch (v, depth) {
if (depth > maxDepth) return
if (typeof v === 'function' || typeof v === 'undefined') return
if (typeof v !== 'object' || !v) {
soFar += v
return
}
if (seen.indexOf(v) !== -1 || depth === maxDepth) return
seen.push(v)
soFar += '{'
Object.keys(v).forEach(function (k, _, __) {
// pseudo-private values. skip those.
if (k.charAt(0) === '_') return
var to = typeof v[k]
if (to === 'function' || to === 'undefined') return
soFar += k + ':'
ch(v[k], depth + 1)
})
soFar += '}'
}
ch(obj, 0)
return soFar
}
function sparseFE (obj, maxDepth) {
var seen = []
var soFar = ''
function ch (v, depth) {
if (depth > maxDepth) return
if (typeof v === 'function' || typeof v === 'undefined') return
if (typeof v !== 'object' || !v) {
soFar += v
return
}
if (seen.indexOf(v) !== -1 || depth === maxDepth) return
seen.push(v)
soFar += '{'
Object.keys(v).forEach(function (k, _, __) {
// pseudo-private values. skip those.
if (k.charAt(0) === '_') return
var to = typeof v[k]
if (to === 'function' || to === 'undefined') return
soFar += k
ch(v[k], depth + 1)
})
}
ch(obj, 0)
return soFar
}
function sparse (obj, maxDepth) {
var seen = []
var soFar = ''
function ch (v, depth) {
if (depth > maxDepth) return
if (typeof v === 'function' || typeof v === 'undefined') return
if (typeof v !== 'object' || !v) {
soFar += v
return
}
if (seen.indexOf(v) !== -1 || depth === maxDepth) return
seen.push(v)
soFar += '{'
for (var k in v) {
// pseudo-private values. skip those.
if (k.charAt(0) === '_') continue
var to = typeof v[k]
if (to === 'function' || to === 'undefined') continue
soFar += k
ch(v[k], depth + 1)
}
}
ch(obj, 0)
return soFar
}
function noCommas (obj, maxDepth) {
var seen = []
var soFar = ''
function ch (v, depth) {
if (depth > maxDepth) return
if (typeof v === 'function' || typeof v === 'undefined') return
if (typeof v !== 'object' || !v) {
soFar += v
return
}
if (seen.indexOf(v) !== -1 || depth === maxDepth) return
seen.push(v)
soFar += '{'
for (var k in v) {
// pseudo-private values. skip those.
if (k.charAt(0) === '_') continue
var to = typeof v[k]
if (to === 'function' || to === 'undefined') continue
soFar += k + ':'
ch(v[k], depth + 1)
}
soFar += '}'
}
ch(obj, 0)
return soFar
}
function flatten (obj, maxDepth) {
var seen = []
var soFar = ''
function ch (v, depth) {
if (depth > maxDepth) return
if (typeof v === 'function' || typeof v === 'undefined') return
if (typeof v !== 'object' || !v) {
soFar += v
return
}
if (seen.indexOf(v) !== -1 || depth === maxDepth) return
seen.push(v)
soFar += '{'
for (var k in v) {
// pseudo-private values. skip those.
if (k.charAt(0) === '_') continue
var to = typeof v[k]
if (to === 'function' || to === 'undefined') continue
soFar += k + ':'
ch(v[k], depth + 1)
soFar += ','
}
soFar += '}'
}
ch(obj, 0)
return soFar
}
exports.compare =
{
// 'custom 2': function () {
// return custom(test, 2, 0)
// },
// 'customWs 2': function () {
// return customWs(test, 2, 0)
// },
'JSON.stringify (guarded)': function () {
var seen = []
return JSON.stringify(test, function (k, v) {
if (typeof v !== 'object' || !v) return v
if (seen.indexOf(v) !== -1) return undefined
seen.push(v)
return v
})
},
'flatten 10': function () {
return flatten(test, 10)
},
// 'flattenFE 10': function () {
// return flattenFE(test, 10)
// },
'noCommas 10': function () {
return noCommas(test, 10)
},
'sparse 10': function () {
return sparse(test, 10)
},
'sparseFE 10': function () {
return sparseFE(test, 10)
},
'sparseFE2 10': function () {
return sparseFE2(test, 10)
},
sigmund: function() {
return sigmund(test, 10)
},
// 'util.inspect 1': function () {
// return util.inspect(test, false, 1, false)
// },
// 'util.inspect undefined': function () {
// util.inspect(test)
// },
// 'util.inspect 2': function () {
// util.inspect(test, false, 2, false)
// },
// 'util.inspect 3': function () {
// util.inspect(test, false, 3, false)
// },
// 'util.inspect 4': function () {
// util.inspect(test, false, 4, false)
// },
// 'util.inspect Infinity': function () {
// util.inspect(test, false, Infinity, false)
// }
}
/** results
**/ | PypiClean |
/stable_baselines-2.10.2-py3-none-any.whl/stable_baselines/common/distributions.py | import numpy as np
import tensorflow as tf
from tensorflow.python.ops import math_ops
from gym import spaces
from stable_baselines.common.tf_layers import linear
class ProbabilityDistribution(object):
"""
Base class for describing a probability distribution.
"""
def __init__(self):
super(ProbabilityDistribution, self).__init__()
def flatparam(self):
"""
Return the direct probabilities
:return: ([float]) the probabilities
"""
raise NotImplementedError
def mode(self):
"""
Returns the probability
:return: (Tensorflow Tensor) the deterministic action
"""
raise NotImplementedError
def neglogp(self, x):
"""
        returns the negative log likelihood
:param x: (str) the labels of each index
:return: ([float]) The negative log likelihood of the distribution
"""
# Usually it's easier to define the negative logprob
raise NotImplementedError
def kl(self, other):
"""
Calculates the Kullback-Leibler divergence from the given probability distribution
:param other: ([float]) the distribution to compare with
:return: (float) the KL divergence of the two distributions
"""
raise NotImplementedError
def entropy(self):
"""
Returns Shannon's entropy of the probability
:return: (float) the entropy
"""
raise NotImplementedError
def sample(self):
"""
returns a sample from the probability distribution
:return: (Tensorflow Tensor) the stochastic action
"""
raise NotImplementedError
def logp(self, x):
"""
        returns the log likelihood
:param x: (str) the labels of each index
:return: ([float]) The log likelihood of the distribution
"""
return - self.neglogp(x)
class ProbabilityDistributionType(object):
"""
Parametrized family of probability distributions
"""
def probability_distribution_class(self):
"""
returns the ProbabilityDistribution class of this type
:return: (Type ProbabilityDistribution) the probability distribution class associated
"""
raise NotImplementedError
def proba_distribution_from_flat(self, flat):
"""
Returns the probability distribution from flat probabilities
flat: flattened vector of parameters of probability distribution
:param flat: ([float]) the flat probabilities
:return: (ProbabilityDistribution) the instance of the ProbabilityDistribution associated
"""
return self.probability_distribution_class()(flat)
def proba_distribution_from_latent(self, pi_latent_vector, vf_latent_vector, init_scale=1.0, init_bias=0.0):
"""
returns the probability distribution from latent values
:param pi_latent_vector: ([float]) the latent pi values
:param vf_latent_vector: ([float]) the latent vf values
:param init_scale: (float) the initial scale of the distribution
:param init_bias: (float) the initial bias of the distribution
:return: (ProbabilityDistribution) the instance of the ProbabilityDistribution associated
"""
raise NotImplementedError
def param_shape(self):
"""
returns the shape of the input parameters
:return: ([int]) the shape
"""
raise NotImplementedError
def sample_shape(self):
"""
returns the shape of the sampling
:return: ([int]) the shape
"""
raise NotImplementedError
def sample_dtype(self):
"""
returns the type of the sampling
:return: (type) the type
"""
raise NotImplementedError
def param_placeholder(self, prepend_shape, name=None):
"""
returns the TensorFlow placeholder for the input parameters
:param prepend_shape: ([int]) the prepend shape
:param name: (str) the placeholder name
:return: (TensorFlow Tensor) the placeholder
"""
return tf.placeholder(dtype=tf.float32, shape=prepend_shape + self.param_shape(), name=name)
def sample_placeholder(self, prepend_shape, name=None):
"""
returns the TensorFlow placeholder for the sampling
:param prepend_shape: ([int]) the prepend shape
:param name: (str) the placeholder name
:return: (TensorFlow Tensor) the placeholder
"""
return tf.placeholder(dtype=self.sample_dtype(), shape=prepend_shape + self.sample_shape(), name=name)
class CategoricalProbabilityDistributionType(ProbabilityDistributionType):
def __init__(self, n_cat):
"""
The probability distribution type for categorical input
:param n_cat: (int) the number of categories
"""
self.n_cat = n_cat
def probability_distribution_class(self):
return CategoricalProbabilityDistribution
def proba_distribution_from_latent(self, pi_latent_vector, vf_latent_vector, init_scale=1.0, init_bias=0.0):
pdparam = linear(pi_latent_vector, 'pi', self.n_cat, init_scale=init_scale, init_bias=init_bias)
q_values = linear(vf_latent_vector, 'q', self.n_cat, init_scale=init_scale, init_bias=init_bias)
return self.proba_distribution_from_flat(pdparam), pdparam, q_values
def param_shape(self):
return [self.n_cat]
def sample_shape(self):
return []
def sample_dtype(self):
return tf.int64
class MultiCategoricalProbabilityDistributionType(ProbabilityDistributionType):
def __init__(self, n_vec):
"""
The probability distribution type for multiple categorical input
:param n_vec: ([int]) the vectors
"""
# Cast the variable because tf does not allow uint32
self.n_vec = n_vec.astype(np.int32)
# Check that the cast was valid
assert (self.n_vec > 0).all(), "Casting uint32 to int32 was invalid"
def probability_distribution_class(self):
return MultiCategoricalProbabilityDistribution
def proba_distribution_from_flat(self, flat):
return MultiCategoricalProbabilityDistribution(self.n_vec, flat)
def proba_distribution_from_latent(self, pi_latent_vector, vf_latent_vector, init_scale=1.0, init_bias=0.0):
pdparam = linear(pi_latent_vector, 'pi', sum(self.n_vec), init_scale=init_scale, init_bias=init_bias)
q_values = linear(vf_latent_vector, 'q', sum(self.n_vec), init_scale=init_scale, init_bias=init_bias)
return self.proba_distribution_from_flat(pdparam), pdparam, q_values
def param_shape(self):
return [sum(self.n_vec)]
def sample_shape(self):
return [len(self.n_vec)]
def sample_dtype(self):
return tf.int64
class DiagGaussianProbabilityDistributionType(ProbabilityDistributionType):
def __init__(self, size):
"""
The probability distribution type for multivariate Gaussian input
:param size: (int) the number of dimensions of the multivariate gaussian
"""
self.size = size
def probability_distribution_class(self):
return DiagGaussianProbabilityDistribution
def proba_distribution_from_flat(self, flat):
"""
returns the probability distribution from flat probabilities
:param flat: ([float]) the flat probabilities
:return: (ProbabilityDistribution) the instance of the ProbabilityDistribution associated
"""
return self.probability_distribution_class()(flat)
def proba_distribution_from_latent(self, pi_latent_vector, vf_latent_vector, init_scale=1.0, init_bias=0.0):
mean = linear(pi_latent_vector, 'pi', self.size, init_scale=init_scale, init_bias=init_bias)
logstd = tf.get_variable(name='pi/logstd', shape=[1, self.size], initializer=tf.zeros_initializer())
pdparam = tf.concat([mean, mean * 0.0 + logstd], axis=1)
q_values = linear(vf_latent_vector, 'q', self.size, init_scale=init_scale, init_bias=init_bias)
return self.proba_distribution_from_flat(pdparam), mean, q_values
def param_shape(self):
return [2 * self.size]
def sample_shape(self):
return [self.size]
def sample_dtype(self):
return tf.float32
class BernoulliProbabilityDistributionType(ProbabilityDistributionType):
def __init__(self, size):
"""
The probability distribution type for Bernoulli input
:param size: (int) the number of dimensions of the Bernoulli distribution
"""
self.size = size
def probability_distribution_class(self):
return BernoulliProbabilityDistribution
def proba_distribution_from_latent(self, pi_latent_vector, vf_latent_vector, init_scale=1.0, init_bias=0.0):
pdparam = linear(pi_latent_vector, 'pi', self.size, init_scale=init_scale, init_bias=init_bias)
q_values = linear(vf_latent_vector, 'q', self.size, init_scale=init_scale, init_bias=init_bias)
return self.proba_distribution_from_flat(pdparam), pdparam, q_values
def param_shape(self):
return [self.size]
def sample_shape(self):
return [self.size]
def sample_dtype(self):
return tf.int32
class CategoricalProbabilityDistribution(ProbabilityDistribution):
def __init__(self, logits):
"""
Probability distributions from categorical input
:param logits: ([float]) the categorical logits input
"""
self.logits = logits
super(CategoricalProbabilityDistribution, self).__init__()
def flatparam(self):
return self.logits
def mode(self):
return tf.argmax(self.logits, axis=-1)
def neglogp(self, x):
# Note: we can't use sparse_softmax_cross_entropy_with_logits because
# the implementation does not allow second-order derivatives...
one_hot_actions = tf.one_hot(x, self.logits.get_shape().as_list()[-1])
return tf.nn.softmax_cross_entropy_with_logits_v2(
logits=self.logits,
labels=tf.stop_gradient(one_hot_actions))
def kl(self, other):
a_0 = self.logits - tf.reduce_max(self.logits, axis=-1, keepdims=True)
a_1 = other.logits - tf.reduce_max(other.logits, axis=-1, keepdims=True)
exp_a_0 = tf.exp(a_0)
exp_a_1 = tf.exp(a_1)
z_0 = tf.reduce_sum(exp_a_0, axis=-1, keepdims=True)
z_1 = tf.reduce_sum(exp_a_1, axis=-1, keepdims=True)
p_0 = exp_a_0 / z_0
return tf.reduce_sum(p_0 * (a_0 - tf.log(z_0) - a_1 + tf.log(z_1)), axis=-1)
def entropy(self):
a_0 = self.logits - tf.reduce_max(self.logits, axis=-1, keepdims=True)
exp_a_0 = tf.exp(a_0)
z_0 = tf.reduce_sum(exp_a_0, axis=-1, keepdims=True)
p_0 = exp_a_0 / z_0
return tf.reduce_sum(p_0 * (tf.log(z_0) - a_0), axis=-1)
def sample(self):
# Gumbel-max trick to sample
# a categorical distribution (see http://amid.fish/humble-gumbel)
uniform = tf.random_uniform(tf.shape(self.logits), dtype=self.logits.dtype)
return tf.argmax(self.logits - tf.log(-tf.log(uniform)), axis=-1)
@classmethod
def fromflat(cls, flat):
"""
Create an instance of this from new logits values
:param flat: ([float]) the categorical logits input
:return: (ProbabilityDistribution) the instance from the given categorical input
"""
return cls(flat)
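# Illustrative sketch of the Gumbel-max trick used in sample() above, written with
# numpy: argmax(logits + Gumbel noise) draws from the categorical distribution given
# by softmax(logits), and -log(-log(U)) with U ~ Uniform(0, 1) is a Gumbel sample.
#
#   import numpy as np
#   logits = np.array([2.0, 1.0, 0.1])
#   uniform = np.random.uniform(size=logits.shape)
#   sample = np.argmax(logits - np.log(-np.log(uniform)))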
class MultiCategoricalProbabilityDistribution(ProbabilityDistribution):
def __init__(self, nvec, flat):
"""
Probability distributions from multicategorical input
:param nvec: ([int]) the sizes of the different categorical inputs
:param flat: ([float]) the categorical logits input
"""
self.flat = flat
self.categoricals = list(map(CategoricalProbabilityDistribution, tf.split(flat, nvec, axis=-1)))
super(MultiCategoricalProbabilityDistribution, self).__init__()
def flatparam(self):
return self.flat
def mode(self):
return tf.stack([p.mode() for p in self.categoricals], axis=-1)
def neglogp(self, x):
return tf.add_n([p.neglogp(px) for p, px in zip(self.categoricals, tf.unstack(x, axis=-1))])
def kl(self, other):
return tf.add_n([p.kl(q) for p, q in zip(self.categoricals, other.categoricals)])
def entropy(self):
return tf.add_n([p.entropy() for p in self.categoricals])
def sample(self):
return tf.stack([p.sample() for p in self.categoricals], axis=-1)
@classmethod
def fromflat(cls, flat):
"""
Create an instance of this from new logits values
:param flat: ([float]) the multi categorical logits input
:return: (ProbabilityDistribution) the instance from the given multi categorical input
"""
raise NotImplementedError
class DiagGaussianProbabilityDistribution(ProbabilityDistribution):
def __init__(self, flat):
"""
Probability distributions from multivariate Gaussian input
:param flat: ([float]) the multivariate Gaussian input data
"""
self.flat = flat
mean, logstd = tf.split(axis=len(flat.shape) - 1, num_or_size_splits=2, value=flat)
self.mean = mean
self.logstd = logstd
self.std = tf.exp(logstd)
super(DiagGaussianProbabilityDistribution, self).__init__()
def flatparam(self):
return self.flat
def mode(self):
# Bounds are taken into account outside this class (during training only)
return self.mean
def neglogp(self, x):
return 0.5 * tf.reduce_sum(tf.square((x - self.mean) / self.std), axis=-1) \
+ 0.5 * np.log(2.0 * np.pi) * tf.cast(tf.shape(x)[-1], tf.float32) \
+ tf.reduce_sum(self.logstd, axis=-1)
def kl(self, other):
assert isinstance(other, DiagGaussianProbabilityDistribution)
return tf.reduce_sum(other.logstd - self.logstd + (tf.square(self.std) + tf.square(self.mean - other.mean)) /
(2.0 * tf.square(other.std)) - 0.5, axis=-1)
def entropy(self):
return tf.reduce_sum(self.logstd + .5 * np.log(2.0 * np.pi * np.e), axis=-1)
def sample(self):
        # Bounds are taken into account outside this class (during training only)
# Otherwise, it changes the distribution and breaks PPO2 for instance
return self.mean + self.std * tf.random_normal(tf.shape(self.mean),
dtype=self.mean.dtype)
@classmethod
def fromflat(cls, flat):
"""
Create an instance of this from new multivariate Gaussian input
:param flat: ([float]) the multivariate Gaussian input data
:return: (ProbabilityDistribution) the instance from the given multivariate Gaussian input data
"""
return cls(flat)
class BernoulliProbabilityDistribution(ProbabilityDistribution):
def __init__(self, logits):
"""
Probability distributions from Bernoulli input
:param logits: ([float]) the Bernoulli input data
"""
self.logits = logits
self.probabilities = tf.sigmoid(logits)
super(BernoulliProbabilityDistribution, self).__init__()
def flatparam(self):
return self.logits
def mode(self):
return tf.round(self.probabilities)
def neglogp(self, x):
return tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.logits,
labels=tf.cast(x, tf.float32)),
axis=-1)
def kl(self, other):
return tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=other.logits,
labels=self.probabilities), axis=-1) - \
tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.logits,
labels=self.probabilities), axis=-1)
def entropy(self):
return tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.logits,
labels=self.probabilities), axis=-1)
def sample(self):
samples_from_uniform = tf.random_uniform(tf.shape(self.probabilities))
return tf.cast(math_ops.less(samples_from_uniform, self.probabilities), tf.float32)
@classmethod
def fromflat(cls, flat):
"""
Create an instance of this from new Bernoulli input
:param flat: ([float]) the Bernoulli input data
:return: (ProbabilityDistribution) the instance from the given Bernoulli input data
"""
return cls(flat)
def make_proba_dist_type(ac_space):
"""
return an instance of ProbabilityDistributionType for the correct type of action space
:param ac_space: (Gym Space) the input action space
:return: (ProbabilityDistributionType) the appropriate instance of a ProbabilityDistributionType
"""
if isinstance(ac_space, spaces.Box):
assert len(ac_space.shape) == 1, "Error: the action space must be a vector"
return DiagGaussianProbabilityDistributionType(ac_space.shape[0])
elif isinstance(ac_space, spaces.Discrete):
return CategoricalProbabilityDistributionType(ac_space.n)
elif isinstance(ac_space, spaces.MultiDiscrete):
return MultiCategoricalProbabilityDistributionType(ac_space.nvec)
elif isinstance(ac_space, spaces.MultiBinary):
return BernoulliProbabilityDistributionType(ac_space.n)
else:
raise NotImplementedError("Error: probability distribution, not implemented for action space of type {}."
.format(type(ac_space)) +
" Must be of type Gym Spaces: Box, Discrete, MultiDiscrete or MultiBinary.")
def shape_el(tensor, index):
"""
get the shape of a TensorFlow Tensor element
:param tensor: (TensorFlow Tensor) the input tensor
:param index: (int) the element
:return: ([int]) the shape
"""
maybe = tensor.get_shape()[index]
if maybe is not None:
return maybe
else:
return tf.shape(tensor)[index] | PypiClean |
/django-media-explorer-0.3.10.tar.gz/django-media-explorer-0.3.10/media_explorer/fields.py | import json
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from media_explorer.models import Element, Gallery
from media_explorer.forms import MediaFormField, RichTextFormField
from django.db.models import signals, FileField
from django.forms import forms
from django.template.defaultfilters import filesizeformat
def parse_media(string_or_obj):
"""Takes a JSON string, converts it into a Media object."""
data = {}
kwargs = {}
kwargs["id"] = None
kwargs["type"] = None
kwargs["caption"] = None
kwargs["credit"] = None
try:
if type(string_or_obj) is dict:
data = string_or_obj
elif type(string_or_obj) is Element:
data["id"] = string_or_obj.id
data["type"] = string_or_obj.type
data["caption"] = string_or_obj.description
data["credit"] = string_or_obj.credit
elif type(string_or_obj) is Gallery:
data["id"] = string_or_obj.id
data["type"] = "gallery"
else:
data = json.loads(string_or_obj)
except Exception as e:
raise ValidationError("Media parsing error: " + str(e))
if data:
kwargs.update(data)
return Media(**kwargs)
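# Illustrative example (not part of the original module): a JSON payload as
# stored by MediaField round-trips through parse_media like this (the id is
# hypothetical):
#     media = parse_media('{"id": 3, "type": "image", "caption": "A cat"}')
#     media.id      -> 3
#     media.type    -> "image"
#     media.caption -> "A cat"
#     media.credit  -> None  (keys missing from the payload keep their None defaults)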
def parse_richtext(text):
"""Takes a string, converts it into a RichText object."""
return RichText(text)
class Media(object):
"""The corresponding Python object for the Django MediaField."""
def __init__(self,id=None,type=None,caption=None,credit=None):
self.id = id
self.type = type
self.caption = caption
self.credit = credit
def to_dict(self):
_dict = {}
_dict["id"] = self.id
_dict["type"] = self.type
_dict["caption"] = self.caption
_dict["credit"] = self.credit
return _dict
def __repr__(self):
#return "[MediaField object]: id=%s, type=%s" % (self.id, self.type)
return json.dumps(self.to_dict())
class MediaField(models.TextField):
"""The Django MediaField."""
description = _("A Media Explorer custom model field")
def __init__(self, id=None, type=None, \
credit=None, caption=None, *args, **kwargs):
self.id = id
self.type = type
self.caption = caption
self.credit = credit
kwargs['null'] = True
kwargs['blank'] = True
super(MediaField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(MediaField, self).deconstruct()
del kwargs["null"]
del kwargs["blank"]
return name, path, args, kwargs
def db_type(self, connection):
return "longtext"
def from_db_value(self, value, expression, connection, context):
if value is None:
return value
return parse_media(value)
def do_validation(self, media):
if self.type and media.type and self.type != media.type:
raise ValidationError("Invalid media type selected for this MediaField instance. It expected a '%s' but got a '%s' instead." % (self.type, media.type))
#Override id/credit/caption
if self.id : media.id = self.id
if self.type: media.type = self.type
if self.caption : media.caption = self.caption
if self.credit: media.credit = self.credit
#Validate that the image/video is in the system
if media.type in ["image","video"] and \
not Element.objects.filter(id=media.id,type=media.type).exists():
raise ValidationError("Invalid %s selected. The %s was not found." % (media.type, media.type))
#Validate that the gallery is in the system
if media.type == "gallery" and \
not Gallery.objects.filter(id=media.id).exists():
raise ValidationError("Invalid %s selected. The %s was not found." % (media.type, media.type))
return media
def to_python(self, value):
if isinstance(value, Media):
return value
if value is None:
return value
return self.do_validation(parse_media(value))
def get_prep_value(self, value):
value_dict = {}
try:
value_dict["id"] = value.id
value_dict["type"] = value.type
value_dict["caption"] = value.caption
value_dict["credit"] = value.credit
except:
pass
if type(value) is Element:
value_dict["id"] = value.id
value_dict["type"] = value.type
value_dict["caption"] = None
value_dict["credit"] = None
if value.description:
value_dict["caption"] = value.description
if value.credit:
value_dict["credit"] = value.credit
if value_dict:
self.do_validation(parse_media(value_dict))
return str(json.dumps(value_dict))
if value: return str(value)
return value
def formfield(self, **kwargs):
defaults = {}
defaults["form_class"] = MediaFormField
defaults.update(kwargs)
return super(MediaField, self).formfield(**defaults)
class RichText(object):
"""The corresponding Python object for the Django RichTextField."""
def __init__(self,text):
self.text = text
def __repr__(self):
return self.text
class RichTextField(models.TextField):
"""The Django RichTextField."""
description = _("A RichText Explorer custom model field")
def __init__(self, *args, **kwargs):
kwargs['null'] = True
kwargs['blank'] = True
super(RichTextField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(RichTextField, self).deconstruct()
del kwargs["null"]
del kwargs["blank"]
return name, path, args, kwargs
def db_type(self, connection):
return "longtext"
def from_db_value(self, value, expression, connection, context):
if value is None:
return value
return parse_richtext(value)
def do_validation(self, richtext):
return richtext
def to_python(self, value):
if isinstance(value, RichText):
return value
if value is None:
return value
return self.do_validation(parse_richtext(value))
def get_prep_value(self, value):
if value: return str(value)
return value
def formfield(self, **kwargs):
defaults = {}
defaults["form_class"] = RichTextFormField
defaults.update(kwargs)
return super(RichTextField, self).formfield(**defaults)
class MediaImageField(FileField):
"""
Forked from: https://djangosnippets.org/snippets/2206
Same as FileField, but you can specify:
* max_upload_size - a number indicating the maximum file size allowed for upload.
2.5MB - 2621440
5MB - 5242880
10MB - 10485760
20MB - 20971520
            50MB - 52428800
            100MB - 104857600
            250MB - 262144000
            500MB - 524288000
"""
def __init__(self, *args, **kwargs):
self.content_types = []
self.max_upload_size = 0
self.new_upload = False
#try:
# self.content_types = ["image/png","image/jpeg","image/jpg","image/bmp","image/gif","image/tiff","image/ief","image/g3fax"]
#except Exception as e:
# pass
try:
self.max_upload_size = kwargs.pop("max_upload_size")
except Exception as e:
pass
super(MediaImageField, self).__init__(*args, **kwargs)
def clean(self, *args, **kwargs):
data = super(MediaImageField, self).clean(*args, **kwargs)
file = data.file
#content_type = file.content_type
content_type = getattr(file,"content_type",None)
if "django.core.files.uploadedfile.InMemoryUploadedFile" in str(type(file)):
self.new_upload = True
if content_type and not content_type.lower().startswith("image/"):
raise forms.ValidationError(_('The file you selected is not an image. Please select an image.'))
if self.max_upload_size > 0 and \
file._size > self.max_upload_size:
raise forms.ValidationError(_('Please keep filesize under %s. Current filesize %s') % (filesizeformat(self.max_upload_size), filesizeformat(file._size)))
return data
def contribute_to_class(self, cls, name, **kwargs):
super(MediaImageField, self).contribute_to_class( cls, name, **kwargs)
signals.post_save.connect(self.on_post_save_callback, sender=cls)
#signals.post_delete.connect(self.on_post_delete_callback, sender=cls)
def on_post_save_callback(self, instance, force=False, *args, **kwargs):
"""
Save image into Element model
"""
if self.new_upload and \
type(instance.__dict__[self.name]) in [str,unicode]:
data = {}
data["image"] = instance.__dict__[self.name]
element = Element()
element.__dict__.update(data)
element.save()
#def on_post_delete_callback(self, instance, force=False, *args, **kwargs):
# """
# TODO
# Delete file from Element model
# """
# pass | PypiClean |
/mapfish.plugin.client-0.2.tar.gz/mapfish.plugin.client-0.2/mapfishpluginclient/template/+package+/public/lib/ext/Ext/build/data/Store-min.js | Ext.data.Store=function(a){this.data=new Ext.util.MixedCollection(false);this.data.getKey=function(b){return b.id};this.baseParams={};this.paramNames={start:"start",limit:"limit",sort:"sort",dir:"dir"};if(a&&a.data){this.inlineData=a.data;delete a.data}Ext.apply(this,a);if(this.url&&!this.proxy){this.proxy=new Ext.data.HttpProxy({url:this.url})}if(this.reader){if(!this.recordType){this.recordType=this.reader.recordType}if(this.reader.onMetaChange){this.reader.onMetaChange=this.onMetaChange.createDelegate(this)}}if(this.recordType){this.fields=this.recordType.prototype.fields}this.modified=[];this.addEvents("datachanged","metachange","add","remove","update","clear","beforeload","load","loadexception");if(this.proxy){this.relayEvents(this.proxy,["loadexception"])}this.sortToggle={};if(this.sortInfo){this.setDefaultSort(this.sortInfo.field,this.sortInfo.direction)}Ext.data.Store.superclass.constructor.call(this);if(this.storeId||this.id){Ext.StoreMgr.register(this)}if(this.inlineData){this.loadData(this.inlineData);delete this.inlineData}else{if(this.autoLoad){this.load.defer(10,this,[typeof this.autoLoad=="object"?this.autoLoad:undefined])}}};Ext.extend(Ext.data.Store,Ext.util.Observable,{remoteSort:false,pruneModifiedRecords:false,lastOptions:null,destroy:function(){if(this.storeId||this.id){Ext.StoreMgr.unregister(this)}this.data=null;this.purgeListeners()},add:function(b){b=[].concat(b);if(b.length<1){return}for(var d=0,a=b.length;d<a;d++){b[d].join(this)}var c=this.data.length;this.data.addAll(b);if(this.snapshot){this.snapshot.addAll(b)}this.fireEvent("add",this,b,c)},addSorted:function(a){var b=this.findInsertIndex(a);this.insert(b,a)},remove:function(a){var b=this.data.indexOf(a);this.data.removeAt(b);if(this.pruneModifiedRecords){this.modified.remove(a)}if(this.snapshot){this.snapshot.remove(a)}this.fireEvent("remove",this,a,b)},removeAt:function(a){this.remove(this.getAt(a))},removeAll:function(){this.data.clear();if(this.snapshot){this.snapshot.clear()}if(this.pruneModifiedRecords){this.modified=[]}this.fireEvent("clear",this)},insert:function(c,b){b=[].concat(b);for(var d=0,a=b.length;d<a;d++){this.data.insert(c,b[d]);b[d].join(this)}this.fireEvent("add",this,b,c)},indexOf:function(a){return this.data.indexOf(a)},indexOfId:function(a){return this.data.indexOfKey(a)},getById:function(a){return this.data.key(a)},getAt:function(a){return this.data.itemAt(a)},getRange:function(b,a){return this.data.getRange(b,a)},storeOptions:function(a){a=Ext.apply({},a);delete a.callback;delete a.scope;this.lastOptions=a},load:function(b){b=b||{};if(this.fireEvent("beforeload",this,b)!==false){this.storeOptions(b);var c=Ext.apply(b.params||{},this.baseParams);if(this.sortInfo&&this.remoteSort){var a=this.paramNames;c[a.sort]=this.sortInfo.field;c[a.dir]=this.sortInfo.direction}this.proxy.load(c,this.reader,this.loadRecords,this,b);return true}else{return false}},reload:function(a){this.load(Ext.applyIf(a||{},this.lastOptions))},loadRecords:function(g,b,f){if(!g||f===false){if(f!==false){this.fireEvent("load",this,[],b)}if(b.callback){b.callback.call(b.scope||this,[],b,false)}return}var e=g.records,d=g.totalRecords||e.length;if(!b||b.add!==true){if(this.pruneModifiedRecords){this.modified=[]}for(var c=0,a=e.length;c<a;c++){e[c].join(this)}if(this.snapshot){this.data=this.snapshot;delete 
this.snapshot}this.data.clear();this.data.addAll(e);this.totalLength=d;this.applySort();this.fireEvent("datachanged",this)}else{this.totalLength=Math.max(d,this.data.length+e.length);this.add(e)}this.fireEvent("load",this,e,b);if(b.callback){b.callback.call(b.scope||this,e,b,true)}},loadData:function(c,a){var b=this.reader.readRecords(c);this.loadRecords(b,{add:a},true)},getCount:function(){return this.data.length||0},getTotalCount:function(){return this.totalLength||0},getSortState:function(){return this.sortInfo},applySort:function(){if(this.sortInfo&&!this.remoteSort){var a=this.sortInfo,b=a.field;this.sortData(b,a.direction)}},sortData:function(c,d){d=d||"ASC";var a=this.fields.get(c).sortType;var b=function(f,e){var h=a(f.data[c]),g=a(e.data[c]);return h>g?1:(h<g?-1:0)};this.data.sort(d,b);if(this.snapshot&&this.snapshot!=this.data){this.snapshot.sort(d,b)}},setDefaultSort:function(b,a){a=a?a.toUpperCase():"ASC";this.sortInfo={field:b,direction:a};this.sortToggle[b]=a},sort:function(e,c){var d=this.fields.get(e);if(!d){return false}if(!c){if(this.sortInfo&&this.sortInfo.field==d.name){c=(this.sortToggle[d.name]||"ASC").toggle("ASC","DESC")}else{c=d.sortDir}}var b=(this.sortToggle)?this.sortToggle[d.name]:null;var a=(this.sortInfo)?this.sortInfo:null;this.sortToggle[d.name]=c;this.sortInfo={field:d.name,direction:c};if(!this.remoteSort){this.applySort();this.fireEvent("datachanged",this)}else{if(!this.load(this.lastOptions)){if(b){this.sortToggle[d.name]=b}if(a){this.sortInfo=a}}}},each:function(b,a){this.data.each(b,a)},getModifiedRecords:function(){return this.modified},createFilterFn:function(c,b,d,a){if(Ext.isEmpty(b,false)){return false}b=this.data.createValueMatcher(b,d,a);return function(e){return b.test(e.data[c])}},sum:function(e,f,a){var c=this.data.items,b=0;f=f||0;a=(a||a===0)?a:c.length-1;for(var d=f;d<=a;d++){b+=(c[d].data[e]||0)}return b},filter:function(d,c,e,a){var b=this.createFilterFn(d,c,e,a);return b?this.filterBy(b):this.clearFilter()},filterBy:function(b,a){this.snapshot=this.snapshot||this.data;this.data=this.queryBy(b,a||this);this.fireEvent("datachanged",this)},query:function(d,c,e,a){var b=this.createFilterFn(d,c,e,a);return b?this.queryBy(b):this.data.clone()},queryBy:function(b,a){var c=this.snapshot||this.data;return c.filterBy(b,a||this)},find:function(d,c,f,e,a){var b=this.createFilterFn(d,c,e,a);return b?this.data.findIndexBy(b,null,f):-1},findBy:function(b,a,c){return this.data.findIndexBy(b,a,c)},collect:function(h,j,b){var g=(b===true&&this.snapshot)?this.snapshot.items:this.data.items;var k,m,a=[],c={};for(var e=0,f=g.length;e<f;e++){k=g[e].data[h];m=String(k);if((j||!Ext.isEmpty(k))&&!c[m]){c[m]=true;a[a.length]=k}}return a},clearFilter:function(a){if(this.isFiltered()){this.data=this.snapshot;delete this.snapshot;if(a!==true){this.fireEvent("datachanged",this)}}},isFiltered:function(){return this.snapshot&&this.snapshot!=this.data},afterEdit:function(a){if(this.modified.indexOf(a)==-1){this.modified.push(a)}this.fireEvent("update",this,a,Ext.data.Record.EDIT)},afterReject:function(a){this.modified.remove(a);this.fireEvent("update",this,a,Ext.data.Record.REJECT)},afterCommit:function(a){this.modified.remove(a);this.fireEvent("update",this,a,Ext.data.Record.COMMIT)},commitChanges:function(){var b=this.modified.slice(0);this.modified=[];for(var c=0,a=b.length;c<a;c++){b[c].commit()}},rejectChanges:function(){var b=this.modified.slice(0);this.modified=[];for(var 
c=0,a=b.length;c<a;c++){b[c].reject()}},onMetaChange:function(b,a,c){this.recordType=a;this.fields=a.prototype.fields;delete this.snapshot;this.sortInfo=b.sortInfo;this.modified=[];this.fireEvent("metachange",this,this.reader.meta)},findInsertIndex:function(a){this.suspendEvents();var c=this.data.clone();this.data.add(a);this.applySort();var b=this.data.indexOf(a);this.data=c;this.resumeEvents();return b}}); | PypiClean |
/PySMT-0.9.5.tar.gz/PySMT-0.9.5/pysmt/cmd/installers/btor.py | import os
from pysmt.cmd.installers.base import SolverInstaller
class BtorInstaller(SolverInstaller):
SOLVER = "btor"
def __init__(self, install_dir, bindings_dir, solver_version,
mirror_link=None, git_version=None):
native_link = "https://github.com/Boolector/boolector/archive/%s.tar.gz"
archive_name = "boolector-%s.tar.gz"
if git_version:
native_link = native_link % git_version
archive_name = archive_name % git_version
else:
native_link = native_link % solver_version
archive_name = archive_name % solver_version
SolverInstaller.__init__(self, install_dir=install_dir,
bindings_dir=bindings_dir,
solver_version=solver_version,
archive_name=archive_name,
native_link=native_link,
mirror_link=mirror_link)
def compile(self):
# Override default Python library, include, and interpreter
# path into Boolector's CMake because CMake can get confused
# if multiple interpreters are available, especially python 2
# vs python 3.
import distutils.sysconfig as sysconfig
import sys
PYTHON_LIBRARY = os.environ.get('PYSMT_PYTHON_LIBDIR')
PYTHON_INCLUDE_DIR = sysconfig.get_python_inc()
PYTHON_EXECUTABLE = sys.executable
CMAKE_OPTS = ' -DPYTHON_INCLUDE_DIR=' + PYTHON_INCLUDE_DIR
CMAKE_OPTS += ' -DPYTHON_EXECUTABLE=' + PYTHON_EXECUTABLE
if PYTHON_LIBRARY:
CMAKE_OPTS += ' -DPYTHON_LIBRARY=' + PYTHON_LIBRARY
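        # For illustration only (paths are hypothetical), the assembled options
        # string passed to Boolector's configure.sh looks like:
        #   -DPYTHON_INCLUDE_DIR=/usr/include/python3.8
        #   -DPYTHON_EXECUTABLE=/usr/bin/python3
        #   -DPYTHON_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython3.8.so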
# Unpack
SolverInstaller.untar(os.path.join(self.base_dir, self.archive_name),
self.extract_path)
# Build lingeling
SolverInstaller.run("bash ./contrib/setup-lingeling.sh",
directory=self.extract_path)
# Build Btor
SolverInstaller.run("bash ./contrib/setup-btor2tools.sh",
directory=self.extract_path)
# Build Boolector Solver
SolverInstaller.run("bash ./configure.sh --python",
directory=self.extract_path,
env_variables={"CMAKE_OPTS": CMAKE_OPTS})
SolverInstaller.run("make -j2",
directory=os.path.join(self.extract_path, "build"))
def move(self):
libdir = os.path.join(self.extract_path, "build", "lib")
for f in os.listdir(libdir):
if f.startswith("pyboolector") and f.endswith(".so"):
SolverInstaller.mv(os.path.join(libdir, f),
self.bindings_dir)
def get_installed_version(self):
import re
res = self.get_installed_version_script(self.bindings_dir, "btor")
version = None
if res == "OK":
vfile = os.path.join(self.extract_path, "CMakeLists.txt")
try:
with open(vfile) as f:
content = f.read().strip()
                    m = re.search(r'set\(VERSION "(.*)"\)', content)
if m is not None:
version = m.group(1)
except OSError:
print("File not found")
return None
except IOError:
print("IO Error")
return None
return version | PypiClean |
/nobinobi-child-0.1.4.14.tar.gz/nobinobi-child-0.1.4.14/nobinobi_child/static/vendor/bootstrap-4.3.1/CODE_OF_CONDUCT.md | # Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [email protected]. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://www.contributor-covenant.org/version/1/4/code-of-conduct.html][version]
[homepage]: https://www.contributor-covenant.org/
[version]: https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
| PypiClean |
/ais_dom-2023.7.2-py3-none-any.whl/homeassistant/components/ping/device_tracker.py | from __future__ import annotations
import asyncio
from datetime import datetime, timedelta
import logging
import subprocess
from icmplib import async_multiping
import voluptuous as vol
from homeassistant.components.device_tracker import (
CONF_SCAN_INTERVAL,
PLATFORM_SCHEMA as BASE_PLATFORM_SCHEMA,
SCAN_INTERVAL,
AsyncSeeCallback,
SourceType,
)
from homeassistant.const import CONF_HOSTS
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import gather_with_concurrency
from homeassistant.util.process import kill_subprocess
from .const import DOMAIN, ICMP_TIMEOUT, PING_ATTEMPTS_COUNT, PING_PRIVS, PING_TIMEOUT
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
CONF_PING_COUNT = "count"
CONCURRENT_PING_LIMIT = 6
PLATFORM_SCHEMA = BASE_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOSTS): {cv.slug: cv.string},
vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int,
}
)
class HostSubProcess:
"""Host object with ping detection."""
def __init__(
self,
ip_address: str,
dev_id: str,
hass: HomeAssistant,
config: ConfigType,
privileged: bool | None,
) -> None:
"""Initialize the Host pinger."""
self.hass = hass
self.ip_address = ip_address
self.dev_id = dev_id
self._count = config[CONF_PING_COUNT]
self._ping_cmd = ["ping", "-n", "-q", "-c1", "-W1", ip_address]
def ping(self) -> bool | None:
"""Send an ICMP echo request and return True if success."""
with subprocess.Popen(
self._ping_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
close_fds=False, # required for posix_spawn
) as pinger:
try:
pinger.communicate(timeout=1 + PING_TIMEOUT)
return pinger.returncode == 0
except subprocess.TimeoutExpired:
kill_subprocess(pinger)
return False
except subprocess.CalledProcessError:
return False
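    # Illustrative note (not part of the original module): for a host at the
    # hypothetical address 192.168.1.10, self._ping_cmd is equivalent to running
    #     ping -n -q -c1 -W1 192.168.1.10
    # and the device counts as reachable when that command exits with status 0.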
def update(self) -> bool:
"""Update device state by sending one or more ping messages."""
failed = 0
while failed < self._count: # check more times if host is unreachable
if self.ping():
return True
failed += 1
_LOGGER.debug("No response from %s failed=%d", self.ip_address, failed)
return False
async def async_setup_scanner(
hass: HomeAssistant,
config: ConfigType,
async_see: AsyncSeeCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> bool:
"""Set up the Host objects and return the update function."""
privileged = hass.data[DOMAIN][PING_PRIVS]
ip_to_dev_id = {ip: dev_id for (dev_id, ip) in config[CONF_HOSTS].items()}
interval = config.get(
CONF_SCAN_INTERVAL,
timedelta(seconds=len(ip_to_dev_id) * config[CONF_PING_COUNT]) + SCAN_INTERVAL,
)
_LOGGER.debug(
"Started ping tracker with interval=%s on hosts: %s",
interval,
",".join(ip_to_dev_id.keys()),
)
if privileged is None:
hosts = [
HostSubProcess(ip, dev_id, hass, config, privileged)
for (dev_id, ip) in config[CONF_HOSTS].items()
]
async def async_update(now: datetime) -> None:
"""Update all the hosts on every interval time."""
results = await gather_with_concurrency(
CONCURRENT_PING_LIMIT,
*(hass.async_add_executor_job(host.update) for host in hosts),
)
await asyncio.gather(
*(
async_see(dev_id=host.dev_id, source_type=SourceType.ROUTER)
for idx, host in enumerate(hosts)
if results[idx]
)
)
else:
async def async_update(now: datetime) -> None:
"""Update all the hosts on every interval time."""
responses = await async_multiping(
list(ip_to_dev_id),
count=PING_ATTEMPTS_COUNT,
timeout=ICMP_TIMEOUT,
privileged=privileged,
)
_LOGGER.debug("Multiping responses: %s", responses)
await asyncio.gather(
*(
async_see(dev_id=dev_id, source_type=SourceType.ROUTER)
for idx, dev_id in enumerate(ip_to_dev_id.values())
if responses[idx].is_alive
)
)
async def _async_update_interval(now: datetime) -> None:
try:
await async_update(now)
finally:
if not hass.is_stopping:
async_track_point_in_utc_time(
hass, _async_update_interval, now + interval
)
await _async_update_interval(dt_util.now())
return True | PypiClean |
/graphene-sqlalchemy-filter-1.13.0.tar.gz/graphene-sqlalchemy-filter-1.13.0/graphene_sqlalchemy_filter/connection_field.py | from contextlib import suppress
from functools import partial
from typing import cast
# GraphQL
import graphene_sqlalchemy
from graphene.utils.str_converters import to_snake_case
from promise import Promise, dataloader
# Database
from sqlalchemy import inspection, tuple_
from sqlalchemy.orm import Load, aliased, contains_eager
MYPY = False
if MYPY:
from typing import (
Any,
Callable,
Dict,
List,
Optional,
Tuple,
Type,
Union,
) # noqa: F401; pragma: no cover
from graphql import ResolveInfo # noqa: F401; pragma: no cover
from graphene.relay import Connection # noqa: F401; pragma: no cover
from sqlalchemy.orm import Query # noqa: F401; pragma: no cover
from .filters import FilterSet # noqa: F401; pragma: no cover
graphene_sqlalchemy_version_lt_2_1_2 = tuple(
map(int, graphene_sqlalchemy.__version__.split('.'))
) < (2, 1, 2)
if graphene_sqlalchemy_version_lt_2_1_2:
default_connection_field_factory = None # pragma: no cover
else:
from graphene_sqlalchemy.fields import default_connection_field_factory
DEFAULT_FILTER_ARG: str = 'filters'
class FilterableConnectionField(graphene_sqlalchemy.SQLAlchemyConnectionField):
filter_arg: str = DEFAULT_FILTER_ARG
factory: 'Union[FilterableFieldFactory, Callable, None]' = None
filters: dict = {}
def __init_subclass__(cls):
if graphene_sqlalchemy_version_lt_2_1_2:
return # pragma: no cover
if cls.filters and cls.factory is None:
cls.factory = FilterableFieldFactory(cls.filters)
if cls.filter_arg != DEFAULT_FILTER_ARG:
# Update filter arg for nested fields.
cls.factory.model_loader_class = type(
'CustomModelLoader',
(ModelLoader,),
{'filter_arg': cls.filter_arg},
)
elif cls.factory is None:
cls.factory = default_connection_field_factory
def __init__(self, connection, *args, **kwargs):
if self.filter_arg not in kwargs:
model = connection._meta.node._meta.model
with suppress(KeyError):
kwargs[self.filter_arg] = self.filters[model]
super().__init__(connection, *args, **kwargs)
@classmethod
def get_query(cls, model, info: 'ResolveInfo', sort=None, **args):
"""Standard get_query with filtering."""
query = super().get_query(model, info, sort, **args)
request_filters = args.get(cls.filter_arg)
if request_filters:
filter_set = cls.get_filter_set(info)
query = filter_set.filter(info, query, request_filters)
return query
@classmethod
def get_filter_set(cls, info: 'ResolveInfo') -> 'FilterSet':
"""
Get field filter set.
Args:
info: Graphene resolve info object.
Returns:
FilterSet class from field args.
"""
field_name = info.field_asts[0].name.value
schema_field = info.parent_type.fields.get(field_name)
filters_type = schema_field.args[cls.filter_arg].type
filters: 'FilterSet' = filters_type.graphene_type
return filters
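# Illustrative sketch (not part of the original module): a project would
# typically subclass FilterableConnectionField and declare its per-model
# filter sets; ``UserModel`` and ``UserFilter`` below are hypothetical names.
#
#     class CustomField(FilterableConnectionField):
#         filters = {UserModel: UserFilter()}
#
# ``__init_subclass__`` above then wraps this mapping in a
# FilterableFieldFactory, so relationship fields built by the factory receive
# the matching FilterSet as their ``filters`` argument.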
class ModelLoader(dataloader.DataLoader):
filter_arg: str = DEFAULT_FILTER_ARG
def __init__(
self,
parent_model: 'Any',
model: 'Any',
info: 'ResolveInfo',
graphql_args: dict,
):
"""
Dataloader for SQLAlchemy model relations.
Args:
parent_model: Parent SQLAlchemy model.
model: SQLAlchemy model.
info: Graphene resolve info object.
graphql_args: Request args: filters, sort, ...
"""
super().__init__()
self.info: 'ResolveInfo' = info
self.graphql_args: dict = graphql_args
self.model: 'Any' = model
self.parent_model: 'Any' = parent_model
self.parent_model_pks: 'Tuple[str, ...]' = self._get_model_pks(
self.parent_model
)
self.parent_model_pk_fields: tuple = tuple(
getattr(self.parent_model, pk) for pk in self.parent_model_pks
)
self.model_relation_field: str = to_snake_case(self.info.field_name)
self.relation: 'Any' = getattr(
self.parent_model, self.model_relation_field
)
def batch_load_fn(self, keys: 'List[tuple]') -> Promise:
"""
Load related objects.
Args:
keys: Primary key values of parent model.
Returns:
Lists of related orm objects.
"""
if len(self.parent_model_pk_fields) == 1:
left_hand_side = self.parent_model_pk_fields[0]
right_hand_side = [k[0] for k in keys]
else:
left_hand_side = tuple_(*self.parent_model_pk_fields)
right_hand_side = keys
query: 'Query' = self._get_query().filter(
left_hand_side.in_(right_hand_side)
)
objects: 'Dict[tuple, Any]' = {
self.parent_model_object_to_key(parent_object): getattr(
parent_object, self.model_relation_field
)
for parent_object in query
}
return Promise.resolve(
[objects.get(object_id, []) for object_id in keys]
)
@staticmethod
def _get_model_pks(model) -> 'Tuple[str, ...]':
"""
        Get primary key field names.
        Args:
            model: SQLAlchemy model.
        Returns:
            Tuple of primary key field names.
"""
model_pk_fields: 'Tuple[str]' = tuple(
(
cast(str, name)
for name, c in inspection.inspect(model).columns.items()
if c.primary_key
)
)
return model_pk_fields
def parent_model_object_to_key(self, parent_object: 'Any') -> 'Any':
"""
Get primary key value from SQLAlchemy orm object.
Args:
parent_object: SQLAlchemy orm object.
Returns:
Primary key value.
"""
key = tuple(getattr(parent_object, pk) for pk in self.parent_model_pks)
return key
@classmethod
def _get_filter_set(cls, info: 'ResolveInfo') -> 'FilterSet':
"""
Get field filter set.
Args:
info: Graphene resolve info object.
Returns:
FilterSet class from field args.
"""
field_name = info.field_asts[0].name.value
schema_field = info.parent_type.fields.get(field_name)
filters_type = schema_field.args[cls.filter_arg].type
filters: 'FilterSet' = filters_type.graphene_type
return filters
def _get_query(self) -> 'Query':
"""
Build, filter and sort the query.
Returns:
SQLAlchemy query.
"""
subquery = graphene_sqlalchemy.get_query(self.model, self.info.context)
request_filters = self.graphql_args.get(self.filter_arg)
if request_filters:
filter_set = self._get_filter_set(self.info)
subquery = filter_set.filter(self.info, subquery, request_filters)
aliased_model = aliased(
self.model, subquery.subquery(with_labels=True)
)
query = (
graphene_sqlalchemy.get_query(self.parent_model, self.info.context)
.join(aliased_model, self.relation)
.options(
contains_eager(self.relation, alias=aliased_model),
Load(self.parent_model).load_only(*self.parent_model_pks),
)
)
query = self._sorted_query(
query, self.graphql_args.get('sort'), aliased_model
)
return query
def _sorted_query(
self, query: 'Query', sort: 'Optional[list]', by_model: 'Any'
) -> 'Query':
"""Sort query."""
order = []
for s in sort:
sort_field_name = s.value
if not isinstance(sort_field_name, str):
sort_field_name = sort_field_name.element.name
sort_field = getattr(by_model, sort_field_name)
if s.endswith('_ASC'):
sort_field = sort_field.asc()
elif s.endswith('_DESC'):
sort_field = sort_field.desc()
order.append(sort_field)
query = query.order_by(*order)
return query
class NestedFilterableConnectionField(FilterableConnectionField):
dataloaders_field: str = '_sqla_filter_dataloaders'
@classmethod
def _get_or_create_data_loader(
cls, root: 'Any', model: 'Any', info: 'ResolveInfo', args: dict
) -> ModelLoader:
"""
Get or create (and save) dataloader from ResolveInfo
Args:
root: Parent model orm object.
model: SQLAlchemy model.
info: Graphene resolve info object.
args: Request args: filters, sort, ...
Returns:
Dataloader for SQLAlchemy model.
"""
context: 'Union[dict, object]' = info.context
if isinstance(context, dict):
try:
data_loaders = context[cls.dataloaders_field]
except KeyError:
data_loaders = {}
context[cls.dataloaders_field] = data_loaders
else:
data_loaders = getattr(context, cls.dataloaders_field, None)
if data_loaders is None:
data_loaders = {}
setattr(info.context, cls.dataloaders_field, data_loaders)
# Unique dataloader key for context.
data_loader_key = tuple((p for p in info.path if isinstance(p, str)))
try:
current_data_loader: ModelLoader = data_loaders[data_loader_key]
except KeyError:
current_data_loader = ModelLoader(type(root), model, info, args)
data_loaders[data_loader_key] = current_data_loader
return current_data_loader
@classmethod
def connection_resolver(
cls,
resolver: 'Any',
connection_type: 'Any',
model: 'Any',
root: 'Any',
info: 'ResolveInfo',
**kwargs: dict,
) -> 'Union[Promise, Connection]':
"""
Resolve nested connection.
Args:
resolver: Default resolver.
connection_type: Connection class.
model: SQLAlchemy model.
root: Parent SQLAlchemy object.
info: Graphene resolve info object.
**kwargs: Request args: filters, sort, ...
Returns:
Connection object.
"""
data_loader: ModelLoader = cls._get_or_create_data_loader(
root, model, info, kwargs
)
root_pk_value: tuple = data_loader.parent_model_object_to_key(root)
resolved: Promise = data_loader.load(root_pk_value)
on_resolve = partial(
cls.resolve_connection, connection_type, model, info, kwargs
)
return Promise.resolve(resolved).then(on_resolve)
class FilterableFieldFactory:
model_loader_class: 'Type[ModelLoader]' = ModelLoader
field_class: 'Type[NestedFilterableConnectionField]' = (
NestedFilterableConnectionField
)
def __init__(self, model_filters: dict):
self.model_filters: dict = model_filters
def __call__(
self, relationship: 'Any', registry: 'Any' = None, **field_kwargs: dict
) -> NestedFilterableConnectionField:
"""
Get field for relation.
Args:
relationship: SQLAlchemy relation.
registry: graphene-sqlalchemy registry.
**field_kwargs: Field args.
Returns:
Filed object.
"""
model = relationship.mapper.entity
model_type = registry.get_type_for_model(model)
filters: 'Optional[FilterSet]' = self.model_filters.get(model)
if filters is not None:
field_kwargs.setdefault(
self.model_loader_class.filter_arg, filters
)
return self.field_class(model_type._meta.connection, **field_kwargs) | PypiClean |
/ga4gh.vrs-0.9.0a1.tar.gz/ga4gh.vrs-0.9.0a1/notebooks/Haplotypes.ipynb | # Haplotypes
This notebook demonstrates VRS Haplotypes using ApoE alleles.
The ApoE gene is associated with risks of Alzheimer's disease and hypercholesterolemia. Risk of AD is attributed to haplotypes composed of two locations, [rs429358](https://www.ncbi.nlm.nih.gov/snp/rs429358) and [rs7412](https://www.ncbi.nlm.nih.gov/snp/rs7412), both of which are C/T transitions. The four ApoE haplotypes are defined by the two states (C and T) at the two locations, as shown below. (Each location is shown with GRCh37, GRCh38, and RefSeq transcript coordinates.)
```
rs7412
NC_000019.9:g.45411941
NC_000019.10:g.44908822
NM_000041.3:c.526
rs429358 C T
NC_000019.9:g.45412079 C APOE-ε4 APOE-ε1
NC_000019.10:g.44908684 T APOE-ε3 APOE-ε2
NM_000041.3:c.388
```
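The table can be read as a lookup from the two SNP states to a haplotype name. The dictionary below is not part of VRS or of the original notebook code; it is just a plain-Python restatement of the table above.
```
APOE_HAPLOTYPES = {
    # (rs429358 state, rs7412 state): haplotype name
    ("C", "T"): "APOE-ε1",
    ("T", "T"): "APOE-ε2",
    ("T", "C"): "APOE-ε3",
    ("C", "C"): "APOE-ε4",
}
assert APOE_HAPLOTYPES[("T", "C")] == "APOE-ε3"
```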
Links:
* [NCBI APOE Gene record](https://ghr.nlm.nih.gov/gene/APOE)
* [ClinVar APO E4 record](https://www.ncbi.nlm.nih.gov/clinvar/variation/441269/)
* [Snpedia APOE page](http://snpedia.com/index.php/APOE)
# Setup
```
from ga4gh.vrs import models, vrs_deref, vrs_enref
from ga4gh.core import ga4gh_identify, ga4gh_serialize, ga4gh_digest, ga4gh_deref
import json
def ppo(o, indent=2):
"""pretty print object as json"""
print(json.dumps(o.as_dict(), sort_keys=True, indent=indent))
```
## APOE Alleles
Construct the four Alleles above on GRCh38.
```
# NC_000019.10 (GRCh38 chr 19 primary assembly) sequence id
# The sequence id would typically be provided by a sequence repository
sequence_id = "ga4gh:SQ.IIB53T8CNeJJdUqzn9V_JnRtQadwWCbl"
locations = {
"rs429358_38": models.SequenceLocation(
sequence_id = sequence_id,
interval = models.SequenceInterval(start=models.Number(value=44908683, type="Number"),
end=models.Number(value=44908684, type="Number"),
type="SequenceInterval"),
type="SequenceLocation"),
"rs7412_38": models.SequenceLocation(
sequence_id = sequence_id,
interval=models.SequenceInterval(start=models.Number(value=44908821, type="Number"),
end=models.Number(value=44908822, type="Number"),
type="SequenceInterval"),
type="SequenceLocation")
}
alleles = {
"rs429358_38_C": models.Allele(location=locations["rs429358_38"], state=models.SequenceState(sequence="C", type="SequenceState"), type="Allele"),
"rs429358_38_T": models.Allele(location=locations["rs429358_38"], state=models.SequenceState(sequence="T", type="SequenceState"), type="Allele"),
"rs7412_38_C": models.Allele(location=locations["rs7412_38"], state=models.SequenceState(sequence="C", type="SequenceState"), type="Allele"),
"rs7412_38_T": models.Allele(location=locations["rs7412_38"], state=models.SequenceState(sequence="T", type="SequenceState"), type="Allele"),
}
haplotypes = {
"APOE-ε1": models.Haplotype(members=[alleles["rs429358_38_C"], alleles["rs7412_38_T"]]),
"APOE-ε2": models.Haplotype(members=[alleles["rs429358_38_T"], alleles["rs7412_38_T"]]),
"APOE-ε3": models.Haplotype(members=[alleles["rs429358_38_T"], alleles["rs7412_38_C"]]),
"APOE-ε4": models.Haplotype(members=[alleles["rs429358_38_C"], alleles["rs7412_38_C"]]),
}
ppo(haplotypes["APOE-ε1"])
# Generated a computed identifier for the Haplotype
ga4gh_identify(haplotypes["APOE-ε1"])
# The order of haplotype members does not change GA4GH Computed Identifier
apoe1_alleles = (alleles["rs7412_38_T"], alleles["rs429358_38_C"])
#note: this should be the same, but currently is not?
print(ga4gh_identify(models.Haplotype(members=apoe1_alleles, type="Haplotype")))
print(ga4gh_identify(models.Haplotype(members=tuple(reversed(apoe1_alleles)), type="Haplotype")))
# assert (ga4gh_identify(models.Haplotype(members=apoe1_alleles)) ==
# ga4gh_identify(models.Haplotype(members=tuple(reversed(apoe1_alleles)))))
# Haplotype members may be referenced (rather than inline) for more concise representations
apoe1_haplotype_ref = vrs_enref(haplotypes["APOE-ε1"])
ppo(apoe1_haplotype_ref)
```
| PypiClean |
/gtl_ville-2.1.51-py3-none-any.whl/gv_vendor/pylr/decoder.py | from __future__ import print_function
from collections import namedtuple
from itertools import groupby, chain
from gv_vendor.pylr import rating as Rating
from gv_vendor.pylr.constants import (LocationType,
WITH_LINE_DIRECTION,
AGAINST_LINE_DIRECTION,
BINARY_VERSION_2,
BINARY_VERSION_3)
'''The max node distance'''
MAX_NODE_DIST = 100
'''The node factor.'''
NODE_FACTOR = 3
'''The line factor'''
LINE_FACTOR = 3
'''The FRC variance'''
FRC_VARIANCE = 2
'''The minimum accepted rating'''
MIN_ACC_RATING = 800
'''Max number of retries'''
MAX_NR_RETRIES = 3
'''The same line degradation'''
SAME_LINE_DEGRAD = 0.10
'''Connected route increase'''
CONNECT_ROUTE_INC = 0.10
'''DNP variance'''
DNP_VARIANCE = 118
'''FRC rating'''
FRC_RATING = {Rating.EXCELLENT: 100,
Rating.GOOD: 75,
Rating.AVERAGE: 50,
Rating.POOR: 0}
'''FRC_Intervals'''
FRC_INTERVALS = {Rating.EXCELLENT: 0,
Rating.GOOD: 1,
Rating.AVERAGE: 2}
'''FOW rating'''
FOW_RATING = {Rating.EXCELLENT: 100,
Rating.GOOD: 50,
Rating.AVERAGE: 50,
Rating.POOR: 25}
'''Bearing rating'''
BEAR_RATING = {Rating.EXCELLENT: 100,
Rating.GOOD: 50,
Rating.AVERAGE: 25,
Rating.POOR: 0}
# Compare bearing using integer bearing values
'''Bearing intervals'''
BEAR_INTERVALS = {Rating.EXCELLENT: 0,
Rating.GOOD: 1,
Rating.AVERAGE: 2}
'''Max bearing diff'''
MAX_BEAR_DIFF = 8 # 90deg
HALF_CIRCLE = 16 # 180deg
FULL_CIRCLE = 32 # 360deg
'''Calc affected lines'''
CALC_AFFECTED_LINES = False
'''Lines directly factor'''
LINES_DIRECTLY_FACTOR = 0.95
'''CompTime4Cache'''
COMP_TIME_4_CACHE = 0
# ----------------
# Special values
# ----------------
# Define empty route
EMPTY_ROUTE = ((), 0)
# ----------------
# Decoder exceptions
# ----------------
class DecoderError(Exception):
pass
class DecoderInvalidLocation(DecoderError):
pass
class DecoderNodesNotFound(DecoderError):
pass
class DecoderNoCandidateLines(DecoderError):
pass
class RouteSearchException(DecoderError):
pass
class RouteNotFoundException(RouteSearchException):
pass
class InvalidRouteLength(RouteNotFoundException):
pass
class RouteConstructionFailed(RouteSearchException):
pass
# ----------------
# Map database
# ----------------
class MapDatabase(object):
""" Abstract interface used by the decoder object.
        Implementors of a map database should inherit from this abstract class.
        MapDatabase defines two data structures as named tuples:
        :py:class:`MapDatabase.Node`
        :py:class:`MapDatabase.Line`
        These structures may be extended by MapDatabase implementors according to their specific needs.
"""
Node = namedtuple('Node', ('distance',))
"""
.. attribute:: distance
The distance from the search location
"""
Line = namedtuple('Line', ('id', 'bear', 'frc', 'fow', 'len', 'projected_len'))
"""
.. attribute:: id
id of the line
.. attribute:: bear
the bearing according to the start node
.. attribute:: frc
the frc of the line
.. attribute:: fow
the fow of the line
.. attribute:: projected_len
return the value of the projected length of the search location
(i.e) the distance between the start node and the projection of the
point given by the search coordinates. None if the line is not
projected
"""
def connected_lines(self, node, frc_max, beardir):
""" Return connected lines to/from the node 'node'
:param frc_max: the frc max of the requested lines
:param beardir: select the inwards (AGAINST_LINE_DIRECTION)
or the outwards (WITH_LINE_DIRECTION) connected lines
return an iterable of objects of type Line
"""
raise NotImplementedError("MapDatabase:connectedlines")
def find_closeby_nodes(self, coords, max_node_dist):
""" Look for all nodes at less than max_node_dist from
the given coordinates
        :param coords: a tuple or iterable holding location coordinates
        :param max_node_dist: max distance to search for nodes
return an iterable of Node objects
"""
raise NotImplementedError("MapDatabase:find_closeby_nodes")
def find_closeby_lines(self, coords, max_node_dist, frc_max, beardir):
""" Look for all lines at less than max_node_dist from
the given coordinates
        :param coords: a tuple or iterable holding location coordinates
        :param max_node_dist: max distance to search for nodes
:param frc_max: the frc max of the requested line
:param beardir: select the inwards (AGAINST_LINE_DIRECTION)
or the outwards (WITH_LINE_DIRECTION) connected lines
return an iterable of Line objects
"""
raise NotImplementedError("MapDatabase:find_closeby_lines")
def calculate_route(self, l1, l2, maxdist, lfrc, islastrp):
""" Calculate the shortest paths between two lines
:param l1: the first candidate line to begin the search from
:param l2: the second candidate line to stop the search to
:param maxdist: The maximum distance allowed
:param lfrc: The least frc allowed
:param islastrp: True if we are calculating the route to the last
reference point
:return: (route, length) where route is an iterable holding the lines found
and length the calculated length of the route
The method must throw a RouteNotFoundException or a RouteConstructionFailed
exception in case a route cannot be calculated
"""
raise NotImplementedError("MapDatabase:calculate_route")
# ----------------
# Decoder
# ----------------
class DecoderBase(object):
pass
class RatingCalculator(object):
""" Implement default rating calculation
"""
RatingDetails = namedtuple('RatingDetails', ('bear_rating', 'frc_rating', 'fow_rating'))
def _frc_rating(self, frc, linefrc):
diff = abs(frc - linefrc)
for cat in (Rating.EXCELLENT, Rating.GOOD, Rating.AVERAGE):
if diff <= FRC_INTERVALS[cat]:
return FRC_RATING[cat]
return FRC_RATING[Rating.POOR]
def _fow_rating(self, fow, linefow):
return FOW_RATING[Rating.get_fow_rating_category(fow, linefow)]
def _distance_rating(self, dist):
return max(0, self._max_node_dist - round(dist))
def _bear_rating(self, bearing, linebear):
diff = abs(bearing - linebear)
if diff > HALF_CIRCLE:
diff = FULL_CIRCLE - diff
if diff > MAX_BEAR_DIFF:
return -1
for cat in (Rating.EXCELLENT, Rating.GOOD, Rating.AVERAGE):
if diff <= BEAR_INTERVALS[cat]:
return BEAR_RATING[cat]
return BEAR_RATING[Rating.POOR]
def rating(self, lrp, line, dist):
node_rating = self._distance_rating(dist)
bear_rating = self._bear_rating(lrp.bear, line.bear)
if bear_rating < 0:
return -1
line_rating = self._frc_rating(lrp.frc, line.frc) +\
self._fow_rating(lrp.fow, line.fow) +\
bear_rating
return node_rating*NODE_FACTOR + line_rating*LINE_FACTOR
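    # Worked example (illustrative, assuming the default MAX_NODE_DIST of 100
    # and that an identical fow rates as EXCELLENT): a candidate line found
    # 20 m away, with frc equal to the LRP's frc and an exact bearing match,
    # scores
    #     node_rating = max(0, 100 - 20)                      = 80
    #     line_rating = 100 (frc) + 100 (fow) + 100 (bear)    = 300
    #     rating      = 80 * NODE_FACTOR + 300 * LINE_FACTOR  = 240 + 900 = 1140
    # which is above MIN_ACC_RATING (800), so the line is kept as a candidate.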
def rating_details(self, lrp, line):
details = self.RatingDetails(bear_rating=self._bear_rating(lrp.bear, line.bear),
frc_rating=self._frc_rating(lrp.frc, line.frc),
fow_rating=self._fow_rating(lrp.fow, line.fow))
return details
def calculate_pairs(lines1, lines2, lastline, islastrp, islinelocation):
""" Each LRP might have several candidate lines. In order to find the best
        pair to start with, each pair needs to be investigated and rated. The
rating process includes:
- score of the first candidate line
- score of the second candidate line
- connection to the previously calculated path
- candidate lines shall not be equal
"""
# check connection with previously calculated path
for l1, score1 in lines1:
if lastline is not None and l1.id == lastline.id:
score1 += CONNECT_ROUTE_INC * score1
for l2, score2 in lines2:
if not islastrp and islinelocation and l2.id == l1.id:
score2 -= SAME_LINE_DEGRAD * score2
yield (l1, l2), score1*score2
# Check for single line coverage
def singleline(candidates):
bests = (lines[0] for lrp, lines in candidates)
    sl, _ = next(bests)
for l, _ in bests:
if l.id != sl.id:
return None
return sl
class ClassicDecoder(DecoderBase, RatingCalculator):
""" OpenLR location decoder that use an abstract map object
See :py:class:`MapDatabase` for map database interface.
"""
def __init__(self, map_database,
max_node_distance=MAX_NODE_DIST,
frc_variance=FRC_VARIANCE,
dnp_variance=DNP_VARIANCE,
minimum_acc_rating=MIN_ACC_RATING,
find_lines_directly=True,
max_retry=MAX_NR_RETRIES,
verbose=False,
logger=lambda m: print(m)):
""" Initialize the decoder
:param map_database: a map database instance
        :param max_node_distance: the maximum distance for candidate nodes
:param frc_variance: allowed frc variances
:param minimum_acc_rating: minimum acceptance rating for candidate lines
:param find_lines_directly: enable direct search of candidate lines
from lrp projection
:param max_retry: maximum number of retry when searching for route
between consecutive lines
"""
self._mdb = map_database
self._max_node_dist = max_node_distance
self._frc_var = frc_variance
self._min_acc_rating = minimum_acc_rating
self._max_retry = max_retry
self._dnp_variance = dnp_variance
self.verbose = verbose
self.find_lines_directly = find_lines_directly
self.logger = logger
@property
def database(self):
return self._mdb
def find_candidate_nodes(self, lrp):
""" Find candidate nodes for one location reference point.
        The max_node_distance parameter configures the search radius for
        candidate nodes.
"""
return self._mdb.find_closeby_nodes(lrp.coords, self._max_node_dist)
def find_candidate_lines(self, lrp, beardir=WITH_LINE_DIRECTION, with_details=False):
""" Find candidate lines for each location reference point. The candidate
lines will be rated indicating how good they match the LRP attributes.
The method will be configured by OpenLR properties.
The connectedlines method takes a 'beardir' argument indicating
inwards (AGAINST_LINE_DIRECTION) or outwards (WITH_LINE_DIRECTION) arcs
"""
frc_max = lrp.frc + self._frc_var
nodes = list(self.find_candidate_nodes(lrp))
rating_f = self.rating
min_acc = self._min_acc_rating
        rating_key = lambda lr: lr[1]
        group_key = lambda lr: lr[0].id
candidates = ((l, rating_f(lrp, l, n.distance)) for n in nodes for l in self._mdb.connected_lines(
n, frc_max=frc_max, beardir=beardir))
if self.find_lines_directly:
candidates = chain(candidates, self.find_candidate_lines_directly(
lrp, frc_max=frc_max, alreadyfound=bool(nodes), beardir=beardir))
candidates = (max(vals, key=rating_key) for k, vals in groupby(
sorted(candidates, key=group_key), key=group_key))
if not with_details:
            candidates = filter(lambda lr: lr[1] >= min_acc, candidates)
lines = sorted(candidates, key=rating_key, reverse=True)
if not with_details and not lines:
raise DecoderNoCandidateLines("No candidate lines found....")
if with_details:
            lines = [(l, r, self.rating_details(lrp, l)) for l, r in lines]
return lines
def find_candidate_lines_directly(self, lrp, frc_max, alreadyfound=False, beardir=WITH_LINE_DIRECTION):
""" Find candidate lines directly if no node or line has been detected so
far. This method tries to find all lines which are around the LRP
coordinate. The coordinate will be projected onto the line and the
distance between that projection point and the coordinate shall be small
(according to the encoder properties). All lines will be rated and
proposed as candidate lines for the LRP.
:param lrp: the location reference point (having no candidate lines so far)
:param alreadyfound: the already found lines
"""
rating_f = self.rating
lines = self._mdb.find_closeby_lines(lrp.coords, self._max_node_dist, frc_max=frc_max, beardir=beardir)
for line, dist in lines:
rating = rating_f(lrp, line, dist)
if alreadyfound:
rating = round(LINES_DIRECTLY_FACTOR * rating)
yield line, rating
def resolve_route(self, location, candidates):
""" Resolves the shortest-paths between each subsequent pair of location
reference points. The method orders the candidate line pairs for two
subsequent LRPs and starts with the best rated pair to calculate a
shortest-path in between. The method further checks the minimum and
maximum distance criteria for the calculated shortest-path. If one of the
criteria is not fulfilled the methods tries the next best candidate line
pair. If no further pair is available the method fails. For each
subsequent pair of LRPs the start LRP will hold the calculated route
after finishing this method.
:param location: the location
:param candidates: an iterable holding tuples of (lrp,candidate_lines)
"""
if not isinstance(candidates, (list, tuple)):
candidates = tuple(candidates)
sl = singleline(candidates)
if sl is not None:
return (((sl,), sl.len),)
islinelocation = (location.type == LocationType.LINE_LOCATION)
lastlrp = location.llrp
lastline, prevlrp = None, None
routes = ()
nr_retry = self._max_retry+1
# iterate over all LRP pairs
for i, (lrp, lines) in enumerate(candidates[:-1]):
lrpnext, nextlines = candidates[i+1]
islastrp = lrpnext is lastlrp
            pairs = sorted(calculate_pairs(lines, nextlines, lastline,
                                           islastrp, islinelocation), key=lambda pr: pr[1], reverse=True)
# check candidate pairs
for (l1, l2), _ in pairs[:nr_retry]:
if self.verbose:
self.logger("openlr: computing route ({},{})".format(l1.id, l2.id))
# handle same start/end.
if l1.id == l2.id:
if islastrp:
route = ((l1,), l1.len)
else:
# Skip this
route = EMPTY_ROUTE
break # search finished
try:
# calculate route between start and end and a maximum distance
route = self._calculate_route(l1, l2, lrp, islastrp)
# Handle change in start index
if lastline is not None and lastline.id != l1.id:
                        routes = self._handle_start_change(routes, l1, lrp, prevlrp)
break # search finished
except (RouteNotFoundException, RouteConstructionFailed):
# Let a chance to retry
route = None
if route is None:
raise RouteNotFoundException("Route not found")
if route is not EMPTY_ROUTE:
routes += (route,)
if self.verbose:
# Display route
lines, length = route
self.logger("openlr: resolved route ({},{}):{} length={}".format(
l1.id, l2.id, tuple(l.id for l in lines), length))
prevlrp, lastline = lrp, l2
return routes
    def _handle_start_change(self, routes, lend, lrp, prevlrp):
        """ Recompute the previous route so that it ends on the new start line
            and return the updated tuple of routes
        """
        lstart = routes[-1][0][0]
        if self.verbose:
            self.logger("openlr: recomputing last route between {} and {}".format(lstart.id, lend.id))
        route = self._calculate_route(lstart, lend, prevlrp, islastrp=False)
        return routes[:-1] + (route, )
def _calculate_route(self, l1, l2, lrp, islastrp):
""" Calculate shortest-path between two lines
"""
# determine the minimum frc for the path to be calculated
lfrc = lrp.lfrcnp + self._frc_var
# Calculates the maximum allowed distance between two location reference
# points taking into account that at least one LRP might be projected onto
# a line and the maximum distance must be adjusted as the route calculation
# can only stop at distances between real nodes.
maxdist = lrp.dnp + self._dnp_variance
# check if LRPs were projected on line (i.e obtained directly)
# if yes, add line length to maxDistance (complete length as route
# search stops at nodes)
if l1.projected_len is not None:
maxdist += l1.len
if l2.projected_len is not None:
maxdist += l2.len
# calculate route between start and end and a maximum distance
route, length = self._mdb.calculate_route(l1, l2, maxdist, lfrc, islastrp)
# adjust and check the route length
if l2.projected_len is not None:
if islastrp:
length -= l2.len
length += l2.projected_len
# check the minimum distance criteria
if max(0, lrp.dnp - self._dnp_variance) > length:
raise InvalidRouteLength("openlr: route: {} to {}, calculated length:{}, lrp:{}".format(
l1.id, l2.id, length, lrp))
return route, length
def calculate_offsets(self, location, routes):
# Compute offsets
if location.version == BINARY_VERSION_2:
return location.poffs, location.noffs
elif location.version == BINARY_VERSION_3:
(head, head_len), (tail, tail_len) = routes[0], routes[-1]
head_start_line = head[0]
cutstart = 0
if head_start_line.projected_len is not None:
cutstart = head_start_line.projected_len
head_len -= cutstart
tail_end_line = tail[-1]
cutend = 0
if tail_end_line.projected_len is not None:
cutend = tail_end_line.len - tail_end_line.projected_len
tail_len -= cutend
if len(routes) == 1:
head_len -= cutend
tail_len -= cutstart
else:
# get the first line of the next sub-path
head_end_line = (routes[1][0])[0]
if head_end_line.projected_len is not None:
# there is another part on the first line of the next
# sub-path that relates to the head part
head_len += head_end_line.projected_len
# get the first line of the last sub-path
tail_start_line = tail[0]
if tail_start_line.projected_len is not None:
# not everything of the tail belongs to it, there is a snippet
# at the start that refers to the former sub-path
tail_len -= tail_start_line.projected_len
return (round(location.poffs * head_len / 100.0),
round(location.noffs * tail_len / 100.0))
@staticmethod
def _prune(pruned, off, index):
prunedlen = 0
while len(pruned) > 1:
line = pruned[index]
length = line.len
if prunedlen + length > off:
break
else:
prunedlen += length
pruned.pop(index)
return off - prunedlen
@staticmethod
def _calculated_path(pruned, poff, noff=0):
length = sum(l.len for l in pruned)
return [l.id for l in pruned], length, poff, noff
def decode_line(self, location):
""" Decode a line from a list of a location reference points
return (edges, length, poffset, noffset)
"""
# assert location.type == LocationType.LINE_LOCATION
def candidates():
yield location.flrp, self.find_candidate_lines(location.flrp)
for lrp in location.points:
yield lrp, self.find_candidate_lines(lrp)
yield location.llrp, self.find_candidate_lines(location.llrp, AGAINST_LINE_DIRECTION)
routes = self.resolve_route(location, candidates())
poff, noff = self.calculate_offsets(location, routes)
route_length = sum(length for _, length in routes)
offsum = noff + poff
# check for too long offset values
if offsum >= 2*route_length:
raise DecoderInvalidLocation("Invalid offsets")
# prune path
        # The positive offset will be used to shorten the
        # location from the beginning and the negative offset will be used to
        # shorten the location from the end.
        # The pruning will always stop at nodes and there will be no pruning of
        # parts of lines. The remaining offsets can be accessed from the returned
        # decoded location object. Remaining offsets which are below the length
        # variance parameter will be ignored and set to 0
if offsum > route_length:
# offsets exceed location length
# decrease offset values
ratio = route_length / float(offsum)
poff = round(poff * ratio)
noff = round(noff * ratio)
# retainable length shall be 1 meter
if poff > noff:
poff -= 1
else:
noff -= 1
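        # Worked example of the scaling above (illustrative numbers):
        # route_length=100, poff=70, noff=50 -> offsum=120 > 100, so ratio=100/120,
        # poff=round(58.3)=58, noff=round(41.7)=42; poff > noff, hence poff=57 and
        # 57+42=99, retaining 1 meter of the route.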
pruned = list(chain(*(lines for lines, _ in routes)))
if poff > 0:
poff = self._prune(pruned, poff, 0)
if noff > 0:
noff = self._prune(pruned, noff, -1)
return self._calculated_path(pruned, poff, noff)
def decode_point(self, location):
""" Decode a point location from a couple of lrps
return (edges, length, poffset)
"""
# assert location.type in (LocationType.POINT_LOCATION_TYPES, LocationType.POI_WITH_ACCESS_POINT)
routes = self.resolve_route(location, ((location.flrp, self.find_candidate_lines(location.flrp)),
(location.llrp, self.find_candidate_lines(location.llrp, AGAINST_LINE_DIRECTION))))
head, head_len = routes[0]
lstart, lend = head[0], head[-1]
prunedlen = 0
poff = 0
if lstart.projected_len is not None:
poff = lstart.projected_len
prunedlen = poff
if lend.projected_len is not None:
prunedlen += lend.len - lend.projected_len
poff = round(location.poffs*(head_len-prunedlen)/100.0)+poff
if poff > head_len:
poff = head_len
pruned = list(head)
poff = self._prune(pruned, poff, 0)
return self._calculated_path(pruned, poff)
def decode(self, location):
if location.type == LocationType.LINE_LOCATION:
return self.decode_line(location)
else:
return self.decode_point(location) | PypiClean |
/artellapipe-libs-usd-0.0.5.tar.gz/artellapipe-libs-usd-0.0.5/artellapipe/libs/usd/externals/python/2/OpenGL/raw/GLX/SGIX/fbconfig.py | from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GLX import _types as _cs
# End users want this...
from OpenGL.raw.GLX._types import *
from OpenGL.raw.GLX import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GLX_SGIX_fbconfig'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GLX,'GLX_SGIX_fbconfig',error_checker=_errors._error_checker)
GLX_COLOR_INDEX_BIT_SGIX=_C('GLX_COLOR_INDEX_BIT_SGIX',0x00000002)
GLX_COLOR_INDEX_TYPE_SGIX=_C('GLX_COLOR_INDEX_TYPE_SGIX',0x8015)
GLX_DRAWABLE_TYPE_SGIX=_C('GLX_DRAWABLE_TYPE_SGIX',0x8010)
GLX_FBCONFIG_ID_SGIX=_C('GLX_FBCONFIG_ID_SGIX',0x8013)
GLX_PIXMAP_BIT_SGIX=_C('GLX_PIXMAP_BIT_SGIX',0x00000002)
GLX_RENDER_TYPE_SGIX=_C('GLX_RENDER_TYPE_SGIX',0x8011)
GLX_RGBA_BIT_SGIX=_C('GLX_RGBA_BIT_SGIX',0x00000001)
GLX_RGBA_TYPE_SGIX=_C('GLX_RGBA_TYPE_SGIX',0x8014)
GLX_SCREEN_EXT=_C('GLX_SCREEN_EXT',0x800C)
GLX_WINDOW_BIT_SGIX=_C('GLX_WINDOW_BIT_SGIX',0x00000001)
GLX_X_RENDERABLE_SGIX=_C('GLX_X_RENDERABLE_SGIX',0x8012)
@_f
@_p.types(ctypes.POINTER(_cs.GLXFBConfigSGIX),ctypes.POINTER(_cs.Display),_cs.c_int,ctypes.POINTER(_cs.c_int),ctypes.POINTER(_cs.c_int))
def glXChooseFBConfigSGIX(dpy,screen,attrib_list,nelements):pass
@_f
@_p.types(_cs.GLXContext,ctypes.POINTER(_cs.Display),_cs.GLXFBConfigSGIX,_cs.c_int,_cs.GLXContext,_cs.Bool)
def glXCreateContextWithConfigSGIX(dpy,config,render_type,share_list,direct):pass
@_f
@_p.types(_cs.GLXPixmap,ctypes.POINTER(_cs.Display),_cs.GLXFBConfigSGIX,_cs.Pixmap)
def glXCreateGLXPixmapWithConfigSGIX(dpy,config,pixmap):pass
@_f
@_p.types(_cs.c_int,ctypes.POINTER(_cs.Display),_cs.GLXFBConfigSGIX,_cs.c_int,ctypes.POINTER(_cs.c_int))
def glXGetFBConfigAttribSGIX(dpy,config,attribute,value):pass
@_f
@_p.types(_cs.GLXFBConfigSGIX,ctypes.POINTER(_cs.Display),ctypes.POINTER(_cs.XVisualInfo))
def glXGetFBConfigFromVisualSGIX(dpy,vis):pass
@_f
@_p.types(ctypes.POINTER(_cs.XVisualInfo),ctypes.POINTER(_cs.Display),_cs.GLXFBConfigSGIX)
def glXGetVisualFromFBConfigSGIX(dpy,config):pass | PypiClean |
/metaapi_cloud_risk_management_sdk-2.0.0.tar.gz/metaapi_cloud_risk_management_sdk-2.0.0/lib/riskManagement.py | from .clients.httpClient import HttpClient
from .clients.domain_client import DomainClient
from .clients.equityTracking.equityTracking_client import EquityTrackingClient
from .models import format_error
from typing_extensions import TypedDict
from typing import Optional
class RetryOpts(TypedDict):
retries: Optional[int]
"""Maximum amount of request retries, default value is 5."""
minDelayInSeconds: Optional[float]
"""Minimum delay in seconds until request retry, default value is 1."""
maxDelayInSeconds: Optional[float]
"""Maximum delay in seconds until request retry, default value is 30."""
class RiskManagementOptions(TypedDict):
"""Risk management SDK options."""
domain: Optional[str]
"""Domain to connect to."""
extendedTimeout: Optional[float]
"""Timeout for extended http requests in seconds."""
requestTimeout: Optional[float]
"""Timeout for http requests in seconds."""
retryOpts: Optional[RetryOpts]
"""Options for request retries."""
class RiskManagement:
"""MetaApi risk management API SDK."""
def __init__(self, token: str, opts: RiskManagementOptions = None):
"""Inits class instance.
Args:
token: Authorization token.
opts: Connection options.
"""
opts: RiskManagementOptions = opts or {}
domain = opts['domain'] if 'domain' in opts else 'agiliumtrade.agiliumtrade.ai'
request_timeout = opts['requestTimeout'] if 'requestTimeout' in opts else 10
request_extended_timeout = opts['extendedTimeout'] if 'extendedTimeout' in opts else 70
retry_opts = opts['retryOpts'] if 'retryOpts' in opts else {}
http_client = HttpClient(request_timeout, request_extended_timeout, retry_opts)
self._domainClient = DomainClient(http_client, token, 'risk-management-api-v1', domain)
self._equityTrackingClient = EquityTrackingClient(self._domainClient)
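    # A minimal usage sketch (the token and option values below are placeholders):
    #
    #   risk_management = RiskManagement('your-metaapi-auth-token', {'requestTimeout': 60})
    #   equity_tracking = risk_management.risk_management_api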
@property
def risk_management_api(self) -> EquityTrackingClient:
"""Returns CopyFactory configuration API.
Returns:
Configuration API.
"""
return self._equityTrackingClient
@staticmethod
def format_error(err: Exception):
"""Formats and outputs metaApi errors with additional information.
Args:
err: Exception to process.
"""
return format_error(err) | PypiClean |
/dataone.gmn-3.5.2-py3-none-any.whl/d1_gmn/app/views/decorators.py | """View decorators."""
import functools
import d1_common.const
import d1_common.types
import d1_common.types.exceptions
import django.conf
import d1_gmn.app.auth
import d1_gmn.app.did
import d1_gmn.app.revision
import d1_gmn.app.views.assert_db
import d1_gmn.app.views.util
# ------------------------------------------------------------------------------
# Series ID (SID)
# ------------------------------------------------------------------------------
def resolve_sid(f):
"""View handler decorator that adds SID resolve and PID validation.
- For v1 calls, assume that ``did`` is a pid and raise NotFound exception if it's
not valid.
- For v2 calls, if DID is a valid PID, return it. If not, try to resolve it as a
SID and, if successful, return the new PID. Else, raise NotFound exception.
"""
@functools.wraps(f)
def wrapper(request, did, *args, **kwargs):
pid = resolve_sid_func(request, did)
return f(request, pid, *args, **kwargs)
return wrapper
def resolve_sid_func(request, did):
if d1_gmn.app.views.util.is_v1_api(request):
return d1_gmn.app.did.resolve_sid_v1(did)
elif d1_gmn.app.views.util.is_v2_api(request):
return d1_gmn.app.did.resolve_sid_v2(did)
else:
assert False, "Unable to determine API version"
def decode_did(f):
"""View handler decorator that decodes "%2f" ("/") in SID or PID extracted from URL
path segment by Django."""
@functools.wraps(f)
def wrapper(request, did, *args, **kwargs):
return f(request, decode_path_segment(did), *args, **kwargs)
return wrapper
def decode_path_segment(s):
"""Django decodes URL elements before passing them to views, but passes "%2f" ("/")
through undecoded.
Why..?
"""
return s.replace("%2f", "/").replace("%2F", "/")
# ------------------------------------------------------------------------------
# Auth
# ------------------------------------------------------------------------------
# The following decorators check if the subject in the provided client side
# certificate has the permissions required to perform a given action. If
# the required permissions are not present, a NotAuthorized exception is
# returned to the client.
#
# The decorators require the first argument to be request and the second to
# be PID.
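# For example (hypothetical view handler, illustration only):
#
#   @read_permission
#   def get_system_metadata(request, pid):
#       ...
#
# would raise NotAuthorized for callers whose session subjects lack read access
# to the given pid.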
def trusted_permission(f):
"""Access only by D1 infrastructure."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
trusted(request)
return f(request, *args, **kwargs)
return wrapper
def list_objects_access(f):
"""Access to listObjects() controlled by settings.PUBLIC_OBJECT_LIST."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if not django.conf.settings.PUBLIC_OBJECT_LIST:
trusted(request)
return f(request, *args, **kwargs)
return wrapper
def get_log_records_access(f):
"""Access to getLogRecords() controlled by settings.PUBLIC_LOG_RECORDS."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if not django.conf.settings.PUBLIC_LOG_RECORDS:
trusted(request)
return f(request, *args, **kwargs)
return wrapper
def trusted(request):
if not d1_gmn.app.auth.is_trusted_subject(request):
raise d1_common.types.exceptions.NotAuthorized(
0,
'Access allowed only for trusted subjects. session_subjects="{}", '
'trusted_subjects="{}"'.format(
d1_gmn.app.auth.format_session_subjects(request),
d1_gmn.app.auth.get_trusted_subjects_string(),
),
)
def assert_create_update_delete_permission(f):
"""Access only by subjects with Create/Update/Delete permission and by trusted
infrastructure (CNs)."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
d1_gmn.app.auth.assert_create_update_delete_permission(request)
return f(request, *args, **kwargs)
return wrapper
def authenticated(f):
"""Access only with a valid session."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if d1_common.const.SUBJECT_AUTHENTICATED not in request.all_subjects_set:
raise d1_common.types.exceptions.NotAuthorized(
0,
"Access allowed only for authenticated subjects. Please reconnect with "
'a valid DataONE session certificate. session_subjects="{}"'.format(
d1_gmn.app.auth.format_session_subjects(request)
),
)
return f(request, *args, **kwargs)
return wrapper
def verified(f):
"""Access only with a valid session where the primary subject is verified."""
@functools.wraps(f)
def wrapper(request, *args, **kwargs):
if d1_common.const.SUBJECT_VERIFIED not in request.all_subjects_set:
raise d1_common.types.exceptions.NotAuthorized(
0,
"Access allowed only for verified accounts. Please reconnect with a "
"valid DataONE session certificate in which the identity of the "
'primary subject has been verified. session_subjects="{}"'.format(
d1_gmn.app.auth.format_session_subjects(request)
),
)
return f(request, *args, **kwargs)
return wrapper
def required_permission(f, level):
"""Assert that subject has access at given level or higher for object."""
@functools.wraps(f)
def wrapper(request, pid, *args, **kwargs):
d1_gmn.app.auth.assert_allowed(request, level, pid)
return f(request, pid, *args, **kwargs)
return wrapper
def changepermission_permission(f):
"""Assert that subject has changePermission or high for object."""
return required_permission(f, d1_gmn.app.auth.CHANGEPERMISSION_LEVEL)
def write_permission(f):
"""Assert that subject has write permission or higher for object."""
return required_permission(f, d1_gmn.app.auth.WRITE_LEVEL)
def read_permission(f):
"""Assert that subject has read permission or higher for object."""
return required_permission(f, d1_gmn.app.auth.READ_LEVEL) | PypiClean |
/ansys_dpf_core-0.9.0.tar.gz/ansys_dpf_core-0.9.0/src/ansys/dpf/core/operators/math/scale_by_field.py | from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
class scale_by_field(Operator):
"""Scales a field (in 0) by a scalar field (in 1). If one field's scoping
has an 'overall' location, then this field's values are applied on
the other field entirely.
Parameters
----------
fieldA : Field or FieldsContainer
Field or fields container with only one field
is expected
fieldB : Field or FieldsContainer
Field or fields container with only one field
is expected
Examples
--------
>>> from ansys.dpf import core as dpf
>>> # Instantiate operator
>>> op = dpf.operators.math.scale_by_field()
>>> # Make input connections
>>> my_fieldA = dpf.Field()
>>> op.inputs.fieldA.connect(my_fieldA)
>>> my_fieldB = dpf.Field()
>>> op.inputs.fieldB.connect(my_fieldB)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.scale_by_field(
... fieldA=my_fieldA,
... fieldB=my_fieldB,
... )
>>> # Get output data
>>> result_field = op.outputs.field()
"""
def __init__(self, fieldA=None, fieldB=None, config=None, server=None):
super().__init__(name="scale_by_field", config=config, server=server)
self._inputs = InputsScaleByField(self)
self._outputs = OutputsScaleByField(self)
if fieldA is not None:
self.inputs.fieldA.connect(fieldA)
if fieldB is not None:
self.inputs.fieldB.connect(fieldB)
@staticmethod
def _spec():
description = """Scales a field (in 0) by a scalar field (in 1). If one field's scoping
has an 'overall' location, then this field's values are
applied on the other field entirely."""
spec = Specification(
description=description,
map_input_pin_spec={
0: PinSpecification(
name="fieldA",
type_names=["field", "fields_container"],
optional=False,
document="""Field or fields container with only one field
is expected""",
),
1: PinSpecification(
name="fieldB",
type_names=["field", "fields_container"],
optional=False,
document="""Field or fields container with only one field
is expected""",
),
},
map_output_pin_spec={
0: PinSpecification(
name="field",
type_names=["field"],
optional=False,
document="""""",
),
},
)
return spec
@staticmethod
def default_config(server=None):
"""Returns the default config of the operator.
This config can then be changed to the user needs and be used to
instantiate the operator. The Configuration allows to customize
how the operation will be processed by the operator.
Parameters
----------
server : server.DPFServer, optional
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
return Operator.default_config(name="scale_by_field", server=server)
@property
def inputs(self):
"""Enables to connect inputs to the operator
Returns
--------
inputs : InputsScaleByField
"""
return super().inputs
@property
def outputs(self):
"""Enables to get outputs of the operator by evaluating it
Returns
--------
outputs : OutputsScaleByField
"""
return super().outputs
class InputsScaleByField(_Inputs):
"""Intermediate class used to connect user inputs to
scale_by_field operator.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field()
>>> my_fieldA = dpf.Field()
>>> op.inputs.fieldA.connect(my_fieldA)
>>> my_fieldB = dpf.Field()
>>> op.inputs.fieldB.connect(my_fieldB)
"""
def __init__(self, op: Operator):
super().__init__(scale_by_field._spec().inputs, op)
self._fieldA = Input(scale_by_field._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._fieldA)
self._fieldB = Input(scale_by_field._spec().input_pin(1), 1, op, -1)
self._inputs.append(self._fieldB)
@property
def fieldA(self):
"""Allows to connect fieldA input to the operator.
Field or fields container with only one field
is expected
Parameters
----------
my_fieldA : Field or FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field()
>>> op.inputs.fieldA.connect(my_fieldA)
>>> # or
>>> op.inputs.fieldA(my_fieldA)
"""
return self._fieldA
@property
def fieldB(self):
"""Allows to connect fieldB input to the operator.
Field or fields container with only one field
is expected
Parameters
----------
my_fieldB : Field or FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field()
>>> op.inputs.fieldB.connect(my_fieldB)
>>> # or
>>> op.inputs.fieldB(my_fieldB)
"""
return self._fieldB
class OutputsScaleByField(_Outputs):
"""Intermediate class used to get outputs from
scale_by_field operator.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field()
>>> # Connect inputs : op.inputs. ...
>>> result_field = op.outputs.field()
"""
def __init__(self, op: Operator):
super().__init__(scale_by_field._spec().outputs, op)
self._field = Output(scale_by_field._spec().output_pin(0), 0, op)
self._outputs.append(self._field)
@property
def field(self):
"""Allows to get field output of the operator
Returns
----------
my_field : Field
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.scale_by_field()
>>> # Connect inputs : op.inputs. ...
>>> result_field = op.outputs.field()
""" # noqa: E501
return self._field | PypiClean |
/aeroapi_python-1.1.4-py3-none-any.whl/aeroapi_python/History.py | from typing import Optional, Dict, Any
from .APICaller import APICaller
class History:
"""
    A class for interacting with the AeroAPI History API.
Attributes:
api_caller (APICaller): An instance of the `APICaller` class.
endpoint (str): The API endpoint for history.
Methods:
__init__(self, api_caller: APICaller) -> None:
Initializes a `History` instance.
flight_map(self, flight_id: str, height: int = 480, width: int = 640, layer_on: Optional[str] = None,
layer_off: Optional[str] = None, show_data_block: Optional[bool] = None,
airports_expand_view: Optional[bool] = None, show_airports: Optional[bool] = None,
bounding_box: Optional[str] = None) -> Optional[Dict[str, Any]]:
Retrieves a map of a specific flight.
flight_route(self, flight_id: str) -> Optional[Dict[str, Any]]:
Retrieves the route of a specific flight.
flight_track(self, flight_id: str, include_estimated_positions: Optional[bool] = None) -> Optional[Dict[str, Any]]:
Retrieves the track of a specific flight.
last_flight(self, registration: str) -> Optional[Dict[str, Any]]:
Retrieves the last flight of a specific aircraft.
flight_info(self, ident: str, ident_type: Optional[str] = None, start: Optional[int] = None,
end: Optional[int] = None, max_pages: int = 1, cursor: Optional[str] = None) -> Optional[Dict[str, Any]]:
Retrieves information about a specific flight or set of flights.
"""
def __init__(self, api_caller: APICaller) -> None:
"""
Initializes a `History` instance.
Args:
api_caller (APICaller): An instance of the `APICaller` class.
"""
self.api_caller = api_caller
self.endpoint = 'history'
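    # A minimal usage sketch (the APICaller constructor arguments and the flight id
    # below are assumptions, shown for illustration only):
    #
    #   caller = APICaller('your-aeroapi-key')
    #   history = History(caller)
    #   track = history.flight_track('UAL4-1680000000-fa-0001', include_estimated_positions=True)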
def flight_map(self, flight_id: str, height: int = 480, width: int = 640, layer_on: Optional[str] = None,
layer_off: Optional[str] = None, show_data_block: Optional[bool] = None,
airports_expand_view: Optional[bool] = None, show_airports: Optional[bool] = None,
bounding_box: Optional[str] = None) -> Optional[Dict[str, Any]]:
"""
Retrieves a map of a specific flight.
Args:
flight_id (str): The unique identifier of the flight.
height (int): Optional, the height of the map in pixels (default 480).
width (int): Optional, the width of the map in pixels (default 640).
layer_on (str): Optional, a comma-separated list of layers to enable.
layer_off (str): Optional, a comma-separated list of layers to disable.
show_data_block (bool): Optional, whether to show the data block (default False).
airports_expand_view (bool): Optional, whether to expand the view to include airports (default False).
show_airports (bool): Optional, whether to show airports on the map (default False).
bounding_box (str): Optional, a bounding box to restrict the map view.
Returns:
dict: The parsed JSON response, or None if the request failed.
"""
query = {
"height": height,
"width": width,
"layer_on": layer_on,
"layer_off": layer_off,
"show_data_block": show_data_block,
"airports_expand_view": airports_expand_view,
"show_airports": show_airports,
"bounding_box": bounding_box,
}
path = self.api_caller._build_path(self.endpoint, sub_path=f"flights/{flight_id}/map", query=query)
return self.api_caller.get(path)
def flight_route(self, flight_id: str) -> Optional[Dict[str, Any]]:
"""
Retrieves the route of a specific flight.
Args:
flight_id (str): The unique identifier of the flight.
Returns:
dict: The parsed JSON response, or None if the request failed.
"""
path = self.api_caller._build_path(self.endpoint, sub_path=f"flights/{flight_id}/route")
return self.api_caller.get(path)
def flight_track(self, flight_id: str, include_estimated_positions: Optional[bool] = None) -> Optional[Dict[str, Any]]:
"""
Retrieves the track of a specific flight.
Args:
flight_id (str): The unique identifier of the flight.
include_estimated_positions (bool): Optional, whether to include estimated positions (default False).
Returns:
dict: The parsed JSON response, or None if the request failed.
"""
query = {"include_estimated_positions": include_estimated_positions}
path = self.api_caller._build_path(self.endpoint, sub_path=f"flights/{flight_id}/track", query=query)
return self.api_caller.get(path)
def last_flight(self, registration: str) -> Optional[Dict[str, Any]]:
"""
Retrieves the last flight of a specific aircraft.
Args:
registration (str): The registration of the aircraft.
Returns:
dict: The parsed JSON response, or None if the request failed.
"""
path = self.api_caller._build_path(self.endpoint, sub_path=f"aircraft/{registration}/last_flight")
return self.api_caller.get(path)
def flight_info(self, ident: str, ident_type: Optional[str] = None, start: Optional[int] = None,
end: Optional[int] = None, max_pages: int = 1, cursor: Optional[str] = None) -> Optional[Dict[str, Any]]:
"""
Retrieves information about a specific flight or set of flights.
Args:
ident (str): The identifier of the flight or set of flights.
ident_type (str): Optional, the type of identifier (default None).
start (int): Optional, the start time of the search in seconds since epoch (default None).
end (int): Optional, the end time of the search in seconds since epoch (default None).
max_pages (int): Optional, the maximum number of pages to retrieve (default 1).
cursor (str): Optional, a cursor for pagination (default None).
Returns:
dict: The parsed JSON response, or None if the request failed.
"""
query = {
"ident_type": ident_type,
"start": start,
"end": end,
"max_pages": max_pages,
"cursor": cursor,
}
path = self.api_caller._build_path(self.endpoint, sub_path=f"flights/{ident}", query=query)
return self.api_caller.get(path) | PypiClean |
/types_aiobotocore_iotsitewise-2.3.4.post1-py3-none-any.whl/types_aiobotocore_iotsitewise/type_defs.py | import sys
from datetime import datetime
from typing import IO, Any, Dict, List, Mapping, Sequence, Union
from aiobotocore.response import StreamingBody
from .literals import (
AggregateTypeType,
AssetModelStateType,
AssetStateType,
AuthModeType,
BatchPutAssetPropertyValueErrorCodeType,
CapabilitySyncStatusType,
ComputeLocationType,
ConfigurationStateType,
DetailedErrorCodeType,
DisassociatedDataStorageStateType,
EncryptionTypeType,
ErrorCodeType,
ForwardingConfigStateType,
IdentityTypeType,
ListAssetsFilterType,
ListTimeSeriesTypeType,
LoggingLevelType,
MonitorErrorCodeType,
PermissionType,
PortalStateType,
PropertyDataTypeType,
PropertyNotificationStateType,
QualityType,
ResourceTypeType,
StorageTypeType,
TimeOrderingType,
TraversalDirectionType,
)
if sys.version_info >= (3, 9):
from typing import Literal
else:
from typing_extensions import Literal
if sys.version_info >= (3, 9):
from typing import TypedDict
else:
from typing_extensions import TypedDict
__all__ = (
"AggregatesTypeDef",
"AlarmsTypeDef",
"AssetErrorDetailsTypeDef",
"AssetHierarchyInfoTypeDef",
"AssetHierarchyTypeDef",
"AssetModelHierarchyDefinitionTypeDef",
"AssetModelHierarchyTypeDef",
"PropertyNotificationTypeDef",
"TimeInNanosTypeDef",
"VariantTypeDef",
"AssociateAssetsRequestRequestTypeDef",
"AssociateTimeSeriesToAssetPropertyRequestRequestTypeDef",
"AttributeTypeDef",
"BatchAssociateProjectAssetsRequestRequestTypeDef",
"ResponseMetadataTypeDef",
"BatchDisassociateProjectAssetsRequestRequestTypeDef",
"ConfigurationErrorDetailsTypeDef",
"CreateAssetRequestRequestTypeDef",
"CreateDashboardRequestRequestTypeDef",
"ImageFileTypeDef",
"CreateProjectRequestRequestTypeDef",
"CustomerManagedS3StorageTypeDef",
"DashboardSummaryTypeDef",
"DeleteAccessPolicyRequestRequestTypeDef",
"DeleteAssetModelRequestRequestTypeDef",
"DeleteAssetRequestRequestTypeDef",
"DeleteDashboardRequestRequestTypeDef",
"DeleteGatewayRequestRequestTypeDef",
"DeletePortalRequestRequestTypeDef",
"DeleteProjectRequestRequestTypeDef",
"DeleteTimeSeriesRequestRequestTypeDef",
"DescribeAccessPolicyRequestRequestTypeDef",
"WaiterConfigTypeDef",
"DescribeAssetModelRequestRequestTypeDef",
"DescribeAssetPropertyRequestRequestTypeDef",
"DescribeAssetRequestRequestTypeDef",
"DescribeDashboardRequestRequestTypeDef",
"DescribeGatewayCapabilityConfigurationRequestRequestTypeDef",
"DescribeGatewayRequestRequestTypeDef",
"GatewayCapabilitySummaryTypeDef",
"LoggingOptionsTypeDef",
"DescribePortalRequestRequestTypeDef",
"ImageLocationTypeDef",
"DescribeProjectRequestRequestTypeDef",
"RetentionPeriodTypeDef",
"DescribeTimeSeriesRequestRequestTypeDef",
"DetailedErrorTypeDef",
"DisassociateAssetsRequestRequestTypeDef",
"DisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef",
"VariableValueTypeDef",
"ForwardingConfigTypeDef",
"GreengrassTypeDef",
"GreengrassV2TypeDef",
"PaginatorConfigTypeDef",
"GetAssetPropertyAggregatesRequestRequestTypeDef",
"GetAssetPropertyValueHistoryRequestRequestTypeDef",
"GetAssetPropertyValueRequestRequestTypeDef",
"GetInterpolatedAssetPropertyValuesRequestRequestTypeDef",
"GroupIdentityTypeDef",
"IAMRoleIdentityTypeDef",
"IAMUserIdentityTypeDef",
"UserIdentityTypeDef",
"ListAccessPoliciesRequestRequestTypeDef",
"ListAssetModelsRequestRequestTypeDef",
"ListAssetRelationshipsRequestRequestTypeDef",
"ListAssetsRequestRequestTypeDef",
"ListAssociatedAssetsRequestRequestTypeDef",
"ListDashboardsRequestRequestTypeDef",
"ListGatewaysRequestRequestTypeDef",
"ListPortalsRequestRequestTypeDef",
"ListProjectAssetsRequestRequestTypeDef",
"ListProjectsRequestRequestTypeDef",
"ProjectSummaryTypeDef",
"ListTagsForResourceRequestRequestTypeDef",
"ListTimeSeriesRequestRequestTypeDef",
"TimeSeriesSummaryTypeDef",
"MetricProcessingConfigTypeDef",
"TumblingWindowTypeDef",
"MonitorErrorDetailsTypeDef",
"PortalResourceTypeDef",
"ProjectResourceTypeDef",
"PutDefaultEncryptionConfigurationRequestRequestTypeDef",
"TagResourceRequestRequestTypeDef",
"UntagResourceRequestRequestTypeDef",
"UpdateAssetPropertyRequestRequestTypeDef",
"UpdateAssetRequestRequestTypeDef",
"UpdateDashboardRequestRequestTypeDef",
"UpdateGatewayCapabilityConfigurationRequestRequestTypeDef",
"UpdateGatewayRequestRequestTypeDef",
"UpdateProjectRequestRequestTypeDef",
"AggregatedValueTypeDef",
"AssetRelationshipSummaryTypeDef",
"AssetPropertyTypeDef",
"BatchPutAssetPropertyErrorTypeDef",
"AssetPropertyValueTypeDef",
"InterpolatedAssetPropertyValueTypeDef",
"BatchAssociateProjectAssetsResponseTypeDef",
"BatchDisassociateProjectAssetsResponseTypeDef",
"CreateAccessPolicyResponseTypeDef",
"CreateDashboardResponseTypeDef",
"CreateGatewayResponseTypeDef",
"CreateProjectResponseTypeDef",
"DescribeDashboardResponseTypeDef",
"DescribeGatewayCapabilityConfigurationResponseTypeDef",
"DescribeProjectResponseTypeDef",
"DescribeTimeSeriesResponseTypeDef",
"EmptyResponseMetadataTypeDef",
"ListProjectAssetsResponseTypeDef",
"ListTagsForResourceResponseTypeDef",
"UpdateGatewayCapabilityConfigurationResponseTypeDef",
"ConfigurationStatusTypeDef",
"CreatePortalRequestRequestTypeDef",
"ImageTypeDef",
"MultiLayerStorageTypeDef",
"ListDashboardsResponseTypeDef",
"DescribeAssetModelRequestAssetModelActiveWaitTypeDef",
"DescribeAssetModelRequestAssetModelNotExistsWaitTypeDef",
"DescribeAssetRequestAssetActiveWaitTypeDef",
"DescribeAssetRequestAssetNotExistsWaitTypeDef",
"DescribePortalRequestPortalActiveWaitTypeDef",
"DescribePortalRequestPortalNotExistsWaitTypeDef",
"DescribeLoggingOptionsResponseTypeDef",
"PutLoggingOptionsRequestRequestTypeDef",
"ErrorDetailsTypeDef",
"ExpressionVariableTypeDef",
"MeasurementProcessingConfigTypeDef",
"TransformProcessingConfigTypeDef",
"GatewayPlatformTypeDef",
"GetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef",
"GetAssetPropertyValueHistoryRequestGetAssetPropertyValueHistoryPaginateTypeDef",
"GetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef",
"ListAccessPoliciesRequestListAccessPoliciesPaginateTypeDef",
"ListAssetModelsRequestListAssetModelsPaginateTypeDef",
"ListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef",
"ListAssetsRequestListAssetsPaginateTypeDef",
"ListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef",
"ListDashboardsRequestListDashboardsPaginateTypeDef",
"ListGatewaysRequestListGatewaysPaginateTypeDef",
"ListPortalsRequestListPortalsPaginateTypeDef",
"ListProjectAssetsRequestListProjectAssetsPaginateTypeDef",
"ListProjectsRequestListProjectsPaginateTypeDef",
"ListTimeSeriesRequestListTimeSeriesPaginateTypeDef",
"IdentityTypeDef",
"ListProjectsResponseTypeDef",
"ListTimeSeriesResponseTypeDef",
"MetricWindowTypeDef",
"PortalStatusTypeDef",
"ResourceTypeDef",
"GetAssetPropertyAggregatesResponseTypeDef",
"ListAssetRelationshipsResponseTypeDef",
"AssetCompositeModelTypeDef",
"BatchPutAssetPropertyErrorEntryTypeDef",
"GetAssetPropertyValueHistoryResponseTypeDef",
"GetAssetPropertyValueResponseTypeDef",
"PutAssetPropertyValueEntryTypeDef",
"GetInterpolatedAssetPropertyValuesResponseTypeDef",
"DescribeDefaultEncryptionConfigurationResponseTypeDef",
"PutDefaultEncryptionConfigurationResponseTypeDef",
"UpdatePortalRequestRequestTypeDef",
"DescribeStorageConfigurationResponseTypeDef",
"PutStorageConfigurationRequestRequestTypeDef",
"PutStorageConfigurationResponseTypeDef",
"AssetModelStatusTypeDef",
"AssetStatusTypeDef",
"MeasurementTypeDef",
"TransformTypeDef",
"CreateGatewayRequestRequestTypeDef",
"DescribeGatewayResponseTypeDef",
"GatewaySummaryTypeDef",
"MetricTypeDef",
"CreatePortalResponseTypeDef",
"DeletePortalResponseTypeDef",
"DescribePortalResponseTypeDef",
"PortalSummaryTypeDef",
"UpdatePortalResponseTypeDef",
"AccessPolicySummaryTypeDef",
"CreateAccessPolicyRequestRequestTypeDef",
"DescribeAccessPolicyResponseTypeDef",
"UpdateAccessPolicyRequestRequestTypeDef",
"BatchPutAssetPropertyValueResponseTypeDef",
"BatchPutAssetPropertyValueRequestRequestTypeDef",
"AssetModelSummaryTypeDef",
"CreateAssetModelResponseTypeDef",
"DeleteAssetModelResponseTypeDef",
"UpdateAssetModelResponseTypeDef",
"AssetSummaryTypeDef",
"AssociatedAssetsSummaryTypeDef",
"CreateAssetResponseTypeDef",
"DeleteAssetResponseTypeDef",
"DescribeAssetResponseTypeDef",
"UpdateAssetResponseTypeDef",
"ListGatewaysResponseTypeDef",
"PropertyTypeTypeDef",
"ListPortalsResponseTypeDef",
"ListAccessPoliciesResponseTypeDef",
"ListAssetModelsResponseTypeDef",
"ListAssetsResponseTypeDef",
"ListAssociatedAssetsResponseTypeDef",
"AssetModelPropertyDefinitionTypeDef",
"AssetModelPropertyTypeDef",
"PropertyTypeDef",
"AssetModelCompositeModelDefinitionTypeDef",
"AssetModelCompositeModelTypeDef",
"CompositeModelPropertyTypeDef",
"CreateAssetModelRequestRequestTypeDef",
"DescribeAssetModelResponseTypeDef",
"UpdateAssetModelRequestRequestTypeDef",
"DescribeAssetPropertyResponseTypeDef",
)
AggregatesTypeDef = TypedDict(
"AggregatesTypeDef",
{
"average": float,
"count": float,
"maximum": float,
"minimum": float,
"sum": float,
"standardDeviation": float,
},
total=False,
)
_RequiredAlarmsTypeDef = TypedDict(
"_RequiredAlarmsTypeDef",
{
"alarmRoleArn": str,
},
)
_OptionalAlarmsTypeDef = TypedDict(
"_OptionalAlarmsTypeDef",
{
"notificationLambdaArn": str,
},
total=False,
)
class AlarmsTypeDef(_RequiredAlarmsTypeDef, _OptionalAlarmsTypeDef):
pass
AssetErrorDetailsTypeDef = TypedDict(
"AssetErrorDetailsTypeDef",
{
"assetId": str,
"code": Literal["INTERNAL_FAILURE"],
"message": str,
},
)
AssetHierarchyInfoTypeDef = TypedDict(
"AssetHierarchyInfoTypeDef",
{
"parentAssetId": str,
"childAssetId": str,
},
total=False,
)
_RequiredAssetHierarchyTypeDef = TypedDict(
"_RequiredAssetHierarchyTypeDef",
{
"name": str,
},
)
_OptionalAssetHierarchyTypeDef = TypedDict(
"_OptionalAssetHierarchyTypeDef",
{
"id": str,
},
total=False,
)
class AssetHierarchyTypeDef(_RequiredAssetHierarchyTypeDef, _OptionalAssetHierarchyTypeDef):
pass
AssetModelHierarchyDefinitionTypeDef = TypedDict(
"AssetModelHierarchyDefinitionTypeDef",
{
"name": str,
"childAssetModelId": str,
},
)
_RequiredAssetModelHierarchyTypeDef = TypedDict(
"_RequiredAssetModelHierarchyTypeDef",
{
"name": str,
"childAssetModelId": str,
},
)
_OptionalAssetModelHierarchyTypeDef = TypedDict(
"_OptionalAssetModelHierarchyTypeDef",
{
"id": str,
},
total=False,
)
class AssetModelHierarchyTypeDef(
_RequiredAssetModelHierarchyTypeDef, _OptionalAssetModelHierarchyTypeDef
):
pass
PropertyNotificationTypeDef = TypedDict(
"PropertyNotificationTypeDef",
{
"topic": str,
"state": PropertyNotificationStateType,
},
)
_RequiredTimeInNanosTypeDef = TypedDict(
"_RequiredTimeInNanosTypeDef",
{
"timeInSeconds": int,
},
)
_OptionalTimeInNanosTypeDef = TypedDict(
"_OptionalTimeInNanosTypeDef",
{
"offsetInNanos": int,
},
total=False,
)
class TimeInNanosTypeDef(_RequiredTimeInNanosTypeDef, _OptionalTimeInNanosTypeDef):
pass
VariantTypeDef = TypedDict(
"VariantTypeDef",
{
"stringValue": str,
"integerValue": int,
"doubleValue": float,
"booleanValue": bool,
},
total=False,
)
_RequiredAssociateAssetsRequestRequestTypeDef = TypedDict(
"_RequiredAssociateAssetsRequestRequestTypeDef",
{
"assetId": str,
"hierarchyId": str,
"childAssetId": str,
},
)
_OptionalAssociateAssetsRequestRequestTypeDef = TypedDict(
"_OptionalAssociateAssetsRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class AssociateAssetsRequestRequestTypeDef(
_RequiredAssociateAssetsRequestRequestTypeDef, _OptionalAssociateAssetsRequestRequestTypeDef
):
pass
_RequiredAssociateTimeSeriesToAssetPropertyRequestRequestTypeDef = TypedDict(
"_RequiredAssociateTimeSeriesToAssetPropertyRequestRequestTypeDef",
{
"alias": str,
"assetId": str,
"propertyId": str,
},
)
_OptionalAssociateTimeSeriesToAssetPropertyRequestRequestTypeDef = TypedDict(
"_OptionalAssociateTimeSeriesToAssetPropertyRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class AssociateTimeSeriesToAssetPropertyRequestRequestTypeDef(
_RequiredAssociateTimeSeriesToAssetPropertyRequestRequestTypeDef,
_OptionalAssociateTimeSeriesToAssetPropertyRequestRequestTypeDef,
):
pass
AttributeTypeDef = TypedDict(
"AttributeTypeDef",
{
"defaultValue": str,
},
total=False,
)
_RequiredBatchAssociateProjectAssetsRequestRequestTypeDef = TypedDict(
"_RequiredBatchAssociateProjectAssetsRequestRequestTypeDef",
{
"projectId": str,
"assetIds": Sequence[str],
},
)
_OptionalBatchAssociateProjectAssetsRequestRequestTypeDef = TypedDict(
"_OptionalBatchAssociateProjectAssetsRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class BatchAssociateProjectAssetsRequestRequestTypeDef(
_RequiredBatchAssociateProjectAssetsRequestRequestTypeDef,
_OptionalBatchAssociateProjectAssetsRequestRequestTypeDef,
):
pass
ResponseMetadataTypeDef = TypedDict(
"ResponseMetadataTypeDef",
{
"RequestId": str,
"HostId": str,
"HTTPStatusCode": int,
"HTTPHeaders": Dict[str, str],
"RetryAttempts": int,
},
)
_RequiredBatchDisassociateProjectAssetsRequestRequestTypeDef = TypedDict(
"_RequiredBatchDisassociateProjectAssetsRequestRequestTypeDef",
{
"projectId": str,
"assetIds": Sequence[str],
},
)
_OptionalBatchDisassociateProjectAssetsRequestRequestTypeDef = TypedDict(
"_OptionalBatchDisassociateProjectAssetsRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class BatchDisassociateProjectAssetsRequestRequestTypeDef(
_RequiredBatchDisassociateProjectAssetsRequestRequestTypeDef,
_OptionalBatchDisassociateProjectAssetsRequestRequestTypeDef,
):
pass
ConfigurationErrorDetailsTypeDef = TypedDict(
"ConfigurationErrorDetailsTypeDef",
{
"code": ErrorCodeType,
"message": str,
},
)
_RequiredCreateAssetRequestRequestTypeDef = TypedDict(
"_RequiredCreateAssetRequestRequestTypeDef",
{
"assetName": str,
"assetModelId": str,
},
)
_OptionalCreateAssetRequestRequestTypeDef = TypedDict(
"_OptionalCreateAssetRequestRequestTypeDef",
{
"clientToken": str,
"tags": Mapping[str, str],
},
total=False,
)
class CreateAssetRequestRequestTypeDef(
_RequiredCreateAssetRequestRequestTypeDef, _OptionalCreateAssetRequestRequestTypeDef
):
pass
_RequiredCreateDashboardRequestRequestTypeDef = TypedDict(
"_RequiredCreateDashboardRequestRequestTypeDef",
{
"projectId": str,
"dashboardName": str,
"dashboardDefinition": str,
},
)
_OptionalCreateDashboardRequestRequestTypeDef = TypedDict(
"_OptionalCreateDashboardRequestRequestTypeDef",
{
"dashboardDescription": str,
"clientToken": str,
"tags": Mapping[str, str],
},
total=False,
)
class CreateDashboardRequestRequestTypeDef(
_RequiredCreateDashboardRequestRequestTypeDef, _OptionalCreateDashboardRequestRequestTypeDef
):
pass
ImageFileTypeDef = TypedDict(
"ImageFileTypeDef",
{
"data": Union[str, bytes, IO[Any], StreamingBody],
"type": Literal["PNG"],
},
)
_RequiredCreateProjectRequestRequestTypeDef = TypedDict(
"_RequiredCreateProjectRequestRequestTypeDef",
{
"portalId": str,
"projectName": str,
},
)
_OptionalCreateProjectRequestRequestTypeDef = TypedDict(
"_OptionalCreateProjectRequestRequestTypeDef",
{
"projectDescription": str,
"clientToken": str,
"tags": Mapping[str, str],
},
total=False,
)
class CreateProjectRequestRequestTypeDef(
_RequiredCreateProjectRequestRequestTypeDef, _OptionalCreateProjectRequestRequestTypeDef
):
pass
CustomerManagedS3StorageTypeDef = TypedDict(
"CustomerManagedS3StorageTypeDef",
{
"s3ResourceArn": str,
"roleArn": str,
},
)
_RequiredDashboardSummaryTypeDef = TypedDict(
"_RequiredDashboardSummaryTypeDef",
{
"id": str,
"name": str,
},
)
_OptionalDashboardSummaryTypeDef = TypedDict(
"_OptionalDashboardSummaryTypeDef",
{
"description": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
},
total=False,
)
class DashboardSummaryTypeDef(_RequiredDashboardSummaryTypeDef, _OptionalDashboardSummaryTypeDef):
pass
_RequiredDeleteAccessPolicyRequestRequestTypeDef = TypedDict(
"_RequiredDeleteAccessPolicyRequestRequestTypeDef",
{
"accessPolicyId": str,
},
)
_OptionalDeleteAccessPolicyRequestRequestTypeDef = TypedDict(
"_OptionalDeleteAccessPolicyRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DeleteAccessPolicyRequestRequestTypeDef(
_RequiredDeleteAccessPolicyRequestRequestTypeDef,
_OptionalDeleteAccessPolicyRequestRequestTypeDef,
):
pass
_RequiredDeleteAssetModelRequestRequestTypeDef = TypedDict(
"_RequiredDeleteAssetModelRequestRequestTypeDef",
{
"assetModelId": str,
},
)
_OptionalDeleteAssetModelRequestRequestTypeDef = TypedDict(
"_OptionalDeleteAssetModelRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DeleteAssetModelRequestRequestTypeDef(
_RequiredDeleteAssetModelRequestRequestTypeDef, _OptionalDeleteAssetModelRequestRequestTypeDef
):
pass
_RequiredDeleteAssetRequestRequestTypeDef = TypedDict(
"_RequiredDeleteAssetRequestRequestTypeDef",
{
"assetId": str,
},
)
_OptionalDeleteAssetRequestRequestTypeDef = TypedDict(
"_OptionalDeleteAssetRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DeleteAssetRequestRequestTypeDef(
_RequiredDeleteAssetRequestRequestTypeDef, _OptionalDeleteAssetRequestRequestTypeDef
):
pass
_RequiredDeleteDashboardRequestRequestTypeDef = TypedDict(
"_RequiredDeleteDashboardRequestRequestTypeDef",
{
"dashboardId": str,
},
)
_OptionalDeleteDashboardRequestRequestTypeDef = TypedDict(
"_OptionalDeleteDashboardRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DeleteDashboardRequestRequestTypeDef(
_RequiredDeleteDashboardRequestRequestTypeDef, _OptionalDeleteDashboardRequestRequestTypeDef
):
pass
DeleteGatewayRequestRequestTypeDef = TypedDict(
"DeleteGatewayRequestRequestTypeDef",
{
"gatewayId": str,
},
)
_RequiredDeletePortalRequestRequestTypeDef = TypedDict(
"_RequiredDeletePortalRequestRequestTypeDef",
{
"portalId": str,
},
)
_OptionalDeletePortalRequestRequestTypeDef = TypedDict(
"_OptionalDeletePortalRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DeletePortalRequestRequestTypeDef(
_RequiredDeletePortalRequestRequestTypeDef, _OptionalDeletePortalRequestRequestTypeDef
):
pass
_RequiredDeleteProjectRequestRequestTypeDef = TypedDict(
"_RequiredDeleteProjectRequestRequestTypeDef",
{
"projectId": str,
},
)
_OptionalDeleteProjectRequestRequestTypeDef = TypedDict(
"_OptionalDeleteProjectRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DeleteProjectRequestRequestTypeDef(
_RequiredDeleteProjectRequestRequestTypeDef, _OptionalDeleteProjectRequestRequestTypeDef
):
pass
DeleteTimeSeriesRequestRequestTypeDef = TypedDict(
"DeleteTimeSeriesRequestRequestTypeDef",
{
"alias": str,
"assetId": str,
"propertyId": str,
"clientToken": str,
},
total=False,
)
DescribeAccessPolicyRequestRequestTypeDef = TypedDict(
"DescribeAccessPolicyRequestRequestTypeDef",
{
"accessPolicyId": str,
},
)
WaiterConfigTypeDef = TypedDict(
"WaiterConfigTypeDef",
{
"Delay": int,
"MaxAttempts": int,
},
total=False,
)
DescribeAssetModelRequestRequestTypeDef = TypedDict(
"DescribeAssetModelRequestRequestTypeDef",
{
"assetModelId": str,
},
)
DescribeAssetPropertyRequestRequestTypeDef = TypedDict(
"DescribeAssetPropertyRequestRequestTypeDef",
{
"assetId": str,
"propertyId": str,
},
)
DescribeAssetRequestRequestTypeDef = TypedDict(
"DescribeAssetRequestRequestTypeDef",
{
"assetId": str,
},
)
DescribeDashboardRequestRequestTypeDef = TypedDict(
"DescribeDashboardRequestRequestTypeDef",
{
"dashboardId": str,
},
)
DescribeGatewayCapabilityConfigurationRequestRequestTypeDef = TypedDict(
"DescribeGatewayCapabilityConfigurationRequestRequestTypeDef",
{
"gatewayId": str,
"capabilityNamespace": str,
},
)
DescribeGatewayRequestRequestTypeDef = TypedDict(
"DescribeGatewayRequestRequestTypeDef",
{
"gatewayId": str,
},
)
GatewayCapabilitySummaryTypeDef = TypedDict(
"GatewayCapabilitySummaryTypeDef",
{
"capabilityNamespace": str,
"capabilitySyncStatus": CapabilitySyncStatusType,
},
)
LoggingOptionsTypeDef = TypedDict(
"LoggingOptionsTypeDef",
{
"level": LoggingLevelType,
},
)
DescribePortalRequestRequestTypeDef = TypedDict(
"DescribePortalRequestRequestTypeDef",
{
"portalId": str,
},
)
ImageLocationTypeDef = TypedDict(
"ImageLocationTypeDef",
{
"id": str,
"url": str,
},
)
DescribeProjectRequestRequestTypeDef = TypedDict(
"DescribeProjectRequestRequestTypeDef",
{
"projectId": str,
},
)
RetentionPeriodTypeDef = TypedDict(
"RetentionPeriodTypeDef",
{
"numberOfDays": int,
"unlimited": bool,
},
total=False,
)
DescribeTimeSeriesRequestRequestTypeDef = TypedDict(
"DescribeTimeSeriesRequestRequestTypeDef",
{
"alias": str,
"assetId": str,
"propertyId": str,
},
total=False,
)
DetailedErrorTypeDef = TypedDict(
"DetailedErrorTypeDef",
{
"code": DetailedErrorCodeType,
"message": str,
},
)
_RequiredDisassociateAssetsRequestRequestTypeDef = TypedDict(
"_RequiredDisassociateAssetsRequestRequestTypeDef",
{
"assetId": str,
"hierarchyId": str,
"childAssetId": str,
},
)
_OptionalDisassociateAssetsRequestRequestTypeDef = TypedDict(
"_OptionalDisassociateAssetsRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DisassociateAssetsRequestRequestTypeDef(
_RequiredDisassociateAssetsRequestRequestTypeDef,
_OptionalDisassociateAssetsRequestRequestTypeDef,
):
pass
_RequiredDisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef = TypedDict(
"_RequiredDisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef",
{
"alias": str,
"assetId": str,
"propertyId": str,
},
)
_OptionalDisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef = TypedDict(
"_OptionalDisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class DisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef(
_RequiredDisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef,
_OptionalDisassociateTimeSeriesFromAssetPropertyRequestRequestTypeDef,
):
pass
_RequiredVariableValueTypeDef = TypedDict(
"_RequiredVariableValueTypeDef",
{
"propertyId": str,
},
)
_OptionalVariableValueTypeDef = TypedDict(
"_OptionalVariableValueTypeDef",
{
"hierarchyId": str,
},
total=False,
)
class VariableValueTypeDef(_RequiredVariableValueTypeDef, _OptionalVariableValueTypeDef):
pass
ForwardingConfigTypeDef = TypedDict(
"ForwardingConfigTypeDef",
{
"state": ForwardingConfigStateType,
},
)
GreengrassTypeDef = TypedDict(
"GreengrassTypeDef",
{
"groupArn": str,
},
)
GreengrassV2TypeDef = TypedDict(
"GreengrassV2TypeDef",
{
"coreDeviceThingName": str,
},
)
PaginatorConfigTypeDef = TypedDict(
"PaginatorConfigTypeDef",
{
"MaxItems": int,
"PageSize": int,
"StartingToken": str,
},
total=False,
)
_RequiredGetAssetPropertyAggregatesRequestRequestTypeDef = TypedDict(
"_RequiredGetAssetPropertyAggregatesRequestRequestTypeDef",
{
"aggregateTypes": Sequence[AggregateTypeType],
"resolution": str,
"startDate": Union[datetime, str],
"endDate": Union[datetime, str],
},
)
_OptionalGetAssetPropertyAggregatesRequestRequestTypeDef = TypedDict(
"_OptionalGetAssetPropertyAggregatesRequestRequestTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
"qualities": Sequence[QualityType],
"timeOrdering": TimeOrderingType,
"nextToken": str,
"maxResults": int,
},
total=False,
)
class GetAssetPropertyAggregatesRequestRequestTypeDef(
_RequiredGetAssetPropertyAggregatesRequestRequestTypeDef,
_OptionalGetAssetPropertyAggregatesRequestRequestTypeDef,
):
pass
GetAssetPropertyValueHistoryRequestRequestTypeDef = TypedDict(
"GetAssetPropertyValueHistoryRequestRequestTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
"startDate": Union[datetime, str],
"endDate": Union[datetime, str],
"qualities": Sequence[QualityType],
"timeOrdering": TimeOrderingType,
"nextToken": str,
"maxResults": int,
},
total=False,
)
GetAssetPropertyValueRequestRequestTypeDef = TypedDict(
"GetAssetPropertyValueRequestRequestTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
},
total=False,
)
_RequiredGetInterpolatedAssetPropertyValuesRequestRequestTypeDef = TypedDict(
"_RequiredGetInterpolatedAssetPropertyValuesRequestRequestTypeDef",
{
"startTimeInSeconds": int,
"endTimeInSeconds": int,
"quality": QualityType,
"intervalInSeconds": int,
"type": str,
},
)
_OptionalGetInterpolatedAssetPropertyValuesRequestRequestTypeDef = TypedDict(
"_OptionalGetInterpolatedAssetPropertyValuesRequestRequestTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
"startTimeOffsetInNanos": int,
"endTimeOffsetInNanos": int,
"nextToken": str,
"maxResults": int,
"intervalWindowInSeconds": int,
},
total=False,
)
class GetInterpolatedAssetPropertyValuesRequestRequestTypeDef(
_RequiredGetInterpolatedAssetPropertyValuesRequestRequestTypeDef,
_OptionalGetInterpolatedAssetPropertyValuesRequestRequestTypeDef,
):
pass
GroupIdentityTypeDef = TypedDict(
"GroupIdentityTypeDef",
{
"id": str,
},
)
IAMRoleIdentityTypeDef = TypedDict(
"IAMRoleIdentityTypeDef",
{
"arn": str,
},
)
IAMUserIdentityTypeDef = TypedDict(
"IAMUserIdentityTypeDef",
{
"arn": str,
},
)
UserIdentityTypeDef = TypedDict(
"UserIdentityTypeDef",
{
"id": str,
},
)
ListAccessPoliciesRequestRequestTypeDef = TypedDict(
"ListAccessPoliciesRequestRequestTypeDef",
{
"identityType": IdentityTypeType,
"identityId": str,
"resourceType": ResourceTypeType,
"resourceId": str,
"iamArn": str,
"nextToken": str,
"maxResults": int,
},
total=False,
)
ListAssetModelsRequestRequestTypeDef = TypedDict(
"ListAssetModelsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
_RequiredListAssetRelationshipsRequestRequestTypeDef = TypedDict(
"_RequiredListAssetRelationshipsRequestRequestTypeDef",
{
"assetId": str,
"traversalType": Literal["PATH_TO_ROOT"],
},
)
_OptionalListAssetRelationshipsRequestRequestTypeDef = TypedDict(
"_OptionalListAssetRelationshipsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
class ListAssetRelationshipsRequestRequestTypeDef(
_RequiredListAssetRelationshipsRequestRequestTypeDef,
_OptionalListAssetRelationshipsRequestRequestTypeDef,
):
pass
ListAssetsRequestRequestTypeDef = TypedDict(
"ListAssetsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
"assetModelId": str,
"filter": ListAssetsFilterType,
},
total=False,
)
_RequiredListAssociatedAssetsRequestRequestTypeDef = TypedDict(
"_RequiredListAssociatedAssetsRequestRequestTypeDef",
{
"assetId": str,
},
)
_OptionalListAssociatedAssetsRequestRequestTypeDef = TypedDict(
"_OptionalListAssociatedAssetsRequestRequestTypeDef",
{
"hierarchyId": str,
"traversalDirection": TraversalDirectionType,
"nextToken": str,
"maxResults": int,
},
total=False,
)
class ListAssociatedAssetsRequestRequestTypeDef(
_RequiredListAssociatedAssetsRequestRequestTypeDef,
_OptionalListAssociatedAssetsRequestRequestTypeDef,
):
pass
_RequiredListDashboardsRequestRequestTypeDef = TypedDict(
"_RequiredListDashboardsRequestRequestTypeDef",
{
"projectId": str,
},
)
_OptionalListDashboardsRequestRequestTypeDef = TypedDict(
"_OptionalListDashboardsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
class ListDashboardsRequestRequestTypeDef(
_RequiredListDashboardsRequestRequestTypeDef, _OptionalListDashboardsRequestRequestTypeDef
):
pass
ListGatewaysRequestRequestTypeDef = TypedDict(
"ListGatewaysRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
ListPortalsRequestRequestTypeDef = TypedDict(
"ListPortalsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
_RequiredListProjectAssetsRequestRequestTypeDef = TypedDict(
"_RequiredListProjectAssetsRequestRequestTypeDef",
{
"projectId": str,
},
)
_OptionalListProjectAssetsRequestRequestTypeDef = TypedDict(
"_OptionalListProjectAssetsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
class ListProjectAssetsRequestRequestTypeDef(
_RequiredListProjectAssetsRequestRequestTypeDef, _OptionalListProjectAssetsRequestRequestTypeDef
):
pass
_RequiredListProjectsRequestRequestTypeDef = TypedDict(
"_RequiredListProjectsRequestRequestTypeDef",
{
"portalId": str,
},
)
_OptionalListProjectsRequestRequestTypeDef = TypedDict(
"_OptionalListProjectsRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
},
total=False,
)
class ListProjectsRequestRequestTypeDef(
_RequiredListProjectsRequestRequestTypeDef, _OptionalListProjectsRequestRequestTypeDef
):
pass
_RequiredProjectSummaryTypeDef = TypedDict(
"_RequiredProjectSummaryTypeDef",
{
"id": str,
"name": str,
},
)
_OptionalProjectSummaryTypeDef = TypedDict(
"_OptionalProjectSummaryTypeDef",
{
"description": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
},
total=False,
)
class ProjectSummaryTypeDef(_RequiredProjectSummaryTypeDef, _OptionalProjectSummaryTypeDef):
pass
ListTagsForResourceRequestRequestTypeDef = TypedDict(
"ListTagsForResourceRequestRequestTypeDef",
{
"resourceArn": str,
},
)
ListTimeSeriesRequestRequestTypeDef = TypedDict(
"ListTimeSeriesRequestRequestTypeDef",
{
"nextToken": str,
"maxResults": int,
"assetId": str,
"aliasPrefix": str,
"timeSeriesType": ListTimeSeriesTypeType,
},
total=False,
)
_RequiredTimeSeriesSummaryTypeDef = TypedDict(
"_RequiredTimeSeriesSummaryTypeDef",
{
"timeSeriesId": str,
"dataType": PropertyDataTypeType,
"timeSeriesCreationDate": datetime,
"timeSeriesLastUpdateDate": datetime,
},
)
_OptionalTimeSeriesSummaryTypeDef = TypedDict(
"_OptionalTimeSeriesSummaryTypeDef",
{
"assetId": str,
"propertyId": str,
"alias": str,
"dataTypeSpec": str,
},
total=False,
)
class TimeSeriesSummaryTypeDef(
_RequiredTimeSeriesSummaryTypeDef, _OptionalTimeSeriesSummaryTypeDef
):
pass
MetricProcessingConfigTypeDef = TypedDict(
"MetricProcessingConfigTypeDef",
{
"computeLocation": ComputeLocationType,
},
)
_RequiredTumblingWindowTypeDef = TypedDict(
"_RequiredTumblingWindowTypeDef",
{
"interval": str,
},
)
_OptionalTumblingWindowTypeDef = TypedDict(
"_OptionalTumblingWindowTypeDef",
{
"offset": str,
},
total=False,
)
class TumblingWindowTypeDef(_RequiredTumblingWindowTypeDef, _OptionalTumblingWindowTypeDef):
pass
MonitorErrorDetailsTypeDef = TypedDict(
"MonitorErrorDetailsTypeDef",
{
"code": MonitorErrorCodeType,
"message": str,
},
total=False,
)
PortalResourceTypeDef = TypedDict(
"PortalResourceTypeDef",
{
"id": str,
},
)
ProjectResourceTypeDef = TypedDict(
"ProjectResourceTypeDef",
{
"id": str,
},
)
_RequiredPutDefaultEncryptionConfigurationRequestRequestTypeDef = TypedDict(
"_RequiredPutDefaultEncryptionConfigurationRequestRequestTypeDef",
{
"encryptionType": EncryptionTypeType,
},
)
_OptionalPutDefaultEncryptionConfigurationRequestRequestTypeDef = TypedDict(
"_OptionalPutDefaultEncryptionConfigurationRequestRequestTypeDef",
{
"kmsKeyId": str,
},
total=False,
)
class PutDefaultEncryptionConfigurationRequestRequestTypeDef(
_RequiredPutDefaultEncryptionConfigurationRequestRequestTypeDef,
_OptionalPutDefaultEncryptionConfigurationRequestRequestTypeDef,
):
pass
TagResourceRequestRequestTypeDef = TypedDict(
"TagResourceRequestRequestTypeDef",
{
"resourceArn": str,
"tags": Mapping[str, str],
},
)
UntagResourceRequestRequestTypeDef = TypedDict(
"UntagResourceRequestRequestTypeDef",
{
"resourceArn": str,
"tagKeys": Sequence[str],
},
)
_RequiredUpdateAssetPropertyRequestRequestTypeDef = TypedDict(
"_RequiredUpdateAssetPropertyRequestRequestTypeDef",
{
"assetId": str,
"propertyId": str,
},
)
_OptionalUpdateAssetPropertyRequestRequestTypeDef = TypedDict(
"_OptionalUpdateAssetPropertyRequestRequestTypeDef",
{
"propertyAlias": str,
"propertyNotificationState": PropertyNotificationStateType,
"clientToken": str,
},
total=False,
)
class UpdateAssetPropertyRequestRequestTypeDef(
_RequiredUpdateAssetPropertyRequestRequestTypeDef,
_OptionalUpdateAssetPropertyRequestRequestTypeDef,
):
pass
_RequiredUpdateAssetRequestRequestTypeDef = TypedDict(
"_RequiredUpdateAssetRequestRequestTypeDef",
{
"assetId": str,
"assetName": str,
},
)
_OptionalUpdateAssetRequestRequestTypeDef = TypedDict(
"_OptionalUpdateAssetRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class UpdateAssetRequestRequestTypeDef(
_RequiredUpdateAssetRequestRequestTypeDef, _OptionalUpdateAssetRequestRequestTypeDef
):
pass
_RequiredUpdateDashboardRequestRequestTypeDef = TypedDict(
"_RequiredUpdateDashboardRequestRequestTypeDef",
{
"dashboardId": str,
"dashboardName": str,
"dashboardDefinition": str,
},
)
_OptionalUpdateDashboardRequestRequestTypeDef = TypedDict(
"_OptionalUpdateDashboardRequestRequestTypeDef",
{
"dashboardDescription": str,
"clientToken": str,
},
total=False,
)
class UpdateDashboardRequestRequestTypeDef(
_RequiredUpdateDashboardRequestRequestTypeDef, _OptionalUpdateDashboardRequestRequestTypeDef
):
pass
UpdateGatewayCapabilityConfigurationRequestRequestTypeDef = TypedDict(
"UpdateGatewayCapabilityConfigurationRequestRequestTypeDef",
{
"gatewayId": str,
"capabilityNamespace": str,
"capabilityConfiguration": str,
},
)
UpdateGatewayRequestRequestTypeDef = TypedDict(
"UpdateGatewayRequestRequestTypeDef",
{
"gatewayId": str,
"gatewayName": str,
},
)
_RequiredUpdateProjectRequestRequestTypeDef = TypedDict(
"_RequiredUpdateProjectRequestRequestTypeDef",
{
"projectId": str,
"projectName": str,
},
)
_OptionalUpdateProjectRequestRequestTypeDef = TypedDict(
"_OptionalUpdateProjectRequestRequestTypeDef",
{
"projectDescription": str,
"clientToken": str,
},
total=False,
)
class UpdateProjectRequestRequestTypeDef(
_RequiredUpdateProjectRequestRequestTypeDef, _OptionalUpdateProjectRequestRequestTypeDef
):
pass
_RequiredAggregatedValueTypeDef = TypedDict(
"_RequiredAggregatedValueTypeDef",
{
"timestamp": datetime,
"value": AggregatesTypeDef,
},
)
_OptionalAggregatedValueTypeDef = TypedDict(
"_OptionalAggregatedValueTypeDef",
{
"quality": QualityType,
},
total=False,
)
class AggregatedValueTypeDef(_RequiredAggregatedValueTypeDef, _OptionalAggregatedValueTypeDef):
pass
_RequiredAssetRelationshipSummaryTypeDef = TypedDict(
"_RequiredAssetRelationshipSummaryTypeDef",
{
"relationshipType": Literal["HIERARCHY"],
},
)
_OptionalAssetRelationshipSummaryTypeDef = TypedDict(
"_OptionalAssetRelationshipSummaryTypeDef",
{
"hierarchyInfo": AssetHierarchyInfoTypeDef,
},
total=False,
)
class AssetRelationshipSummaryTypeDef(
_RequiredAssetRelationshipSummaryTypeDef, _OptionalAssetRelationshipSummaryTypeDef
):
pass
_RequiredAssetPropertyTypeDef = TypedDict(
"_RequiredAssetPropertyTypeDef",
{
"id": str,
"name": str,
"dataType": PropertyDataTypeType,
},
)
_OptionalAssetPropertyTypeDef = TypedDict(
"_OptionalAssetPropertyTypeDef",
{
"alias": str,
"notification": PropertyNotificationTypeDef,
"dataTypeSpec": str,
"unit": str,
},
total=False,
)
class AssetPropertyTypeDef(_RequiredAssetPropertyTypeDef, _OptionalAssetPropertyTypeDef):
pass
BatchPutAssetPropertyErrorTypeDef = TypedDict(
"BatchPutAssetPropertyErrorTypeDef",
{
"errorCode": BatchPutAssetPropertyValueErrorCodeType,
"errorMessage": str,
"timestamps": List[TimeInNanosTypeDef],
},
)
_RequiredAssetPropertyValueTypeDef = TypedDict(
"_RequiredAssetPropertyValueTypeDef",
{
"value": VariantTypeDef,
"timestamp": TimeInNanosTypeDef,
},
)
_OptionalAssetPropertyValueTypeDef = TypedDict(
"_OptionalAssetPropertyValueTypeDef",
{
"quality": QualityType,
},
total=False,
)
class AssetPropertyValueTypeDef(
_RequiredAssetPropertyValueTypeDef, _OptionalAssetPropertyValueTypeDef
):
pass
InterpolatedAssetPropertyValueTypeDef = TypedDict(
"InterpolatedAssetPropertyValueTypeDef",
{
"timestamp": TimeInNanosTypeDef,
"value": VariantTypeDef,
},
)
BatchAssociateProjectAssetsResponseTypeDef = TypedDict(
"BatchAssociateProjectAssetsResponseTypeDef",
{
"errors": List[AssetErrorDetailsTypeDef],
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
BatchDisassociateProjectAssetsResponseTypeDef = TypedDict(
"BatchDisassociateProjectAssetsResponseTypeDef",
{
"errors": List[AssetErrorDetailsTypeDef],
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateAccessPolicyResponseTypeDef = TypedDict(
"CreateAccessPolicyResponseTypeDef",
{
"accessPolicyId": str,
"accessPolicyArn": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateDashboardResponseTypeDef = TypedDict(
"CreateDashboardResponseTypeDef",
{
"dashboardId": str,
"dashboardArn": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateGatewayResponseTypeDef = TypedDict(
"CreateGatewayResponseTypeDef",
{
"gatewayId": str,
"gatewayArn": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
CreateProjectResponseTypeDef = TypedDict(
"CreateProjectResponseTypeDef",
{
"projectId": str,
"projectArn": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeDashboardResponseTypeDef = TypedDict(
"DescribeDashboardResponseTypeDef",
{
"dashboardId": str,
"dashboardArn": str,
"dashboardName": str,
"projectId": str,
"dashboardDescription": str,
"dashboardDefinition": str,
"dashboardCreationDate": datetime,
"dashboardLastUpdateDate": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeGatewayCapabilityConfigurationResponseTypeDef = TypedDict(
"DescribeGatewayCapabilityConfigurationResponseTypeDef",
{
"gatewayId": str,
"capabilityNamespace": str,
"capabilityConfiguration": str,
"capabilitySyncStatus": CapabilitySyncStatusType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeProjectResponseTypeDef = TypedDict(
"DescribeProjectResponseTypeDef",
{
"projectId": str,
"projectArn": str,
"projectName": str,
"portalId": str,
"projectDescription": str,
"projectCreationDate": datetime,
"projectLastUpdateDate": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeTimeSeriesResponseTypeDef = TypedDict(
"DescribeTimeSeriesResponseTypeDef",
{
"assetId": str,
"propertyId": str,
"alias": str,
"timeSeriesId": str,
"dataType": PropertyDataTypeType,
"dataTypeSpec": str,
"timeSeriesCreationDate": datetime,
"timeSeriesLastUpdateDate": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
EmptyResponseMetadataTypeDef = TypedDict(
"EmptyResponseMetadataTypeDef",
{
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListProjectAssetsResponseTypeDef = TypedDict(
"ListProjectAssetsResponseTypeDef",
{
"assetIds": List[str],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListTagsForResourceResponseTypeDef = TypedDict(
"ListTagsForResourceResponseTypeDef",
{
"tags": Dict[str, str],
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
UpdateGatewayCapabilityConfigurationResponseTypeDef = TypedDict(
"UpdateGatewayCapabilityConfigurationResponseTypeDef",
{
"capabilityNamespace": str,
"capabilitySyncStatus": CapabilitySyncStatusType,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredConfigurationStatusTypeDef = TypedDict(
"_RequiredConfigurationStatusTypeDef",
{
"state": ConfigurationStateType,
},
)
_OptionalConfigurationStatusTypeDef = TypedDict(
"_OptionalConfigurationStatusTypeDef",
{
"error": ConfigurationErrorDetailsTypeDef,
},
total=False,
)
class ConfigurationStatusTypeDef(
_RequiredConfigurationStatusTypeDef, _OptionalConfigurationStatusTypeDef
):
pass
_RequiredCreatePortalRequestRequestTypeDef = TypedDict(
"_RequiredCreatePortalRequestRequestTypeDef",
{
"portalName": str,
"portalContactEmail": str,
"roleArn": str,
},
)
_OptionalCreatePortalRequestRequestTypeDef = TypedDict(
"_OptionalCreatePortalRequestRequestTypeDef",
{
"portalDescription": str,
"clientToken": str,
"portalLogoImageFile": ImageFileTypeDef,
"tags": Mapping[str, str],
"portalAuthMode": AuthModeType,
"notificationSenderEmail": str,
"alarms": AlarmsTypeDef,
},
total=False,
)
class CreatePortalRequestRequestTypeDef(
_RequiredCreatePortalRequestRequestTypeDef, _OptionalCreatePortalRequestRequestTypeDef
):
pass
ImageTypeDef = TypedDict(
"ImageTypeDef",
{
"id": str,
"file": ImageFileTypeDef,
},
total=False,
)
MultiLayerStorageTypeDef = TypedDict(
"MultiLayerStorageTypeDef",
{
"customerManagedS3Storage": CustomerManagedS3StorageTypeDef,
},
)
ListDashboardsResponseTypeDef = TypedDict(
"ListDashboardsResponseTypeDef",
{
"dashboardSummaries": List[DashboardSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredDescribeAssetModelRequestAssetModelActiveWaitTypeDef = TypedDict(
"_RequiredDescribeAssetModelRequestAssetModelActiveWaitTypeDef",
{
"assetModelId": str,
},
)
_OptionalDescribeAssetModelRequestAssetModelActiveWaitTypeDef = TypedDict(
"_OptionalDescribeAssetModelRequestAssetModelActiveWaitTypeDef",
{
"WaiterConfig": WaiterConfigTypeDef,
},
total=False,
)
class DescribeAssetModelRequestAssetModelActiveWaitTypeDef(
_RequiredDescribeAssetModelRequestAssetModelActiveWaitTypeDef,
_OptionalDescribeAssetModelRequestAssetModelActiveWaitTypeDef,
):
pass
_RequiredDescribeAssetModelRequestAssetModelNotExistsWaitTypeDef = TypedDict(
"_RequiredDescribeAssetModelRequestAssetModelNotExistsWaitTypeDef",
{
"assetModelId": str,
},
)
_OptionalDescribeAssetModelRequestAssetModelNotExistsWaitTypeDef = TypedDict(
"_OptionalDescribeAssetModelRequestAssetModelNotExistsWaitTypeDef",
{
"WaiterConfig": WaiterConfigTypeDef,
},
total=False,
)
class DescribeAssetModelRequestAssetModelNotExistsWaitTypeDef(
_RequiredDescribeAssetModelRequestAssetModelNotExistsWaitTypeDef,
_OptionalDescribeAssetModelRequestAssetModelNotExistsWaitTypeDef,
):
pass
_RequiredDescribeAssetRequestAssetActiveWaitTypeDef = TypedDict(
"_RequiredDescribeAssetRequestAssetActiveWaitTypeDef",
{
"assetId": str,
},
)
_OptionalDescribeAssetRequestAssetActiveWaitTypeDef = TypedDict(
"_OptionalDescribeAssetRequestAssetActiveWaitTypeDef",
{
"WaiterConfig": WaiterConfigTypeDef,
},
total=False,
)
class DescribeAssetRequestAssetActiveWaitTypeDef(
_RequiredDescribeAssetRequestAssetActiveWaitTypeDef,
_OptionalDescribeAssetRequestAssetActiveWaitTypeDef,
):
pass
_RequiredDescribeAssetRequestAssetNotExistsWaitTypeDef = TypedDict(
"_RequiredDescribeAssetRequestAssetNotExistsWaitTypeDef",
{
"assetId": str,
},
)
_OptionalDescribeAssetRequestAssetNotExistsWaitTypeDef = TypedDict(
"_OptionalDescribeAssetRequestAssetNotExistsWaitTypeDef",
{
"WaiterConfig": WaiterConfigTypeDef,
},
total=False,
)
class DescribeAssetRequestAssetNotExistsWaitTypeDef(
_RequiredDescribeAssetRequestAssetNotExistsWaitTypeDef,
_OptionalDescribeAssetRequestAssetNotExistsWaitTypeDef,
):
pass
_RequiredDescribePortalRequestPortalActiveWaitTypeDef = TypedDict(
"_RequiredDescribePortalRequestPortalActiveWaitTypeDef",
{
"portalId": str,
},
)
_OptionalDescribePortalRequestPortalActiveWaitTypeDef = TypedDict(
"_OptionalDescribePortalRequestPortalActiveWaitTypeDef",
{
"WaiterConfig": WaiterConfigTypeDef,
},
total=False,
)
class DescribePortalRequestPortalActiveWaitTypeDef(
_RequiredDescribePortalRequestPortalActiveWaitTypeDef,
_OptionalDescribePortalRequestPortalActiveWaitTypeDef,
):
pass
_RequiredDescribePortalRequestPortalNotExistsWaitTypeDef = TypedDict(
"_RequiredDescribePortalRequestPortalNotExistsWaitTypeDef",
{
"portalId": str,
},
)
_OptionalDescribePortalRequestPortalNotExistsWaitTypeDef = TypedDict(
"_OptionalDescribePortalRequestPortalNotExistsWaitTypeDef",
{
"WaiterConfig": WaiterConfigTypeDef,
},
total=False,
)
class DescribePortalRequestPortalNotExistsWaitTypeDef(
_RequiredDescribePortalRequestPortalNotExistsWaitTypeDef,
_OptionalDescribePortalRequestPortalNotExistsWaitTypeDef,
):
pass
DescribeLoggingOptionsResponseTypeDef = TypedDict(
"DescribeLoggingOptionsResponseTypeDef",
{
"loggingOptions": LoggingOptionsTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
PutLoggingOptionsRequestRequestTypeDef = TypedDict(
"PutLoggingOptionsRequestRequestTypeDef",
{
"loggingOptions": LoggingOptionsTypeDef,
},
)
_RequiredErrorDetailsTypeDef = TypedDict(
"_RequiredErrorDetailsTypeDef",
{
"code": ErrorCodeType,
"message": str,
},
)
_OptionalErrorDetailsTypeDef = TypedDict(
"_OptionalErrorDetailsTypeDef",
{
"details": List[DetailedErrorTypeDef],
},
total=False,
)
class ErrorDetailsTypeDef(_RequiredErrorDetailsTypeDef, _OptionalErrorDetailsTypeDef):
pass
ExpressionVariableTypeDef = TypedDict(
"ExpressionVariableTypeDef",
{
"name": str,
"value": VariableValueTypeDef,
},
)
MeasurementProcessingConfigTypeDef = TypedDict(
"MeasurementProcessingConfigTypeDef",
{
"forwardingConfig": ForwardingConfigTypeDef,
},
)
_RequiredTransformProcessingConfigTypeDef = TypedDict(
"_RequiredTransformProcessingConfigTypeDef",
{
"computeLocation": ComputeLocationType,
},
)
_OptionalTransformProcessingConfigTypeDef = TypedDict(
"_OptionalTransformProcessingConfigTypeDef",
{
"forwardingConfig": ForwardingConfigTypeDef,
},
total=False,
)
class TransformProcessingConfigTypeDef(
_RequiredTransformProcessingConfigTypeDef, _OptionalTransformProcessingConfigTypeDef
):
pass
GatewayPlatformTypeDef = TypedDict(
"GatewayPlatformTypeDef",
{
"greengrass": GreengrassTypeDef,
"greengrassV2": GreengrassV2TypeDef,
},
total=False,
)
_RequiredGetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef = TypedDict(
"_RequiredGetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef",
{
"aggregateTypes": Sequence[AggregateTypeType],
"resolution": str,
"startDate": Union[datetime, str],
"endDate": Union[datetime, str],
},
)
_OptionalGetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef = TypedDict(
"_OptionalGetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
"qualities": Sequence[QualityType],
"timeOrdering": TimeOrderingType,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class GetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef(
_RequiredGetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef,
_OptionalGetAssetPropertyAggregatesRequestGetAssetPropertyAggregatesPaginateTypeDef,
):
pass
GetAssetPropertyValueHistoryRequestGetAssetPropertyValueHistoryPaginateTypeDef = TypedDict(
"GetAssetPropertyValueHistoryRequestGetAssetPropertyValueHistoryPaginateTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
"startDate": Union[datetime, str],
"endDate": Union[datetime, str],
"qualities": Sequence[QualityType],
"timeOrdering": TimeOrderingType,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
_RequiredGetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef = TypedDict(
"_RequiredGetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef",
{
"startTimeInSeconds": int,
"endTimeInSeconds": int,
"quality": QualityType,
"intervalInSeconds": int,
"type": str,
},
)
_OptionalGetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef = TypedDict(
"_OptionalGetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
"startTimeOffsetInNanos": int,
"endTimeOffsetInNanos": int,
"intervalWindowInSeconds": int,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class GetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef(
_RequiredGetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef,
_OptionalGetInterpolatedAssetPropertyValuesRequestGetInterpolatedAssetPropertyValuesPaginateTypeDef,
):
pass
ListAccessPoliciesRequestListAccessPoliciesPaginateTypeDef = TypedDict(
"ListAccessPoliciesRequestListAccessPoliciesPaginateTypeDef",
{
"identityType": IdentityTypeType,
"identityId": str,
"resourceType": ResourceTypeType,
"resourceId": str,
"iamArn": str,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
ListAssetModelsRequestListAssetModelsPaginateTypeDef = TypedDict(
"ListAssetModelsRequestListAssetModelsPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
_RequiredListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef = TypedDict(
"_RequiredListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef",
{
"assetId": str,
"traversalType": Literal["PATH_TO_ROOT"],
},
)
_OptionalListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef = TypedDict(
"_OptionalListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class ListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef(
_RequiredListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef,
_OptionalListAssetRelationshipsRequestListAssetRelationshipsPaginateTypeDef,
):
pass
ListAssetsRequestListAssetsPaginateTypeDef = TypedDict(
"ListAssetsRequestListAssetsPaginateTypeDef",
{
"assetModelId": str,
"filter": ListAssetsFilterType,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
_RequiredListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef = TypedDict(
"_RequiredListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef",
{
"assetId": str,
},
)
_OptionalListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef = TypedDict(
"_OptionalListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef",
{
"hierarchyId": str,
"traversalDirection": TraversalDirectionType,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class ListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef(
_RequiredListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef,
_OptionalListAssociatedAssetsRequestListAssociatedAssetsPaginateTypeDef,
):
pass
_RequiredListDashboardsRequestListDashboardsPaginateTypeDef = TypedDict(
"_RequiredListDashboardsRequestListDashboardsPaginateTypeDef",
{
"projectId": str,
},
)
_OptionalListDashboardsRequestListDashboardsPaginateTypeDef = TypedDict(
"_OptionalListDashboardsRequestListDashboardsPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class ListDashboardsRequestListDashboardsPaginateTypeDef(
_RequiredListDashboardsRequestListDashboardsPaginateTypeDef,
_OptionalListDashboardsRequestListDashboardsPaginateTypeDef,
):
pass
ListGatewaysRequestListGatewaysPaginateTypeDef = TypedDict(
"ListGatewaysRequestListGatewaysPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
ListPortalsRequestListPortalsPaginateTypeDef = TypedDict(
"ListPortalsRequestListPortalsPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
_RequiredListProjectAssetsRequestListProjectAssetsPaginateTypeDef = TypedDict(
"_RequiredListProjectAssetsRequestListProjectAssetsPaginateTypeDef",
{
"projectId": str,
},
)
_OptionalListProjectAssetsRequestListProjectAssetsPaginateTypeDef = TypedDict(
"_OptionalListProjectAssetsRequestListProjectAssetsPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class ListProjectAssetsRequestListProjectAssetsPaginateTypeDef(
_RequiredListProjectAssetsRequestListProjectAssetsPaginateTypeDef,
_OptionalListProjectAssetsRequestListProjectAssetsPaginateTypeDef,
):
pass
_RequiredListProjectsRequestListProjectsPaginateTypeDef = TypedDict(
"_RequiredListProjectsRequestListProjectsPaginateTypeDef",
{
"portalId": str,
},
)
_OptionalListProjectsRequestListProjectsPaginateTypeDef = TypedDict(
"_OptionalListProjectsRequestListProjectsPaginateTypeDef",
{
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
class ListProjectsRequestListProjectsPaginateTypeDef(
_RequiredListProjectsRequestListProjectsPaginateTypeDef,
_OptionalListProjectsRequestListProjectsPaginateTypeDef,
):
pass
ListTimeSeriesRequestListTimeSeriesPaginateTypeDef = TypedDict(
"ListTimeSeriesRequestListTimeSeriesPaginateTypeDef",
{
"assetId": str,
"aliasPrefix": str,
"timeSeriesType": ListTimeSeriesTypeType,
"PaginationConfig": PaginatorConfigTypeDef,
},
total=False,
)
IdentityTypeDef = TypedDict(
"IdentityTypeDef",
{
"user": UserIdentityTypeDef,
"group": GroupIdentityTypeDef,
"iamUser": IAMUserIdentityTypeDef,
"iamRole": IAMRoleIdentityTypeDef,
},
total=False,
)
ListProjectsResponseTypeDef = TypedDict(
"ListProjectsResponseTypeDef",
{
"projectSummaries": List[ProjectSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListTimeSeriesResponseTypeDef = TypedDict(
"ListTimeSeriesResponseTypeDef",
{
"TimeSeriesSummaries": List[TimeSeriesSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
MetricWindowTypeDef = TypedDict(
"MetricWindowTypeDef",
{
"tumbling": TumblingWindowTypeDef,
},
total=False,
)
_RequiredPortalStatusTypeDef = TypedDict(
"_RequiredPortalStatusTypeDef",
{
"state": PortalStateType,
},
)
_OptionalPortalStatusTypeDef = TypedDict(
"_OptionalPortalStatusTypeDef",
{
"error": MonitorErrorDetailsTypeDef,
},
total=False,
)
class PortalStatusTypeDef(_RequiredPortalStatusTypeDef, _OptionalPortalStatusTypeDef):
pass
ResourceTypeDef = TypedDict(
"ResourceTypeDef",
{
"portal": PortalResourceTypeDef,
"project": ProjectResourceTypeDef,
},
total=False,
)
GetAssetPropertyAggregatesResponseTypeDef = TypedDict(
"GetAssetPropertyAggregatesResponseTypeDef",
{
"aggregatedValues": List[AggregatedValueTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListAssetRelationshipsResponseTypeDef = TypedDict(
"ListAssetRelationshipsResponseTypeDef",
{
"assetRelationshipSummaries": List[AssetRelationshipSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredAssetCompositeModelTypeDef = TypedDict(
"_RequiredAssetCompositeModelTypeDef",
{
"name": str,
"type": str,
"properties": List[AssetPropertyTypeDef],
},
)
_OptionalAssetCompositeModelTypeDef = TypedDict(
"_OptionalAssetCompositeModelTypeDef",
{
"description": str,
},
total=False,
)
class AssetCompositeModelTypeDef(
_RequiredAssetCompositeModelTypeDef, _OptionalAssetCompositeModelTypeDef
):
pass
BatchPutAssetPropertyErrorEntryTypeDef = TypedDict(
"BatchPutAssetPropertyErrorEntryTypeDef",
{
"entryId": str,
"errors": List[BatchPutAssetPropertyErrorTypeDef],
},
)
GetAssetPropertyValueHistoryResponseTypeDef = TypedDict(
"GetAssetPropertyValueHistoryResponseTypeDef",
{
"assetPropertyValueHistory": List[AssetPropertyValueTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
GetAssetPropertyValueResponseTypeDef = TypedDict(
"GetAssetPropertyValueResponseTypeDef",
{
"propertyValue": AssetPropertyValueTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredPutAssetPropertyValueEntryTypeDef = TypedDict(
"_RequiredPutAssetPropertyValueEntryTypeDef",
{
"entryId": str,
"propertyValues": Sequence[AssetPropertyValueTypeDef],
},
)
_OptionalPutAssetPropertyValueEntryTypeDef = TypedDict(
"_OptionalPutAssetPropertyValueEntryTypeDef",
{
"assetId": str,
"propertyId": str,
"propertyAlias": str,
},
total=False,
)
class PutAssetPropertyValueEntryTypeDef(
_RequiredPutAssetPropertyValueEntryTypeDef, _OptionalPutAssetPropertyValueEntryTypeDef
):
pass
GetInterpolatedAssetPropertyValuesResponseTypeDef = TypedDict(
"GetInterpolatedAssetPropertyValuesResponseTypeDef",
{
"interpolatedAssetPropertyValues": List[InterpolatedAssetPropertyValueTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeDefaultEncryptionConfigurationResponseTypeDef = TypedDict(
"DescribeDefaultEncryptionConfigurationResponseTypeDef",
{
"encryptionType": EncryptionTypeType,
"kmsKeyArn": str,
"configurationStatus": ConfigurationStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
PutDefaultEncryptionConfigurationResponseTypeDef = TypedDict(
"PutDefaultEncryptionConfigurationResponseTypeDef",
{
"encryptionType": EncryptionTypeType,
"kmsKeyArn": str,
"configurationStatus": ConfigurationStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredUpdatePortalRequestRequestTypeDef = TypedDict(
"_RequiredUpdatePortalRequestRequestTypeDef",
{
"portalId": str,
"portalName": str,
"portalContactEmail": str,
"roleArn": str,
},
)
_OptionalUpdatePortalRequestRequestTypeDef = TypedDict(
"_OptionalUpdatePortalRequestRequestTypeDef",
{
"portalDescription": str,
"portalLogoImage": ImageTypeDef,
"clientToken": str,
"notificationSenderEmail": str,
"alarms": AlarmsTypeDef,
},
total=False,
)
class UpdatePortalRequestRequestTypeDef(
_RequiredUpdatePortalRequestRequestTypeDef, _OptionalUpdatePortalRequestRequestTypeDef
):
pass
DescribeStorageConfigurationResponseTypeDef = TypedDict(
"DescribeStorageConfigurationResponseTypeDef",
{
"storageType": StorageTypeType,
"multiLayerStorage": MultiLayerStorageTypeDef,
"disassociatedDataStorage": DisassociatedDataStorageStateType,
"retentionPeriod": RetentionPeriodTypeDef,
"configurationStatus": ConfigurationStatusTypeDef,
"lastUpdateDate": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredPutStorageConfigurationRequestRequestTypeDef = TypedDict(
"_RequiredPutStorageConfigurationRequestRequestTypeDef",
{
"storageType": StorageTypeType,
},
)
_OptionalPutStorageConfigurationRequestRequestTypeDef = TypedDict(
"_OptionalPutStorageConfigurationRequestRequestTypeDef",
{
"multiLayerStorage": MultiLayerStorageTypeDef,
"disassociatedDataStorage": DisassociatedDataStorageStateType,
"retentionPeriod": RetentionPeriodTypeDef,
},
total=False,
)
class PutStorageConfigurationRequestRequestTypeDef(
_RequiredPutStorageConfigurationRequestRequestTypeDef,
_OptionalPutStorageConfigurationRequestRequestTypeDef,
):
pass
PutStorageConfigurationResponseTypeDef = TypedDict(
"PutStorageConfigurationResponseTypeDef",
{
"storageType": StorageTypeType,
"multiLayerStorage": MultiLayerStorageTypeDef,
"disassociatedDataStorage": DisassociatedDataStorageStateType,
"retentionPeriod": RetentionPeriodTypeDef,
"configurationStatus": ConfigurationStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredAssetModelStatusTypeDef = TypedDict(
"_RequiredAssetModelStatusTypeDef",
{
"state": AssetModelStateType,
},
)
_OptionalAssetModelStatusTypeDef = TypedDict(
"_OptionalAssetModelStatusTypeDef",
{
"error": ErrorDetailsTypeDef,
},
total=False,
)
class AssetModelStatusTypeDef(_RequiredAssetModelStatusTypeDef, _OptionalAssetModelStatusTypeDef):
pass
_RequiredAssetStatusTypeDef = TypedDict(
"_RequiredAssetStatusTypeDef",
{
"state": AssetStateType,
},
)
_OptionalAssetStatusTypeDef = TypedDict(
"_OptionalAssetStatusTypeDef",
{
"error": ErrorDetailsTypeDef,
},
total=False,
)
class AssetStatusTypeDef(_RequiredAssetStatusTypeDef, _OptionalAssetStatusTypeDef):
pass
MeasurementTypeDef = TypedDict(
"MeasurementTypeDef",
{
"processingConfig": MeasurementProcessingConfigTypeDef,
},
total=False,
)
_RequiredTransformTypeDef = TypedDict(
"_RequiredTransformTypeDef",
{
"expression": str,
"variables": Sequence[ExpressionVariableTypeDef],
},
)
_OptionalTransformTypeDef = TypedDict(
"_OptionalTransformTypeDef",
{
"processingConfig": TransformProcessingConfigTypeDef,
},
total=False,
)
class TransformTypeDef(_RequiredTransformTypeDef, _OptionalTransformTypeDef):
pass
_RequiredCreateGatewayRequestRequestTypeDef = TypedDict(
"_RequiredCreateGatewayRequestRequestTypeDef",
{
"gatewayName": str,
"gatewayPlatform": GatewayPlatformTypeDef,
},
)
_OptionalCreateGatewayRequestRequestTypeDef = TypedDict(
"_OptionalCreateGatewayRequestRequestTypeDef",
{
"tags": Mapping[str, str],
},
total=False,
)
class CreateGatewayRequestRequestTypeDef(
_RequiredCreateGatewayRequestRequestTypeDef, _OptionalCreateGatewayRequestRequestTypeDef
):
pass
DescribeGatewayResponseTypeDef = TypedDict(
"DescribeGatewayResponseTypeDef",
{
"gatewayId": str,
"gatewayName": str,
"gatewayArn": str,
"gatewayPlatform": GatewayPlatformTypeDef,
"gatewayCapabilitySummaries": List[GatewayCapabilitySummaryTypeDef],
"creationDate": datetime,
"lastUpdateDate": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredGatewaySummaryTypeDef = TypedDict(
"_RequiredGatewaySummaryTypeDef",
{
"gatewayId": str,
"gatewayName": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
},
)
_OptionalGatewaySummaryTypeDef = TypedDict(
"_OptionalGatewaySummaryTypeDef",
{
"gatewayPlatform": GatewayPlatformTypeDef,
"gatewayCapabilitySummaries": List[GatewayCapabilitySummaryTypeDef],
},
total=False,
)
class GatewaySummaryTypeDef(_RequiredGatewaySummaryTypeDef, _OptionalGatewaySummaryTypeDef):
pass
_RequiredMetricTypeDef = TypedDict(
"_RequiredMetricTypeDef",
{
"expression": str,
"variables": Sequence[ExpressionVariableTypeDef],
"window": MetricWindowTypeDef,
},
)
_OptionalMetricTypeDef = TypedDict(
"_OptionalMetricTypeDef",
{
"processingConfig": MetricProcessingConfigTypeDef,
},
total=False,
)
class MetricTypeDef(_RequiredMetricTypeDef, _OptionalMetricTypeDef):
pass
CreatePortalResponseTypeDef = TypedDict(
"CreatePortalResponseTypeDef",
{
"portalId": str,
"portalArn": str,
"portalStartUrl": str,
"portalStatus": PortalStatusTypeDef,
"ssoApplicationId": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DeletePortalResponseTypeDef = TypedDict(
"DeletePortalResponseTypeDef",
{
"portalStatus": PortalStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribePortalResponseTypeDef = TypedDict(
"DescribePortalResponseTypeDef",
{
"portalId": str,
"portalArn": str,
"portalName": str,
"portalDescription": str,
"portalClientId": str,
"portalStartUrl": str,
"portalContactEmail": str,
"portalStatus": PortalStatusTypeDef,
"portalCreationDate": datetime,
"portalLastUpdateDate": datetime,
"portalLogoImageLocation": ImageLocationTypeDef,
"roleArn": str,
"portalAuthMode": AuthModeType,
"notificationSenderEmail": str,
"alarms": AlarmsTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredPortalSummaryTypeDef = TypedDict(
"_RequiredPortalSummaryTypeDef",
{
"id": str,
"name": str,
"startUrl": str,
"status": PortalStatusTypeDef,
},
)
_OptionalPortalSummaryTypeDef = TypedDict(
"_OptionalPortalSummaryTypeDef",
{
"description": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
"roleArn": str,
},
total=False,
)
class PortalSummaryTypeDef(_RequiredPortalSummaryTypeDef, _OptionalPortalSummaryTypeDef):
pass
UpdatePortalResponseTypeDef = TypedDict(
"UpdatePortalResponseTypeDef",
{
"portalStatus": PortalStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredAccessPolicySummaryTypeDef = TypedDict(
"_RequiredAccessPolicySummaryTypeDef",
{
"id": str,
"identity": IdentityTypeDef,
"resource": ResourceTypeDef,
"permission": PermissionType,
},
)
_OptionalAccessPolicySummaryTypeDef = TypedDict(
"_OptionalAccessPolicySummaryTypeDef",
{
"creationDate": datetime,
"lastUpdateDate": datetime,
},
total=False,
)
class AccessPolicySummaryTypeDef(
_RequiredAccessPolicySummaryTypeDef, _OptionalAccessPolicySummaryTypeDef
):
pass
_RequiredCreateAccessPolicyRequestRequestTypeDef = TypedDict(
"_RequiredCreateAccessPolicyRequestRequestTypeDef",
{
"accessPolicyIdentity": IdentityTypeDef,
"accessPolicyResource": ResourceTypeDef,
"accessPolicyPermission": PermissionType,
},
)
_OptionalCreateAccessPolicyRequestRequestTypeDef = TypedDict(
"_OptionalCreateAccessPolicyRequestRequestTypeDef",
{
"clientToken": str,
"tags": Mapping[str, str],
},
total=False,
)
class CreateAccessPolicyRequestRequestTypeDef(
_RequiredCreateAccessPolicyRequestRequestTypeDef,
_OptionalCreateAccessPolicyRequestRequestTypeDef,
):
pass
DescribeAccessPolicyResponseTypeDef = TypedDict(
"DescribeAccessPolicyResponseTypeDef",
{
"accessPolicyId": str,
"accessPolicyArn": str,
"accessPolicyIdentity": IdentityTypeDef,
"accessPolicyResource": ResourceTypeDef,
"accessPolicyPermission": PermissionType,
"accessPolicyCreationDate": datetime,
"accessPolicyLastUpdateDate": datetime,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredUpdateAccessPolicyRequestRequestTypeDef = TypedDict(
"_RequiredUpdateAccessPolicyRequestRequestTypeDef",
{
"accessPolicyId": str,
"accessPolicyIdentity": IdentityTypeDef,
"accessPolicyResource": ResourceTypeDef,
"accessPolicyPermission": PermissionType,
},
)
_OptionalUpdateAccessPolicyRequestRequestTypeDef = TypedDict(
"_OptionalUpdateAccessPolicyRequestRequestTypeDef",
{
"clientToken": str,
},
total=False,
)
class UpdateAccessPolicyRequestRequestTypeDef(
_RequiredUpdateAccessPolicyRequestRequestTypeDef,
_OptionalUpdateAccessPolicyRequestRequestTypeDef,
):
pass
BatchPutAssetPropertyValueResponseTypeDef = TypedDict(
"BatchPutAssetPropertyValueResponseTypeDef",
{
"errorEntries": List[BatchPutAssetPropertyErrorEntryTypeDef],
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
BatchPutAssetPropertyValueRequestRequestTypeDef = TypedDict(
"BatchPutAssetPropertyValueRequestRequestTypeDef",
{
"entries": Sequence[PutAssetPropertyValueEntryTypeDef],
},
)
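# Illustrative example (not part of the generated stubs): a minimal
# BatchPutAssetPropertyValue request assembled from the TypedDicts above. The
# entry ID, alias, timestamp and value are placeholders; such a dict would be
# unpacked as keyword arguments to the IoT SiteWise client's
# batch_put_asset_property_value call.
_example_batch_put_request: BatchPutAssetPropertyValueRequestRequestTypeDef = {
    "entries": [
        {
            "entryId": "entry-1",
            "propertyAlias": "/factory/line1/temperature",
            "propertyValues": [
                {
                    "value": {"doubleValue": 21.5},
                    "timestamp": {"timeInSeconds": 1700000000, "offsetInNanos": 0},
                    "quality": "GOOD",
                }
            ],
        }
    ],
}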
AssetModelSummaryTypeDef = TypedDict(
"AssetModelSummaryTypeDef",
{
"id": str,
"arn": str,
"name": str,
"description": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
"status": AssetModelStatusTypeDef,
},
)
CreateAssetModelResponseTypeDef = TypedDict(
"CreateAssetModelResponseTypeDef",
{
"assetModelId": str,
"assetModelArn": str,
"assetModelStatus": AssetModelStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DeleteAssetModelResponseTypeDef = TypedDict(
"DeleteAssetModelResponseTypeDef",
{
"assetModelStatus": AssetModelStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
UpdateAssetModelResponseTypeDef = TypedDict(
"UpdateAssetModelResponseTypeDef",
{
"assetModelStatus": AssetModelStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
AssetSummaryTypeDef = TypedDict(
"AssetSummaryTypeDef",
{
"id": str,
"arn": str,
"name": str,
"assetModelId": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
"status": AssetStatusTypeDef,
"hierarchies": List[AssetHierarchyTypeDef],
},
)
AssociatedAssetsSummaryTypeDef = TypedDict(
"AssociatedAssetsSummaryTypeDef",
{
"id": str,
"arn": str,
"name": str,
"assetModelId": str,
"creationDate": datetime,
"lastUpdateDate": datetime,
"status": AssetStatusTypeDef,
"hierarchies": List[AssetHierarchyTypeDef],
},
)
CreateAssetResponseTypeDef = TypedDict(
"CreateAssetResponseTypeDef",
{
"assetId": str,
"assetArn": str,
"assetStatus": AssetStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DeleteAssetResponseTypeDef = TypedDict(
"DeleteAssetResponseTypeDef",
{
"assetStatus": AssetStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
DescribeAssetResponseTypeDef = TypedDict(
"DescribeAssetResponseTypeDef",
{
"assetId": str,
"assetArn": str,
"assetName": str,
"assetModelId": str,
"assetProperties": List[AssetPropertyTypeDef],
"assetHierarchies": List[AssetHierarchyTypeDef],
"assetCompositeModels": List[AssetCompositeModelTypeDef],
"assetCreationDate": datetime,
"assetLastUpdateDate": datetime,
"assetStatus": AssetStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
UpdateAssetResponseTypeDef = TypedDict(
"UpdateAssetResponseTypeDef",
{
"assetStatus": AssetStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListGatewaysResponseTypeDef = TypedDict(
"ListGatewaysResponseTypeDef",
{
"gatewaySummaries": List[GatewaySummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
PropertyTypeTypeDef = TypedDict(
"PropertyTypeTypeDef",
{
"attribute": AttributeTypeDef,
"measurement": MeasurementTypeDef,
"transform": TransformTypeDef,
"metric": MetricTypeDef,
},
total=False,
)
ListPortalsResponseTypeDef = TypedDict(
"ListPortalsResponseTypeDef",
{
"portalSummaries": List[PortalSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListAccessPoliciesResponseTypeDef = TypedDict(
"ListAccessPoliciesResponseTypeDef",
{
"accessPolicySummaries": List[AccessPolicySummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListAssetModelsResponseTypeDef = TypedDict(
"ListAssetModelsResponseTypeDef",
{
"assetModelSummaries": List[AssetModelSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListAssetsResponseTypeDef = TypedDict(
"ListAssetsResponseTypeDef",
{
"assetSummaries": List[AssetSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
ListAssociatedAssetsResponseTypeDef = TypedDict(
"ListAssociatedAssetsResponseTypeDef",
{
"assetSummaries": List[AssociatedAssetsSummaryTypeDef],
"nextToken": str,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredAssetModelPropertyDefinitionTypeDef = TypedDict(
"_RequiredAssetModelPropertyDefinitionTypeDef",
{
"name": str,
"dataType": PropertyDataTypeType,
"type": PropertyTypeTypeDef,
},
)
_OptionalAssetModelPropertyDefinitionTypeDef = TypedDict(
"_OptionalAssetModelPropertyDefinitionTypeDef",
{
"dataTypeSpec": str,
"unit": str,
},
total=False,
)
class AssetModelPropertyDefinitionTypeDef(
_RequiredAssetModelPropertyDefinitionTypeDef, _OptionalAssetModelPropertyDefinitionTypeDef
):
pass
_RequiredAssetModelPropertyTypeDef = TypedDict(
"_RequiredAssetModelPropertyTypeDef",
{
"name": str,
"dataType": PropertyDataTypeType,
"type": PropertyTypeTypeDef,
},
)
_OptionalAssetModelPropertyTypeDef = TypedDict(
"_OptionalAssetModelPropertyTypeDef",
{
"id": str,
"dataTypeSpec": str,
"unit": str,
},
total=False,
)
class AssetModelPropertyTypeDef(
_RequiredAssetModelPropertyTypeDef, _OptionalAssetModelPropertyTypeDef
):
pass
_RequiredPropertyTypeDef = TypedDict(
"_RequiredPropertyTypeDef",
{
"id": str,
"name": str,
"dataType": PropertyDataTypeType,
},
)
_OptionalPropertyTypeDef = TypedDict(
"_OptionalPropertyTypeDef",
{
"alias": str,
"notification": PropertyNotificationTypeDef,
"unit": str,
"type": PropertyTypeTypeDef,
},
total=False,
)
class PropertyTypeDef(_RequiredPropertyTypeDef, _OptionalPropertyTypeDef):
pass
_RequiredAssetModelCompositeModelDefinitionTypeDef = TypedDict(
"_RequiredAssetModelCompositeModelDefinitionTypeDef",
{
"name": str,
"type": str,
},
)
_OptionalAssetModelCompositeModelDefinitionTypeDef = TypedDict(
"_OptionalAssetModelCompositeModelDefinitionTypeDef",
{
"description": str,
"properties": Sequence[AssetModelPropertyDefinitionTypeDef],
},
total=False,
)
class AssetModelCompositeModelDefinitionTypeDef(
_RequiredAssetModelCompositeModelDefinitionTypeDef,
_OptionalAssetModelCompositeModelDefinitionTypeDef,
):
pass
_RequiredAssetModelCompositeModelTypeDef = TypedDict(
"_RequiredAssetModelCompositeModelTypeDef",
{
"name": str,
"type": str,
},
)
_OptionalAssetModelCompositeModelTypeDef = TypedDict(
"_OptionalAssetModelCompositeModelTypeDef",
{
"description": str,
"properties": List[AssetModelPropertyTypeDef],
},
total=False,
)
class AssetModelCompositeModelTypeDef(
_RequiredAssetModelCompositeModelTypeDef, _OptionalAssetModelCompositeModelTypeDef
):
pass
CompositeModelPropertyTypeDef = TypedDict(
"CompositeModelPropertyTypeDef",
{
"name": str,
"type": str,
"assetProperty": PropertyTypeDef,
},
)
_RequiredCreateAssetModelRequestRequestTypeDef = TypedDict(
"_RequiredCreateAssetModelRequestRequestTypeDef",
{
"assetModelName": str,
},
)
_OptionalCreateAssetModelRequestRequestTypeDef = TypedDict(
"_OptionalCreateAssetModelRequestRequestTypeDef",
{
"assetModelDescription": str,
"assetModelProperties": Sequence[AssetModelPropertyDefinitionTypeDef],
"assetModelHierarchies": Sequence[AssetModelHierarchyDefinitionTypeDef],
"assetModelCompositeModels": Sequence[AssetModelCompositeModelDefinitionTypeDef],
"clientToken": str,
"tags": Mapping[str, str],
},
total=False,
)
class CreateAssetModelRequestRequestTypeDef(
_RequiredCreateAssetModelRequestRequestTypeDef, _OptionalCreateAssetModelRequestRequestTypeDef
):
pass
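# Illustrative example (not part of the generated stubs): a minimal asset model
# creation request built from the definitions above. The model name, property
# name and unit are placeholders.
_example_create_asset_model_request: CreateAssetModelRequestRequestTypeDef = {
    "assetModelName": "WindTurbine",
    "assetModelDescription": "Example asset model",
    "assetModelProperties": [
        {
            "name": "Temperature",
            "dataType": "DOUBLE",
            "unit": "Celsius",
            "type": {"measurement": {}},
        }
    ],
}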
DescribeAssetModelResponseTypeDef = TypedDict(
"DescribeAssetModelResponseTypeDef",
{
"assetModelId": str,
"assetModelArn": str,
"assetModelName": str,
"assetModelDescription": str,
"assetModelProperties": List[AssetModelPropertyTypeDef],
"assetModelHierarchies": List[AssetModelHierarchyTypeDef],
"assetModelCompositeModels": List[AssetModelCompositeModelTypeDef],
"assetModelCreationDate": datetime,
"assetModelLastUpdateDate": datetime,
"assetModelStatus": AssetModelStatusTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
_RequiredUpdateAssetModelRequestRequestTypeDef = TypedDict(
"_RequiredUpdateAssetModelRequestRequestTypeDef",
{
"assetModelId": str,
"assetModelName": str,
},
)
_OptionalUpdateAssetModelRequestRequestTypeDef = TypedDict(
"_OptionalUpdateAssetModelRequestRequestTypeDef",
{
"assetModelDescription": str,
"assetModelProperties": Sequence[AssetModelPropertyTypeDef],
"assetModelHierarchies": Sequence[AssetModelHierarchyTypeDef],
"assetModelCompositeModels": Sequence[AssetModelCompositeModelTypeDef],
"clientToken": str,
},
total=False,
)
class UpdateAssetModelRequestRequestTypeDef(
_RequiredUpdateAssetModelRequestRequestTypeDef, _OptionalUpdateAssetModelRequestRequestTypeDef
):
pass
DescribeAssetPropertyResponseTypeDef = TypedDict(
"DescribeAssetPropertyResponseTypeDef",
{
"assetId": str,
"assetName": str,
"assetModelId": str,
"assetProperty": PropertyTypeDef,
"compositeModel": CompositeModelPropertyTypeDef,
"ResponseMetadata": ResponseMetadataTypeDef,
},
)
# /tensorflow_cpu_test_package-2.11.0rc0-cp38-cp38-win_amd64.whl/tensorflow/dtensor/python/input_util.py
import dataclasses
from typing import Any, List, Optional, Sequence, Tuple
from tensorflow.dtensor.python import api
from tensorflow.dtensor.python import config
from tensorflow.dtensor.python import layout as layout_lib
from tensorflow.python.data.experimental.ops import data_service_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
@dataclasses.dataclass
class TFDataServiceConfig:
"""Specifies the tf.data service configuration to use.
Attributes:
dispatcher_address: a string specifying the address of the tf.data service
dispatcher server.
job_name: a non-empty string identifying the shared job that will be created
on tf.data service to process this dataset.
"""
dispatcher_address: str
job_name: str
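# Illustrative usage (not from the original source; the dispatcher address and
# job name below are hypothetical):
#
#   service_config = TFDataServiceConfig(
#       dispatcher_address="dispatcher.example.com:5050",
#       job_name="dtensor_input_job")
#
# The config is then passed as `tf_data_service_config` to DTensorDataset when
# running in multi-client mode.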
# TODO(b/223275517): Add support for get_next_as_optional().
class _DTensorIterator(iterator_ops.IteratorBase):
"""An iterator for a tf.data.Dataset distributed using DTensor.
DTensorIterator encapsulates multiple underlying dataset iterators. It handles
retrieving the tensors to be placed on each underlying device and then uses
the 'pack' operation to create and return a DTensor. Thus users need only
interact with a single DTensorIterator to automatically distribute dataset
tensors onto devices.
"""
def __init__(self, datasets: Sequence[Tuple[int, dataset_ops.DatasetV2]],
element_spec: tensor_spec.TensorSpec, layouts: Any,
num_local_devices_per_replica: int):
"""Initializes a distributed iterator for DTensor datasets.
The DTensorIterator uses 'replica IDs' to identify shards of a dataset. Here
the term 'replica' is used in the data-parallel context where each replica
receives a partition of the global batch. Depending on the model parallelism
in the layouts supplied, each device within that replica may receive the
same partition of the global batch (no model parallelism), or specific
slices of that partition.
Args:
datasets: a sequence of (local replica ID, dataset) pairs; the elements of
each dataset will be placed on the devices corresponding to that
replica.
element_spec: the underlying dataset's element spec.
layouts: a structure of DTensor layouts to be applied to the dataset
values. This can be a single layout or (possibly nested) tuples or
dictionaries of layouts, and the structure must match the structure of
the dataset.
num_local_devices_per_replica: the number of local devices for each
replica.
"""
self._iterators = [
(replica_id, iter(dataset)) for replica_id, dataset in datasets
]
self._element_spec = element_spec
self._layouts = layouts
self._num_local_devices_per_replica = num_local_devices_per_replica
self._flattened_layouts = nest.flatten(self._layouts)
def __next__(self):
try:
return self.get_next()
except errors.OutOfRangeError as e:
raise StopIteration from e
def __iter__(self):
return self
@property
def element_spec(self):
"""The type specification of an element of this iterator.
A possibly nested structure of `tf.TypeSpec` objects matching the structure
of an element of this iterator.
"""
return self._element_spec
def get_next(self):
"""Returns the next element.
Returns:
A possibly nested structure of values matching
`tf.data.Iterator.element_spec`.
Raises:
`tf.errors.OutOfRangeError`: if the end of the underlying iterators has
been reached.
RuntimeError: if any of the underlying iterators do not return the
expected number of items.
"""
# Create the data structure to store the individual elements of the current
# batch. We store a list per element in the flattened dataset batch, and
# each list should contain as many tensors as there are local devices.
curr_batch_elems = [[] for _ in range(len(self._flattened_layouts))]
for _, iterator in self._iterators:
for _ in range(self._num_local_devices_per_replica):
element = iterator.get_next()
# Separate the dataset elements based on the structure of the dataset.
flattened_element = nest.flatten(element)
for idx, batch in enumerate(flattened_element):
curr_batch_elems[idx].append(batch)
flattened_output = []
for batch_elems, layout in zip(curr_batch_elems, self._flattened_layouts):
expected_num_elems = layout.mesh.num_local_devices()
actual_num_elems = len(batch_elems)
if actual_num_elems != expected_num_elems:
raise RuntimeError('Expected to pack %d elements in batch but got %d' %
(expected_num_elems, actual_num_elems))
flattened_output.append(api.pack(batch_elems, layout))
return nest.pack_sequence_as(self._layouts, flattened_output)
def get_next_as_optional(self):
"""Returns the next element wrapped in `tf.experimental.Optional`.
If the iterator has reached the end of the sequence, the returned
`tf.experimental.Optional` will have no value.
Returns:
A `tf.experimental.Optional` object representing the next element.
"""
raise NotImplementedError(
'get_next_as_optional not yet supported: b/223275517')
@property
def _type_spec(self):
return iterator_ops.IteratorSpec(self._element_spec)
def _validate_input(flattened_layouts: Sequence[layout_lib.Layout],
flattened_elem_spec: Sequence[tensor_spec.TensorSpec],
dataset_already_batched: bool):
"""Checks that the dataset's layouts and element specs are compatible.
Args:
flattened_layouts: the flattened list of layouts used to distribute the
dataset.
flattened_elem_spec: the flattened list of element specs used in the
dataset's components.
dataset_already_batched: whether the dataset to be validated is already
batched.
Raises:
ValueError: if the dataset's inputs are incompatible.
"""
if not flattened_elem_spec:
raise ValueError(
'Expected input element spec of at least one element, was empty.')
first_elem_shape = flattened_elem_spec[0].shape
for layout, elem_spec in zip(flattened_layouts, flattened_elem_spec):
if elem_spec.shape.rank is None:
raise ValueError(
'Dataset element shape must have a valid rank, got spec %s.' %
elem_spec)
# Check that layout's rank matches the element's rank. If dataset is not yet
# batched, then the layout's rank must be one greater than the element's
# rank.
expected_rank = elem_spec.shape.rank
if not dataset_already_batched:
expected_rank += 1
if layout.rank != expected_rank:
raise ValueError(
('Expected layout with rank %d for element spec %s, got layout %s. '
'Check that the dataset is not batched before passing to '
'DTensorDataset.') %
(expected_rank, elem_spec, layout.sharding_specs))
if dataset_already_batched:
# Check that the batch dimension size of all dataset elements match.
batch_dim_size = first_elem_shape.as_list()[0]
if batch_dim_size is None:
raise ValueError(
('Size of batch dimension of element spec %s is None. Ensure '
'drop_remainder=True when batching the dataset.') % elem_spec)
if elem_spec.shape.as_list()[0] != batch_dim_size:
raise ValueError(
('Size of batch dimension of element spec %s does not match '
'expected size %d.') % (elem_spec, batch_dim_size))
def _shard_counts(layout: layout_lib.Layout,
batch_dim: Optional[str] = None) -> List[int]:
"""Computes a list of the number of shards in each dimension of the layout.
The shard counts are used to slice each dataset element. The batch dimension's
count is overridden to 1 since we only consider how many shards to make
locally (within each local replica). Sharding across clients is handled by
either tf.data.Dataset's shard transformation (in the single-client case) or
tf.data service's distribute function (in the multi-client case).
Args:
layout: the layout to compute the shard counts for.
batch_dim: the name of the batch dimension of the layout, if present.
Returns:
A list of shard counts, one element per dimension of the layout.
"""
shard_counts = []
for spec in layout.sharding_specs:
if spec in (batch_dim, layout_lib.UNSHARDED):
shard_counts.append(1)
else:
shard_counts.append(layout.mesh.dim_size(spec))
return shard_counts
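# Worked example (illustrative, not from the original source): for a layout
# with sharding_specs [batch_dim, UNSHARDED, 'model'] on a mesh whose 'model'
# dimension has size 4, _shard_counts returns [1, 1, 4] -- the batch and
# unsharded dimensions are not sliced locally, while the last dimension is
# split into 4 local shards.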
def _index_matrix(layout: layout_lib.Layout,
elem_spec: tensor_spec.TensorSpec) -> ops.Tensor:
"""Computes a utility matrix to derive device-based slice offsets.
This function builds a matrix of shape `[mesh.rank, layout.rank]` for each
dataset element. This matrix can be used to slice the DTensor components
returned by the iterator according to the local device that component is to be
placed on. This can be done by multiplying the device offsets of shape
`[1, mesh.rank]` with this index matrix to get a `[1, layout.rank]` shape
tensor containing the slice offsets.
Note: the index on the batch dim is always 0 since sharding on the batch
dimension is handled by either tf.data.Dataset's shard transformation (in the
single-client case) or tf.data service's distribute function (in the
multi-client case). If there is no sharding on the batch dimension (or any
other dimension), the slice index remains 0.
Args:
layout: the layout of the dataset element.
elem_spec: the spec of the dataset element.
Returns:
The index matrix as a tensor.
"""
matrix = []
for dim in layout.mesh.dim_names:
row = [0]
for layout_idx, spec in enumerate(layout.sharding_specs[1:]):
if spec == layout_lib.UNSHARDED or spec != dim:
row.append(0)
else:
row.append(elem_spec.shape[layout_idx] // layout.mesh.dim_size(dim))
matrix.append(row)
return constant_op.constant(matrix, dtype=dtypes.int32)
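# Worked example (illustrative, not from the original source): for a mesh with
# dimensions ('batch', 'model') of sizes (2, 4), a layout with sharding_specs
# [batch_dim, UNSHARDED, 'model'], and an (unbatched) element spec of shape
# [28, 128], the matrix is [[0, 0, 0], [0, 0, 32]], i.e. of shape
# [mesh.rank, layout.rank]. A device at mesh coordinates (b, m) then gets
# slice offsets coords @ matrix = [0, 0, 32 * m].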
@tf_export('experimental.dtensor.DTensorDataset', v1=[])
class DTensorDataset(dataset_ops.UnaryUnchangedStructureDataset):
"""A dataset of DTensors.
DTensorDataset encapsulates a `tf.data.Dataset` whose elements are
automatically packed and returned as DTensors based on a given mesh and
layouts.
"""
def __init__(self,
dataset: dataset_ops.DatasetV2,
*,
mesh: layout_lib.Mesh,
layouts: Any,
global_batch_size: int,
dataset_already_batched: bool = False,
batch_dim: Optional[str] = None,
prefetch: Optional[int] = None,
tf_data_service_config: Optional[TFDataServiceConfig] = None):
"""Creates a DTensorDataset.
DTensorDataset automatically handles distribution of the dataset elements to
each client's devices. It can be used to create an iterator that returns
DTensors of the input data on each iteration.
DTensorDataset works best with unbatched datasets. It takes the mesh and the
provided layouts to automatically calculate how to batch the input locally
for each replica.
If the provided dataset is already batched according to the per-replica
batch size, then `dataset_already_batched` must be set and DTensorDataset
will check that the batch size is consistent with the intended
`global_batch_size` using the layout information. Each replica receives a
separate slice of the global batch, thus the per-replica batch size can be
computed as the global batch size divided by the number of model replicas.
For a DTensor mesh, the number of replicas is equal to the size of the
mesh's batch dimension.
TODO(b/223275517): add support for input datasets that are already batched
to the global batch size.
Args:
dataset: a `tf.data.Dataset` object.
mesh: the DTensor mesh to place the dataset batches on.
layouts: a structure of DTensor layouts to be applied to the input dataset
values. This can be a single layout or (possibly nested) tuples or
dictionaries of layouts, and the structure must match the structure of
the dataset. Either all or none of the layouts should be sharded on the
batch dimension; having only a subset of layouts batch sharded will not
work and raises a ValueError.
global_batch_size: the desired global batch size.
dataset_already_batched: must be set only if the dataset is already
batched to the per-replica batch size. The batched dataset must have
`drop_remainder=True` set since DTensor requires static shapes for
slicing the input tensors.
batch_dim: the mesh dimension on which the input's batch dimension is
sharded. Set to None if the input layouts do not shard on the batch
dimension.
prefetch: number of batches to prefetch using Dataset.prefetch.
tf_data_service_config: if operating in multi-client mode, this config
specifies the tf.data service configuration to use.
Raises:
ValueError: on any of the following situations,
1. if the structures and ranks of layouts and the dataset do not match.
2. if the shapes in the dataset's spec are not fully defined.
3. if batch_dim is specified and all layouts are not batch-sharded.
4. if per_replica_batch_size is specified for an already batched Dataset
but it does not match the expected per-replica size based on the
provided mesh.
TypeError: if type of structures of layouts and the dataset do not match.
"""
super().__init__(dataset, dataset_ops.to_variant(dataset))
self._mesh = mesh
self._layouts = layouts
self._batch_dim = batch_dim
self._prefetch = prefetch
self._tf_data_service_config = tf_data_service_config
self._element_spec = dataset.element_spec
nest.assert_same_structure(self._element_spec, self._layouts)
flattened_layouts = nest.flatten(self._layouts)
flattened_elem_spec = nest.flatten(self._element_spec)
if batch_dim:
num_global_replicas = mesh.dim_size(batch_dim)
self._local_replica_ids = list(
dict.fromkeys(
[loc[batch_dim] for loc in mesh.local_device_locations()]))
for layout in flattened_layouts:
if batch_dim != layout.sharding_specs[0]:
raise ValueError(
('batch_dim %s was specified but at least one layout did not '
'contain it: %s') % (batch_dim, layout))
else:
# Only one replica since there is no sharding on the batch dimension.
num_global_replicas = 1
self._local_replica_ids = [0]
# Validate layout and element spec compatibility, and raise ValueError if
# invalid.
_validate_input(
flattened_layouts,
flattened_elem_spec,
dataset_already_batched=dataset_already_batched)
expected_batch_size = global_batch_size // num_global_replicas
if not dataset_already_batched:
self._batched_dataset = dataset.batch(
expected_batch_size, drop_remainder=True)
else:
per_replica_batch_size = flattened_elem_spec[0].shape.as_list()[0]
if per_replica_batch_size != expected_batch_size:
raise ValueError(
('per_replica_batch_size does not match the expected size based on '
'the mesh, got %d but expected %d.') %
(per_replica_batch_size, expected_batch_size))
self._batched_dataset = dataset
num_global_devices_per_replica = api.num_global_devices(
mesh.device_type()) // num_global_replicas
self._num_local_replicas = len(self._local_replica_ids)
self._num_local_devices_per_replica = mesh.num_local_devices(
) // self._num_local_replicas
# The number of clients each replica is split over.
self._num_clients_per_replica = (
num_global_devices_per_replica //
self._num_local_devices_per_replica)
# In the case where a replica is split across multiple clients, an offset
# needs to be added to the index used by the partitioning logic such that
# the local devices on that client can be correctly matched to slices of the
# input tensor(s). If replicas are wholly contained within a client, then
# this offset is always 0.
self._partition_offset = (config.client_id() % self._num_clients_per_replica
) * self._num_local_devices_per_replica
# Helper data structures used in partitioning the dataset tensors.
self._all_shard_counts = [
_shard_counts(layout, batch_dim) for layout in flattened_layouts
]
self._index_matrices = [
_index_matrix(layout, elem_spec) for layout, elem_spec in zip(
flattened_layouts, flattened_elem_spec)
]
def __iter__(self):
datasets: List[Tuple[int, dataset_ops.DatasetV2]] = []
# Start with the batched dataset.
local_dataset = self._batched_dataset
if self._batch_dim is not None:
if self._num_clients_per_replica > 1:
# If a replica is split over multiple clients then each batch needs to
# be repeated before distribution as many times as there are clients
# corresponding to that replica.
local_dataset = self._repeat_batch(local_dataset,
self._num_clients_per_replica)
sharding_policy = data_service_ops.ShardingPolicy.DATA
else:
# Replicas are unique to each client, so FILE based sharding can be used
# which is more performant since each worker does not need to read the
# entire dataset.
sharding_policy = data_service_ops.ShardingPolicy.FILE
else:
# No batch dimension sharding specified so disable dataset sharding during
# the distribute step.
sharding_policy = data_service_ops.ShardingPolicy.OFF
# Apply distribution here (if specified) so all remaining transformations
# are executed locally.
if self._tf_data_service_config is not None:
local_dataset = local_dataset.apply(
data_service_ops.distribute(
processing_mode=sharding_policy,
service=self._tf_data_service_config.dispatcher_address,
job_name=f'{self._tf_data_service_config.job_name}_{config.client_id()}',
target_workers='LOCAL'))
for local_replica_idx, replica_id in enumerate(self._local_replica_ids):
# Select the shard for the corresponding replica.
dataset = local_dataset.shard(self._num_local_replicas, local_replica_idx)
# Repeat each batch for each local device in the replica.
dataset = self._repeat_batch(dataset, self._num_local_devices_per_replica)
# Slice each shard further for all non-batch dim shards. If there is no
# non-batch dim sharding, this slice is essentially a no-op.
dataset = self._partition(dataset)
# Apply prefetch as the last step. Since each batch is repeated, the
# number of elements to prefetch has to be scaled by the same size.
if self._prefetch is not None:
dataset = dataset.prefetch(
self._prefetch * self._num_local_devices_per_replica)
datasets.append((replica_id, dataset))
return _DTensorIterator(datasets, self._element_spec, self._layouts,
self._num_local_devices_per_replica)
def _repeat_batch(self, dataset, repeats):
def repeat(*x):
return dataset_ops.DatasetV2.from_tensors(x).repeat(repeats)
return dataset.flat_map(repeat)
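  # Note on _repeat_batch above: flat_map over `from_tensors(x).repeat(repeats)`
  # repeats each element consecutively, e.g. with repeats=2 a dataset [a, b]
  # yields [a, a, b, b], so every batch can be handed to each local device (or
  # client) of a replica.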
def _partition(self, dataset):
"""Slices each dataset element on any sharded non-batch dimension."""
# TODO(b/223275517): decouple from self and make testable.
def slice_batch(index, batch):
flattened_batch = nest.flatten(batch)
flattened_output = []
norm_index = math_ops.cast(
index % self._num_local_devices_per_replica, dtype=dtypes.int32)
norm_index += self._partition_offset
coords = self._mesh.coords(norm_index)
coords = array_ops.reshape(coords, (1, -1))
for element, shard_counts, idx_matrix in zip(flattened_batch,
self._all_shard_counts,
self._index_matrices):
indexes = math_ops.matmul(coords, idx_matrix)
start = array_ops.reshape(indexes, (-1,))
size = array_ops.shape_v2(
element, out_type=dtypes.int32) // shard_counts
flattened_output.append(
array_ops.slice(element, begin=start, size=size))
return nest.pack_sequence_as(batch, flattened_output)
enumerated_dataset = dataset.enumerate()
partitioned_dataset = enumerated_dataset.map(slice_batch)
    return partitioned_dataset
/pxea-1.0.tar.gz/pxea-1.0/README.rst
# PxEA: ProXimal pathway Enrichment Analysis
--------------------------------------------
Code for the ProXimal pathway enrichment analysis introduced in the "Targeting comorbid diseases via network endopharmacology" manuscript.
## Requirements
- Python 2 or 3
- numpy
- scipy
- networkx
## Installing & running tests
Download (i.e., clone) the files to your computer. You can use the package as a bare package (without running `setup.py install`) or install it
using the following command:
>>> python setup.py install
Several test cases for the methods are provided in `test/test_pxea.py`.
To run these, from the parent directory (where this README file resides) type
>>> python -m unittest test.test_pxea
or
>>> python setup.py test
It should give an output similar to the one below
..
----------------------------------------------------------------------
Ran 2 tests in 1.220s
OK
## Usage
### PXEA
>>> from pxea.utilities.set_enrichment import get_enrichment_score_and_pval
get_enrichment_score_and_pval(ranked_list, candidates, n_random=n_random, alternative="greater", seed=51234)
"""
KS based score (~max difference between cumulative distributions of the sample and expected random walk)
"""
Input parameters:
ranked_list: a list with the ranking of the elements (e.g., pathways proximal to a drug)
candidates: set of elements (e.g., pathways common to two diseases)
    N: number of pathways in the candidates set (if None, len(candidates) will be used)
    n_random: number of shufflings of the ranked list for the permutation-test-based P-value calculation
        (if None, no P-value is calculated and None is returned instead)
alternative: greater | less | two-sided
seed: number to be used initializing random generator (for reproducibility)
Output:
Returns enrichment score and pvalue
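A minimal usage sketch (the pathway names below are hypothetical and only illustrate the expected input types):
>>> from pxea.utilities.set_enrichment import get_enrichment_score_and_pval
>>> ranked_pathways = ["pathway_A", "pathway_B", "pathway_C", "pathway_D"]
>>> shared_pathways = set(["pathway_B", "pathway_C"])
>>> score, pval = get_enrichment_score_and_pval(ranked_pathways, shared_pathways, n_random=1000, alternative="greater", seed=51234)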
### Proximity
>>> from pxea.utilities.network import calculate_proximity
calculate_proximity(network, nodes_from, nodes_to, nodes_from_random=None, nodes_to_random=None, bins=None, n_random=1000, min_bin_size=None, seed=51234, lengths=None)
"""
Calculate proximity (average distance to the closest node from the
    first to the second) from nodes_from to nodes_to (if degree binning or
random nodes are not given, they are generated)
"""
Input parameters:
network: networkx Graph object
nodes_from: set of nodes from which proximity is calculated
nodes_to: set of nodes proximity to which is calculated
nodes_from_random: random from nodes to check proximity
nodes_to_random: random to nodes to check proximity
bins: degree equivalence bins
n_random: number of randomizations for background closest distance calculation
    min_bin_size: minimum size of the bins for degree binning; if None, len(network) // 100 is used
seed: integer for initializing the state of the random generator
lengths: precalculated shortest path length dictionary
Output:
Returns proximity z-score and average path length to nearest nodes in nodes_to
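A minimal usage sketch (a random toy graph stands in for a real interactome; node sets and parameters are illustrative only):
>>> import networkx
>>> from pxea.utilities.network import calculate_proximity
>>> g = networkx.barabasi_albert_graph(100, 2, seed=51234)
>>> nodes_from = set([0, 1])
>>> nodes_to = set([5, 10, 20])
>>> z, avg_length = calculate_proximity(g, nodes_from, nodes_to, n_random=100, min_bin_size=10, seed=51234)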
## Data sets
The data sets used in the analysis for the autoimmune diseases are
under data/ folder at [pxea](https://github.com/emreg00/pxea).
## See also
See [toolbox](https://github.com/emreg00/toolbox) package for various related code.
/h2o_wave-0.26.2-py3-none-macosx_12_0_arm64.whl/h2o_wave-0.26.2.data/data/www/wave-static/java-legacy-9efe1586.js | System.register([],(function(e,n){"use strict";return{execute:function(){e("default",(function(e){var n=e.regex,a="[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*",t=a+s("(?:<"+a+"~~~(?:\\s*,\\s*"+a+"~~~)*>)?",/~~~/g,2),i={keyword:["synchronized","abstract","private","var","static","if","const ","for","while","strictfp","finally","protected","import","native","final","void","enum","else","break","transient","catch","instanceof","volatile","case","assert","package","default","public","try","switch","continue","throws","protected","public","private","module","requires","exports","do","sealed","yield","permits"],literal:["false","true","null"],type:["char","boolean","long","float","int","byte","short","double"],built_in:["super","this"]},r={className:"meta",begin:"@"+a,contains:[{begin:/\(/,end:/\)/,contains:["self"]}]},o={className:"params",begin:/\(/,end:/\)/,keywords:i,relevance:0,contains:[e.C_BLOCK_COMMENT_MODE],endsParent:!0};return{name:"Java",aliases:["jsp"],keywords:i,illegal:/<\/|#/,contains:[e.COMMENT("/\\*\\*","\\*/",{relevance:0,contains:[{begin:/\w+@/,relevance:0},{className:"doctag",begin:"@[A-Za-z]+"}]}),{begin:/import java\.[a-z]+\./,keywords:"import",relevance:2},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{begin:/"""/,end:/"""/,className:"string",contains:[e.BACKSLASH_ESCAPE]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{match:[/\b(?:class|interface|enum|extends|implements|new)/,/\s+/,a],className:{1:"keyword",3:"title.class"}},{match:/non-sealed/,scope:"keyword"},{begin:[n.concat(/(?!else)/,a),/\s+/,a,/\s+/,/=(?!=)/],className:{1:"type",3:"variable",5:"operator"}},{begin:[/record/,/\s+/,a],className:{1:"keyword",3:"title.class"},contains:[o,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:"new throw return else",relevance:0},{begin:["(?:"+t+"\\s+)",e.UNDERSCORE_IDENT_RE,/\s*(?=\()/],className:{2:"title.function"},keywords:i,contains:[{className:"params",begin:/\(/,end:/\)/,keywords:i,relevance:0,contains:[r,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,c,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},c,r]}}));var n="[0-9](_*[0-9])*",a="\\.(".concat(n,")"),t="[0-9a-fA-F](_*[0-9a-fA-F])*",c={className:"number",variants:[{begin:"(\\b(".concat(n,")((").concat(a,")|\\.)?|(").concat(a,"))")+"[eE][+-]?(".concat(n,")[fFdD]?\\b")},{begin:"\\b(".concat(n,")((").concat(a,")[fFdD]?\\b|\\.([fFdD]\\b)?)")},{begin:"(".concat(a,")[fFdD]?\\b")},{begin:"\\b(".concat(n,")[fFdD]\\b")},{begin:"\\b0[xX]((".concat(t,")\\.?|(").concat(t,")?\\.(").concat(t,"))")+"[pP][+-]?(".concat(n,")[fFdD]?\\b")},{begin:"\\b(0|[1-9](_*[0-9])*)[lL]?\\b"},{begin:"\\b0[xX](".concat(t,")[lL]?\\b")},{begin:"\\b0(_*[0-7])*[lL]?\\b"},{begin:"\\b0[bB][01](_*[01])*[lL]?\\b"}],relevance:0};function s(e,n,a){return-1===a?"":e.replace(n,(function(t){return s(e,n,a-1)}))}}}})); | PypiClean |
/tencentcloud_iac_pulumi-0.1.5.tar.gz/tencentcloud_iac_pulumi-0.1.5/tencentcloud_iac_pulumi/ha/vip.py
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['VipArgs', 'Vip']
@pulumi.input_type
class VipArgs:
def __init__(__self__, *,
subnet_id: pulumi.Input[str],
vpc_id: pulumi.Input[str],
name: Optional[pulumi.Input[str]] = None,
vip: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Vip resource.
:param pulumi.Input[str] subnet_id: Subnet ID.
:param pulumi.Input[str] vpc_id: VPC ID.
:param pulumi.Input[str] name: Name of the HA VIP. The length of character is limited to 1-60.
        :param pulumi.Input[str] vip: Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
"""
pulumi.set(__self__, "subnet_id", subnet_id)
pulumi.set(__self__, "vpc_id", vpc_id)
if name is not None:
pulumi.set(__self__, "name", name)
if vip is not None:
pulumi.set(__self__, "vip", vip)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Input[str]:
"""
Subnet ID.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: pulumi.Input[str]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> pulumi.Input[str]:
"""
VPC ID.
"""
return pulumi.get(self, "vpc_id")
@vpc_id.setter
def vpc_id(self, value: pulumi.Input[str]):
pulumi.set(self, "vpc_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the HA VIP. The length of character is limited to 1-60.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def vip(self) -> Optional[pulumi.Input[str]]:
"""
        Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
"""
return pulumi.get(self, "vip")
@vip.setter
def vip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vip", value)
@pulumi.input_type
class _VipState:
def __init__(__self__, *,
address_ip: Optional[pulumi.Input[str]] = None,
create_time: Optional[pulumi.Input[str]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interface_id: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
vip: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Vip resources.
:param pulumi.Input[str] address_ip: EIP that is associated.
:param pulumi.Input[str] create_time: Create time of the HA VIP.
:param pulumi.Input[str] instance_id: Instance ID that is associated.
:param pulumi.Input[str] name: Name of the HA VIP. The length of character is limited to 1-60.
:param pulumi.Input[str] network_interface_id: Network interface ID that is associated.
:param pulumi.Input[str] state: State of the HA VIP. Valid value: `AVAILABLE`, `UNBIND`.
:param pulumi.Input[str] subnet_id: Subnet ID.
        :param pulumi.Input[str] vip: Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
:param pulumi.Input[str] vpc_id: VPC ID.
"""
if address_ip is not None:
pulumi.set(__self__, "address_ip", address_ip)
if create_time is not None:
pulumi.set(__self__, "create_time", create_time)
if instance_id is not None:
pulumi.set(__self__, "instance_id", instance_id)
if name is not None:
pulumi.set(__self__, "name", name)
if network_interface_id is not None:
pulumi.set(__self__, "network_interface_id", network_interface_id)
if state is not None:
pulumi.set(__self__, "state", state)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
if vip is not None:
pulumi.set(__self__, "vip", vip)
if vpc_id is not None:
pulumi.set(__self__, "vpc_id", vpc_id)
@property
@pulumi.getter(name="addressIp")
def address_ip(self) -> Optional[pulumi.Input[str]]:
"""
EIP that is associated.
"""
return pulumi.get(self, "address_ip")
@address_ip.setter
def address_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address_ip", value)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> Optional[pulumi.Input[str]]:
"""
Create time of the HA VIP.
"""
return pulumi.get(self, "create_time")
@create_time.setter
def create_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "create_time", value)
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> Optional[pulumi.Input[str]]:
"""
Instance ID that is associated.
"""
return pulumi.get(self, "instance_id")
@instance_id.setter
def instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the HA VIP. The length of character is limited to 1-60.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkInterfaceId")
def network_interface_id(self) -> Optional[pulumi.Input[str]]:
"""
Network interface ID that is associated.
"""
return pulumi.get(self, "network_interface_id")
@network_interface_id.setter
def network_interface_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_interface_id", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
"""
State of the HA VIP. Valid value: `AVAILABLE`, `UNBIND`.
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[pulumi.Input[str]]:
"""
Subnet ID.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter
def vip(self) -> Optional[pulumi.Input[str]]:
"""
        Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
"""
return pulumi.get(self, "vip")
@vip.setter
def vip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vip", value)
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> Optional[pulumi.Input[str]]:
"""
VPC ID.
"""
return pulumi.get(self, "vpc_id")
@vpc_id.setter
def vpc_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vpc_id", value)
class Vip(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
vip: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
        Provides a resource to create an HA VIP.
## Example Usage
```python
import pulumi
import tencentcloud_iac_pulumi as tencentcloud
foo = tencentcloud.ha.Vip("foo",
subnet_id="subnet-4d4m4cd4s",
vip="10.0.4.16",
vpc_id="vpc-gzea3dd7")
```
## Import
HA VIP can be imported using the id, e.g.
```sh
$ pulumi import tencentcloud:Ha/vip:Vip foo havip-kjqwe4ba
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: Name of the HA VIP. The length of character is limited to 1-60.
:param pulumi.Input[str] subnet_id: Subnet ID.
        :param pulumi.Input[str] vip: Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
:param pulumi.Input[str] vpc_id: VPC ID.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: VipArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
        Provides a resource to create an HA VIP.
## Example Usage
```python
import pulumi
import tencentcloud_iac_pulumi as tencentcloud
foo = tencentcloud.ha.Vip("foo",
subnet_id="subnet-4d4m4cd4s",
vip="10.0.4.16",
vpc_id="vpc-gzea3dd7")
```
## Import
HA VIP can be imported using the id, e.g.
```sh
$ pulumi import tencentcloud:Ha/vip:Vip foo havip-kjqwe4ba
```
:param str resource_name: The name of the resource.
:param VipArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(VipArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
vip: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.plugin_download_url is None:
opts.plugin_download_url = _utilities.get_plugin_download_url()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = VipArgs.__new__(VipArgs)
__props__.__dict__["name"] = name
if subnet_id is None and not opts.urn:
raise TypeError("Missing required property 'subnet_id'")
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["vip"] = vip
if vpc_id is None and not opts.urn:
raise TypeError("Missing required property 'vpc_id'")
__props__.__dict__["vpc_id"] = vpc_id
__props__.__dict__["address_ip"] = None
__props__.__dict__["create_time"] = None
__props__.__dict__["instance_id"] = None
__props__.__dict__["network_interface_id"] = None
__props__.__dict__["state"] = None
super(Vip, __self__).__init__(
'tencentcloud:Ha/vip:Vip',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
address_ip: Optional[pulumi.Input[str]] = None,
create_time: Optional[pulumi.Input[str]] = None,
instance_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interface_id: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
vip: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None) -> 'Vip':
"""
Get an existing Vip resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] address_ip: EIP that is associated.
:param pulumi.Input[str] create_time: Create time of the HA VIP.
:param pulumi.Input[str] instance_id: Instance ID that is associated.
:param pulumi.Input[str] name: Name of the HA VIP. The length of character is limited to 1-60.
:param pulumi.Input[str] network_interface_id: Network interface ID that is associated.
:param pulumi.Input[str] state: State of the HA VIP. Valid value: `AVAILABLE`, `UNBIND`.
:param pulumi.Input[str] subnet_id: Subnet ID.
        :param pulumi.Input[str] vip: Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
:param pulumi.Input[str] vpc_id: VPC ID.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _VipState.__new__(_VipState)
__props__.__dict__["address_ip"] = address_ip
__props__.__dict__["create_time"] = create_time
__props__.__dict__["instance_id"] = instance_id
__props__.__dict__["name"] = name
__props__.__dict__["network_interface_id"] = network_interface_id
__props__.__dict__["state"] = state
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["vip"] = vip
__props__.__dict__["vpc_id"] = vpc_id
return Vip(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="addressIp")
def address_ip(self) -> pulumi.Output[str]:
"""
EIP that is associated.
"""
return pulumi.get(self, "address_ip")
@property
@pulumi.getter(name="createTime")
def create_time(self) -> pulumi.Output[str]:
"""
Create time of the HA VIP.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> pulumi.Output[str]:
"""
Instance ID that is associated.
"""
return pulumi.get(self, "instance_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the HA VIP. The length of character is limited to 1-60.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkInterfaceId")
def network_interface_id(self) -> pulumi.Output[str]:
"""
Network interface ID that is associated.
"""
return pulumi.get(self, "network_interface_id")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
State of the HA VIP. Valid value: `AVAILABLE`, `UNBIND`.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Output[str]:
"""
Subnet ID.
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter
def vip(self) -> pulumi.Output[str]:
"""
        Virtual IP address. It must not be occupied and must be within this VPC's network segment. If not set, it will be assigned automatically after the resource is created.
"""
return pulumi.get(self, "vip")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> pulumi.Output[str]:
"""
VPC ID.
"""
        return pulumi.get(self, "vpc_id")
/pandora-python-sdk-0.0.1.tar.gz/pandora-python-sdk-0.0.1/ontology/crypto/aes_handler.py
from Cryptodome import Random
from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import pad, unpad
class AESHandler(object):
    """Static helper methods for AES encryption and decryption (GCM, CTR and CBC modes) built on PyCryptodome."""
@staticmethod
def generate_iv() -> bytes:
return Random.new().read(AES.block_size)
@staticmethod
def generate_key():
key = Random.get_random_bytes(32)
return key
@staticmethod
def aes_gcm_encrypt_with_iv(plain_text: bytes, hdr: bytes, key: bytes, iv: bytes):
cipher = AES.new(key=key, mode=AES.MODE_GCM, nonce=iv)
cipher.update(hdr)
cipher_text, mac_tag = cipher.encrypt_and_digest(plain_text)
return mac_tag, cipher_text
@staticmethod
def aes_gcm_decrypt_with_iv(cipher_text: bytes, hdr: bytes, mac_tag: bytes, key: bytes, iv: bytes) -> bytes:
cipher = AES.new(key=key, mode=AES.MODE_GCM, nonce=iv)
cipher.update(hdr)
try:
plain_text = cipher.decrypt_and_verify(cipher_text, mac_tag)
except ValueError:
plain_text = b""
except KeyError:
plain_text = b""
return plain_text
@staticmethod
def aes_gcm_encrypt(plain_text: bytes, hdr: bytes, key: bytes):
cipher = AES.new(key=key, mode=AES.MODE_GCM)
cipher.update(hdr)
cipher_text, mac_tag = cipher.encrypt_and_digest(plain_text)
nonce = cipher.nonce
return nonce, mac_tag, cipher_text
@staticmethod
def aes_gcm_decrypt(cipher_text: bytes, hdr: bytes, nonce: bytes, mac_tag: bytes, key: bytes):
cipher = AES.new(key=key, mode=AES.MODE_GCM, nonce=nonce)
cipher.update(hdr)
try:
plain_text = cipher.decrypt_and_verify(cipher_text, mac_tag)
except ValueError:
plain_text = b""
except KeyError:
plain_text = b""
return plain_text
@staticmethod
def aes_ctr_encrypt(plain_text: bytes, key: bytes):
cipher = AES.new(key=key, mode=AES.MODE_CTR)
cipher_text = cipher.encrypt(plain_text)
nonce = cipher.nonce
return nonce, cipher_text
@staticmethod
def aes_ctr_decrypt(cipher_text: bytes, nonce: bytes, key: bytes):
cipher = AES.new(key=key, mode=AES.MODE_CTR, nonce=nonce)
plain_text = cipher.decrypt(cipher_text)
return plain_text
@staticmethod
def aes_cbc_encrypt(plain_text: bytes, key: bytes, iv: bytes = b''):
if len(iv) == 0:
iv = AESHandler.generate_iv()
cipher = AES.new(key=key, mode=AES.MODE_CBC, iv=iv)
return cipher.IV, cipher.encrypt(pad(plain_text, AES.block_size))
@staticmethod
def aes_cbc_decrypt(cipher_text: bytes, iv: bytes, key: bytes):
cipher = AES.new(key=key, mode=AES.MODE_CBC, iv=iv)
        return unpad(cipher.decrypt(cipher_text), AES.block_size, style='pkcs7')
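# A minimal round-trip sketch, assuming the Cryptodome package imported above
# is installed; it only exercises the helpers defined in this module:
if __name__ == "__main__":
    key = AESHandler.generate_key()
    iv, cbc_ct = AESHandler.aes_cbc_encrypt(b"hello world", key)
    assert AESHandler.aes_cbc_decrypt(cbc_ct, iv, key) == b"hello world"
    nonce, tag, gcm_ct = AESHandler.aes_gcm_encrypt(b"hello world", b"hdr", key)
    assert AESHandler.aes_gcm_decrypt(gcm_ct, b"hdr", nonce, tag, key) == b"hello world"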
/IPFS_Toolkit-0.5.14-py3-none-any.whl/ipfs_api.py
from datetime import datetime
from datetime import timedelta
from termcolor import colored
import time
from threading import Event
import shutil
import tempfile
# import sys
from subprocess import Popen, PIPE
# import subprocess
import os
import os.path
# import threading
# import multiprocessing
import traceback
import ipfs_lns
import logging
from threading import Thread
try:
import base64
import ipfshttpclient2 as ipfshttpclient
from base64 import urlsafe_b64decode, urlsafe_b64encode
http_client = ipfshttpclient.client.Client()
LIBERROR = False
except Exception as e:
print(str(e))
LIBERROR = True
http_client = None
ipfshttpclient = None
print_log = False
# List for keeping track of subscriptions to IPFS topics, so that subscriptions can be ended
subscriptions = list([])
def publish(path: str):
"""Upload a file or a directory to IPFS, returning its CID.
Args:
path (str): the path of the file to publish
Returns:
str: the IPFS content ID (CID) of the published file
"""
result = http_client.add(path, recursive=True)
if(type(result) == list):
return result[-1]["Hash"]
else:
return result["Hash"]
def download(cid, path="."):
"""Get the specified IPFS content, saving it to a file.
Args:
cid (str): the IPFS content ID (cid) of the resource to get
path (str): (optional) the path (directory or filepath) of the saved file
"""
# create temporary download directory
tempdir = tempfile.mkdtemp()
# download and save file/folder to temporary directory
http_client.get(cid=cid, target=tempdir)
# move file/folder from temporary directory to desired path
shutil.move(os.path.join(tempdir, cid), path)
# cleanup temporary directory
shutil.rmtree(tempdir)
def read(cid):
"""Returns the textual content of the specified IPFS resource.
Args:
cid (str): the IPFS content ID (CID) of the resource to read
Returns:
str: the content of the specified IPFS resource as text
"""
return http_client.cat(cid)
def pin(cid: str):
"""Ensure the specified IPFS resource remains available on this IPFS node.
Args:
cid (str): the IPFS content ID (CID) of the resource to pin
"""
http_client.pin.add(cid)
def unpin(cid: str):
"""Allow a pinned IPFS resource to be garbage-collected and removed on this IPFS node.
Args:
cid (str): the IPFS content ID (CID) of the resource to unpin
"""
http_client.pin.rm(cid)
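# Example sketch of the basic content workflow (requires a running IPFS daemon;
# 'example.txt' is a hypothetical local file):
#     cid = publish("example.txt")
#     print(read(cid))   # raw (bytes) content of the published file
#     pin(cid)           # keep the content available on this node
#     unpin(cid)         # allow it to be garbage-collected again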
__pins_cache = {}
def pins(cids_only: bool = False, cache_age_s: int = None):
"""Get the CIDs of files we have pinned on IPFS
Args:
cids_only (bool): if True, returns a plain list of IPFS CIDs
otherwise, returns a list of dicts of CIDs and their pinning type
cache_age_s (int): getting the of pins from IPFS can take several
seconds. IPFS_API therefore caches each result. If the age of the
cache is less than this parameter, the cacheed result is returned,
otherwise the slow process of getting the latest list of pins is
used.
Returns:
list(): a list of the CIDs of pinned objects. The list element type
depends on the cids_only parameter (see above)
"""
global __pins_cache
if __pins_cache and cache_age_s and (datetime.utcnow() - __pins_cache['date']).total_seconds() < cache_age_s:
data = __pins_cache['data']
else:
data = http_client.pin.ls()['Keys'].as_json()
__pins_cache = {
"date": datetime.utcnow(),
"data": data
}
if cids_only:
return list(data.keys())
else:
return data
def create_ipns_record(name: str, type: str = "rsa", size: int = 2048):
"""Create an IPNS record (eqv. IPNS key).
Args:
name (str): the name of the record/key (in the scope of this IPFS node)
type (str): the cryptographic algorithm behind this key's security
size (int): the length of the key
"""
result = http_client.key.gen(key_name=name, type=type, size=size)
if isinstance(result, list):
return result[-1]["Id"]
else:
return result["Id"]
def update_ipns_record_from_cid(record_name: str, cid: str, ttl: str = "24h", lifetime: str = "24h"):
"""Assign IPFS content to an IPNS record.
Args:
record_name (str): the name of the IPNS record (IPNS key) to be updated
cid (str): the IPFS content ID (CID) of the content to assign to the IPNS record
ttl (str): Time duration this record should be cached for.
Uses the same syntax as the lifetime option.
(caution: experimental).
lifetime (str): Time duration that the record will be valid for.
Default: 24h.
"""
http_client.name.publish(ipfs_path=cid, key=record_name,
ttl=ttl, lifetime=lifetime)
def update_ipns_record(name: str, path, ttl: str = "24h", lifetime: str = "24h"):
"""Publish a file to IPFS and assign it to an IPNS record.
Args:
record_name (str): the name of the IPNS record (IPNS key) to be updated
path (str): the path of the file to assign to the IPNS record
ttl (str): Time duration this record should be cached for.
Uses the same syntax as the lifetime option.
(caution: experimental).
lifetime (str): Time duration that the record will be valid for.
Default: 24h.
"""
cid = publish(path)
update_ipns_record_from_cid(name, cid, ttl=ttl, lifetime=lifetime)
return cid
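# Example sketch of an IPNS round trip (requires a running IPFS daemon; the
# record name and file path are hypothetical):
#     key_id = create_ipns_record("my-site")
#     cid = update_ipns_record("my-site", "index.html")
#     resolve_ipns_key(key_id)   # typically resolves to '/ipfs/' + cid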
def resolve_ipns_key(ipns_key, nocache=False):
"""Get the IPFS CID of the given IPNS record (IPNS key)
Args:
ipns_key: the key of the IPNS record to lookup
nocache: whether or not to ignore this IPFS nodes cached memory of IPNS keys
Returns:
str: the IPFS CID associated with the IPNS key
"""
return http_client.name.resolve(name=ipns_key, nocache=nocache)["Path"]
def download_ipns_record(ipns_key, path="", nocache=False):
"""Get the specified IPFS content, saving it to a file.
Args:
ipns_key (str): the key of the IPNS record to get
path (str): (optional) the path (directory or filepath) of the saved file
nocache: whether or not to ignore this IPFS nodes cached memory of IPNS keys
"""
return download(resolve_ipns_key(ipns_key, nocache=nocache), path)
def read_ipns_record(ipns_key, nocache=False):
"""Returns the textual content of the specified IPFS resource.
Args:
ipns_key (str): the key of the IPNS record to read
Returns:
str: the content of the specified IPFS resource as text
"""
return read(resolve_ipns_key(ipns_key, nocache=nocache))
def my_id():
"""Returns this IPFS node's peer ID.
Returns:
str: the peer ID of this node
"""
return http_client.id()["ID"]
def is_ipfs_running():
"""Checks whether or not the IPFS daemon is currently running.
Returns:
bool: whether or not the IPFS daemon is currently running
"""
return len(my_multiaddrs()) > 0
def my_multiaddrs():
    """Returns this IPFS node's multiaddresses.
    Returns:
        list(str): the multiaddresses of this IPFS node
"""
return http_client.id()["Addresses"]
def list_peers():
"""Returns a list of the IPFS multiaddresses of the other nodes
this node is connected to.
Returns:
list(str): a list of the IPFS multiaddresses of the other nodes
this node is connected to
"""
proc = Popen(['ipfs', 'swarm', 'peers'], stdout=PIPE)
proc.wait()
peers = []
for line in proc.stdout:
peers.append(line.decode('utf-8').strip("\n"))
return peers
def list_peer_multiaddrs():
    print(colored("IPFS_API: DEPRECATED: This function (ipfs_api.list_peer_multiaddrs) has been renamed to ipfs_api.list_peers to avoid confusion with the new get_peer_multiaddrs function.", "yellow"))
return list_peers()
def get_peer_multiaddrs(peer_id):
"""Returns the multiaddresses (without the IPFS CID) via which we can reach
the specified peer.
Append /p2p/PEER_ID to these multiaddress parts to turn them into complete
multiaddresses.
Args:
peer_id (str): the IPFS ID of the peer to lookup
Returns:
list(str): the multiaddresses (without the IPFS CID) via which we can
reach the given peer
"""
try:
response = http_client.dht.findpeer(peer_id)
return response["Responses"][0]["Addrs"]
except:
return []
def connect_to_peer(multiaddr):
"""Tries to connect to a peer given its multiaddress.
Returns:
bool: success
"""
try:
response = http_client.swarm.connect(multiaddr)
if response["Strings"][0][-7:] == "success":
return True
return False
except:
return False
def find_peer(peer_id: str):
    """Looks up the specified IPFS peer on the DHT.
    Args:
        peer_id (str): the IPFS peer ID of the node to look up
    Returns:
        the DHT findpeer response if addresses for the peer were found, otherwise None
"""
try:
response = http_client.dht.findpeer(peer_id)
if(len(response["Responses"][0]["Addrs"]) > 0):
return response
except:
return None
def is_peer_connected(peer_id, ping_count=1):
"""Tests the connection to the given IPFS peer.
Args:
peer_id (str): the IPFS ID of the peer to test
count (int): (optional, default 1) the number of pings to send
Returns:
bool: whether or not the peer is connected
"""
responses = http_client.ping(peer_id, count=ping_count)
return responses[-1]['Success']
def find_providers(cid):
"""Lookup/find out which IPFS nodes provide the file with the given CID
    (including oneself).
    E.g. to check if this computer hosts a file with a certain CID:
        def DoWeHaveFile(cid: str):
            return ipfs_api.my_id() in ipfs_api.find_providers(cid)
Args:
cid (str): cid (str): the IPFS content ID (CID) of the resource to look up
Returns:
list: the peer IDs of the IPFS nodes who provide the file
"""
responses = http_client.dht.findprovs(cid)
peers = []
for response in responses:
if not isinstance(response, ipfshttpclient.client.base.ResponseBase):
continue
if response['Type'] == 4:
for resp in response['Responses']:
if resp['ID'] and resp['ID'] not in peers:
peers.append(resp['ID'])
return peers
def create_tcp_listening_connection(name: str, port: int):
"""Open a listener TCP connection for IPFS' libp2p stream-mounting (port-forwarding).
TCP traffic coming from another peer via this connection is forwarded
to the specified port on localhost.
Args:
name (str): the name of the connection (starts with /x/)
port (int): the local TCP port number to forward incoming traffic to
"""
if name[:3] != "/x/":
name = "/x/" + name
http_client.p2p.listen(name, "/ip4/127.0.0.1/tcp/" + str(port))
def create_tcp_sending_connection(name: str, port, peerID):
"""Open a sending TCP connection for IPFS' libp2p stream-mounting (port-forwarding).
TCP traffic sent to the specified port on localhost will be fowarded
to the specified peer via this connection.
Args:
name (str): the name of the connection (starts with /x/)
        port (int): the local TCP port number from which to forward traffic
        peerID (str): the IPFS peer ID of the node to forward the traffic to
"""
if name[:3] != "/x/":
name = "/x/" + name
http_client.p2p.forward(name, "/ip4/127.0.0.1/tcp/"
+ str(port), "/p2p/" + peerID)
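# Example sketch of stream-mounting between two nodes (PEER_ID is a placeholder
# for the listener's peer ID; the experimental Libp2pStreamMounting option must
# be enabled on both daemons):
#     # on the listening machine: forward incoming '/x/my-app' traffic to port 8888
#     create_tcp_listening_connection("my-app", 8888)
#     # on the sending machine: expose the tunnel on local port 8888
#     create_tcp_sending_connection("my-app", 8888, PEER_ID)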
def close_all_tcp_connections(listeners_only=False):
"""Close all libp2p stream-mounting (IPFS port-forwarding) connections.
Args:
listeners_only (bool): if set to True, only listening connections are closed
"""
if listeners_only:
http_client.p2p.close(listenaddress="/p2p/" + my_id())
else:
http_client.p2p.close(True)
def close_tcp_sending_connection(name: str = None, port: str = None, peer_id: str = None):
"""Close specific sending libp2p stream-mounting (IPFS port-forwarding) connections.
Args:
name (str): the name of the connection to close
port (str): the local forwarded TCP port of the connection to close
peer_id (str): the target peer_id of the connection to close
"""
if name and name[:3] != "/x/":
name = "/x/" + name
if port and isinstance(port, int):
listenaddress = f"/ip4/127.0.0.1/tcp/{port}"
else:
listenaddress = port
if peer_id and peer_id[:5] != "/p2p/":
targetaddress = "/p2p/" + peer_id
else:
targetaddress = peer_id
http_client.p2p.close(False, name, listenaddress, targetaddress)
def close_tcp_listening_connection(name: str = None, port: str = None):
"""Close specific listening libp2p stream-mounting (IPFS port-forwarding) connections.
Args:
name (str): the name of the connection to close
port (str): the local listening TCP port of the connection to close
"""
if name and name[:3] != "/x/":
name = "/x/" + name
if port and isinstance(port, int):
targetaddress = f"/ip4/127.0.0.1/tcp/{port}"
else:
targetaddress = port
http_client.p2p.close(False, name, None, targetaddress)
def check_peer_connection(id, name=""):
"""Try to connect to the specified peer, and stores its multiaddresses in ipfs_lns.
Args:
id (str): the IPFS PeerID or the ipfs_lns name of the computer to connect to
name (str): (optional) the human readable name of the computer to connect to (not critical, you can put in whatever you like)"""
contact = ipfs_lns.get_contact(id)
if not contact:
contact = ipfs_lns.add_contact(id, name)
return contact.check_connection()
class PubsubListener():
"""Listener object for PubSub subscriptions."""
_terminate = False
__listening = False
sub = None
    _REFRESH_RATE = 5  # seconds. How often the pubsub HTTP listener is restarted; also the maximum duration termination can take
def __init__(self, topic, eventhandler):
self.topic = topic
self.eventhandler = eventhandler
self.listen()
def _listen(self):
if self.__listening:
return
self.__listening = True
"""blocks the calling thread"""
while not self._terminate:
try:
if int(http_client.version()["Version"].split(".")[1]) >= 11:
with http_client.pubsub.subscribe(self.topic, timeout=self._REFRESH_RATE) as self.sub:
for message in self.sub:
if self._terminate:
self.__listening = False
return
data = {
"senderID": message["from"],
"data": _decode_base64_url(message["data"]),
}
Thread(
target=self.eventhandler,
args=(data,),
name="ipfs_api.PubsubListener-eventhandler"
).start()
else:
with http_client.pubsub.subscribe_old(self.topic) as self.sub:
for message in self.sub:
if self._terminate:
self.__listening = False
return
data = str(base64.b64decode(
str(message).split('\'')[7]), "utf-8")
Thread(
target=self.eventhandler,
args=(data,),
name="ipfs_api.PubsubListener-eventhandler"
).start()
except:
pass
# print(f"IPFS API Pubsub: restarting sub {self.topic}")
self.__listening = False
def listen(self):
self._terminate = False
self.listener_thread = Thread(
target=self._listen, args=(), name="ipfs_api.PubsubListener")
self.listener_thread.start()
def terminate(self):
"""Stop this PubSub subscription, stop listening for data.
May let one more pubsub message through
Takes up to self._REFRESH_RATE seconds to complete.
"""
self._terminate = True
if self.sub:
self.sub.close()
def pubsub_publish(topic, data):
    """Publishes the specified data to the specified IPFS-PubSub topic.
Args:
topic (str): the name of the IPFS PubSub topic to publish to
data (str/bytearray): either the filepath of a file whose
content should be published to the pubsub topic,
or the raw data to be published as a string or bytearray.
When using an older version of IPFS < v0.11.0 however,
only plain data as a string is accepted.
"""
if int(http_client.version()["Version"].split(".")[1]) < 11:
return http_client.pubsub.publish_old(topic, data)
if isinstance(data, str) and not os.path.exists(data):
data = data.encode()
if isinstance(data, bytes) or isinstance(data, bytearray):
with tempfile.NamedTemporaryFile() as tp:
tp.write(data)
tp.flush()
http_client.pubsub.publish(topic, tp.name)
else:
http_client.pubsub.publish(topic, data)
def pubsub_subscribe(topic, eventhandler):
"""
Listens to the specified IPFS PubSub topic, calling the eventhandler
whenever a message is received, passing the message data and its sender
to the eventhandler.
Args:
topic (str): the name of the IPFS PubSub topic to publish to
eventhandler (function): the function to be executed whenever a message is received.
The eventhandler parameter is a dict with the keys 'data' and 'senderID',
except when using an older version of IPFS < v0.11.0,
in which case only the message is passed as a string.
Returns:
PubsubListener: listener object which can be terminated with the .terminate() method (and restarted with the .listen() method)
"""
return PubsubListener(topic, eventhandler)
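# Example sketch of a pubsub round trip (requires a running IPFS daemon with
# pubsub enabled; the topic name is arbitrary):
#     def on_message(message):
#         # on IPFS >= v0.11.0 'message' is a dict with 'senderID' and 'data'
#         print(message)
#     listener = pubsub_subscribe("my-topic", on_message)
#     pubsub_publish("my-topic", "hello")
#     listener.terminate()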
def pubsub_peers(topic: str):
    """Looks up which of the IPFS nodes we are connected to are listening on the given topic.
Returns:
list: peers we are connected to on the specified pubsub topic
"""
return http_client.pubsub.peers(topic=_encode_base64_url(topic.encode()))["Strings"]
def _decode_base64_url(data: str):
"""Performs the URL-Safe multibase decoding required by some functions (since IFPS v0.11.0) on strings"""
if isinstance(data, bytes):
data = data.decode()
data = str(data)[1:].encode()
missing_padding = len(data) % 4
if missing_padding:
data += b'=' * (4 - missing_padding)
return urlsafe_b64decode(data)
def _encode_base64_url(data: bytearray):
"""Performs the URL-Safe multibase encoding required by some functions (since IFPS v0.11.0) on strings"""
if isinstance(data, str):
data = data.encode()
data = urlsafe_b64encode(data)
while data[-1] == 61 and data[-1]:
data = data[:-1]
data = b'u' + data
return data
def wait_till_ipfs_is_running(timeout_sec=None):
"""Waits till it can connect to the local IPFS daemon's HTTP-interface.
Args:
        timeout_sec (int): maximum time to wait for. If this duration is
            exceeded, a TimeoutError is raised.
"""
count = 0
while True:
try:
if is_ipfs_running():
return
except ipfshttpclient.exceptions.ConnectionError as error:
pass
time.sleep(1)
count += 1
if timeout_sec and count == timeout_sec:
raise TimeoutError()
def try_run_ipfs():
"""Tries to use the IPFS CLI to run the local IPFS daemon with PubSub,
like manually executing `ipfs daemon --enable-pubsub-experiment`
"""
from ipfs_cli import try_run_ipfs as _try_run_ipfs
_try_run_ipfs()
if LIBERROR: # if not all modules needed for the ipfs_http_client library were loaded
print("Falling back to IPFS CLI because our HTTP client isn't working;\nNot all modules required by the http-connection could be loaded.")
    from ipfs_cli import *
/vmware-nsx-17.0.0.tar.gz/vmware-nsx-17.0.0/vmware_nsx/services/dynamic_routing/nsx_v/driver.py
import netaddr
from neutron_dynamic_routing.extensions import bgp as bgp_ext
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from neutron_lib.api.definitions import address_scope
from neutron_lib.api.definitions import external_net as extnet_apidef
from neutron_lib import constants as n_const
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import directory
from vmware_nsx._i18n import _
from vmware_nsx.common import exceptions as nsx_exc
from vmware_nsx.common import locking
from vmware_nsx.common import nsxv_constants
from vmware_nsx.db import nsxv_db
from vmware_nsx.extensions import edge_service_gateway_bgp_peer as ext_esg_peer
from vmware_nsx.extensions import projectpluginmap
from vmware_nsx.plugins.nsx_v.vshield.common import exceptions as vcns_exc
LOG = logging.getLogger(__name__)
def ip_prefix(name, ip_address):
return {'ipPrefix': {'name': name, 'ipAddress': ip_address}}
def redistribution_rule(advertise_static_routes, prefix_name, action='permit'):
rule = {
'prefixName': prefix_name,
'action': action,
'from': {
'ospf': False,
'bgp': False,
'connected': not advertise_static_routes,
'static': advertise_static_routes
}
}
return {'rule': rule}
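# For illustration, redistribution_rule(True, 'subnet-123') evaluates to:
#     {'rule': {'prefixName': 'subnet-123', 'action': 'permit',
#               'from': {'ospf': False, 'bgp': False,
#                        'connected': False, 'static': True}}}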
def _get_bgp_neighbour(ip_address, remote_as, password, direction):
bgp_filter = {'bgpFilter': [{'direction': direction, 'action': 'permit'}]}
nbr = {
'ipAddress': ip_address,
'remoteAS': remote_as,
'bgpFilters': bgp_filter,
'holdDownTimer': cfg.CONF.nsxv.bgp_neighbour_hold_down_timer,
'keepAliveTimer': cfg.CONF.nsxv.bgp_neighbour_keep_alive_timer
}
if password:
nbr['password'] = password
return {'bgpNeighbour': nbr}
def bgp_neighbour_from_peer(bgp_peer):
return _get_bgp_neighbour(bgp_peer['peer_ip'],
bgp_peer['remote_as'],
bgp_peer['password'],
direction='out')
def gw_bgp_neighbour(ip_address, remote_as, password):
return _get_bgp_neighbour(ip_address, remote_as, password,
direction='in')
class NSXvBgpDriver(object):
"""Class driver to address the neutron_dynamic_routing API"""
def __init__(self, plugin):
super(NSXvBgpDriver, self).__init__()
self._plugin = plugin
self._core_plugin = directory.get_plugin()
if self._core_plugin.is_tvd_plugin():
self._core_plugin = self._core_plugin.get_plugin_by_type(
projectpluginmap.NsxPlugins.NSX_V)
if not self._core_plugin:
err_msg = _("NSXv BGP cannot work without the NSX-V core plugin")
raise n_exc.InvalidInput(error_message=err_msg)
self._nsxv = self._core_plugin.nsx_v
self._edge_manager = self._core_plugin.edge_manager
def prefix_name(self, subnet_id):
return 'subnet-%s' % subnet_id
def _get_router_edge_info(self, context, router_id):
edge_binding = nsxv_db.get_nsxv_router_binding(context.session,
router_id)
if not edge_binding:
return None, None
# Indicates which routes should be advertised - connected or static.
advertise_static_routes = False
if edge_binding['edge_type'] != nsxv_constants.SERVICE_EDGE:
# Distributed router
plr_id = self._edge_manager.get_plr_by_tlr_id(context, router_id)
edge_binding = nsxv_db.get_nsxv_router_binding(context.session,
plr_id)
if not edge_binding:
# Distributed router isn't bound to plr
return None, None
# PLR for distributed router, advertise static routes.
advertise_static_routes = True
return edge_binding['edge_id'], advertise_static_routes
def get_advertised_routes(self, context, bgp_speaker_id):
routes = []
bgp_speaker = self._plugin.get_bgp_speaker(context, bgp_speaker_id)
edge_router_dict = (
self._get_dynamic_routing_edge_list(context,
bgp_speaker['networks'][0],
bgp_speaker_id))
for edge_id, edge_router_config in edge_router_dict.items():
bgp_identifier = edge_router_config['bgp_identifier']
subnets = self._query_tenant_subnets(
context, edge_router_config['no_snat_routers'])
routes.extend([(subnet['cidr'], bgp_identifier)
for subnet in subnets])
routes = self._plugin._make_advertised_routes_list(routes)
return self._plugin._make_advertised_routes_dict(routes)
def _get_dynamic_routing_edge_list(self, context,
gateway_network_id, bgp_speaker_id):
# Filter the routers attached this network as gateway interface
filters = {'network_id': [gateway_network_id],
'device_owner': [n_const.DEVICE_OWNER_ROUTER_GW]}
fields = ['device_id', 'fixed_ips']
gateway_ports = self._core_plugin.get_ports(context, filters=filters,
fields=fields)
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(
context.session, bgp_speaker_id)
binding_info = {bgp_binding['edge_id']: bgp_binding['bgp_identifier']
for bgp_binding in bgp_bindings}
edge_router_dict = {}
for port in gateway_ports:
router_id = port['device_id']
router = self._core_plugin._get_router(context, router_id)
edge_id, advertise_static_routes = (
self._get_router_edge_info(context, router_id))
if not edge_id:
# Shared router is not attached on any edge
continue
if edge_id not in edge_router_dict:
bgp_identifier = binding_info.get(
edge_id, port['fixed_ips'][0]['ip_address'])
edge_router_dict[edge_id] = {'no_snat_routers': [],
'bgp_identifier':
bgp_identifier,
'advertise_static_routes':
advertise_static_routes}
if not router.enable_snat:
edge_router_dict[edge_id]['no_snat_routers'].append(router_id)
return edge_router_dict
def _get_md_proxy_for_router(self, context, router_id):
binding = nsxv_db.get_nsxv_router_binding(context.session,
router_id)
md_proxy = None
if binding:
az_name = binding['availability_zone']
md_proxy = self._core_plugin.get_metadata_proxy_handler(
az_name)
return md_proxy
def _query_tenant_subnets(self, context, router_ids):
# Query subnets attached to all of routers attached to same edge
subnets = []
for router_id in router_ids:
filters = {'device_id': [router_id],
'device_owner': [n_const.DEVICE_OWNER_ROUTER_INTF]}
int_ports = self._core_plugin.get_ports(context,
filters=filters,
fields=['fixed_ips'])
# We need to skip metadata subnets
md_proxy = self._get_md_proxy_for_router(context, router_id)
for p in int_ports:
subnet_id = p['fixed_ips'][0]['subnet_id']
if md_proxy and md_proxy.is_md_subnet(subnet_id):
continue
subnet = self._core_plugin.get_subnet(context, subnet_id)
subnets.append({'id': subnet_id,
'cidr': subnet['cidr']})
LOG.debug("Got related subnets %s", subnets)
return subnets
def _get_bgp_speakers_by_bgp_peer(self, context, bgp_peer_id):
fields = ['id', 'peers']
bgp_speakers = self._plugin.get_bgp_speakers(context, fields=fields)
bgp_speaker_ids = [bgp_speaker['id'] for bgp_speaker in bgp_speakers
if bgp_peer_id in bgp_speaker['peers']]
return bgp_speaker_ids
def _get_prefixes_and_redistribution_rules(self, subnets,
advertise_static_routes):
prefixes = []
redis_rules = []
for subnet in subnets:
prefix_name = self.prefix_name(subnet['id'])
prefix = ip_prefix(prefix_name, subnet['cidr'])
prefixes.append(prefix)
rule = redistribution_rule(advertise_static_routes, prefix_name)
redis_rules.append(rule)
return prefixes, redis_rules
def create_bgp_speaker(self, context, bgp_speaker):
bgp_speaker_data = bgp_speaker['bgp_speaker']
ip_version = bgp_speaker_data.get('ip_version')
if ip_version and ip_version == 6:
            err_msg = _("NSXv BGP does not support IPv6")
raise n_exc.InvalidInput(error_message=err_msg)
def update_bgp_speaker(self, context, bgp_speaker_id, bgp_speaker):
bgp_obj = bgp_speaker['bgp_speaker']
old_speaker_info = self._plugin.get_bgp_speaker(context,
bgp_speaker_id)
enabled_state = old_speaker_info['advertise_tenant_networks']
new_enabled_state = bgp_obj.get('advertise_tenant_networks',
enabled_state)
if new_enabled_state == enabled_state:
return
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(
context.session, bgp_speaker_id)
edge_ids = [bgp_binding['edge_id'] for bgp_binding in bgp_bindings]
action = 'Enabling' if new_enabled_state else 'Disabling'
LOG.info("%s BGP route redistribution on edges: %s.", action, edge_ids)
for edge_id in edge_ids:
try:
self._nsxv.update_routing_redistribution(edge_id,
new_enabled_state)
except vcns_exc.VcnsApiException:
LOG.warning("Failed to update BGP on edge '%s'.", edge_id)
def delete_bgp_speaker(self, context, bgp_speaker_id):
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(
context.session, bgp_speaker_id)
self._stop_bgp_on_edges(context, bgp_bindings, bgp_speaker_id)
def _validate_bgp_configuration_on_peer_esg(self, bgp_peer):
if not bgp_peer.get('esg_id'):
return
# TBD(roeyc): Validate peer_ip is on subnet
bgp_config = self._nsxv.get_routing_bgp_config(bgp_peer['esg_id'])
remote_as = bgp_peer['remote_as']
esg_id = bgp_peer['esg_id']
esg_as = bgp_config['bgp'].get('localAS')
if not bgp_config['bgp']['enabled']:
raise ext_esg_peer.BgpDisabledOnEsgPeer(esg_id=esg_id)
if esg_as != int(remote_as):
raise ext_esg_peer.EsgRemoteASDoNotMatch(remote_as=remote_as,
esg_id=esg_id,
esg_as=esg_as)
h, resp = self._nsxv.vcns.get_interfaces(esg_id)
for iface in resp['vnics']:
address_groups = iface['addressGroups']['addressGroups']
matching_iface = [ag for ag in address_groups
if ag['primaryAddress'] == bgp_peer['peer_ip']]
if matching_iface:
break
else:
raise ext_esg_peer.EsgInternalIfaceDoesNotMatch(esg_id=esg_id)
def create_bgp_peer(self, context, bgp_peer):
bgp_peer = bgp_peer['bgp_peer']
remote_ip = bgp_peer['peer_ip']
if not netaddr.valid_ipv4(remote_ip):
            err_msg = _("NSXv BGP does not support IPv6")
raise n_exc.InvalidInput(error_message=err_msg)
self._validate_bgp_configuration_on_peer_esg(bgp_peer)
def update_bgp_peer(self, context, bgp_peer_id, bgp_peer):
password = bgp_peer['bgp_peer'].get('password')
old_bgp_peer = self._plugin.get_bgp_peer(context, bgp_peer_id)
# Only password update is relevant for backend.
if old_bgp_peer['password'] == password:
return
bgp_speaker_ids = self._get_bgp_speakers_by_bgp_peer(context,
bgp_peer_id)
# Update the password for the old bgp peer and update NSX
old_bgp_peer['password'] = password
neighbour = bgp_neighbour_from_peer(old_bgp_peer)
for bgp_speaker_id in bgp_speaker_ids:
with locking.LockManager.get_lock(bgp_speaker_id):
peers = self._plugin.get_bgp_peers_by_bgp_speaker(
context, bgp_speaker_id)
if bgp_peer_id not in [p['id'] for p in peers]:
continue
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(
context.session, bgp_speaker_id)
for binding in bgp_bindings:
try:
# Neighbours are identified by their ip address
self._nsxv.update_bgp_neighbours(binding['edge_id'],
[neighbour],
[neighbour])
except vcns_exc.VcnsApiException:
LOG.error("Failed to update BGP neighbor '%s' on "
"edge '%s'", old_bgp_peer['peer_ip'],
binding['edge_id'])
def _validate_bgp_peer(self, context, bgp_speaker_id, new_peer_id):
new_peer = self._plugin._get_bgp_peer(context, new_peer_id)
peers = self._plugin._get_bgp_peers_by_bgp_speaker_binding(
context, bgp_speaker_id)
self._plugin._validate_peer_ips(bgp_speaker_id, peers, new_peer)
def add_bgp_peer(self, context, bgp_speaker_id, bgp_peer_info):
bgp_peer_id = self._plugin._get_id_for(bgp_peer_info, 'bgp_peer_id')
bgp_peer_obj = self._plugin.get_bgp_peer(context,
bgp_peer_id)
nbr = bgp_neighbour_from_peer(bgp_peer_obj)
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(context.session,
bgp_speaker_id)
self._validate_bgp_peer(context, bgp_speaker_id, bgp_peer_obj['id'])
speaker = self._plugin.get_bgp_speaker(context, bgp_speaker_id)
# list of tenant edge routers to be removed as bgp-neighbours to this
# peer if it's associated with specific ESG.
neighbours = []
for binding in bgp_bindings:
try:
self._nsxv.add_bgp_neighbours(binding['edge_id'], [nbr])
except vcns_exc.VcnsApiException:
LOG.error("Failed to add BGP neighbour on '%s'",
binding['edge_id'])
else:
gw_nbr = gw_bgp_neighbour(binding['bgp_identifier'],
speaker['local_as'],
bgp_peer_obj['password'])
neighbours.append(gw_nbr)
                LOG.debug("Successfully added BGP neighbor '%s' on '%s'",
bgp_peer_obj['peer_ip'], binding['edge_id'])
if bgp_peer_obj.get('esg_id'):
edge_gw = bgp_peer_obj['esg_id']
try:
self._nsxv.add_bgp_neighbours(edge_gw, neighbours)
except vcns_exc.VcnsApiException:
with excutils.save_and_reraise_exception():
LOG.error("Failed to add BGP neighbour on GW Edge '%s'",
edge_gw)
def remove_bgp_peer(self, context, bgp_speaker_id, bgp_peer_info):
bgp_peer_id = bgp_peer_info['bgp_peer_id']
bgp_peer_obj = self._plugin.get_bgp_peer(context, bgp_peer_id)
nbr = bgp_neighbour_from_peer(bgp_peer_obj)
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(
context.session, bgp_speaker_id)
speaker = self._plugin.get_bgp_speaker(context, bgp_speaker_id)
# list of tenant edge routers to be removed as bgp-neighbours to this
# peer if it's associated with specific ESG.
neighbours = []
for binding in bgp_bindings:
try:
self._nsxv.remove_bgp_neighbours(binding['edge_id'], [nbr])
except vcns_exc.VcnsApiException:
LOG.error("Failed to remove BGP neighbour on '%s'",
binding['edge_id'])
else:
gw_nbr = gw_bgp_neighbour(binding['bgp_identifier'],
speaker['local_as'],
bgp_peer_obj['password'])
neighbours.append(gw_nbr)
                LOG.debug("Successfully removed BGP neighbor '%s' on '%s'",
bgp_peer_obj['peer_ip'], binding['edge_id'])
if bgp_peer_obj.get('esg_id'):
edge_gw = bgp_peer_obj['esg_id']
try:
self._nsxv.remove_bgp_neighbours(edge_gw, neighbours)
except vcns_exc.VcnsApiException:
LOG.error("Failed to remove BGP neighbour on GW Edge '%s'",
edge_gw)
def _validate_gateway_network(self, context, speaker_id, network_id):
ext_net = self._core_plugin.get_network(context, network_id)
if not ext_net.get(extnet_apidef.EXTERNAL):
raise nsx_exc.NsxBgpNetworkNotExternal(net_id=network_id)
if not ext_net['subnets']:
raise nsx_exc.NsxBgpGatewayNetworkHasNoSubnets(net_id=network_id)
# REVISIT(roeyc): Currently not allowing more than one bgp speaker per
# gateway network.
speakers_on_network = self._plugin._bgp_speakers_for_gateway_network(
context, network_id)
if speakers_on_network:
raise bgp_ext.BgpSpeakerNetworkBindingError(
network_id=network_id,
bgp_speaker_id=speakers_on_network[0]['id'])
subnet_id = ext_net['subnets'][0]
ext_subnet = self._core_plugin.get_subnet(context, subnet_id)
if ext_subnet.get('gateway_ip'):
raise ext_esg_peer.ExternalSubnetHasGW(
network_id=network_id, subnet_id=subnet_id)
if not ext_net[address_scope.IPV4_ADDRESS_SCOPE]:
raise nsx_exc.NsxBgpSpeakerUnableToAddGatewayNetwork(
network_id=network_id, bgp_speaker_id=speaker_id)
return True
def add_gateway_network(self, context, bgp_speaker_id, network_info):
gateway_network_id = network_info['network_id']
if not self._validate_gateway_network(context, bgp_speaker_id,
gateway_network_id):
return
edge_router_dict = self._get_dynamic_routing_edge_list(
context, gateway_network_id, bgp_speaker_id)
speaker = self._plugin.get_bgp_speaker(context, bgp_speaker_id)
bgp_peers = self._plugin.get_bgp_peers_by_bgp_speaker(
context, bgp_speaker_id)
local_as = speaker['local_as']
peers = []
for edge_id, edge_router_config in edge_router_dict.items():
router_ids = edge_router_config['no_snat_routers']
advertise_static_routes = (
edge_router_config['advertise_static_routes'])
subnets = self._query_tenant_subnets(context, router_ids)
# router_id here is in IP address format and is required for
# the BGP configuration.
bgp_identifier = edge_router_config['bgp_identifier']
try:
self._start_bgp_on_edge(context, edge_id, speaker,
bgp_peers, bgp_identifier, subnets,
advertise_static_routes)
except vcns_exc.VcnsApiException:
LOG.error("Failed to configure BGP speaker %s on edge '%s'.",
bgp_speaker_id, edge_id)
else:
peers.append(bgp_identifier)
for edge_gw, password in [(peer['esg_id'], peer['password'])
for peer in bgp_peers if peer.get('esg_id')]:
neighbours = [gw_bgp_neighbour(bgp_id, local_as, password)
for bgp_id in peers]
try:
self._nsxv.add_bgp_neighbours(edge_gw, neighbours)
except vcns_exc.VcnsApiException:
LOG.error("Failed to add BGP neighbour on GW Edge '%s'",
edge_gw)
def _start_bgp_on_edge(self, context, edge_id, speaker, bgp_peers,
bgp_identifier, subnets, advertise_static_routes):
enabled_state = speaker['advertise_tenant_networks']
local_as = speaker['local_as']
prefixes, redis_rules = self._get_prefixes_and_redistribution_rules(
subnets, advertise_static_routes)
bgp_neighbours = [bgp_neighbour_from_peer(bgp_peer)
for bgp_peer in bgp_peers]
try:
self._nsxv.add_bgp_speaker_config(edge_id, bgp_identifier,
local_as, enabled_state,
bgp_neighbours, prefixes,
redis_rules)
except vcns_exc.VcnsApiException:
with excutils.save_and_reraise_exception():
LOG.error("Failed to configure BGP speaker '%s' on edge '%s'.",
speaker['id'], edge_id)
else:
nsxv_db.add_nsxv_bgp_speaker_binding(context.session, edge_id,
speaker['id'], bgp_identifier)
def _stop_bgp_on_edges(self, context, bgp_bindings, speaker_id):
peers_to_remove = []
speaker = self._plugin.get_bgp_speaker(context, speaker_id)
local_as = speaker['local_as']
for bgp_binding in bgp_bindings:
edge_id = bgp_binding['edge_id']
try:
self._nsxv.delete_bgp_speaker_config(edge_id)
except vcns_exc.VcnsApiException:
LOG.error("Failed to delete BGP speaker '%s' config on edge "
"'%s'.", speaker_id, edge_id)
else:
nsxv_db.delete_nsxv_bgp_speaker_binding(context.session,
edge_id)
peers_to_remove.append(bgp_binding['bgp_identifier'])
        # We should also remove all bgp neighbours on gw-edges that correspond
        # to tenant routers associated with this bgp speaker.
bgp_peers = self._plugin.get_bgp_peers_by_bgp_speaker(context,
speaker_id)
gw_edges = [(peer['esg_id'], peer['password'])
for peer in bgp_peers if peer.get('esg_id')]
for gw_edge, password in gw_edges:
neighbours_to_remove = [gw_bgp_neighbour(bgp_identifier,
local_as,
password)
for bgp_identifier in peers_to_remove]
try:
self._nsxv.remove_bgp_neighbours(gw_edge, neighbours_to_remove)
except vcns_exc.VcnsApiException:
LOG.error("Failed to remove BGP neighbour on GW edge '%s'.",
gw_edge)
def remove_gateway_network(self, context, bgp_speaker_id, network_info):
bgp_bindings = nsxv_db.get_nsxv_bgp_speaker_bindings(
context.session, bgp_speaker_id)
self._stop_bgp_on_edges(context, bgp_bindings, bgp_speaker_id)
def _update_edge_bgp_identifier(self, context, bgp_binding, speaker,
new_bgp_identifier):
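        # Point the edge's router-id at the new identifier, then swap the
        # matching neighbour entry on every gateway edge peer before
        # persisting the new identifier on the binding.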
local_as = speaker['local_as']
bgp_peers = self._plugin.get_bgp_peers_by_bgp_speaker(context,
speaker['id'])
self._nsxv.update_router_id(bgp_binding['edge_id'], new_bgp_identifier)
for gw_edge_id, password in [(peer['esg_id'], peer['password'])
for peer in bgp_peers
if peer.get('esg_id')]:
nbr_to_remove = gw_bgp_neighbour(bgp_binding['bgp_identifier'],
local_as, password)
nbr_to_add = gw_bgp_neighbour(new_bgp_identifier, local_as,
password)
self._nsxv.update_bgp_neighbours(gw_edge_id,
[nbr_to_add],
[nbr_to_remove])
with context.session.begin(subtransactions=True):
bgp_binding['bgp_identifier'] = new_bgp_identifier
def process_router_gw_port_update(self, context, speaker,
router, updated_port):
router_id = router['id']
gw_fixed_ip = router.gw_port['fixed_ips'][0]['ip_address']
edge_id, advertise_static_routes = (
self._get_router_edge_info(context, router_id))
if not edge_id:
            # shared router is not attached to any edge
return
bgp_binding = nsxv_db.get_nsxv_bgp_speaker_binding(
context.session, edge_id)
if bgp_binding:
new_fixed_ip = updated_port['fixed_ips'][0]['ip_address']
fixed_ip_updated = gw_fixed_ip != new_fixed_ip
subnets = self._query_tenant_subnets(context, [router_id])
prefixes, redis_rules = (
self._get_prefixes_and_redistribution_rules(
subnets, advertise_static_routes))
# Handle possible snat/no-nat update
if router.enable_snat:
self._nsxv.remove_bgp_redistribution_rules(edge_id, prefixes)
else:
self._nsxv.add_bgp_redistribution_rules(edge_id, prefixes,
redis_rules)
if bgp_binding['bgp_identifier'] == gw_fixed_ip:
if fixed_ip_updated:
self._update_edge_bgp_identifier(context,
bgp_binding,
speaker,
new_fixed_ip)
def enable_bgp_on_router(self, context, speaker, router_id):
local_as = speaker['local_as']
edge_id, advertise_static_routes = (
self._get_router_edge_info(context, router_id))
if not edge_id:
            # shared router is not attached to any edge
return
router = self._core_plugin._get_router(context, router_id)
subnets = self._query_tenant_subnets(context, [router_id])
bgp_peers = self._plugin.get_bgp_peers_by_bgp_speaker(
context, speaker['id'])
bgp_binding = nsxv_db.get_nsxv_bgp_speaker_binding(
context.session, edge_id)
if bgp_binding and subnets:
            # Edge already configured with BGP (e.g. a shared router edge);
            # add the router's attached subnets.
if router.enable_snat:
prefixes = [self.prefix_name(subnet['id'])
for subnet in subnets]
self._nsxv.remove_bgp_redistribution_rules(edge_id, prefixes)
else:
prefixes, redis_rules = (
self._get_prefixes_and_redistribution_rules(
subnets, advertise_static_routes))
self._nsxv.add_bgp_redistribution_rules(edge_id, prefixes,
redis_rules)
elif not bgp_binding:
if router.enable_snat:
subnets = []
bgp_identifier = router.gw_port['fixed_ips'][0]['ip_address']
self._start_bgp_on_edge(context, edge_id, speaker, bgp_peers,
bgp_identifier, subnets,
advertise_static_routes)
for gw_edge_id, password in [(peer['esg_id'], peer['password'])
for peer in bgp_peers
if peer.get('esg_id')]:
nbr = gw_bgp_neighbour(bgp_identifier, local_as, password)
self._nsxv.add_bgp_neighbours(gw_edge_id, [nbr])
def disable_bgp_on_router(self, context, speaker, router_id, gw_ip,
edge_id=None):
speaker = self._plugin.get_bgp_speaker(context, speaker['id'])
current_edge_id, advertise_static_routes = (
self._get_router_edge_info(context, router_id))
edge_id = edge_id or current_edge_id
if not edge_id:
return
bgp_binding = nsxv_db.get_nsxv_bgp_speaker_binding(context.session,
edge_id)
if not bgp_binding:
return
        # Need to ensure that we do not use the metadata IPs
md_proxy = self._get_md_proxy_for_router(context, router_id)
routers_ids = (
self._core_plugin.edge_manager.get_routers_on_same_edge(
context, router_id))
routers_ids.remove(router_id)
        # We need to find out what other routers are hosted on the edges and
        # whether they have gw addresses that could replace the current
        # bgp-identifier (if required).
filters = {'device_owner': [n_const.DEVICE_OWNER_ROUTER_GW],
'device_id': routers_ids}
edge_gw_ports = self._core_plugin.get_ports(context, filters=filters)
alt_bgp_identifiers = [
p['fixed_ips'][0]['ip_address'] for p in edge_gw_ports
if (not md_proxy or
not md_proxy.is_md_subnet(
p['fixed_ips'][0]['subnet_id']))]
if alt_bgp_identifiers:
# Shared router, only remove prefixes and redistribution
# rules.
subnets = self._query_tenant_subnets(context, [router_id])
prefixes = [self.prefix_name(subnet['id'])
for subnet in subnets]
self._nsxv.remove_bgp_redistribution_rules(edge_id, prefixes)
if bgp_binding['bgp_identifier'] == gw_ip:
self._update_edge_bgp_identifier(context, bgp_binding, speaker,
alt_bgp_identifiers[0])
else:
self._stop_bgp_on_edges(context, [bgp_binding], speaker['id'])
def advertise_subnet(self, context, speaker_id, router_id, subnet):
router = self._core_plugin._get_router(context, router_id)
if router.enable_snat:
            # Do nothing: a redistribution rule is only added when the subnet
            # actually needs to be advertised (i.e. SNAT is disabled).
return
edge_id, advertise_static_routes = (
self._get_router_edge_info(context, router_id))
if not edge_id:
            # shared router is not attached to any edge
return
prefixes, redis_rules = self._get_prefixes_and_redistribution_rules(
[subnet], advertise_static_routes)
self._nsxv.add_bgp_redistribution_rules(edge_id, prefixes, redis_rules)
def withdraw_subnet(self, context, speaker_id, router_id, subnet_id):
router = self._core_plugin._get_router(context, router_id)
if router.enable_snat:
            # Do nothing: no redistribution rule was added for this subnet
            # while SNAT was enabled, so there is nothing to withdraw.
return
edge_id, advertise_static_routes = (
self._get_router_edge_info(context, router_id))
prefix_name = self.prefix_name(subnet_id)
self._nsxv.remove_bgp_redistribution_rules(edge_id, [prefix_name]) | PypiClean |
/superset-2.0.0-custom-0.0.2.tar.gz/superset-2.0.0-custom-0.0.2/superset/utils/webdriver.py |
import logging
from enum import Enum
from time import sleep
from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
from flask import current_app
from selenium.common.exceptions import (
StaleElementReferenceException,
TimeoutException,
WebDriverException,
)
from selenium.webdriver import chrome, firefox, FirefoxProfile
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from superset.extensions import machine_auth_provider_factory
from superset.utils.retries import retry_call
WindowSize = Tuple[int, int]
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from flask_appbuilder.security.sqla.models import User
class DashboardStandaloneMode(Enum):
HIDE_NAV = 1
HIDE_NAV_AND_TITLE = 2
REPORT = 3
class WebDriverProxy:
def __init__(self, driver_type: str, window: Optional[WindowSize] = None):
self._driver_type = driver_type
self._window: WindowSize = window or (800, 600)
self._screenshot_locate_wait = current_app.config["SCREENSHOT_LOCATE_WAIT"]
self._screenshot_load_wait = current_app.config["SCREENSHOT_LOAD_WAIT"]
def create(self) -> WebDriver:
pixel_density = current_app.config["WEBDRIVER_WINDOW"].get("pixel_density", 1)
if self._driver_type == "firefox":
driver_class = firefox.webdriver.WebDriver
options = firefox.options.Options()
profile = FirefoxProfile()
profile.set_preference("layout.css.devPixelsPerPx", str(pixel_density))
kwargs: Dict[Any, Any] = dict(options=options, firefox_profile=profile)
elif self._driver_type == "chrome":
driver_class = chrome.webdriver.WebDriver
options = chrome.options.Options()
options.add_argument(f"--force-device-scale-factor={pixel_density}")
options.add_argument(f"--window-size={self._window[0]},{self._window[1]}")
kwargs = dict(options=options)
else:
raise Exception(f"Webdriver name ({self._driver_type}) not supported")
# Prepare args for the webdriver init
# Add additional configured options
for arg in current_app.config["WEBDRIVER_OPTION_ARGS"]:
options.add_argument(arg)
kwargs.update(current_app.config["WEBDRIVER_CONFIGURATION"])
logger.info("Init selenium driver")
return driver_class(**kwargs)
def auth(self, user: "User") -> WebDriver:
driver = self.create()
return machine_auth_provider_factory.instance.authenticate_webdriver(
driver, user
)
@staticmethod
def destroy(driver: WebDriver, tries: int = 2) -> None:
"""Destroy a driver"""
# This is some very flaky code in selenium. Hence the retries
# and catch-all exceptions
try:
retry_call(driver.close, max_tries=tries)
except Exception: # pylint: disable=broad-except
pass
try:
driver.quit()
except Exception: # pylint: disable=broad-except
pass
def get_screenshot(
self, url: str, element_name: str, user: "User"
) -> Optional[bytes]:
driver = self.auth(user)
driver.set_window_size(*self._window)
driver.get(url)
img: Optional[bytes] = None
selenium_headstart = current_app.config["SCREENSHOT_SELENIUM_HEADSTART"]
logger.debug("Sleeping for %i seconds", selenium_headstart)
sleep(selenium_headstart)
try:
logger.debug("Wait for the presence of %s", element_name)
element = WebDriverWait(driver, self._screenshot_locate_wait).until(
EC.presence_of_element_located((By.CLASS_NAME, element_name))
)
logger.debug("Wait for .loading to be done")
WebDriverWait(driver, self._screenshot_load_wait).until_not(
EC.presence_of_all_elements_located((By.CLASS_NAME, "loading"))
)
logger.debug("Wait for chart to have content")
WebDriverWait(driver, self._screenshot_locate_wait).until(
EC.visibility_of_all_elements_located(
(By.CLASS_NAME, "slice_container")
)
)
selenium_animation_wait = current_app.config[
"SCREENSHOT_SELENIUM_ANIMATION_WAIT"
]
logger.debug("Wait %i seconds for chart animation", selenium_animation_wait)
sleep(selenium_animation_wait)
logger.info("Taking a PNG screenshot of url %s", url)
img = element.screenshot_as_png
except TimeoutException:
logger.warning("Selenium timed out requesting url %s", url, exc_info=True)
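            # Best-effort fallback: the page may have rendered enough to be
            # useful even though the explicit waits timed out, so still try
            # to capture the located element.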
img = element.screenshot_as_png
except StaleElementReferenceException:
logger.error(
"Selenium got a stale element while requesting url %s",
url,
exc_info=True,
)
except WebDriverException as ex:
logger.error(ex, exc_info=True)
finally:
self.destroy(driver, current_app.config["SCREENSHOT_SELENIUM_RETRIES"])
return img | PypiClean |
/invenio-communities-7.7.0.tar.gz/invenio-communities-7.7.0/invenio_communities/permissions.py | from invenio_administration.generators import Administration
from invenio_records_permissions.generators import (
AnyUser,
AuthenticatedUser,
Disable,
IfConfig,
SystemProcess,
)
from invenio_records_permissions.policies import BasePermissionPolicy
from .generators import (
AllowedMemberTypes,
CommunityCurators,
CommunityManagers,
CommunityManagersForRole,
CommunityMembers,
CommunityOwners,
CommunitySelfMember,
GroupsEnabled,
IfPolicyClosed,
IfRestricted,
)
# Permission Policy
class CommunityPermissionPolicy(BasePermissionPolicy):
"""Permissions for Community CRUD operations."""
# Community
can_create = [AuthenticatedUser(), SystemProcess()]
can_read = [
IfRestricted("visibility", then_=[CommunityMembers()], else_=[AnyUser()]),
SystemProcess(),
]
can_update = [CommunityOwners(), SystemProcess()]
can_delete = [CommunityOwners(), SystemProcess()]
can_manage_access = [
IfConfig("COMMUNITIES_ALLOW_RESTRICTED", then_=can_update, else_=[]),
]
can_create_restricted = [
IfConfig("COMMUNITIES_ALLOW_RESTRICTED", then_=can_create, else_=[]),
]
can_search = [AnyUser(), SystemProcess()]
can_search_user_communities = [AuthenticatedUser(), SystemProcess()]
can_search_invites = [CommunityManagers(), SystemProcess()]
can_search_requests = [CommunityManagers(), CommunityCurators(), SystemProcess()]
can_rename = [CommunityOwners(), SystemProcess()]
can_submit_record = [
IfPolicyClosed(
"record_policy",
then_=[CommunityMembers(), SystemProcess()],
else_=[
IfRestricted(
"visibility",
then_=[CommunityMembers()],
else_=[AuthenticatedUser()],
),
],
),
]
# who can include a record directly, without a review
can_include_directly = [
IfPolicyClosed(
"review_policy",
then_=[Disable()],
else_=[CommunityCurators()],
),
]
can_members_add = [
CommunityManagersForRole(),
AllowedMemberTypes("group"),
GroupsEnabled("group"),
SystemProcess(),
]
can_members_invite = [
CommunityManagersForRole(),
AllowedMemberTypes("user", "email"),
SystemProcess(),
]
can_members_manage = [
CommunityManagers(),
SystemProcess(),
]
can_members_search = [
CommunityMembers(),
SystemProcess(),
]
can_members_search_public = [
IfRestricted("visibility", then_=[CommunityMembers()], else_=[AnyUser()]),
SystemProcess(),
]
# Ability to use membership update api
can_members_bulk_update = [
CommunityMembers(),
SystemProcess(),
]
can_members_bulk_delete = can_members_bulk_update
# Ability to update a single membership
can_members_update = [
CommunityManagersForRole(),
CommunitySelfMember(),
SystemProcess(),
]
# Ability to delete a single membership
can_members_delete = can_members_update
can_invite_owners = [CommunityOwners(), SystemProcess()]
# Abilities for featured communities
can_featured_search = [AnyUser(), SystemProcess()]
can_featured_list = [Administration(), SystemProcess()]
can_featured_create = [Administration(), SystemProcess()]
can_featured_update = [Administration(), SystemProcess()]
can_featured_delete = [Administration(), SystemProcess()]
    # For now this hides the `is_verified` field. It should be replaced with
    # proper permissions so that the field is exposed only to moderators.
can_moderate = [Disable()]
def can_perform_action(community, context):
"""Check if the given action is available on the request."""
action = context.get("action")
identity = context.get("identity")
permission_policy_cls = context.get("permission_policy_cls")
permission = permission_policy_cls(action, community=community)
return permission.allows(identity) | PypiClean |
/e-fonenana-frontend-20190305.1.tar.gz/e-fonenana-frontend-20190305.1/hass_frontend_es5/71e1b56b1dfed1708bd7.chunk.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[81],{160:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_polymer_polymer_legacy_js__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(2),_polymer_iron_flex_layout_iron_flex_layout_js__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(40),_polymer_iron_image_iron_image_js__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(170),_polymer_paper_styles_element_styles_paper_material_styles_js__WEBPACK_IMPORTED_MODULE_3__=__webpack_require__(168),_polymer_paper_styles_default_theme_js__WEBPACK_IMPORTED_MODULE_4__=__webpack_require__(41),_polymer_polymer_lib_legacy_polymer_fn_js__WEBPACK_IMPORTED_MODULE_5__=__webpack_require__(4),_polymer_polymer_lib_utils_html_tag_js__WEBPACK_IMPORTED_MODULE_6__=__webpack_require__(3);function _templateObject(){var data=_taggedTemplateLiteral(["\n <style include=\"paper-material-styles\">\n :host {\n display: inline-block;\n position: relative;\n box-sizing: border-box;\n background-color: var(--paper-card-background-color, var(--primary-background-color));\n border-radius: 2px;\n\n @apply --paper-font-common-base;\n @apply --paper-card;\n }\n\n /* IE 10 support for HTML5 hidden attr */\n :host([hidden]), [hidden] {\n display: none !important;\n }\n\n .header {\n position: relative;\n border-top-left-radius: inherit;\n border-top-right-radius: inherit;\n overflow: hidden;\n\n @apply --paper-card-header;\n }\n\n .header iron-image {\n display: block;\n width: 100%;\n --iron-image-width: 100%;\n pointer-events: none;\n\n @apply --paper-card-header-image;\n }\n\n .header .title-text {\n padding: 16px;\n font-size: 24px;\n font-weight: 400;\n color: var(--paper-card-header-color, #000);\n\n @apply --paper-card-header-text;\n }\n\n .header .title-text.over-image {\n position: absolute;\n bottom: 0px;\n\n @apply --paper-card-header-image-text;\n }\n\n :host ::slotted(.card-content) {\n padding: 16px;\n position:relative;\n\n @apply --paper-card-content;\n }\n\n :host ::slotted(.card-actions) {\n border-top: 1px solid #e8e8e8;\n padding: 5px 16px;\n position:relative;\n\n @apply --paper-card-actions;\n }\n\n :host([elevation=\"1\"]) {\n @apply --paper-material-elevation-1;\n }\n\n :host([elevation=\"2\"]) {\n @apply --paper-material-elevation-2;\n }\n\n :host([elevation=\"3\"]) {\n @apply --paper-material-elevation-3;\n }\n\n :host([elevation=\"4\"]) {\n @apply --paper-material-elevation-4;\n }\n\n :host([elevation=\"5\"]) {\n @apply --paper-material-elevation-5;\n }\n </style>\n\n <div class=\"header\">\n <iron-image hidden$=\"[[!image]]\" aria-hidden$=\"[[_isHidden(image)]]\" src=\"[[image]]\" alt=\"[[alt]]\" placeholder=\"[[placeholderImage]]\" preload=\"[[preloadImage]]\" fade=\"[[fadeImage]]\"></iron-image>\n <div hidden$=\"[[!heading]]\" class$=\"title-text [[_computeHeadingClass(image)]]\">[[heading]]</div>\n </div>\n\n <slot></slot>\n"],["\n <style include=\"paper-material-styles\">\n :host {\n display: inline-block;\n position: relative;\n box-sizing: border-box;\n background-color: var(--paper-card-background-color, var(--primary-background-color));\n border-radius: 2px;\n\n @apply --paper-font-common-base;\n @apply --paper-card;\n }\n\n /* IE 10 support for HTML5 hidden attr */\n :host([hidden]), [hidden] {\n display: none !important;\n }\n\n .header {\n position: relative;\n border-top-left-radius: inherit;\n border-top-right-radius: inherit;\n overflow: 
hidden;\n\n @apply --paper-card-header;\n }\n\n .header iron-image {\n display: block;\n width: 100%;\n --iron-image-width: 100%;\n pointer-events: none;\n\n @apply --paper-card-header-image;\n }\n\n .header .title-text {\n padding: 16px;\n font-size: 24px;\n font-weight: 400;\n color: var(--paper-card-header-color, #000);\n\n @apply --paper-card-header-text;\n }\n\n .header .title-text.over-image {\n position: absolute;\n bottom: 0px;\n\n @apply --paper-card-header-image-text;\n }\n\n :host ::slotted(.card-content) {\n padding: 16px;\n position:relative;\n\n @apply --paper-card-content;\n }\n\n :host ::slotted(.card-actions) {\n border-top: 1px solid #e8e8e8;\n padding: 5px 16px;\n position:relative;\n\n @apply --paper-card-actions;\n }\n\n :host([elevation=\"1\"]) {\n @apply --paper-material-elevation-1;\n }\n\n :host([elevation=\"2\"]) {\n @apply --paper-material-elevation-2;\n }\n\n :host([elevation=\"3\"]) {\n @apply --paper-material-elevation-3;\n }\n\n :host([elevation=\"4\"]) {\n @apply --paper-material-elevation-4;\n }\n\n :host([elevation=\"5\"]) {\n @apply --paper-material-elevation-5;\n }\n </style>\n\n <div class=\"header\">\n <iron-image hidden\\$=\"[[!image]]\" aria-hidden\\$=\"[[_isHidden(image)]]\" src=\"[[image]]\" alt=\"[[alt]]\" placeholder=\"[[placeholderImage]]\" preload=\"[[preloadImage]]\" fade=\"[[fadeImage]]\"></iron-image>\n <div hidden\\$=\"[[!heading]]\" class\\$=\"title-text [[_computeHeadingClass(image)]]\">[[heading]]</div>\n </div>\n\n <slot></slot>\n"]);_templateObject=function _templateObject(){return data};return data}function _taggedTemplateLiteral(strings,raw){if(!raw){raw=strings.slice(0)}return Object.freeze(Object.defineProperties(strings,{raw:{value:Object.freeze(raw)}}))}/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/Object(_polymer_polymer_lib_legacy_polymer_fn_js__WEBPACK_IMPORTED_MODULE_5__.a)({_template:Object(_polymer_polymer_lib_utils_html_tag_js__WEBPACK_IMPORTED_MODULE_6__.a)(_templateObject()),is:"paper-card",properties:{heading:{type:String,value:"",observer:"_headingChanged"},image:{type:String,value:""},alt:{type:String},preloadImage:{type:Boolean,value:!1},fadeImage:{type:Boolean,value:!1},placeholderImage:{type:String,value:null},elevation:{type:Number,value:1,reflectToAttribute:!0},animatedShadow:{type:Boolean,value:!1},animated:{type:Boolean,reflectToAttribute:!0,readOnly:!0,computed:"_computeAnimated(animatedShadow)"}},_isHidden:function _isHidden(image){return image?"false":"true"},_headingChanged:function _headingChanged(heading){var currentHeading=this.getAttribute("heading"),currentLabel=this.getAttribute("aria-label");if("string"!==typeof currentLabel||currentLabel===currentHeading){this.setAttribute("aria-label",heading)}},_computeHeadingClass:function _computeHeadingClass(image){return image?" over-image":""},_computeAnimated:function _computeAnimated(animatedShadow){return animatedShadow}})},168:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_polymer_polymer_legacy_js__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(2),_shadow_js__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(98),_polymer_polymer_lib_utils_html_tag_js__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(3);function _templateObject(){var data=_taggedTemplateLiteral(["\n<dom-module id=\"paper-material-styles\">\n <template>\n <style>\n html {\n --paper-material: {\n display: block;\n position: relative;\n };\n --paper-material-elevation-1: {\n @apply --shadow-elevation-2dp;\n };\n --paper-material-elevation-2: {\n @apply --shadow-elevation-4dp;\n };\n --paper-material-elevation-3: {\n @apply --shadow-elevation-6dp;\n };\n --paper-material-elevation-4: {\n @apply --shadow-elevation-8dp;\n };\n --paper-material-elevation-5: {\n @apply --shadow-elevation-16dp;\n };\n }\n .paper-material {\n @apply --paper-material;\n }\n .paper-material[elevation=\"1\"] {\n @apply --paper-material-elevation-1;\n }\n .paper-material[elevation=\"2\"] {\n @apply --paper-material-elevation-2;\n }\n .paper-material[elevation=\"3\"] {\n @apply --paper-material-elevation-3;\n }\n .paper-material[elevation=\"4\"] {\n @apply --paper-material-elevation-4;\n }\n .paper-material[elevation=\"5\"] {\n @apply --paper-material-elevation-5;\n }\n\n /* Duplicate the styles because of https://github.com/webcomponents/shadycss/issues/193 */\n :host {\n --paper-material: {\n display: block;\n position: relative;\n };\n --paper-material-elevation-1: {\n @apply --shadow-elevation-2dp;\n };\n --paper-material-elevation-2: {\n @apply --shadow-elevation-4dp;\n };\n --paper-material-elevation-3: {\n @apply --shadow-elevation-6dp;\n };\n --paper-material-elevation-4: {\n @apply --shadow-elevation-8dp;\n };\n --paper-material-elevation-5: {\n @apply --shadow-elevation-16dp;\n };\n }\n :host(.paper-material) {\n @apply --paper-material;\n }\n :host(.paper-material[elevation=\"1\"]) {\n @apply --paper-material-elevation-1;\n }\n :host(.paper-material[elevation=\"2\"]) {\n @apply --paper-material-elevation-2;\n }\n :host(.paper-material[elevation=\"3\"]) {\n @apply --paper-material-elevation-3;\n }\n :host(.paper-material[elevation=\"4\"]) {\n @apply --paper-material-elevation-4;\n }\n :host(.paper-material[elevation=\"5\"]) {\n @apply --paper-material-elevation-5;\n }\n </style>\n 
</template>\n</dom-module>"]);_templateObject=function _templateObject(){return data};return data}function _taggedTemplateLiteral(strings,raw){if(!raw){raw=strings.slice(0)}return Object.freeze(Object.defineProperties(strings,{raw:{value:Object.freeze(raw)}}))}/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/var template=Object(_polymer_polymer_lib_utils_html_tag_js__WEBPACK_IMPORTED_MODULE_2__.a)(_templateObject());template.setAttribute("style","display: none;");document.head.appendChild(template.content)},170:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_polymer_polymer_legacy_js__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(2),_polymer_polymer_lib_legacy_polymer_fn_js__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(4),_polymer_polymer_lib_utils_html_tag_js__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(3),_polymer_polymer_lib_utils_resolve_url_js__WEBPACK_IMPORTED_MODULE_3__=__webpack_require__(16);function _templateObject(){var data=_taggedTemplateLiteral(["\n <style>\n :host {\n display: inline-block;\n overflow: hidden;\n position: relative;\n }\n\n #baseURIAnchor {\n display: none;\n }\n\n #sizedImgDiv {\n position: absolute;\n top: 0px;\n right: 0px;\n bottom: 0px;\n left: 0px;\n\n display: none;\n }\n\n #img {\n display: block;\n width: var(--iron-image-width, auto);\n height: var(--iron-image-height, auto);\n }\n\n :host([sizing]) #sizedImgDiv {\n display: block;\n }\n\n :host([sizing]) #img {\n display: none;\n }\n\n #placeholder {\n position: absolute;\n top: 0px;\n right: 0px;\n bottom: 0px;\n left: 0px;\n\n background-color: inherit;\n opacity: 1;\n\n @apply --iron-image-placeholder;\n }\n\n #placeholder.faded-out {\n transition: opacity 0.5s linear;\n opacity: 0;\n }\n </style>\n\n <a id=\"baseURIAnchor\" href=\"#\"></a>\n <div id=\"sizedImgDiv\" role=\"img\" hidden$=\"[[_computeImgDivHidden(sizing)]]\" aria-hidden$=\"[[_computeImgDivARIAHidden(alt)]]\" aria-label$=\"[[_computeImgDivARIALabel(alt, src)]]\"></div>\n <img id=\"img\" alt$=\"[[alt]]\" hidden$=\"[[_computeImgHidden(sizing)]]\" crossorigin$=\"[[crossorigin]]\" on-load=\"_imgOnLoad\" on-error=\"_imgOnError\">\n <div id=\"placeholder\" hidden$=\"[[_computePlaceholderHidden(preload, fade, loading, loaded)]]\" class$=\"[[_computePlaceholderClassName(preload, fade, loading, loaded)]]\"></div>\n"],["\n <style>\n :host {\n display: inline-block;\n overflow: hidden;\n position: relative;\n }\n\n #baseURIAnchor {\n display: none;\n }\n\n #sizedImgDiv {\n position: absolute;\n top: 0px;\n right: 0px;\n bottom: 0px;\n left: 0px;\n\n display: none;\n }\n\n #img {\n display: block;\n width: var(--iron-image-width, auto);\n height: var(--iron-image-height, auto);\n }\n\n :host([sizing]) #sizedImgDiv {\n display: block;\n }\n\n :host([sizing]) #img {\n display: none;\n }\n\n #placeholder {\n position: absolute;\n top: 0px;\n right: 0px;\n bottom: 0px;\n left: 0px;\n\n background-color: inherit;\n opacity: 1;\n\n @apply --iron-image-placeholder;\n }\n\n #placeholder.faded-out {\n transition: opacity 0.5s linear;\n opacity: 0;\n }\n </style>\n\n <a id=\"baseURIAnchor\" href=\"#\"></a>\n <div id=\"sizedImgDiv\" role=\"img\" hidden\\$=\"[[_computeImgDivHidden(sizing)]]\" aria-hidden\\$=\"[[_computeImgDivARIAHidden(alt)]]\" aria-label\\$=\"[[_computeImgDivARIALabel(alt, src)]]\"></div>\n <img id=\"img\" alt\\$=\"[[alt]]\" hidden\\$=\"[[_computeImgHidden(sizing)]]\" crossorigin\\$=\"[[crossorigin]]\" on-load=\"_imgOnLoad\" on-error=\"_imgOnError\">\n <div id=\"placeholder\" hidden\\$=\"[[_computePlaceholderHidden(preload, fade, loading, loaded)]]\" class\\$=\"[[_computePlaceholderClassName(preload, fade, loading, loaded)]]\"></div>\n"]);_templateObject=function _templateObject(){return data};return data}function _taggedTemplateLiteral(strings,raw){if(!raw){raw=strings.slice(0)}return 
Object.freeze(Object.defineProperties(strings,{raw:{value:Object.freeze(raw)}}))}/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/Object(_polymer_polymer_lib_legacy_polymer_fn_js__WEBPACK_IMPORTED_MODULE_1__.a)({_template:Object(_polymer_polymer_lib_utils_html_tag_js__WEBPACK_IMPORTED_MODULE_2__.a)(_templateObject()),is:"iron-image",properties:{src:{type:String,value:""},alt:{type:String,value:null},crossorigin:{type:String,value:null},preventLoad:{type:Boolean,value:!1},sizing:{type:String,value:null,reflectToAttribute:!0},position:{type:String,value:"center"},preload:{type:Boolean,value:!1},placeholder:{type:String,value:null,observer:"_placeholderChanged"},fade:{type:Boolean,value:!1},loaded:{notify:!0,readOnly:!0,type:Boolean,value:!1},loading:{notify:!0,readOnly:!0,type:Boolean,value:!1},error:{notify:!0,readOnly:!0,type:Boolean,value:!1},width:{observer:"_widthChanged",type:Number,value:null},height:{observer:"_heightChanged",type:Number,value:null}},observers:["_transformChanged(sizing, position)","_loadStateObserver(src, preventLoad)"],created:function created(){this._resolvedSrc=""},_imgOnLoad:function _imgOnLoad(){if(this.$.img.src!==this._resolveSrc(this.src)){return}this._setLoading(!1);this._setLoaded(!0);this._setError(!1)},_imgOnError:function _imgOnError(){if(this.$.img.src!==this._resolveSrc(this.src)){return}this.$.img.removeAttribute("src");this.$.sizedImgDiv.style.backgroundImage="";this._setLoading(!1);this._setLoaded(!1);this._setError(!0)},_computePlaceholderHidden:function _computePlaceholderHidden(){return!this.preload||!this.fade&&!this.loading&&this.loaded},_computePlaceholderClassName:function _computePlaceholderClassName(){return this.preload&&this.fade&&!this.loading&&this.loaded?"faded-out":""},_computeImgDivHidden:function _computeImgDivHidden(){return!this.sizing},_computeImgDivARIAHidden:function _computeImgDivARIAHidden(){return""===this.alt?"true":void 0},_computeImgDivARIALabel:function _computeImgDivARIALabel(){if(null!==this.alt){return this.alt}if(""===this.src){return""}var resolved=this._resolveSrc(this.src);return resolved.replace(/[?|#].*/g,"").split("/").pop()},_computeImgHidden:function _computeImgHidden(){return!!this.sizing},_widthChanged:function _widthChanged(){this.style.width=isNaN(this.width)?this.width:this.width+"px"},_heightChanged:function _heightChanged(){this.style.height=isNaN(this.height)?this.height:this.height+"px"},_loadStateObserver:function _loadStateObserver(src,preventLoad){var newResolvedSrc=this._resolveSrc(src);if(newResolvedSrc===this._resolvedSrc){return}this._resolvedSrc="";this.$.img.removeAttribute("src");this.$.sizedImgDiv.style.backgroundImage="";if(""===src||preventLoad){this._setLoading(!1);this._setLoaded(!1);this._setError(!1)}else{this._resolvedSrc=newResolvedSrc;this.$.img.src=this._resolvedSrc;this.$.sizedImgDiv.style.backgroundImage="url(\""+this._resolvedSrc+"\")";this._setLoading(!0);this._setLoaded(!1);this._setError(!1)}},_placeholderChanged:function _placeholderChanged(){this.$.placeholder.style.backgroundImage=this.placeholder?"url(\""+this.placeholder+"\")":""},_transformChanged:function _transformChanged(){var sizedImgDivStyle=this.$.sizedImgDiv.style,placeholderStyle=this.$.placeholder.style;sizedImgDivStyle.backgroundSize=placeholderStyle.backgroundSize=this.sizing;sizedImgDivStyle.backgroundPosition=placeholderStyle.backgroundPosition=this.sizing?this.position:"";sizedImgDivStyle.backgroundRepeat=placeholderStyle.backgroundRepeat=this.sizing?"no-repeat":""},_resolveSrc:function _resolveSrc(testSrc){var 
resolved=Object(_polymer_polymer_lib_utils_resolve_url_js__WEBPACK_IMPORTED_MODULE_3__.c)(testSrc,this.$.baseURIAnchor.href);if("/"===resolved[0]){resolved=(location.origin||location.protocol+"//"+location.host)+resolved}return resolved}})},340:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_app_storage_app_storage_behavior__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(390),_polymer_polymer_lib_legacy_polymer_fn__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(4),_polymer_polymer_polymer_legacy__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(2);/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/Object(_polymer_polymer_lib_legacy_polymer_fn__WEBPACK_IMPORTED_MODULE_1__.a)({is:"app-localstorage-document",behaviors:[_polymer_app_storage_app_storage_behavior__WEBPACK_IMPORTED_MODULE_0__.a],properties:{key:{type:String,notify:!0},sessionOnly:{type:Boolean,value:!1},storage:{type:Object,computed:"__computeStorage(sessionOnly)"}},observers:["__storageSourceChanged(storage, key)"],attached:function attached(){this.listen(window,"storage","__onStorage");this.listen(window.top,"app-local-storage-changed","__onAppLocalStorageChanged")},detached:function detached(){this.unlisten(window,"storage","__onStorage");this.unlisten(window.top,"app-local-storage-changed","__onAppLocalStorageChanged")},get isNew(){return!this.key},saveValue:function saveValue(key){try{this.__setStorageValue(key,this.data)}catch(e){return Promise.reject(e)}this.key=key;return Promise.resolve()},reset:function reset(){this.key=null;this.data=this.zeroValue},destroy:function destroy(){try{this.storage.removeItem(this.key);this.reset()}catch(e){return Promise.reject(e)}return Promise.resolve()},getStoredValue:function getStoredValue(path){var value;if(null!=this.key){try{value=this.__parseValueFromStorage();if(null!=value){value=this.get(path,{data:value})}else{value=void 0}}catch(e){return Promise.reject(e)}}return Promise.resolve(value)},setStoredValue:function setStoredValue(path,value){if(null!=this.key){try{this.__setStorageValue(this.key,this.data)}catch(e){return Promise.reject(e)}this.fire("app-local-storage-changed",this,{node:window.top})}return Promise.resolve(value)},__computeStorage:function __computeStorage(sessionOnly){return sessionOnly?window.sessionStorage:window.localStorage},__storageSourceChanged:function __storageSourceChanged(storage,key){this._initializeStoredValue()},__onStorage:function __onStorage(event){if(event.key!==this.key||event.storageArea!==this.storage){return}this.syncToMemory(function(){this.set("data",this.__parseValueFromStorage())})},__onAppLocalStorageChanged:function __onAppLocalStorageChanged(event){if(event.detail===this||event.detail.key!==this.key||event.detail.storage!==this.storage){return}this.syncToMemory(function(){this.set("data",event.detail.data)})},__parseValueFromStorage:function __parseValueFromStorage(){try{return JSON.parse(this.storage.getItem(this.key))}catch(e){console.error("Failed to parse value from storage for",this.key)}},__setStorageValue:function __setStorageValue(key,value){if("undefined"===typeof value)value=null;this.storage.setItem(key,JSON.stringify(value))}})},735:function(module,__webpack_exports__,__webpack_require__){"use strict";__webpack_require__.r(__webpack_exports__);var 
_polymer_app_layout_app_header_layout_app_header_layout__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(135),_polymer_app_layout_app_header_app_header__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(134),_polymer_app_layout_app_toolbar_app_toolbar__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(111),_material_mwc_button__WEBPACK_IMPORTED_MODULE_3__=__webpack_require__(73),_polymer_paper_card_paper_card__WEBPACK_IMPORTED_MODULE_4__=__webpack_require__(160),_polymer_paper_input_paper_input__WEBPACK_IMPORTED_MODULE_5__=__webpack_require__(80),_polymer_paper_input_paper_textarea__WEBPACK_IMPORTED_MODULE_6__=__webpack_require__(196),_polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_7__=__webpack_require__(3),_polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_8__=__webpack_require__(20),_components_ha_menu_button__WEBPACK_IMPORTED_MODULE_9__=__webpack_require__(136),_resources_ha_style__WEBPACK_IMPORTED_MODULE_10__=__webpack_require__(101),_util_app_localstorage_document__WEBPACK_IMPORTED_MODULE_11__=__webpack_require__(340);function _typeof(obj){if("function"===typeof Symbol&&"symbol"===typeof Symbol.iterator){_typeof=function _typeof(obj){return typeof obj}}else{_typeof=function _typeof(obj){return obj&&"function"===typeof Symbol&&obj.constructor===Symbol&&obj!==Symbol.prototype?"symbol":typeof obj}}return _typeof(obj)}function _templateObject(){var data=_taggedTemplateLiteral(["\n <style include=\"ha-style\">\n :host {\n -ms-user-select: initial;\n -webkit-user-select: initial;\n -moz-user-select: initial;\n }\n\n .content {\n padding: 24px 0 32px;\n max-width: 600px;\n margin: 0 auto;\n direction: ltr;\n }\n\n paper-card {\n display: block;\n }\n\n mwc-button {\n background-color: white;\n }\n </style>\n\n <app-header-layout has-scrolling-region>\n <app-header slot=\"header\" fixed>\n <app-toolbar>\n <ha-menu-button\n narrow=\"[[narrow]]\"\n show-menu=\"[[showMenu]]\"\n ></ha-menu-button>\n <div main-title>MQTT</div>\n </app-toolbar>\n </app-header>\n\n <app-localstorage-document key=\"panel-dev-mqtt-topic\" data=\"{{topic}}\">\n </app-localstorage-document>\n <app-localstorage-document\n key=\"panel-dev-mqtt-payload\"\n data=\"{{payload}}\"\n >\n </app-localstorage-document>\n\n <div class=\"content\">\n <paper-card heading=\"Publish a packet\">\n <div class=\"card-content\">\n <paper-input label=\"topic\" value=\"{{topic}}\"></paper-input>\n\n <paper-textarea\n always-float-label\n label=\"Payload (template allowed)\"\n value=\"{{payload}}\"\n ></paper-textarea>\n </div>\n <div class=\"card-actions\">\n <mwc-button on-click=\"_publish\">Publish</mwc-button>\n </div>\n </paper-card>\n </div>\n </app-header-layout>\n "]);_templateObject=function _templateObject(){return data};return data}function _taggedTemplateLiteral(strings,raw){if(!raw){raw=strings.slice(0)}return Object.freeze(Object.defineProperties(strings,{raw:{value:Object.freeze(raw)}}))}function _classCallCheck(instance,Constructor){if(!(instance instanceof Constructor)){throw new TypeError("Cannot call a class as a function")}}function _defineProperties(target,props){for(var i=0,descriptor;i<props.length;i++){descriptor=props[i];descriptor.enumerable=descriptor.enumerable||!1;descriptor.configurable=!0;if("value"in descriptor)descriptor.writable=!0;Object.defineProperty(target,descriptor.key,descriptor)}}function _createClass(Constructor,protoProps,staticProps){if(protoProps)_defineProperties(Constructor.prototype,protoProps);if(staticProps)_defineProperties(Constructor,staticProps);return 
Constructor}function _possibleConstructorReturn(self,call){if(call&&("object"===_typeof(call)||"function"===typeof call)){return call}return _assertThisInitialized(self)}function _assertThisInitialized(self){if(void 0===self){throw new ReferenceError("this hasn't been initialised - super() hasn't been called")}return self}function _getPrototypeOf(o){_getPrototypeOf=Object.setPrototypeOf?Object.getPrototypeOf:function _getPrototypeOf(o){return o.__proto__||Object.getPrototypeOf(o)};return _getPrototypeOf(o)}function _inherits(subClass,superClass){if("function"!==typeof superClass&&null!==superClass){throw new TypeError("Super expression must either be null or a function")}subClass.prototype=Object.create(superClass&&superClass.prototype,{constructor:{value:subClass,writable:!0,configurable:!0}});if(superClass)_setPrototypeOf(subClass,superClass)}function _setPrototypeOf(o,p){_setPrototypeOf=Object.setPrototypeOf||function _setPrototypeOf(o,p){o.__proto__=p;return o};return _setPrototypeOf(o,p)}var HaPanelDevMqtt=function(_PolymerElement){_inherits(HaPanelDevMqtt,_PolymerElement);function HaPanelDevMqtt(){_classCallCheck(this,HaPanelDevMqtt);return _possibleConstructorReturn(this,_getPrototypeOf(HaPanelDevMqtt).apply(this,arguments))}_createClass(HaPanelDevMqtt,[{key:"_publish",value:function _publish(){this.hass.callService("mqtt","publish",{topic:this.topic,payload_template:this.payload})}}],[{key:"template",get:function get(){return Object(_polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_7__.a)(_templateObject())}},{key:"properties",get:function get(){return{hass:Object,narrow:Boolean,showMenu:Boolean,topic:String,payload:String}}}]);return HaPanelDevMqtt}(_polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_8__.a);customElements.define("ha-panel-dev-mqtt",HaPanelDevMqtt)}}]);
//# sourceMappingURL=71e1b56b1dfed1708bd7.chunk.js.map | PypiClean |
/lnhub_rest-0.10.18.tar.gz/lnhub_rest-0.10.18/lamindb-setup/lamindb_setup/_check_instance_setup.py | from lamin_logger import logger
from ._init_instance import reload_schema_modules
from ._silence_loggers import silence_loggers
from .dev._settings_store import current_instance_settings_file
_INSTANCE_NOT_SETUP_WARNING = """\
You haven't yet setup an instance: Please call `ln.setup.init()` or `ln.setup.load()`
"""
def check_instance_setup(from_lamindb: bool = False):
if current_instance_settings_file().exists():
silence_loggers()
try:
# attempt loading the settings file
from .dev._settings_load import load_instance_settings
isettings = load_instance_settings()
from .dev._django import IS_SETUP, setup_django
# this flag should probably be renamed to `from_user`
# it will typically be invoked if lamindb is imported for use
# but users might also import their schema modules first
# and then want lamindb be to be available
if from_lamindb:
                # this guarantees that this is called exactly once
                # prior to django being set up!
if not IS_SETUP:
setup_django(isettings)
reload_schema_modules(isettings)
# only now we can import lamindb
import lamindb as ln
logger.success(
f"Loaded instance: {isettings.identifier} (lamindb"
f" {ln.__version__})"
)
return True
else:
return IS_SETUP
except Exception:
# user will get more detailed traceback once they run the CLI
raise RuntimeError(
"Current instance cannot be reached, close it: `lamin close`\n"
"Alternatively, init or load a connectable instance on the"
" command line: `lamin load <instance>` or `lamin init <...>`"
)
else:
if from_lamindb:
logger.warning(_INSTANCE_NOT_SETUP_WARNING)
return False | PypiClean |
/drfpasswordless-gstr169-1.1.3.tar.gz/drfpasswordless-gstr169-1.1.3/README.md | ![splash-image]
![ci-image]
drfpasswordless is a quick way to integrate ‘passwordless’ auth into
your Django Rest Framework project using a user’s email address or
mobile number only (herein referred to as an alias).
Built to work with DRF’s own TokenAuthentication system, it sends the
user a 6-digit callback token to a given email address or a mobile
number. The user sends the token back and, if it matches, receives an
authentication token (again, provided by Django Rest Framework’s
TokenAuthentication system).
Callback tokens by default expire after 15 minutes.
Example Usage:
==============
```bash
curl -X POST -d "[email protected]" localhost:8000/auth/email/
```
Email to [email protected]:
```
...
<h1>Your login token is 815381.</h1>
...
```
Return Stage
```bash
curl -X POST -d "[email protected]&token=815381" localhost:8000/auth/token/
> HTTP/1.0 200 OK
> {"token":"76be2d9ecfaf5fa4226d722bzdd8a4fff207ed0e”}
```
Requirements
============
- Python (3.7+)
- Django (2.2+)
- Django Rest Framework + AuthToken (3.10+)
- Python-Twilio (Optional, for mobile.)
Install
=======
1. Install drfpasswordless
```
pipenv install drfpasswordless
```
2. Add Django Rest Framework’s Token Authentication to your Django Rest
Framework project.
```python
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES':
('rest_framework.authentication.TokenAuthentication',
)}
INSTALLED_APPS = [
...
'rest_framework',
'rest_framework.authtoken',
'drfpasswordless',
...
]
```
And run
```bash
python manage.py migrate
```
3. Set which types of contact points are allowed for auth in your
Settings.py. The available options are ``EMAIL`` and ``MOBILE``.
```python
PASSWORDLESS_AUTH = {
..
'PASSWORDLESS_AUTH_TYPES': ['EMAIL', 'MOBILE'],
..
}
```
By default drfpasswordless looks for fields named ``email`` or ``mobile``
on the User model. If an alias provided doesn’t belong to any given user,
a new user is created.
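For illustration, a minimal custom user model carrying both alias fields
might look like the sketch below. The concrete field options (uniqueness,
max length) are assumptions, not requirements — any user model exposing
``email``/``mobile`` fields, or fields renamed via the
``PASSWORDLESS_USER_EMAIL_FIELD_NAME``/``PASSWORDLESS_USER_MOBILE_FIELD_NAME``
settings, will work:

```python
# accounts/models.py -- illustrative sketch only
from django.contrib.auth.models import AbstractUser
from django.db import models


class User(AbstractUser):
    # drfpasswordless looks up these field names by default.
    email = models.EmailField(unique=True, blank=True, null=True)
    mobile = models.CharField(max_length=17, unique=True, blank=True, null=True)

# Remember to point AUTH_USER_MODEL at this model in settings.py.
```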
3a. If you’re using ``email``, see the Configuring Email section
below.
3b. If you’re using ``mobile``, see the Configuring Mobile section
below.
4. Add ``drfpasswordless.urls`` to your urls.py
```python
from django.urls import path, include
urlpatterns = [
..
path('', include('drfpasswordless.urls')),
..
]
```
5. You can now POST to either of the endpoints:
```bash
curl -X POST -d "[email protected]" localhost:8000/auth/email/
// OR
curl -X POST -d "mobile=+15552143912" localhost:8000/auth/mobile/
```
A 6 digit callback token will be sent to the contact point.
6. The client has 15 minutes to use the 6 digit callback token
correctly. If successful, they get an authorization token in exchange
which the client can then use with Django Rest Framework’s
TokenAuthentication scheme.
```bash
curl -X POST -d "[email protected]&token=815381" localhost:8000/auth/token/
> HTTP/1.0 200 OK
> {"token":"76be2d9ecfaf5fa4226d722bzdd8a4fff207ed0e"}
```
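For reference, the same two-step exchange from a Python client might look
like the sketch below — the host, e-mail address, token value, and the
protected endpoint path are all placeholders:

```python
# illustrative client-side sketch only
import requests

BASE = "http://localhost:8000"

# Step 1: ask for a 6-digit callback token to be sent to the alias
requests.post(f"{BASE}/auth/email/", data={"email": "[email protected]"})

# Step 2: exchange the callback token for a DRF auth token
resp = requests.post(
    f"{BASE}/auth/token/",
    data={"email": "[email protected]", "token": "815381"},
)
auth_token = resp.json()["token"]

# Use it like any other TokenAuthentication token
requests.get(
    f"{BASE}/api/some-protected-endpoint/",
    headers={"Authorization": f"Token {auth_token}"},
)
```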
Configuring Emails
------------------
Specify the email address you’d like to send the callback token from
with the ``PASSWORDLESS_EMAIL_NOREPLY_ADDRESS`` setting.
```python
PASSWORDLESS_AUTH = {
..
'PASSWORDLESS_AUTH_TYPES': ['EMAIL',],
    'PASSWORDLESS_EMAIL_NOREPLY_ADDRESS': '[email protected]',
..
}
```
You’ll also need to set up an SMTP server to send emails but for
development you can set up a dummy development smtp server to test
emails. Sent emails will print to the console.
[Read more here.](https://docs.djangoproject.com/en/3.0/topics/email/#console-backend)
```python
# Settings.py
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
```
Configuring Mobile
------------------
You’ll need to have the python twilio module installed
```bash
pipenv install twilio
```
and set the ``TWILIO_ACCOUNT_SID`` and ``TWILIO_AUTH_TOKEN`` environment
variables. These are read from `os.environ`, so make sure you don't put
them in your settings file accidentally.
You’ll also need to specify the number you send the token from with the
``PASSWORDLESS_MOBILE_NOREPLY_NUMBER`` setting.
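A minimal mobile configuration might look like the sketch below (the phone
number is a placeholder):

```python
# settings.py -- illustrative values only
PASSWORDLESS_AUTH = {
    'PASSWORDLESS_AUTH_TYPES': ['MOBILE'],
    'PASSWORDLESS_MOBILE_NOREPLY_NUMBER': '+15550001234',  # your Twilio number
}

# TWILIO_ACCOUNT_SID and TWILIO_AUTH_TOKEN are read from os.environ,
# so export them in the shell/process environment rather than settings.py.
```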
Templates
=========
If you’d like to use a custom email template for your email callback
token, specify your template name with this setting:
```python
PASSWORDLESS_AUTH = {
...
'PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE_NAME': "mytemplate.html"
}
```
The template renders a single variable ``{{ callback_token }}`` which is
the 6 digit callback token being sent.
Contact Point Validation
========================
Endpoints can automatically mark themselves as validated when a user
logs in with a token sent to a specific endpoint. They can also
automatically mark themselves as invalid when a user changes a contact
point.
This is off by default but can be turned on with
``PASSWORDLESS_USER_MARK_EMAIL_VERIFIED`` or
``PASSWORDLESS_USER_MARK_MOBILE_VERIFIED``. By default when these are
enabled they look for the User model fields ``email_verified`` or
``mobile_verified``.
You can also use ``auth/verify/email/`` or ``/auth/verify/mobile/`` which will
automatically send a token to the endpoint attached to the current
``request.user``'s email or mobile if available.
You can then send that token to ``/auth/verify/`` which will double-check
that the endpoint belongs to the request.user and mark the alias as verified.
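As a sketch, switching on email verification could look like this — the
``email_verified`` boolean field on your user model is an assumption you
have to provide yourself (or rename via the setting shown):

```python
# settings.py -- sketch
PASSWORDLESS_AUTH = {
    'PASSWORDLESS_AUTH_TYPES': ['EMAIL'],
    'PASSWORDLESS_USER_MARK_EMAIL_VERIFIED': True,
    # default shown for clarity; must match a BooleanField on the user model
    'PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME': 'email_verified',
}
```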
Registration
============
All unrecognized emails and mobile numbers create new accounts by
default. New accounts are automatically set with
``set_unusable_password()`` but it’s recommended that admins have some
type of password.
This can be turned off with the ``PASSWORDLESS_REGISTER_NEW_USERS``
setting.
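For example, a sketch of a configuration that only lets existing accounts
log in:

```python
# settings.py -- disable implicit sign-up
PASSWORDLESS_AUTH = {
    'PASSWORDLESS_AUTH_TYPES': ['EMAIL'],
    'PASSWORDLESS_REGISTER_NEW_USERS': False,
}
```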
Other Settings
==============
Here’s a full list of the configurable defaults.
```python
DEFAULTS = {
# Allowed auth types, can be EMAIL, MOBILE, or both.
'PASSWORDLESS_AUTH_TYPES': ['EMAIL'],
# URL Prefix for Authentication Endpoints
'PASSWORDLESS_AUTH_PREFIX': 'auth/',
# URL Prefix for Verification Endpoints
'PASSWORDLESS_VERIFY_PREFIX': 'auth/verify/',
# Amount of time that tokens last, in seconds
'PASSWORDLESS_TOKEN_EXPIRE_TIME': 15 * 60,
# The user's email field name
'PASSWORDLESS_USER_EMAIL_FIELD_NAME': 'email',
# The user's mobile field name
'PASSWORDLESS_USER_MOBILE_FIELD_NAME': 'mobile',
# Marks itself as verified the first time a user completes auth via token.
# Automatically unmarks itself if email is changed.
'PASSWORDLESS_USER_MARK_EMAIL_VERIFIED': False,
'PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME': 'email_verified',
# Marks itself as verified the first time a user completes auth via token.
# Automatically unmarks itself if mobile number is changed.
'PASSWORDLESS_USER_MARK_MOBILE_VERIFIED': False,
'PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME': 'mobile_verified',
# The email the callback token is sent from
'PASSWORDLESS_EMAIL_NOREPLY_ADDRESS': None,
# The email subject
'PASSWORDLESS_EMAIL_SUBJECT': "Your Login Token",
# A plaintext email message overridden by the html message. Takes one string.
'PASSWORDLESS_EMAIL_PLAINTEXT_MESSAGE': "Enter this token to sign in: %s",
# The email template name.
'PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE_NAME': "passwordless_default_token_email.html",
# Your twilio number that sends the callback tokens.
'PASSWORDLESS_MOBILE_NOREPLY_NUMBER': None,
# The message sent to mobile users logging in. Takes one string.
'PASSWORDLESS_MOBILE_MESSAGE': "Use this code to log in: %s",
# Registers previously unseen aliases as new users.
'PASSWORDLESS_REGISTER_NEW_USERS': True,
# Suppresses actual SMS for testing
'PASSWORDLESS_TEST_SUPPRESSION': False,
# Context Processors for Email Template
'PASSWORDLESS_CONTEXT_PROCESSORS': [],
# The verification email subject
'PASSWORDLESS_EMAIL_VERIFICATION_SUBJECT': "Your Verification Token",
# A plaintext verification email message overridden by the html message. Takes one string.
'PASSWORDLESS_EMAIL_VERIFICATION_PLAINTEXT_MESSAGE': "Enter this verification code: %s",
# The verification email template name.
'PASSWORDLESS_EMAIL_VERIFICATION_TOKEN_HTML_TEMPLATE_NAME': "passwordless_default_verification_token_email.html",
# The message sent to mobile users logging in. Takes one string.
'PASSWORDLESS_MOBILE_VERIFICATION_MESSAGE': "Enter this verification code: %s",
# Automatically send verification email or sms when a user changes their alias.
'PASSWORDLESS_AUTO_SEND_VERIFICATION_TOKEN': False,
    # What function is called to construct an authentication token when
# exchanging a passwordless token for a real user auth token. This function
# should take a user and return a tuple of two values. The first value is
    # the token itself, the second is a boolean value representing whether
# the token was newly created.
'PASSWORDLESS_AUTH_TOKEN_CREATOR': 'drfpasswordless.utils.create_authentication_token',
# What function is called to construct a serializer for drf tokens when
# exchanging a passwordless token for a real user auth token.
'PASSWORDLESS_AUTH_TOKEN_SERIALIZER': 'drfpasswordless.serializers.TokenResponseSerializer',
# A dictionary of demo user's primary key mapped to their static pin
'PASSWORDLESS_DEMO_USERS': {},
# configurable function for sending email
'PASSWORDLESS_EMAIL_CALLBACK': 'drfpasswordless.utils.send_email_with_callback_token',
# configurable function for sending sms
'PASSWORDLESS_SMS_CALLBACK': 'drfpasswordless.utils.send_sms_with_callback_token',
# Token Generation Retry Count
'PASSWORDLESS_TOKEN_GENERATION_ATTEMPTS': 3
}
```
To Do
----
- github.io project page
- Add MkDocs - http://www.mkdocs.org/
- Support non-US mobile numbers
- Custom URLs
- Change bad settings to 500's
Pull requests are encouraged!
Donations & Support
----
If you found drfpasswordless useful, consider giving me a follow
[@localghost](https://www.twitter.com/aaronykng) on Twitter and
[@hi.aaron](https://www.instagram.com/hi.aaron) on Instagram.
If you'd like to go a step further and are using drfpasswordless in your startup
or business, consider a donation:
- BTC: `3FzSFeKVABL5Adh9Egoxh77gHbtg2kcTPk`
- ETH: `0x13412a79F06A83B107A8833dB209BECbcb700f24`
- Square Cash: `$aaron`
License
-------
The MIT License (MIT)
Copyright (c) 2020 Aaron Ng
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
[ci-image]: https://travis-ci.org/aaronn/django-rest-framework-passwordless.svg?branch=master
[splash-image]: https://i.imgur.com/OdDHAIf.png
| PypiClean |
/thoraxe-0.8.0.tar.gz/thoraxe-0.8.0/docs/source/index.rst | ThorAxe's documentation!
========================
*Assessing Conservation of Alternative Splicing with Evolutionary Splicing Graphs*
Check out our **`YouTube tutorial`_** to see how you can easily use *ThorAxe* and
visualise the results! If you want to know more, you can read our
`Genome Research`_ publication.
If you find *ThorAxe* useful in your research, **please cite us!**
Zea DJ, Laskina S, Baudin A, Richard H, Laine E. **Assessing conservation of
alternative splicing with evolutionary splicing graphs.** *Genome Research.*
2021 Jun 15:gr-274696. doi: 10.1101/gr.274696.120.
.. note::
*ThorAxe* can be run on the **`Ases`_ web server**, allowing for a convenient
display of its output. *Ases* makes it easy to run *ThorAxe* and explore and
share its results with others.
.. toctree::
:maxdepth: 4
installation
introduction
Command line programs <programs/programs>
output
Module API <api/modules>
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. _YouTube tutorial: https://www.youtube.com/watch?v=Z96985kX-uY
.. _Genome Research: https://genome.cshlp.org/content/31/8/1462
.. _Ases: http://www.lcqb.upmc.fr/Ases | PypiClean |
/nanome-matryx-0.0.1.tar.gz/nanome-matryx-0.0.1/nanome_matryx/menus/SettingsMenu.py | import os
from functools import partial
import nanome
from nanome.util import Logs
class SettingsMenu:
def __init__(self, plugin, on_close):
self._plugin = plugin
self._network = 'ropsten'
self._gas_price = '4'
self._menu = nanome.ui.Menu.io.from_json('menus/json/settings.json')
self._menu.register_closed_callback(on_close)
self._button_confirm = self._menu.root.find_node('Confirm').get_content()
self._button_confirm.register_pressed_callback(on_close)
self._gas_price_value = self._menu.root.find_node('Gas Price Value').get_content()
self._slider_gas_price = self._menu.root.find_node('Gas Slider').get_content()
self._slider_gas_price.register_changed_callback(self.update_gas)
self._slider_gas_price.register_released_callback(self.update_gas)
self._slider_gas_price.current_value = 4
self._gas_price_value.text_value = str(4)
self._ln_network_buttons = self._menu.root.find_node('Network Buttons')
self._prefab_network_item = self._menu.root.find_node('Network Button Prefab')
self._buttons_network = []
for network in ['mainnet', 'ropsten']:
item = self._prefab_network_item.clone()
item.network = network
item.set_size_fixed(0.1)
self._buttons_network.append(item)
btn = item.get_content()
btn.set_all_text(network)
icon = item.find_node('Check Icon')
icon.add_new_image(os.path.join(os.path.dirname(__file__), '..', 'images', 'checkmark.png'))
icon.enabled = network == 'mainnet'
btn.register_pressed_callback(partial(self.select_network, network))
self._ln_network_buttons.add_child(item)
def show_menu(self, button):
self._plugin.open_menu(self._menu)
def update_gas(self, slider):
gas = int(self._slider_gas_price.current_value)
self._gas_price = str(gas)
self._gas_price_value.text_value = str(gas)
self._plugin.refresh_menu()
def select_network(self, network, button):
self._plugin.set_to_refresh()
self._network = network
self._plugin._web3.set_network(network)
for item in self._buttons_network:
icon = item.find_node('Check Icon')
icon.enabled = item.network == network
self._plugin.refresh_menu() | PypiClean |
/tensorflow_macos-2.14.0rc0-cp311-cp311-macosx_12_0_arm64.whl/tensorflow/python/keras/layers/pooling.py | """Pooling layers."""
import functools
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import backend
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.utils import conv_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
class Pooling1D(Layer):
"""Pooling layer for arbitrary pooling functions, for 1D inputs.
This class only exists for code reuse. It will never be an exposed API.
Args:
pool_function: The pooling function to apply, e.g. `tf.nn.max_pool2d`.
pool_size: An integer or tuple/list of a single integer,
representing the size of the pooling window.
strides: An integer or tuple/list of a single integer, specifying the
strides of the pooling operation.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, steps, features)` while `channels_first`
corresponds to inputs with shape
`(batch, features, steps)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_function, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(Pooling1D, self).__init__(name=name, **kwargs)
if data_format is None:
data_format = backend.image_data_format()
if strides is None:
strides = pool_size
self.pool_function = pool_function
self.pool_size = conv_utils.normalize_tuple(pool_size, 1, 'pool_size')
self.strides = conv_utils.normalize_tuple(strides, 1, 'strides')
self.padding = conv_utils.normalize_padding(padding)
self.data_format = conv_utils.normalize_data_format(data_format)
self.input_spec = InputSpec(ndim=3)
def call(self, inputs):
pad_axis = 2 if self.data_format == 'channels_last' else 3
inputs = array_ops.expand_dims(inputs, pad_axis)
outputs = self.pool_function(
inputs,
self.pool_size + (1,),
strides=self.strides + (1,),
padding=self.padding,
data_format=self.data_format)
return array_ops.squeeze(outputs, pad_axis)
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
steps = input_shape[2]
features = input_shape[1]
else:
steps = input_shape[1]
features = input_shape[2]
length = conv_utils.conv_output_length(steps,
self.pool_size[0],
self.padding,
self.strides[0])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape([input_shape[0], features, length])
else:
return tensor_shape.TensorShape([input_shape[0], length, features])
def get_config(self):
config = {
'strides': self.strides,
'pool_size': self.pool_size,
'padding': self.padding,
'data_format': self.data_format,
}
base_config = super(Pooling1D, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class MaxPooling1D(Pooling1D):
"""Max pooling operation for 1D temporal data.
Downsamples the input representation by taking the maximum value over a
spatial window of size `pool_size`. The window is shifted by `strides`. The
resulting output, when using the `"valid"` padding option, has a shape of:
`output_shape = (input_shape - pool_size + 1) / strides`
The resulting output shape when using the `"same"` padding option is:
`output_shape = input_shape / strides`
For example, for `strides=1` and `padding="valid"`:
>>> x = tf.constant([1., 2., 3., 4., 5.])
>>> x = tf.reshape(x, [1, 5, 1])
>>> max_pool_1d = tf.keras.layers.MaxPooling1D(pool_size=2,
... strides=1, padding='valid')
>>> max_pool_1d(x)
<tf.Tensor: shape=(1, 4, 1), dtype=float32, numpy=
array([[[2.],
[3.],
[4.],
[5.]]], dtype=float32)>
For example, for `strides=2` and `padding="valid"`:
>>> x = tf.constant([1., 2., 3., 4., 5.])
>>> x = tf.reshape(x, [1, 5, 1])
>>> max_pool_1d = tf.keras.layers.MaxPooling1D(pool_size=2,
... strides=2, padding='valid')
>>> max_pool_1d(x)
<tf.Tensor: shape=(1, 2, 1), dtype=float32, numpy=
array([[[2.],
[4.]]], dtype=float32)>
For example, for `strides=1` and `padding="same"`:
>>> x = tf.constant([1., 2., 3., 4., 5.])
>>> x = tf.reshape(x, [1, 5, 1])
>>> max_pool_1d = tf.keras.layers.MaxPooling1D(pool_size=2,
... strides=1, padding='same')
>>> max_pool_1d(x)
<tf.Tensor: shape=(1, 5, 1), dtype=float32, numpy=
array([[[2.],
[3.],
[4.],
[5.],
[5.]]], dtype=float32)>
Args:
pool_size: Integer, size of the max pooling window.
strides: Integer, or None. Specifies how much the pooling window moves
for each pooling step.
If None, it will default to `pool_size`.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, steps, features)` while `channels_first`
corresponds to inputs with shape
`(batch, features, steps)`.
Input shape:
- If `data_format='channels_last'`:
3D tensor with shape `(batch_size, steps, features)`.
- If `data_format='channels_first'`:
3D tensor with shape `(batch_size, features, steps)`.
Output shape:
- If `data_format='channels_last'`:
3D tensor with shape `(batch_size, downsampled_steps, features)`.
- If `data_format='channels_first'`:
3D tensor with shape `(batch_size, features, downsampled_steps)`.
"""
def __init__(self, pool_size=2, strides=None,
padding='valid', data_format='channels_last', **kwargs):
super(MaxPooling1D, self).__init__(
functools.partial(backend.pool2d, pool_mode='max'),
pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
**kwargs)
class AveragePooling1D(Pooling1D):
"""Average pooling for temporal data.
Downsamples the input representation by taking the average value over the
window defined by `pool_size`. The window is shifted by `strides`. The
resulting output when using "valid" padding option has a shape of:
`output_shape = (input_shape - pool_size + 1) / strides`
The resulting output shape when using the "same" padding option is:
`output_shape = input_shape / strides`
For example, for strides=1 and padding="valid":
>>> x = tf.constant([1., 2., 3., 4., 5.])
>>> x = tf.reshape(x, [1, 5, 1])
>>> x
<tf.Tensor: shape=(1, 5, 1), dtype=float32, numpy=
array([[[1.],
[2.],
[3.],
[4.],
[5.]]], dtype=float32)>
>>> avg_pool_1d = tf.keras.layers.AveragePooling1D(pool_size=2,
... strides=1, padding='valid')
>>> avg_pool_1d(x)
<tf.Tensor: shape=(1, 4, 1), dtype=float32, numpy=
array([[[1.5],
[2.5],
[3.5],
[4.5]]], dtype=float32)>
For example, for strides=2 and padding="valid":
>>> x = tf.constant([1., 2., 3., 4., 5.])
>>> x = tf.reshape(x, [1, 5, 1])
>>> x
<tf.Tensor: shape=(1, 5, 1), dtype=float32, numpy=
array([[[1.],
[2.],
[3.],
[4.],
[5.]]], dtype=float32)>
>>> avg_pool_1d = tf.keras.layers.AveragePooling1D(pool_size=2,
... strides=2, padding='valid')
>>> avg_pool_1d(x)
<tf.Tensor: shape=(1, 2, 1), dtype=float32, numpy=
array([[[1.5],
[3.5]]], dtype=float32)>
For example, for strides=1 and padding="same":
>>> x = tf.constant([1., 2., 3., 4., 5.])
>>> x = tf.reshape(x, [1, 5, 1])
>>> x
<tf.Tensor: shape=(1, 5, 1), dtype=float32, numpy=
array([[[1.],
[2.],
[3.],
[4.],
[5.]]], dtype=float32)>
>>> avg_pool_1d = tf.keras.layers.AveragePooling1D(pool_size=2,
... strides=1, padding='same')
>>> avg_pool_1d(x)
<tf.Tensor: shape=(1, 5, 1), dtype=float32, numpy=
array([[[1.5],
[2.5],
[3.5],
[4.5],
[5.]]], dtype=float32)>
Args:
pool_size: Integer, size of the average pooling windows.
strides: Integer, or None. Factor by which to downscale.
E.g. 2 will halve the input.
If None, it will default to `pool_size`.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, steps, features)` while `channels_first`
corresponds to inputs with shape
`(batch, features, steps)`.
Input shape:
- If `data_format='channels_last'`:
3D tensor with shape `(batch_size, steps, features)`.
- If `data_format='channels_first'`:
3D tensor with shape `(batch_size, features, steps)`.
Output shape:
- If `data_format='channels_last'`:
3D tensor with shape `(batch_size, downsampled_steps, features)`.
- If `data_format='channels_first'`:
3D tensor with shape `(batch_size, features, downsampled_steps)`.
"""
def __init__(self, pool_size=2, strides=None,
padding='valid', data_format='channels_last', **kwargs):
super(AveragePooling1D, self).__init__(
functools.partial(backend.pool2d, pool_mode='avg'),
pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
**kwargs)
class Pooling2D(Layer):
"""Pooling layer for arbitrary pooling functions, for 2D inputs (e.g. images).
This class only exists for code reuse. It will never be an exposed API.
Args:
pool_function: The pooling function to apply, e.g. `tf.nn.max_pool2d`.
pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_function, pool_size, strides,
padding='valid', data_format=None,
name=None, **kwargs):
super(Pooling2D, self).__init__(name=name, **kwargs)
if data_format is None:
data_format = backend.image_data_format()
if strides is None:
strides = pool_size
self.pool_function = pool_function
self.pool_size = conv_utils.normalize_tuple(pool_size, 2, 'pool_size')
self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
self.padding = conv_utils.normalize_padding(padding)
self.data_format = conv_utils.normalize_data_format(data_format)
self.input_spec = InputSpec(ndim=4)
def call(self, inputs):
if self.data_format == 'channels_last':
pool_shape = (1,) + self.pool_size + (1,)
strides = (1,) + self.strides + (1,)
else:
pool_shape = (1, 1) + self.pool_size
strides = (1, 1) + self.strides
outputs = self.pool_function(
inputs,
ksize=pool_shape,
strides=strides,
padding=self.padding.upper(),
data_format=conv_utils.convert_data_format(self.data_format, 4))
return outputs
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
rows = input_shape[2]
cols = input_shape[3]
else:
rows = input_shape[1]
cols = input_shape[2]
rows = conv_utils.conv_output_length(rows, self.pool_size[0], self.padding,
self.strides[0])
cols = conv_utils.conv_output_length(cols, self.pool_size[1], self.padding,
self.strides[1])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape(
[input_shape[0], input_shape[1], rows, cols])
else:
return tensor_shape.TensorShape(
[input_shape[0], rows, cols, input_shape[3]])
def get_config(self):
config = {
'pool_size': self.pool_size,
'padding': self.padding,
'strides': self.strides,
'data_format': self.data_format
}
base_config = super(Pooling2D, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class MaxPooling2D(Pooling2D):
"""Max pooling operation for 2D spatial data.
Downsamples the input along its spatial dimensions (height and width)
by taking the maximum value over an input window
(of size defined by `pool_size`) for each channel of the input.
The window is shifted by `strides` along each dimension.
The resulting output,
when using the `"valid"` padding option, has a spatial shape
(number of rows or columns) of:
`output_shape = math.floor((input_shape - pool_size) / strides) + 1`
(when `input_shape >= pool_size`)
The resulting output shape when using the `"same"` padding option is:
`output_shape = math.floor((input_shape - 1) / strides) + 1`
For example, for `strides=(1, 1)` and `padding="valid"`:
>>> x = tf.constant([[1., 2., 3.],
... [4., 5., 6.],
... [7., 8., 9.]])
>>> x = tf.reshape(x, [1, 3, 3, 1])
>>> max_pool_2d = tf.keras.layers.MaxPooling2D(pool_size=(2, 2),
... strides=(1, 1), padding='valid')
>>> max_pool_2d(x)
<tf.Tensor: shape=(1, 2, 2, 1), dtype=float32, numpy=
array([[[[5.],
[6.]],
[[8.],
[9.]]]], dtype=float32)>
For example, for `strides=(2, 2)` and `padding="valid"`:
>>> x = tf.constant([[1., 2., 3., 4.],
... [5., 6., 7., 8.],
... [9., 10., 11., 12.]])
>>> x = tf.reshape(x, [1, 3, 4, 1])
>>> max_pool_2d = tf.keras.layers.MaxPooling2D(pool_size=(2, 2),
... strides=(2, 2), padding='valid')
>>> max_pool_2d(x)
<tf.Tensor: shape=(1, 1, 2, 1), dtype=float32, numpy=
array([[[[6.],
[8.]]]], dtype=float32)>
Usage Example:
>>> input_image = tf.constant([[[[1.], [1.], [2.], [4.]],
... [[2.], [2.], [3.], [2.]],
... [[4.], [1.], [1.], [1.]],
... [[2.], [2.], [1.], [4.]]]])
>>> output = tf.constant([[[[1], [0]],
... [[0], [1]]]])
>>> model = tf.keras.models.Sequential()
>>> model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2),
... input_shape=(4, 4, 1)))
>>> model.compile('adam', 'mean_squared_error')
>>> model.predict(input_image, steps=1)
array([[[[2.],
[4.]],
[[4.],
[4.]]]], dtype=float32)
For example, for stride=(1, 1) and padding="same":
>>> x = tf.constant([[1., 2., 3.],
... [4., 5., 6.],
... [7., 8., 9.]])
>>> x = tf.reshape(x, [1, 3, 3, 1])
>>> max_pool_2d = tf.keras.layers.MaxPooling2D(pool_size=(2, 2),
... strides=(1, 1), padding='same')
>>> max_pool_2d(x)
<tf.Tensor: shape=(1, 3, 3, 1), dtype=float32, numpy=
array([[[[5.],
[6.],
[6.]],
[[8.],
[9.],
[9.]],
[[8.],
[9.],
[9.]]]], dtype=float32)>
Args:
pool_size: integer or tuple of 2 integers,
window size over which to take the maximum.
`(2, 2)` will take the max value over a 2x2 pooling window.
If only one integer is specified, the same window length
will be used for both dimensions.
strides: Integer, tuple of 2 integers, or None.
Strides values. Specifies how far the pooling window moves
for each pooling step. If None, it will default to `pool_size`.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, height, width)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
Input shape:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, rows, cols, channels)`.
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, rows, cols)`.
Output shape:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, pooled_rows, pooled_cols, channels)`.
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, pooled_rows, pooled_cols)`.
Returns:
A tensor of rank 4 representing the maximum pooled values. See above for
output shape.
"""
def __init__(self,
pool_size=(2, 2),
strides=None,
padding='valid',
data_format=None,
**kwargs):
super(MaxPooling2D, self).__init__(
nn.max_pool,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, **kwargs)
class AveragePooling2D(Pooling2D):
"""Average pooling operation for spatial data.
Downsamples the input along its spatial dimensions (height and width)
by taking the average value over an input window
(of size defined by `pool_size`) for each channel of the input.
The window is shifted by `strides` along each dimension.
The resulting output when using `"valid"` padding option has a shape
(number of rows or columns) of:
`output_shape = math.floor((input_shape - pool_size) / strides) + 1`
(when `input_shape >= pool_size`)
The resulting output shape when using the `"same"` padding option is:
`output_shape = math.floor((input_shape - 1) / strides) + 1`
For example, for `strides=(1, 1)` and `padding="valid"`:
>>> x = tf.constant([[1., 2., 3.],
... [4., 5., 6.],
... [7., 8., 9.]])
>>> x = tf.reshape(x, [1, 3, 3, 1])
>>> avg_pool_2d = tf.keras.layers.AveragePooling2D(pool_size=(2, 2),
... strides=(1, 1), padding='valid')
>>> avg_pool_2d(x)
<tf.Tensor: shape=(1, 2, 2, 1), dtype=float32, numpy=
array([[[[3.],
[4.]],
[[6.],
[7.]]]], dtype=float32)>
For example, for `stride=(2, 2)` and `padding="valid"`:
>>> x = tf.constant([[1., 2., 3., 4.],
... [5., 6., 7., 8.],
... [9., 10., 11., 12.]])
>>> x = tf.reshape(x, [1, 3, 4, 1])
>>> avg_pool_2d = tf.keras.layers.AveragePooling2D(pool_size=(2, 2),
... strides=(2, 2), padding='valid')
>>> avg_pool_2d(x)
<tf.Tensor: shape=(1, 1, 2, 1), dtype=float32, numpy=
array([[[[3.5],
[5.5]]]], dtype=float32)>
For example, for `strides=(1, 1)` and `padding="same"`:
>>> x = tf.constant([[1., 2., 3.],
... [4., 5., 6.],
... [7., 8., 9.]])
>>> x = tf.reshape(x, [1, 3, 3, 1])
>>> avg_pool_2d = tf.keras.layers.AveragePooling2D(pool_size=(2, 2),
... strides=(1, 1), padding='same')
>>> avg_pool_2d(x)
<tf.Tensor: shape=(1, 3, 3, 1), dtype=float32, numpy=
array([[[[3.],
[4.],
[4.5]],
[[6.],
[7.],
[7.5]],
[[7.5],
[8.5],
[9.]]]], dtype=float32)>
Args:
pool_size: integer or tuple of 2 integers,
factors by which to downscale (vertical, horizontal).
`(2, 2)` will halve the input in both spatial dimension.
If only one integer is specified, the same window length
will be used for both dimensions.
strides: Integer, tuple of 2 integers, or None.
Strides values.
If None, it will default to `pool_size`.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, height, width)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
Input shape:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, rows, cols, channels)`.
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, rows, cols)`.
Output shape:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, pooled_rows, pooled_cols, channels)`.
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, pooled_rows, pooled_cols)`.
"""
def __init__(self,
pool_size=(2, 2),
strides=None,
padding='valid',
data_format=None,
**kwargs):
super(AveragePooling2D, self).__init__(
nn.avg_pool,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, **kwargs)
class Pooling3D(Layer):
"""Pooling layer for arbitrary pooling functions, for 3D inputs.
This class only exists for code reuse. It will never be an exposed API.
Args:
pool_function: The pooling function to apply, e.g. `tf.nn.max_pool2d`.
pool_size: An integer or tuple/list of 3 integers:
(pool_depth, pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)`
while `channels_first` corresponds to
inputs with shape `(batch, channels, depth, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_function, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(Pooling3D, self).__init__(name=name, **kwargs)
if data_format is None:
data_format = backend.image_data_format()
if strides is None:
strides = pool_size
self.pool_function = pool_function
self.pool_size = conv_utils.normalize_tuple(pool_size, 3, 'pool_size')
self.strides = conv_utils.normalize_tuple(strides, 3, 'strides')
self.padding = conv_utils.normalize_padding(padding)
self.data_format = conv_utils.normalize_data_format(data_format)
self.input_spec = InputSpec(ndim=5)
def call(self, inputs):
pool_shape = (1,) + self.pool_size + (1,)
strides = (1,) + self.strides + (1,)
if self.data_format == 'channels_first':
# TF does not support `channels_first` with 3D pooling operations,
# so we must handle this case manually.
# TODO(fchollet): remove this when TF pooling is feature-complete.
inputs = array_ops.transpose(inputs, (0, 2, 3, 4, 1))
outputs = self.pool_function(
inputs,
ksize=pool_shape,
strides=strides,
padding=self.padding.upper())
if self.data_format == 'channels_first':
outputs = array_ops.transpose(outputs, (0, 4, 1, 2, 3))
return outputs
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
len_dim1 = input_shape[2]
len_dim2 = input_shape[3]
len_dim3 = input_shape[4]
else:
len_dim1 = input_shape[1]
len_dim2 = input_shape[2]
len_dim3 = input_shape[3]
len_dim1 = conv_utils.conv_output_length(len_dim1, self.pool_size[0],
self.padding, self.strides[0])
len_dim2 = conv_utils.conv_output_length(len_dim2, self.pool_size[1],
self.padding, self.strides[1])
len_dim3 = conv_utils.conv_output_length(len_dim3, self.pool_size[2],
self.padding, self.strides[2])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape(
[input_shape[0], input_shape[1], len_dim1, len_dim2, len_dim3])
else:
return tensor_shape.TensorShape(
[input_shape[0], len_dim1, len_dim2, len_dim3, input_shape[4]])
def get_config(self):
config = {
'pool_size': self.pool_size,
'padding': self.padding,
'strides': self.strides,
'data_format': self.data_format
}
base_config = super(Pooling3D, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class MaxPooling3D(Pooling3D):
"""Max pooling operation for 3D data (spatial or spatio-temporal).
Downsamples the input along its spatial dimensions (depth, height, and width)
by taking the maximum value over an input window
(of size defined by `pool_size`) for each channel of the input.
The window is shifted by `strides` along each dimension.
Args:
pool_size: Tuple of 3 integers,
factors by which to downscale (dim1, dim2, dim3).
`(2, 2, 2)` will halve the size of the 3D input in each dimension.
strides: tuple of 3 integers, or None. Strides values.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
while `channels_first` corresponds to inputs with shape
`(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
Input shape:
- If `data_format='channels_last'`:
5D tensor with shape:
`(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
- If `data_format='channels_first'`:
5D tensor with shape:
`(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`
Output shape:
- If `data_format='channels_last'`:
5D tensor with shape:
`(batch_size, pooled_dim1, pooled_dim2, pooled_dim3, channels)`
- If `data_format='channels_first'`:
5D tensor with shape:
`(batch_size, channels, pooled_dim1, pooled_dim2, pooled_dim3)`
Example:
```python
depth = 30
height = 30
width = 30
input_channels = 3
inputs = tf.keras.Input(shape=(depth, height, width, input_channels))
layer = tf.keras.layers.MaxPooling3D(pool_size=3)
outputs = layer(inputs) # Shape: (batch_size, 10, 10, 10, 3)
```
"""
def __init__(self,
pool_size=(2, 2, 2),
strides=None,
padding='valid',
data_format=None,
**kwargs):
super(MaxPooling3D, self).__init__(
nn.max_pool3d,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, **kwargs)
class AveragePooling3D(Pooling3D):
"""Average pooling operation for 3D data (spatial or spatio-temporal).
Downsamples the input along its spatial dimensions (depth, height, and width)
by taking the average value over an input window
(of size defined by `pool_size`) for each channel of the input.
The window is shifted by `strides` along each dimension.
Args:
pool_size: tuple of 3 integers,
factors by which to downscale (dim1, dim2, dim3).
`(2, 2, 2)` will halve the size of the 3D input in each dimension.
strides: tuple of 3 integers, or None. Strides values.
padding: One of `"valid"` or `"same"` (case-insensitive).
`"valid"` means no padding. `"same"` results in padding evenly to
the left/right or up/down of the input such that output has the same
height/width dimension as the input.
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
while `channels_first` corresponds to inputs with shape
`(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
Input shape:
- If `data_format='channels_last'`:
5D tensor with shape:
`(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
- If `data_format='channels_first'`:
5D tensor with shape:
`(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`
Output shape:
- If `data_format='channels_last'`:
5D tensor with shape:
`(batch_size, pooled_dim1, pooled_dim2, pooled_dim3, channels)`
- If `data_format='channels_first'`:
5D tensor with shape:
`(batch_size, channels, pooled_dim1, pooled_dim2, pooled_dim3)`
Example:
```python
depth = 30
height = 30
width = 30
input_channels = 3
inputs = tf.keras.Input(shape=(depth, height, width, input_channels))
layer = tf.keras.layers.AveragePooling3D(pool_size=3)
outputs = layer(inputs) # Shape: (batch_size, 10, 10, 10, 3)
```
"""
def __init__(self,
pool_size=(2, 2, 2),
strides=None,
padding='valid',
data_format=None,
**kwargs):
super(AveragePooling3D, self).__init__(
nn.avg_pool3d,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, **kwargs)
class GlobalPooling1D(Layer):
"""Abstract class for different global pooling 1D layers."""
def __init__(self, data_format='channels_last', keepdims=False, **kwargs):
super(GlobalPooling1D, self).__init__(**kwargs)
self.input_spec = InputSpec(ndim=3)
self.data_format = conv_utils.normalize_data_format(data_format)
self.keepdims = keepdims
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
if self.keepdims:
return tensor_shape.TensorShape([input_shape[0], input_shape[1], 1])
else:
return tensor_shape.TensorShape([input_shape[0], input_shape[1]])
else:
if self.keepdims:
return tensor_shape.TensorShape([input_shape[0], 1, input_shape[2]])
else:
return tensor_shape.TensorShape([input_shape[0], input_shape[2]])
def call(self, inputs):
raise NotImplementedError
def get_config(self):
config = {'data_format': self.data_format, 'keepdims': self.keepdims}
base_config = super(GlobalPooling1D, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class GlobalAveragePooling1D(GlobalPooling1D):
"""Global average pooling operation for temporal data.
Examples:
>>> input_shape = (2, 3, 4)
>>> x = tf.random.normal(input_shape)
>>> y = tf.keras.layers.GlobalAveragePooling1D()(x)
>>> print(y.shape)
(2, 4)
Args:
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, steps, features)` while `channels_first`
corresponds to inputs with shape
`(batch, features, steps)`.
keepdims: A boolean, whether to keep the temporal dimension or not.
If `keepdims` is `False` (default), the rank of the tensor is reduced
for spatial dimensions.
If `keepdims` is `True`, the temporal dimension is retained with
length 1.
The behavior is the same as for `tf.reduce_mean` or `np.mean`.
Call arguments:
inputs: A 3D tensor.
mask: Binary tensor of shape `(batch_size, steps)` indicating whether
a given step should be masked (excluded from the average).
Input shape:
- If `data_format='channels_last'`:
3D tensor with shape:
`(batch_size, steps, features)`
- If `data_format='channels_first'`:
3D tensor with shape:
`(batch_size, features, steps)`
Output shape:
- If `keepdims`=False:
2D tensor with shape `(batch_size, features)`.
- If `keepdims`=True:
- If `data_format='channels_last'`:
3D tensor with shape `(batch_size, 1, features)`
- If `data_format='channels_first'`:
3D tensor with shape `(batch_size, features, 1)`
"""
def __init__(self, data_format='channels_last', **kwargs):
super(GlobalAveragePooling1D, self).__init__(data_format=data_format,
**kwargs)
self.supports_masking = True
def call(self, inputs, mask=None):
steps_axis = 1 if self.data_format == 'channels_last' else 2
if mask is not None:
mask = math_ops.cast(mask, inputs[0].dtype)
mask = array_ops.expand_dims(
mask, 2 if self.data_format == 'channels_last' else 1)
inputs *= mask
return backend.sum(
inputs, axis=steps_axis,
keepdims=self.keepdims) / math_ops.reduce_sum(
mask, axis=steps_axis, keepdims=self.keepdims)
else:
return backend.mean(inputs, axis=steps_axis, keepdims=self.keepdims)
def compute_mask(self, inputs, mask=None):
return None
class GlobalMaxPooling1D(GlobalPooling1D):
"""Global max pooling operation for 1D temporal data.
Downsamples the input representation by taking the maximum value over
the time dimension.
For example:
>>> x = tf.constant([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.]])
>>> x = tf.reshape(x, [3, 3, 1])
>>> x
<tf.Tensor: shape=(3, 3, 1), dtype=float32, numpy=
array([[[1.], [2.], [3.]],
[[4.], [5.], [6.]],
[[7.], [8.], [9.]]], dtype=float32)>
>>> max_pool_1d = tf.keras.layers.GlobalMaxPooling1D()
>>> max_pool_1d(x)
<tf.Tensor: shape=(3, 1), dtype=float32, numpy=
array([[3.],
[6.],
[9.]], dtype=float32)>
Args:
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, steps, features)` while `channels_first`
corresponds to inputs with shape
`(batch, features, steps)`.
keepdims: A boolean, whether to keep the temporal dimension or not.
If `keepdims` is `False` (default), the rank of the tensor is reduced
for spatial dimensions.
If `keepdims` is `True`, the temporal dimension is retained with
length 1.
The behavior is the same as for `tf.reduce_max` or `np.max`.
Input shape:
- If `data_format='channels_last'`:
3D tensor with shape:
`(batch_size, steps, features)`
- If `data_format='channels_first'`:
3D tensor with shape:
`(batch_size, features, steps)`
Output shape:
- If `keepdims`=False:
2D tensor with shape `(batch_size, features)`.
- If `keepdims`=True:
- If `data_format='channels_last'`:
3D tensor with shape `(batch_size, 1, features)`
- If `data_format='channels_first'`:
3D tensor with shape `(batch_size, features, 1)`
"""
def call(self, inputs):
steps_axis = 1 if self.data_format == 'channels_last' else 2
return backend.max(inputs, axis=steps_axis, keepdims=self.keepdims)
class GlobalPooling2D(Layer):
"""Abstract class for different global pooling 2D layers.
"""
def __init__(self, data_format=None, keepdims=False, **kwargs):
super(GlobalPooling2D, self).__init__(**kwargs)
self.data_format = conv_utils.normalize_data_format(data_format)
self.input_spec = InputSpec(ndim=4)
self.keepdims = keepdims
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_last':
if self.keepdims:
return tensor_shape.TensorShape([input_shape[0], 1, 1, input_shape[3]])
else:
return tensor_shape.TensorShape([input_shape[0], input_shape[3]])
else:
if self.keepdims:
return tensor_shape.TensorShape([input_shape[0], input_shape[1], 1, 1])
else:
return tensor_shape.TensorShape([input_shape[0], input_shape[1]])
def call(self, inputs):
raise NotImplementedError
def get_config(self):
config = {'data_format': self.data_format, 'keepdims': self.keepdims}
base_config = super(GlobalPooling2D, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class GlobalAveragePooling2D(GlobalPooling2D):
"""Global average pooling operation for spatial data.
Examples:
>>> input_shape = (2, 4, 5, 3)
>>> x = tf.random.normal(input_shape)
>>> y = tf.keras.layers.GlobalAveragePooling2D()(x)
>>> print(y.shape)
(2, 3)
Args:
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, height, width)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
keepdims: A boolean, whether to keep the spatial dimensions or not.
If `keepdims` is `False` (default), the rank of the tensor is reduced
for spatial dimensions.
If `keepdims` is `True`, the spatial dimensions are retained with
length 1.
The behavior is the same as for `tf.reduce_mean` or `np.mean`.
Input shape:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, rows, cols, channels)`.
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, rows, cols)`.
Output shape:
- If `keepdims`=False:
2D tensor with shape `(batch_size, channels)`.
- If `keepdims`=True:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, 1, 1, channels)`
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, 1, 1)`
"""
def call(self, inputs):
if self.data_format == 'channels_last':
return backend.mean(inputs, axis=[1, 2], keepdims=self.keepdims)
else:
return backend.mean(inputs, axis=[2, 3], keepdims=self.keepdims)
class GlobalMaxPooling2D(GlobalPooling2D):
"""Global max pooling operation for spatial data.
Examples:
>>> input_shape = (2, 4, 5, 3)
>>> x = tf.random.normal(input_shape)
>>> y = tf.keras.layers.GlobalMaxPool2D()(x)
>>> print(y.shape)
(2, 3)
Args:
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, height, width)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
keepdims: A boolean, whether to keep the spatial dimensions or not.
If `keepdims` is `False` (default), the rank of the tensor is reduced
for spatial dimensions.
If `keepdims` is `True`, the spatial dimensions are retained with
length 1.
The behavior is the same as for `tf.reduce_max` or `np.max`.
Input shape:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, rows, cols, channels)`.
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, rows, cols)`.
Output shape:
- If `keepdims`=False:
2D tensor with shape `(batch_size, channels)`.
- If `keepdims`=True:
- If `data_format='channels_last'`:
4D tensor with shape `(batch_size, 1, 1, channels)`
- If `data_format='channels_first'`:
4D tensor with shape `(batch_size, channels, 1, 1)`
"""
def call(self, inputs):
if self.data_format == 'channels_last':
return backend.max(inputs, axis=[1, 2], keepdims=self.keepdims)
else:
return backend.max(inputs, axis=[2, 3], keepdims=self.keepdims)
class GlobalPooling3D(Layer):
"""Abstract class for different global pooling 3D layers."""
def __init__(self, data_format=None, keepdims=False, **kwargs):
super(GlobalPooling3D, self).__init__(**kwargs)
self.data_format = conv_utils.normalize_data_format(data_format)
self.input_spec = InputSpec(ndim=5)
self.keepdims = keepdims
def compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_last':
if self.keepdims:
return tensor_shape.TensorShape(
[input_shape[0], 1, 1, 1, input_shape[4]])
else:
return tensor_shape.TensorShape([input_shape[0], input_shape[4]])
else:
if self.keepdims:
return tensor_shape.TensorShape(
[input_shape[0], input_shape[1], 1, 1, 1])
else:
return tensor_shape.TensorShape([input_shape[0], input_shape[1]])
def call(self, inputs):
raise NotImplementedError
def get_config(self):
config = {'data_format': self.data_format, 'keepdims': self.keepdims}
base_config = super(GlobalPooling3D, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class GlobalAveragePooling3D(GlobalPooling3D):
"""Global Average pooling operation for 3D data.
Args:
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
while `channels_first` corresponds to inputs with shape
`(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
keepdims: A boolean, whether to keep the spatial dimensions or not.
If `keepdims` is `False` (default), the rank of the tensor is reduced
for spatial dimensions.
If `keepdims` is `True`, the spatial dimensions are retained with
length 1.
The behavior is the same as for `tf.reduce_mean` or `np.mean`.
Input shape:
- If `data_format='channels_last'`:
5D tensor with shape:
`(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
- If `data_format='channels_first'`:
5D tensor with shape:
`(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`
Output shape:
- If `keepdims`=False:
2D tensor with shape `(batch_size, channels)`.
- If `keepdims`=True:
- If `data_format='channels_last'`:
5D tensor with shape `(batch_size, 1, 1, 1, channels)`
- If `data_format='channels_first'`:
5D tensor with shape `(batch_size, channels, 1, 1, 1)`
"""
def call(self, inputs):
if self.data_format == 'channels_last':
return backend.mean(inputs, axis=[1, 2, 3], keepdims=self.keepdims)
else:
return backend.mean(inputs, axis=[2, 3, 4], keepdims=self.keepdims)
class GlobalMaxPooling3D(GlobalPooling3D):
"""Global Max pooling operation for 3D data.
Args:
data_format: A string,
one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
while `channels_first` corresponds to inputs with shape
`(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.
It defaults to the `image_data_format` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "channels_last".
keepdims: A boolean, whether to keep the spatial dimensions or not.
If `keepdims` is `False` (default), the rank of the tensor is reduced
for spatial dimensions.
If `keepdims` is `True`, the spatial dimensions are retained with
length 1.
The behavior is the same as for `tf.reduce_max` or `np.max`.
Input shape:
- If `data_format='channels_last'`:
5D tensor with shape:
`(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`
- If `data_format='channels_first'`:
5D tensor with shape:
`(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`
Output shape:
- If `keepdims`=False:
2D tensor with shape `(batch_size, channels)`.
- If `keepdims`=True:
- If `data_format='channels_last'`:
5D tensor with shape `(batch_size, 1, 1, 1, channels)`
- If `data_format='channels_first'`:
5D tensor with shape `(batch_size, channels, 1, 1, 1)`
"""
def call(self, inputs):
if self.data_format == 'channels_last':
return backend.max(inputs, axis=[1, 2, 3], keepdims=self.keepdims)
else:
return backend.max(inputs, axis=[2, 3, 4], keepdims=self.keepdims)
# Aliases
AvgPool1D = AveragePooling1D
MaxPool1D = MaxPooling1D
AvgPool2D = AveragePooling2D
MaxPool2D = MaxPooling2D
AvgPool3D = AveragePooling3D
MaxPool3D = MaxPooling3D
GlobalMaxPool1D = GlobalMaxPooling1D
GlobalMaxPool2D = GlobalMaxPooling2D
GlobalMaxPool3D = GlobalMaxPooling3D
GlobalAvgPool1D = GlobalAveragePooling1D
GlobalAvgPool2D = GlobalAveragePooling2D
GlobalAvgPool3D = GlobalAveragePooling3D | PypiClean |
/salt_nornir-0.20.3.tar.gz/salt_nornir-0.20.3/README.md | [![Downloads][pepy-downloads-badge]][pepy-downloads-link]
[![PyPI][pypi-latest-release-badge]][pypi-latest-release-link]
[![PyPI versions][pypi-versions-badge]][pypi-versions-link]
[![GitHub Discussion][github-discussions-badge]][github-discussions-link]
[![Code style: black][black-badge]][black-link]
[![Documentation status][readthedocs-badge]][readthedocs-link]
![logo][logo]
# Salt Nornir
Nornir centered SaltStack modules:
- salt-nornir proxy minion module
- salt-nornir execution module
- salt-nornir state module
- salt-nornir runner module
- salt-nornir Netbox pillar module
Nornir Proxy Minion helps to manage network devices at scale; refer to the
[documentation](https://salt-nornir.readthedocs.io/en/latest/index.html)
for details.
# Architecture
Python and Plugins.
![architecture][architecture]
Nornir Proxy acts as a bridge between SaltStack and a wide set of open
source network automation libraries.
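To make that bridge concrete, a minimal proxy minion pillar might look like the sketch below. The minion id, device names and credentials are illustrative placeholders; the full inventory schema is covered in the documentation.
```yaml
# pillar/nrp1.sls -- illustrative proxy minion pillar
proxy:
  proxytype: nornir

hosts:
  ceos1:
    hostname: 10.0.1.4
    platform: arista_eos
    username: nornir
    password: nornir
```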
# Features
- **CLI** management of devices over SSH or Telnet using Netmiko, Scrapli, Cisco Genie/PyATS or NAPALM, as shown in the example below
- **NETCONF** management of network devices using Ncclient or Scrapli-Netconf
- **HTTP API**/**RESTCONF** interact with devices using Python requests library
- **gNMI** device management supported thanks to integration with PyGNMI library
- **SNMPv1/2/3** support to manage device using puresnmp library
- **Data Processing** using NTC-Templates, TTP, Jmespath, lxml, xmltodict libraries
- **Network Testing** of state and configuration via SSH, Netconf, gNMI, HTTP or SNMP
- **Python** is a first class citizen - write plugins, modules, scripts, codify work flows
- **API** integrate with anything using SaltStack and Nornir Python API or SaltStack HTTP API
- **Netbox** Source of Truth inventory integration for infrastructure management
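For example, the CLI management listed above reduces to one-liners once a proxy minion (here named `nrp1`, an illustrative minion id) is running:
```bash
# run show commands on all devices managed by the nrp1 proxy minion
salt nrp1 nr.cli "show clock" "show version"
```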
# Communication and discussion
Network To Code [salt-nornir Slack Channel](https://app.slack.com/client/T09LQ7E9E/C02MPR34DGF)
Open an [issue](https://github.com/dmulyalin/salt-nornir/issues)
Start a [discussion](https://github.com/dmulyalin/salt-nornir/discussions)
# Contributing
Issues, bug reports and feature requests are welcomed. Feedback is a gift and we truly value it.
# Developers Motto
- if it is not in the docs it does not exist
- if it is not tested it is broken
- done is better than perfect
- keep it stupid simple
[logo]: docs/source/_images/SaltNornirLogo.png "salt nornir logo"
[architecture]: docs/source/_images/Nornir_proxy_minion_architecture_v2.png "salt nornir architecture"
[pepy-downloads-badge]: https://pepy.tech/badge/salt-nornir
[pepy-downloads-link]: https://pepy.tech/project/salt-nornir
[pypi-versions-badge]: https://img.shields.io/pypi/pyversions/salt-nornir.svg
[pypi-versions-link]: https://pypi.python.org/pypi/salt-nornir/
[readthedocs-badge]: https://readthedocs.org/projects/salt-nornir/badge/?version=latest
[readthedocs-link]: http://salt-nornir.readthedocs.io/?badge=latest
[pypi-latest-release-badge]: https://img.shields.io/pypi/v/salt-nornir.svg
[pypi-latest-release-link]: https://pypi.python.org/pypi/salt-nornir
[github-discussions-link]: https://github.com/dmulyalin/salt-nornir/discussions
[github-discussions-badge]: https://img.shields.io/static/v1?label=Discussions&message=Ask&color=blue&logo=github
[black-badge]: https://img.shields.io/badge/code%20style-black-000000.svg
[black-link]: https://github.com/psf/black
[github-tests-badge]: https://github.com/dmulyalin/salt-nornir/actions/workflows/main.yml/badge.svg
[github-tests-link]: https://github.com/dmulyalin/salt-nornir/actions
| PypiClean |
/icloudtogcal-1.0.4-py3-none-any.whl/iCloudToGCal/selenuimFunctions/iCloud.py | from datetime import date
from time import *
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.chrome.options import Options
def openiCloud(username="",password=""):
# credentials
import os
current_file_path = os.path.dirname(os.path.abspath(__file__))
if username == "" and password == "":
with open(r"C:\icloud_resources" +"\\cerdential.txt", "r") as f:
username = f.readline()
password = f.readline()+"\n"
chrome_options = Options()
chrome_options.add_argument("--headless")
driver = webdriver.Chrome(chrome_options=chrome_options)
actions = ActionChains(driver)
# login url
driver.get("https://gu.icloudems.com/corecampus/index.php")
driver.implicitly_wait(120)
# enter username
driver.find_element(By.ID, "useriid").send_keys(username)
sleep(2)
# enter password
WebDriverWait(driver, 120).until(EC.element_to_be_clickable((By.ID, "actlpass"))).send_keys(password)
# press enter
# driver.find_element(By.ID, "actlpass").send_keys(Keys.ENTER)
# login button
sleep(5)
# if not "schedulerand/tt_report_view.php" in driver.current_url:
# try:
# driver.find_element(By.ID, "psslogin").click()
# except:
# pass
return driver
def clickOnTimeTable(driver):
driver.find_element(By.XPATH,"//a[@href='schedulerand/tt_report_view.php']").click()
sleep(5)
# wait for the timetable to load
WebDriverWait(driver, 120).until(EC.presence_of_element_located((By.TAG_NAME, "table")))
sleep(5)
def clickOnNext(driver):
try:
driver.find_element(By.XPATH, "//a[contains(text(),'Next ')]").click()
except:
driver.find_element(By.XPATH, "//a[contains(text(),'Next')]").click()
sleep(5)
WebDriverWait(driver, 120).until(EC.presence_of_element_located((By.TAG_NAME, "table")))
sleep(5)
def get_academic_year(driver):
#click on Menu
driver.find_element(By.XPATH, "//a[contains(text(),'Menu')]").click()
sleep(5)
def clickOnAttendence(driver):
sleep(5)
driver.execute_script("""
var sidebar = document.querySelector('.sidebar-menu');
sidebar.innerHTML = '<a href="/corecampus/student/attendance/myattendance.php">Attendance</a>';
""")
# click on attendance
driver.find_element(By.XPATH, "//a[contains(text(),'Attendance')]").click()
sleep(5)
try:
driver.find_element(By.ID, "getattendance").click()
except:pass
sleep(5)
def openAttendence(driver):
sleep(5)
Select(driver.find_element(By.ID, "acadyear")).select_by_index(1)
card_body = driver.find_element(By.CLASS_NAME, "card-body")
sleep(5)
Select(driver.find_element(By.NAME, "users")).select_by_index(date.today().month)
sleep(5) | PypiClean |
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/TaxInfoDto.py | import json
from alipay.aop.api.constant.ParamConstants import *
class TaxInfoDto(object):
def __init__(self):
self._address = None
self._bank_name = None
self._effective_date = None
self._invoice_title = None
self._org_id = None
self._phone_no = None
self._tax_no = None
self._type = None
self._type_desc = None
@property
def address(self):
return self._address
@address.setter
def address(self, value):
self._address = value
@property
def bank_name(self):
return self._bank_name
@bank_name.setter
def bank_name(self, value):
self._bank_name = value
@property
def effective_date(self):
return self._effective_date
@effective_date.setter
def effective_date(self, value):
self._effective_date = value
@property
def invoice_title(self):
return self._invoice_title
@invoice_title.setter
def invoice_title(self, value):
self._invoice_title = value
@property
def org_id(self):
return self._org_id
@org_id.setter
def org_id(self, value):
self._org_id = value
@property
def phone_no(self):
return self._phone_no
@phone_no.setter
def phone_no(self, value):
self._phone_no = value
@property
def tax_no(self):
return self._tax_no
@tax_no.setter
def tax_no(self, value):
self._tax_no = value
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
@property
def type_desc(self):
return self._type_desc
@type_desc.setter
def type_desc(self, value):
self._type_desc = value
def to_alipay_dict(self):
params = dict()
if self.address:
if hasattr(self.address, 'to_alipay_dict'):
params['address'] = self.address.to_alipay_dict()
else:
params['address'] = self.address
if self.bank_name:
if hasattr(self.bank_name, 'to_alipay_dict'):
params['bank_name'] = self.bank_name.to_alipay_dict()
else:
params['bank_name'] = self.bank_name
if self.effective_date:
if hasattr(self.effective_date, 'to_alipay_dict'):
params['effective_date'] = self.effective_date.to_alipay_dict()
else:
params['effective_date'] = self.effective_date
if self.invoice_title:
if hasattr(self.invoice_title, 'to_alipay_dict'):
params['invoice_title'] = self.invoice_title.to_alipay_dict()
else:
params['invoice_title'] = self.invoice_title
if self.org_id:
if hasattr(self.org_id, 'to_alipay_dict'):
params['org_id'] = self.org_id.to_alipay_dict()
else:
params['org_id'] = self.org_id
if self.phone_no:
if hasattr(self.phone_no, 'to_alipay_dict'):
params['phone_no'] = self.phone_no.to_alipay_dict()
else:
params['phone_no'] = self.phone_no
if self.tax_no:
if hasattr(self.tax_no, 'to_alipay_dict'):
params['tax_no'] = self.tax_no.to_alipay_dict()
else:
params['tax_no'] = self.tax_no
if self.type:
if hasattr(self.type, 'to_alipay_dict'):
params['type'] = self.type.to_alipay_dict()
else:
params['type'] = self.type
if self.type_desc:
if hasattr(self.type_desc, 'to_alipay_dict'):
params['type_desc'] = self.type_desc.to_alipay_dict()
else:
params['type_desc'] = self.type_desc
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = TaxInfoDto()
if 'address' in d:
o.address = d['address']
if 'bank_name' in d:
o.bank_name = d['bank_name']
if 'effective_date' in d:
o.effective_date = d['effective_date']
if 'invoice_title' in d:
o.invoice_title = d['invoice_title']
if 'org_id' in d:
o.org_id = d['org_id']
if 'phone_no' in d:
o.phone_no = d['phone_no']
if 'tax_no' in d:
o.tax_no = d['tax_no']
if 'type' in d:
o.type = d['type']
if 'type_desc' in d:
o.type_desc = d['type_desc']
return o | PypiClean |
/yet_another_wizz-2.5.7-cp311-cp311-macosx_10_9_x86_64.whl/yaw/core/containers.py | from __future__ import annotations
import warnings
from collections.abc import Iterator, Sequence
from dataclasses import dataclass, field, fields
from typing import TYPE_CHECKING, Callable, Generic, NamedTuple, TypeVar
import numpy as np
import pandas as pd
from yaw.config import OPTIONS
from yaw.core.abc import BinnedQuantity, concatenate_bin_edges
from yaw.core.math import cov_from_samples
if TYPE_CHECKING: # pragma: no cover
from numpy.typing import NDArray
from pandas import DataFrame, IntervalIndex, Series
__all__ = ["Indexer", "PatchIDs", "PatchCorrelationData", "SampledValue", "SampledData"]
_TK = TypeVar("_TK")
_TV = TypeVar("_TV")
class Indexer(Generic[_TK, _TV], Iterator):
"""Helper class to implemented a class attribute that can be used as
indexer and iterator for the classes stored data (e.g. indexing patches or
redshift bins).
"""
def __init__(self, inst: _TV, builder: Callable[[_TV, _TK], _TV]) -> None:
"""Construct a new indexer.
Args:
inst:
Class instance on which the indexing operations are applied.
builder:
Callable signature ``builder(inst, item) -> inst`` that
constructs a new class instance with the indexing specified from
``item`` applied.
The resulting indexer supports indexing and slicing (depending on the
subclass implementation), as well as iteration, where instances holding
individual items are yielded.
"""
self._inst = inst
self._builder = builder
self._iter_loc = 0
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._inst.__class__.__name__})"
def __getitem__(self, item: _TK) -> _TV:
return self._builder(self._inst, item)
def __next__(self) -> _TV:
"""Returns the next value and increments the iterator location index."""
try:
val = self[self._iter_loc]
except IndexError:
raise StopIteration
else:
self._iter_loc += 1
return val
def __iter__(self) -> Iterator[_TV]:
"""Returns a new instance of this class to have an independent iterator
location index"""
return self.__class__(inst=self._inst, builder=self._builder)
class PatchIDs(NamedTuple):
"""Named tuple that can hold a pair of patch indices."""
id1: int
"""First patch index."""
id2: int
"""Second patch index."""
@dataclass(frozen=True)
class PatchCorrelationData:
"""Container to hold the result of a pair counting operation between two
spatial patches.
Args:
patches (:obj:`PatchIDs`):
            The indices of the patches used.
totals1 (:obj:`NDArray`):
Total number of objects after binning by redshift in first patch.
        totals2 (:obj:`NDArray`):
Total number of objects after binning by redshift in second patch.
counts (:obj:`dict`):
Dictionary listing the number of counted pairs after binning by
redshift. Each item represents results from a different scale.
"""
patches: PatchIDs
totals1: NDArray
totals2: NDArray
counts: dict[str, NDArray]
_Tscalar = TypeVar("_Tscalar", bound=np.number)
@dataclass(frozen=True)
class SampledValue(Generic[_Tscalar]):
"""Container to hold a scalar value with an empirically estimated
uncertainty from resampling.
Supports comparison of the values and samples with ``==`` and ``!=``.
.. rubric:: Examples
Create a value container with 100 assumed jackknife samples that scatter
    around zero with a standard deviation of 0.01:
    >>> from numpy.random import normal
    >>> samples = normal(loc=0.0, scale=0.01, size=100)
>>> value = yaw.core.SampledValue(0.0, samples, method="jackknife")
>>> value
SampledValue(value=0, error=0.963, n_samples=100, method='jackknife')
Args:
value:
Numerical, scalar value.
samples (:obj:`NDArray`):
Samples of ``value`` obtained from resampling methods.
method (:obj:`str`):
Resampling method used to obtain the data samples, see
:class:`~yaw.ResamplingConfig` for available options.
"""
value: _Tscalar
"""Numerical, scalar value."""
samples: NDArray[_Tscalar]
"""Samples of ``value`` obtained from resampling methods."""
method: str
"""Resampling method used to obtain the data samples, see
:class:`~yaw.ResamplingConfig` for available options."""
error: _Tscalar = field(init=False)
"""The uncertainty (standard error) of the value."""
def __post_init__(self) -> None:
if self.method not in OPTIONS.method:
raise ValueError(f"unknown sampling method '{self.method}'")
if self.method == "bootstrap":
error = np.std(self.samples, ddof=1, axis=0)
else: # jackknife
error = np.std(self.samples, ddof=0, axis=0) * (self.n_samples - 1)
object.__setattr__(self, "error", error)
def __repr__(self) -> str:
string = self.__class__.__name__
value = self.value
error = self.error
n_samples = self.n_samples
method = self.method
return f"{string}({value=:.3g}, {error=:.3g}, {n_samples=}, {method=})"
def __str__(self) -> str:
return f"{self.value:+.3g}+/-{self.error:.3g}"
def __eq__(self, other: object) -> bool:
if isinstance(other, SampledValue):
return (
self.samples.shape == other.samples.shape
and self.method == other.method
and self.value == other.value
and np.all(self.samples == other.samples)
)
return NotImplemented
@property
def n_samples(self) -> int:
"""Number of samples used for error estimate."""
return len(self.samples)
_Tdata = TypeVar("_Tdata", bound="SampledData")
@dataclass(frozen=True, repr=False)
class SampledData(BinnedQuantity):
"""Container for data and resampled data with redshift binning.
Contains the redshift binning, data vector, and resampled data vector (e.g.
jackknife or bootstrap samples). The resampled values are used to compute
error estimates and covariance/correlation matrices.
Args:
binning (:obj:`pandas.IntervalIndex`):
The redshift binning applied to the data.
data (:obj:`NDArray`):
The data values, one for each redshift bin.
samples (:obj:`NDArray`):
The resampled data values (e.g. jackknife or bootstrap samples).
method (:obj:`str`):
The resampling method used, see :class:`~yaw.ResamplingConfig` for
available options.
The container supports addition and subtraction, which return a new instance
of the container, holding the modified data. This requires that both
operands are compatible (same binning and same sampling). The operands are
    applied to the ``data`` and ``samples`` attributes.
Furthermore, the container supports indexing and iteration over the redshift
bins using the :obj:`SampledData.bins` attribute. This attribute yields
instances of :obj:`SampledData` containing a single bin when iterating.
Slicing and indexing follows the same rules as the underlying ``data``
:obj:`NDArray`. Refer to :obj:`~yaw.correlation.CorrData` for some indexing
and iteration examples.
.. rubric:: Examples
Create a redshift binning:
>>> import pandas as pd
>>> bins = pd.IntervalIndex.from_breaks([0.1, 0.2, 0.3])
>>> bins
IntervalIndex([(0.1, 0.2], (0.2, 0.3]], dtype='interval[float64, right]')
Create some sample data for the bins with value 1 and five assumed jackknife
samples normal-distributed around 1.
>>> import numpy as np
>>> n_bins, n_samples = len(bins), 5
>>> data = np.ones(n_bins)
>>> samples = np.random.normal(1.0, size=(n_samples, n_bins))
Create the container:
>>> values = yaw.core.SampledData(bins, data, samples, method="jackknife")
>>> values
    SampledData(n_bins=2, z='0.100...0.300', n_samples=5, method='jackknife')
Add the container to itself and verify that the values are doubled:
>>> summed = values + values
>>> summed.data
array([2., 2.])
The same applies to the samples:
>>> summed.samples / values.samples
array([[2., 2.],
[2., 2.],
[2., 2.],
[2., 2.],
[2., 2.]])
"""
binning: IntervalIndex
"""The redshift bin intervals."""
data: NDArray
"""The data values, one for each redshift bin."""
samples: NDArray
"""Samples of the data values, shape (# samples, # bins)."""
method: str
"""The resampling method used."""
covariance: NDArray = field(init=False)
"""Covariance matrix automatically computed from the resampled values."""
def __post_init__(self) -> None:
if self.data.shape != (self.n_bins,):
raise ValueError("unexpected shape of 'data' array")
if not self.samples.shape[1] == self.n_bins:
raise ValueError("number of bins for 'data' and 'samples' do not match")
if self.method not in OPTIONS.method:
raise ValueError(f"unknown sampling method '{self.method}'")
covmat = cov_from_samples(self.samples, self.method)
object.__setattr__(self, "covariance", np.atleast_2d(covmat))
def __repr__(self) -> str:
string = super().__repr__()[:-1]
n_samples = self.n_samples
method = self.method
return f"{string}, {n_samples=}, {method=})"
def __eq__(self, other: object) -> bool:
if isinstance(other, self.__class__):
return (
self.samples.shape == other.samples.shape
and self.method == other.method
and np.all(self.data == other.data)
and np.all(self.samples == other.samples)
and (self.binning == other.binning).all()
)
return NotImplemented
    def __add__(self, other: object) -> _Tdata:
        if not isinstance(other, self.__class__):
            return NotImplemented
        self.is_compatible(other, require=True)
        return self.__class__(
            binning=self.get_binning(),
            data=self.data + other.data,
            samples=self.samples + other.samples,
            method=self.method,
        )
    def __sub__(self, other: object) -> _Tdata:
        if not isinstance(other, self.__class__):
            return NotImplemented
        self.is_compatible(other, require=True)
        return self.__class__(
            binning=self.get_binning(),
            data=self.data - other.data,
            samples=self.samples - other.samples,
            method=self.method,
        )
@property
def bins(self: _Tdata) -> Indexer[int | slice | Sequence, _Tdata]:
def builder(inst: _Tdata, item: int | slice | Sequence) -> _Tdata:
if isinstance(item, int):
item = [item]
# try to take subsets along bin axis
binning = inst.get_binning()[item]
data = inst.data[item]
samples = inst.samples[:, item]
# determine which extra attributes need to be copied
init_attrs = {field.name for field in fields(inst) if field.init}
copy_attrs = init_attrs - {"binning", "data", "samples"}
kwargs = dict(binning=binning, data=data, samples=samples)
kwargs.update({attr: getattr(inst, attr) for attr in copy_attrs})
return inst.__class__(**kwargs)
return Indexer(self, builder)
@property
def n_samples(self) -> int:
"""Number of samples used for error estimate."""
return len(self.samples)
@property
def error(self) -> NDArray:
"""The uncertainty (standard error) of the data.
Returns:
:obj:`NDArray`
"""
return np.sqrt(np.diag(self.covariance))
def get_binning(self) -> IntervalIndex:
return self.binning
def is_compatible(self, other: SampledData, require: bool = False) -> bool:
"""Check whether this instance is compatible with another instance.
Ensures that both objects are instances of the same class, that the
redshift binning is identical, that the number of samples agree, and
that the resampling method is identical.
Args:
other (:obj:`BinnedQuantity`):
Object instance to compare to.
            require (:obj:`bool`, optional):
Raise a ValueError if any of the checks fail.
Returns:
:obj:`bool`
"""
if not super().is_compatible(other, require):
return False
if self.n_samples != other.n_samples:
if require:
raise ValueError("number of samples do not agree")
return False
if self.method != other.method:
if require:
raise ValueError("resampling method does not agree")
return False
return True
def concatenate_bins(self: _Tdata, *data: _Tdata) -> _Tdata:
for other in data:
self.is_compatible(other, require=True)
all_data: list[_Tdata] = [self, *data]
binning = concatenate_bin_edges(*all_data)
# concatenate data
data = np.concatenate([d.data for d in all_data])
samples = np.concatenate([d.samples for d in all_data], axis=1)
# determine which extra attributes need to be copied
init_attrs = {field.name for field in fields(self) if field.init}
copy_attrs = init_attrs - {"binning", "data", "samples"}
kwargs = dict(binning=binning, data=data, samples=samples)
kwargs.update({attr: getattr(self, attr) for attr in copy_attrs})
return self.__class__(**kwargs)
def get_data(self) -> Series:
"""Get the data as :obj:`pandas.Series` with the binning as index."""
return pd.Series(self.data, index=self.binning)
def get_samples(self) -> DataFrame:
"""Get the data as :obj:`pandas.DataFrame` with the binning as index.
The columns are labelled numerically and each represent one of the
samples."""
return pd.DataFrame(self.samples.T, index=self.binning)
def get_error(self) -> Series:
"""Get value error estimate (diagonal of covariance matrix) as series
with its corresponding redshift bin intervals as index.
Returns:
:obj:`pandas.Series`
"""
return pd.Series(self.error, index=self.binning)
def get_covariance(self) -> DataFrame:
"""Get value covariance matrix as data frame with its corresponding
redshift bin intervals as index and column labels.
Returns:
:obj:`pandas.DataFrame`
"""
return pd.DataFrame(
data=self.covariance, index=self.binning, columns=self.binning
)
def get_correlation(self) -> DataFrame:
"""Get value correlation matrix as data frame with its corresponding
redshift bin intervals as index and column labels.
Returns:
:obj:`pandas.DataFrame`
"""
stdev = np.sqrt(np.diag(self.covariance))
with warnings.catch_warnings():
warnings.simplefilter("ignore")
corr = self.covariance / np.outer(stdev, stdev)
corr[self.covariance == 0] = 0
        return pd.DataFrame(data=corr, index=self.binning, columns=self.binning)
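# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the library API): indexing and
# iterating the redshift bins of a SampledData instance through its ``bins``
# Indexer. The binning and sample values below are made up; ``np`` and ``pd``
# are the module-level imports above.
if __name__ == "__main__":  # pragma: no cover
    _bins = pd.IntervalIndex.from_breaks([0.1, 0.2, 0.3])
    _values = SampledData(
        binning=_bins,
        data=np.ones(2),
        samples=np.random.normal(1.0, size=(5, 2)),
        method="jackknife",
    )
    print(_values.bins[0].data)       # data restricted to the first bin
    for _single_bin in _values.bins:  # iterate bin by bin
        print(_single_bin.data)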
/scancode-toolkit-32.0.6.tar.gz/scancode-toolkit-32.0.6/src/licensedcode/additional_license_location_provider.py
import logging
import os
import sys
from pluggy import PluginManager as PluggyPluginManager
from plugincode import HookimplMarker
from plugincode import HookspecMarker
"""
Support for plugins that provide one or more paths keys typically OS-specific
paths to bundled pre-built binaries provided as Python packages.
Plugin can either be enabled for very specific environment/platform markers (OS,
arch, etc) in their built wheels .... Or be smart about OS/ARCH/etc and provide
a path based on running some code.
"""
logger = logging.getLogger(__name__)
# uncomment to enable logging locally
# logging.basicConfig(stream=sys.stdout)
# logger.setLevel(logging.DEBUG)
def logger_debug(*args):
return logger.debug(" ".join(isinstance(a, str) and a or repr(a) for a in args))
project_name = __name__
entrypoint = "scancode_additional_license_location_provider"
location_provider_spec = HookspecMarker(project_name=project_name)
location_provider_impl = HookimplMarker(project_name=project_name)
@location_provider_spec
class AdditionalLicenseLocationProviderPlugin(object):
"""
Base plugin class for plugins that provide path locations for one or more
keys such as the path location to a native binary executable or related
system files.
    A plugin is configured as its own package with proper environment markers.
"""
# name string under which this plugin is registered.
# This is set automatically when a plugin class is loaded in its manager.
# Subclasses must not set this.
name = None
def get_locations(self):
"""
Return a mapping of {key: location} where location is an absolute path
to a file or directory referenced by a known key. The location should
        exist on a given platform/OS where this plugin can be installed.
"""
raise NotImplementedError
class AdditionalLicensePluginManager(object):
"""
A PluginManager class for simple, non-scanning related plugins.
"""
def __init__(self, project_name, entrypoint, plugin_base_class):
"""
        Initialize this plugin manager for the given `project_name` with
        plugins loaded from the setuptools `entrypoint` that must subclass
        `plugin_base_class`.
"""
self.manager = PluggyPluginManager(project_name=project_name)
self.entrypoint = entrypoint
self.plugin_base_class = plugin_base_class
self.manager.add_hookspecs(sys.modules[project_name])
# set to True once this manager is initialized by running its setup()
self.initialized = False
# mapping of {plugin.name: plugin_class} for all the loaded plugins of
# this manager
self.plugin_classes = dict()
def setup(self):
"""
Load and validate available plugins for this PluginManager from its
        assigned `entrypoint`. Raise an Exception if a plugin is not valid,
        i.e. when it does not subclass the manager's `plugin_base_class`.
        Must be called once to initialize the plugins of this manager.
Return a list of all plugin classes for this manager.
"""
if self.initialized:
return self.plugin_classes.values()
entrypoint = self.entrypoint
self.manager.load_setuptools_entrypoints(entrypoint)
plugin_classes = []
for name, plugin_class in self.manager.list_name_plugin():
if not issubclass(plugin_class, self.plugin_base_class):
plugin_base_class = self.plugin_base_class
raise Exception(
"Invalid plugin: %(name)r: %(plugin_class)r "
"must extend %(plugin_base_class)r." % locals()
)
plugin_class.name = name
plugin_classes.append(plugin_class)
self.plugin_classes = dict([(cls.name, cls) for cls in plugin_classes])
self.initialized = True
return self.plugin_classes.values()
additional_license_location_provider_plugins = AdditionalLicensePluginManager(
project_name=project_name, entrypoint=entrypoint, plugin_base_class=AdditionalLicenseLocationProviderPlugin
)
class ProvidedLocationError(Exception):
pass
def get_location(location_key, _cached_locations={}):
"""
Return the location for a `location_key` if available from plugins or None.
"""
if not _cached_locations:
additional_license_location_provider_plugins.setup()
unknown_locations = {}
for k, plugin_class in additional_license_location_provider_plugins.plugin_classes.items():
pc = plugin_class()
provided_locs = pc.get_locations() or {}
for loc_key, location in provided_locs.items():
if not os.path.exists(location):
unknown_locations[loc_key] = location
if loc_key in _cached_locations:
existing = _cached_locations[loc_key]
msg = (
"Duplicate location key provided: {loc_key}: "
"new: {location}, existing:{existing}"
)
msg = msg.format(**locals())
raise ProvidedLocationError(msg)
_cached_locations[loc_key] = location
if unknown_locations:
msg = "Non-existing locations provided:\n:"
msg += "\n".join("key:{}, loc: {}".format(k, l) for k, l in unknown_locations.items())
raise ProvidedLocationError(msg)
    return _cached_locations.get(location_key)
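# ---------------------------------------------------------------------------
# Illustrative sketch (not part of scancode-toolkit): a minimal provider
# plugin subclasses AdditionalLicenseLocationProviderPlugin and returns a
# mapping of location keys to existing paths. The key name below is made up;
# a real plugin must also be registered under the
# "scancode_additional_license_location_provider" setuptools entry point
# before get_location() can resolve it.
class ExampleLicenseLocationProvider(AdditionalLicenseLocationProviderPlugin):
    def get_locations(self):
        # map a (hypothetical) location key to a directory known to exist
        return {"example_licenses_dir": os.path.dirname(os.path.abspath(__file__))}
# Once installed and registered, callers would resolve the path with:
#     get_location("example_licenses_dir")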
/django-sqs-qc-0.2.zip/django-sqs-qc-0.2/README
django-sqs-qc
=============
Author: Maciej Pasternacki <[email protected]>
Date: 2010-01-15 Fri
Maintainer: Nauman Tariq <[email protected]>
Introduction:
A modified version of django-sqs (https://pypi.python.org/pypi/django-sqs/0.2).
The original django-sqs creates a new queue every time the worker listener is
started; this modified version reuses the existing queue for messages.
Integrate Amazon Simple Queue Service in your Django project
Table of Contents
=================
1 Setup
2 Receivers
2.1 Register using a decorator
2.2 Register manually
2.3 Example receiver
3 Receiving
4 Sending
4.1 Using decorated function
4.2 Manually
5 Custom message classes
5.1 ModelInstanceMessage class
6 Management
6.1 manage.py sqs_status
6.2 manage.py sqs_clear
6.3 manage.py sqs_wait
7 Views
8 FIXME
8.1 Sensible forking/threading or multiplexing instead of the fork hack?
8.2 Autoimporting receivers.py from apps
8.3 docstrings
8.4 Minimize polling
8.5 Custom exception to leave message in queue
1 Setup
~~~~~~~~
Boto library for accessing Amazon Web Services is required.
1. Add `django_sqs' to your Python path
2. Add `django_sqs' to INSTALLED_APPS setting
3. Set AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY
4. Optionally set SQS_QUEUE_PREFIX to prefix your queues and avoid
clashes with other developers, production/staging env and so on.
SQS_QUEUE_PREFIX is required when DEBUG is true, and recommended
even in production mode.
5. Optionally set SQS_DEFAULT_VISIBILITY_TIMEOUT (default is 60 seconds)
6. Optionally set SQS_POLL_PERIOD (default is 10 seconds)
7. Optionally set AWS_REGION (default to "us-east-1")
2 Receivers
~~~~~~~~~~~~
Create receiver function that accepts one argument, which will be an
instance of `boto.sqs.message.Message' or its custom subclass.
Then, register it as queue receiver:
2.1 Register using a decorator
===============================
Decorate receiver function with:
django_sqs.receiver([queue_name=None, visibility_timeout=None, message_class=None, delete_on_start=False, close_database=False, suffixes=()])
Decorated function will become an instance of
`django_sqs.registered_queue.RegisteredQueue.ReceiverProxy' class.
Instance is callable - you may call it with an instance of
appropriate message class, or its constructor's keyword arguments,
and the message will be added to the queue. Instance provides also
attributes `direct' with original decorated function, and
`registered_queue' with appropriate
`django_sqs.registered_queue.RegisteredQueue' instance.
If `queue_name' is omitted or false, the function's `__module__' and
`__name__' are used, with dots converted to double underscores.
The `suffixes' argument is a tuple of known queue name suffixes. If
an unknown suffix is used, a warning will be issued.
If `delete_on_start' is true, received message will be deleted from
queue right after it's been received, before receiver function is
called. If it is false (which is the default), it will be deleted
after receiver function has finished, when message has been fully
processed.
If `close_database' is true, all database connections will be
closed after processing each message to prevent pending unclosed
transactions.
Queue name suffixes can be used to split processing similar items
to multiple queues (e.g. use separate queue for big input items to
distribute load).
2.2 Register manually
======================
Alternatively, you can avoid decoration, and register a receiver
manually by calling:
django_sqs.register(queue_name, [fn=None, visibility_timeout=None, message_class=None, delete_on_start=False, suffixes=()])
If `fn' is None or not given, no handler is assigned: messages can
be sent, but won't be received.
Create function in modules that will be imported by default
(recommendation: use `receivers.py' and import them in `models.py',
autoimporting TBD).
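For example (an illustrative sketch; the `reports' queue name and the
`process_report' function are made up):
  import django_sqs
  def process_report(msg):
      print 'processing:', msg.get_body()
  django_sqs.register("reports", process_report)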
2.3 Example receiver
=====================
@receiver("test")
def receive_message(msg):
print 'received:', msg.get_body()
3 Receiving
~~~~~~~~~~~~
python manage.py runreceiver [--message-limit=N] [--suffix=SUFFIX] [queue_name [queue_name [...]]]
If no `queue_name' parameters are given, receive from all configured
queues.
If more than one queue is registered, a new process is forked for
each queue.
For each message received on the queue, registered receiver function
is called with the message instance as argument. If receiver
function returns successfully, message is then deleted from queue.
If receiver message raises an exception, exception traceback is
logged using logging module, and message is deleted. If receiver
sees a restartable error and wants to keep message in queue, it
should raise `django_sqs.RestartLater' exception - this exception will
leave the message in queue.
Options:
`--message-limit=N': exit after receiving `N' messages
`--suffix=SUFFIX': Use queue name suffix
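For example, to process at most 10 messages from the `test' queue defined
earlier and then exit (illustrative invocation):
  python manage.py runreceiver --message-limit=10 test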
4 Sending
~~~~~~~~~~
4.1 Using decorated function
=============================
You can simply call function decorated with `@receiver' decorator,
providing a message instance or keyword arguments (like for `send'
function described below).
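For example, calling the `receive_message' receiver defined earlier
enqueues a plain message (sketch):
  receive_message(body='Hello, queue!')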
4.2 Manually
=============
To send a message manually, use following function:
django_sqs.send(queue_name, message=None, suffix=None, **kwargs)
`message' should be an instance of `message_class' configured with
`receiver' decorator or `register' function for the queue (or
`boto.sqs.message.Message').
When `message' is omitted or `None', new instance of queue's message
class will be instantiated using `**kwargs'. With default message
class, `boto.sqs.message.Message', we can simply provide body:
django_sqs.send("a_queue", body='Lorem ipsum dolor sit amet')
`suffix' is a queue name suffix to use.
5 Custom message classes
~~~~~~~~~~~~~~~~~~~~~~~~~
For sending other values than raw, non-unicode strings, any of
classes provided in `boto.sqs.message' or their subclasses may be
used. The module is well commented (much better than this one), so
go ahead and read the fine source!
5.1 ModelInstanceMessage class
===============================
The `django_sqs.message.ModelInstanceMessage' class is provided for
convenience. It encodes a single model instance, using Django's
ContentType framework (as app/model/primary key triple). It
accepts `instance' keyword parameter in constructor, and provides
`get_instance()' method.
There is no support for passing additional information except the
instance yet.
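A sketch of wiring this up (the `frobnicate_article' queue name and the
`article' model instance are made up for illustration):
  from django_sqs import receiver
  from django_sqs.message import ModelInstanceMessage
  @receiver("frobnicate_article", message_class=ModelInstanceMessage)
  def frobnicate_article(msg):
      print 'got instance:', msg.get_instance()
  # enqueue a model instance for later processing
  frobnicate_article(instance=article)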
6 Management
~~~~~~~~~~~~~
6.1 manage.py sqs_status
=========================
Prints the (approximate) count of messages in the queue.
6.2 manage.py sqs_clear
========================
Clears all queues (by default), or queues named as arguments.
Prints number of messages deleted.
If queue receivers are running or were running recently, some
messages may be still locked and won't be deleted. Command may
need to be re-run.
6.3 manage.py sqs_wait
=======================
Waits until specified (or all) queues are empty.
7 Views
~~~~~~~~
A single view, `django_sqs.views.status', is provided for simple,
plain text queue status report (same as `manage.py sqs_status').
8 FIXME
~~~~~~~~
8.1 TODO Sensible forking/threading or multiplexing instead of the fork hack?
==============================================================================
8.2 TODO Autoimporting receivers.py from apps
==============================================
8.3 TODO docstrings
====================
8.4 TODO Minimize polling
==========================
Amazon charges for every call. Less polling, lower invoice. Some
exponential backoff + out-of-band signal (view?) to wake up a running
receiver process may be a good thing.
8.5 TODO Custom exception to leave message in queue
====================================================
Provide a custom exception class that won't be handled by receive
loop (i.e. no backtrace) that can be used by receiver function to
explicitly leave message in queue without printing backtrace and
alarming everyone.
/lazr.restful-2.0.1-py3-none-any.whl/lazr/restful/docs/webservice-marshallers.rst
LAZR's field marshallers
************************
LAZR defines an interface for converting between the values that
come in on an HTTP request, and the object values appropriate for schema
fields. This is similar to Zope's widget interface, but much smaller.
To test the various marshallers we create a stub request and
application root.
>>> from lazr.restful.testing.webservice import WebServiceTestPublication
>>> from lazr.restful.simple import Request
>>> from lazr.restful.example.base.root import (
... CookbookServiceRootResource)
>>> request = Request("", {'HTTP_HOST': 'cookbooks.dev'})
>>> request.annotations[request.VERSION_ANNOTATION] = '1.0'
>>> application = CookbookServiceRootResource()
>>> request.setPublication(WebServiceTestPublication(application))
>>> request.processInputs()
We also define some helpers to print values in a way that is unambiguous
across Python versions.
>>> def pprint_dict(d):
... print('{', end='')
... print(
... ', '.join(
... '%r: %r' % (key, value)
... for key, value in sorted(d.items())),
... end='')
... print('}')
IFieldMarshaller and SimpleFieldMarshaller
==========================================
There is a SimpleFieldMarshaller class that provides a good base to
implement that interface.
>>> from zope.interface.verify import verifyObject
>>> from lazr.restful.interfaces import IFieldMarshaller
>>> from lazr.restful.marshallers import SimpleFieldMarshaller
>>> from zope.schema import Text
>>> field = Text(__name__='field_name')
>>> marshaller = SimpleFieldMarshaller(field, request)
>>> verifyObject(IFieldMarshaller, marshaller)
True
representation_name
===================
The representation_name attribute is used to retrieve the name under
which the field should be stored in the JSON representation. In the
simple case, it's the same name as the field.
>>> marshaller.representation_name
'field_name'
marshall_from_json_data()
=========================
The marshall_from_json_data() method is used during PUT and PATCH
requests to transform the value provided in the JSON representation to a
value in the underlying schema field. In the SimpleFieldMarshaller
implementation, the value is returned unchanged.
>>> marshaller.marshall_from_json_data("foo")
'foo'
>>> marshaller.marshall_from_json_data(4)
4
>>> print(marshaller.marshall_from_json_data("unicode\u2122"))
unicode™
>>> marshaller.marshall_from_json_data("")
''
>>> print(marshaller.marshall_from_json_data(None))
None
marshall_from_request()
=======================
The marshall_from_request() method is used during operation invocation
to transform a value submitted via the query string or form-encoded POST
data into a value that will be accepted by the underlying schema field.
SimpleFieldMarshaller tries first to parse the value as a JSON-encoded
string, the resulting value is passed on to marshall_from_json_data().
>>> print(marshaller.marshall_from_request("null"))
None
>>> marshaller.marshall_from_request("true")
True
>>> marshaller.marshall_from_request("false")
False
>>> marshaller.marshall_from_request('["True", "False"]')
['True', 'False']
>>> marshaller.marshall_from_request("1")
1
>>> marshaller.marshall_from_request("-10.5")
-10.5
>>> marshaller.marshall_from_request('"a string"')
'a string'
>>> marshaller.marshall_from_request('"false"')
'false'
>>> marshaller.marshall_from_request('"null"')
'null'
Invalid JSON-encoded strings are interpreted as string literals and
passed on directly to marshall_from_json_data(). That's for the
convenience of web clients: they don't need to encode string values in
quotes, and can pass lists using multiple key-value pairs.
>>> marshaller.marshall_from_request("a string")
'a string'
>>> marshaller.marshall_from_request('False')
'False'
>>> marshaller.marshall_from_request("")
''
>>> marshaller.marshall_from_request(' ')
' '
>>> marshaller.marshall_from_request('\n')
'\n'
>>> marshaller.marshall_from_request(['value1', 'value2'])
['value1', 'value2']
unmarshall() and variants
=========================
The unmarshall() method is used to convert the field's value to a value
that can be serialized to JSON as part of an entry representation. The
first parameter is the entry that the value is part of. That is used by
fields that transform the value into a URL, see the CollectionField
marshaller for an example. The second one is the value to convert. In
the SimpleFieldMarshaller implementation, the value is returned
unchanged.
>>> print(marshaller.unmarshall(None, 'foo'))
foo
>>> print(marshaller.unmarshall(None, None))
None
When a more detailed representation is needed, unmarshall_to_closeup()
can be called. By default, this returns the same data as unmarshall(),
but specific marshallers may send more detailed information.
>>> marshaller.unmarshall_to_closeup(None, 'foo')
'foo'
Marshallers for basic data types
================================
Bool
----
The marshaller for a Bool field checks that the JSON value is either
True or False. A ValueError is raised when it's not the case.
>>> from zope.configuration import xmlconfig
>>> zcmlcontext = xmlconfig.string("""
... <configure xmlns="http://namespaces.zope.org/zope">
... <include package="lazr.restful" file="ftesting.zcml" />
... </configure>
... """)
>>> from zope.component import getMultiAdapter
>>> from zope.schema import Bool
>>> field = Bool()
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data(True)
True
>>> marshaller.marshall_from_json_data(False)
False
>>> marshaller.marshall_from_json_data("true")
Traceback (most recent call last):
...
ValueError: got 'str', expected bool: 'true'
>>> marshaller.marshall_from_json_data(1)
Traceback (most recent call last):
...
ValueError: got 'int', expected bool: 1
None is passed through though.
>>> print(marshaller.marshall_from_json_data(None))
None
Booleans are encoded using the standard JSON representation of 'true' or
'false'.
>>> marshaller.marshall_from_request("true")
True
>>> marshaller.marshall_from_request("false")
False
>>> marshaller.marshall_from_request('True')
Traceback (most recent call last):
...
ValueError: got 'str', expected bool: 'True'
Int
---
The marshaller for an Int field checks that the JSON value is an
integer. A ValueError is raised when it's not the case.
>>> from zope.schema import Int
>>> field = Int()
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data(-10)
-10
>>> marshaller.marshall_from_json_data("-10")
Traceback (most recent call last):
...
ValueError: got 'str', expected int: '-10'
None is passed through though.
>>> print(marshaller.marshall_from_json_data(None))
None
Integers are encoded using strings when in a request.
>>> marshaller.marshall_from_request("4")
4
>>> marshaller.marshall_from_request("-4")
-4
It raises a ValueError if the value cannot be converted to an integer.
>>> marshaller.marshall_from_request("foo")
Traceback (most recent call last):
...
ValueError: got 'str', expected int: 'foo'
>>> marshaller.marshall_from_request("4.62")
Traceback (most recent call last):
...
ValueError: got 'float', expected int: 4.62...
Note that python octal and hexadecimal syntax isn't supported.
(This would 13 in octal notation.)
>>> marshaller.marshall_from_request("015")
Traceback (most recent call last):
...
ValueError: got 'str', expected int: '015'
>>> marshaller.marshall_from_request("0x04")
Traceback (most recent call last):
...
ValueError: got 'str', expected int: '0x04'
Float
-----
The marshaller for a Float field checks that the JSON value is indeed a
float. A ValueError is raised when it's not the case.
>>> from zope.schema import Float
>>> field = Float()
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data(1.0)
1.0
>>> marshaller.marshall_from_json_data(-1.0)
-1.0
>>> marshaller.marshall_from_json_data("true")
Traceback (most recent call last):
...
ValueError: got 'str', expected float, int: 'true'
None is passed through though.
>>> print(marshaller.marshall_from_json_data(None))
None
And integers are automatically converted to a float.
>>> marshaller.marshall_from_json_data(1)
1.0
Floats are encoded using the standard JSON representation.
>>> marshaller.marshall_from_request("1.2")
1.2
>>> marshaller.marshall_from_request("-1.2")
-1.2
>>> marshaller.marshall_from_request("-1")
-1.0
>>> marshaller.marshall_from_request('True')
Traceback (most recent call last):
...
ValueError: got 'str', expected float, int: 'True'
Datetime
--------
The marshaller for a Datetime field checks that the JSON value is indeed a
parsable datetime stamp.
>>> from zope.schema import Datetime
>>> field = Datetime()
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data('2009-07-07T13:15:00+0000')
datetime.datetime(2009, 7, 7, 13, 15, tzinfo=<UTC>)
>>> marshaller.marshall_from_json_data('2009-07-07T13:30:00-0000')
datetime.datetime(2009, 7, 7, 13, 30, tzinfo=<UTC>)
>>> marshaller.marshall_from_json_data('2009-07-07T13:45:00Z')
datetime.datetime(2009, 7, 7, 13, 45, tzinfo=<UTC>)
>>> marshaller.marshall_from_json_data('2009-07-08T14:30:00')
datetime.datetime(2009, 7, 8, 14, 30, tzinfo=<UTC>)
>>> marshaller.marshall_from_json_data('2009-07-09')
datetime.datetime(2009, 7, 9, 0, 0, tzinfo=<UTC>)
The time zone must be UTC. An error is raised if it is clearly not UTC.
>>> marshaller.marshall_from_json_data('2009-07-25T13:15:00+0500')
Traceback (most recent call last):
...
ValueError: Time not in UTC.
>>> marshaller.marshall_from_json_data('2009-07-25T13:30:00-0200')
Traceback (most recent call last):
...
ValueError: Time not in UTC.
A ValueError is raised when the value is not parsable.
>>> marshaller.marshall_from_json_data("now")
Traceback (most recent call last):
...
ValueError: Value doesn't look like a date.
>>> marshaller.marshall_from_json_data('20090708')
Traceback (most recent call last):
...
ValueError: Value doesn't look like a date.
>>> marshaller.marshall_from_json_data(20090708)
Traceback (most recent call last):
...
ValueError: Value doesn't look like a date.
The unmarshall() method returns the ISO 8601 representation of the value.
>>> marshaller.unmarshall(
... None, marshaller.marshall_from_json_data('2009-07-07T13:45:00Z'))
'2009-07-07T13:45:00+00:00'
Date
----
The marshaller for a Date field checks that the JSON value is indeed a
parsable date.
>>> from zope.schema import Date
>>> field = Date()
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data('2009-07-09')
datetime.date(2009, 7, 9)
The marshaller extends the Datetime marshaller. It will parse a datetime
stamp and return a date.
>>> marshaller.marshall_from_json_data('2009-07-07T13:15:00+0000')
datetime.date(2009, 7, 7)
The unmarshall() method returns the ISO 8601 representation of the value.
>>> marshaller.unmarshall(
... None, marshaller.marshall_from_json_data('2009-07-09'))
'2009-07-09'
Text
----
The marshaller for IText field checks that the value is a unicode
string. A ValueError is raised when that's not the case.
>>> from zope.schema import Text
>>> field = Text()
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data("Test")
'Test'
>>> marshaller.marshall_from_json_data(1.0)
Traceback (most recent call last):
...
ValueError: got 'float', expected str: 1.0
>>> marshaller.marshall_from_json_data(b'Test')
Traceback (most recent call last):
...
ValueError: got 'bytes', expected str: b'Test'
None is passed through though.
>>> print(marshaller.marshall_from_json_data(None))
None
When coming from the request, everything is interpreted as a unicode
string:
>>> marshaller.marshall_from_request('a string')
'a string'
>>> marshaller.marshall_from_request(['a', 'b'])
"['a', 'b']"
>>> marshaller.marshall_from_request('true')
'True'
>>> marshaller.marshall_from_request('')
''
Except that 'null' still returns None.
>>> print(marshaller.marshall_from_request('null'))
None
Line breaks coming from the request are normalized to LF.
>>> marshaller.marshall_from_request('abc\r\n\r\ndef\r\n')
'abc\n\ndef\n'
>>> marshaller.marshall_from_request('abc\n\ndef\n')
'abc\n\ndef\n'
>>> marshaller.marshall_from_request('abc\r\rdef\r')
'abc\n\ndef\n'
Bytes
-----
Since there is no way to represent a bytes string in JSON, all strings
are converted to a byte string using UTF-8 encoding. If the value isn't
a string, a ValueError is raised.
>>> from zope.schema import Bytes
>>> field = Bytes(__name__='data')
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data("Test")
b'Test'
>>> marshaller.marshall_from_json_data('int\xe9ressant')
b'int\xc3\xa9ressant'
>>> marshaller.marshall_from_json_data(1.0)
Traceback (most recent call last):
...
ValueError: got 'float', expected ...: 1.0
Again, except for None which is passed through.
>>> print(marshaller.marshall_from_json_data(None))
None
When coming over the request, the value is also converted into a UTF-8
encoded string, with no JSON decoding.
>>> marshaller.marshall_from_request("Test")
b'Test'
>>> marshaller.marshall_from_request('int\xe9ressant')
b'int\xc3\xa9ressant'
>>> marshaller.marshall_from_request(b'1.0')
b'1.0'
>>> marshaller.marshall_from_request(b'"not JSON"')
b'"not JSON"'
Since multipart/form-data can be used to upload data, file-like objects
are read.
>>> from io import BytesIO
>>> marshaller.marshall_from_request(BytesIO(b'A line of data'))
b'A line of data'
Bytes fields used in an entry are stored in the librarian, so their
representation name states that it's a link.
>>> marshaller.representation_name
'data_link'
And the unmarshall() method returns a link that will serve the file.
>>> from lazr.restful import EntryResource
>>> from lazr.restful.example.base.interfaces import ICookbookSet
>>> from zope.component import getUtility
>>> entry_resource = EntryResource(
... getUtility(ICookbookSet).get('Everyday Greens'), request)
(The value would be the BytesStorage instance used to store the
content, but it's not needed.)
>>> marshaller.unmarshall(entry_resource, None)
'http://.../cookbooks/Everyday%20Greens/data'
ASCIILine
---------
ASCIILine is a subclass of Bytes but is marshalled like text.
>>> from zope.schema import ASCIILine
>>> field = ASCIILine(__name__='field')
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
Unicode objects remain Unicode objects.
>>> marshaller.marshall_from_json_data("Test")
'Test'
Note that the marshaller accepts character values where bit 7 is set.
>>> print(marshaller.marshall_from_json_data('int\xe9ressant'))
intéressant
Non-string values like floats are rejected.
>>> marshaller.marshall_from_json_data(1.0)
Traceback (most recent call last):
...
ValueError: got 'float', expected str: 1.0
None is passed through.
>>> print(marshaller.marshall_from_json_data(None))
None
When coming from the request, everything is interpreted as a unicode
string:
>>> marshaller.marshall_from_request('a string')
'a string'
>>> marshaller.marshall_from_request(['a', 'b'])
"['a', 'b']"
>>> marshaller.marshall_from_request('true')
'True'
>>> marshaller.marshall_from_request('')
''
>>> print(marshaller.marshall_from_request('int\xe9ressant'))
intéressant
>>> marshaller.marshall_from_request('1.0')
'1.0'
But again, 'null' is returned as None.
>>> print(marshaller.marshall_from_request('null'))
None
Unlike a Bytes field, an ASCIILine field used in an entry is stored
as an ordinary attribute, hence its representation name is the attribute
name itself.
>>> marshaller.representation_name
'field'
Choice marshallers
==================
The marshaller for a Choice is chosen based on the Choice's
vocabulary.
>>> from zope.schema import Choice
Choice for IVocabularyTokenized
-------------------------------
The default marshaller will use the vocabulary getTermByToken to
retrieve the value to use. It raises an error if the value isn't in the
vocabulary.
>>> field = Choice(__name__='simple', values=[10, 'a value', True])
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data(10)
10
>>> marshaller.marshall_from_json_data("a value")
'a value'
>>> marshaller.marshall_from_json_data(True)
True
>>> marshaller.marshall_from_request('true')
True
>>> marshaller.marshall_from_request('a value')
'a value'
>>> marshaller.marshall_from_request('10')
10
>>> marshaller.marshall_from_json_data('100')
Traceback (most recent call last):
...
ValueError: '100' isn't a valid token
None is always returned unchanged.
>>> print(marshaller.marshall_from_json_data(None))
None
Since this marshaller's Choice fields deal with small, fixed
vocabularies, their unmarshall_to_closeup() implementations describe
the vocabulary as a whole.
>>> for token in marshaller.unmarshall_to_closeup(None, '10'):
... print(sorted(token.items()))
[('title', None), ('token', '10')]
[('title', None), ('token', 'a value')]
[('title', None), ('token', 'True')]
And None is handled correctly.
>>> for token in marshaller.unmarshall_to_closeup(None, None):
... print(sorted(token.items()))
[('title', None), ('token', '10')]
[('title', None), ('token', 'a value')]
[('title', None), ('token', 'True')]
Unicode Exceptions Sidebar
--------------------------
Because tracebacks with high-bit characters in them end up being displayed
like "ValueError: <unprintable ValueError object>" we'll use a helper to
display them the way we want.
>>> def show_ValueError(callable, *args):
... try:
... callable(*args)
... except ValueError as e:
... print('ValueError:', str(e))
Choice of EnumeratedTypes
-------------------------
The JSON representation of the enumerated value is its title. A string
that corresponds to one of the values is marshalled to the appropriate
value. A string that doesn't correspond to any enumerated value results
in a helpful ValueError.
>>> from lazr.restful.example.base.interfaces import Cuisine
>>> field = Choice(vocabulary=Cuisine)
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
>>> marshaller.marshall_from_json_data("Dessert")
<Item Cuisine.DESSERT, Dessert>
>>> show_ValueError(marshaller.marshall_from_json_data, "NoSuchCuisine")
ValueError: Invalid value "NoSuchCuisine". Acceptable values are: ...
>>> show_ValueError(marshaller.marshall_from_json_data, "dessert")
ValueError: Invalid value "dessert". Acceptable values are: ...
None is returned unchanged:
>>> print(marshaller.marshall_from_json_data(None))
None
This marshaller is for a Choice field describing a small, fixed
vocabularies. Because the vocabulary is small, its
unmarshall_to_closeup() implementation can describe the whole
vocabulary.
>>> from operator import itemgetter
>>> for cuisine in sorted(
... marshaller.unmarshall_to_closeup(None, "Triaged"),
... key=itemgetter("token")):
... print(sorted(cuisine.items()))
[('title', 'American'), ('token', 'AMERICAN')]
...
[('title', 'Vegetarian'), ('token', 'VEGETARIAN')]
Objects
-------
An object is marshalled to its URL.
>>> from lazr.restful.fields import Reference
>>> from lazr.restful.example.base.interfaces import ICookbook
>>> reference_field = Reference(schema=ICookbook)
>>> reference_marshaller = getMultiAdapter(
... (reference_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, reference_marshaller)
True
>>> from lazr.restful.example.base.root import COOKBOOKS
>>> cookbook = COOKBOOKS[0]
>>> cookbook_url = reference_marshaller.unmarshall(None, cookbook)
>>> print(cookbook_url)
http://.../cookbooks/Mastering%20the%20Art%20of%20French%20Cooking
A URL is unmarshalled to the underlying object.
>>> cookbook = reference_marshaller.marshall_from_json_data(cookbook_url)
>>> print(cookbook.name)
Mastering the Art of French Cooking
>>> reference_marshaller.marshall_from_json_data("not a url")
Traceback (most recent call last):
...
ValueError: "not a url" is not a valid URI.
>>> reference_marshaller.marshall_from_json_data(4)
Traceback (most recent call last):
...
ValueError: got 'int', expected string: 4
>>> print(reference_marshaller.marshall_from_json_data(None))
None
Relative URLs
~~~~~~~~~~~~~
Relative URLs are interpreted as would be expected:
>>> cookbook = reference_marshaller.marshall_from_json_data(
... '/cookbooks/Everyday%20Greens')
>>> print(cookbook.name)
Everyday Greens
Redirections
~~~~~~~~~~~~
Objects may have multiple URLs, with non-canonical forms redirecting to
canonical forms. The object marshaller accepts URLs that redirect, provided
that the redirected-to resource knows how to find the ultimate target
object.
>>> cookbook = reference_marshaller.marshall_from_json_data(
... '/cookbooks/featured')
>>> print(cookbook.name)
Mastering the Art of French Cooking
>>> from lazr.restful.interfaces import IWebServiceConfiguration
>>> webservice_configuration = getUtility(IWebServiceConfiguration)
>>> webservice_configuration.use_https = True
>>> cookbook = reference_marshaller.marshall_from_json_data(
... '/cookbooks/featured')
>>> print(cookbook.name)
Mastering the Art of French Cooking
>>> webservice_configuration.use_https = False
Collections
-----------
The most complicated kind of marshaller is one that manages a
collection of objects associated with some other object. The generic
collection marshaller will take care of marshalling to the proper
collection type, and of marshalling the individual items using the
marshaller for its value_type. Dictionaries may specify separate
marshallers for their keys and values. If no key and/or value marshallers
are specified, the default SimpleFieldMarshaller is used.
>>> from zope.schema import Dict, List, Tuple, Set
>>> list_of_strings_field = List(value_type=Text())
>>> from lazr.restful.example.base.interfaces import Cuisine
>>> tuple_of_ints_field = Tuple(value_type=Int())
>>> list_of_choices_field = List(
... value_type=Choice(vocabulary=Cuisine))
>>> simple_list_field = List()
>>> set_of_choices_field = Set(
... value_type=Choice(vocabulary=Cuisine)).bind(None)
>>> dict_of_choices_field = Dict(
... key_type=Text(),
... value_type=Choice(vocabulary=Cuisine))
>>> simple_dict_field = Dict()
>>> list_marshaller = getMultiAdapter(
... (list_of_strings_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, list_marshaller)
True
>>> simple_list_marshaller = getMultiAdapter(
... (simple_list_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, simple_list_marshaller)
True
>>> verifyObject(
... IFieldMarshaller, simple_list_marshaller.value_marshaller)
True
>>> tuple_marshaller = getMultiAdapter(
... (tuple_of_ints_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, tuple_marshaller)
True
>>> choice_list_marshaller = getMultiAdapter(
... (list_of_choices_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, choice_list_marshaller)
True
>>> set_marshaller = getMultiAdapter(
... (set_of_choices_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, set_marshaller)
True
>>> dict_marshaller = getMultiAdapter(
... (dict_of_choices_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, dict_marshaller)
True
>>> verifyObject(IFieldMarshaller, dict_marshaller.key_marshaller)
True
>>> verifyObject(IFieldMarshaller, dict_marshaller.value_marshaller)
True
>>> simple_dict_marshaller = getMultiAdapter(
... (simple_dict_field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, simple_dict_marshaller)
True
>>> verifyObject(IFieldMarshaller, simple_dict_marshaller.key_marshaller)
True
>>> verifyObject(
... IFieldMarshaller, simple_dict_marshaller.value_marshaller)
True
For sequences, the only JSON representation for the collection itself is a
list, since that's the only sequence type available in JSON. Anything else
will raise a ValueError.
>>> list_marshaller.marshall_from_json_data(["Test"])
['Test']
>>> list_marshaller.marshall_from_json_data("Test")
Traceback (most recent call last):
...
ValueError: got 'str', expected list: 'Test'
For dicts, we support marshalling from sequences of (name, value) pairs as
well as from dicts or even strings which are interpreted as single element
lists.
>>> pprint_dict(
... dict_marshaller.marshall_from_json_data({"foo": "Vegetarian"}))
{'foo': <Item Cuisine.VEGETARIAN, Vegetarian>}
>>> pprint_dict(
... dict_marshaller.marshall_from_json_data([("foo", "Vegetarian")]))
{'foo': <Item Cuisine.VEGETARIAN, Vegetarian>}
>>> pprint_dict(dict_marshaller.marshall_from_request("foo,Vegetarian"))
{'foo': <Item Cuisine.VEGETARIAN, Vegetarian>}
If we attempt to marshall something other than one of the above data formats,
a ValueError will be raised.
>>> dict_marshaller.marshall_from_json_data("Test")
Traceback (most recent call last):
...
ValueError: got 'str', expected dict: 'Test'
>>> dict_marshaller.marshall_from_request("Test")
Traceback (most recent call last):
...
ValueError: got '['Test']', list of name,value pairs
None is passed through though.
>>> print(list_marshaller.marshall_from_json_data(None))
None
>>> print(dict_marshaller.marshall_from_json_data(None))
None
ValueError is also raised if one of the value in the list doesn't
validate against the more specific marshaller.
>>> list_marshaller.marshall_from_json_data(['Text', 1, 2])
Traceback (most recent call last):
...
ValueError: got 'int', expected str: 1
>>> show_ValueError(choice_list_marshaller.marshall_from_request,
... ['Vegetarian', 'NoSuchChoice'])
ValueError: Invalid value "NoSuchChoice"...
ValueError is also raised if one of the keys or values in the dict doesn't
validate against the more specific marshaller.
>>> dict_marshaller.marshall_from_json_data({1: "Vegetarian"})
Traceback (most recent call last):
...
ValueError: got 'int', expected str: 1
>>> show_ValueError(dict_marshaller.marshall_from_request,
... {'foo': 'NoSuchChoice'})
ValueError: Invalid value "NoSuchChoice"...
The return type is correctly typed to the concrete collection.
>>> tuple_marshaller.marshall_from_json_data([1, 2, 3])
(1, 2, 3)
>>> marshalled_set = set_marshaller.marshall_from_json_data(
... ['Vegetarian', 'Dessert'])
>>> print(type(marshalled_set).__name__)
set
>>> sorted(marshalled_set)
[<Item Cuisine.VEGETARIAN, Vegetarian>, <Item Cuisine.DESSERT, Dessert>]
>>> result = choice_list_marshaller.marshall_from_request(
... ['Vegetarian', 'General'])
>>> print(type(result).__name__)
list
>>> result
[<Item Cuisine.VEGETARIAN, Vegetarian>, <Item Cuisine.GENERAL, General>]
>>> marshalled_dict = dict_marshaller.marshall_from_json_data(
... {'foo': 'Vegetarian', 'bar': 'General'})
>>> print(type(marshalled_dict).__name__)
dict
>>> pprint_dict(marshalled_dict)
{'bar': <Item Cuisine.GENERAL, General>,
'foo': <Item Cuisine.VEGETARIAN, Vegetarian>}
When coming from the request, either a list or a JSON-encoded
representation is accepted. The normal request rules for the
underlying type are then followed. When marshalling dicts, the
list elements are name,value strings which are pulled apart and
used to populate the dict.
>>> list_marshaller.marshall_from_request(['1', '2'])
['1', '2']
>>> list_marshaller.marshall_from_request('["1", "2"]')
['1', '2']
>>> pprint_dict(
... dict_marshaller.marshall_from_request('["foo,Vegetarian"]'))
{'foo': <Item Cuisine.VEGETARIAN, Vegetarian>}
>>> tuple_marshaller.marshall_from_request(['1', '2'])
(1, 2)
Except that 'null' still returns None.
>>> print(list_marshaller.marshall_from_request('null'))
None
>>> print(dict_marshaller.marshall_from_request('null'))
None
Also, as a convenience for web clients, so that they don't have to JSON
encode single-element lists, non-list values are promoted into a
single-element list.
>>> tuple_marshaller.marshall_from_request('1')
(1,)
>>> list_marshaller.marshall_from_request('test')
['test']
The unmarshall() method will return a list containing the unmarshalled
representation of each its members.
>>> sorted(set_marshaller.unmarshall(None, marshalled_set))
['Dessert', 'Vegetarian']
>>> unmarshalled = dict_marshaller.unmarshall(None, marshalled_dict)
>>> print(type(unmarshalled).__name__)
OrderedDict
>>> for key, value in sorted(unmarshalled.items()):
... print('%s: %s' % (key, value))
bar: General
foo: Vegetarian
The unmarshall() method will return None when given None.
>>> print(dict_marshaller.unmarshall(None, None))
None
>>> print(list_marshaller.unmarshall(None, None))
None
>>> print(set_marshaller.unmarshall(None, None))
None
>>> print(tuple_marshaller.unmarshall(None, None))
None
CollectionField
---------------
Since a CollectionField is really a list of references to other
objects, and it is exposed using a dedicated CollectionResource,
the marshaller for this kind of field is simpler. Let's do an example
with a collection of IRecipe objects associated with some
ICookbook. (This might be the list of recipes in the cookbook, or
something like that.)
>>> from lazr.restful.fields import CollectionField
>>> from lazr.restful.example.base.interfaces import IRecipe
>>> field = CollectionField(
... __name__='recipes', value_type=Reference(schema=IRecipe))
>>> marshaller = getMultiAdapter((field, request), IFieldMarshaller)
>>> verifyObject(IFieldMarshaller, marshaller)
True
Instead of serving the actual collection, collection marshallers serve
a URL to that collection.
>>> marshaller.unmarshall(entry_resource, ["recipe 1", "recipe 2"])
'http://.../cookbooks/Everyday%20Greens/recipes'
They also annotate the representation name of the field, so that
clients know this is a link to a collection-type resource.
>>> marshaller.representation_name
'recipes_collection_link'
| PypiClean |
/arxiv_miner-2.0.3.tar.gz/arxiv_miner-2.0.3/throughput_sim/rl_rep.py
import sys
import os
import signal
import time
import multiprocessing
sys.path.insert(0, os.path.join(os.path.abspath(__file__), "throughput_sim"))
from .structures import *
from .data_workers import ClientThread
from .datasource import get_tcp_server
class Agent:
def __init__(self):
pass
def act(self):
pass
STATES = [
    'SLEEP_STATE',
'SUCCESS_STATE',
'FAILURE_STATE',
'ERROR_STATE'
]
class STATE_SPACE:
@staticmethod
def get_onehot_state(state_name):
if state_name not in STATES:
raise Exception("%s State Not Present"%state_name)
oh_vec = [0 for i in range(len(STATES))]
        oh_vec[STATES.index(state_name)] = 1  # mark the slot for this state
        return oh_vec
class State:
number_of_timesteps:None
last_n_state_ocs = [] # Array of last N ONEHOT STATE VEC
wait_time:None
class Action:
def __init__(self):
self.name = self.__class__.__name__
def __call__(self):
raise NotImplementedError()
class Sleep(Action):
def __call__(self,sleep_time):
time.sleep(sleep_time)
class MultiAgentWorkers(multiprocessing.Process):
def __init__(self,host,port,num_workers=100):
super().__init__()
def create_clients(self):
pass
class ServerProcess(multiprocessing.Process):
def __init__(self,HOST="localhost", PORT = 9999):
super().__init__()
self.server = get_tcp_server(HOST,PORT)
def shutdown(self):
self.server.shutdown()
def run(self):
self.server.serve_forever()
class Environment:
def __init__(self,episode_length = 100,HOST="localhost", PORT = 9999, ):
self.process_manager = multiprocessing.Manager()
self.exit = multiprocessing.Event()
self.episode_length = episode_length
self.server_host = HOST
self.server_port = PORT
        signal.signal(signal.SIGINT, self.shutdown)
        self.server_process = ServerProcess(HOST=self.server_host, PORT=self.server_port)
    def shutdown(self, *args):
self.server_process.shutdown()
def step(self):
        pass
/gooddata-api-client-1.5.0.tar.gz/gooddata-api-client-1.5.0/gooddata_api_client/paths/api_v1_entities_workspaces_workspace_id_workspace_settings_object_id/delete.py
from dataclasses import dataclass
import typing_extensions
import urllib3
from gooddata_api_client import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from gooddata_api_client import schemas # noqa: F401
from . import path
# Query params
FilterSchema = schemas.StrSchema
RequestRequiredQueryParams = typing_extensions.TypedDict(
'RequestRequiredQueryParams',
{
}
)
RequestOptionalQueryParams = typing_extensions.TypedDict(
'RequestOptionalQueryParams',
{
'filter': typing.Union[FilterSchema, str, ],
},
total=False
)
class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
pass
request_query_filter = api_client.QueryParameter(
name="filter",
style=api_client.ParameterStyle.FORM,
schema=FilterSchema,
explode=True,
)
# Path params
WorkspaceIdSchema = schemas.StrSchema
ObjectIdSchema = schemas.StrSchema
RequestRequiredPathParams = typing_extensions.TypedDict(
'RequestRequiredPathParams',
{
'workspaceId': typing.Union[WorkspaceIdSchema, str, ],
'objectId': typing.Union[ObjectIdSchema, str, ],
}
)
RequestOptionalPathParams = typing_extensions.TypedDict(
'RequestOptionalPathParams',
{
},
total=False
)
class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
pass
request_path_workspace_id = api_client.PathParameter(
name="workspaceId",
style=api_client.ParameterStyle.SIMPLE,
schema=WorkspaceIdSchema,
required=True,
)
request_path_object_id = api_client.PathParameter(
name="objectId",
style=api_client.ParameterStyle.SIMPLE,
schema=ObjectIdSchema,
required=True,
)
@dataclass
class ApiResponseFor204(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: schemas.Unset = schemas.unset
headers: schemas.Unset = schemas.unset
_response_for_204 = api_client.OpenApiResponse(
response_cls=ApiResponseFor204,
)
_status_code_to_response = {
'204': _response_for_204,
}
class BaseApi(api_client.Api):
@typing.overload
def _delete_entity_workspace_settings_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor204,
]: ...
@typing.overload
def _delete_entity_workspace_settings_oapg(
self,
skip_deserialization: typing_extensions.Literal[True],
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def _delete_entity_workspace_settings_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor204,
api_client.ApiResponseWithoutDeserialization,
]: ...
def _delete_entity_workspace_settings_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
"""
Delete a Setting for Workspace
:param skip_deserialization: If true then api_response.response will be set but
api_response.body and api_response.headers will not be deserialized into schema
class instances
"""
self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
used_path = path.value
_path_params = {}
for parameter in (
request_path_workspace_id,
request_path_object_id,
):
parameter_data = path_params.get(parameter.name, schemas.unset)
if parameter_data is schemas.unset:
continue
serialized_data = parameter.serialize(parameter_data)
_path_params.update(serialized_data)
for k, v in _path_params.items():
used_path = used_path.replace('{%s}' % k, v)
prefix_separator_iterator = None
for parameter in (
request_query_filter,
):
parameter_data = query_params.get(parameter.name, schemas.unset)
if parameter_data is schemas.unset:
continue
if prefix_separator_iterator is None:
prefix_separator_iterator = parameter.get_prefix_separator_iterator()
serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
for serialized_value in serialized_data.values():
used_path += serialized_value
# TODO add cookie handling
response = self.api_client.call_api(
resource_path=used_path,
method='delete'.upper(),
stream=stream,
timeout=timeout,
)
if skip_deserialization:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
else:
response_for_status = _status_code_to_response.get(str(response.status))
if response_for_status:
api_response = response_for_status.deserialize(response, self.api_client.configuration)
else:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
if not 200 <= response.status <= 299:
raise exceptions.ApiException(
status=response.status,
reason=response.reason,
api_response=api_response
)
return api_response
class DeleteEntityWorkspaceSettings(BaseApi):
# this class is used by api classes that refer to endpoints with operationId fn names
@typing.overload
def delete_entity_workspace_settings(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor204,
]: ...
@typing.overload
def delete_entity_workspace_settings(
self,
skip_deserialization: typing_extensions.Literal[True],
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def delete_entity_workspace_settings(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor204,
api_client.ApiResponseWithoutDeserialization,
]: ...
def delete_entity_workspace_settings(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
return self._delete_entity_workspace_settings_oapg(
query_params=query_params,
path_params=path_params,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)
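# Hedged usage sketch (not part of the generated module). It assumes the standard client
# classes emitted by the OpenAPI Python generator (`gooddata_api_client.Configuration`
# and `gooddata_api_client.ApiClient`); the host and ids below are illustrative.
#
#   import gooddata_api_client
#   configuration = gooddata_api_client.Configuration(host="http://localhost:3000")
#   with gooddata_api_client.ApiClient(configuration) as client:
#       api = DeleteEntityWorkspaceSettings(client)
#       api.delete_entity_workspace_settings(
#           path_params={'workspaceId': 'demo_workspace', 'objectId': 'demo_setting'})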
class ApiFordelete(BaseApi):
# this class is used by api classes that refer to endpoints by path and http method names
@typing.overload
def delete(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor204,
]: ...
@typing.overload
def delete(
self,
skip_deserialization: typing_extensions.Literal[True],
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def delete(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor204,
api_client.ApiResponseWithoutDeserialization,
]: ...
def delete(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
path_params: RequestPathParams = frozendict.frozendict(),
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
return self._delete_entity_workspace_settings_oapg(
query_params=query_params,
path_params=path_params,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
) | PypiClean |
/mediapipe-rpi4-0.8.8.tar.gz/mediapipe-rpi4-0.8.8/mediapipe/util/sequence/media_sequence.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from mediapipe.util.sequence import media_sequence_util
msu = media_sequence_util
_HAS_DYNAMIC_ATTRIBUTES = True
################################## METADATA #################################
# A unique identifier for each example.
EXAMPLE_ID_KEY = "example/id"
# The name of the data set, including the version.
EXAMPLE_DATASET_NAME_KEY = "example/dataset_name"
# The relative path to the data on disk from some root directory.
CLIP_DATA_PATH_KEY = "clip/data_path"
# Any identifier for the media beyond the data path.
CLIP_MEDIA_ID_KEY = "clip/media_id"
# Yet another alternative identifier.
ALTERNATIVE_CLIP_MEDIA_ID_KEY = "clip/alternative_media_id"
# The encoded bytes for storing media directly in the SequenceExample.
CLIP_ENCODED_MEDIA_BYTES_KEY = "clip/encoded_media_bytes"
# The start time for the encoded media if not preserved during encoding.
CLIP_ENCODED_MEDIA_START_TIMESTAMP_KEY = "clip/encoded_media_start_timestamp"
# The start time, in microseconds, for the start of the clip in the media.
CLIP_START_TIMESTAMP_KEY = "clip/start/timestamp"
# The end time, in microseconds, for the end of the clip in the media.
CLIP_END_TIMESTAMP_KEY = "clip/end/timestamp"
# A list of label indices for this clip.
CLIP_LABEL_INDEX_KEY = "clip/label/index"
# A list of label strings for this clip.
CLIP_LABEL_STRING_KEY = "clip/label/string"
# A list of label confidences for this clip.
CLIP_LABEL_CONFIDENCE_KEY = "clip/label/confidence"
msu.create_bytes_context_feature(
"example_id", EXAMPLE_ID_KEY, module_dict=globals())
msu.create_bytes_context_feature(
"example_dataset_name", EXAMPLE_DATASET_NAME_KEY, module_dict=globals())
msu.create_bytes_context_feature(
"clip_media_id", CLIP_MEDIA_ID_KEY, module_dict=globals())
msu.create_bytes_context_feature(
"clip_alternative_media_id", ALTERNATIVE_CLIP_MEDIA_ID_KEY,
module_dict=globals())
msu.create_bytes_context_feature(
"clip_encoded_media_bytes", CLIP_ENCODED_MEDIA_BYTES_KEY,
module_dict=globals())
msu.create_bytes_context_feature(
"clip_data_path", CLIP_DATA_PATH_KEY, module_dict=globals())
msu.create_int_context_feature(
"clip_encoded_media_start_timestamp",
CLIP_ENCODED_MEDIA_START_TIMESTAMP_KEY, module_dict=globals())
msu.create_int_context_feature(
"clip_start_timestamp", CLIP_START_TIMESTAMP_KEY, module_dict=globals())
msu.create_int_context_feature(
"clip_end_timestamp", CLIP_END_TIMESTAMP_KEY, module_dict=globals())
msu.create_bytes_list_context_feature(
"clip_label_string", CLIP_LABEL_STRING_KEY, module_dict=globals())
msu.create_int_list_context_feature(
"clip_label_index", CLIP_LABEL_INDEX_KEY, module_dict=globals())
msu.create_float_list_context_feature(
"clip_label_confidence", CLIP_LABEL_CONFIDENCE_KEY, module_dict=globals())
################################## SEGMENTS #################################
# A list of segment start times in microseconds.
SEGMENT_START_TIMESTAMP_KEY = "segment/start/timestamp"
# A list of indices marking the first frame index >= the start timestamp.
SEGMENT_START_INDEX_KEY = "segment/start/index"
# A list of segment end times in microseconds.
SEGMENT_END_TIMESTAMP_KEY = "segment/end/timestamp"
# A list of indices marking the last frame index <= the end timestamp.
SEGMENT_END_INDEX_KEY = "segment/end/index"
# A list with the label index for each segment.
# Multiple labels for the same segment are encoded as repeated segments.
SEGMENT_LABEL_INDEX_KEY = "segment/label/index"
# A list with the label string for each segment.
# Multiple labels for the same segment are encoded as repeated segments.
SEGMENT_LABEL_STRING_KEY = "segment/label/string"
# A list with the label confidence for each segment.
# Multiple labels for the same segment are encoded as repeated segments.
SEGMENT_LABEL_CONFIDENCE_KEY = "segment/label/confidence"
msu.create_bytes_list_context_feature(
"segment_label_string", SEGMENT_LABEL_STRING_KEY, module_dict=globals())
msu.create_int_list_context_feature(
"segment_start_timestamp",
SEGMENT_START_TIMESTAMP_KEY, module_dict=globals())
msu.create_int_list_context_feature(
"segment_start_index", SEGMENT_START_INDEX_KEY, module_dict=globals())
msu.create_int_list_context_feature(
"segment_end_timestamp", SEGMENT_END_TIMESTAMP_KEY, module_dict=globals())
msu.create_int_list_context_feature(
"segment_end_index", SEGMENT_END_INDEX_KEY, module_dict=globals())
msu.create_int_list_context_feature(
"segment_label_index", SEGMENT_LABEL_INDEX_KEY, module_dict=globals())
msu.create_float_list_context_feature(
"segment_label_confidence",
SEGMENT_LABEL_CONFIDENCE_KEY, module_dict=globals())
########################## REGIONS / BOUNDING BOXES #########################
# Normalized coordinates of bounding boxes are provided in four lists to avoid
# order ambiguity. We provide additional accessors for complete bounding boxes
# below.
REGION_BBOX_YMIN_KEY = "region/bbox/ymin"
REGION_BBOX_XMIN_KEY = "region/bbox/xmin"
REGION_BBOX_YMAX_KEY = "region/bbox/ymax"
REGION_BBOX_XMAX_KEY = "region/bbox/xmax"
# The point and radius can denote keypoints.
REGION_POINT_X_KEY = "region/point/x"
REGION_POINT_Y_KEY = "region/point/y"
REGION_RADIUS_KEY = "region/radius"
# The 3D point can denote keypoints.
REGION_3D_POINT_X_KEY = "region/3d_point/x"
REGION_3D_POINT_Y_KEY = "region/3d_point/y"
REGION_3D_POINT_Z_KEY = "region/3d_point/z"
# The number of regions at that timestep.
REGION_NUM_REGIONS_KEY = "region/num_regions"
# Whether that timestep is annotated for regions.
# (Disambiguates between multiple meanings of num_regions = 0.)
REGION_IS_ANNOTATED_KEY = "region/is_annotated"
# A list indicating if each region is generated (1) or manually annotated (0)
REGION_IS_GENERATED_KEY = "region/is_generated"
# A list indicating if each region is occluded (1) or visible (0)
REGION_IS_OCCLUDED_KEY = "region/is_occluded"
# Lists with a label for each region.
# Multiple labels for the same region require duplicating the region.
REGION_LABEL_INDEX_KEY = "region/label/index"
REGION_LABEL_STRING_KEY = "region/label/string"
REGION_LABEL_CONFIDENCE_KEY = "region/label/confidence"
# Lists with a track identifier for each region.
# Multiple track identifier for the same region require duplicating the region.
REGION_TRACK_INDEX_KEY = "region/track/index"
REGION_TRACK_STRING_KEY = "region/track/string"
REGION_TRACK_CONFIDENCE_KEY = "region/track/confidence"
# Lists with a class for each region. In general, prefer to use the label
# fields. These class fields exist to distinguish tracks when different classes
# have overlapping track ids.
REGION_CLASS_INDEX_KEY = "region/class/index"
REGION_CLASS_STRING_KEY = "region/class/string"
REGION_CLASS_CONFIDENCE_KEY = "region/class/confidence"
# The timestamp of the region annotation in microseconds.
REGION_TIMESTAMP_KEY = "region/timestamp"
# The original timestamp in microseconds for region annotations.
# If regions are aligned to image frames, this field preserves the original
# timestamps.
REGION_UNMODIFIED_TIMESTAMP_KEY = "region/unmodified_timestamp"
# The list of region parts expected in this example.
REGION_PARTS_KEY = "region/parts"
# The dimensions of each embedding per region / bounding box.
REGION_EMBEDDING_DIMENSIONS_PER_REGION_KEY = (
"region/embedding/dimensions_per_region")
# The format encoding embeddings as strings.
REGION_EMBEDDING_FORMAT_KEY = "region/embedding/format"
# An embedding for each region. The length of each list must be the product of
# the number of regions and the product of the embedding dimensions.
REGION_EMBEDDING_FLOAT_KEY = "region/embedding/float"
# A string encoded embedding for each region.
REGION_EMBEDDING_ENCODED_KEY = "region/embedding/encoded"
# The confidence of the embedding.
REGION_EMBEDDING_CONFIDENCE_KEY = "region/embedding/confidence"
def _create_region_with_prefix(name, prefix):
"""Create multiple accessors for region based data."""
msu.create_int_feature_list(name + "_num_regions", REGION_NUM_REGIONS_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_feature_list(name + "_is_annotated", REGION_IS_ANNOTATED_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_feature_list(
name + "_is_occluded", REGION_IS_OCCLUDED_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_feature_list(
name + "_is_generated", REGION_IS_GENERATED_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_feature_list(name + "_timestamp", REGION_TIMESTAMP_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_feature_list(
name + "_unmodified_timestamp", REGION_UNMODIFIED_TIMESTAMP_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_feature_list(
name + "_label_string", REGION_LABEL_STRING_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_feature_list(
name + "_label_index", REGION_LABEL_INDEX_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(
name + "_label_confidence", REGION_LABEL_CONFIDENCE_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_feature_list(
name + "_class_string", REGION_CLASS_STRING_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_feature_list(
name + "_class_index", REGION_CLASS_INDEX_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(
name + "_class_confidence", REGION_CLASS_CONFIDENCE_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_feature_list(
name + "_track_string", REGION_TRACK_STRING_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_feature_list(
name + "_track_index", REGION_TRACK_INDEX_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(
name + "_track_confidence", REGION_TRACK_CONFIDENCE_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_ymin", REGION_BBOX_YMIN_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_xmin", REGION_BBOX_XMIN_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_ymax", REGION_BBOX_YMAX_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_xmax", REGION_BBOX_XMAX_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_point_x", REGION_POINT_X_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_point_y", REGION_POINT_Y_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(
name + "_3d_point_x", REGION_3D_POINT_X_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(
name + "_3d_point_y", REGION_3D_POINT_Y_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(
name + "_3d_point_z", REGION_3D_POINT_Z_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_context_feature(name + "_parts",
REGION_PARTS_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_context_feature(
name + "_embedding_dimensions_per_region",
REGION_EMBEDDING_DIMENSIONS_PER_REGION_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_context_feature(name + "_embedding_format",
REGION_EMBEDDING_FORMAT_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_embedding_floats",
REGION_EMBEDDING_FLOAT_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_feature_list(name + "_embedding_encoded",
REGION_EMBEDDING_ENCODED_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_list_feature_list(name + "_embedding_confidence",
REGION_EMBEDDING_CONFIDENCE_KEY,
prefix=prefix, module_dict=globals())
# pylint: disable=undefined-variable
def get_prefixed_bbox_at(index, sequence_example, prefix):
return np.stack((
get_bbox_ymin_at(index, sequence_example, prefix=prefix),
get_bbox_xmin_at(index, sequence_example, prefix=prefix),
get_bbox_ymax_at(index, sequence_example, prefix=prefix),
get_bbox_xmax_at(index, sequence_example, prefix=prefix)),
1)
def add_prefixed_bbox(values, sequence_example, prefix):
values = np.array(values)
if values.size == 0:
add_bbox_ymin([], sequence_example, prefix=prefix)
add_bbox_xmin([], sequence_example, prefix=prefix)
add_bbox_ymax([], sequence_example, prefix=prefix)
add_bbox_xmax([], sequence_example, prefix=prefix)
else:
add_bbox_ymin(values[:, 0], sequence_example, prefix=prefix)
add_bbox_xmin(values[:, 1], sequence_example, prefix=prefix)
add_bbox_ymax(values[:, 2], sequence_example, prefix=prefix)
add_bbox_xmax(values[:, 3], sequence_example, prefix=prefix)
def get_prefixed_bbox_size(sequence_example, prefix):
return get_bbox_ymin_size(sequence_example, prefix=prefix)
def has_prefixed_bbox(sequence_example, prefix):
return has_bbox_ymin(sequence_example, prefix=prefix)
def clear_prefixed_bbox(sequence_example, prefix):
clear_bbox_ymin(sequence_example, prefix=prefix)
clear_bbox_xmin(sequence_example, prefix=prefix)
clear_bbox_ymax(sequence_example, prefix=prefix)
clear_bbox_xmax(sequence_example, prefix=prefix)
def get_prefixed_point_at(index, sequence_example, prefix):
return np.stack((
get_bbox_point_y_at(index, sequence_example, prefix=prefix),
get_bbox_point_x_at(index, sequence_example, prefix=prefix)),
1)
def add_prefixed_point(values, sequence_example, prefix):
add_bbox_point_y(values[:, 0], sequence_example, prefix=prefix)
add_bbox_point_x(values[:, 1], sequence_example, prefix=prefix)
def get_prefixed_point_size(sequence_example, prefix):
return get_bbox_point_y_size(sequence_example, prefix=prefix)
def has_prefixed_point(sequence_example, prefix):
return has_bbox_point_y(sequence_example, prefix=prefix)
def clear_prefixed_point(sequence_example, prefix):
clear_bbox_point_y(sequence_example, prefix=prefix)
clear_bbox_point_x(sequence_example, prefix=prefix)
def get_prefixed_3d_point_at(index, sequence_example, prefix):
return np.stack((
get_bbox_3d_point_x_at(index, sequence_example, prefix=prefix),
get_bbox_3d_point_y_at(index, sequence_example, prefix=prefix),
get_bbox_3d_point_z_at(index, sequence_example, prefix=prefix)),
1)
def add_prefixed_3d_point(values, sequence_example, prefix):
add_bbox_3d_point_x(values[:, 0], sequence_example, prefix=prefix)
add_bbox_3d_point_y(values[:, 1], sequence_example, prefix=prefix)
add_bbox_3d_point_z(values[:, 2], sequence_example, prefix=prefix)
def get_prefixed_3d_point_size(sequence_example, prefix):
return get_bbox_3d_point_x_size(sequence_example, prefix=prefix)
def has_prefixed_3d_point(sequence_example, prefix):
return has_bbox_3d_point_x(sequence_example, prefix=prefix)
def clear_prefixed_3d_point(sequence_example, prefix):
clear_bbox_3d_point_x(sequence_example, prefix=prefix)
clear_bbox_3d_point_y(sequence_example, prefix=prefix)
clear_bbox_3d_point_z(sequence_example, prefix=prefix)
# pylint: enable=undefined-variable
msu.add_functions_to_module({
"get_" + name + "_at":
msu.function_with_default(get_prefixed_bbox_at, prefix),
"add_" + name:
msu.function_with_default(add_prefixed_bbox, prefix),
"get_" + name + "_size":
msu.function_with_default(get_prefixed_bbox_size, prefix),
"has_" + name:
msu.function_with_default(has_prefixed_bbox, prefix),
"clear_" + name:
msu.function_with_default(clear_prefixed_bbox, prefix),
}, module_dict=globals())
msu.add_functions_to_module({
"get_" + name + "_point_at":
msu.function_with_default(get_prefixed_point_at, prefix),
"add_" + name + "_point":
msu.function_with_default(add_prefixed_point, prefix),
"get_" + name + "_point_size":
msu.function_with_default(get_prefixed_point_size, prefix),
"has_" + name + "_point":
msu.function_with_default(has_prefixed_point, prefix),
"clear_" + name + "_point":
msu.function_with_default(clear_prefixed_point, prefix),
}, module_dict=globals())
msu.add_functions_to_module({
"get_" + name + "_3d_point_at":
msu.function_with_default(get_prefixed_3d_point_at, prefix),
"add_" + name + "_3d_point":
msu.function_with_default(add_prefixed_3d_point, prefix),
"get_" + name + "_3d_point_size":
msu.function_with_default(get_prefixed_3d_point_size, prefix),
"has_" + name + "_3d_point":
msu.function_with_default(has_prefixed_3d_point, prefix),
"clear_" + name + "_3d_point":
msu.function_with_default(clear_prefixed_3d_point, prefix),
}, module_dict=globals())
PREDICTED_PREFIX = "PREDICTED"
_create_region_with_prefix("bbox", "")
_create_region_with_prefix("predicted_bbox", PREDICTED_PREFIX)
################################### IMAGES #################################
# The format the images are encoded as (e.g. "JPEG", "PNG")
IMAGE_FORMAT_KEY = "image/format"
# The number of channels in the image.
IMAGE_CHANNELS_KEY = "image/channels"
# The colorspace of the image.
IMAGE_COLORSPACE_KEY = "image/colorspace"
# The height of the image in pixels.
IMAGE_HEIGHT_KEY = "image/height"
# The width of the image in pixels.
IMAGE_WIDTH_KEY = "image/width"
# The frame rate, in images per second, of the media.
IMAGE_FRAME_RATE_KEY = "image/frame_rate"
# The maximum values, if the images were saturated and normalized for encoding.
IMAGE_SATURATION_KEY = "image/saturation"
# The listing from discrete image values (as indices) to class indices.
IMAGE_CLASS_LABEL_INDEX_KEY = "image/class/label/index"
# The listing from discrete image values (as indices) to class strings.
IMAGE_CLASS_LABEL_STRING_KEY = "image/class/label/string"
# The listing from discrete instance indices to class indices they embody.
IMAGE_OBJECT_CLASS_INDEX_KEY = "image/object/class/index"
# The encoded image frame.
IMAGE_ENCODED_KEY = "image/encoded"
# Multiple images from the same timestep (e.g. multiview video).
IMAGE_MULTI_ENCODED_KEY = "image/multi_encoded"
# The timestamp of the frame in microseconds.
IMAGE_TIMESTAMP_KEY = "image/timestamp"
# A per image label if specific frames have labels.
# If time spans have labels, segments are preferred to allow changing rates.
IMAGE_LABEL_INDEX_KEY = "image/label/index"
IMAGE_LABEL_STRING_KEY = "image/label/string"
IMAGE_LABEL_CONFIDENCE_KEY = "image/label/confidence"
# The path of the image file if it did not come from a media clip.
IMAGE_DATA_PATH_KEY = "image/data_path"
def _create_image_with_prefix(name, prefix):
"""Create multiple accessors for image based data."""
msu.create_bytes_context_feature(name + "_format", IMAGE_FORMAT_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_context_feature(name + "_colorspace", IMAGE_COLORSPACE_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_context_feature(name + "_channels", IMAGE_CHANNELS_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_context_feature(name + "_height", IMAGE_HEIGHT_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_context_feature(name + "_width", IMAGE_WIDTH_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_feature_list(name + "_encoded", IMAGE_ENCODED_KEY,
prefix=prefix, module_dict=globals())
msu.create_float_context_feature(name + "_frame_rate", IMAGE_FRAME_RATE_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_context_feature(
name + "_class_label_string", IMAGE_CLASS_LABEL_STRING_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_context_feature(
name + "_class_label_index", IMAGE_CLASS_LABEL_INDEX_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_list_context_feature(
name + "_object_class_index", IMAGE_OBJECT_CLASS_INDEX_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_context_feature(name + "_data_path", IMAGE_DATA_PATH_KEY,
prefix=prefix, module_dict=globals())
msu.create_int_feature_list(name + "_timestamp", IMAGE_TIMESTAMP_KEY,
prefix=prefix, module_dict=globals())
msu.create_bytes_list_feature_list(name + "_multi_encoded",
IMAGE_MULTI_ENCODED_KEY, prefix=prefix,
module_dict=globals())
FORWARD_FLOW_PREFIX = "FORWARD_FLOW"
CLASS_SEGMENTATION_PREFIX = "CLASS_SEGMENTATION"
INSTANCE_SEGMENTATION_PREFIX = "INSTANCE_SEGMENTATION"
_create_image_with_prefix("image", "")
_create_image_with_prefix("forward_flow", FORWARD_FLOW_PREFIX)
_create_image_with_prefix("class_segmentation", CLASS_SEGMENTATION_PREFIX)
_create_image_with_prefix("instance_segmentation", INSTANCE_SEGMENTATION_PREFIX)
################################## TEXT #################################
# Which language text tokens are likely to be in.
TEXT_LANGUAGE_KEY = "text/language"
# A large block of text that applies to the media.
TEXT_CONTEXT_CONTENT_KEY = "text/context/content"
# The text contents for a given time.
TEXT_CONTENT_KEY = "text/content"
# The start time for the text becoming relevant.
TEXT_TIMESTAMP_KEY = "text/timestamp"
# The duration where the text is relevant.
TEXT_DURATION_KEY = "text/duration"
# The confidence that this is the correct text.
TEXT_CONFIDENCE_KEY = "text/confidence"
# A floating point embedding corresponding to the text.
TEXT_EMBEDDING_KEY = "text/embedding"
# An integer id corresponding to the text.
TEXT_TOKEN_ID_KEY = "text/token/id"
msu.create_bytes_context_feature(
"text_language", TEXT_LANGUAGE_KEY, module_dict=globals())
msu.create_bytes_context_feature(
"text_context_content", TEXT_CONTEXT_CONTENT_KEY, module_dict=globals())
msu.create_bytes_feature_list(
"text_content", TEXT_CONTENT_KEY, module_dict=globals())
msu.create_int_feature_list(
"text_timestamp", TEXT_TIMESTAMP_KEY, module_dict=globals())
msu.create_int_feature_list(
"text_duration", TEXT_DURATION_KEY, module_dict=globals())
msu.create_float_feature_list(
"text_confidence", TEXT_CONFIDENCE_KEY, module_dict=globals())
msu.create_float_list_feature_list(
"text_embedding", TEXT_EMBEDDING_KEY, module_dict=globals())
msu.create_int_feature_list(
"text_token_id", TEXT_TOKEN_ID_KEY, module_dict=globals())
################################## FEATURES #################################
# The dimensions of the feature.
FEATURE_DIMENSIONS_KEY = "feature/dimensions"
# The rate the features are extracted per second of media.
FEATURE_RATE_KEY = "feature/rate"
# The encoding format if any for the feature.
FEATURE_BYTES_FORMAT_KEY = "feature/bytes/format"
# For audio, the rate the samples are extracted per second of media.
FEATURE_SAMPLE_RATE_KEY = "feature/sample_rate"
# For audio, the number of channels per extracted feature.
FEATURE_NUM_CHANNELS_KEY = "feature/num_channels"
# For audio, the number of samples per extracted feature.
FEATURE_NUM_SAMPLES_KEY = "feature/num_samples"
# For audio, the rate the features are extracted per second of media.
FEATURE_PACKET_RATE_KEY = "feature/packet_rate"
# For audio, the original audio sampling rate the feature is derived from.
FEATURE_AUDIO_SAMPLE_RATE_KEY = "feature/audio_sample_rate"
# The feature as a list of floats.
FEATURE_FLOATS_KEY = "feature/floats"
# The feature as a list of bytes. May be encoded.
FEATURE_BYTES_KEY = "feature/bytes"
# The feature as a list of ints.
FEATURE_INTS_KEY = "feature/ints"
# The timestamp, in microseconds, of the feature.
FEATURE_TIMESTAMP_KEY = "feature/timestamp"
# It is occasionally useful to indicate that a feature applies to a given range.
# This should be used for features only and annotations should be provided as
# segments.
FEATURE_DURATION_KEY = "feature/duration"
# Encodes an optional confidence score for the generated features.
FEATURE_CONFIDENCE_KEY = "feature/confidence"
msu.create_int_list_context_feature(
"feature_dimensions", FEATURE_DIMENSIONS_KEY, module_dict=globals())
msu.create_float_context_feature(
"feature_rate", FEATURE_RATE_KEY, module_dict=globals())
msu.create_bytes_context_feature(
"feature_bytes_format", FEATURE_BYTES_FORMAT_KEY, module_dict=globals())
msu.create_float_context_feature(
"feature_sample_rate", FEATURE_SAMPLE_RATE_KEY, module_dict=globals())
msu.create_int_context_feature(
"feature_num_channels", FEATURE_NUM_CHANNELS_KEY, module_dict=globals())
msu.create_int_context_feature(
"feature_num_samples", FEATURE_NUM_SAMPLES_KEY, module_dict=globals())
msu.create_float_context_feature(
"feature_packet_rate", FEATURE_PACKET_RATE_KEY, module_dict=globals())
msu.create_float_context_feature(
"feature_audio_sample_rate", FEATURE_AUDIO_SAMPLE_RATE_KEY,
module_dict=globals())
msu.create_float_list_feature_list(
"feature_floats", FEATURE_FLOATS_KEY, module_dict=globals())
msu.create_bytes_list_feature_list(
"feature_bytes", FEATURE_BYTES_KEY, module_dict=globals())
msu.create_int_list_feature_list(
"feature_ints", FEATURE_INTS_KEY, module_dict=globals())
msu.create_int_feature_list(
"feature_timestamp", FEATURE_TIMESTAMP_KEY, module_dict=globals())
msu.create_int_list_feature_list(
"feature_duration", FEATURE_DURATION_KEY, module_dict=globals())
msu.create_float_list_feature_list(
"feature_confidence", FEATURE_CONFIDENCE_KEY, module_dict=globals()) | PypiClean |
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_9/models/performance.py | import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_9 import models
class Performance(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'bytes_per_mirrored_write': 'int',
'bytes_per_op': 'int',
'bytes_per_read': 'int',
'bytes_per_write': 'int',
'mirrored_write_bytes_per_sec': 'int',
'mirrored_writes_per_sec': 'int',
'qos_rate_limit_usec_per_mirrored_write_op': 'int',
'qos_rate_limit_usec_per_read_op': 'int',
'qos_rate_limit_usec_per_write_op': 'int',
'queue_usec_per_mirrored_write_op': 'int',
'queue_usec_per_read_op': 'int',
'queue_usec_per_write_op': 'int',
'read_bytes_per_sec': 'int',
'reads_per_sec': 'int',
'san_usec_per_mirrored_write_op': 'int',
'san_usec_per_read_op': 'int',
'san_usec_per_write_op': 'int',
'service_usec_per_mirrored_write_op': 'int',
'service_usec_per_read_op': 'int',
'service_usec_per_write_op': 'int',
'time': 'int',
'usec_per_mirrored_write_op': 'int',
'usec_per_read_op': 'int',
'usec_per_write_op': 'int',
'write_bytes_per_sec': 'int',
'writes_per_sec': 'int',
'service_usec_per_read_op_cache_reduction': 'float'
}
attribute_map = {
'bytes_per_mirrored_write': 'bytes_per_mirrored_write',
'bytes_per_op': 'bytes_per_op',
'bytes_per_read': 'bytes_per_read',
'bytes_per_write': 'bytes_per_write',
'mirrored_write_bytes_per_sec': 'mirrored_write_bytes_per_sec',
'mirrored_writes_per_sec': 'mirrored_writes_per_sec',
'qos_rate_limit_usec_per_mirrored_write_op': 'qos_rate_limit_usec_per_mirrored_write_op',
'qos_rate_limit_usec_per_read_op': 'qos_rate_limit_usec_per_read_op',
'qos_rate_limit_usec_per_write_op': 'qos_rate_limit_usec_per_write_op',
'queue_usec_per_mirrored_write_op': 'queue_usec_per_mirrored_write_op',
'queue_usec_per_read_op': 'queue_usec_per_read_op',
'queue_usec_per_write_op': 'queue_usec_per_write_op',
'read_bytes_per_sec': 'read_bytes_per_sec',
'reads_per_sec': 'reads_per_sec',
'san_usec_per_mirrored_write_op': 'san_usec_per_mirrored_write_op',
'san_usec_per_read_op': 'san_usec_per_read_op',
'san_usec_per_write_op': 'san_usec_per_write_op',
'service_usec_per_mirrored_write_op': 'service_usec_per_mirrored_write_op',
'service_usec_per_read_op': 'service_usec_per_read_op',
'service_usec_per_write_op': 'service_usec_per_write_op',
'time': 'time',
'usec_per_mirrored_write_op': 'usec_per_mirrored_write_op',
'usec_per_read_op': 'usec_per_read_op',
'usec_per_write_op': 'usec_per_write_op',
'write_bytes_per_sec': 'write_bytes_per_sec',
'writes_per_sec': 'writes_per_sec',
'service_usec_per_read_op_cache_reduction': 'service_usec_per_read_op_cache_reduction'
}
required_args = {
}
def __init__(
self,
bytes_per_mirrored_write=None, # type: int
bytes_per_op=None, # type: int
bytes_per_read=None, # type: int
bytes_per_write=None, # type: int
mirrored_write_bytes_per_sec=None, # type: int
mirrored_writes_per_sec=None, # type: int
qos_rate_limit_usec_per_mirrored_write_op=None, # type: int
qos_rate_limit_usec_per_read_op=None, # type: int
qos_rate_limit_usec_per_write_op=None, # type: int
queue_usec_per_mirrored_write_op=None, # type: int
queue_usec_per_read_op=None, # type: int
queue_usec_per_write_op=None, # type: int
read_bytes_per_sec=None, # type: int
reads_per_sec=None, # type: int
san_usec_per_mirrored_write_op=None, # type: int
san_usec_per_read_op=None, # type: int
san_usec_per_write_op=None, # type: int
service_usec_per_mirrored_write_op=None, # type: int
service_usec_per_read_op=None, # type: int
service_usec_per_write_op=None, # type: int
time=None, # type: int
usec_per_mirrored_write_op=None, # type: int
usec_per_read_op=None, # type: int
usec_per_write_op=None, # type: int
write_bytes_per_sec=None, # type: int
writes_per_sec=None, # type: int
service_usec_per_read_op_cache_reduction=None, # type: float
):
"""
Keyword args:
bytes_per_mirrored_write (int): The average I/O size per mirrored write. Measured in bytes.
bytes_per_op (int): The average I/O size for both read and write (all) operations.
bytes_per_read (int): The average I/O size per read. Measured in bytes.
bytes_per_write (int): The average I/O size per write. Measured in bytes.
mirrored_write_bytes_per_sec (int): The number of mirrored bytes written per second.
mirrored_writes_per_sec (int): The number of mirrored writes per second.
qos_rate_limit_usec_per_mirrored_write_op (int): The average time it takes the array to process a mirrored I/O write request. Measured in microseconds.
qos_rate_limit_usec_per_read_op (int): The average time spent waiting due to QoS rate limiting for a read request. Measured in microseconds.
qos_rate_limit_usec_per_write_op (int): The average time that a write I/O request spends waiting as a result of the volume reaching its QoS bandwidth limit. Measured in microseconds.
queue_usec_per_mirrored_write_op (int): The average time that a mirrored write I/O request spends in the array waiting to be served. Measured in microseconds.
queue_usec_per_read_op (int): The average time that a read I/O request spends in the array waiting to be served. Measured in microseconds.
queue_usec_per_write_op (int): The average time that a write I/O request spends in the array waiting to be served. Measured in microseconds.
read_bytes_per_sec (int): The number of bytes read per second.
reads_per_sec (int): The number of read requests processed per second.
san_usec_per_mirrored_write_op (int): The average time required to transfer data from the initiator to the array for a mirrored write request. Measured in microseconds.
san_usec_per_read_op (int): The average time required to transfer data from the array to the initiator for a read request. Measured in microseconds.
san_usec_per_write_op (int): The average time required to transfer data from the initiator to the array for a write request. Measured in microseconds.
service_usec_per_mirrored_write_op (int): The average time required for the array to service a mirrored write request. Measured in microseconds.
service_usec_per_read_op (int): The average time required for the array to service a read request. Measured in microseconds.
service_usec_per_write_op (int): The average time required for the array to service a write request. Measured in microseconds.
time (int): The time when the sample performance data was taken. Measured in milliseconds since the UNIX epoch.
usec_per_mirrored_write_op (int): The average time it takes the array to process a mirrored I/O write request. Measured in microseconds. The average time does not include SAN time, queue time, or QoS rate limit time.
usec_per_read_op (int): The average time it takes the array to process an I/O read request. Measured in microseconds. The average time does not include SAN time, queue time, or QoS rate limit time.
usec_per_write_op (int): The average time it takes the array to process an I/O write request. Measured in microseconds. The average time does not include SAN time, queue time, or QoS rate limit time.
write_bytes_per_sec (int): The number of bytes written per second.
writes_per_sec (int): The number of write requests processed per second.
service_usec_per_read_op_cache_reduction (float): The percentage reduction in `service_usec_per_read_op` due to data cache hits. For example, a value of 0.25 indicates that the value of `service_usec_per_read_op` is 25% lower than it would have been without any data cache hits.
"""
if bytes_per_mirrored_write is not None:
self.bytes_per_mirrored_write = bytes_per_mirrored_write
if bytes_per_op is not None:
self.bytes_per_op = bytes_per_op
if bytes_per_read is not None:
self.bytes_per_read = bytes_per_read
if bytes_per_write is not None:
self.bytes_per_write = bytes_per_write
if mirrored_write_bytes_per_sec is not None:
self.mirrored_write_bytes_per_sec = mirrored_write_bytes_per_sec
if mirrored_writes_per_sec is not None:
self.mirrored_writes_per_sec = mirrored_writes_per_sec
if qos_rate_limit_usec_per_mirrored_write_op is not None:
self.qos_rate_limit_usec_per_mirrored_write_op = qos_rate_limit_usec_per_mirrored_write_op
if qos_rate_limit_usec_per_read_op is not None:
self.qos_rate_limit_usec_per_read_op = qos_rate_limit_usec_per_read_op
if qos_rate_limit_usec_per_write_op is not None:
self.qos_rate_limit_usec_per_write_op = qos_rate_limit_usec_per_write_op
if queue_usec_per_mirrored_write_op is not None:
self.queue_usec_per_mirrored_write_op = queue_usec_per_mirrored_write_op
if queue_usec_per_read_op is not None:
self.queue_usec_per_read_op = queue_usec_per_read_op
if queue_usec_per_write_op is not None:
self.queue_usec_per_write_op = queue_usec_per_write_op
if read_bytes_per_sec is not None:
self.read_bytes_per_sec = read_bytes_per_sec
if reads_per_sec is not None:
self.reads_per_sec = reads_per_sec
if san_usec_per_mirrored_write_op is not None:
self.san_usec_per_mirrored_write_op = san_usec_per_mirrored_write_op
if san_usec_per_read_op is not None:
self.san_usec_per_read_op = san_usec_per_read_op
if san_usec_per_write_op is not None:
self.san_usec_per_write_op = san_usec_per_write_op
if service_usec_per_mirrored_write_op is not None:
self.service_usec_per_mirrored_write_op = service_usec_per_mirrored_write_op
if service_usec_per_read_op is not None:
self.service_usec_per_read_op = service_usec_per_read_op
if service_usec_per_write_op is not None:
self.service_usec_per_write_op = service_usec_per_write_op
if time is not None:
self.time = time
if usec_per_mirrored_write_op is not None:
self.usec_per_mirrored_write_op = usec_per_mirrored_write_op
if usec_per_read_op is not None:
self.usec_per_read_op = usec_per_read_op
if usec_per_write_op is not None:
self.usec_per_write_op = usec_per_write_op
if write_bytes_per_sec is not None:
self.write_bytes_per_sec = write_bytes_per_sec
if writes_per_sec is not None:
self.writes_per_sec = writes_per_sec
if service_usec_per_read_op_cache_reduction is not None:
self.service_usec_per_read_op_cache_reduction = service_usec_per_read_op_cache_reduction
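    # Hedged usage sketch (not part of the generated module); the numbers are illustrative:
    #   perf = Performance(reads_per_sec=1200, writes_per_sec=300,
    #                      usec_per_read_op=450, usec_per_write_op=820)
    #   perf.to_dict()  # -> {'reads_per_sec': 1200, 'writes_per_sec': 300, ...}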
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `Performance`".format(key))
if key == "bytes_per_mirrored_write" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_mirrored_write`, must be a value greater than or equal to `0`")
if key == "bytes_per_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_op`, must be a value greater than or equal to `0`")
if key == "bytes_per_read" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_read`, must be a value greater than or equal to `0`")
if key == "bytes_per_write" and value is not None:
if value < 0:
raise ValueError("Invalid value for `bytes_per_write`, must be a value greater than or equal to `0`")
if key == "mirrored_write_bytes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `mirrored_write_bytes_per_sec`, must be a value greater than or equal to `0`")
if key == "mirrored_writes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `mirrored_writes_per_sec`, must be a value greater than or equal to `0`")
if key == "qos_rate_limit_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `qos_rate_limit_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "qos_rate_limit_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `qos_rate_limit_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "qos_rate_limit_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `qos_rate_limit_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "queue_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `queue_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "queue_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `queue_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "queue_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `queue_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "read_bytes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `read_bytes_per_sec`, must be a value greater than or equal to `0`")
if key == "reads_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `reads_per_sec`, must be a value greater than or equal to `0`")
if key == "san_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `san_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "san_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `san_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "san_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `san_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "service_usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `service_usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "service_usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `service_usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "service_usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `service_usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "usec_per_mirrored_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `usec_per_mirrored_write_op`, must be a value greater than or equal to `0`")
if key == "usec_per_read_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `usec_per_read_op`, must be a value greater than or equal to `0`")
if key == "usec_per_write_op" and value is not None:
if value < 0:
raise ValueError("Invalid value for `usec_per_write_op`, must be a value greater than or equal to `0`")
if key == "write_bytes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `write_bytes_per_sec`, must be a value greater than or equal to `0`")
if key == "writes_per_sec" and value is not None:
if value < 0:
raise ValueError("Invalid value for `writes_per_sec`, must be a value greater than or equal to `0`")
if key == "service_usec_per_read_op_cache_reduction" and value is not None:
if value > 1.0:
raise ValueError("Invalid value for `service_usec_per_read_op_cache_reduction`, value must be less than or equal to `1.0`")
if value < 0.0:
raise ValueError("Invalid value for `service_usec_per_read_op_cache_reduction`, must be a value greater than or equal to `0.0`")
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def __getitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `Performance`".format(key))
return object.__getattribute__(self, key)
def __setitem__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `Performance`".format(key))
object.__setattr__(self, key, value)
def __delitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `Performance`".format(key))
object.__delattr__(self, key)
def keys(self):
return self.attribute_map.keys()
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Performance, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Performance):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other | PypiClean |
/bitsmiths_monitor-2.2.2-py3-none-any.whl/bs_monitor/braze/blazy_load.py |
import datetime
import uuid
import mettle.braze
import mettle.lib
from mettle.io.iserializable import ISerializable
class bLazyLoad(ISerializable):
__slots__ = ("offset", "limit")
def __init__(self,
offset: int = 0,
limit: int = 0):
"""
Constructor.
:param offset:
:param limit:
"""
self.offset = offset
self.limit = limit
def clear(self):
self.offset = 0
self.limit = 0
def _name(self) -> str:
"""
Get the struct name.
:return: Name of the struct
"""
return 'bLazyLoad'
def _serialize(self, _w: mettle.io.IWriter, _oname: str = None):
"""
Serialize this struct.
:param _w:
:param _oname:
"""
if not _oname:
_oname = self._name()
_w.write_start(_oname)
_w.write_int64("offset", self.offset)
_w.write_int64("limit", self.limit)
_w.write_end(_oname)
def _deserialize(self, _r: mettle.io.IReader, _oname: str = None) -> int:
"""
Deserialize this struct.
:param _r:
:param _oname:
"""
        if _oname is None:
_oname = self._name()
_r.read_start(_oname)
self.offset = _r.read_int64("offset")
self.limit = _r.read_int64("limit")
_r.read_end(_oname)
@staticmethod
def _cache_davs(dvc: mettle.lib.DavCache = None):
"""
Cache the DAV into the dav cache (or create one) and return it.
:param dvc: Target cache, if None a new one is created and returned.
"""
if not dvc:
dvc = mettle.lib.DavCache()
dvc.add_targ("offset", mettle.lib.Dav(mettle.lib.Dav.eDavType.Min, 0))
dvc.add_targ("limit", mettle.lib.Dav(mettle.lib.Dav.eDavType.Min, 0))
return dvc
def _get_davs(self, dvc=None):
return self._cache_davs(dvc)
def __repr__(self) -> str:
return '<bLazyLoad [offset:%r, limit:%r]>' % (
self.offset, self.limit)
# List class
class List(list, ISerializable):
def _name(self) -> str:
"""
Get list name
"""
return 'bLazyLoad.List'
def _serialize(self, _w: mettle.io.IWriter, _oname: str = None):
"""
Serialize the list
:param _w:
:param _oname:
"""
            if _oname is None:
_oname = self._name()
_w.write_start_list(_oname, len(self))
for _rec in self:
_rec._serialize(_w)
_w.write_end(_oname)
def _deserialize(self, _r: mettle.io.IReader, _oname: str = None) -> int:
"""
Deserialize the list
:param _r:
:param _oname:
"""
            if _oname is None:
_oname = self._name()
_cnt = _r.read_start_list(_oname)
while _cnt >= 1:
_rec = bLazyLoad()
_cnt -= 1
_rec._deserialize(_r)
self.append(_rec)
_r.read_end(_oname)
def _get_davs(self, dvc: mettle.lib.DavCache = None) -> int:
"""
Cache the davs
:param dvc:
"""
return bLazyLoad._cache_davs(dvc) | PypiClean |
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayCommerceKidsAccountBindRequest.py | import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayCommerceKidsAccountBindModel import AlipayCommerceKidsAccountBindModel
class AlipayCommerceKidsAccountBindRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayCommerceKidsAccountBindModel):
self._biz_content = value
else:
self._biz_content = AlipayCommerceKidsAccountBindModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.commerce.kids.account.bind'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
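    # Hedged usage sketch (not part of the generated module); the biz_content field
    # names are illustrative, not taken from the Alipay specification:
    #   request = AlipayCommerceKidsAccountBindRequest()
    #   request.biz_content = {"child_user_id": "2088xxxx"}
    #   params = request.get_params()  # dict ready to be signed and sent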
def get_multipart_params(self):
multipart_params = dict()
return multipart_params | PypiClean |
/seinfeld_laugh_corpus-1.0.13.tar.gz/seinfeld_laugh_corpus-1.0.13/seinfeld_laugh_corpus/humor_recogniser/screenplay.py | import re
from collections import namedtuple
Line = namedtuple('Line', ['character', 'txt', 'start', 'end', 'is_funny', 'laugh_time'])
Laugh = namedtuple('Laugh', ['time'])
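# Hedged usage sketch (not part of the original module; the corpus file name below is
# illustrative). `Screenplay.from_file` further down builds these tuples from disk:
#   screenplay = Screenplay.from_file("path/to/seinfeld.season01.episode02.txt",
#                                     fold_laughs=True)
#   funny_lines = [line for line in screenplay if line.is_funny]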
class Screenplay:
"""
Represents a Seinfeld screenplay in the memory.
"""
def __init__(self, filename):
self.lines = []
self.filename = filename
m = re.findall(r'\d+', filename)
self.season, self.episode = int(m[0]), int(m[1])
self.episode_name = filename[16:-7].replace('.', ' ')
def __iter__(self):
for line in self.lines:
yield line
def __getitem__(self, item):
return self.lines[item]
def __str__(self):
return "S%.2dE%.2d %s" % (self.season, self.episode, self.episode_name)
def __repr__(self):
return "Screenplay('S%.2dE%.2d %s')" % (self.season, self.episode, self.episode_name)
def fold_laughs(self):
result = []
for i, line in enumerate(self.lines):
if i+1 < len(self.lines) and isinstance(self.lines[i+1], Laugh)\
and isinstance(line, Line):
result.append(Line(character=line.character, txt=line.txt, start=line.start, end=line.end,
is_funny=True, laugh_time=self.lines[i+1].time))
elif isinstance(line, Laugh):
pass
else:
result.append(line)
self.lines = result
@classmethod
def from_file(cls, file_path, fold_laughs=False):
filename = file_path.rsplit('/',1)[-1]
screenplay = Screenplay(filename)
with open(file_path, encoding='utf8', errors='ignore') as f:
lines = f.__iter__()
for line in lines:
if line[0] == '#':
current_character = line.replace('#', '').strip()
else:
start = float(line)
txt = lines.readline()
if '**LOL**' in txt:
screenplay.lines.append(Laugh(time=start))
else:
for i in range(3):
# maximum 3 lines in one subtitle
end = lines.readline()
try:
end = float(end)
except ValueError:
txt += ('\n'+end)
else:
break
screenplay.lines.append(Line(txt=txt.replace('\n', ' ').strip(),
start=start, end=end, character=current_character,
is_funny=None, laugh_time=None))
if fold_laughs:
screenplay.fold_laughs()
return screenplay | PypiClean |
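# --- Illustrative usage sketch for the module above (not part of the original file) ---
# The file path is a placeholder; the parser expects the season and episode numbers to be
# the first two digit groups in the file name, '#'-prefixed character markers, float
# timestamps and '**LOL**' laugh markers, as handled in from_file() above.
from seinfeld_laugh_corpus.humor_recogniser.screenplay import Screenplay
screenplay = Screenplay.from_file("path/to/seinfeld.s07.e03.merged.txt", fold_laughs=True)
print(screenplay)  # e.g. "S07E03 ..."
for line in screenplay:
    if line.is_funny:
        print(line.character, line.txt, line.laugh_time)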
/python_wikibase-0.1.0-py3-none-any.whl/python_wikibase/data_types/data_type.py | from abc import abstractmethod
from python_wikibase.value import Value
class DataType(Value):
"""Abstract class for Wikibase data types (see
https://www.mediawiki.org/wiki/Wikibase/DataModel)"""
@abstractmethod
def unmarshal(self, data_value):
pass
@abstractmethod
def marshal(self):
pass
@abstractmethod
def create(self, *args, **kwargs):
pass
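# Illustrative sketch only (not part of the library): a minimal concrete DataType showing
# how the three abstract methods are typically filled in. The {"value": ...} layout mirrors
# the data_value dicts handled by unmarshal_data_value() below; real data types ship with
# the package and are created through the py_wb factory rather than defined like this.
class ExampleStringType(DataType):
    def unmarshal(self, data_value):
        self.value = data_value["value"]
        return self
    def marshal(self):
        return {"value": self.value}
    def create(self, value):
        self.value = value
        return self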
def unmarshal_data_value(py_wb, main_snak):
# Return None if snak type is "novalue" or "somevalue"
snak_type = main_snak["snaktype"]
if snak_type != "value":
return None
if "datatype" in main_snak:
data_type = main_snak["datatype"]
else:
data_type = "string"
data_value = main_snak["datavalue"]
# DataType
if data_type in ["monolingualtext", "string"]:
return py_wb.StringValue().unmarshal(data_value)
elif data_type == "commonsMedia":
raise NotImplementedError # TODO
elif data_type == "external-id":
return py_wb.ExternalId().unmarshal(data_value)
elif data_type == "geo-shape":
raise NotImplementedError # TODO
elif data_type == "globe-coordinate":
return py_wb.GeoLocation().unmarshal(data_value)
elif data_type == "math":
raise NotImplementedError # TODO
elif data_type == "quantity":
return py_wb.Quantity().unmarshal(data_value)
elif data_type == "tabular-data":
raise NotImplementedError # TODO
elif data_type == "time":
raise NotImplementedError # TODO
elif data_type == "url":
raise NotImplementedError # TODO
elif data_type == "wikibase-form":
raise NotImplementedError # TODO
elif data_type == "wikibase-lexeme":
raise NotImplementedError # TODO
elif data_type == "wikibase-sense":
raise NotImplementedError # TODO
# Entity
elif data_type == "wikibase-item":
item = py_wb.Item()
item.entity_id = data_value["value"]["id"]
return item
elif data_type == "wikibase-property":
prop = py_wb.Property()
prop.entity_id = data_value["value"]["id"]
return prop
else:
raise NotImplementedError(f'No unmarshalling function for data type "{data_type}" defined')
def check_data_type(value, prop):
"""Check if value is of correct data type
:param value: Value whose data type shall be checked
:type value: Value
:param prop: Property whose data type the value shall be compared with
:type prop: Property
"""
data_type = value.__class__.__name__
if data_type != prop.data_type:
raise ValueError(f"Value must be instance of {prop.data_type} class") | PypiClean |
/galts-trade-api-0.2.0.tar.gz/galts-trade-api-0.2.0/src/galts_trade_api/transport/real.py | import asyncio
import datetime
import json
from dataclasses import dataclass
from decimal import Decimal
from functools import partial
from multiprocessing import Event, Pipe, Process
from multiprocessing.connection import Connection
from typing import Any, Awaitable, Callable, Dict, List, MutableMapping, Optional, Type
import aio_pika
from . import DepthConsumeKey, MessageConsumerCollection, PipeRequest, PipeResponseRouter, \
TransportFactory, TransportFactoryException
from .exchange_info_client import ExchangeInfoClient
from .grpc_utils import generate_request_id
from ..asyncio_helper import AsyncProgramEnv, run_program_forever
from ..tools import Singleton
AioPikaConsumeCallable = Callable[[aio_pika.IncomingMessage], Any]
@dataclass(frozen=True)
class GetExchangeEntitiesRequest(PipeRequest):
dsn: str
timeout: float
@dataclass(frozen=True)
class ConsumePriceDepthRequest(PipeRequest):
dsn: str
exchange: str
consume_keys: frozenset
class RabbitConnection(
metaclass=Singleton,
singleton_hash_args=[0],
singleton_hash_kwargs=['dsn']
):
def __init__(self, dsn: str):
self._dsn: str = str(dsn).strip()
self._connection: Optional[aio_pika.Connection] = None
@property
def connection(self):
return self._connection
async def create_channel(self, prefetch_count: int = 100) -> aio_pika.Channel:
if self._connection is None:
self._connection = await aio_pika.connect_robust(self._dsn)
channel = await self._connection.channel()
await channel.set_qos(prefetch_count=prefetch_count)
return channel
class RabbitConsumer:
def __init__(
self,
channel: aio_pika.Channel,
exchange_name: str,
on_message: AioPikaConsumeCallable
):
self._channel = channel
self._exchange_name = str(exchange_name).strip()
self._on_message = on_message
self._exchange: Optional[aio_pika.Exchange] = None
self._queue: Optional[aio_pika.Queue] = None
@property
def channel(self):
return self._channel
@property
def exchange_name(self):
return self._exchange_name
@property
def exchange(self):
return self._exchange
@property
def queue(self):
return self._queue
async def create_queue(self) -> aio_pika.Queue:
self._exchange = await self.channel.declare_exchange(self._exchange_name, passive=True)
self._queue = await self.channel.declare_queue(exclusive=True)
await self.queue.consume(self._on_message, no_ack=True)
return self.queue
class RealTransportFactory(TransportFactory):
def __init__(
self,
exchange_info_dsn: str,
depth_scraping_queue_dsn: str,
depth_scraping_queue_exchange: str,
exchange_info_get_entities_timeout: float = 5.0,
process_ready_timeout: float = 2.0,
):
super().__init__()
self._process: Optional[Process] = None
self._process_ready = Event()
self._parent_connection, self._child_connection = Pipe()
self._response_router: Optional[PipeResponseRouter] = None
def sanity_string(value: str) -> str: return str(value).strip()
self._exchange_info_dsn = sanity_string(exchange_info_dsn)
self._exchange_info_get_entities_timeout = float(exchange_info_get_entities_timeout)
self._depth_scraping_queue_dsn = sanity_string(depth_scraping_queue_dsn)
self._depth_scraping_queue_exchange = sanity_string(depth_scraping_queue_exchange)
self._process_ready_timeout = float(process_ready_timeout)
@property
def exchange_info_dsn(self):
return self._exchange_info_dsn
@property
def exchange_info_get_entities_timeout(self):
return self._exchange_info_get_entities_timeout
@property
def depth_scraping_queue_dsn(self):
return self._depth_scraping_queue_dsn
@property
def depth_scraping_queue_exchange(self):
return self._depth_scraping_queue_exchange
@property
def process_ready_timeout(self):
return self._process_ready_timeout
async def init(self, loop_debug: Optional[bool] = None) -> None:
if self._process:
raise RuntimeError('A process for RealTransportFactory should be created only once')
self._process = RealTransportProcess(
loop_debug=loop_debug,
ready_event=self._process_ready,
connection=self._child_connection
)
self._process.start()
if not self._process_ready.wait(self.process_ready_timeout):
raise RuntimeError('Failed to initialize RealTransportFactory in time')
self._response_router = PipeResponseRouter(self._parent_connection)
task = asyncio.create_task(self._response_router.start())
def task_done_cb(t: asyncio.Task) -> None:
if t.cancelled():
self.shutdown()
return
if t.exception():
self.shutdown()
raise t.exception()
task.add_done_callback(task_done_cb)
def shutdown(self) -> None:
if self._process:
self._process.terminate()
async def get_exchange_entities(
self,
on_response: Callable[..., Awaitable]
) -> MessageConsumerCollection:
request = GetExchangeEntitiesRequest(
self.exchange_info_dsn,
self.exchange_info_get_entities_timeout
)
result = self._response_router.prepare_consumers_of_response(request)
result.add_consumer(on_response)
self._parent_connection.send(request)
return result
async def consume_price_depth(
self,
on_response: Callable[..., Awaitable],
consume_keys: Optional[List[DepthConsumeKey]] = None
) -> MessageConsumerCollection:
request = ConsumePriceDepthRequest(
self.depth_scraping_queue_dsn,
self.depth_scraping_queue_exchange,
            frozenset(consume_keys or [])
)
result = self._response_router.prepare_consumers_of_response(request)
result.add_consumer(on_response)
self._parent_connection.send(request)
return result
class RealTransportProcess(Process):
def __init__(
self,
*args,
loop_debug: Optional[bool] = None,
ready_event: Event,
connection: Connection,
poll_delay: float = 0.001,
**kwargs
):
super().__init__(*args, **kwargs)
self._loop_debug = loop_debug
self._ready_event = ready_event
self._connection = connection
self._poll_delay = float(poll_delay)
self._handlers: MutableMapping[Type[PipeRequest], Callable[..., Awaitable]] = {}
self.add_handler(GetExchangeEntitiesRequest, self._get_exchange_entities)
self.add_handler(ConsumePriceDepthRequest, self._consume_price_depth)
@property
def poll_delay(self):
return self._poll_delay
def add_handler(
self,
request_type: Type[PipeRequest],
handler: Callable[..., Awaitable]
) -> None:
if request_type in self._handlers:
raise ValueError(f'Handler for {request_type} already registered')
self._handlers[request_type] = handler
def run(self) -> None:
run_program_forever(self.main, loop_debug=self._loop_debug)
async def main(self, program_env: AsyncProgramEnv) -> None:
def exception_handler(_, context: Dict) -> None:
if 'exception' in context:
self._notify_owner_process(context['exception'])
program_env.exception_handler_patch = exception_handler
self._ready_event.set()
while True:
if not self._connection.poll():
await asyncio.sleep(self.poll_delay)
continue
request = self._connection.recv()
handler = self._find_handler_for_request(request)
            # @TODO The tasks should be collected for cancellation when this main task has
            # been cancelled. But when I tried to do this I got a segmentation fault from
            # the unit tests. Maybe it depends on the Python version.
asyncio.create_task(handler(request))
async def _get_exchange_entities(self, request: GetExchangeEntitiesRequest) -> None:
client = ExchangeInfoClient.factory(request.dsn, timeout_get_entities=request.timeout)
entities = client.get_entities(generate_request_id())
self._respond_to_owner_request(request, entities)
client.destroy()
async def _consume_price_depth(self, request: ConsumePriceDepthRequest) -> None:
connection = RabbitConnection(request.dsn)
channel = await connection.create_channel(100)
cb = partial(self._price_depth_callback, request)
consumer = RabbitConsumer(channel, request.exchange, cb)
queue = await consumer.create_queue()
if not request.consume_keys:
routing_keys = ['#']
else:
routing_keys = [k.format_for_rabbitmq() for k in request.consume_keys]
for routing_key in routing_keys:
await queue.bind(consumer.exchange, routing_key)
def _respond_to_owner_request(self, request: PipeRequest, content: Any) -> None:
self._connection.send([request, content])
def _notify_owner_process(self, original_exception: Exception) -> None:
        # This wrapping is required so we don't trigger unnecessary errors about
        # serializing the exception. Otherwise a framework user would see undesired
        # spam about pickling RLock etc. in the logs.
wrapped_exception = TransportFactoryException('An error in the transport process')
        # Unfortunately this line has no effect in Python 3.7 because this attribute
        # is not packed by Connection.send(), so Connection.recv() on the other side
        # unpacks the data without it. Hence we don't get real exception wrapping here.
wrapped_exception.__cause__ = original_exception
self._connection.send([wrapped_exception])
def _find_handler_for_request(self, request: PipeRequest) -> Callable[..., Awaitable]:
request_type = type(request)
if request_type not in self._handlers:
raise ValueError(f'No handler for request type {request_type}')
return self._handlers[request_type]
def _price_depth_callback(
self,
request: ConsumePriceDepthRequest,
message: aio_pika.IncomingMessage
) -> None:
body = json.loads(message.body)
for kind in body['depth'].keys():
new_depth = []
for price_level in body['depth'][kind]:
rate = Decimal(price_level[0])
amount = Decimal(price_level[1])
if len(price_level) > 2 and price_level[2] is not None:
fee = Decimal(price_level[2])
else:
fee = None
new_depth.append((rate, amount, fee,))
body['depth'][kind] = tuple(new_depth)
args = [
body['exchange'],
body['market'],
body['symbol'],
datetime.datetime.fromisoformat(body['now']),
body['depth']['bids'],
body['depth']['asks'],
]
self._respond_to_owner_request(request, args) | PypiClean |
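# --- Illustrative usage sketch for RealTransportFactory above (not part of the original file) ---
# The DSN strings and the exchange name are placeholders; init() spawns the transport process
# and consume_price_depth() registers an async callback for price depth updates.
import asyncio
from galts_trade_api.transport.real import RealTransportFactory
async def on_depth(*args):
    print("depth update:", args)
async def main():
    factory = RealTransportFactory(
        exchange_info_dsn="localhost:50051",  # placeholder gRPC endpoint
        depth_scraping_queue_dsn="amqp://guest:guest@localhost/",  # placeholder RabbitMQ DSN
        depth_scraping_queue_exchange="depth_scraping",  # placeholder exchange name
    )
    await factory.init()
    await factory.consume_price_depth(on_depth, consume_keys=[])  # empty list binds the '#' routing key
    await asyncio.sleep(10)
    factory.shutdown()
asyncio.run(main())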
/zohocrmsdk2_0-5.1.0.tar.gz/zohocrmsdk2_0-5.1.0/zcrmsdk/src/com/zoho/crm/api/currencies/currency.py | try:
from zcrmsdk.src.com.zoho.crm.api.exception import SDKException
from zcrmsdk.src.com.zoho.crm.api.util import Constants
except Exception:
from ..exception import SDKException
from ..util import Constants
class Currency(object):
def __init__(self):
"""Creates an instance of Currency"""
self.__symbol = None
self.__created_time = None
self.__is_active = None
self.__exchange_rate = None
self.__format = None
self.__created_by = None
self.__prefix_symbol = None
self.__is_base = None
self.__modified_time = None
self.__name = None
self.__modified_by = None
self.__id = None
self.__iso_code = None
self.__key_modified = dict()
def get_symbol(self):
"""
The method to get the symbol
Returns:
string: A string representing the symbol
"""
return self.__symbol
def set_symbol(self, symbol):
"""
The method to set the value to symbol
Parameters:
symbol (string) : A string representing the symbol
"""
if symbol is not None and not isinstance(symbol, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: symbol EXPECTED TYPE: str', None, None)
self.__symbol = symbol
self.__key_modified['symbol'] = 1
def get_created_time(self):
"""
The method to get the created_time
Returns:
datetime: An instance of datetime
"""
return self.__created_time
def set_created_time(self, created_time):
"""
The method to set the value to created_time
Parameters:
created_time (datetime) : An instance of datetime
"""
from datetime import datetime
if created_time is not None and not isinstance(created_time, datetime):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: created_time EXPECTED TYPE: datetime', None, None)
self.__created_time = created_time
self.__key_modified['created_time'] = 1
def get_is_active(self):
"""
The method to get the is_active
Returns:
bool: A bool representing the is_active
"""
return self.__is_active
def set_is_active(self, is_active):
"""
The method to set the value to is_active
Parameters:
is_active (bool) : A bool representing the is_active
"""
if is_active is not None and not isinstance(is_active, bool):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: is_active EXPECTED TYPE: bool', None, None)
self.__is_active = is_active
self.__key_modified['is_active'] = 1
def get_exchange_rate(self):
"""
The method to get the exchange_rate
Returns:
string: A string representing the exchange_rate
"""
return self.__exchange_rate
def set_exchange_rate(self, exchange_rate):
"""
The method to set the value to exchange_rate
Parameters:
exchange_rate (string) : A string representing the exchange_rate
"""
if exchange_rate is not None and not isinstance(exchange_rate, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: exchange_rate EXPECTED TYPE: str', None, None)
self.__exchange_rate = exchange_rate
self.__key_modified['exchange_rate'] = 1
def get_format(self):
"""
The method to get the format
Returns:
Format: An instance of Format
"""
return self.__format
def set_format(self, format):
"""
The method to set the value to format
Parameters:
format (Format) : An instance of Format
"""
try:
from zcrmsdk.src.com.zoho.crm.api.currencies.format import Format
except Exception:
from .format import Format
if format is not None and not isinstance(format, Format):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: format EXPECTED TYPE: Format', None, None)
self.__format = format
self.__key_modified['format'] = 1
def get_created_by(self):
"""
The method to get the created_by
Returns:
User: An instance of User
"""
return self.__created_by
def set_created_by(self, created_by):
"""
The method to set the value to created_by
Parameters:
created_by (User) : An instance of User
"""
try:
from zcrmsdk.src.com.zoho.crm.api.users import User
except Exception:
from ..users import User
if created_by is not None and not isinstance(created_by, User):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: created_by EXPECTED TYPE: User', None, None)
self.__created_by = created_by
self.__key_modified['created_by'] = 1
def get_prefix_symbol(self):
"""
The method to get the prefix_symbol
Returns:
bool: A bool representing the prefix_symbol
"""
return self.__prefix_symbol
def set_prefix_symbol(self, prefix_symbol):
"""
The method to set the value to prefix_symbol
Parameters:
prefix_symbol (bool) : A bool representing the prefix_symbol
"""
if prefix_symbol is not None and not isinstance(prefix_symbol, bool):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: prefix_symbol EXPECTED TYPE: bool', None, None)
self.__prefix_symbol = prefix_symbol
self.__key_modified['prefix_symbol'] = 1
def get_is_base(self):
"""
The method to get the is_base
Returns:
bool: A bool representing the is_base
"""
return self.__is_base
def set_is_base(self, is_base):
"""
The method to set the value to is_base
Parameters:
is_base (bool) : A bool representing the is_base
"""
if is_base is not None and not isinstance(is_base, bool):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: is_base EXPECTED TYPE: bool', None, None)
self.__is_base = is_base
self.__key_modified['is_base'] = 1
def get_modified_time(self):
"""
The method to get the modified_time
Returns:
datetime: An instance of datetime
"""
return self.__modified_time
def set_modified_time(self, modified_time):
"""
The method to set the value to modified_time
Parameters:
modified_time (datetime) : An instance of datetime
"""
from datetime import datetime
if modified_time is not None and not isinstance(modified_time, datetime):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modified_time EXPECTED TYPE: datetime', None, None)
self.__modified_time = modified_time
self.__key_modified['modified_time'] = 1
def get_name(self):
"""
The method to get the name
Returns:
string: A string representing the name
"""
return self.__name
def set_name(self, name):
"""
The method to set the value to name
Parameters:
name (string) : A string representing the name
"""
if name is not None and not isinstance(name, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: name EXPECTED TYPE: str', None, None)
self.__name = name
self.__key_modified['name'] = 1
def get_modified_by(self):
"""
The method to get the modified_by
Returns:
User: An instance of User
"""
return self.__modified_by
def set_modified_by(self, modified_by):
"""
The method to set the value to modified_by
Parameters:
modified_by (User) : An instance of User
"""
try:
from zcrmsdk.src.com.zoho.crm.api.users import User
except Exception:
from ..users import User
if modified_by is not None and not isinstance(modified_by, User):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modified_by EXPECTED TYPE: User', None, None)
self.__modified_by = modified_by
self.__key_modified['modified_by'] = 1
def get_id(self):
"""
The method to get the id
Returns:
int: An int representing the id
"""
return self.__id
def set_id(self, id):
"""
The method to set the value to id
Parameters:
id (int) : An int representing the id
"""
if id is not None and not isinstance(id, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)
self.__id = id
self.__key_modified['id'] = 1
def get_iso_code(self):
"""
The method to get the iso_code
Returns:
string: A string representing the iso_code
"""
return self.__iso_code
def set_iso_code(self, iso_code):
"""
The method to set the value to iso_code
Parameters:
iso_code (string) : A string representing the iso_code
"""
if iso_code is not None and not isinstance(iso_code, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: iso_code EXPECTED TYPE: str', None, None)
self.__iso_code = iso_code
self.__key_modified['iso_code'] = 1
def is_key_modified(self, key):
"""
The method to check if the user has modified the given key
Parameters:
key (string) : A string representing the key
Returns:
int: An int representing the modification
"""
if key is not None and not isinstance(key, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)
if key in self.__key_modified:
return self.__key_modified.get(key)
return None
def set_key_modified(self, key, modification):
"""
The method to mark the given key as modified
Parameters:
key (string) : A string representing the key
modification (int) : An int representing the modification
"""
if key is not None and not isinstance(key, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)
if modification is not None and not isinstance(modification, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None)
self.__key_modified[key] = modification | PypiClean |
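# --- Illustrative usage sketch for the Currency class above (not part of the original SDK file) ---
# Only fields that are explicitly set end up in the key-modified map; the values below are examples.
from zcrmsdk.src.com.zoho.crm.api.currencies.currency import Currency
currency = Currency()
currency.set_name("US Dollar")
currency.set_iso_code("USD")
currency.set_symbol("$")
currency.set_exchange_rate("1.000000000")  # the setter expects a string
print(currency.is_key_modified("iso_code"))  # 1
print(currency.is_key_modified("is_active"))  # None, because it was never set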
/lingua_language_detector-1.3.2-py3-none-any.whl/lingua/detector.py |
import numpy as np
from collections import Counter
from dataclasses import dataclass
from decimal import Decimal
from math import exp
from typing import (
Counter as TypedCounter,
Dict,
FrozenSet,
NamedTuple,
Optional,
List,
)
from ._constant import (
CHARS_TO_LANGUAGES_MAPPING,
JAPANESE_CHARACTER_SET,
LETTERS,
TOKENS_WITHOUT_WHITESPACE,
TOKENS_WITH_OPTIONAL_WHITESPACE,
)
from .language import Language, _Alphabet
from ._model import _TrainingDataLanguageModel, _TestDataLanguageModel
_UNIGRAM_MODELS: Dict[Language, np.ndarray] = {}
_BIGRAM_MODELS: Dict[Language, np.ndarray] = {}
_TRIGRAM_MODELS: Dict[Language, np.ndarray] = {}
_QUADRIGRAM_MODELS: Dict[Language, np.ndarray] = {}
_FIVEGRAM_MODELS: Dict[Language, np.ndarray] = {}
_CACHE: Dict[Language, Dict[str, Optional[float]]] = {}
_HIGH_ACCURACY_MODE_MAX_TEXT_LENGTH = 120
def _split_text_into_words(text: str) -> List[str]:
return LETTERS.findall(text.lower())
def _load_language_models(
language: Language,
ngram_length: int,
) -> Optional[Dict[Language, np.ndarray]]:
loaded_model = _TrainingDataLanguageModel.from_numpy_binary_file(
language, ngram_length
)
if loaded_model is None:
return None
return {language: loaded_model}
def _sum_up_probabilities(
probabilities: List[Dict[Language, float]],
unigram_counts: Optional[TypedCounter[Language]],
filtered_languages: FrozenSet[Language],
) -> Dict[Language, Decimal]:
summed_up_probabilities = {}
for language in filtered_languages:
result = 0.0
for dct in probabilities:
if language in dct:
result += dct[language]
if unigram_counts is not None and language in unigram_counts:
result /= unigram_counts[language]
if result != 0:
# Use Decimal instead of float to prevent numerical underflow
summed_up_probabilities[language] = _compute_exponent(result)
return summed_up_probabilities
def _compute_exponent(value: float) -> Decimal:
exponent = exp(value)
if exponent > 0:
return Decimal(exponent)
return Decimal(value).exp()
def _sort_confidence_values(values: List["ConfidenceValue"]):
values.sort(key=lambda tup: (-tup[1], tup[0]))
def _collect_languages_with_unique_characters(
languages: FrozenSet[Language],
) -> FrozenSet[Language]:
return frozenset(
{language for language in languages if language._unique_characters is not None}
)
def _collect_one_language_alphabets(
languages: FrozenSet[Language],
) -> Dict[_Alphabet, Language]:
return {
alphabet: language
for alphabet, language in _Alphabet.all_supporting_single_language().items()
if language in languages
}
def _merge_adjacent_results(
results: List["DetectionResult"], mergeable_result_indices: List[int]
):
mergeable_result_indices.sort(reverse=True)
for i in mergeable_result_indices:
if i == 0:
results[i + 1] = DetectionResult(
start_index=results[i].start_index,
end_index=results[i + 1].end_index,
word_count=results[i + 1].word_count,
language=results[i + 1].language,
)
else:
results[i - 1] = DetectionResult(
start_index=results[i - 1].start_index,
end_index=results[i].end_index,
word_count=results[i - 1].word_count,
language=results[i - 1].language,
)
del results[i]
if len(results) == 1:
break
class ConfidenceValue(NamedTuple):
"""This class describes a language's confidence value.
Attributes:
language (Language):
The language associated with this confidence value.
value (float):
The language's confidence value which lies between 0.0 and 1.0.
"""
language: Language
value: float
class DetectionResult(NamedTuple):
"""This class describes a contiguous single-language
text section within a possibly mixed-language text.
Attributes:
start_index (int):
The start index of the identified single-language substring.
end_index (int):
The end index of the identified single-language substring.
word_count (int):
The number of words being part of the identified
single-language substring.
language (Language):
The detected language of the identified single-language substring.
"""
start_index: int
end_index: int
word_count: int
language: Language
@dataclass
class LanguageDetector:
"""This class detects the language of text."""
_languages: FrozenSet[Language]
_minimum_relative_distance: float
_is_low_accuracy_mode_enabled: bool
_languages_with_unique_characters: FrozenSet[Language]
_one_language_alphabets: Dict[_Alphabet, Language]
_unigram_language_models: Dict[Language, np.ndarray]
_bigram_language_models: Dict[Language, np.ndarray]
_trigram_language_models: Dict[Language, np.ndarray]
_quadrigram_language_models: Dict[Language, np.ndarray]
_fivegram_language_models: Dict[Language, np.ndarray]
_cache: Dict[Language, Dict[str, Optional[float]]]
def __repr__(self):
languages = sorted([language.name for language in self._languages])
return (
"LanguageDetector("
f"_languages={languages}, "
f"_minimum_relative_distance={self._minimum_relative_distance})"
)
@classmethod
def _from(
cls,
languages: FrozenSet[Language],
minimum_relative_distance: float,
is_every_language_model_preloaded: bool,
is_low_accuracy_mode_enabled: bool,
) -> "LanguageDetector":
languages_with_unique_characters = _collect_languages_with_unique_characters(
languages
)
one_language_alphabets = _collect_one_language_alphabets(languages)
detector = LanguageDetector(
languages,
minimum_relative_distance,
is_low_accuracy_mode_enabled,
languages_with_unique_characters,
one_language_alphabets,
_UNIGRAM_MODELS,
_BIGRAM_MODELS,
_TRIGRAM_MODELS,
_QUADRIGRAM_MODELS,
_FIVEGRAM_MODELS,
_CACHE,
)
if is_every_language_model_preloaded:
detector._preload_language_models()
return detector
def _preload_language_models(self):
trigram_models = [
_load_language_models(language, 3) for language in self._languages
]
for trigram_model in trigram_models:
if trigram_model is not None:
self._trigram_language_models.update(trigram_model)
if not self._is_low_accuracy_mode_enabled:
(unigram_models, bigram_models, quadrigram_models, fivegram_models,) = [
[
_load_language_models(language, ngram_length)
for language in self._languages
]
for ngram_length in (1, 2, 4, 5)
]
for unigram_model in unigram_models:
if unigram_model is not None:
self._unigram_language_models.update(unigram_model)
for bigram_model in bigram_models:
if bigram_model is not None:
self._bigram_language_models.update(bigram_model)
for quadrigram_model in quadrigram_models:
if quadrigram_model is not None:
self._quadrigram_language_models.update(quadrigram_model)
for fivegram_model in fivegram_models:
if fivegram_model is not None:
self._fivegram_language_models.update(fivegram_model)
def detect_language_of(self, text: str) -> Optional[Language]:
"""Detect the language of text.
Args:
text (str): The text whose language should be identified.
Returns:
The identified language. If the language cannot be
reliably detected, None is returned.
"""
confidence_values = self.compute_language_confidence_values(text)
if len(confidence_values) == 0:
return None
most_likely_language, most_likely_language_probability = confidence_values[0]
if len(confidence_values) == 1:
return most_likely_language
second_most_likely_language_probability = confidence_values[1].value
if most_likely_language_probability == second_most_likely_language_probability:
return None
if (
most_likely_language_probability - second_most_likely_language_probability
< self._minimum_relative_distance
):
return None
return most_likely_language
def detect_multiple_languages_of(self, text: str) -> List[DetectionResult]:
"""Attempt to detect multiple languages in mixed-language text.
This feature is experimental and under continuous development.
A list of DetectionResult is returned containing an entry for each
contiguous single-language text section as identified by the library.
Each entry consists of the identified language, a start index and an
end index. The indices denote the substring that has been identified
as a contiguous single-language text section.
Args:
text (str): The text whose language should be identified.
Returns:
A list of detection results. Each result contains the
identified language, the start index and end index of
the identified single-language substring.
"""
if len(text) == 0:
return []
tokens_without_whitespace = TOKENS_WITHOUT_WHITESPACE.findall(text)
if len(tokens_without_whitespace) == 0:
return []
results = []
language_counts: TypedCounter[Language] = Counter()
language = self.detect_language_of(text)
if language is not None:
language_counts[language] += 1
for word in tokens_without_whitespace:
if len(word) < 5:
continue
language = self.detect_language_of(word)
if language is not None:
language_counts[language] += 1
languages = frozenset(language_counts.keys())
if len(languages) == 1:
result = DetectionResult(
start_index=0,
end_index=len(text),
word_count=len(tokens_without_whitespace),
language=next(iter(languages)),
)
results.append(result)
else:
previous_detector_languages = self._languages.copy()
self._languages = languages
current_start_index = 0
current_end_index = 0
word_count = 0
current_language = None
token_matches = list(TOKENS_WITH_OPTIONAL_WHITESPACE.finditer(text))
last_index = len(token_matches) - 1
for i, token_match in enumerate(token_matches):
word = token_match.group(0)
language = self.detect_language_of(word)
if i == 0:
current_language = language
if language != current_language and current_language is not None:
result = DetectionResult(
start_index=current_start_index,
end_index=current_end_index,
word_count=word_count,
language=current_language,
)
results.append(result)
current_start_index = current_end_index
current_language = language
word_count = 0
current_end_index = token_match.end()
word_count += 1
if i == last_index and current_language is not None:
result = DetectionResult(
start_index=current_start_index,
end_index=current_end_index,
word_count=word_count,
language=current_language,
)
results.append(result)
if len(results) > 1:
mergeable_result_indices = []
for i, result in enumerate(results):
if result.word_count == 1:
mergeable_result_indices.append(i)
_merge_adjacent_results(results, mergeable_result_indices)
if len(results) > 1:
mergeable_result_indices.clear()
for i in range(len(results) - 1):
if results[i].language == results[i + 1].language:
mergeable_result_indices.append(i + 1)
_merge_adjacent_results(results, mergeable_result_indices)
self._languages = previous_detector_languages
return results
def compute_language_confidence_values(self, text: str) -> List[ConfidenceValue]:
"""Compute confidence values for each language supported
by this detector for the given text.
The confidence values denote how likely it is that the
given text has been written in any of the languages
supported by this detector.
A list is returned containing those languages which the
calling instance of LanguageDetector has been built from.
The entries are sorted by their confidence value in
descending order. Each value is a probability between
0.0 and 1.0. The probabilities of all languages will sum to 1.0.
If the language is unambiguously identified by the rule engine,
the value 1.0 will always be returned for this language. The
other languages will receive a value of 0.0.
Args:
text (str): The text for which to compute confidence values.
Returns:
A list of 2-element tuples. Each tuple contains a language
and the associated confidence value.
"""
values = [ConfidenceValue(language, 0.0) for language in self._languages]
words = _split_text_into_words(text)
if len(words) == 0:
_sort_confidence_values(values)
return values
language_detected_by_rules = self._detect_language_with_rules(words)
if language_detected_by_rules is not None:
for i in range(len(values)):
if values[i].language == language_detected_by_rules:
values[i] = ConfidenceValue(language_detected_by_rules, 1.0)
break
_sort_confidence_values(values)
return values
filtered_languages = self._filter_languages_by_rules(words)
if len(filtered_languages) == 1:
language_detected_by_filter = next(iter(filtered_languages))
for i in range(len(values)):
if values[i].language == language_detected_by_filter:
values[i] = ConfidenceValue(language_detected_by_filter, 1.0)
break
_sort_confidence_values(values)
return values
character_count = sum(len(word) for word in words)
if self._is_low_accuracy_mode_enabled and character_count < 3:
_sort_confidence_values(values)
return values
ngram_length_range = (
range(3, 4)
if character_count >= _HIGH_ACCURACY_MODE_MAX_TEXT_LENGTH
or self._is_low_accuracy_mode_enabled
else range(1, 6)
)
unigram_counts = None
all_probabilities = []
for ngram_length in ngram_length_range:
if character_count >= ngram_length:
ngram_model = _TestDataLanguageModel.from_text(words, ngram_length)
if ngram_length == 1:
unigram_counts = self._count_unigrams(
ngram_model, filtered_languages
)
probabilities = self._compute_language_probabilities(
ngram_model, filtered_languages
)
all_probabilities.append(probabilities)
summed_up_probabilities = _sum_up_probabilities(
all_probabilities, unigram_counts, filtered_languages
)
if len(summed_up_probabilities) == 0:
_sort_confidence_values(values)
return values
denominator = sum(summed_up_probabilities.values())
for language, probability in summed_up_probabilities.items():
for i in range(len(values)):
if values[i].language == language:
# apply softmax function
normalized_probability = probability / denominator
values[i] = ConfidenceValue(language, float(normalized_probability))
break
_sort_confidence_values(values)
return values
def compute_language_confidence(self, text: str, language: Language) -> float:
"""Compute the confidence value for the given language and input text.
The confidence value denotes how likely it is that the given text
has been written in the given language. The value that this method
computes is a number between 0.0 and 1.0. If the language is
unambiguously identified by the rule engine, the value 1.0 will
always be returned. If the given language is not supported by this
detector instance, the value 0.0 will always be returned.
Args:
text (str): The text for which to compute the confidence value.
language (Language):
The language for which to compute the confidence value.
Returns:
A float value between 0.0 and 1.0.
"""
confidence_values = self.compute_language_confidence_values(text)
for value in confidence_values:
if value.language == language:
return value.value
return 0.0
def _detect_language_with_rules(self, words: List[str]) -> Optional[Language]:
total_language_counts: TypedCounter[Optional[Language]] = Counter()
half_word_count = len(words) * 0.5
for word in words:
word_language_counts: TypedCounter[Language] = Counter()
for char in word:
is_match = False
for alphabet, language in self._one_language_alphabets.items():
if alphabet.matches(char):
word_language_counts[language] += 1
is_match = True
break
if not is_match:
if _Alphabet.HAN.matches(char):
word_language_counts[Language.CHINESE] += 1
elif JAPANESE_CHARACTER_SET.fullmatch(char) is not None:
word_language_counts[Language.JAPANESE] += 1
elif (
_Alphabet.LATIN.matches(char)
or _Alphabet.CYRILLIC.matches(char)
or _Alphabet.DEVANAGARI.matches(char)
):
for language in self._languages_with_unique_characters:
if (
language._unique_characters is not None
and char in language._unique_characters
):
word_language_counts[language] += 1
if len(word_language_counts) == 0:
total_language_counts[None] += 1
elif len(word_language_counts) == 1:
language = list(word_language_counts.elements())[0]
if language in self._languages:
total_language_counts[language] += 1
else:
total_language_counts[None] += 1
elif (
Language.CHINESE in word_language_counts
and Language.JAPANESE in word_language_counts
):
total_language_counts[Language.JAPANESE] += 1
else:
most_frequent_word_languages = word_language_counts.most_common(2)
(
most_frequent_word_language,
first_count,
) = most_frequent_word_languages[0]
(_, second_count) = most_frequent_word_languages[1]
if (
first_count > second_count
and most_frequent_word_language in self._languages
):
total_language_counts[most_frequent_word_language] += 1
else:
total_language_counts[None] += 1
if total_language_counts[None] < half_word_count:
del total_language_counts[None]
if len(total_language_counts) == 0:
return None
if len(total_language_counts) == 1:
return list(total_language_counts)[0]
if (
len(total_language_counts) == 2
and Language.CHINESE in total_language_counts
and Language.JAPANESE in total_language_counts
):
return Language.JAPANESE
most_frequent_total_languages = total_language_counts.most_common(2)
(most_frequent_total_language, first_count) = most_frequent_total_languages[0]
(_, second_count) = most_frequent_total_languages[1]
if first_count == second_count:
return None
return most_frequent_total_language
def _filter_languages_by_rules(self, words: List[str]) -> FrozenSet[Language]:
detected_alphabets: TypedCounter[_Alphabet] = Counter()
half_word_count = len(words) * 0.5
for word in words:
for alphabet in _Alphabet:
if alphabet.matches(word):
detected_alphabets[alphabet] += 1
break
if len(detected_alphabets) == 0:
return self._languages
if len(detected_alphabets) > 1:
            distinct_counts = set(detected_alphabets.values())
            if len(distinct_counts) == 1:
return self._languages
most_frequent_alphabet = detected_alphabets.most_common(1)[0][0]
filtered_languages = {
language
for language in self._languages
if most_frequent_alphabet in language._alphabets
}
language_counts: TypedCounter[Language] = Counter()
for characters, languages in CHARS_TO_LANGUAGES_MAPPING.items():
relevant_languages = languages.intersection(filtered_languages)
for word in words:
for character in characters:
if character in word:
for language in relevant_languages:
language_counts[language] += 1
languages_subset = {
language
for language, count in language_counts.items()
if count >= half_word_count
}
if len(languages_subset) > 0:
return frozenset(languages_subset)
return frozenset(filtered_languages)
def _compute_language_probabilities(
self,
ngram_model: _TestDataLanguageModel,
filtered_languages: FrozenSet[Language],
) -> Dict[Language, float]:
probabilities = {}
for language in filtered_languages:
result = self._compute_sum_of_ngram_probabilities(language, ngram_model)
if result < 0:
probabilities[language] = result
return probabilities
def _compute_sum_of_ngram_probabilities(
self, language: Language, ngram_model: _TestDataLanguageModel
) -> float:
result = 0.0
for ngrams in ngram_model.ngrams:
for ngram in ngrams:
probability = self._look_up_ngram_probability(language, ngram)
if probability is not None:
result += probability
break
return result
def _look_up_ngram_probability(
self, language: Language, ngram: str
) -> Optional[float]:
if language not in self._cache:
self._cache[language] = {}
if ngram in self._cache[language]:
return self._cache[language][ngram]
ngram_length = len(ngram)
if ngram_length == 5:
language_models = self._fivegram_language_models
elif ngram_length == 4:
language_models = self._quadrigram_language_models
elif ngram_length == 3:
language_models = self._trigram_language_models
elif ngram_length == 2:
language_models = self._bigram_language_models
elif ngram_length == 1:
language_models = self._unigram_language_models
elif ngram_length == 0:
raise ValueError("zerogram detected")
else:
raise ValueError(f"unsupported ngram length detected: {ngram_length}")
probability = None
if language not in language_models:
models = _load_language_models(language, ngram_length)
if models is None:
self._cache[language][ngram] = probability
return probability
language_models.update(models)
mask = np.isin(language_models[language]["ngram"], ngram)
try:
probability = language_models[language]["frequency"][mask][0]
except IndexError:
pass
self._cache[language][ngram] = probability
return probability
def _count_unigrams(
self,
unigram_model: _TestDataLanguageModel,
filtered_languages: FrozenSet[Language],
) -> TypedCounter[Language]:
unigram_counts: TypedCounter[Language] = Counter()
for language in filtered_languages:
for unigrams in unigram_model.ngrams:
if self._look_up_ngram_probability(language, unigrams[0]) is not None:
unigram_counts[language] += 1
return unigram_counts | PypiClean |
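# --- Illustrative usage sketch for LanguageDetector above (not part of the original file) ---
# Detector instances are normally obtained through the package's builder rather than
# constructed directly; the builder import below is an assumption about the public API,
# while detect_language_of() and compute_language_confidence_values() are defined above.
from lingua import Language, LanguageDetectorBuilder
detector = LanguageDetectorBuilder.from_languages(Language.ENGLISH, Language.GERMAN).build()
print(detector.detect_language_of("languages are awesome"))
for confidence in detector.compute_language_confidence_values("languages are awesome"):
    print(confidence.language, confidence.value)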
/ufterm-0.0.2.tar.gz/ufterm-0.0.2/README.md | # User Friendly Terminal
Simple module help user to use terminal script.
The library allows to see a Graphical Interface instead of a terminal.
## Code example
```python
import ufterm as uft
def command1():
...
def command2():
...
if __name__ == '__main__':
uft.add_command(command1)
uft.add_command(command2)
uft.loop()
```
## Issues/Bug report or improvement ideas
https://gitlab.com/olive007/user-friendly-terminal/-/issues
## License
GNU Lesser General Public License v3 or later (LGPLv3+)
| PypiClean |
/aio_usb_hotplug-5.2.0.tar.gz/aio_usb_hotplug-5.2.0/src/aio_usb_hotplug/backends/base.py |
from abc import abstractmethod, ABCMeta
from typing import Any, Dict, List
__all__ = ("Device", "USBBusScannerBackend")
Device = Any
class USBBusScannerBackend(metaclass=ABCMeta):
"""Interface specification for USB bus scanner backends."""
def configure(self, configuration: Dict[str, Any]) -> None:
"""Configures the scanner backend and specifies which devices the
backend should report.
The format of the configuration dictionary depends on the backend.
The default implementation does nothing.
It is guaranteed that no one else holds a reference to the configuration
dictionary so it is safe to just store it as-is (without making a
copy first).
"""
pass # pragma: no cover
def is_supported(self) -> bool:
"""Returns whether the backend is supported on the current platform."""
raise NotImplementedError # pragma: no cover
@abstractmethod
def key_of(self, device: Device) -> str:
"""Returns a unique key for a USB device that can be used for identity
comparisons. The keys should take into account at least the bus ID, the
address, the vendor ID and the product ID of the device.
The string does not have to be human-readable, but it must be unique
for each connected USB device.
"""
raise NotImplementedError # pragma: no cover
@abstractmethod
async def scan(self) -> List[Device]:
"""Scans the system for USB devices and returns the list of devices
found.
This is an async function that will be executed in the main event loop.
If the backend blocks while scanning, you must delegate to a worker
thread from this method.
"""
raise NotImplementedError # pragma: no cover
@abstractmethod
async def wait_until_next_scan(self) -> None:
"""Async function that blocks until the next scan is due.
It is up to the backend to decide whether it scans the bus regularly
or whether it uses some underlying mechanism of the OS to detect
hotplug events.
"""
raise NotImplementedError # pragma: no cover | PypiClean |
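# --- Illustrative sketch (not part of the original file): a minimal in-memory backend ---
# implementing the USBBusScannerBackend interface above. It "discovers" a fixed list of
# fake devices and polls on a fixed interval; a real backend would query the operating system.
import asyncio
from typing import Any, Dict, List
from aio_usb_hotplug.backends.base import Device, USBBusScannerBackend
class FakeBackend(USBBusScannerBackend):
    def __init__(self) -> None:
        self._devices: List[Device] = []
        self._interval = 1.0
    def configure(self, configuration: Dict[str, Any]) -> None:
        self._devices = list(configuration.get("devices", []))
        self._interval = float(configuration.get("interval", 1.0))
    def is_supported(self) -> bool:
        return True
    def key_of(self, device: Device) -> str:
        # A real key would combine bus ID, address, vendor ID and product ID.
        return str(device)
    async def scan(self) -> List[Device]:
        return list(self._devices)
    async def wait_until_next_scan(self) -> None:
        await asyncio.sleep(self._interval)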
/tensorflow_edwin-2.10.1-cp38-cp38-win_amd64.whl/tensorflow/python/saved_model/load_v1_in_v2.py | """Import a TF v1-style SavedModel when executing eagerly."""
import functools
from tensorflow.python.eager import context
from tensorflow.python.eager import lift_to_graph
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import function_deserialization
from tensorflow.python.saved_model import loader_impl
from tensorflow.python.saved_model import signature_serialization
from tensorflow.python.saved_model.pywrap_saved_model import metrics
from tensorflow.python.trackable import asset
from tensorflow.python.trackable import autotrackable
from tensorflow.python.trackable import resource
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.util import nest
# API label for SavedModel metrics.
_LOAD_V1_V2_LABEL = "load_v1_in_v2"
class _Initializer(resource.CapturableResource):
"""Represents an initialization operation restored from a SavedModel.
Without this object re-export of imported 1.x SavedModels would omit the
original SavedModel's initialization procedure.
Created when `tf.saved_model.load` loads a TF 1.x-style SavedModel with an
initialization op. This object holds a function that runs the
initialization. It does not require any manual user intervention;
`tf.saved_model.save` will see this object and automatically add it to the
exported SavedModel, and `tf.saved_model.load` runs the initialization
function automatically.
"""
def __init__(self, init_fn, asset_paths):
super(_Initializer, self).__init__()
self._asset_paths = asset_paths
self._init_fn = init_fn
def _create_resource(self):
return array_ops.placeholder(
dtype=dtypes.resource, shape=[], name="unused_resource")
def _initialize(self):
return self._init_fn(*[path.asset_path for path in self._asset_paths])
class _EagerSavedModelLoader(loader_impl.SavedModelLoader):
"""Loads a SavedModel without using Sessions."""
def get_meta_graph_def_from_tags(self, tags):
"""Override to support implicit one-MetaGraph loading with tags=None."""
if tags is None:
if len(self._saved_model.meta_graphs) != 1:
tag_sets = [mg.meta_info_def.tags
for mg in self._saved_model.meta_graphs]
raise ValueError(
"Importing a SavedModel with `tf.saved_model.load` requires a "
"`tags=` argument if there is more than one MetaGraph. Got "
f"`tags=None`, but there are {len(self._saved_model.meta_graphs)} "
f"MetaGraphs in the SavedModel with tag sets: {tag_sets}. Pass a "
"`tags=` argument to load this SavedModel.")
return self._saved_model.meta_graphs[0]
return super(_EagerSavedModelLoader, self).get_meta_graph_def_from_tags(
tags)
def load_graph(self, returns, meta_graph_def):
"""Called from wrap_function to import `meta_graph_def`."""
# pylint: disable=protected-access
saver, _ = tf_saver._import_meta_graph_with_return_elements(
meta_graph_def)
# pylint: enable=protected-access
returns[0] = saver
def _extract_saver_restore(self, wrapped, saver):
if saver is None:
return None
saver_def = saver.saver_def
filename_tensor = wrapped.graph.as_graph_element(
saver_def.filename_tensor_name)
# We both feed and fetch filename_tensor so we have an operation to use to
# feed into variable initializers (only relevant for v1 graph building).
return wrapped.prune(
feeds=[filename_tensor],
fetches=[filename_tensor,
wrapped.graph.as_graph_element(saver_def.restore_op_name)])
def restore_variables(self, wrapped, restore_from_saver):
"""Restores variables from the checkpoint."""
if restore_from_saver is not None:
initializer, _ = restore_from_saver(
constant_op.constant(self._variables_path))
if not ops.executing_eagerly_outside_functions():
# Add the initialization operation to the "saved_model_initializers"
# collection in case we don't have any lifted variables to attach it to.
ops.add_to_collection("saved_model_initializers", initializer)
one_unlifted = False
for variable in wrapped.graph.get_collection_ref(
ops.GraphKeys.GLOBAL_VARIABLES):
if variable.graph is wrapped.graph:
one_unlifted = True
# pylint: disable=protected-access
variable._initializer_op = initializer
# pylint: enable=protected-access
if one_unlifted:
logging.warning(
"Some variables could not be lifted out of a loaded function. "
"Please run "
"`sess.run(tf.get_collection(\"saved_model_initializers\"))`to "
"restore these variables.")
def _extract_signatures(self, wrapped, meta_graph_def):
"""Creates ConcreteFunctions for signatures in `meta_graph_def`."""
signature_functions = {}
for signature_key, signature_def in meta_graph_def.signature_def.items():
if signature_def.inputs:
input_items = sorted(
signature_def.inputs.items(), key=lambda item: item[0])
original_input_names, input_specs = zip(*input_items)
else:
original_input_names = []
input_specs = []
# TODO(b/205015292): Support optional arguments
feeds = [
wrap_function._get_element_from_tensor_info(input_spec, wrapped.graph) # pylint: disable=protected-access
for input_spec in input_specs
]
input_names = []
input_tensors = []
for original_input_name, feed in zip(original_input_names, feeds):
if isinstance(feed, sparse_tensor.SparseTensor):
# We have to give explicit name for SparseTensor arguments, because
# these are not present in the TensorInfo.
indices_name = "%s_indices" % original_input_name
values_name = "%s_values" % original_input_name
dense_shape_name = "%s_dense_shape" % original_input_name
input_names.extend([indices_name, values_name, dense_shape_name])
input_tensors.extend([feed.indices, feed.values, feed.dense_shape])
elif isinstance(feed, composite_tensor.CompositeTensor):
component_tensors = nest.flatten(feed, expand_composites=True)
input_names.extend("%s_component_%d" % (original_input_name, n)
for n in range(len(component_tensors)))
input_tensors.extend(component_tensors)
else:
input_names.append(original_input_name)
input_tensors.append(feed)
fetches = {name: out for name, out in signature_def.outputs.items()}
try:
signature_fn = wrapped.prune(feeds=feeds, fetches=fetches)
except lift_to_graph.UnliftableError as ex:
# Mutate the exception to add a bit more detail.
args = ex.args
if not args:
message = ""
else:
message = args[0]
message = (
("A SavedModel signature needs an input for each placeholder the "
"signature's outputs use. An output for signature '{}' depends on "
"a placeholder which is not an input (i.e. the placeholder is not "
"fed a value).\n\n").format(signature_key)
+ message)
ex.args = (message,) + args[1:]
raise
# pylint: disable=protected-access
signature_fn._arg_keywords = input_names
signature_fn._func_graph.structured_input_signature = (
(),
func_graph.convert_structure_to_signature(
dict(zip(input_names, input_tensors))))
if len(input_names) == 1:
# Allowing positional arguments does not create any ambiguity if there's
# only one.
signature_fn._num_positional_args = 1
else:
signature_fn._num_positional_args = 0
# pylint: enable=protected-access
signature_functions[signature_key] = signature_fn
return signature_functions
def load(self, tags):
"""Creates an object from the MetaGraph identified by `tags`."""
meta_graph_def = self.get_meta_graph_def_from_tags(tags)
load_shared_name_suffix = "_load_{}".format(ops.uid())
functions = function_deserialization.load_function_def_library(
meta_graph_def.graph_def.library,
load_shared_name_suffix=load_shared_name_suffix)
# Replace existing functions in the MetaGraphDef with renamed functions so
# we don't have duplicates or name collisions.
meta_graph_def.graph_def.library.Clear()
for function in functions.values():
meta_graph_def.graph_def.library.function.add().CopyFrom(
function.function_def)
# We've renamed functions and shared names. We need the same operation on
# the GraphDef itself for consistency.
for node_def in meta_graph_def.graph_def.node:
function_deserialization.fix_node_def(node_def, functions,
load_shared_name_suffix)
load_graph_returns = [None]
wrapped = wrap_function.wrap_function(
functools.partial(self.load_graph, load_graph_returns, meta_graph_def),
signature=[])
saver, = load_graph_returns
restore_from_saver = self._extract_saver_restore(wrapped, saver)
self.restore_variables(wrapped, restore_from_saver)
with wrapped.graph.as_default():
init_op = loader_impl.get_init_op(
meta_graph_def) or monitored_session.Scaffold.default_local_init_op()
# Add a dummy Tensor we know we can fetch to add control dependencies to.
init_anchor = constant_op.constant(0., name="dummy_fetch")
root = autotrackable.AutoTrackable()
if restore_from_saver is not None:
root.restore = (
lambda path: restore_from_saver(constant_op.constant(path)))
asset_feed_tensors = []
asset_paths = []
for tensor_name, value in loader_impl.get_asset_tensors(
self._export_dir, meta_graph_def).items():
asset_feed_tensors.append(wrapped.graph.as_graph_element(tensor_name))
asset_paths.append(asset.Asset(value))
init_fn = wrapped.prune(
feeds=asset_feed_tensors,
fetches=[init_anchor, wrapped.graph.as_graph_element(init_op)])
initializer = _Initializer(init_fn, asset_paths)
# pylint: disable=protected-access
local_init_op, _ = initializer._initialize()
# pylint: enable=protected-access
with ops.init_scope():
if not context.executing_eagerly():
ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, local_init_op)
for variable in wrapped.graph.get_collection_ref(
ops.GraphKeys.LOCAL_VARIABLES):
# pylint: disable=protected-access
variable._initializer_op = local_init_op
# pylint: enable=protected-access
root.initializer = initializer
root.asset_paths = asset_paths
signature_functions = self._extract_signatures(wrapped, meta_graph_def)
root.signatures = signature_serialization.create_signature_map(
signature_functions)
root.variables = list(wrapped.graph.variables)
root.tensorflow_version = (
meta_graph_def.meta_info_def.tensorflow_version)
root.tensorflow_git_version = (
meta_graph_def.meta_info_def.tensorflow_git_version)
root.graph = wrapped.graph
root.prune = wrapped.prune
return root
def load(export_dir, tags):
"""Load a v1-style SavedModel as an object."""
metrics.IncrementReadApi(_LOAD_V1_V2_LABEL)
loader = _EagerSavedModelLoader(export_dir)
result = loader.load(tags=tags)
metrics.IncrementRead(write_version="1")
return result | PypiClean |
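# --- Illustrative usage sketch for load() above (not part of the original file) ---
# In practice this code path is reached through tf.saved_model.load(); it is imported
# directly here to mirror the module above. The export directory, tag and signature key
# are placeholders, and the feed shape depends entirely on the exported model.
import tensorflow as tf
from tensorflow.python.saved_model.load_v1_in_v2 import load
root = load("/path/to/tf1_saved_model", tags=["serve"])
print(root.tensorflow_version)
serving_fn = root.signatures["serving_default"]  # signature key depends on the export
outputs = serving_fn(tf.constant([[1.0, 2.0]]))  # argument names/shapes depend on the signature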
/django-jet-calm-5.1.5.tar.gz/django-jet-calm-5.1.5/jet/static/jet/js/src/layout-updaters/toolbar.js | var $ = require('jquery');
var ToolbarUpdater = function($changelist) {
this.$changelist = $changelist;
};
ToolbarUpdater.prototype = {
getToolbar: function($changelist) {
var $toolbar = $changelist.find('#toolbar');
if ($toolbar.length == 0) {
$toolbar = $('<div>').attr('id', 'toolbar');
$('#changelist').prepend($toolbar);
}
return $toolbar;
},
updateToolbar: function($toolbar) {
var placeholder = $toolbar.find('input[type="submit"]').val();
$toolbar.find('#searchbar').attr('placeholder', placeholder);
},
moveFilters: function($changelist, $toolbar) {
var filterName;
var $search = $toolbar.find('#searchbar');
$changelist.find('#changelist-filter').children().each(function() {
var $element = $(this);
if ($element.prop('tagName') == 'H3') {
filterName = $element.text();
} else if ($element.prop('tagName') == 'UL') {
var $select = $('<select>');
var $items = $element.find('li');
$.each($element.prop('attributes'), function() {
$select.attr(this.name, this.value);
});
$select.addClass('changelist-filter-select');
if ($items.filter('.selected').length > 1) {
$select.attr('multiple', true);
}
$items.each(function(i) {
var $item = $(this);
var $link = $item.find('a');
var $option = $('<option>')
.text($link.text())
.attr('data-url', $link.attr('href'))
.attr('selected', $item.hasClass('selected'));
if (i == 0 ) {
if (filterName != null) {
$option.text(filterName)
}
var $separator = $('<option>')
.attr('disabled', true)
.text('---');
$option = $option.add($separator);
}
$select.append($option);
});
var $wrapper = $('<span>')
.addClass('changelist-filter-select-wrapper')
.append($select);
if ($search.length) {
$wrapper.insertAfter($search);
} else {
$toolbar.append($wrapper);
}
filterName = null;
} else if ($element.hasClass('changelist-filter-popup')) {
var $toggle = $element.find('.changelist-filter-popup-toggle');
var $content = $element.find('.changelist-filter-popup-content');
var $wrapper = $('<span>')
.addClass('changelist-filter-select-wrapper')
.append($element);
if ($search.length) {
$wrapper.insertAfter($search);
} else {
$toolbar.append($wrapper);
}
$toggle.on('click', function(e) {
e.preventDefault();
e.stopPropagation();
$content.toggleClass('visible');
});
$content.on('click', function(e) {
e.stopPropagation();
});
$(document.body).on('click', function() {
$content.removeClass('visible');
});
}
});
var $btnSubmitSearch = $toolbar.find('input[type="submit"]');
if ($search.length) {
$btnSubmitSearch.insertAfter($search);
}
$changelist.find('#changelist-filter').remove();
},
fixFloatLineBreak: function() {
$('#content-main').each(function() {
var $content = $(this);
$.each(['#toolbar', '.object-tools', 'changeform-navigation'], function(i, selector) {
var $element = $content.find(selector).first();
if ($element.length == 0) {
return;
}
$('<div>')
.addClass('clear')
.insertAfter($element);
return false;
});
});
},
run: function() {
var $toolbar = this.getToolbar(this.$changelist);
try {
this.updateToolbar($toolbar);
this.moveFilters(this.$changelist, $toolbar);
} catch (e) {
console.error(e, e.stack);
}
try {
this.fixFloatLineBreak();
} catch (e) {
console.error(e, e.stack);
}
$toolbar.addClass('initialized');
}
};
$(document).ready(function() {
$('#changelist').each(function() {
new ToolbarUpdater($(this)).run();
});
}); | PypiClean |
/benchling_api_client-2.0.207-py3-none-any.whl/benchling_api_client/v2/stable/models/rna_sequences_archival_change.py | from typing import Any, cast, Dict, List, Optional, Type, TypeVar, Union
import attr
from ..extensions import NotPresentError
from ..types import UNSET, Unset
T = TypeVar("T", bound="RnaSequencesArchivalChange")
@attr.s(auto_attribs=True, repr=False)
class RnaSequencesArchivalChange:
"""IDs of all RNA Sequences that were archived or unarchived, grouped by resource type."""
_rna_sequence_ids: Union[Unset, List[str]] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def __repr__(self):
fields = []
fields.append("rna_sequence_ids={}".format(repr(self._rna_sequence_ids)))
fields.append("additional_properties={}".format(repr(self.additional_properties)))
return "RnaSequencesArchivalChange({})".format(", ".join(fields))
def to_dict(self) -> Dict[str, Any]:
rna_sequence_ids: Union[Unset, List[Any]] = UNSET
if not isinstance(self._rna_sequence_ids, Unset):
rna_sequence_ids = self._rna_sequence_ids
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
# Allow the model to serialize even if it was created outside of the constructor, circumventing validation
if rna_sequence_ids is not UNSET:
field_dict["rnaSequenceIds"] = rna_sequence_ids
return field_dict
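    # Usage sketch (illustrative only; the "seq_*" IDs below are placeholders,
    # not real Benchling identifiers). It shows how the snake_case attribute
    # maps onto the camelCase wire field produced by to_dict():
    #
    #     >>> change = RnaSequencesArchivalChange(rna_sequence_ids=["seq_1", "seq_2"])
    #     >>> change.to_dict()
    #     {'rnaSequenceIds': ['seq_1', 'seq_2']}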
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any], strict: bool = False) -> T:
d = src_dict.copy()
def get_rna_sequence_ids() -> Union[Unset, List[str]]:
rna_sequence_ids = cast(List[str], d.pop("rnaSequenceIds"))
return rna_sequence_ids
try:
rna_sequence_ids = get_rna_sequence_ids()
except KeyError:
if strict:
raise
rna_sequence_ids = cast(Union[Unset, List[str]], UNSET)
rna_sequences_archival_change = cls(
rna_sequence_ids=rna_sequence_ids,
)
rna_sequences_archival_change.additional_properties = d
return rna_sequences_archival_change
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties
def get(self, key, default=None) -> Optional[Any]:
return self.additional_properties.get(key, default)
@property
def rna_sequence_ids(self) -> List[str]:
if isinstance(self._rna_sequence_ids, Unset):
raise NotPresentError(self, "rna_sequence_ids")
return self._rna_sequence_ids
@rna_sequence_ids.setter
def rna_sequence_ids(self, value: List[str]) -> None:
self._rna_sequence_ids = value
@rna_sequence_ids.deleter
def rna_sequence_ids(self) -> None:
self._rna_sequence_ids = UNSET | PypiClean |
/nttai_faust_streaming-0.9.0-py3-none-any.whl/faust/utils/json.py | import datetime
import enum
import typing
import uuid
from collections import Counter, deque
from decimal import Decimal
from typing import (
Any,
Callable,
Iterable,
List,
Mapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
__all__ = [
"JSONEncoder",
"dumps",
"loads",
"str_to_decimal",
]
if typing.TYPE_CHECKING:
import orjson
else: # pragma: no cover
try:
import orjson
except ImportError:
orjson = None # noqa
DEFAULT_TEXTUAL_TYPES: List[Type] = [Decimal, uuid.UUID, bytes]
T = TypeVar("T")
TypeTuple = Tuple[Type[T], ...]
IsInstanceArg = Union[Type[Any], TypeTuple[Any]]
try: # pragma: no cover
from django.utils.functional import Promise
DJANGO_TEXTUAL_TYPES = [Promise]
except ImportError: # pragma: no cover
DJANGO_TEXTUAL_TYPES = []
TEXTUAL_TYPES: TypeTuple[Any] = tuple(DEFAULT_TEXTUAL_TYPES + DJANGO_TEXTUAL_TYPES)
try: # pragma: no cover
import simplejson as json
# simplejson converts Decimal to float by default, i.e. before
# we have a chance to override it using Encoder.default.
_JSON_DEFAULT_KWARGS = {
"use_decimal": False,
"namedtuple_as_object": False,
}
except ImportError: # pragma: no cover
import json # type: ignore
_JSON_DEFAULT_KWARGS = {}
#: Max length for string to be converted to decimal.
DECIMAL_MAXLEN = 1000
#: Types that we convert to lists.
SEQUENCE_TYPES: TypeTuple[Iterable] = (set, frozenset, deque)
DateTypeTuple = Tuple[Union[Type[datetime.date], Type[datetime.time]], ...]
DatetimeTypeTuple = Tuple[
Union[
Type[datetime.time],
Type[datetime.datetime],
],
...,
]
#: Types that we convert to mappings.
MAPPING_TYPES: TypeTuple[Mapping] = (Counter,)
#: Types that are datetimes and dates (-> .isoformat())
DATE_TYPES: DateTypeTuple = (datetime.date, datetime.time)
#: Types we use `return obj.value` for (Enum)
VALUE_DELEGATE_TYPES: TypeTuple[enum.Enum] = (enum.Enum,)
HAS_TIME: DatetimeTypeTuple = (datetime.datetime, datetime.time)
def str_to_decimal(s: str, maxlen: int = DECIMAL_MAXLEN) -> Optional[Decimal]:
"""Convert string to :class:`~decimal.Decimal`.
Args:
s (str): Number to convert.
        maxlen (int): Max length of string. Default is 1000 (``DECIMAL_MAXLEN``).
Raises:
ValueError: if length exceeds maximum length, or if value is not
a valid number (e.g. Inf, NaN or sNaN).
Returns:
Decimal: Converted number.
"""
if s is None:
return None
if len(s) > maxlen:
raise ValueError(f"string of length {len(s)} is longer than limit ({maxlen})")
v = Decimal(s)
if not v.is_finite(): # check for Inf/NaN/sNaN/qNaN
raise ValueError(f"Illegal value in decimal: {s!r}")
return v
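# Usage sketch for str_to_decimal (values are illustrative):
#
#     >>> str_to_decimal("3.1415")
#     Decimal('3.1415')
#     >>> str_to_decimal(None) is None
#     True
#
# Non-finite inputs such as "NaN" or "Infinity", and strings longer than
# DECIMAL_MAXLEN characters, raise ValueError.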
def on_default(
o: Any,
*,
sequences: TypeTuple[Iterable] = SEQUENCE_TYPES,
maps: TypeTuple[Mapping] = MAPPING_TYPES,
dates: DateTypeTuple = DATE_TYPES,
value_delegate: TypeTuple[enum.Enum] = VALUE_DELEGATE_TYPES,
has_time: DatetimeTypeTuple = HAS_TIME,
_isinstance: Callable[[Any, IsInstanceArg], bool] = isinstance,
_dict: Callable = dict,
_str: Callable[[Any], str] = str,
_list: Callable = list,
textual: TypeTuple[Any] = TEXTUAL_TYPES,
) -> Any:
if _isinstance(o, textual):
return _str(o)
elif _isinstance(o, maps):
return _dict(o)
elif _isinstance(o, dates):
if not _isinstance(o, has_time):
o = datetime.datetime(o.year, o.month, o.day, 0, 0, 0, 0)
r = o.isoformat()
if r.endswith("+00:00"):
r = r[:-6] + "Z"
return r
elif isinstance(o, value_delegate):
return o.value
elif isinstance(o, sequences):
return _list(o)
else:
to_json = getattr(o, "__json__", None)
if to_json is not None:
return to_json()
raise TypeError(f"JSON cannot serialize {type(o).__name__!r}: {o!r}")
class JSONEncoder(json.JSONEncoder):
"""Faust customized :class:`json.JSONEncoder`.
Our version supports additional types like :class:`~uuid.UUID`, and
importantly includes microsecond information in datetimes.
"""
def default(self, o: Any, *, callback: Callable[[Any], Any] = on_default) -> Any:
"""Try to convert non-built-in json type to json."""
return callback(o)
if orjson is not None: # pragma: no cover
def dumps(
obj: Any,
json_dumps: Callable = orjson.dumps,
cls: Type[JSONEncoder] = JSONEncoder,
**kwargs: Any,
) -> str:
"""Serialize to json."""
return json_dumps(
obj,
default=on_default,
)
def loads(s: str, json_loads: Callable = orjson.loads, **kwargs: Any) -> Any:
"""Deserialize json string."""
return json_loads(s)
else:
def dumps(
obj: Any,
json_dumps: Callable = json.dumps,
cls: Type[JSONEncoder] = JSONEncoder,
**kwargs: Any,
) -> str:
"""Serialize to json. See :func:`json.dumps`."""
return json_dumps(
obj,
cls=cls,
**dict(_JSON_DEFAULT_KWARGS, **kwargs),
separators=(",", ":"),
)
def loads(s: str, json_loads: Callable = json.loads, **kwargs: Any) -> Any:
"""Deserialize json string. See :func:`json.loads`."""
return json_loads(s, **kwargs) | PypiClean |
/react-frontend-20230406083236.tar.gz/react-frontend-20230406083236/react_frontend/cc5a4e16.js | "use strict";(self.webpackChunkreact_frontend=self.webpackChunkreact_frontend||[]).push([[1735],{88423:function(t,r,e){function n(t){return n="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},n(t)}function o(){o=function(){return t};var t={},r=Object.prototype,e=r.hasOwnProperty,i=Object.defineProperty||function(t,r,e){t[r]=e.value},a="function"==typeof Symbol?Symbol:{},u=a.iterator||"@@iterator",c=a.asyncIterator||"@@asyncIterator",f=a.toStringTag||"@@toStringTag";function l(t,r,e){return Object.defineProperty(t,r,{value:e,enumerable:!0,configurable:!0,writable:!0}),t[r]}try{l({},"")}catch(A){l=function(t,r,e){return t[r]=e}}function s(t,r,e,n){var o=r&&r.prototype instanceof p?r:p,a=Object.create(o.prototype),u=new _(n||[]);return i(a,"_invoke",{value:j(t,e,u)}),a}function h(t,r,e){try{return{type:"normal",arg:t.call(r,e)}}catch(A){return{type:"throw",arg:A}}}t.wrap=s;var y={};function p(){}function v(){}function d(){}var m={};l(m,u,(function(){return this}));var b=Object.getPrototypeOf,g=b&&b(b(P([])));g&&g!==r&&e.call(g,u)&&(m=g);var w=d.prototype=p.prototype=Object.create(m);function x(t){["next","throw","return"].forEach((function(r){l(t,r,(function(t){return this._invoke(r,t)}))}))}function O(t,r){function o(i,a,u,c){var f=h(t[i],t,a);if("throw"!==f.type){var l=f.arg,s=l.value;return s&&"object"==n(s)&&e.call(s,"__await")?r.resolve(s.__await).then((function(t){o("next",t,u,c)}),(function(t){o("throw",t,u,c)})):r.resolve(s).then((function(t){l.value=t,u(l)}),(function(t){return o("throw",t,u,c)}))}c(f.arg)}var a;i(this,"_invoke",{value:function(t,e){function n(){return new r((function(r,n){o(t,e,r,n)}))}return a=a?a.then(n,n):n()}})}function j(t,r,e){var n="suspendedStart";return function(o,i){if("executing"===n)throw new Error("Generator is already running");if("completed"===n){if("throw"===o)throw i;return k()}for(e.method=o,e.arg=i;;){var a=e.delegate;if(a){var u=E(a,e);if(u){if(u===y)continue;return u}}if("next"===e.method)e.sent=e._sent=e.arg;else if("throw"===e.method){if("suspendedStart"===n)throw n="completed",e.arg;e.dispatchException(e.arg)}else"return"===e.method&&e.abrupt("return",e.arg);n="executing";var c=h(t,r,e);if("normal"===c.type){if(n=e.done?"completed":"suspendedYield",c.arg===y)continue;return{value:c.arg,done:e.done}}"throw"===c.type&&(n="completed",e.method="throw",e.arg=c.arg)}}}function E(t,r){var e=r.method,n=t.iterator[e];if(void 0===n)return r.delegate=null,"throw"===e&&t.iterator.return&&(r.method="return",r.arg=void 0,E(t,r),"throw"===r.method)||"return"!==e&&(r.method="throw",r.arg=new TypeError("The iterator does not provide a '"+e+"' method")),y;var o=h(n,t.iterator,r.arg);if("throw"===o.type)return r.method="throw",r.arg=o.arg,r.delegate=null,y;var i=o.arg;return i?i.done?(r[t.resultName]=i.value,r.next=t.nextLoc,"return"!==r.method&&(r.method="next",r.arg=void 0),r.delegate=null,y):i:(r.method="throw",r.arg=new TypeError("iterator result is not an object"),r.delegate=null,y)}function L(t){var r={tryLoc:t[0]};1 in t&&(r.catchLoc=t[1]),2 in t&&(r.finallyLoc=t[2],r.afterLoc=t[3]),this.tryEntries.push(r)}function S(t){var r=t.completion||{};r.type="normal",delete r.arg,t.completion=r}function _(t){this.tryEntries=[{tryLoc:"root"}],t.forEach(L,this),this.reset(!0)}function P(t){if(t){var r=t[u];if(r)return 
r.call(t);if("function"==typeof t.next)return t;if(!isNaN(t.length)){var n=-1,o=function r(){for(;++n<t.length;)if(e.call(t,n))return r.value=t[n],r.done=!1,r;return r.value=void 0,r.done=!0,r};return o.next=o}}return{next:k}}function k(){return{value:void 0,done:!0}}return v.prototype=d,i(w,"constructor",{value:d,configurable:!0}),i(d,"constructor",{value:v,configurable:!0}),v.displayName=l(d,f,"GeneratorFunction"),t.isGeneratorFunction=function(t){var r="function"==typeof t&&t.constructor;return!!r&&(r===v||"GeneratorFunction"===(r.displayName||r.name))},t.mark=function(t){return Object.setPrototypeOf?Object.setPrototypeOf(t,d):(t.__proto__=d,l(t,f,"GeneratorFunction")),t.prototype=Object.create(w),t},t.awrap=function(t){return{__await:t}},x(O.prototype),l(O.prototype,c,(function(){return this})),t.AsyncIterator=O,t.async=function(r,e,n,o,i){void 0===i&&(i=Promise);var a=new O(s(r,e,n,o),i);return t.isGeneratorFunction(e)?a:a.next().then((function(t){return t.done?t.value:a.next()}))},x(w),l(w,f,"Generator"),l(w,u,(function(){return this})),l(w,"toString",(function(){return"[object Generator]"})),t.keys=function(t){var r=Object(t),e=[];for(var n in r)e.push(n);return e.reverse(),function t(){for(;e.length;){var n=e.pop();if(n in r)return t.value=n,t.done=!1,t}return t.done=!0,t}},t.values=P,_.prototype={constructor:_,reset:function(t){if(this.prev=0,this.next=0,this.sent=this._sent=void 0,this.done=!1,this.delegate=null,this.method="next",this.arg=void 0,this.tryEntries.forEach(S),!t)for(var r in this)"t"===r.charAt(0)&&e.call(this,r)&&!isNaN(+r.slice(1))&&(this[r]=void 0)},stop:function(){this.done=!0;var t=this.tryEntries[0].completion;if("throw"===t.type)throw t.arg;return this.rval},dispatchException:function(t){if(this.done)throw t;var r=this;function n(e,n){return a.type="throw",a.arg=t,r.next=e,n&&(r.method="next",r.arg=void 0),!!n}for(var o=this.tryEntries.length-1;o>=0;--o){var i=this.tryEntries[o],a=i.completion;if("root"===i.tryLoc)return n("end");if(i.tryLoc<=this.prev){var u=e.call(i,"catchLoc"),c=e.call(i,"finallyLoc");if(u&&c){if(this.prev<i.catchLoc)return n(i.catchLoc,!0);if(this.prev<i.finallyLoc)return n(i.finallyLoc)}else if(u){if(this.prev<i.catchLoc)return n(i.catchLoc,!0)}else{if(!c)throw new Error("try statement without catch or finally");if(this.prev<i.finallyLoc)return n(i.finallyLoc)}}}},abrupt:function(t,r){for(var n=this.tryEntries.length-1;n>=0;--n){var o=this.tryEntries[n];if(o.tryLoc<=this.prev&&e.call(o,"finallyLoc")&&this.prev<o.finallyLoc){var i=o;break}}i&&("break"===t||"continue"===t)&&i.tryLoc<=r&&r<=i.finallyLoc&&(i=null);var a=i?i.completion:{};return a.type=t,a.arg=r,i?(this.method="next",this.next=i.finallyLoc,y):this.complete(a)},complete:function(t,r){if("throw"===t.type)throw t.arg;return"break"===t.type||"continue"===t.type?this.next=t.arg:"return"===t.type?(this.rval=this.arg=t.arg,this.method="return",this.next="end"):"normal"===t.type&&r&&(this.next=r),y},finish:function(t){for(var r=this.tryEntries.length-1;r>=0;--r){var e=this.tryEntries[r];if(e.finallyLoc===t)return this.complete(e.completion,e.afterLoc),S(e),y}},catch:function(t){for(var r=this.tryEntries.length-1;r>=0;--r){var e=this.tryEntries[r];if(e.tryLoc===t){var n=e.completion;if("throw"===n.type){var o=n.arg;S(e)}return o}}throw new Error("illegal catch attempt")},delegateYield:function(t,r,e){return this.delegate={iterator:P(t),resultName:r,nextLoc:e},"next"===this.method&&(this.arg=void 0),y}},t}function i(t,r,e,n,o,i,a){try{var u=t[i](a),c=u.value}catch(f){return void 
e(f)}u.done?r(c):Promise.resolve(c).then(n,o)}var a;e.d(r,{F:function(){return u}});var u=function(){var t,r=(t=o().mark((function t(){return o().wrap((function(t){for(;;)switch(t.prev=t.next){case 0:if(a){t.next=4;break}return t.next=3,Promise.all([e.e(6087),e.e(8697)]).then(e.bind(e,48697));case 3:a=t.sent.default;case 4:return t.abrupt("return",a);case 5:case"end":return t.stop()}}),t)})),function(){var r=this,e=arguments;return new Promise((function(n,o){var a=t.apply(r,e);function u(t){i(a,n,o,u,c,"next",t)}function c(t){i(a,n,o,u,c,"throw",t)}u(void 0)}))});return function(){return r.apply(this,arguments)}}()},1460:function(t,r,e){e.d(r,{l:function(){return y}});var n=e(15304),o=e(38941);function i(t){return i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},i(t)}function a(t,r){return function(t){if(Array.isArray(t))return t}(t)||function(t,r){var e=null==t?null:"undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(null!=e){var n,o,i,a,u=[],c=!0,f=!1;try{if(i=(e=e.call(t)).next,0===r){if(Object(e)!==e)return;c=!1}else for(;!(c=(n=i.call(e)).done)&&(u.push(n.value),u.length!==r);c=!0);}catch(l){f=!0,o=l}finally{try{if(!c&&null!=e.return&&(a=e.return(),Object(a)!==a))return}finally{if(f)throw o}}return u}}(t,r)||function(t,r){if(!t)return;if("string"==typeof t)return u(t,r);var e=Object.prototype.toString.call(t).slice(8,-1);"Object"===e&&t.constructor&&(e=t.constructor.name);if("Map"===e||"Set"===e)return Array.from(t);if("Arguments"===e||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(e))return u(t,r)}(t,r)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function u(t,r){(null==r||r>t.length)&&(r=t.length);for(var e=0,n=new Array(r);e<r;e++)n[e]=t[e];return n}function c(t,r){for(var e=0;e<r.length;e++){var n=r[e];n.enumerable=n.enumerable||!1,n.configurable=!0,"value"in n&&(n.writable=!0),Object.defineProperty(t,(o=n.key,a=void 0,a=function(t,r){if("object"!==i(t)||null===t)return t;var e=t[Symbol.toPrimitive];if(void 0!==e){var n=e.call(t,r||"default");if("object"!==i(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===r?String:Number)(t)}(o,"string"),"symbol"===i(a)?a:String(a)),n)}var o,a}function f(t,r){return f=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(t,r){return t.__proto__=r,t},f(t,r)}function l(t){var r=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(h){return!1}}();return function(){var e,n=s(t);if(r){var o=s(this).constructor;e=Reflect.construct(n,arguments,o)}else e=n.apply(this,arguments);return function(t,r){if(r&&("object"===i(r)||"function"==typeof r))return r;if(void 0!==r)throw new TypeError("Derived constructors may only return object or undefined");return function(t){if(void 0===t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return t}(t)}(this,e)}}function s(t){return s=Object.setPrototypeOf?Object.getPrototypeOf.bind():function(t){return t.__proto__||Object.getPrototypeOf(t)},s(t)}var h={},y=(0,o.XM)(function(t){!function(t,r){if("function"!=typeof r&&null!==r)throw new 
TypeError("Super expression must either be null or a function");t.prototype=Object.create(r&&r.prototype,{constructor:{value:t,writable:!0,configurable:!0}}),Object.defineProperty(t,"prototype",{writable:!1}),r&&f(t,r)}(u,t);var r,e,o,i=l(u);function u(){var t;return function(t,r){if(!(t instanceof r))throw new TypeError("Cannot call a class as a function")}(this,u),(t=i.apply(this,arguments)).ot=h,t}return r=u,e=[{key:"render",value:function(t,r){return r()}},{key:"update",value:function(t,r){var e=this,o=a(r,2),i=o[0],u=o[1];if(Array.isArray(i)){if(Array.isArray(this.ot)&&this.ot.length===i.length&&i.every((function(t,r){return t===e.ot[r]})))return n.Jb}else if(this.ot===i)return n.Jb;return this.ot=Array.isArray(i)?Array.from(i):i,this.render(i,u)}}],e&&c(r.prototype,e),o&&c(r,o),Object.defineProperty(r,"prototype",{writable:!1}),u}(o.Xe))}}]); | PypiClean |
/neuro_pypes-1.1.2.tar.gz/neuro_pypes-1.1.2/neuro_pypes/interfaces/ants/segmentation.py | from nipype.external.due import BibTeX
from nipype.interfaces.ants.base import ANTSCommand, ANTSCommandInputSpec
from nipype.interfaces.base import TraitedSpec, File, traits, isdefined
from nipype.utils.filemanip import split_filename
class KellyKapowskiInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='--image-dimensionality %d', usedefault=True,
desc='image dimension (2 or 3)')
segmentation_image = File(exists=True, argstr='--segmentation-image "%s"', mandatory=True,
desc="A segmentation image must be supplied labeling the gray and white matters.\n"
"Default values = 2 and 3, respectively.", )
gray_matter_label = traits.Int(2, usedefault=True,
desc="The label value for the gray matter label in the segmentation_image.")
white_matter_label = traits.Int(3, usedefault=True,
desc="The label value for the white matter label in the segmentation_image.")
gray_matter_prob_image = File(exists=True, argstr='--gray-matter-probability-image "%s"',
desc="In addition to the segmentation image, a gray matter probability image can be\n"
"used. If no such image is supplied, one is created using the segmentation image\n"
"and a variance of 1.0 mm.")
white_matter_prob_image = File(exists=True, argstr='--white-matter-probability-image "%s"',
desc="In addition to the segmentation image, a white matter probability image can be\n"
"used. If no such image is supplied, one is created using the segmentation image\n"
"and a variance of 1.0 mm.")
convergence = traits.Str(default="[50,0.001,10]", argstr='--convergence "%s"', usedefault=True,
desc="Convergence is determined by fitting a line to the normalized energy profile of\n"
"the last N iterations (where N is specified by the window size) and determining\n"
"the slope which is then compared with the convergence threshold.", )
thickness_prior_estimate = traits.Float(10, usedefault=True, argstr="--thickness-prior-estimate %f",
desc="Provides a prior constraint on the final thickness measurement in mm.")
thickness_prior_image = File(exists=True, argstr='--thickness-prior-image "%s"',
desc="An image containing spatially varying prior thickness values.")
gradient_step = traits.Float(0.025, usedefault=True, argstr="--gradient-step %f",
desc="Gradient step size for the optimization.")
smoothing_variance = traits.Float(1.0, argstr="--smoothing-variance %f",
desc="Defines the Gaussian smoothing of the hit and total images.")
smoothing_velocity_field = traits.Float(1.5, argstr="--smoothing-velocity-field-parameter %f",
desc="Defines the Gaussian smoothing of the velocity field (default = 1.5).\n"
"If the b-spline smoothing option is chosen, then this defines the \n"
"isotropic mesh spacing for the smoothing spline (default = 15).")
use_bspline_smoothing = traits.Bool(argstr="--use-bspline-smoothing 1",
desc="Sets the option for B-spline smoothing of the velocity field.")
number_integration_points = traits.Int(10, argstr="--number-of-integration-points %d",
desc="Number of compositions of the diffeomorphism per iteration.")
max_invert_displacement_field_iters = traits.Int(20,
argstr="--maximum-number-of-invert-displacement-field-iterations %d",
desc="Maximum number of iterations for estimating the invert \n"
"displacement field.")
cortical_thickness = File(argstr='--output "%s"', keep_extension=True,
name_source=["segmentation_image"], name_template='%s_cortical_thickness',
desc='Filename for the cortical thickness.', hash_files=False)
warped_white_matter = File(name_source=["segmentation_image"], keep_extension=True,
name_template='%s_warped_white_matter',
desc='Filename for the warped white matter file.', hash_files=False)
class KellyKapowskiOutputSpec(TraitedSpec):
cortical_thickness = File(desc="A thickness map defined in the segmented gray matter.")
warped_white_matter = File(desc="A warped white matter image.")
class KellyKapowski(ANTSCommand):
""" Nipype Interface to ANTs' KellyKapowski, also known as DiReCT.
DiReCT is a registration based estimate of cortical thickness. It was published
in S. R. Das, B. B. Avants, M. Grossman, and J. C. Gee, Registration based
cortical thickness measurement, Neuroimage 2009, 45:867--879.
Examples
--------
>>> from nipype.interfaces.ants.segmentation import KellyKapowski
>>> kk = KellyKapowski()
>>> kk.inputs.dimension = 3
>>> kk.inputs.segmentation_image = "segmentation0.nii.gz"
>>> kk.inputs.convergence = "[45,0.0,10]"
>>> kk.inputs.gradient_step = 0.025
>>> kk.inputs.smoothing_variance = 1.0
>>> kk.inputs.smoothing_velocity_field = 1.5
>>> #kk.inputs.use_bspline_smoothing = False
>>> kk.inputs.number_integration_points = 10
>>> kk.inputs.thickness_prior_estimate = 10
>>> kk.cmdline # doctest: +ALLOW_UNICODE
u'KellyKapowski --convergence "[45,0.0,10]" \
--output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" \
--image-dimensionality 3 --gradient-step 0.025000 --number-of-integration-points 10 \
--segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 \
--smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000'
"""
_cmd = "KellyKapowski"
input_spec = KellyKapowskiInputSpec
output_spec = KellyKapowskiOutputSpec
references_ = [{
'entry': BibTeX(
"@book{Das2009867,"
"author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee},"
"title={Registration based cortical thickness measurement.},"
"journal={NeuroImage},"
"volume={45},"
"number={37},"
"pages={867--879},"
"year={2009},"
"issn={1053-8119},"
"url={http://www.sciencedirect.com/science/article/pii/S1053811908012780},"
"doi={http://dx.doi.org/10.1016/j.neuroimage.2008.12.016}"
"}"),
'description': 'The details on the implementation of DiReCT.',
'tags': ['implementation'],
}]
def _parse_inputs(self, skip=None):
if skip is None:
skip = []
skip += ['warped_white_matter', 'gray_matter_label', 'white_matter_label']
return super(KellyKapowski, self)._parse_inputs(skip=skip)
def _gen_filename(self, name):
if name == 'cortical_thickness':
output = self.inputs.cortical_thickness
if not isdefined(output):
_, name, ext = split_filename(self.inputs.segmentation_image)
output = name + '_cortical_thickness' + ext
return output
if name == 'warped_white_matter':
output = self.inputs.warped_white_matter
if not isdefined(output):
_, name, ext = split_filename(self.inputs.segmentation_image)
output = name + '_warped_white_matter' + ext
return output
return None
def _format_arg(self, opt, spec, val):
if opt == "segmentation_image":
newval = '[{0},{1},{2}]'.format(
self.inputs.segmentation_image,
self.inputs.gray_matter_label,
self.inputs.white_matter_label
)
return spec.argstr % newval
if opt == "cortical_thickness":
ct = self._gen_filename("cortical_thickness")
wm = self._gen_filename("warped_white_matter")
newval = '[{},{}]'.format(ct, wm)
return spec.argstr % newval
return super(KellyKapowski, self)._format_arg(opt, spec, val) | PypiClean |
/AlgBench-2.2.0-py3-none-any.whl/algbench/log_capture.py | import logging
from typing import Optional
from .db.json_serializer import to_json
from .utils import Timer
class JsonLogHandler(logging.Handler):
"""
A logging handler that stores log entries in a list of JSON compatible
dictionaries.
"""
def __init__(self, level=logging.NOTSET) -> None:
"""
:param level: The level of the logger to catch.
"""
super().__init__(level)
self._log = []
self._timer = Timer()
def emit(self, record: logging.LogRecord) -> None:
data = {}
data.update(record.__dict__)
data["runtime"] = self._timer.time()
self._log.append(to_json(data))
def reset(self):
self._timer.reset()
self._log = []
def get_entries(self) -> list:
return self._log
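# Usage sketch (illustrative; the logger name is hypothetical): the handler can
# be attached to any standard logger, after which every record it receives is
# stored as a JSON-compatible dict that also carries a "runtime" value taken
# from the handler's internal timer.
#
#     >>> import logging
#     >>> handler = JsonLogHandler()
#     >>> logger = logging.getLogger("algbench.example")
#     >>> logger.addHandler(handler)
#     >>> logger.warning("something happened")
#     >>> entries = handler.get_entries()  # list of dicts, one per record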
class JsonLogCapture:
"""
A context manager that captures logs and returns them as a list of JSON
"""
def __init__(
self,
logger_name: str,
level=logging.NOTSET,
handler: Optional[JsonLogHandler] = None,
) -> None:
"""
:param logger_name: The name of the logger to catch.
:param level: The level of the logger to catch.
"""
self._logger = logging.getLogger(logger_name)
self._level = level
self._prior_level = self._logger.getEffectiveLevel()
self._json_log: JsonLogHandler = handler if handler else JsonLogHandler(level)
def __enter__(self):
self._json_log.reset()
self._logger.addHandler(self._json_log)
if self._level:
self._prior_level = self._logger.getEffectiveLevel()
self._logger.setLevel(self._level)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._logger.removeHandler(self._json_log)
self._json_log.close()
if self._level:
self._logger.setLevel(self._prior_level)
def get_entries(self) -> list:
"""
Returns the log entries as a list of JSON compatible dictionaries.
"""
return self._json_log.get_entries() | PypiClean |
/imesh-0.0.17-py3-none-any.whl/mesh/prsim/transport.py |
from abc import abstractmethod, ABC
from typing import Generic, Dict
from mesh.macro import spi, mpi, T
@spi("mesh")
class Transport(ABC, Generic[T]):
"""
Private compute data channel in async and blocking mode.
"""
MESH = "mesh"
GRPC = "grpc"
@abstractmethod
@mpi("mesh.chan.open")
def open(self, session_id: str, metadata: Dict[str, str]) -> "Session":
"""
Open a channel session.
:param session_id: node id or inst id
        :param metadata: channel metadata
:return:
"""
pass
@abstractmethod
@mpi("mesh.chan.close")
def close(self, timeout: int):
"""
Close the channel.
:return:
"""
pass
@abstractmethod
@mpi("mesh.chan.roundtrip")
def roundtrip(self, payload: bytes, metadata: Dict[str, str]) -> bytes:
"""
Roundtrip with the channel.
:param payload:
:param metadata:
:return:
"""
pass
@spi("mesh")
class Session(ABC, Generic[T]):
"""
Remote queue in async and blocking mode.
"""
@abstractmethod
@mpi("mesh.chan.peek")
def peek(self, topic: str = "") -> bytes:
"""
Retrieves, but does not remove, the head of this queue,
or returns None if this queue is empty.
:param topic: message topic
:return: the head of this queue, or None if this queue is empty
"""
pass
@abstractmethod
@mpi(name="mesh.chan.pop", timeout=120 * 1000)
def pop(self, timeout: int, topic: str = "") -> bytes:
"""
Retrieves and removes the head of this queue,
or returns None if this queue is empty.
:param timeout: timeout in mills.
:param topic: message topic
:return: the head of this queue, or None if this queue is empty
"""
pass
@abstractmethod
@mpi("mesh.chan.push")
def push(self, payload: bytes, metadata: Dict[str, str], topic: str = ""):
"""
Inserts the specified element into this queue if it is possible to do
so immediately without violating capacity restrictions.
When using a capacity-restricted queue, this method is generally
preferable to add, which can fail to insert an element only
by throwing an exception.
:param payload: message payload
:param metadata: Message metadata
:param topic: message topic
:return:
"""
pass
@abstractmethod
@mpi("mesh.chan.release")
def release(self, timeout: int, topic: str = ""):
"""
Close the channel session.
:param timeout:
:param topic: message topic
:return:
"""
pass
async def pick(self, topic: str = "") -> bytes:
""" Peek the instance """
return self.peek(topic)
async def poll(self, timeout: int, topic: str = "") -> bytes:
""" Pop the instance """
return self.pop(timeout, topic)
async def offer(self, payload: bytes, metadata: Dict[str, str], topic: str = ""):
""" Push the instance """
self.push(payload, metadata, topic) | PypiClean |
/echarts-china-counties-pypkg-0.0.2.tar.gz/echarts-china-counties-pypkg-0.0.2/echarts_china_counties_pypkg/resources/echarts-china-counties-js/272a360aa205f50ad3798ac8832e0052.js | (function (root, factory) {if (typeof define === 'function' && define.amd) {define(['exports', 'echarts'], factory);} else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') {factory(exports, require('echarts'));} else {factory({}, root.echarts);}}(this, function (exports, echarts) {var log = function (msg) {if (typeof console !== 'undefined') {console && console.error && console.error(msg);}};if (!echarts) {log('ECharts is not Loaded');return;}if (!echarts.registerMap) {log('ECharts Map is not loaded');return;}echarts.registerMap('方城县', {"type":"FeatureCollection","features":[{"type":"Feature","id":"411322","properties":{"name":"方城县","cp":[113.012494,33.254391],"childNum":1},"geometry":{"type":"Polygon","coordinates":["@@@CAAAE@E@A@AACAA@AA@@@CA@AA@ACAA@AA@@AA@@A@@B@BABA@@@@AA@@AA@@@@@A@A@@BA@@@A@@@@@@C@A@@@A@AB@@CAG@@@@@A@I@A@@@AHB@B@FB@BE@@BD@@@B@J@@@@F@@@@EBE@A@CBCBAB@H@@@@C@@DCBA@CB@BBB@@A@@@@@ACA@@@C@@@@@@AC@GB@@@B@@G@@@@@AD@BC@A@CAE@C@AA@@@A@@AA@@@@@AAE@A@@EBA@@@@A@@CC@@@AABA@@AA@A@AAA@@A@@@@@A@A@@@@A@CAC@A@AA@@@A@@A@A@C@@@ACA@@@E@A@@@@D@BCBA@A@BDC@@BCBC@A@AACBA@@@A@C@GD@B@@A@A@@@@AA@@B@@A@AA@@A@CAE@A@@@I@A@ABABA@A@AAA@A@@@A@A@BBA@EBC@@@@AAAM@@B@@AD@@A@@@A@@A@B@BB@@BAB@@@@AAA@A@@BA@A@C@@@AA@@@@ABA@@AA@@@A@A@A@@@@@AC@@@A@@@@@@@A@@@AAA@@DC@@@@CAGCECSIA@CBAB@@@B@@@@GDEBABA@CBA@CBABCB@@A@AA@@CBE@ABIBCCAA@@AA@A@@@BC@@BAB@BABCDC@BBAB@@AB@BAB@@@BBB@@AB@@FD@@@@@@CDA@DD@BBBBBBBBDA@CBCBABA@@@AA@AA@@@@@A@A@@@@@AACE@@A@CDGBAB@BA@ABCBABCDC@@@C@@@CDA@@@A@CA@A@@@AD@B@@@BA@@AA@A@@@AD@@@@A@@@ACAAA@@AA@CA@@@E@CB@@A@BB@BA@@@A@AAABEBA@@B@@AB@@A@@A@@@@A@C@@BA@@@AA@@@@AA@@GBGBA@@B@@EBA@@@@B@@@@@@@D@@A@@B@@@B@@BB@@BABDABBBDDDB@BBLE@@@@B@@@B@@@BCB@@@@AB@@AB@@AB@@A@@@ACAAA@@AAAAC@@AA@C@@A@E@ABCFCDABCDAB@B@B@@@BADADCD@B@B@@@D@@@BDDBDDFDDBBBBBBBB@@@@ADAD@B@B@FAF@D@B@BB@BDDDBDFDBBDBB@D@DBBBBBBDB@@BBBFFBD@@AJ@HAD@B@HBD@B@DADAH@DABAB@HA@ABADEDGF@@@B@B@BHDDFDDDBJA@B@DJ@E@BLDAFFJFDABBDHCD@HG@CEGACF@AKAAHA@JPBL@BBJACECCDDBDABH@JI@ADA@CEA@ABEBA@BB@B@@BB@BBBFHAB@BBDH@B@@@BB@BB@@BBD@BB@BC@AB@@BBBBBBBDBBB@B@BBBBFBBB@@@DABAD@B@@@DADABAB@@@B@DBB@CJBBDBH@@IDCAA@ADBBBDDBNHCDADEDBBA@@@ABA@@AC@@EA@C@CAAFABC@@FB@@BB@BBBAB@BBD@B@@AF@@FHBDAB@@@B@BFF@BDDBDBB@@DDBBD@@BFF@@@B@D@@BBB@BD@@D@@BB@@@B@BB@B@B@B@@@D@B@BB@@DBDAB@D@@@BBB@@BA@@@@B@B@B@B@@@B@@BBB@@HBB@B@BA@@BBB@@ABA@@B@B@B@B@D@@@DBDFBFBBD@F@BBFBB@FF@B@@AA@@A@@B@@@@@A@@C@@@A@@B@@@BDBB@@@B@@AB@@@B@@BBDBB@@CFA@BB@@BB@@@BA@AB@BABABBBDDBBB@BD@@BB@BF@B@BBDF@@@B@DD@B@BB@B@BFB@@B@FDGB@@@B@B@@@BBDB@ADA@@BBBB@@B@D@B@BB@BB@@B@B@@@@B@@AB@B@@@B@BBDBBDB@@@B@@@B@@@BBBB@@@BBB@B@B@BBDB@@@@D@B@@D@B@@BBBBDDDDFDDDDDD@BBBB@@BBBD@@AB@BCBA@E@ADA@ABC@AF@BAB@B@@HB@@CBBB@BBBBB@DBD@B@B@@DDBB@DBBB@DBD@B@BBFHBB@B@@@BDDAB@B@D@BAB@FB@AD@B@B@B@@FD@JDDEHA@A@@D@@D@@B@F@BB@@@BBADABBDBHBD@@AD@@@@@B@BBB@@EFABA@@B@B@BBB@@@BB@@B@BB@@BF@BB@@D@B@DBB@DBBB@@@B@B@@B@@@B@@@DA@AB@B@B@@@BAB@BAB@B@BBD@D@D@B@B@@@B@BBBBBBBBDF@AB@D@FAFCB@BCB@BA@AB@BA@@@@BBB@@@B@DA@@B@@@F@DAB@F@B@DBB@D@DBD@B@B@@@D@B@D@B@@@@@@AAA@A@@@A@@BCD@@AB@@@@A@A@AB@@@AA@AB@BC@@@A@A@ABABAB@@@B@F@@@@@B@@A@@BABADAFCBAB@BADBB@DDB@BBB@@@B@@ADC@@@ABC@@@@B@B@BAB@BAB@@@D@B@@@@A@ABC@A@A@@AA@@BAD@@@B@@AB@DA@@@@B@BB@@@@@B@@B@B@BAB@B@B@B@B@B@BB@@BA@@B@@@BCDA@AB@@B@BB@@A@AF@FAD@B@@ABAD@@@@CB@@@B@BA@@@C@@BA@A@ACE@@@A@AFCBA@@DBB@F@B@@@@@@AB@@@@A@A@A@@@A@@@C@@@@@C@@@AB@@@B@B@@@BAB@FAB@BA@@@@@A@A@A@AAA@@@CBABA@CAA@E@CAABA@CBA@ABC@@BA@AA@@AAA@C@@BA@A@@AA@A@@@AB@B@@A@@AA@@@A
@@BC@A@A@@BA@@BABAB@BA@AAA@@@A@@@CAA@AA@@@@@ABA@AACA@AC@CCAA@@@AAE@E@AAA@A@@@AA@@A@@@@A@B@@A@@@ABA@@@E@A@C@CBABA@A@C@C@C@A@@BAB@@ADCB@BA@@@@B@BB@@FDDBBBFBBBD@J@DBBBH@BBD@B@B@B@FAB@D@D@@@F@B@@@BAD@BAD@D@BAB@DBB@D@B@B@DAFC@@B@@@AF@@BB@ADA@@BABADABAF@@@DA@@BA@@@A@C@E@EBADEB@@@B@D@B@BCB@DA@@BBBB@@B@B@BB@@BB@B@@@@D@B@B@@@DD@@B@B@@@@@@BBBBB@@B@B@B@DA@@B@B@@@B@BBJ@BBD@BBBBB@@@F@HAB@DAB@DA@@DAB@DA@AB@@A@@DCDCBA@A@A@ABA@AFEDAD@DCB@@A@CBA@C@@@A@@@CFCDA@@B@@@B@@A@CDAB@AAB@@@@AB@@A@A@@B@@DBA@BBA@@@@BAB@B@BA@@@BB@@@B@@AA@@@B@B@@BBA@@@EBA@@@AB@ACB@@C@@CBA@@@@@ACC@AA@@B@@A@@@A@@AG@@B@@AB@@@BDBA@CBA@@A@BAA@@@@@@AB@CCB@@@@@@@DA@ADA@CB@B@@@B@B@BDDA@@B@@BB@@@BA@@BH@@DA@AB@BAAAD@DAB@B@BBD@@@B@B@BAB@@@@AD@@ABA@@B@@AB@@A@@BA@@BB@@@@B@@AAA@AB@@@B@BB@@@@BA@@BABAD@@@BA@@B@@A@C@A@@BA@@@A@@B@BBB@B@@BBAH@D@ADB@J@D@D@FADABABABA@EB@@AD@@@F@F@BBB@DBBBB@B@BAB@NB@@@BAB@@B@DD@@FA@@@@@D@@@BH@BBDA@@DAFCBABA@A@@B@@@B@BB@@B@D@D@DAB@@@@A@AB@D@D@FAB@@A@AB@BB@BHAB@@AB@@@BBB@@@@C@@B@@@@@DDDBB@@ADA@@FEBAJEB@B@B@B@BBBBD@B@@@@@BA@@@A@A@A@C@@@AA@CA@@A@@BBD@@@BA@A@@@A@@@@A@A@CAA@@@CBA@@B@@@B@@@B@B@B@BCHF@@@A@@BA@A@@AEAA@@A@EAA@@@AA@@AE@A@AB@@@B@DA@@B@BCBA@@@A@@A@@A@@@A@A@AA@@@A@C@A@A@A@A@CAAAAA@@A@E@A@C@@@@AACAA@A@@BA@A@@@ADA@@@@AC@@BA@@BA@@@E@@BAB@@ABA@A@C@A@AA@@AB@@@BBB@@@BABCBA@E@@BA@@@@B@@@BF@@@@FC@AB@@A@CBA@AFC@A@A@CBA@C@@AACAA@C@A@@@@@@CBA@@@AA@@A@A@A@AB@@ABC@CBAB@@@B@@@@@@AD@@@C@@A@AA@@@A@@A@@@@@A@@A@@A@A@@BBB@@AB@@@C@@AD@AAC@B@B@@@AC@C@@A@@@C@@@A@@DAA@BC@A@@@@@A@@@@AABA@@@A@@A@@@@@AA@A@@@@@@A@A@@@@B@@@ACA@B@@@AB@@A@@AA@B@@AA@@@AA@@BA@C@@@A@@A@A@@@@A@@@@@A@@@AB@@@BB@@B@@@@@@AB@@@@@AA@@C@@AA@@@@@@AAA@A@@B@AA@@@@@@@A@@@@@A@A@@AABA@@@A@@@@@@@A@A@A@@@C@@B@@ABA@@@@@@@AA@@@A@@@@@@AA@@AAA@A@@AA@A@A@AA@@@@A@@@@@A@@@@A@@@@A@@A@@@@@ABAB@@A@@@CAA@@@@@@BA@@@@A@@@@@@@@A@BD@@A@@@C@@@CA@@@AB@@@B@@A@@@@@AB@@A@@BA@@D@B@@@B@@AA@@AB@@@B@@AB@@@BA@@DBB@B@@@B@@AB@BB@@DA@@@@AEB@ACA@@A@CB@@CB@@A@A@@@@B@BA@@@@@@A@A@@A@@BA@@@AA@@@@A@A@@@@@A@@@@@@A@@@@AA@@@@@@@@@@A@@AAA@B@@A@@@A@@B@@A@@@A@@@@BA@@@@@@BAB@@@BA@A@A@@@@@@AA@@AA@@@@@A@@AAA@@ADCBA@@ACAEAC@@B@@A@@@@DC@@B@BA@A@@AAAB@AA@@AB@@A@GCCC@CAAAA@A@@@@A@A@CCC@@AA@@@@@ABC@AA@AAA@AAAAC@@@@A@@@A@@AAAAECCAAACAAA@AA@A@A@A@AA@@@@AB@@A@CAA@A@@@EAA@CAA@@ACAAAC@E@@ACAEA@AC@AAA@ABABA@A@@AC@@@@@A@@AA@A@@@A@ABAD@@@@@A@@AAA@A@@AC@A@@@@@@D@@@@@BBBBB@@AB@@ABA@@@A@@@EA@@@@A@A@AAA@@BA@@@A@@D@@ABAB@@A@AABA@@AA@ACC@@A@AB@A@@@@AC@@@@AB@BA@E@@@@AA@B@@A@A@@A@@@AAC@E@AAC@@@A@A@@BA@@@A@E@@@@BAB@@@@C@@@CB@@ACA@A@C@CAA@@@@E@EA@@@ABA@A@C@IDC@AAEE@@AAAC@@AA@@ABC@A@@@AAA@CA@@@A@@AA@CAC@@@@C@A@@@@@A@A@@BABEBA@@@AB@@G@CDGD@@@B@BADABAF@@@BBBBB@@BB@DA@@B@BA@@@EBCBA@@@@F@@BF@BDF@@BB@@@@@BEBA@ABAB@@@F@B@@BL@F@@@BCF@FA@AD@@EBADAB@B@@BB@DDD@@@@AC@CAAB@BAB@BA@A@BB@BFB@@B@D@DAB@DABABCDABABA@@@@BABABABAB@@A@EB@@ABE@C@@@@@CB@@E@A@@@@A@@B@@@A@@@@@@@@A@@@@@@ABA@@@@@@@@AB@@A@@CB@@@@@@A@A@@@@@BA@@A@A@A@@@ACA@@@@B@@@BAB@@ABCBAB@@MCAACCI@A@CAA@A@A@IC@AAA@AIGEE@@@@ABAA@@@A@@@BA@@@@CA@@@@BA@GGACA@@@G@C@@B@@A@A@@@A@@BAB@@@@@A@@A@CBBB@BA@ABA@@A@@BC@CE@@AB@B@B@FABBBBBA@@@K@A@@@A@@DA@@@A@@CE@A@@A@@@@CC@@A@@A@@@@BA@@CG@@@@@@@G@@ABA@A@A@@@@A@@@E@CBA@CBABGF@@@BADBB@B@B@@@@ABEF@B@@AB@B@BCD@BA@AAA@BDA@A@@AC@@B@@CB@@AAC@@@ABC@@D@@FB@@@@@D@@GH@@B@BD@B@@@@CB@@BBABEFED@@AAAACCA@A@A@ACAAA@@@@AA@AA@@A@A@@CAA@AAACAGC@@@AA@@BCBKD@DI@@@@CA@@@@DA@@@@C@@A@@@@FA@@@@C@@A@A@@@@B@B@@@@A@@@A@@@AA@A@@@AAA@@@A@@@A@A@C@@BA@@@AAA@A@A@@AC@CAE@@C@GB@@A@BDA@BF@BC@CBA@A@A@C@A@E@BFBDAF@@@BEBC@A@A@@@A@ECICEAAA@@E@CA@@@@AA@A"],"encodeOffsets":[[115668,33924]]}}],"UTF8Encoding":true});})); | PypiClean |
/cogent3-2023.2.12a1.tar.gz/cogent3-2023.2.12a1/doc/_build/jupyter_execute/examples/hmm_par_heterogeneity.ipynb | ```
import set_working_directory
from cogent3 import load_aligned_seqs, load_tree
from cogent3.evolve.substitution_model import (
TimeReversibleNucleotide,
predicate,
)
from scipy.stats.distributions import chi2
aln = load_aligned_seqs("data/long_testseqs.fasta")
tree = load_tree("data/test.tree")
MotifChange = predicate.MotifChange
treat_gap = dict(recode_gaps=True, model_gaps=False)
kappa = (~MotifChange("R", "Y")).aliased("kappa")
model = TimeReversibleNucleotide(predicates=[kappa], **treat_gap)
lf_one = model.make_likelihood_function(tree, digits=2, space=3)
lf_one.set_alignment(aln)
lf_one.optimise(show_progress=False)
lnL_one = lf_one.get_log_likelihood()
df_one = lf_one.get_num_free_params()
lf_one
bin_submod = TimeReversibleNucleotide(
predicates=[kappa], ordered_param="rate", distribution="gamma", **treat_gap
)
lf_bins = bin_submod.make_likelihood_function(
tree, bins=2, sites_independent=True, digits=2, space=3
)
lf_bins.set_param_rule("bprobs", is_constant=True)
lf_bins.set_alignment(aln)
lf_bins.optimise(local=True, show_progress=False)
lnL_bins = lf_bins.get_log_likelihood()
df_bins = lf_bins.get_num_free_params()
assert df_bins == 9
lf_bins
lf_patches = bin_submod.make_likelihood_function(
tree, bins=2, sites_independent=False, digits=2, space=3
)
lf_patches.set_param_rule("bprobs", is_constant=True)
lf_patches.set_alignment(aln)
lf_patches.optimise(local=True, show_progress=False)
lnL_patches = lf_patches.get_log_likelihood()
df_patches = lf_patches.get_num_free_params()
lf_patches
LR = lambda alt, null: 2 * (alt - null)
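# The likelihood-ratio statistic is 2 * (lnL_alt - lnL_null); under the null
# hypothesis it is asymptotically chi-squared distributed with degrees of
# freedom equal to the difference in the number of free parameters, which is
# how the p-values below are obtained via chi2.sf.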
lr = LR(lnL_bins, lnL_one)
lr
print("%.4f" % chi2.sf(lr, df_patches - df_bins))
bprobs = lf_patches.get_param_value("bprobs")
print("%.1f : %.1f" % tuple(bprobs))
pp = lf_patches.get_bin_probs()
print(pp["bin0"][20])
from numpy import array
single_kappa = lf_one.get_param_value("kappa")
kappa_bin_submod = TimeReversibleNucleotide(predicates=[kappa], **treat_gap)
lf_kappa = kappa_bin_submod.make_likelihood_function(
tree, bins=["slow", "fast"], sites_independent=False, digits=1, space=3
)
epsilon = 1e-6
lf_kappa.set_param_rule(
kappa, init=single_kappa - epsilon, upper=single_kappa, bin="slow"
)
lf_kappa.set_param_rule(
kappa, init=single_kappa + epsilon, lower=single_kappa, bin="fast"
)
lf_kappa.set_param_rule("bprobs", init=array([1.0 - epsilon, 0.0 + epsilon]))
lf_kappa.set_alignment(aln)
lf_kappa.optimise(local=True, show_progress=False)
lf_kappa
```
| PypiClean |
/raffiot-0.6.3.tar.gz/raffiot-0.6.3/README.md | # Robust And Fast Functional IO Toolkit
*Raffiot* is small (almost) dependency-free python library providing some
usual functional tools. It currently provides
- an easy-to-use `IO` monad which is **stack-safe**, **fast**, supports
  **asynchronous**, **concurrent**, **parallel** programming, and has many other features.
- a `Resource` data type for easy but reliable **resource management**.
- a `Result` data structure to represent errors
## Demo
For a demo, just type this in a terminal:
```shell script
curl https://raw.githubusercontent.com/chrilves/raffiot.py/main/demos/raffiot_demo.sh | /bin/sh
```
This demo runs 4 computations in parallel. It demonstrates how simple concurrent
and parallel programing is in *raffiot*.
**Note that this command will install raffiot in your current Python environment**
## Documentation
### [Guide](https://chrilves.github.io/raffiot.py/index.html)
The [guide](https://chrilves.github.io/raffiot.py/index.html) is online at
[https://chrilves.github.io/raffiot.py/index.html](https://chrilves.github.io/raffiot.py/index.html).
### [API](https://chrilves.github.io/raffiot.py/api/index.html)
The [API](https://chrilves.github.io/raffiot.py/api/index.html) is online at
[https://chrilves.github.io/raffiot.py/api/index.html](https://chrilves.github.io/raffiot.py/api/index.html).
## Features
- **pure python**: *Raffiot* is written entirely in Python 3.7+.
- **small**: it is just a few small files.
- **(almost) dependency-free**: it only depends on `typing-extensions` (for the
`@final` annotation).
- **crystal clear code**
### IO
- **stack safe**: you just won't run into stack overflows anymore.
- **fast**: you won't notice the overhead.
- **dependency injection** *made easy*: make some context visible from anywhere.
- *simple* **asynchronous** *and* **concurrent programming**: full support of synchronous,
asynchronous and concurrent programming *with the same simple API*.
- **railway-oriented programming**: clean and simple failure management.
- **distinction** *between* **expected and unexpected failures**: some failures are part
  of your program's normal behaviour (errors) while others show that something
terribly wrong happened (panics). Yes, that's heavily inspired by *Rust*.
### Resource
Python has the `with` construction, but `Resource` goes a step further.
- **easy user-defined resource creation**: just provide some open and close
  functions.
- **composability**: the resource you want to create depends on another resource?
Not a problem, you can compose resources the way you want. It scales.
- **failures handling in resources**: `Resource` has everything `IO` has, including
its wonderful failure management.
### Result
Did I mention **Railway-Oriented Programming**? `Result` represents the 3 possible
results of a computation:
- `Ok(value)`: the computation successfully computed this `value`.
- `Error(error)`: the computation failed on some expected failure `error`, probably
from the business domain.
- `Panic(exception)`: the computation failed on some unexpected failure `exception`.
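
The snippet below is a minimal, illustrative sketch of railway-oriented handling with these
three constructors. It assumes `Ok`, `Error` and `Panic` are classes importable from
`raffiot.result`; apart from those names, everything in it (the function and the error label)
is made up for the example.

```python
from raffiot.result import Ok, Error, Panic  # assumed import path

def parse_age(raw: str):
    """Railway style: every outcome is returned as a value, nothing is raised."""
    try:
        age = int(raw)
        if age < 0:
            return Error("negative_age")  # expected, business-domain failure
        return Ok(age)
    except Exception as exc:              # anything else is a defect
        return Panic(exc)                 # unexpected failure

outcome = parse_age("42")
if isinstance(outcome, Ok):
    print("success:", outcome)
elif isinstance(outcome, Error):
    print("domain error:", outcome)
else:
    print("something went terribly wrong:", outcome)
``` | PypiClean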
/vioneta-2023.7.3.tar.gz/vioneta-2023.7.3/homeassistant/components/isy994/sensor.py | from __future__ import annotations
from typing import Any, cast
from pyisy.constants import (
ATTR_ACTION,
ATTR_CONTROL,
COMMAND_FRIENDLY_NAME,
ISY_VALUE_UNKNOWN,
NC_NODE_ENABLED,
PROP_BATTERY_LEVEL,
PROP_COMMS_ERROR,
PROP_ENERGY_MODE,
PROP_HEAT_COOL_STATE,
PROP_HUMIDITY,
PROP_ON_LEVEL,
PROP_RAMP_RATE,
PROP_STATUS,
PROP_TEMPERATURE,
TAG_ADDRESS,
)
from pyisy.helpers import EventListener, NodeProperty
from pyisy.nodes import Node, NodeChangedEvent
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory, Platform, UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
_LOGGER,
DOMAIN,
UOM_DOUBLE_TEMP,
UOM_FRIENDLY_NAME,
UOM_INDEX,
UOM_ON_OFF,
UOM_TO_STATES,
)
from .entity import ISYNodeEntity
from .helpers import convert_isy_value_to_hass
# Disable general purpose and redundant sensors by default
AUX_DISABLED_BY_DEFAULT_MATCH = ["GV", "DO"]
AUX_DISABLED_BY_DEFAULT_EXACT = {
PROP_COMMS_ERROR,
PROP_ENERGY_MODE,
PROP_HEAT_COOL_STATE,
PROP_ON_LEVEL,
PROP_RAMP_RATE,
PROP_STATUS,
}
# Reference pyisy.constants.COMMAND_FRIENDLY_NAME for API details.
# Note: "LUMIN"/Illuminance removed, some devices use non-conformant "%" unit
# "VOCLVL"/VOC removed, uses qualitative UOM not ug/m^3
ISY_CONTROL_TO_DEVICE_CLASS = {
PROP_BATTERY_LEVEL: SensorDeviceClass.BATTERY,
PROP_HUMIDITY: SensorDeviceClass.HUMIDITY,
PROP_TEMPERATURE: SensorDeviceClass.TEMPERATURE,
"BARPRES": SensorDeviceClass.ATMOSPHERIC_PRESSURE,
"CC": SensorDeviceClass.CURRENT,
"CO2LVL": SensorDeviceClass.CO2,
"CPW": SensorDeviceClass.POWER,
"CV": SensorDeviceClass.VOLTAGE,
"DEWPT": SensorDeviceClass.TEMPERATURE,
"DISTANC": SensorDeviceClass.DISTANCE,
"ETO": SensorDeviceClass.PRECIPITATION_INTENSITY,
"FATM": SensorDeviceClass.WEIGHT,
"FREQ": SensorDeviceClass.FREQUENCY,
"MUSCLEM": SensorDeviceClass.WEIGHT,
"PF": SensorDeviceClass.POWER_FACTOR,
"PM10": SensorDeviceClass.PM10,
"PM25": SensorDeviceClass.PM25,
"PRECIP": SensorDeviceClass.PRECIPITATION,
"RAINRT": SensorDeviceClass.PRECIPITATION_INTENSITY,
"RFSS": SensorDeviceClass.SIGNAL_STRENGTH,
"SOILH": SensorDeviceClass.MOISTURE,
"SOILT": SensorDeviceClass.TEMPERATURE,
"SOLRAD": SensorDeviceClass.IRRADIANCE,
"SPEED": SensorDeviceClass.SPEED,
"TEMPEXH": SensorDeviceClass.TEMPERATURE,
"TEMPOUT": SensorDeviceClass.TEMPERATURE,
"TPW": SensorDeviceClass.ENERGY,
"WATERP": SensorDeviceClass.PRESSURE,
"WATERT": SensorDeviceClass.TEMPERATURE,
"WATERTB": SensorDeviceClass.TEMPERATURE,
"WATERTD": SensorDeviceClass.TEMPERATURE,
"WEIGHT": SensorDeviceClass.WEIGHT,
"WINDCH": SensorDeviceClass.TEMPERATURE,
}
ISY_CONTROL_TO_STATE_CLASS = {
control: SensorStateClass.MEASUREMENT for control in ISY_CONTROL_TO_DEVICE_CLASS
}
ISY_CONTROL_TO_ENTITY_CATEGORY = {
PROP_RAMP_RATE: EntityCategory.DIAGNOSTIC,
PROP_ON_LEVEL: EntityCategory.DIAGNOSTIC,
PROP_COMMS_ERROR: EntityCategory.DIAGNOSTIC,
}
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the ISY sensor platform."""
isy_data = hass.data[DOMAIN][entry.entry_id]
entities: list[ISYSensorEntity] = []
devices: dict[str, DeviceInfo] = isy_data.devices
for node in isy_data.nodes[Platform.SENSOR]:
_LOGGER.debug("Loading %s", node.name)
entities.append(ISYSensorEntity(node, devices.get(node.primary_node)))
aux_sensors_list = isy_data.aux_properties[Platform.SENSOR]
for node, control in aux_sensors_list:
_LOGGER.debug("Loading %s %s", node.name, COMMAND_FRIENDLY_NAME.get(control))
enabled_default = control not in AUX_DISABLED_BY_DEFAULT_EXACT and not any(
control.startswith(match) for match in AUX_DISABLED_BY_DEFAULT_MATCH
)
entities.append(
ISYAuxSensorEntity(
node=node,
control=control,
enabled_default=enabled_default,
unique_id=f"{isy_data.uid_base(node)}_{control}",
device_info=devices.get(node.primary_node),
)
)
async_add_entities(entities)
class ISYSensorEntity(ISYNodeEntity, SensorEntity):
"""Representation of an ISY sensor device."""
@property
def target(self) -> Node | NodeProperty | None:
"""Return target for the sensor."""
return self._node
@property
def target_value(self) -> Any:
"""Return the target value."""
return self._node.status
@property
def raw_unit_of_measurement(self) -> dict | str | None:
"""Get the raw unit of measurement for the ISY sensor device."""
if self.target is None:
return None
uom = self.target.uom
# Backwards compatibility for ISYv4 Firmware:
if isinstance(uom, list):
return UOM_FRIENDLY_NAME.get(uom[0], uom[0])
# Special cases for ISY UOM index units:
if isy_states := UOM_TO_STATES.get(uom):
return isy_states
if uom in (UOM_ON_OFF, UOM_INDEX):
assert isinstance(uom, str)
return uom
return UOM_FRIENDLY_NAME.get(uom)
@property
def native_value(self) -> float | int | str | None:
"""Get the state of the ISY sensor device."""
if self.target is None:
return None
if (value := self.target_value) == ISY_VALUE_UNKNOWN:
return None
# Get the translated ISY Unit of Measurement
uom = self.raw_unit_of_measurement
# Check if this is a known index pair UOM
if isinstance(uom, dict):
return uom.get(value, value)
if uom in (UOM_INDEX, UOM_ON_OFF):
return cast(str, self.target.formatted)
# Check if this is an index type and get formatted value
if uom == UOM_INDEX and hasattr(self.target, "formatted"):
return cast(str, self.target.formatted)
# Handle ISY precision and rounding
value = convert_isy_value_to_hass(value, uom, self.target.prec)
# Convert temperatures to Home Assistant's unit
if uom in (UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT):
value = self.hass.config.units.temperature(value, uom)
if value is None:
return None
assert isinstance(value, (int, float))
return value
@property
def native_unit_of_measurement(self) -> str | None:
"""Get the Home Assistant unit of measurement for the device."""
raw_units = self.raw_unit_of_measurement
# Check if this is a known index pair UOM
if isinstance(raw_units, dict) or raw_units in (UOM_ON_OFF, UOM_INDEX):
return None
if raw_units in (
UnitOfTemperature.FAHRENHEIT,
UnitOfTemperature.CELSIUS,
UOM_DOUBLE_TEMP,
):
return self.hass.config.units.temperature_unit
return raw_units
class ISYAuxSensorEntity(ISYSensorEntity):
"""Representation of an ISY aux sensor device."""
def __init__(
self,
node: Node,
control: str,
enabled_default: bool,
unique_id: str,
device_info: DeviceInfo | None = None,
) -> None:
"""Initialize the ISY aux sensor."""
super().__init__(node, device_info=device_info)
self._control = control
self._attr_entity_registry_enabled_default = enabled_default
self._attr_entity_category = ISY_CONTROL_TO_ENTITY_CATEGORY.get(control)
self._attr_device_class = ISY_CONTROL_TO_DEVICE_CLASS.get(control)
self._attr_state_class = ISY_CONTROL_TO_STATE_CLASS.get(control)
self._attr_unique_id = unique_id
self._change_handler: EventListener = None
self._availability_handler: EventListener = None
name = COMMAND_FRIENDLY_NAME.get(self._control, self._control)
self._attr_name = f"{node.name} {name.replace('_', ' ').title()}"
@property
def target(self) -> Node | NodeProperty | None:
"""Return target for the sensor."""
if self._control not in self._node.aux_properties:
# Property not yet set (i.e. no errors)
return None
return cast(NodeProperty, self._node.aux_properties[self._control])
@property
def target_value(self) -> Any:
"""Return the target value."""
return None if self.target is None else self.target.value
async def async_added_to_hass(self) -> None:
"""Subscribe to the node control change events.
Overloads the default ISYNodeEntity updater to only update when
this control is changed on the device and prevent duplicate firing
of `isy994_control` events.
"""
self._change_handler = self._node.control_events.subscribe(
self.async_on_update, event_filter={ATTR_CONTROL: self._control}
)
self._availability_handler = self._node.isy.nodes.status_events.subscribe(
self.async_on_update,
event_filter={
TAG_ADDRESS: self._node.address,
ATTR_ACTION: NC_NODE_ENABLED,
},
)
@callback
def async_on_update(self, event: NodeProperty | NodeChangedEvent) -> None:
"""Handle a control event from the ISY Node."""
self.async_write_ha_state()
@property
def available(self) -> bool:
"""Return entity availability."""
return cast(bool, self._node.enabled) | PypiClean |
/flexmeasures_client-0.1.8-py3-none-any.whl/flexmeasures_client/s2/script/websockets_server.py | import asyncio
import json
import aiohttp
from aiohttp import web
from flexmeasures_client.client import FlexMeasuresClient
from flexmeasures_client.s2.cem import CEM
from flexmeasures_client.s2.control_types.FRBC.frbc_simple import FRBCSimple
from flexmeasures_client.s2.python_s2_protocol.common.schemas import ControlType
async def rm_details_watchdog(ws, cem: CEM):
"""This function will define a service in Home Assistant, or could
be a HTTP endpoint to trigger schedules.
:param ws: websocket object
:param cem: Customer Energy Manager petitions handler
"""
# wait to get resource manager details
while cem._control_type is None:
await asyncio.sleep(1)
await cem.activate_control_type(control_type=ControlType.FILL_RATE_BASED_CONTROL)
# check/wait that the control type is set properly
while cem._control_type != ControlType.FILL_RATE_BASED_CONTROL:
print("waiting for the activation of the control type...")
await asyncio.sleep(1)
print("CONTROL TYPE: ", cem._control_type)
# after this, schedule will be triggered on reception of a new system description
async def websocket_producer(ws, cem: CEM):
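    # Relay outgoing CEM messages to the connected resource manager over the
    # websocket, polling cem.get_message() until the CEM is closed.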
print("start websocket message producer")
print("IS CLOSED? ", cem.is_closed())
while not cem.is_closed():
message = await cem.get_message()
print("sending message")
await ws.send_json(message)
print("cem closed")
async def websocket_consumer(ws, cem: CEM):
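    # Receive S2 messages from the resource manager and hand them to the CEM;
    # a plain "close" text message or a websocket error ends the session.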
async for msg in ws:
print("RECEIVED: ", json.loads(msg.json()))
if msg.type == aiohttp.WSMsgType.TEXT:
if msg.data == "close":
# TODO: save cem state?
print("close...")
cem.close()
await ws.close()
else:
await cem.handle_message(json.loads(msg.json()))
elif msg.type == aiohttp.WSMsgType.ERROR:
print("close...")
cem.close()
print("ws connection closed with exception %s" % ws.exception())
# TODO: save cem state?
print("websocket connection closed")
async def websocket_handler(request):
ws = web.WebSocketResponse()
await ws.prepare(request)
fm_client = FlexMeasuresClient("toy-password", "[email protected]")
cem = CEM(sensor_id=1, fm_client=fm_client)
frbc = FRBCSimple(power_sensor_id=1, price_sensor_id=2)
cem.register_control_type(frbc)
# create "parallel" tasks for the message producer and consumer
await asyncio.gather(
websocket_consumer(ws, cem),
websocket_producer(ws, cem),
rm_details_watchdog(ws, cem),
)
return ws
app = web.Application()
app.add_routes([web.get("/ws", websocket_handler)])
web.run_app(app) | PypiClean |
/ultracart_rest_sdk-4.0.173-py3-none-any.whl/ultracart/model/tax_provider_sovos.py | import re # noqa: F401
import sys # noqa: F401
from ultracart.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from ultracart.exceptions import ApiAttributeError
def lazy_import():
from ultracart.model.sovos_config import SovosConfig
globals()['SovosConfig'] = SovosConfig
class TaxProviderSovos(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          (for a top-level attribute this is (var_name,)). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          (for a top-level attribute this is (var_name,)). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'configuration': (SovosConfig,), # noqa: E501
'description': (str,), # noqa: E501
'selected': (bool,), # noqa: E501
'title': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'configuration': 'configuration', # noqa: E501
'description': 'description', # noqa: E501
'selected': 'selected', # noqa: E501
'title': 'title', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""TaxProviderSovos - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
configuration (SovosConfig): [optional] # noqa: E501
description (str): Description. [optional] # noqa: E501
selected (bool): Selected. [optional] # noqa: E501
title (str): Title. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""TaxProviderSovos - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
                composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
configuration (SovosConfig): [optional] # noqa: E501
description (str): Description. [optional] # noqa: E501
selected (bool): Selected. [optional] # noqa: E501
title (str): Title. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | PypiClean |
/esperclient-0.1.2.tar.gz/esperclient-0.1.2/test/scripts/utils.py

import esperclient
import os
from esperclient.rest import ApiException
def get_esper_credentials():
    """Read the Esper host and API key from the environment, failing loudly if either is missing."""
    try:
        host = os.environ['ESPER_HOST']
    except KeyError:
        raise RuntimeError('ESPER_HOST environment variable not set.')
    try:
        key = os.environ['ESPER_KEY']
    except KeyError:
        raise RuntimeError('ESPER_KEY environment variable not set.')
    return {
        'host': host,
        'key': key
    }
# Configuration
configuration = esperclient.Configuration()
esper_creds = get_esper_credentials()
configuration.host = esper_creds.get('host')
configuration.api_key['Authorization'] = esper_creds.get('key')
configuration.api_key_prefix['Authorization'] = 'Bearer'
def get_enterprise_for_env():
    api_instance = esperclient.EnterpriseApi(esperclient.ApiClient(configuration))
    try:
        api_response = api_instance.get_all_enterprises()
    except ApiException as e:
        print("Exception when calling EnterpriseApi->get_all_enterprises: %s\n" % e)
        raise  # re-raise so callers do not hit an unbound api_response below
    return api_response.results[0].id


def get_device_for_enterprise(enterprise_id):
    api_instance = esperclient.DeviceApi(esperclient.ApiClient(configuration))
    try:
        api_response = api_instance.get_all_devices(enterprise_id)
    except ApiException as e:
        print("Exception when calling DeviceApi->get_all_devices: %s\n" % e)
        raise
    return api_response.results[0].id


def get_group_for_enterprise(enterprise_id):
    api_instance = esperclient.DeviceGroupApi(esperclient.ApiClient(configuration))
    try:
        api_response = api_instance.get_all_groups(enterprise_id)
    except ApiException as e:
        print("Exception when calling DeviceGroupApi->get_all_groups: %s\n" % e)
        raise
    return api_response.results[0].id


def get_application_for_enterprise(enterprise_id):
    api_instance = esperclient.ApplicationApi(esperclient.ApiClient(configuration))
    try:
        api_response = api_instance.get_all_applications(enterprise_id)
    except ApiException as e:
        print("Exception when calling ApplicationApi->get_all_applications: %s\n" % e)
        raise
    return api_response.results[0].id


def get_version_for_app(application_id, enterprise_id):
    api_instance = esperclient.ApplicationApi(esperclient.ApiClient(configuration))
    try:
        api_response = api_instance.get_app_versions(application_id, enterprise_id)
    except ApiException as e:
        print("Exception when calling ApplicationApi->get_app_versions: %s\n" % e)
        raise
    return api_response.results[0].id
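
# Sketch (not part of the original module): chaining the helpers above to resolve
# the first application version of the first enterprise visible to the API key.
# Requires ESPER_HOST and ESPER_KEY to be set in the environment.
if __name__ == "__main__":
    enterprise_id = get_enterprise_for_env()
    application_id = get_application_for_enterprise(enterprise_id)
    version_id = get_version_for_app(application_id, enterprise_id)
    print("enterprise=%s application=%s version=%s" % (enterprise_id, application_id, version_id))
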
/python_modulr_client-0.0.11-py3-none-any.whl/modulr_client/api/customers/get_customers.py

import datetime
from http import HTTPStatus
from typing import Any, Dict, List, Optional, Union
import httpx
from ... import errors
from ...client import Client
from ...models.account_customer_page_response import AccountCustomerPageResponse
from ...models.account_string_search_criteria import AccountStringSearchCriteria
from ...models.get_customers_associate_search_criteria_associate_types_item import (
GetCustomersAssociateSearchCriteriaAssociateTypesItem,
)
from ...models.get_customers_associate_search_criteria_last_name_type import (
GetCustomersAssociateSearchCriteriaLastNameType,
)
from ...models.get_customers_name_type import GetCustomersNameType
from ...types import UNSET, Response, Unset
def _get_kwargs(
*,
client: Client,
id: Union[Unset, None, str] = UNSET,
q: Union[Unset, None, str] = UNSET,
type: Union[Unset, None, str] = UNSET,
verification_status: Union[Unset, None, str] = UNSET,
from_created_date: Union[Unset, None, str] = UNSET,
to_created_date: Union[Unset, None, str] = UNSET,
page: Union[Unset, None, str] = UNSET,
size: Union[Unset, None, str] = UNSET,
sort_field: Union[Unset, None, str] = UNSET,
sort_order: Union[Unset, None, str] = UNSET,
external_ref: Union[Unset, None, str] = UNSET,
external_reference: Union[Unset, None, str] = UNSET,
name_type: GetCustomersNameType,
name_value: str,
company_reg_number: Union[Unset, None, str] = UNSET,
legal_entity: Union[Unset, None, str] = UNSET,
trading_address_address_lines: Union[Unset, None, List[str]] = UNSET,
trading_address_post_code: Union[Unset, None, str] = UNSET,
trading_address_post_town: Union[Unset, None, str] = UNSET,
trading_address_country: Union[Unset, None, str] = UNSET,
trading_address_start_date: Union[Unset, None, datetime.date] = UNSET,
trading_address_end_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_address_lines: Union[Unset, None, List[str]] = UNSET,
registered_address_post_code: Union[Unset, None, str] = UNSET,
registered_address_post_town: Union[Unset, None, str] = UNSET,
registered_address_country: Union[Unset, None, str] = UNSET,
registered_address_start_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_first_name_initial: Union[Unset, None, str] = UNSET,
associate_search_criteria_last_name_type: GetCustomersAssociateSearchCriteriaLastNameType,
associate_search_criteria_last_name_value: str,
associate_search_criteria_last_names: Union[
Unset, None, List["AccountStringSearchCriteria"]
] = UNSET,
associate_search_criteria_last_name_prefix: Union[Unset, None, str] = UNSET,
associate_search_criteria_date_of_birth: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_additional_identifier_type: str,
associate_search_criteria_additional_identifier_value: str,
associate_search_criteria_home_address_address_lines: Union[Unset, None, List[str]] = UNSET,
associate_search_criteria_home_address_post_code: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_post_town: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_country: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_start_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_home_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_associate_types: Union[
Unset, None, List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]
] = UNSET,
) -> Dict[str, Any]:
url = f"{client.base_url}/customers"
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
params: Dict[str, Any] = {}
params["id"] = id
params["q"] = q
params["type"] = type
params["verificationStatus"] = verification_status
params["fromCreatedDate"] = from_created_date
params["toCreatedDate"] = to_created_date
params["page"] = page
params["size"] = size
params["sortField"] = sort_field
params["sortOrder"] = sort_order
params["externalRef"] = external_ref
params["externalReference"] = external_reference
json_name_type = name_type.value
params["name.type"] = json_name_type
params["name.value"] = name_value
params["companyRegNumber"] = company_reg_number
params["legalEntity"] = legal_entity
json_trading_address_address_lines: Union[Unset, None, List[str]] = UNSET
if not isinstance(trading_address_address_lines, Unset):
if trading_address_address_lines is None:
json_trading_address_address_lines = None
else:
json_trading_address_address_lines = trading_address_address_lines
params["tradingAddress.addressLines"] = json_trading_address_address_lines
params["tradingAddress.postCode"] = trading_address_post_code
params["tradingAddress.postTown"] = trading_address_post_town
params["tradingAddress.country"] = trading_address_country
json_trading_address_start_date: Union[Unset, None, str] = UNSET
if not isinstance(trading_address_start_date, Unset):
json_trading_address_start_date = (
trading_address_start_date.isoformat() if trading_address_start_date else None
)
params["tradingAddress.startDate"] = json_trading_address_start_date
json_trading_address_end_date: Union[Unset, None, str] = UNSET
if not isinstance(trading_address_end_date, Unset):
json_trading_address_end_date = (
trading_address_end_date.isoformat() if trading_address_end_date else None
)
params["tradingAddress.endDate"] = json_trading_address_end_date
json_registered_address_address_lines: Union[Unset, None, List[str]] = UNSET
if not isinstance(registered_address_address_lines, Unset):
if registered_address_address_lines is None:
json_registered_address_address_lines = None
else:
json_registered_address_address_lines = registered_address_address_lines
params["registeredAddress.addressLines"] = json_registered_address_address_lines
params["registeredAddress.postCode"] = registered_address_post_code
params["registeredAddress.postTown"] = registered_address_post_town
params["registeredAddress.country"] = registered_address_country
json_registered_address_start_date: Union[Unset, None, str] = UNSET
if not isinstance(registered_address_start_date, Unset):
json_registered_address_start_date = (
registered_address_start_date.isoformat() if registered_address_start_date else None
)
params["registeredAddress.startDate"] = json_registered_address_start_date
json_registered_address_end_date: Union[Unset, None, str] = UNSET
if not isinstance(registered_address_end_date, Unset):
json_registered_address_end_date = (
registered_address_end_date.isoformat() if registered_address_end_date else None
)
params["registeredAddress.endDate"] = json_registered_address_end_date
params[
"associateSearchCriteria.firstNameInitial"
] = associate_search_criteria_first_name_initial
json_associate_search_criteria_last_name_type = associate_search_criteria_last_name_type.value
params["associateSearchCriteria.lastName.type"] = json_associate_search_criteria_last_name_type
params["associateSearchCriteria.lastName.value"] = associate_search_criteria_last_name_value
json_associate_search_criteria_last_names: Union[Unset, None, List[Dict[str, Any]]] = UNSET
if not isinstance(associate_search_criteria_last_names, Unset):
if associate_search_criteria_last_names is None:
json_associate_search_criteria_last_names = None
else:
json_associate_search_criteria_last_names = []
for (
associate_search_criteria_last_names_item_data
) in associate_search_criteria_last_names:
associate_search_criteria_last_names_item = (
associate_search_criteria_last_names_item_data.to_dict()
)
json_associate_search_criteria_last_names.append(
associate_search_criteria_last_names_item
)
params["associateSearchCriteria.lastNames"] = json_associate_search_criteria_last_names
params["associateSearchCriteria.lastNamePrefix"] = associate_search_criteria_last_name_prefix
json_associate_search_criteria_date_of_birth: Union[Unset, None, str] = UNSET
if not isinstance(associate_search_criteria_date_of_birth, Unset):
json_associate_search_criteria_date_of_birth = (
associate_search_criteria_date_of_birth.isoformat()
if associate_search_criteria_date_of_birth
else None
)
params["associateSearchCriteria.dateOfBirth"] = json_associate_search_criteria_date_of_birth
params[
"associateSearchCriteria.additionalIdentifier.type"
] = associate_search_criteria_additional_identifier_type
params[
"associateSearchCriteria.additionalIdentifier.value"
] = associate_search_criteria_additional_identifier_value
json_associate_search_criteria_home_address_address_lines: Union[
Unset, None, List[str]
] = UNSET
if not isinstance(associate_search_criteria_home_address_address_lines, Unset):
if associate_search_criteria_home_address_address_lines is None:
json_associate_search_criteria_home_address_address_lines = None
else:
json_associate_search_criteria_home_address_address_lines = (
associate_search_criteria_home_address_address_lines
)
params[
"associateSearchCriteria.homeAddress.addressLines"
] = json_associate_search_criteria_home_address_address_lines
params[
"associateSearchCriteria.homeAddress.postCode"
] = associate_search_criteria_home_address_post_code
params[
"associateSearchCriteria.homeAddress.postTown"
] = associate_search_criteria_home_address_post_town
params[
"associateSearchCriteria.homeAddress.country"
] = associate_search_criteria_home_address_country
json_associate_search_criteria_home_address_start_date: Union[Unset, None, str] = UNSET
if not isinstance(associate_search_criteria_home_address_start_date, Unset):
json_associate_search_criteria_home_address_start_date = (
associate_search_criteria_home_address_start_date.isoformat()
if associate_search_criteria_home_address_start_date
else None
)
params[
"associateSearchCriteria.homeAddress.startDate"
] = json_associate_search_criteria_home_address_start_date
json_associate_search_criteria_home_address_end_date: Union[Unset, None, str] = UNSET
if not isinstance(associate_search_criteria_home_address_end_date, Unset):
json_associate_search_criteria_home_address_end_date = (
associate_search_criteria_home_address_end_date.isoformat()
if associate_search_criteria_home_address_end_date
else None
)
params[
"associateSearchCriteria.homeAddress.endDate"
] = json_associate_search_criteria_home_address_end_date
json_associate_search_criteria_associate_types: Union[Unset, None, List[str]] = UNSET
if not isinstance(associate_search_criteria_associate_types, Unset):
if associate_search_criteria_associate_types is None:
json_associate_search_criteria_associate_types = None
else:
json_associate_search_criteria_associate_types = []
for (
associate_search_criteria_associate_types_item_data
) in associate_search_criteria_associate_types:
associate_search_criteria_associate_types_item = (
associate_search_criteria_associate_types_item_data.value
)
json_associate_search_criteria_associate_types.append(
associate_search_criteria_associate_types_item
)
params[
"associateSearchCriteria.associateTypes"
] = json_associate_search_criteria_associate_types
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
"follow_redirects": client.follow_redirects,
"params": params,
}
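
# Illustrative note (not part of the generated module): nested search criteria are
# flattened into dotted query parameters by the code above. For example, a call with
# name_value="Acme Ltd" and trading_address_post_code="EC1A 1BB" contributes
#   {"name.type": <GetCustomersNameType value>, "name.value": "Acme Ltd",
#    "tradingAddress.postCode": "EC1A 1BB"}
# to `params`, after which UNSET/None entries are stripped.
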
def _parse_response(
*, client: Client, response: httpx.Response
) -> Optional[AccountCustomerPageResponse]:
if response.status_code == HTTPStatus.OK:
response_200 = AccountCustomerPageResponse.from_dict(response.json())
return response_200
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None
def _build_response(
*, client: Client, response: httpx.Response
) -> Response[AccountCustomerPageResponse]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
headers=response.headers,
parsed=_parse_response(client=client, response=response),
)
def sync_detailed(
*,
client: Client,
id: Union[Unset, None, str] = UNSET,
q: Union[Unset, None, str] = UNSET,
type: Union[Unset, None, str] = UNSET,
verification_status: Union[Unset, None, str] = UNSET,
from_created_date: Union[Unset, None, str] = UNSET,
to_created_date: Union[Unset, None, str] = UNSET,
page: Union[Unset, None, str] = UNSET,
size: Union[Unset, None, str] = UNSET,
sort_field: Union[Unset, None, str] = UNSET,
sort_order: Union[Unset, None, str] = UNSET,
external_ref: Union[Unset, None, str] = UNSET,
external_reference: Union[Unset, None, str] = UNSET,
name_type: GetCustomersNameType,
name_value: str,
company_reg_number: Union[Unset, None, str] = UNSET,
legal_entity: Union[Unset, None, str] = UNSET,
trading_address_address_lines: Union[Unset, None, List[str]] = UNSET,
trading_address_post_code: Union[Unset, None, str] = UNSET,
trading_address_post_town: Union[Unset, None, str] = UNSET,
trading_address_country: Union[Unset, None, str] = UNSET,
trading_address_start_date: Union[Unset, None, datetime.date] = UNSET,
trading_address_end_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_address_lines: Union[Unset, None, List[str]] = UNSET,
registered_address_post_code: Union[Unset, None, str] = UNSET,
registered_address_post_town: Union[Unset, None, str] = UNSET,
registered_address_country: Union[Unset, None, str] = UNSET,
registered_address_start_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_first_name_initial: Union[Unset, None, str] = UNSET,
associate_search_criteria_last_name_type: GetCustomersAssociateSearchCriteriaLastNameType,
associate_search_criteria_last_name_value: str,
associate_search_criteria_last_names: Union[
Unset, None, List["AccountStringSearchCriteria"]
] = UNSET,
associate_search_criteria_last_name_prefix: Union[Unset, None, str] = UNSET,
associate_search_criteria_date_of_birth: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_additional_identifier_type: str,
associate_search_criteria_additional_identifier_value: str,
associate_search_criteria_home_address_address_lines: Union[Unset, None, List[str]] = UNSET,
associate_search_criteria_home_address_post_code: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_post_town: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_country: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_start_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_home_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_associate_types: Union[
Unset, None, List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]
] = UNSET,
) -> Response[AccountCustomerPageResponse]:
"""Retrieve customers using filters
Either using unique references, such as customer ID, or filter parameters, such as verification
status, get details of any customers found.
Args:
id (Union[Unset, None, str]): ID of Customer(s) to fetch
q (Union[Unset, None, str]): Query parameter. ID, name or external reference of customer
to search for
type (Union[Unset, None, str]): Type to filter, can be one of:
1. LLC -> limited company
2. PLC -> publicly listed company
3. SOLETRADER -> sole trader
4. OPARTNRSHP -> ordinary partnership
5. LPARTNRSHP -> limited partnership
6. LLP -> limited liability partnership
7. CHARITY -> charity
8. INDIVIDUAL -> individual consumer
9. PCM_INDIVIDUAL -> partner clearing model individual consumer
10. PCM_BUSINESS -> partner clearing model business consumer
verification_status (Union[Unset, None, str]): Verification Status to filter, can be one
of:
1. UNVERIFIED -> no verification checks have been completed
2. VERIFIED -> verification checks completed satisfactorily
3. EXVERIFIED -> verification completed externally
4. REFERRED -> verification is pending manual review
5. DECLINED -> verification is complete with a negative result
6. REVIEWED -> verification check has been reviewed
        from_created_date (Union[Unset, None, str]): Customers created on or after this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        to_created_date (Union[Unset, None, str]): Customers created on or before this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        page (Union[Unset, None, str]): Page to fetch (0-indexed)
        size (Union[Unset, None, str]): Size of page to fetch
        sort_field (Union[Unset, None, str]): Field to sort by; sorted by createdDate if not present
        sort_order (Union[Unset, None, str]): Sorting order:
            1. asc -> ascending
            2. desc -> descending
external_ref (Union[Unset, None, str]): A list of external references to filter
external_reference (Union[Unset, None, str]): A list of external references to filter
Example: externalReference[0].type.
name_type (GetCustomersNameType):
name_value (str):
company_reg_number (Union[Unset, None, str]): Customer registration number
legal_entity (Union[Unset, None, str]): Customer legal entity
trading_address_address_lines (Union[Unset, None, List[str]]):
trading_address_post_code (Union[Unset, None, str]):
trading_address_post_town (Union[Unset, None, str]):
trading_address_country (Union[Unset, None, str]):
trading_address_start_date (Union[Unset, None, datetime.date]):
trading_address_end_date (Union[Unset, None, datetime.date]):
registered_address_address_lines (Union[Unset, None, List[str]]):
registered_address_post_code (Union[Unset, None, str]):
registered_address_post_town (Union[Unset, None, str]):
registered_address_country (Union[Unset, None, str]):
registered_address_start_date (Union[Unset, None, datetime.date]):
registered_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_first_name_initial (Union[Unset, None, str]):
associate_search_criteria_last_name_type
(GetCustomersAssociateSearchCriteriaLastNameType):
associate_search_criteria_last_name_value (str):
associate_search_criteria_last_names (Union[Unset, None,
List['AccountStringSearchCriteria']]):
associate_search_criteria_last_name_prefix (Union[Unset, None, str]):
associate_search_criteria_date_of_birth (Union[Unset, None, datetime.date]):
associate_search_criteria_additional_identifier_type (str): Type of additional personal
identifier
associate_search_criteria_additional_identifier_value (str): Personal identifier value
associate_search_criteria_home_address_address_lines (Union[Unset, None, List[str]]):
associate_search_criteria_home_address_post_code (Union[Unset, None, str]):
associate_search_criteria_home_address_post_town (Union[Unset, None, str]):
associate_search_criteria_home_address_country (Union[Unset, None, str]):
associate_search_criteria_home_address_start_date (Union[Unset, None, datetime.date]):
associate_search_criteria_home_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_associate_types (Union[Unset, None,
List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[AccountCustomerPageResponse]
"""
kwargs = _get_kwargs(
client=client,
id=id,
q=q,
type=type,
verification_status=verification_status,
from_created_date=from_created_date,
to_created_date=to_created_date,
page=page,
size=size,
sort_field=sort_field,
sort_order=sort_order,
external_ref=external_ref,
external_reference=external_reference,
name_type=name_type,
name_value=name_value,
company_reg_number=company_reg_number,
legal_entity=legal_entity,
trading_address_address_lines=trading_address_address_lines,
trading_address_post_code=trading_address_post_code,
trading_address_post_town=trading_address_post_town,
trading_address_country=trading_address_country,
trading_address_start_date=trading_address_start_date,
trading_address_end_date=trading_address_end_date,
registered_address_address_lines=registered_address_address_lines,
registered_address_post_code=registered_address_post_code,
registered_address_post_town=registered_address_post_town,
registered_address_country=registered_address_country,
registered_address_start_date=registered_address_start_date,
registered_address_end_date=registered_address_end_date,
associate_search_criteria_first_name_initial=associate_search_criteria_first_name_initial,
associate_search_criteria_last_name_type=associate_search_criteria_last_name_type,
associate_search_criteria_last_name_value=associate_search_criteria_last_name_value,
associate_search_criteria_last_names=associate_search_criteria_last_names,
associate_search_criteria_last_name_prefix=associate_search_criteria_last_name_prefix,
associate_search_criteria_date_of_birth=associate_search_criteria_date_of_birth,
associate_search_criteria_additional_identifier_type=associate_search_criteria_additional_identifier_type,
associate_search_criteria_additional_identifier_value=associate_search_criteria_additional_identifier_value,
associate_search_criteria_home_address_address_lines=associate_search_criteria_home_address_address_lines,
associate_search_criteria_home_address_post_code=associate_search_criteria_home_address_post_code,
associate_search_criteria_home_address_post_town=associate_search_criteria_home_address_post_town,
associate_search_criteria_home_address_country=associate_search_criteria_home_address_country,
associate_search_criteria_home_address_start_date=associate_search_criteria_home_address_start_date,
associate_search_criteria_home_address_end_date=associate_search_criteria_home_address_end_date,
associate_search_criteria_associate_types=associate_search_criteria_associate_types,
)
response = httpx.request(
verify=client.verify_ssl,
**kwargs,
)
return _build_response(client=client, response=response)
def sync(
*,
client: Client,
id: Union[Unset, None, str] = UNSET,
q: Union[Unset, None, str] = UNSET,
type: Union[Unset, None, str] = UNSET,
verification_status: Union[Unset, None, str] = UNSET,
from_created_date: Union[Unset, None, str] = UNSET,
to_created_date: Union[Unset, None, str] = UNSET,
page: Union[Unset, None, str] = UNSET,
size: Union[Unset, None, str] = UNSET,
sort_field: Union[Unset, None, str] = UNSET,
sort_order: Union[Unset, None, str] = UNSET,
external_ref: Union[Unset, None, str] = UNSET,
external_reference: Union[Unset, None, str] = UNSET,
name_type: GetCustomersNameType,
name_value: str,
company_reg_number: Union[Unset, None, str] = UNSET,
legal_entity: Union[Unset, None, str] = UNSET,
trading_address_address_lines: Union[Unset, None, List[str]] = UNSET,
trading_address_post_code: Union[Unset, None, str] = UNSET,
trading_address_post_town: Union[Unset, None, str] = UNSET,
trading_address_country: Union[Unset, None, str] = UNSET,
trading_address_start_date: Union[Unset, None, datetime.date] = UNSET,
trading_address_end_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_address_lines: Union[Unset, None, List[str]] = UNSET,
registered_address_post_code: Union[Unset, None, str] = UNSET,
registered_address_post_town: Union[Unset, None, str] = UNSET,
registered_address_country: Union[Unset, None, str] = UNSET,
registered_address_start_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_first_name_initial: Union[Unset, None, str] = UNSET,
associate_search_criteria_last_name_type: GetCustomersAssociateSearchCriteriaLastNameType,
associate_search_criteria_last_name_value: str,
associate_search_criteria_last_names: Union[
Unset, None, List["AccountStringSearchCriteria"]
] = UNSET,
associate_search_criteria_last_name_prefix: Union[Unset, None, str] = UNSET,
associate_search_criteria_date_of_birth: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_additional_identifier_type: str,
associate_search_criteria_additional_identifier_value: str,
associate_search_criteria_home_address_address_lines: Union[Unset, None, List[str]] = UNSET,
associate_search_criteria_home_address_post_code: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_post_town: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_country: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_start_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_home_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_associate_types: Union[
Unset, None, List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]
] = UNSET,
) -> Optional[AccountCustomerPageResponse]:
"""Retrieve customers using filters
Either using unique references, such as customer ID, or filter parameters, such as verification
status, get details of any customers found.
Args:
id (Union[Unset, None, str]): ID of Customer(s) to fetch
q (Union[Unset, None, str]): Query parameter. ID, name or external reference of customer
to search for
type (Union[Unset, None, str]): Type to filter, can be one of:
1. LLC -> limited company
2. PLC -> publicly listed company
3. SOLETRADER -> sole trader
4. OPARTNRSHP -> ordinary partnership
5. LPARTNRSHP -> limited partnership
6. LLP -> limited liability partnership
7. CHARITY -> charity
8. INDIVIDUAL -> individual consumer
9. PCM_INDIVIDUAL -> partner clearing model individual consumer
10. PCM_BUSINESS -> partner clearing model business consumer
verification_status (Union[Unset, None, str]): Verification Status to filter, can be one
of:
1. UNVERIFIED -> no verification checks have been completed
2. VERIFIED -> verification checks completed satisfactorily
3. EXVERIFIED -> verification completed externally
4. REFERRED -> verification is pending manual review
5. DECLINED -> verification is complete with a negative result
6. REVIEWED -> verification check has been reviewed
        from_created_date (Union[Unset, None, str]): Customers created on or after this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        to_created_date (Union[Unset, None, str]): Customers created on or before this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        page (Union[Unset, None, str]): Page to fetch (0-indexed)
        size (Union[Unset, None, str]): Size of page to fetch
        sort_field (Union[Unset, None, str]): Field to sort by; sorted by createdDate if not present
        sort_order (Union[Unset, None, str]): Sorting order:
            1. asc -> ascending
            2. desc -> descending
external_ref (Union[Unset, None, str]): A list of external references to filter
external_reference (Union[Unset, None, str]): A list of external references to filter
Example: externalReference[0].type.
name_type (GetCustomersNameType):
name_value (str):
company_reg_number (Union[Unset, None, str]): Customer registration number
legal_entity (Union[Unset, None, str]): Customer legal entity
trading_address_address_lines (Union[Unset, None, List[str]]):
trading_address_post_code (Union[Unset, None, str]):
trading_address_post_town (Union[Unset, None, str]):
trading_address_country (Union[Unset, None, str]):
trading_address_start_date (Union[Unset, None, datetime.date]):
trading_address_end_date (Union[Unset, None, datetime.date]):
registered_address_address_lines (Union[Unset, None, List[str]]):
registered_address_post_code (Union[Unset, None, str]):
registered_address_post_town (Union[Unset, None, str]):
registered_address_country (Union[Unset, None, str]):
registered_address_start_date (Union[Unset, None, datetime.date]):
registered_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_first_name_initial (Union[Unset, None, str]):
associate_search_criteria_last_name_type
(GetCustomersAssociateSearchCriteriaLastNameType):
associate_search_criteria_last_name_value (str):
associate_search_criteria_last_names (Union[Unset, None,
List['AccountStringSearchCriteria']]):
associate_search_criteria_last_name_prefix (Union[Unset, None, str]):
associate_search_criteria_date_of_birth (Union[Unset, None, datetime.date]):
associate_search_criteria_additional_identifier_type (str): Type of additional personal
identifier
associate_search_criteria_additional_identifier_value (str): Personal identifier value
associate_search_criteria_home_address_address_lines (Union[Unset, None, List[str]]):
associate_search_criteria_home_address_post_code (Union[Unset, None, str]):
associate_search_criteria_home_address_post_town (Union[Unset, None, str]):
associate_search_criteria_home_address_country (Union[Unset, None, str]):
associate_search_criteria_home_address_start_date (Union[Unset, None, datetime.date]):
associate_search_criteria_home_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_associate_types (Union[Unset, None,
List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
    Returns:
        Optional[AccountCustomerPageResponse]: the parsed response body
"""
return sync_detailed(
client=client,
id=id,
q=q,
type=type,
verification_status=verification_status,
from_created_date=from_created_date,
to_created_date=to_created_date,
page=page,
size=size,
sort_field=sort_field,
sort_order=sort_order,
external_ref=external_ref,
external_reference=external_reference,
name_type=name_type,
name_value=name_value,
company_reg_number=company_reg_number,
legal_entity=legal_entity,
trading_address_address_lines=trading_address_address_lines,
trading_address_post_code=trading_address_post_code,
trading_address_post_town=trading_address_post_town,
trading_address_country=trading_address_country,
trading_address_start_date=trading_address_start_date,
trading_address_end_date=trading_address_end_date,
registered_address_address_lines=registered_address_address_lines,
registered_address_post_code=registered_address_post_code,
registered_address_post_town=registered_address_post_town,
registered_address_country=registered_address_country,
registered_address_start_date=registered_address_start_date,
registered_address_end_date=registered_address_end_date,
associate_search_criteria_first_name_initial=associate_search_criteria_first_name_initial,
associate_search_criteria_last_name_type=associate_search_criteria_last_name_type,
associate_search_criteria_last_name_value=associate_search_criteria_last_name_value,
associate_search_criteria_last_names=associate_search_criteria_last_names,
associate_search_criteria_last_name_prefix=associate_search_criteria_last_name_prefix,
associate_search_criteria_date_of_birth=associate_search_criteria_date_of_birth,
associate_search_criteria_additional_identifier_type=associate_search_criteria_additional_identifier_type,
associate_search_criteria_additional_identifier_value=associate_search_criteria_additional_identifier_value,
associate_search_criteria_home_address_address_lines=associate_search_criteria_home_address_address_lines,
associate_search_criteria_home_address_post_code=associate_search_criteria_home_address_post_code,
associate_search_criteria_home_address_post_town=associate_search_criteria_home_address_post_town,
associate_search_criteria_home_address_country=associate_search_criteria_home_address_country,
associate_search_criteria_home_address_start_date=associate_search_criteria_home_address_start_date,
associate_search_criteria_home_address_end_date=associate_search_criteria_home_address_end_date,
associate_search_criteria_associate_types=associate_search_criteria_associate_types,
).parsed
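
# Sketch (not part of the generated module): a typical call to the convenience
# wrapper above. Enum members and identifier values are placeholders, and `client`
# is assumed to be an already-configured modulr_client Client instance.
#
#     page = sync(
#         client=client,
#         name_type=...,  # a GetCustomersNameType member
#         name_value="Acme Ltd",
#         associate_search_criteria_last_name_type=...,  # a GetCustomersAssociateSearchCriteriaLastNameType member
#         associate_search_criteria_last_name_value="Smith",
#         associate_search_criteria_additional_identifier_type="PASSPORT",  # placeholder
#         associate_search_criteria_additional_identifier_value="123456789",  # placeholder
#         verification_status="VERIFIED",
#     )
#     if page is not None:
#         print(page.to_dict() if hasattr(page, "to_dict") else page)
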
async def asyncio_detailed(
*,
client: Client,
id: Union[Unset, None, str] = UNSET,
q: Union[Unset, None, str] = UNSET,
type: Union[Unset, None, str] = UNSET,
verification_status: Union[Unset, None, str] = UNSET,
from_created_date: Union[Unset, None, str] = UNSET,
to_created_date: Union[Unset, None, str] = UNSET,
page: Union[Unset, None, str] = UNSET,
size: Union[Unset, None, str] = UNSET,
sort_field: Union[Unset, None, str] = UNSET,
sort_order: Union[Unset, None, str] = UNSET,
external_ref: Union[Unset, None, str] = UNSET,
external_reference: Union[Unset, None, str] = UNSET,
name_type: GetCustomersNameType,
name_value: str,
company_reg_number: Union[Unset, None, str] = UNSET,
legal_entity: Union[Unset, None, str] = UNSET,
trading_address_address_lines: Union[Unset, None, List[str]] = UNSET,
trading_address_post_code: Union[Unset, None, str] = UNSET,
trading_address_post_town: Union[Unset, None, str] = UNSET,
trading_address_country: Union[Unset, None, str] = UNSET,
trading_address_start_date: Union[Unset, None, datetime.date] = UNSET,
trading_address_end_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_address_lines: Union[Unset, None, List[str]] = UNSET,
registered_address_post_code: Union[Unset, None, str] = UNSET,
registered_address_post_town: Union[Unset, None, str] = UNSET,
registered_address_country: Union[Unset, None, str] = UNSET,
registered_address_start_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_first_name_initial: Union[Unset, None, str] = UNSET,
associate_search_criteria_last_name_type: GetCustomersAssociateSearchCriteriaLastNameType,
associate_search_criteria_last_name_value: str,
associate_search_criteria_last_names: Union[
Unset, None, List["AccountStringSearchCriteria"]
] = UNSET,
associate_search_criteria_last_name_prefix: Union[Unset, None, str] = UNSET,
associate_search_criteria_date_of_birth: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_additional_identifier_type: str,
associate_search_criteria_additional_identifier_value: str,
associate_search_criteria_home_address_address_lines: Union[Unset, None, List[str]] = UNSET,
associate_search_criteria_home_address_post_code: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_post_town: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_country: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_start_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_home_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_associate_types: Union[
Unset, None, List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]
] = UNSET,
) -> Response[AccountCustomerPageResponse]:
"""Retrieve customers using filters
Either using unique references, such as customer ID, or filter parameters, such as verification
status, get details of any customers found.
Args:
id (Union[Unset, None, str]): ID of Customer(s) to fetch
q (Union[Unset, None, str]): Query parameter. ID, name or external reference of customer
to search for
type (Union[Unset, None, str]): Type to filter, can be one of:
1. LLC -> limited company
2. PLC -> publicly listed company
3. SOLETRADER -> sole trader
4. OPARTNRSHP -> ordinary partnership
5. LPARTNRSHP -> limited partnership
6. LLP -> limited liability partnership
7. CHARITY -> charity
8. INDIVIDUAL -> individual consumer
9. PCM_INDIVIDUAL -> partner clearing model individual consumer
10. PCM_BUSINESS -> partner clearing model business consumer
verification_status (Union[Unset, None, str]): Verification Status to filter, can be one
of:
1. UNVERIFIED -> no verification checks have been completed
2. VERIFIED -> verification checks completed satisfactorily
3. EXVERIFIED -> verification completed externally
4. REFERRED -> verification is pending manual review
5. DECLINED -> verification is complete with a negative result
6. REVIEWED -> verification check has been reviewed
        from_created_date (Union[Unset, None, str]): Customers created on or after this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        to_created_date (Union[Unset, None, str]): Customers created on or before this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        page (Union[Unset, None, str]): Page to fetch (0-indexed)
        size (Union[Unset, None, str]): Size of page to fetch
        sort_field (Union[Unset, None, str]): Field to sort by; sorted by createdDate if not present
        sort_order (Union[Unset, None, str]): Sorting order:
            1. asc -> ascending
            2. desc -> descending
external_ref (Union[Unset, None, str]): A list of external references to filter
external_reference (Union[Unset, None, str]): A list of external references to filter
Example: externalReference[0].type.
name_type (GetCustomersNameType):
name_value (str):
company_reg_number (Union[Unset, None, str]): Customer registration number
legal_entity (Union[Unset, None, str]): Customer legal entity
trading_address_address_lines (Union[Unset, None, List[str]]):
trading_address_post_code (Union[Unset, None, str]):
trading_address_post_town (Union[Unset, None, str]):
trading_address_country (Union[Unset, None, str]):
trading_address_start_date (Union[Unset, None, datetime.date]):
trading_address_end_date (Union[Unset, None, datetime.date]):
registered_address_address_lines (Union[Unset, None, List[str]]):
registered_address_post_code (Union[Unset, None, str]):
registered_address_post_town (Union[Unset, None, str]):
registered_address_country (Union[Unset, None, str]):
registered_address_start_date (Union[Unset, None, datetime.date]):
registered_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_first_name_initial (Union[Unset, None, str]):
associate_search_criteria_last_name_type
(GetCustomersAssociateSearchCriteriaLastNameType):
associate_search_criteria_last_name_value (str):
associate_search_criteria_last_names (Union[Unset, None,
List['AccountStringSearchCriteria']]):
associate_search_criteria_last_name_prefix (Union[Unset, None, str]):
associate_search_criteria_date_of_birth (Union[Unset, None, datetime.date]):
associate_search_criteria_additional_identifier_type (str): Type of additional personal
identifier
associate_search_criteria_additional_identifier_value (str): Personal identifier value
associate_search_criteria_home_address_address_lines (Union[Unset, None, List[str]]):
associate_search_criteria_home_address_post_code (Union[Unset, None, str]):
associate_search_criteria_home_address_post_town (Union[Unset, None, str]):
associate_search_criteria_home_address_country (Union[Unset, None, str]):
associate_search_criteria_home_address_start_date (Union[Unset, None, datetime.date]):
associate_search_criteria_home_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_associate_types (Union[Unset, None,
List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[AccountCustomerPageResponse]
"""
kwargs = _get_kwargs(
client=client,
id=id,
q=q,
type=type,
verification_status=verification_status,
from_created_date=from_created_date,
to_created_date=to_created_date,
page=page,
size=size,
sort_field=sort_field,
sort_order=sort_order,
external_ref=external_ref,
external_reference=external_reference,
name_type=name_type,
name_value=name_value,
company_reg_number=company_reg_number,
legal_entity=legal_entity,
trading_address_address_lines=trading_address_address_lines,
trading_address_post_code=trading_address_post_code,
trading_address_post_town=trading_address_post_town,
trading_address_country=trading_address_country,
trading_address_start_date=trading_address_start_date,
trading_address_end_date=trading_address_end_date,
registered_address_address_lines=registered_address_address_lines,
registered_address_post_code=registered_address_post_code,
registered_address_post_town=registered_address_post_town,
registered_address_country=registered_address_country,
registered_address_start_date=registered_address_start_date,
registered_address_end_date=registered_address_end_date,
associate_search_criteria_first_name_initial=associate_search_criteria_first_name_initial,
associate_search_criteria_last_name_type=associate_search_criteria_last_name_type,
associate_search_criteria_last_name_value=associate_search_criteria_last_name_value,
associate_search_criteria_last_names=associate_search_criteria_last_names,
associate_search_criteria_last_name_prefix=associate_search_criteria_last_name_prefix,
associate_search_criteria_date_of_birth=associate_search_criteria_date_of_birth,
associate_search_criteria_additional_identifier_type=associate_search_criteria_additional_identifier_type,
associate_search_criteria_additional_identifier_value=associate_search_criteria_additional_identifier_value,
associate_search_criteria_home_address_address_lines=associate_search_criteria_home_address_address_lines,
associate_search_criteria_home_address_post_code=associate_search_criteria_home_address_post_code,
associate_search_criteria_home_address_post_town=associate_search_criteria_home_address_post_town,
associate_search_criteria_home_address_country=associate_search_criteria_home_address_country,
associate_search_criteria_home_address_start_date=associate_search_criteria_home_address_start_date,
associate_search_criteria_home_address_end_date=associate_search_criteria_home_address_end_date,
associate_search_criteria_associate_types=associate_search_criteria_associate_types,
)
async with httpx.AsyncClient(verify=client.verify_ssl) as _client:
response = await _client.request(**kwargs)
return _build_response(client=client, response=response)
async def asyncio(
*,
client: Client,
id: Union[Unset, None, str] = UNSET,
q: Union[Unset, None, str] = UNSET,
type: Union[Unset, None, str] = UNSET,
verification_status: Union[Unset, None, str] = UNSET,
from_created_date: Union[Unset, None, str] = UNSET,
to_created_date: Union[Unset, None, str] = UNSET,
page: Union[Unset, None, str] = UNSET,
size: Union[Unset, None, str] = UNSET,
sort_field: Union[Unset, None, str] = UNSET,
sort_order: Union[Unset, None, str] = UNSET,
external_ref: Union[Unset, None, str] = UNSET,
external_reference: Union[Unset, None, str] = UNSET,
name_type: GetCustomersNameType,
name_value: str,
company_reg_number: Union[Unset, None, str] = UNSET,
legal_entity: Union[Unset, None, str] = UNSET,
trading_address_address_lines: Union[Unset, None, List[str]] = UNSET,
trading_address_post_code: Union[Unset, None, str] = UNSET,
trading_address_post_town: Union[Unset, None, str] = UNSET,
trading_address_country: Union[Unset, None, str] = UNSET,
trading_address_start_date: Union[Unset, None, datetime.date] = UNSET,
trading_address_end_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_address_lines: Union[Unset, None, List[str]] = UNSET,
registered_address_post_code: Union[Unset, None, str] = UNSET,
registered_address_post_town: Union[Unset, None, str] = UNSET,
registered_address_country: Union[Unset, None, str] = UNSET,
registered_address_start_date: Union[Unset, None, datetime.date] = UNSET,
registered_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_first_name_initial: Union[Unset, None, str] = UNSET,
associate_search_criteria_last_name_type: GetCustomersAssociateSearchCriteriaLastNameType,
associate_search_criteria_last_name_value: str,
associate_search_criteria_last_names: Union[
Unset, None, List["AccountStringSearchCriteria"]
] = UNSET,
associate_search_criteria_last_name_prefix: Union[Unset, None, str] = UNSET,
associate_search_criteria_date_of_birth: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_additional_identifier_type: str,
associate_search_criteria_additional_identifier_value: str,
associate_search_criteria_home_address_address_lines: Union[Unset, None, List[str]] = UNSET,
associate_search_criteria_home_address_post_code: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_post_town: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_country: Union[Unset, None, str] = UNSET,
associate_search_criteria_home_address_start_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_home_address_end_date: Union[Unset, None, datetime.date] = UNSET,
associate_search_criteria_associate_types: Union[
Unset, None, List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]
] = UNSET,
) -> Optional[AccountCustomerPageResponse]:
"""Retrieve customers using filters
Either using unique references, such as customer ID, or filter parameters, such as verification
status, get details of any customers found.
Args:
id (Union[Unset, None, str]): ID of Customer(s) to fetch
q (Union[Unset, None, str]): Query parameter. ID, name or external reference of customer
to search for
type (Union[Unset, None, str]): Type to filter, can be one of:
1. LLC -> limited company
2. PLC -> publicly listed company
3. SOLETRADER -> sole trader
4. OPARTNRSHP -> ordinary partnership
5. LPARTNRSHP -> limited partnership
6. LLP -> limited liability partnership
7. CHARITY -> charity
8. INDIVIDUAL -> individual consumer
9. PCM_INDIVIDUAL -> partner clearing model individual consumer
10. PCM_BUSINESS -> partner clearing model business consumer
verification_status (Union[Unset, None, str]): Verification Status to filter, can be one
of:
1. UNVERIFIED -> no verification checks have been completed
2. VERIFIED -> verification checks completed satisfactorily
3. EXVERIFIED -> verification completed externally
4. REFERRED -> verification is pending manual review
5. DECLINED -> verification is complete with a negative result
6. REVIEWED -> verification check has been reviewed
        from_created_date (Union[Unset, None, str]): Customers created on or after this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        to_created_date (Union[Unset, None, str]): Customers created on or before this date.
            Format is 'yyyy-MM-dd'T'HH:mm:ssZ' where Z is UTC offset. e.g. 2017-01-28T01:01:01+0000
        page (Union[Unset, None, str]): Page to fetch (0-indexed)
        size (Union[Unset, None, str]): Size of page to fetch
        sort_field (Union[Unset, None, str]): Field to sort by; sorted by createdDate if not present
        sort_order (Union[Unset, None, str]): Sorting order:
            1. asc -> ascending
            2. desc -> descending
external_ref (Union[Unset, None, str]): A list of external references to filter
external_reference (Union[Unset, None, str]): A list of external references to filter
Example: externalReference[0].type.
name_type (GetCustomersNameType):
name_value (str):
company_reg_number (Union[Unset, None, str]): Customer registration number
legal_entity (Union[Unset, None, str]): Customer legal entity
trading_address_address_lines (Union[Unset, None, List[str]]):
trading_address_post_code (Union[Unset, None, str]):
trading_address_post_town (Union[Unset, None, str]):
trading_address_country (Union[Unset, None, str]):
trading_address_start_date (Union[Unset, None, datetime.date]):
trading_address_end_date (Union[Unset, None, datetime.date]):
registered_address_address_lines (Union[Unset, None, List[str]]):
registered_address_post_code (Union[Unset, None, str]):
registered_address_post_town (Union[Unset, None, str]):
registered_address_country (Union[Unset, None, str]):
registered_address_start_date (Union[Unset, None, datetime.date]):
registered_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_first_name_initial (Union[Unset, None, str]):
associate_search_criteria_last_name_type
(GetCustomersAssociateSearchCriteriaLastNameType):
associate_search_criteria_last_name_value (str):
associate_search_criteria_last_names (Union[Unset, None,
List['AccountStringSearchCriteria']]):
associate_search_criteria_last_name_prefix (Union[Unset, None, str]):
associate_search_criteria_date_of_birth (Union[Unset, None, datetime.date]):
associate_search_criteria_additional_identifier_type (str): Type of additional personal
identifier
associate_search_criteria_additional_identifier_value (str): Personal identifier value
associate_search_criteria_home_address_address_lines (Union[Unset, None, List[str]]):
associate_search_criteria_home_address_post_code (Union[Unset, None, str]):
associate_search_criteria_home_address_post_town (Union[Unset, None, str]):
associate_search_criteria_home_address_country (Union[Unset, None, str]):
associate_search_criteria_home_address_start_date (Union[Unset, None, datetime.date]):
associate_search_criteria_home_address_end_date (Union[Unset, None, datetime.date]):
associate_search_criteria_associate_types (Union[Unset, None,
List[GetCustomersAssociateSearchCriteriaAssociateTypesItem]]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
        AccountCustomerPageResponse
"""
return (
await asyncio_detailed(
client=client,
id=id,
q=q,
type=type,
verification_status=verification_status,
from_created_date=from_created_date,
to_created_date=to_created_date,
page=page,
size=size,
sort_field=sort_field,
sort_order=sort_order,
external_ref=external_ref,
external_reference=external_reference,
name_type=name_type,
name_value=name_value,
company_reg_number=company_reg_number,
legal_entity=legal_entity,
trading_address_address_lines=trading_address_address_lines,
trading_address_post_code=trading_address_post_code,
trading_address_post_town=trading_address_post_town,
trading_address_country=trading_address_country,
trading_address_start_date=trading_address_start_date,
trading_address_end_date=trading_address_end_date,
registered_address_address_lines=registered_address_address_lines,
registered_address_post_code=registered_address_post_code,
registered_address_post_town=registered_address_post_town,
registered_address_country=registered_address_country,
registered_address_start_date=registered_address_start_date,
registered_address_end_date=registered_address_end_date,
associate_search_criteria_first_name_initial=associate_search_criteria_first_name_initial,
associate_search_criteria_last_name_type=associate_search_criteria_last_name_type,
associate_search_criteria_last_name_value=associate_search_criteria_last_name_value,
associate_search_criteria_last_names=associate_search_criteria_last_names,
associate_search_criteria_last_name_prefix=associate_search_criteria_last_name_prefix,
associate_search_criteria_date_of_birth=associate_search_criteria_date_of_birth,
associate_search_criteria_additional_identifier_type=associate_search_criteria_additional_identifier_type,
associate_search_criteria_additional_identifier_value=associate_search_criteria_additional_identifier_value,
associate_search_criteria_home_address_address_lines=associate_search_criteria_home_address_address_lines,
associate_search_criteria_home_address_post_code=associate_search_criteria_home_address_post_code,
associate_search_criteria_home_address_post_town=associate_search_criteria_home_address_post_town,
associate_search_criteria_home_address_country=associate_search_criteria_home_address_country,
associate_search_criteria_home_address_start_date=associate_search_criteria_home_address_start_date,
associate_search_criteria_home_address_end_date=associate_search_criteria_home_address_end_date,
associate_search_criteria_associate_types=associate_search_criteria_associate_types,
)
).parsed | PypiClean |
/tripleo-operator-ansible-0.9.0.tar.gz/tripleo-operator-ansible-0.9.0/roles/tripleo_overcloud_container_image_prepare/README.md | tripleo_overcloud_container_image_prepare
=========================================
IMPORTANT: This role is for use in Queens only. This functionality was replaced
by the `openstack tripleo container image prepare` command in Rocky.
A role to perform the overcloud container image prepare.
Requirements
------------
None.
Role Variables
--------------
* `tripleo_overcloud_container_image_prepare_debug`: (Boolean) Flag to print out the prepare command. Default: False
* `tripleo_overcloud_container_image_prepare_environment_directory`: (String) Path to a directory containing the environment files specifying which services are containerized.
* `tripleo_overcloud_container_image_prepare_environment_files`: (List) List of environment files specifying which services are containerized. Default: []
* `tripleo_overcloud_container_image_prepare_excludes`: (List) List of patterns to match the image name against to exclude from the output. Default: []
* `tripleo_overcloud_container_image_prepare_generate_scripts`: (Boolean) Write out a shell script that can be used to reproduce the command being executed. By default uses the value of `tripleo_generate_scripts` or False if `tripleo_generate_scripts` is not defined.
* `tripleo_overcloud_container_image_prepare_generate_scripts_only`: (Boolean) Do not run the actual command - to be used in conjunction with `tripleo_overcloud_container_image_prepare_generate_scripts`. By default uses the value of `tripleo_generate_scripts_only` or False if `tripleo_generate_scripts_only` is not defined.
* `tripleo_overcloud_container_image_prepare_home_dir`: (String) Home directory to where the command is run from. Default: "{{ ansible_env.HOME }}"
* `tripleo_overcloud_container_image_prepare_includes`: (List) List of patterns to match the image name against to include in the output. Default: []
* `tripleo_overcloud_container_image_prepare_log_combine`: (Boolean) Flag to enable capturing stderr with stdout. Default: true
* `tripleo_overcloud_container_image_prepare_log_output`: (Boolean) Flag to enable logging to a file. Since the output of this command can be large, it is not recommended to disable this. Default: true
* `tripleo_overcloud_container_image_prepare_log`: (String) Path to a log file for the command output. Default: "{{ tripleo_overcloud_container_image_prepare_home_dir }}/overcloud_container_image_prepare.log"
* `tripleo_overcloud_container_image_prepare_modify_role`: (String) Name of ansible role to run between every image pull and push.
* `tripleo_overcloud_container_image_prepare_modify_vars`: (String) Ansible variables file containing variables to use when using modify role.
* `tripleo_overcloud_container_image_prepare_namespace`: (String) Override the default namespace substitution
* `tripleo_overcloud_container_image_prepare_output_env_file`: (String) Output heat environment file which specifies all image parameters.
* `tripleo_overcloud_container_image_prepare_output_images_file`: (String) Path to write the output image entries to.
* `tripleo_overcloud_container_image_prepare_poll`: (Integer) Number of seconds to wait between each checks to see if the command has completed. Default: 10
* `tripleo_overcloud_container_image_prepare_prefix`: (String) Override the default name prefix substitution
* `tripleo_overcloud_container_image_prepare_push_destination`: (String) Location of image registry to push images to
* `tripleo_overcloud_container_image_prepare_roles_file`: (String) Roles file path for the cloud.
* `tripleo_overcloud_container_image_prepare_set`: (List) Set the value of a variable in the template even if it has no dedicated argument. Default: []
* `tripleo_overcloud_container_image_prepare_suffix`: (String) Override the default name suffix substitution
* `tripleo_overcloud_container_image_prepare_tag_from_label`: (String) Use the value of the specify label to discover the tag
* `tripleo_overcloud_container_image_prepare_tag`: (String) Override the default tag substitution
* `tripleo_overcloud_container_image_prepare_template_file`: (String) YAML template file for image config
* `tripleo_overcloud_container_image_prepare_timeout`: (Integer) Number in seconds to wait for the ansible execution of the command to finish. Default: 3600
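For illustration, a small set of overrides for this role might look like the following (all values here are placeholders rather than shipped defaults):
```yaml
tripleo_overcloud_container_image_prepare_push_destination: 192.168.24.1:8787
tripleo_overcloud_container_image_prepare_output_env_file: /home/stack/containers-prepare-parameter.yaml
tripleo_overcloud_container_image_prepare_roles_file: /home/stack/roles_data.yaml
tripleo_overcloud_container_image_prepare_timeout: 7200
```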
Output Variables
----------------
* `tripleo_overcloud_container_image_prepare_output`: (String) The command standard output.
* `tripleo_overcloud_container_image_prepare_result`: Ansible shell execution results
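As a sketch of how these output variables might be inspected after the role has run (task layout is illustrative):
```yaml
- name: Show the image prepare output
  debug:
    var: tripleo_overcloud_container_image_prepare_output
```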
Dependencies
------------
None.
Example Playbook
----------------
Example overcloud container image prepare.
```yaml
- hosts: undercloud
gather_facts: true
tasks:
- name: Overcloud container image prepare
import_role:
name: tripleo_overcloud_container_image_prepare
      vars:
tripleo_overcloud_container_image_prepare_debug: true
```
License
-------
Apache-2.0
| PypiClean |
/suds-bis-1.0.0.tar.gz/suds-bis-1.0.0/suds/mx/core.py | from suds.mx.appender import ContentAppender
from suds.sax.element import Element
from suds.sax.document import Document
from suds.sudsobject import Property
from logging import getLogger
log = getLogger(__name__)
class Core:
"""
    An I{abstract} marshaller. This class implements the core
    functionality of the marshaller.
@ivar appender: A content appender.
@type appender: L{ContentAppender}
"""
def __init__(self):
""" """
self.appender = ContentAppender(self)
def process(self, content):
"""
Process (marshal) the tag with the specified value using the
optional type information.
@param content: The content to process.
@type content: L{Object}
"""
log.debug("processing:\n%s", content)
self.reset()
if content.tag is None:
content.tag = content.value.__class__.__name__
document = Document()
if isinstance(content.value, Property):
self.node(content)
self.append(document, content)
return document.root()
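    # Illustrative use from a concrete subclass (a sketch; building the Content
    # object is the caller's responsibility and is not shown in this module):
    #
    #     marshaller = MyMarshaller()          # some subclass of Core
    #     root = marshaller.process(content)   # -> suds.sax.element.Element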
def append(self, parent, content):
"""
Append the specified L{content} to the I{parent}.
@param parent: The parent node to append to.
@type parent: L{Element}
@param content: The content to append.
@type content: L{Object}
"""
log.debug("appending parent:\n%s\ncontent:\n%s", parent, content)
if self.start(content):
self.appender.append(parent, content)
self.end(parent, content)
def reset(self):
"""
Reset the marshaller.
"""
pass
def node(self, content):
"""
Create and return an XML node.
        @param content: The content for which a node is being created.
@type content: L{Object}
@return: An element.
@rtype: L{Element}
"""
return Element(content.tag)
def start(self, content):
"""
Appending this content has started.
@param content: The content for which processing has started.
@type content: L{Content}
@return: True to continue appending
@rtype: boolean
"""
return True
def suspend(self, content):
"""
Appending this content has suspended.
@param content: The content for which processing has been suspended.
@type content: L{Content}
"""
pass
def resume(self, content):
"""
Appending this content has resumed.
@param content: The content for which processing has been resumed.
@type content: L{Content}
"""
pass
def end(self, parent, content):
"""
Appending this content has ended.
@param parent: The parent node ending.
@type parent: L{Element}
@param content: The content for which processing has ended.
@type content: L{Content}
"""
pass
def setnil(self, node, content):
"""
Set the value of the I{node} to nill.
@param node: A I{nil} node.
@type node: L{Element}
@param content: The content to set nil.
@type content: L{Content}
"""
pass
def setdefault(self, node, content):
"""
Set the value of the I{node} to a default value.
@param node: A I{nil} node.
@type node: L{Element}
@param content: The content to set the default value.
@type content: L{Content}
@return: The default.
"""
pass
def optional(self, content):
"""
Get whether the specified content is optional.
@param content: The content which to check.
@type content: L{Content}
"""
return False | PypiClean |
/cubed4th-2.0.20221024.tar.gz/cubed4th-2.0.20221024/pdoc/_compat.py | import sys
if sys.version_info >= (3, 9):
from functools import cache
else: # pragma: no cover
from functools import lru_cache
cache = lru_cache(maxsize=None)
if sys.version_info >= (3, 9):
from ast import unparse as ast_unparse
else: # pragma: no cover
from astunparse import unparse as _unparse
def ast_unparse(t): # type: ignore
return _unparse(t).strip("\t\n \"'")
if sys.version_info >= (3, 9):
from types import GenericAlias
else: # pragma: no cover
from typing import _GenericAlias as GenericAlias
if sys.version_info >= (3, 10):
from types import UnionType # type: ignore
else: # pragma: no cover
class UnionType:
pass
if sys.version_info >= (3, 9):
removesuffix = str.removesuffix
else: # pragma: no cover
def removesuffix(x: str, suffix: str):
if x.endswith(suffix):
x = x[: -len(suffix)]
return x
if sys.version_info >= (3, 9):
removeprefix = str.removeprefix
else: # pragma: no cover
def removeprefix(x: str, prefix: str):
if x.startswith(prefix):
x = x[len(prefix):]
return x
if sys.version_info >= (3, 8):
from functools import cached_property
else: # pragma: no cover
from threading import RLock
# https://github.com/python/cpython/blob/863eb7170b3017399fb2b786a1e3feb6457e54c2/Lib/functools.py#L930-L980
# ✂ start ✂
_NOT_FOUND = object()
class cached_property: # type: ignore
def __init__(self, func):
self.func = func
self.attrname = None
self.__doc__ = func.__doc__
self.lock = RLock()
def __set_name__(self, owner, name):
if self.attrname is None:
self.attrname = name
elif name != self.attrname:
raise TypeError(
"Cannot assign the same cached_property to two different names "
f"({self.attrname!r} and {name!r})."
)
def __get__(self, instance, owner=None):
if instance is None:
return self
if self.attrname is None:
raise TypeError(
"Cannot use cached_property instance without calling __set_name__ on it.")
try:
cache = instance.__dict__
except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
msg = (
f"No '__dict__' attribute on {type(instance).__name__!r} "
f"instance to cache {self.attrname!r} property."
)
raise TypeError(msg) from None
val = cache.get(self.attrname, _NOT_FOUND)
if val is _NOT_FOUND:
with self.lock:
# check if another thread filled cache while we awaited lock
val = cache.get(self.attrname, _NOT_FOUND)
if val is _NOT_FOUND:
val = self.func(instance)
try:
cache[self.attrname] = val
except TypeError:
msg = (
f"The '__dict__' attribute on {type(instance).__name__!r} instance "
f"does not support item assignment for caching {self.attrname!r} property."
)
raise TypeError(msg) from None
return val
__class_getitem__ = classmethod(GenericAlias)
# ✂ end ✂
if sys.version_info >= (3, 8):
from typing import Literal, get_origin
else: # pragma: no cover
from typing import Generic
# There is no Literal on 3.7, so we just make one up. It should not be used anyways!
class Literal:
pass
# get_origin is adapted from
# https://github.com/python/cpython/blob/863eb7170b3017399fb2b786a1e3feb6457e54c2/Lib/typing.py#L1474-L1515
# with Annotations removed (not present in 3.7)
def get_origin(tp): # type: ignore
if isinstance(tp, GenericAlias):
return tp.__origin__
if tp is Generic:
return Generic
return None
if (3, 9) <= sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1): # pragma: no cover
import inspect
import types
def formatannotation(annotation) -> str:
"""
https://github.com/python/cpython/pull/29212
"""
if isinstance(annotation, types.GenericAlias):
return str(annotation)
return inspect.formatannotation(annotation)
else:
from inspect import formatannotation
if True:
# https://github.com/python/cpython/pull/27672
from argparse import Action
class BooleanOptionalAction(Action): # pragma: no cover
def __init__(self,
option_strings,
dest,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
_option_strings = []
for option_string in option_strings:
_option_strings.append(option_string)
if option_string.startswith('--'):
option_string = '--no-' + option_string[2:]
_option_strings.append(option_string)
if help is not None and default is not None:
help += " (default: %(default)s)"
super().__init__(
option_strings=_option_strings,
dest=dest,
nargs=0,
default=default,
type=type,
choices=choices,
required=required,
help=help,
metavar=metavar)
def __call__(self, parser, namespace, values, option_string=None):
if option_string in self.option_strings:
setattr(namespace, self.dest, not option_string.startswith('--no-'))
def format_usage(self):
return ' | '.join(self.option_strings)
__all__ = [
"cache",
"ast_unparse",
"GenericAlias",
"UnionType",
"removesuffix",
"cached_property",
"get_origin",
"Literal",
"formatannotation",
"BooleanOptionalAction",
] | PypiClean |
/sdtbu-acm-tool-1.1.1.tar.gz/sdtbu-acm-tool-1.1.1/README.md | # Shandong Technology and Business University (SDTBU) ACM Helper Tool
## OJ Site Account Migration
### **[ Introduction ]**
Migrates every problem one account has solved to another account. Some problems cannot be transferred.
### **[ Beginner Tutorial ]**
If this is not your first time using Python, you can skip this section.
1. Download and install Python 3.9 (Mac users can install any Python version above 3.6). During installation, check `Add Python 3.9 to PATH`. [Windows download](https://www.python.org/ftp/python/3.9.13/python-3.9.13-amd64.exe)
2. Download a development environment; [PyCharm](https://www.jetbrains.com/pycharm/download/#section=windows) is recommended (Mac users should download the macOS build). Install it after downloading.
3. Open PyCharm, create a new project, find `Terminal` at the bottom, click `Create New Terminal`, type `pip install sdtbu-acm-tool` in the window that opens, and wait for the installation to finish.
### **[ Usage ]**
#### 1. Install dependencies
If your Python version is above 3.9 or your operating system is not Windows, automatic captcha recognition cannot be enabled and captchas must be entered manually.
If your Python version is 3.9 or lower and you are on Windows, install the dependency with the following command:
```bash
pip install ddddocr -i https://pypi.mirrors.ustc.edu.cn/simple
```
#### 2. User login
#### **「 On-campus use 」**
Make sure your computer is connected to the campus network, then log in with the following code:
```python
from acm.oj import OJ
user = OJ(user_id='your username',
          password='your password',
          mode='校内', # access mode: '校内' (on-campus) / '校外' (off-campus)
          code_mode='自动', # captcha recognition mode: '自动' (automatic) / '手动' (manual)
          ).login()
```
Or log in with the following code:
```python
from acm.oj import OJ
user = OJ('your username', 'your password').login()
```
#### **「 Off-campus use 」**
Before using this mode, you need to obtain a wvpn token.
How to obtain it:
1. Open [https://wvpn.sdtbu.edu.cn](https://wvpn.sdtbu.edu.cn) and log in by scanning the QR code. Skip this step if you are already logged in to wvpn. [![zzuN0x.png](https://s1.ax1x.com/2022/12/27/zzuN0x.png)](https://imgse.com/i/zzuN0x)
2. Open the browser developer tools (usually F12), switch to the Network tab, then click the refresh button (or press F5). [![zzuwtO.png](https://s1.ax1x.com/2022/12/27/zzuwtO.png)](https://imgse.com/i/zzuwtO)
3. A large number of network requests will appear. Click the first one; the value of 「 wengine_vpn_ticketwvpn_sdtbu_edu_cn 」 is the token we need. If the value is not fully shown, resize the window until it is completely visible. (The steps below use the token value from the screenshot.) [![zzu59g.md.png](https://s1.ax1x.com/2022/12/27/zzu59g.md.png)](https://imgse.com/i/zzu59g)
Then you can log in with the following code:
```python
from acm.oj import OJ
user = OJ(user_id='your username',
          password='your password',
          mode='校外', # access mode: '校内' (on-campus) / '校外' (off-campus)
          code_mode='自动', # captcha recognition mode: '自动' (automatic) / '手动' (manual)
wvpn_token='39a7de9aad39f158'
).login()
```
Or log in with the following code:
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
```
#### 3. Feature demos
### **[ Migrate an account ]**
Note: in off-campus mode only one account can be logged in at a time. If one account is already logged in, you cannot log in with another; log out of the current account first and then log in with the other one.
An account migration example:
```python
from acm.oj import OJ
user_1 = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
user_1.save('user_1.json') # save the account data
user_2 = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
user_2.load_and_submit(
    'user_1.json',
    10 # submission interval in seconds; submitting too quickly may cause submission failures
) # load the account data and submit the problems
```
### **[ Submit a problem ]**
A submission example:
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
user.submit(
    10, # problem ID
    'your code type', # code type
    'your code', # source code
)
```
The supported code types are:
> C
> C++
> Pascal
> Java
> Ruby
> Python
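For example, a concrete submission using the `C` type might look like the sketch below (the problem ID and source code are made up for illustration):
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
# Hypothetical C solution for problem 10
c_code = r'''
#include <stdio.h>
int main(void) {
    printf("Hello, World!\n");
    return 0;
}
'''
user.submit(10, 'C', c_code)
```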
### **[ Register a user ]**
A user registration example:
```python
from acm.oj import OJ
user = OJ('account to register', 'your password', wvpn_token='39a7de9aad39f158').register()
```
### **[ Modify user information ]**
An example of modifying user information:
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
user.modify(
    nickname='your new nickname', # nickname
    email='your new email', # email
    password='your new password', # password
)
```
Only pass the fields you want to change; there is no need to fill in all of them.
### **[ Get solved-problem information ]**
An example of fetching information about solved problems:
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
datas = user.info()
print(datas)
```
### **[ Test code submission ]**
A test-submission example:
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
user.submit_test_code()
```
The result looks like this:
[![zzK1VP.png](https://s1.ax1x.com/2022/12/27/zzK1VP.png)](https://imgse.com/i/zzK1VP)
[![zzKY8g.png](https://s1.ax1x.com/2022/12/27/zzKY8g.png)](https://imgse.com/i/zzKY8g)
### **[ Contribute your code ]**
If you would like to help us improve the test database, you can contribute the code from your account. Use the following code to save your data:
```python
from acm.oj import OJ
user = OJ('your username', 'your password', wvpn_token='39a7de9aad39f158').login()
user.save('data.json') # save the account data
```
Then send the generated `data.json` file to our email [ [email protected] ](mailto:[email protected]) and we will add your data to the test database. Thank you for your support.
## Notes
Do not use this project for illegal activities; you alone are responsible for the consequences.
This repository is for learning and exchange only and must not be used for illegal commercial purposes.
Feedback email: [ [email protected] ](mailto:[email protected])
| PypiClean |
/snake-shop-0.9.0.tar.gz/snake-shop-0.9.0/apps/checkout/mixins.py | import logging
from django.contrib.sites.models import Site
from oscar.apps.checkout import mixins as base_mixins
from oscar.apps.checkout.mixins import OrderDispatcher
from apps.order import models as order_models
from apps.communication.utils import CustomDispatcher
from custom.context_processors import main as context_processor_main
from . import forms
logger = logging.getLogger('oscar.checkout')
class OrderPlacementMixin(base_mixins.OrderPlacementMixin):
def handle_payment(self, order_number, total, **kwargs):
if self.bank_account:
user_address = self.request.user.addresses.get(
id=self.checkout_session.billing_user_address_id()
)
user_address.default_bank_account = self.bank_account
user_address.save()
self.add_payment_source(self.source)
def handle_order_placement(self, order_number, user, basket,
shipping_address, shipping_method,
shipping_charge, billing_address, order_total,
surcharges=None, **kwargs):
kwargs.update({
'delivery': self.delivery,
'payment_code': self.source_type.code,
'payment_provider': self.source_type.name,
})
if self.bank_account:
kwargs.update({
'iban': self.bank_account.number,
'owner': self.bank_account.name,
})
result = super().handle_order_placement(order_number, user, basket,
shipping_address, shipping_method,
shipping_charge, billing_address, order_total,
surcharges=surcharges, **kwargs)
return result
def handle_successful_order(self, order):
form = forms.OrderNoteForm(self.checkout_session, self.request.POST)
if form.is_valid() and form.cleaned_data.get('newsletter_accepted'):
user = self.request.user
user.newsletter_accepted = True
user.save()
note_text = self.request.POST.get('note')
if note_text:
order_models.OrderNote.objects.create(
order=order, user=self.request.user, note_type='checkout',
message=note_text
)
result = super().handle_successful_order(order)
order.refresh_from_db()
self.send_custom_emails(order)
return result
def get_message_context(self, order):
context = context_processor_main(self.request)
context.update(super().get_message_context(order))
context['CONFIG'] = Site.objects.get_current()
context['request'] = self.request
return context
def send_custom_emails(self, order):
site = self.request.site
ctx = self.get_message_context(order)
codes = [
'ORDER_PLACED_MACHINE1',
'ORDER_PLACED_INTERNAL_WITH_BANK',
'ORDER_PLACED_INTERNAL_WITHOUT_BANK',
]
if order.payment_code == 'transfer':
codes.append('ORDER_PLACED_INTERNAL_TRANSFER')
for code in codes:
CustomDispatcher(code, site, extra_context=ctx).send()
def send_order_placed_email(self, order):
extra_context = self.get_message_context(order)
dispatcher = OrderDispatcher(logger=logger)
dispatcher.send_order_placed_email_for_user(order, extra_context) | PypiClean |
/minerva_etl-5.3.2-py3-none-any.whl/minerva/commands/entity_type.py | from contextlib import closing
from psycopg2 import connect
from minerva.util.tabulate import render_table
def setup_command_parser(subparsers):
cmd = subparsers.add_parser(
'entity-type', help='command for administering entity types'
)
cmd_subparsers = cmd.add_subparsers()
setup_create_parser(cmd_subparsers)
setup_delete_parser(cmd_subparsers)
setup_list_parser(cmd_subparsers)
def setup_create_parser(subparsers):
cmd = subparsers.add_parser(
'create', help='command for creating entity types'
)
cmd.add_argument('name', help='name of the new entity type')
cmd.set_defaults(cmd=create_entity_type_cmd)
def setup_delete_parser(subparsers):
cmd = subparsers.add_parser(
'delete', help='command for deleting entity types'
)
cmd.add_argument('name', help='name of the entity type to delete')
cmd.set_defaults(cmd=delete_entity_type_cmd)
def setup_list_parser(subparsers):
cmd = subparsers.add_parser(
'list', help='command for listing entity types'
)
cmd.set_defaults(cmd=list_entity_type_cmd)
def create_entity_type_cmd(args):
create_entity_type(args.name)
def delete_entity_type_cmd(args):
delete_entity_type(args.name)
def list_entity_type_cmd(args):
list_entity_types()
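# Illustrative CLI usage (a sketch; assumes these subparsers are wired into a
# top-level "minerva" command, which is outside this module):
#
#     minerva entity-type create node
#     minerva entity-type list
#     minerva entity-type delete node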
def create_entity_type(name):
query_args = (name,)
with closing(connect('')) as conn:
with closing(conn.cursor()) as cursor:
cursor.execute(
'SELECT directory.create_entity_type(%s)',
query_args
)
conn.commit()
def delete_entity_type(name):
query_args = (name,)
with closing(connect('')) as conn:
with closing(conn.cursor()) as cursor:
cursor.execute(
'SELECT directory.delete_entity_type(%s)',
query_args
)
rowcount = cursor.rowcount
conn.commit()
if rowcount == 1:
print('successfully deleted entity type {}'.format(name))
def list_entity_types():
with closing(connect('')) as conn:
with closing(conn.cursor()) as cursor:
cursor.execute('SELECT id, name FROM directory.entity_type')
rows = cursor.fetchall()
column_names = ["id", "name"]
column_align = "<" * len(column_names)
column_sizes = ["max"] * len(column_names)
for line in render_table(column_names, column_align, column_sizes, rows):
print(line) | PypiClean |
/msm_we-0.1.27.tar.gz/msm_we-0.1.27/msm_we/_hamsm/_dimensionality.py | from sklearn.decomposition import IncrementalPCA as iPCA
import concurrent
import multiprocessing as mp
import tqdm.auto as tqdm
import numpy as np
from deeptime.decomposition import TICA, VAMP
from msm_we._logging import log
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from msm_we import modelWE
class DimensionalityReductionMixin:
dimReduceMethod = None
"""str: Dimensionality reduction method. Must be one of "pca", "vamp", or "none" (**NOT** NoneType)"""
vamp_lag = None
vamp_dim = None
class Coordinates(object):
"""
Fake Coordinates class, in case you don't want to use either PCA or VAMP for dimensionality reduction
"""
# The class "constructor" - It's actually an initializer
def __init__(self):
self.explanation = "coordinate object"
def transform(self, coords):
return coords
def reduceCoordinates(self: "modelWE", coords):
"""
This performs the dimensionality reduction.
dimReduce() defines self.coordinates, which is an object that has a .transform() function that produces the
reduced data.
reduceCoordinates() actually uses that coordinates object, transforms the coordinates,
and returns the reduced data.
The reduced coordinates are then stored in /auxdata for each iteration.
Parameters
----------
coords: array-like
Array of coordinates to reduce.
Returns
-------
Reduced data
"""
# log.debug("Reducing coordinates")
# TODO: This list should not be stored here, this should be a class attribute or something
if (
self.dimReduceMethod == "none"
or self.dimReduceMethod == "pca"
or self.dimReduceMethod == "vamp"
or self.dimReduceMethod == "tica"
):
coords = self.processCoordinates(coords)
coords = self.coordinates.transform(coords)
return coords
raise Exception("dimReduceMethod undefined in reduceCoordinates")
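    # Illustrative flow (a sketch, assuming a fully initialised modelWE instance
    # named `model` and an array of raw coordinates `xyz`):
    #
    #     model.dimReduce()                       # fit the reduction model
    #     reduced = model.reduceCoordinates(xyz)  # transform raw coordinates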
def do_pca(self: "modelWE", arg):
rough_pca, iteration, processCoordinates = arg
iter_coords = self.get_iter_coordinates(iteration)
# If no good coords in this iteration, skip it
if iter_coords.shape[0] == 0:
return rough_pca
processed_iter_coords = processCoordinates(iter_coords)
rough_pca.partial_fit(processed_iter_coords)
log.debug(f"{rough_pca.n_samples_seen_} samples seen")
return rough_pca
def do_full_pca(self: "modelWE", arg):
ipca, iteration, processCoordinates, components_for_var = arg
iter_coords = self.get_iter_coordinates(iteration)
used_iters = 0
# Keep adding coords until you have more than your components
while iter_coords.shape[0] <= components_for_var:
used_iters += 1
_iter_coords = self.get_iter_coordinates(iteration + used_iters)
if _iter_coords.shape[0] == 0:
continue
iter_coords = np.append(iter_coords, _iter_coords, axis=0)
processed_iter_coords = processCoordinates(iter_coords)
log.debug(
f"About to run iPCA on {processed_iter_coords.shape} processed coords"
)
ipca.partial_fit(processed_iter_coords)
return ipca, used_iters
def dimReduce(
self: "modelWE",
first_iter=1,
first_rough_iter=None,
last_iter=None,
rough_stride=10,
fine_stride=1,
variance_cutoff=0.95,
use_weights=True,
):
"""
Dimensionality reduction using the scheme specified in initialization.
This just defines the dimensionality reduction scheme and builds the model -- it does NOT actually transform
the data!
Transforming the data is performed via reduceCoordinates(), which uses self.coordinates as set
by this.
Updates:
- `self.coordinates`
- `self.ndim`
Returns
-------
None
"""
log.debug(f"Running dimensionality reduction -- method: {self.dimReduceMethod}")
# log.debug(self.coordSet)
if self.dimReduceMethod == "pca":
# Do this in a streaming way, iteration by iteration
# First, do a "rough" PCA on the last 10% of the data to get the number of components that explain the
# variance cutoff.
# This is necessary because with incremental PCA, there's no way to do this ahead of time.
if last_iter is None:
last_iter = self.maxIter
rough_ipca = iPCA()
if first_rough_iter is None:
total_num_iterations = last_iter
# Stride every 10th frame, so you're only doing the "rough" pca on 10% of the data
if total_num_iterations > 100:
rough_iters = range(1, total_num_iterations, rough_stride)
# But if you only have 100 frames or fewer, then just do the last-half.
else:
rough_iters = range(total_num_iterations // 2, total_num_iterations)
else:
rough_iters = range(first_rough_iter, last_iter, rough_stride)
for iteration in tqdm.tqdm(rough_iters, desc="Initial iPCA"):
# TODO: Allow chunking here so you don't have to go 1 by 1, but N by N
# If you don't use 'fork' context here, this will break in Jupyter.
# That's because processCoordinates is monkey-patched in. With 'spawn' (i.e. without fork), the module
# is re-imported in the child process. In the reimported module, processCoordinates is undefined.
# With 'fork', it preserves the monkey-patched version.
# Additionally, 'fork' is a little faster than spawn. Ironically, that's usually at the cost of memory
# usage. But here, the memory being used by the main thread (and therefore being copied here) isn't
# that great -- the memory issue stems from it not being freed up between successive calls.
with concurrent.futures.ProcessPoolExecutor(
max_workers=1, mp_context=mp.get_context("fork")
) as executor:
rough_ipca = executor.submit(
self.do_pca, [rough_ipca, iteration, self.processCoordinates]
).result()
components_for_var = (
np.argmax(
np.cumsum(rough_ipca.explained_variance_ratio_) > variance_cutoff
)
+ 1
)
log.debug(f"Keeping {components_for_var} components")
components_for_var = min(
components_for_var, (last_iter - first_iter) // fine_stride - 1
)
# Now do the PCA again, with that many components, using all the iterations.
ipca = iPCA(n_components=components_for_var)
extra_iters_used = 0
for iteration in tqdm.tqdm(
range(first_iter, last_iter, fine_stride), desc="iPCA"
):
if extra_iters_used > 0:
extra_iters_used -= 1
log.debug(f"Already processed iter {iteration}")
continue
# Try some stuff to help memory management. I think a lot of memory is not being explicitly released
# here when I'm looping, because I can watch my swap usage steadily grow while it's running this loop.
# https://stackoverflow.com/questions/1316767/how-can-i-explicitly-free-memory-in-python has some good
# details on how memory may be freed by Python, but not necessarily recognized as free by the OS.
# One "guaranteed" way to free memory back to the OS that's been released by Python is to do the memory
# intensive operation in a subprocess. So, maybe I need to do my partial fit in a subprocess.
# In fact, I first moved partial_fit alone to a subprocess, but that didn't help. The issue isn't
# partial_fit, it's actually loading the coords.
with concurrent.futures.ProcessPoolExecutor(
max_workers=1, mp_context=mp.get_context("fork")
) as executor:
ipca, extra_iters_used = executor.submit(
self.do_full_pca,
[ipca, iteration, self.processCoordinates, components_for_var],
).result()
self.coordinates = ipca
self.ndim = components_for_var
# elif self.dimReduceMethod == "vamp":
# # TODO: I don't think trajSet is initialized by itself -- you need to manually call get_traj_coordinates
# log.warning(
# "VAMP dimensionality reduction requires you to *manually* call get_traj_coordinates first, "
# "or self.trajSet will be all None. Make sure you've done that!"
# )
# raise NotImplementedError
#
# ntraj = len(self.trajSet)
# data = [None] * ntraj
# for itraj in range(ntraj):
# data[itraj] = self.processCoordinates(self.trajSet[itraj])
# self.coordinates = coor.vamp(
# data,
# lag=self.vamp_lag,
# dim=self.vamp_dim,
# scaling=None,
# right=False,
# stride=1,
# skip=0,
# )
# self.ndim = self.coordinates.dimension()
elif self.dimReduceMethod == "tica" or self.dimReduceMethod == "vamp":
# TODO: Streaming implementation, a la
# https://deeptime-ml.github.io/latest/api/generated/deeptime.decomposition.TICA.html#deeptime.decomposition.TICA.partial_fit
# TODO: Pre-allocate these.. but how can I get their sizes? And, they're ragged
trajs_start = []
trajs_end = []
weights = []
if last_iter is None:
last_iter = self.maxIter
for iteration in range(first_iter, last_iter, fine_stride):
# iter_coords = self.get_iter_coordinates(iteration)
self.load_iter_data(iteration)
self.get_transition_data_lag0()
coords_from = self.coordPairList[:, :, :, 0]
coords_to = self.coordPairList[:, :, :, 1]
# If no good coords in this iteration, skip it
# if iter_coords.shape[0] == 0:
# continue
processed_start = self.processCoordinates(coords_from)
processed_end = self.processCoordinates(coords_to)
trajs_start.extend(processed_start)
trajs_end.extend(processed_end)
weights.extend(self.weightList)
weights = np.array(weights)
if self.dimReduceMethod == "tica":
self.coordinates = TICA(
lagtime=1, var_cutoff=variance_cutoff, scaling="kinetic_map"
)
elif self.dimReduceMethod == "vamp":
self.coordinates = VAMP(
lagtime=1, var_cutoff=variance_cutoff, scaling="kinetic_map"
)
# self.coordinates.fit(trajs)
log.info(f"Performing weighted {self.dimReduceMethod}")
# print(f"Performing weighted TICA with weights {weights.shape} and trajs {trajs.shape}")
# Weights are not currently supported in VAMP
# See: https://github.com/deeptime-ml/deeptime/blob/main/deeptime/covariance/util/_running_moments.py#L247
if not use_weights or self.dimReduceMethod == "vamp":
weights = None
self.coordinates.fit_from_timeseries(
(np.array(trajs_start), np.array(trajs_end)), weights=weights
)
# Note: ndim is only used in one place, and it's a deprecated obsolete function
self.ndim = self.coordinates.model.output_dimension
log.info(
f"Weighted {self.dimReduceMethod} will reduce "
f"{self.coordinates._model._instantaneous_coefficients.shape[0]} to {self.ndim} components."
)
elif self.dimReduceMethod == "none":
self.ndim = int(3 * self.nAtoms)
# TODO: Why is this unused?
# data = self.all_coords.reshape(-1, self.ndim)
self.coordinates = self.Coordinates()
# self.coordinates.transform = self.processCoordinates
else:
raise NotImplementedError | PypiClean |
/tensorflow_cpu-2.14.0rc1-cp311-cp311-macosx_10_15_x86_64.whl/tensorflow/python/debug/lib/profiling.py | """Data structures and algorithms for profiling information."""
import os
class ProfileDatum(object):
"""Profile data point."""
def __init__(self,
device_name,
node_exec_stats,
file_path,
line_number,
func_name,
op_type):
"""Constructor.
Args:
device_name: (string) name of the device.
node_exec_stats: `NodeExecStats` proto.
file_path: path to the source file involved in creating the op.
line_number: line number in the file involved in creating the op.
func_name: name of the function that the line belongs to.
op_type: (string) Operation type.
"""
self.device_name = device_name
self.node_exec_stats = node_exec_stats
self.file_path = file_path
self.line_number = line_number
self.func_name = func_name
if self.file_path:
self.file_line_func = "%s:%d(%s)" % (
os.path.basename(self.file_path), self.line_number, self.func_name)
else:
self.file_line_func = ""
self.op_type = op_type
self.start_time = self.node_exec_stats.all_start_micros
self.op_time = (self.node_exec_stats.op_end_rel_micros -
self.node_exec_stats.op_start_rel_micros)
@property
def exec_time(self):
"""Op execution time plus pre- and post-processing."""
return self.node_exec_stats.all_end_rel_micros
class AggregateProfile(object):
"""Profile summary data for aggregating a number of ProfileDatum."""
def __init__(self, profile_datum):
"""Constructor.
Args:
profile_datum: (`ProfileDatum`) an instance of `ProfileDatum` to
initialize this object with.
"""
self.total_op_time = profile_datum.op_time
self.total_exec_time = profile_datum.exec_time
device_and_node = "%s:%s" % (profile_datum.device_name,
profile_datum.node_exec_stats.node_name)
self._node_to_exec_count = {device_and_node: 1}
def add(self, profile_datum):
"""Accumulate a new instance of ProfileDatum.
Args:
profile_datum: (`ProfileDatum`) an instance of `ProfileDatum` to
accumulate to this object.
"""
self.total_op_time += profile_datum.op_time
self.total_exec_time += profile_datum.exec_time
device_and_node = "%s:%s" % (profile_datum.device_name,
profile_datum.node_exec_stats.node_name)
device_and_node = "%s:%s" % (profile_datum.device_name,
profile_datum.node_exec_stats.node_name)
if device_and_node in self._node_to_exec_count:
self._node_to_exec_count[device_and_node] += 1
else:
self._node_to_exec_count[device_and_node] = 1
@property
def node_count(self):
return len(self._node_to_exec_count)
@property
def node_exec_count(self):
return sum(self._node_to_exec_count.values()) | PypiClean |
/airflow-plugins-0.1.3.tar.gz/airflow-plugins-0.1.3/airflow_plugins/utils.py | from copy import deepcopy
from datetime import datetime
from airflow.models import Variable
from pytz import timezone
def get_variable(key, default_var=None):
"""Returns variable from Variable or config defaults"""
return Variable.get(key, default_var=default_var)
def create_variable(key, value):
"""Create variable"""
return Variable.set(key, value)
def update_params(params, *args):
d = deepcopy(params)
for arg in args:
d.update(deepcopy(arg))
return d
def get_connection(conn_id):
"""Returns a connection by id
"""
from airflow import settings, models
session = settings.Session()
return session.query(
models.Connection).filter_by(
conn_id=conn_id).first()
def delete_connection(conn_id):
"""Delete a connection by id. Return is deleted"""
from airflow import settings, models
session = settings.Session()
connection = session.query(
models.Connection).filter_by(
conn_id=conn_id)
deleted_rows = connection.delete()
session.commit()
return deleted_rows
def get_connection_str(conn_id, db_name=""):
"""Returns standard connection string
"""
con = get_connection(conn_id)
if con:
return "{type}://{user}:{password}@{host}:{port}/{db_name}".format(**{
'type': con.conn_type,
'user': con.login,
'password': con.password,
'host': con.host,
'port': con.port,
'db_name': db_name,
}).rstrip("/")
def get_or_create_conn(name, **kwargs):
"""Returns a connection by id
"""
from airflow import settings, models
session = settings.Session()
con = get_connection(name)
if not con:
con = models.Connection(name, **kwargs)
session.add(con)
session.commit()
return con
def get_or_update_conn(name, **kwargs):
"""Returns a connection by id
"""
from airflow import settings, models
session = settings.Session()
con = get_connection(name)
if not con:
con = models.Connection(name, **kwargs)
session.add(con)
session.commit()
else:
for key, value in kwargs.items():
if key == "extra":
con.set_extra(value)
else:
setattr(con, key, value)
session.commit()
return con
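# Illustrative usage (a sketch; the connection parameters below are placeholder
# values, not defaults shipped with this module):
#
#     get_or_update_conn(
#         'my_postgres',
#         conn_type='postgres',
#         host='localhost',
#         login='airflow',
#         password='secret',
#         port=5432,
#     )
#     uri = get_connection_str('my_postgres', db_name='analytics')
#     # -> 'postgres://airflow:secret@localhost:5432/analytics'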
def get_utc_offset(tz='Europe/Prague'): # as hours
tz = timezone(tz)
utc = timezone('UTC')
now = datetime.utcnow()
utcnow = utc.localize(now)
dt = utcnow.astimezone(tz).replace(tzinfo=None)
offset = (dt - now).total_seconds() / (60 * 60)
return offset # float | PypiClean |