id
stringlengths 1
8
| text
stringlengths 6
1.05M
| dataset_id
stringclasses 1
value |
---|---|---|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/CarRentalGoodsInfo.py | import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.ExtraInfo import ExtraInfo
from alipay.aop.api.domain.CarRentalGoods import CarRentalGoods
class CarRentalGoodsInfo(object):
    """Alipay API domain model: goods information attached to a car-rental order.

    Holds two optional lists: extra info entries and the rented goods
    themselves. Values are coerced to the corresponding model types on
    assignment and serialized back to plain dicts for the API payload.
    """

    def __init__(self):
        # List of ExtraInfo model objects, or None when unset.
        self._goods_extra_info = None
        # List of CarRentalGoods model objects, or None when unset.
        self._goods_list = None

    @property
    def goods_extra_info(self):
        return self._goods_extra_info

    @goods_extra_info.setter
    def goods_extra_info(self, value):
        # Only list values are accepted; each element is coerced to ExtraInfo.
        if isinstance(value, list):
            self._goods_extra_info = [
                i if isinstance(i, ExtraInfo) else ExtraInfo.from_alipay_dict(i)
                for i in value
            ]

    @property
    def goods_list(self):
        return self._goods_list

    @goods_list.setter
    def goods_list(self, value):
        # Only list values are accepted; each element is coerced to CarRentalGoods.
        if isinstance(value, list):
            self._goods_list = [
                i if isinstance(i, CarRentalGoods) else CarRentalGoods.from_alipay_dict(i)
                for i in value
            ]

    @staticmethod
    def _serialize(value):
        """Convert a model object, list, or plain value to its alipay-dict form.

        BUG FIX: the original generated code replaced list elements in place
        while serializing, so after one call to ``to_alipay_dict`` the object
        held plain dicts instead of model instances. This helper serializes
        without mutating the stored attribute.
        """
        if isinstance(value, list):
            return [
                element.to_alipay_dict() if hasattr(element, 'to_alipay_dict') else element
                for element in value
            ]
        if hasattr(value, 'to_alipay_dict'):
            return value.to_alipay_dict()
        return value

    def to_alipay_dict(self):
        """Return a plain-dict representation; unset/empty attributes are skipped."""
        params = dict()
        if self.goods_extra_info:
            params['goods_extra_info'] = self._serialize(self.goods_extra_info)
        if self.goods_list:
            params['goods_list'] = self._serialize(self.goods_list)
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a CarRentalGoodsInfo from a response dict; falsy input yields None."""
        if not d:
            return None
        o = CarRentalGoodsInfo()
        if 'goods_extra_info' in d:
            o.goods_extra_info = d['goods_extra_info']
        if 'goods_list' in d:
            o.goods_list = d['goods_list']
        return o
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_15/models/resource_space.py | import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_15 import models
class ResourceSpace(object):
    """
    Swagger-generated model for a resource's space metrics.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """
    swagger_types = {
        'id': 'str',
        'name': 'str',
        'space': 'Space',
        'time': 'int'
    }
    attribute_map = {
        'id': 'id',
        'name': 'name',
        'space': 'space',
        'time': 'time'
    }
    required_args = {
    }

    def __init__(
        self,
        id=None,  # type: str
        name=None,  # type: str
        space=None,  # type: models.Space
        time=None,  # type: int
    ):
        """
        Keyword args:
            id (str): A globally unique, system-generated ID. The ID cannot be modified and cannot refer to another resource.
            name (str): A locally unique, system-generated name. The name cannot be modified.
            space (Space): Displays size and space consumption information.
            time (int): The timestamp of when the data was taken. Measured in milliseconds since the UNIX epoch.
        """
        # Only assign attributes the caller actually supplied, so that
        # `hasattr` can distinguish "unset" from "set to a value" in to_dict().
        for attr_name, supplied in (('id', id), ('name', name), ('space', space), ('time', time)):
            if supplied is not None:
                setattr(self, attr_name, supplied)

    def __setattr__(self, key, value):
        # Reject any key outside the declared attribute set.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ResourceSpace`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, name):
        # Attributes still holding a Property placeholder behave as unset.
        attr_value = object.__getattribute__(self, name)
        if isinstance(attr_value, Property):
            raise AttributeError
        return attr_value

    def __getitem__(self, key):
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ResourceSpace`".format(key))
        return object.__getattribute__(self, key)

    def __setitem__(self, key, value):
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ResourceSpace`".format(key))
        object.__setattr__(self, key, value)

    def __delitem__(self, key):
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ResourceSpace`".format(key))
        object.__delattr__(self, key)

    def keys(self):
        return self.attribute_map.keys()

    def to_dict(self):
        """Return the model's set properties as a plain dict."""
        def _convert(val):
            # Recursively serialize nested models inside lists/dicts.
            if isinstance(val, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item for item in val]
            if hasattr(val, "to_dict"):
                return val.to_dict()
            if isinstance(val, dict):
                return {k: (v.to_dict() if hasattr(v, "to_dict") else v) for k, v in val.items()}
            return val

        result = {
            attr: _convert(getattr(self, attr))
            for attr in self.swagger_types
            if hasattr(self, attr)
        }
        if issubclass(ResourceSpace, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Pretty-printed string form of to_dict()."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Two ResourceSpace objects are equal when all set attributes match."""
        return isinstance(other, ResourceSpace) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
/ora2-5.2.5-py3-none-any.whl/openassessment/xblock/submission_mixin.py | import copy
import json
import logging
import os
from django.core.exceptions import ObjectDoesNotExist
from django.utils.functional import cached_property
from xblock.core import XBlock
from xblock.exceptions import NoSuchServiceError
from submissions.team_api import get_team_submission
from openassessment.fileupload import api as file_upload_api
from openassessment.fileupload.exceptions import FileUploadError
from openassessment.workflow.errors import AssessmentWorkflowError
from ..data import OraSubmissionAnswerFactory
from .data_conversion import (
create_submission_dict,
list_to_conversational_format,
prepare_submission_for_serialization
)
from .resolve_dates import DISTANT_FUTURE
from .user_data import get_user_preferences
from .validation import validate_submission
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class NoTeamToCreateSubmissionForError(Exception):
    """Raised when a team submission is attempted but the submitter has no team."""
    pass
class EmptySubmissionError(Exception):
    """Raised when a submission contains neither text parts nor uploaded files."""
    pass
class SubmissionMixin:
"""Submission Mixin introducing all Submission-related functionality.
Submission Mixin contains all logic and handlers associated with rendering
the submission section of the front end, as well as making all API calls to
the middle tier for constructing new submissions, or fetching submissions.
SubmissionMixin is a Mixin for the OpenAssessmentBlock. Functions in the
SubmissionMixin call into the OpenAssessmentBlock functions and will not
work outside the scope of OpenAssessmentBlock.
"""
# MIME types / extensions accepted when the block is configured for
# image-only uploads.
ALLOWED_IMAGE_MIME_TYPES = ['image/gif', 'image/jpeg', 'image/pjpeg', 'image/png']  # pragma: no cover
ALLOWED_IMAGE_EXTENSIONS = ['gif', 'jpg', 'jpeg', 'jfif', 'pjpeg', 'pjp', 'png']  # pragma: no cover

# Superset used for the 'pdf-and-image' upload preset.
ALLOWED_FILE_MIME_TYPES = ['application/pdf'] + ALLOWED_IMAGE_MIME_TYPES  # pragma: no cover
ALLOWED_FILE_EXTENSIONS = ['pdf'] + ALLOWED_IMAGE_EXTENSIONS  # pragma: no cover

# Hard cap on the number of per-submission upload slots.
MAX_FILES_COUNT = 20  # pragma: no cover

# Extensions always rejected, regardless of upload configuration.
# taken from http://www.howtogeek.com/137270/50-file-extensions-that-are-potentially-dangerous-on-windows/
# and http://pcsupport.about.com/od/tipstricks/a/execfileext.htm
# left out .js and office extensions
FILE_EXT_BLACK_LIST = [
    'exe', 'msi', 'app', 'dmg', 'com', 'pif', 'application', 'gadget',
    'msp', 'scr', 'hta', 'cpl', 'msc', 'jar', 'bat', 'cmd', 'vb', 'vbs',
    'jse', 'ws', 'wsf', 'wsc', 'wsh', 'scf', 'lnk', 'inf', 'reg', 'ps1',
    'ps1xml', 'ps2', 'ps2xml', 'psc1', 'psc2', 'msh', 'msh1', 'msh2', 'mshxml',
    'msh1xml', 'msh2xml', 'action', 'apk', 'app', 'bin', 'command', 'csh',
    'ins', 'inx', 'ipa', 'isu', 'job', 'mst', 'osx', 'out', 'paf', 'prg',
    'rgs', 'run', 'sct', 'shb', 'shs', 'u3p', 'vbscript', 'vbe', 'workflow',
    'htm', 'html',
]

# Maps the configured file_upload_type to its allowed MIME types/extensions;
# 'custom' is configured per-block (see white_listed_file_types usage below).
FILE_UPLOAD_PRESETS = {
    'image': {
        'mime_types': ALLOWED_IMAGE_MIME_TYPES,
        'extensions': ALLOWED_IMAGE_EXTENSIONS
    },
    'pdf-and-image': {
        'mime_types': ALLOWED_FILE_MIME_TYPES,
        'extensions': ALLOWED_FILE_EXTENSIONS,
    },
    'custom': {}
}
@cached_property
def file_manager(self):
    """A FileUploadManager bound to this block, built lazily and cached."""
    manager = file_upload_api.FileUploadManager(self)
    return manager
@XBlock.json_handler
def submit(self, data, suffix=''):  # pylint: disable=unused-argument
    """Place the submission text into Openassessment system

    Allows submission of new responses. Performs basic workflow validation
    on any new submission to ensure it is acceptable to receive a new
    response at this time.

    Args:
        data (dict): Data may contain two attributes: submission and
            file_urls. submission is the response from the student which
            should be stored in the Open Assessment system. file_urls is the
            path to a related file for the submission. file_urls is optional.
        suffix (str): Not used in this handler.

    Returns:
        (tuple | [tuple]): Returns the status (boolean) of this request, the
            associated status tag (str), and status text (unicode).
            This becomes an array of similarly structured tuples in the event
            of a team submisison, one entry per student entry.
    """
    # Import is placed here to avoid model import at project startup.
    from submissions import api

    # Reject malformed requests before doing any work.
    if 'submission' not in data:
        return (
            False,
            'EBADARGS',
            self._('"submission" required to submit answer.')
        )

    status = False
    student_sub_data = data['submission']
    # Validate the response format against the prompts before any side effects.
    success, msg = validate_submission(student_sub_data, self.prompts, self._, self.text_response)
    if not success:
        return (
            False,
            'EBADARGS',
            msg
        )

    student_item_dict = self.get_student_item_dict()

    # Short-circuit if no user is defined (as in Studio Preview mode)
    # Since students can't submit, they will never be able to progress in the workflow
    if self.in_studio_preview:
        return (
            False,
            'ENOPREVIEW',
            self._('To submit a response, view this component in Preview or Live mode.')
        )

    workflow = self.get_workflow_info()

    # Default to the "already submitted" error; the branches below overwrite
    # these on other failure modes, or return directly on success.
    status_tag = 'ENOMULTI'  # It is an error to submit multiple times for the same item
    status_text = self._('Multiple submissions are not allowed.')

    if not workflow:
        try:
            # a submission for a team generates matching submissions for all members
            if self.is_team_assignment():
                submission = self.create_team_submission(student_sub_data)
            else:
                submission = self.create_submission(student_item_dict, student_sub_data)
            return self._create_submission_response(submission)

        except api.SubmissionRequestError as err:

            # Handle the case of an answer that's too long as a special case,
            # so we can display a more specific error message.
            # Although we limit the number of characters the user can
            # enter on the client side, the submissions API uses the JSON-serialized
            # submission to calculate length. If each character submitted
            # by the user takes more than 1 byte to encode (for example, double-escaped
            # newline characters or non-ASCII unicode), then the user might
            # exceed the limits set by the submissions API. In that case,
            # we display an error message indicating that the answer is too long.
            answer_too_long = any(
                "maximum answer size exceeded" in answer_err.lower()
                for answer_err in err.field_errors.get('answer', [])
            )
            if answer_too_long:
                logger.exception(
                    f"Response exceeds maximum allowed size: {student_item_dict}"
                )
                status_tag = 'EANSWERLENGTH'
                max_size = f"({int(api.Submission.MAXSIZE / 1024)} KB)"
                base_error = self._("Response exceeds maximum allowed size.")
                extra_info = self._(
                    "Note: if you have a spellcheck or grammar check browser extension, "
                    "try disabling, reloading, and reentering your response before submitting."
                )
                status_text = f"{base_error} {max_size} {extra_info}"
            else:
                msg = (
                    "The submissions API reported an invalid request error "
                    "when submitting a response for the user: {student_item}"
                ).format(student_item=student_item_dict)
                logger.exception(msg)
                status_tag = 'EBADFORM'
                status_text = msg
        except EmptySubmissionError:
            msg = (
                "Attempted to submit submission for user {student_item}, "
                "but submission contained no content."
            ).format(student_item=student_item_dict)
            logger.exception(msg)
            status_tag = 'EEMPTYSUB'
            status_text = self._(
                'Submission cannot be empty. '
                'Please refresh the page and try again.'
            )
        except (api.SubmissionError, AssessmentWorkflowError, NoTeamToCreateSubmissionForError):
            msg = (
                "An unknown error occurred while submitting "
                "a response for the user: {student_item}"
            ).format(student_item=student_item_dict)
            logger.exception(msg)
            status_tag = 'EUNKNOWN'
            status_text = self._('API returned unclassified exception.')

    # error cases fall through to here
    return status, status_tag, status_text
def _create_submission_response(self, submission):
    """Package a successful submission for return to the client.

    Returns:
        (tuple): True (indicates success), student item, attempt number
    """
    return (
        True,
        submission.get('student_item'),
        submission.get('attempt_number'),
    )
@XBlock.json_handler
def save_submission(self, data, suffix=''):  # pylint: disable=unused-argument
    """
    Save (draft) the current student's response, overwriting any prior draft.

    Args:
        data (dict): Must contain a 'submission' key holding the text of the
            student's response; may optionally carry a 'file_urls' key with
            the path to an associated file.
        suffix (str): Not used.

    Returns:
        dict: Contains a bool 'success' and unicode string 'msg'.
    """
    if 'submission' not in data:
        return {'success': False, 'msg': self._("Submission data missing. Please contact support staff.")}

    student_sub_data = data['submission']
    success, msg = validate_submission(student_sub_data, self.prompts, self._, self.text_response)
    if not success:
        return {'success': False, 'msg': msg}

    try:
        self.saved_response = json.dumps(
            prepare_submission_for_serialization(student_sub_data)
        )
        self.has_saved = True

        # Emit analytics event...
        self.runtime.publish(
            self,
            "openassessmentblock.save_submission",
            {"saved_response": self.saved_response}
        )
    except Exception:  # pylint: disable=broad-except
        return {'success': False, 'msg': self._("Please contact support staff.")}

    return {'success': True, 'msg': ''}
@XBlock.json_handler
def save_files_descriptions(self, data, suffix=''):  # pylint: disable=unused-argument
    """
    Save the metadata for each uploaded file.

    Args:
        data (dict): Data should have a single key 'fileMetadata' that contains
            a list of dictionaries with the following keys: 'description','fileName', and 'fileSize'
            each element of the list maps to a single file
        suffix (str): Not used.

    Returns:
        dict: Contains a bool 'success' and unicode string 'msg'.
    """
    failure_response = {'success': False, 'msg': self._("Files descriptions were not submitted.")}

    if 'fileMetadata' not in data:
        return failure_response

    if not isinstance(data['fileMetadata'], list):
        return failure_response

    # Use .get() so a missing key yields None (rejected by the type checks
    # below) instead of raising an unhandled KeyError.
    file_data = [
        {
            'description': item.get('description'),
            'name': item.get('fileName'),
            'size': item.get('fileSize'),
        } for item in data['fileMetadata']
    ]

    for new_upload in file_data:
        if not all([
            isinstance(new_upload['description'], str),
            isinstance(new_upload['name'], str),
            isinstance(new_upload['size'], int),
        ]):
            return failure_response

    try:
        self.file_manager.append_uploads(*file_data)
        # Emit analytics event...
        self.runtime.publish(
            self,
            "openassessmentblock.save_files_descriptions",
            {"saved_response": self.saved_files_descriptions}
        )
    except FileUploadError as exc:
        logger.exception(
            "FileUploadError: file description for data %s failed with error %s",
            data,
            exc,
            exc_info=True,
        )
        return {'success': False, 'msg': self._("Files metadata could not be saved.")}
    except Exception as exc:  # pylint: disable=broad-except
        logger.exception(
            "FileUploadError: unhandled exception for data %s. Error: %s",
            data,
            exc,
            exc_info=True,
        )
        # BUG FIX: previously this fell through to the success return after an
        # unhandled exception; report failure to the client instead.
        return {'success': False, 'msg': self._("Files metadata could not be saved.")}

    return {'success': True, 'msg': ''}
def create_team_submission(self, student_sub_data):
    """ A student submitting for a team should generate matching submissions for every member of the team. """
    if not self.has_team():
        msg = "Student {} has no team for course {}".format(
            self.get_student_item_dict()['student_id'],
            self.course_id
        )
        logger.exception(msg)
        raise NoTeamToCreateSubmissionForError(msg)

    # Import is placed here to avoid model import at project startup.
    from submissions import team_api

    team_info = self.get_team_info()

    # Serialize the response into a JSON-encodable dict so that extra
    # response fields (uploaded files, etc.) can be attached below.
    serialized_answer = prepare_submission_for_serialization(student_sub_data)
    self._collect_files_for_submission(serialized_answer)
    self.check_for_empty_submission_and_raise_error(serialized_answer)

    submitter_anonymous_user_id = self.xmodule_runtime.anonymous_student_id
    real_user = self.get_real_user(submitter_anonymous_user_id)
    student_item_dict = self.get_student_item_dict(anonymous_user_id=submitter_anonymous_user_id)
    team_member_anonymous_ids = self.get_anonymous_user_ids_for_team()

    submission = team_api.create_submission_for_team(
        self.course_id,
        student_item_dict['item_id'],
        team_info['team_id'],
        real_user.id,
        team_member_anonymous_ids,
        serialized_answer,
    )
    self.create_team_workflow(submission["team_submission_uuid"])

    # Emit analytics event...
    self.runtime.publish(
        self,
        "openassessmentblock.create_team_submission",
        {
            "submission_uuid": submission["team_submission_uuid"],
            "team_id": team_info["team_id"],
            "attempt_number": submission["attempt_number"],
            "created_at": submission["created_at"],
            "submitted_at": submission["submitted_at"],
            "answer": submission["answer"],
        }
    )
    return submission
def create_submission(self, student_item_dict, student_sub_data):
    """ Creates submission for the submitted assessment response or a list for a team assessment. """
    # Import is placed here to avoid model import at project startup.
    from submissions import api

    # Serialize the response into a JSON-encodable dict so that extra
    # response fields (uploaded files, etc.) can be attached below.
    serialized_answer = prepare_submission_for_serialization(student_sub_data)
    self._collect_files_for_submission(serialized_answer)
    self.check_for_empty_submission_and_raise_error(serialized_answer)

    submission = api.create_submission(student_item_dict, serialized_answer)
    self.create_workflow(submission["uuid"])
    self.submission_uuid = submission["uuid"]

    # Emit analytics event...
    self.runtime.publish(
        self,
        "openassessmentblock.create_submission",
        {
            "submission_uuid": submission["uuid"],
            "attempt_number": submission["attempt_number"],
            "created_at": submission["created_at"],
            "submitted_at": submission["submitted_at"],
            "answer": submission["answer"],
        }
    )
    return submission
def check_for_empty_submission_and_raise_error(self, student_sub_dict):
    """
    Guard against creating empty submissions.

    Raises EmptySubmissionError when student_sub_dict has neither any
    non-empty text part nor any uploaded file key.
    """
    text_present = any(part.get('text', '') for part in student_sub_dict.get('parts', []))
    files_present = bool(student_sub_dict.get('file_keys', []))
    if not (text_present or files_present):
        raise EmptySubmissionError
def _collect_files_for_submission(self, student_sub_dict):
    """ Collect files from CSM for individual submisisons or SharedFileUpload for team submisisons. """
    if not self.file_upload_type:
        return None

    # Initialize the file fields so they exist even with zero uploads.
    for field in ('file_keys', 'files_descriptions', 'files_names', 'files_sizes'):
        student_sub_dict[field] = []

    team_id = self.team.team_id if self.has_team() else None
    uploads = self.file_manager.get_uploads(team_id=team_id)
    if self.is_team_assignment():
        uploads += self.file_manager.get_team_uploads(team_id=team_id)

    student_sub_dict['file_keys'] = [upload.key for upload in uploads]
    student_sub_dict['files_descriptions'] = [upload.description for upload in uploads]
    student_sub_dict['files_names'] = [upload.name for upload in uploads]
    student_sub_dict['files_sizes'] = [upload.size for upload in uploads]
    return student_sub_dict
@XBlock.json_handler
def get_student_username(self, data, suffix):  # pylint: disable=unused-argument
    """
    Look up the current learner's username (used for team lookup).
    """
    anonymous_student_id = self.xmodule_runtime.anonymous_student_id
    username = self.get_username(anonymous_student_id)
    return {'username': username}
@XBlock.json_handler
def upload_url(self, data, suffix=''):  # pylint: disable=unused-argument
    """
    Request a URL to be used for uploading content related to this
    submission.

    Returns:
        A URL to be used to upload content associated with this submission.
    """
    if 'contentType' not in data or 'filename' not in data:
        return {'success': False, 'msg': self._("There was an error uploading your file.")}

    if not self.allow_multiple_files:
        # Here we check if there are existing file uploads by checking for
        # an existing download url for any of the upload slots.
        # Note that we can't use self.saved_files_descriptions because that
        # is populated before files are uploaded
        for i in range(self.MAX_FILES_COUNT):
            file_url = self._get_download_url(i)
            if file_url:
                return {'success': False,
                        'msg': self._("Only a single file upload is allowed for this assessment.")}

    # Parse the target slot once (was previously computed twice).
    file_num = int(data.get('filenum', 0))

    _, file_ext = os.path.splitext(data['filename'])
    file_ext = file_ext.strip('.') if file_ext else None
    content_type = data['contentType']

    # Validate that there are no data issues and file type is allowed
    if not self.is_supported_upload_type(file_ext, content_type):
        # BUG FIX: the concatenated message fragments were missing separating
        # spaces, rendering as "...file type.Only the supported...".
        return {'success': False, 'msg': self._(
            "File upload failed: unsupported file type. "
            "Only the supported file types can be uploaded. "
            "If you have questions, please reach out to the course team."
        )}

    # Attempt to upload
    try:
        key = self._get_student_item_key(file_num)
        url = file_upload_api.get_upload_url(key, content_type)
        return {'success': True, 'url': url}
    except FileUploadError:
        logger.exception("FileUploadError:Error retrieving upload URL for the data: %s.", data)
        return {'success': False, 'msg': self._("Error retrieving upload URL.")}
def is_supported_upload_type(self, file_ext, content_type):
    """
    Determine if the uploaded file type/extension is allowed for the configured
    file upload configuration.

    Args:
        file_ext (str|None): file extension without the leading dot, or None
            when the filename has no extension.
        content_type (str): MIME type reported by the client.

    Returns:
        True/False if file type is supported/unsupported
    """
    # Normalize once. BUG FIX: file_ext may be None for extensionless
    # filenames, which previously crashed on .lower() in the 'custom' branch;
    # the blacklist check was also case-sensitive, letting e.g. 'EXE' through.
    file_ext = (file_ext or '').lower()

    if self.file_upload_type == 'image' and content_type not in self.ALLOWED_IMAGE_MIME_TYPES:
        return False
    if self.file_upload_type == 'pdf-and-image' and content_type not in self.ALLOWED_FILE_MIME_TYPES:
        return False
    if self.file_upload_type == 'custom' and file_ext not in self.white_listed_file_types:
        return False
    if file_ext in self.FILE_EXT_BLACK_LIST:
        return False
    return True
@XBlock.json_handler
def download_url(self, data, suffix=''):  # pylint: disable=unused-argument
    """
    Request a download URL.

    Returns:
        A URL to be used for downloading content related to the submission.
    """
    requested_file_num = int(data.get('filenum', 0))
    url = self._get_download_url(requested_file_num)
    return {'success': True, 'url': url}
@XBlock.json_handler
def remove_uploaded_file(self, data, suffix=''):  # pylint: disable=unused-argument
    """
    Removes uploaded user file.
    """
    # Coerce the slot number; anything unparseable becomes -1 (invalid).
    try:
        filenum = int(data.get('filenum', -1))
    except ValueError:
        filenum = -1

    student_item_key = self._get_student_item_key(num=filenum)
    if not self._can_delete_file(filenum):
        return {'success': False}

    try:
        self.file_manager.delete_upload(filenum)
        # Emit analytics event...
        self.runtime.publish(
            self,
            "openassessmentblock.remove_uploaded_file",
            {"student_item_key": student_item_key}
        )
        logger.debug("Deleted file %s", student_item_key)
        return {'success': True}
    except FileUploadError as exc:
        logger.exception(
            "FileUploadError: Error when deleting file %s : %s",
            student_item_key,
            exc,
            exc_info=True
        )
    except Exception as exc:  # pylint: disable=broad-except
        logger.exception(
            "FileUploadError: unhandled exception for data %s. Error: %s",
            data,
            exc,
            exc_info=True,
        )
    return {'success': False}
def _can_delete_file(self, filenum):
    """
    Helper function, wraps `file_upload_api.can_delete_file()`.
    """
    team_id = self.get_team_info().get('team_id')
    file_key = self._get_student_item_key(filenum)
    student_id = self.get_student_item_dict()['student_id']
    return file_upload_api.can_delete_file(student_id, self.teams_enabled, file_key, team_id)
def _get_download_url(self, file_num=0):
    """
    Internal function for retrieving the download url.
    """
    student_item_key = self._get_student_item_key(file_num)
    return self._get_url_by_file_key(student_item_key)
def _get_student_item_key(self, num=0):
    """
    Simple utility method to generate a common file upload key based on
    the student item.

    Returns:
        A string representation of the key.
    """
    student_item = self.get_student_item_dict()
    return file_upload_api.get_student_file_key(student_item, index=num)
@classmethod
def _get_url_by_file_key(cls, key):
    """
    Return download url for some particular file key.

    Returns '' when the key is falsy or the lookup fails.
    """
    if not key:
        return ''
    try:
        return file_upload_api.get_download_url(key)
    except FileUploadError as exc:
        logger.exception(
            "FileUploadError: Download url for file key %s failed with error %s",
            key,
            exc,
            exc_info=True
        )
        return ''
@classmethod
def get_download_urls_from_submission(cls, submission):
    """
    Returns a download URLs for retrieving content within a submission.

    Args:
        submission (dict): Dictionary containing an answer and a file_keys.
            The file_keys is used to try and retrieve a download urls
            with related content

    Returns:
        List of FileDescriptor dicts for each file associated with the submission
    """
    raw_answer = submission.get('answer')
    parsed_answer = OraSubmissionAnswerFactory.parse_submission_raw_answer(raw_answer)

    descriptors = []
    for uploaded_file in parsed_answer.get_file_uploads(missing_blank=True):
        url = cls._get_url_by_file_key(uploaded_file.key)
        # Skip files whose content is no longer retrievable.
        if not url:
            continue
        descriptors.append(
            file_upload_api.FileDescriptor(
                download_url=url,
                description=uploaded_file.description,
                name=uploaded_file.name,
                size=uploaded_file.size,
                show_delete_button=False
            )._asdict()
        )
    return descriptors
def get_files_info_from_user_state(self, username):
    """
    Returns the files information from the user state for a given username.

    If the files information is present in the user state, return a list of following tuple:
    (file_download_url, file_description, file_name)

    Arguments:
        username(str): user's name whose state is being check for files information.

    Returns:
        List of FileDescriptor dicts, if present, else empty list.
    """
    files_info = []
    user_state = self.get_user_state(username)
    item_dict = self.get_student_item_dict_from_username_or_email(username)
    if 'saved_files_descriptions' in user_state:
        # pylint: disable=protected-access
        files_descriptions = file_upload_api._safe_load_json_list(
            user_state.get('saved_files_descriptions'),
            log_error=True
        )
        files_names = file_upload_api._safe_load_json_list(
            user_state.get('saved_files_names', '[]'),
            log_error=True
        )
        for index, description in enumerate(files_descriptions):
            file_key = file_upload_api.get_student_file_key(item_dict, index)
            download_url = self._get_url_by_file_key(file_key)
            if download_url:
                # Names may be shorter than descriptions if state was saved
                # before names were recorded; fall back to ''.
                file_name = files_names[index] if index < len(files_names) else ''
                files_info.append(
                    file_upload_api.FileDescriptor(
                        download_url=download_url,
                        description=description,
                        name=file_name,
                        size=None,
                        show_delete_button=False
                    )._asdict()
                )
            else:
                # If file has been removed, the URL doesn't exist
                # BUG FIX: arguments were previously (description, username,
                # file_key), mismatching the "%s & key %s for user:%s"
                # placeholders; also dropped a redundant trailing `continue`.
                logger.info(
                    "URLWorkaround: no URL for description %s & key %s for user:%s",
                    description,
                    file_key,
                    username
                )
    return files_info
def get_all_upload_urls_for_user(self, username_or_email):
    """
    For a particular ORA block, get the download URLs for all the files uploaded and still present.

    Used for an extreme edge case, where the stored files indices are out of sync with
    the uploaded files, this is a last resort to get the download URLs of all the files
    that have been uploaded by a learner in an ORA block(and haven't been deleted from the storage).

    Starting from 0 index to maximum file upload count possible, this checks if a file exists against
    every index. If present, add the info, else repeat it for the next indices.

    Arguments:
        username_or_email(str): username or email of the learner whose files' information is to be obtained.

    Returns:
        List of FileDescriptor dicts
    """
    file_uploads = []
    student_item_dict = self.get_student_item_dict_from_username_or_email(username_or_email)
    for index in range(self.MAX_FILES_COUNT):
        file_key = file_upload_api.get_student_file_key(student_item_dict, index)
        download_url = ''
        try:
            download_url = file_upload_api.get_download_url(file_key)
        except FileUploadError:
            # Missing file for this slot; keep probing the remaining indices.
            pass

        if download_url:
            # BUG FIX: arguments were previously (file_key, username, location),
            # mismatching the "key %s in block %s for user %s" placeholders;
            # also dropped a redundant `else: continue`.
            logger.info(
                "Download URL exists for key %s in block %s for user %s",
                file_key,
                str(self.location),
                username_or_email
            )
            file_uploads.append(
                file_upload_api.FileDescriptor(
                    download_url=download_url,
                    description='',
                    name='',
                    size=None,
                    show_delete_button=False
                )._asdict()
            )
    return file_uploads
@staticmethod
def get_user_submission(submission_uuid):
    """Return the most recent submission by user in workflow

    Return the most recent submission. If no submission is available,
    return None. All submissions are preserved, but only the most recent
    will be returned in this function, since the active workflow will only
    be concerned with the most recent submission.

    Args:
        submission_uuid (str): The uuid for the submission to retrieve.

    Returns:
        (dict): A dictionary representation of a submission to render to
            the front end.
    """
    # Import is placed here to avoid model import at project startup.
    from submissions import api
    try:
        submission = api.get_submission(submission_uuid)
    except api.SubmissionRequestError:
        # This error is actually ok.
        return None
    return submission
@property
def save_status(self):
    """
    Return a string indicating whether the response has been saved.

    Returns:
        unicode
    """
    if self.has_saved:
        return self._('Draft saved!')
    return self._('Response not started.')
@XBlock.handler
def render_submission(self, data, suffix=''):  # pylint: disable=unused-argument
    """Render the Submission (response) HTML section of the XBlock.

    See OpenAssessmentBlock.render_assessment() for details on section
    rendering. Must handle every combination of response state:
    unanswered/saved/submitted crossed with open/closed, plus
    "waiting for assessment" and "graded".
    """
    template_path, template_context = self.submission_path_and_context()
    return self.render_assessment(template_path, context_dict=template_context)
def get_team_submission_context(self, context):
    """
    Populate the passed context object with team info, including a set of students on
    the team with submissions to the current item from another team, under the key
    `team_members_with_external_submissions`.

    Args:
        context (dict): render context to add team submission context into

    Returns
        (dict): context arg with additional team-related fields
    """
    from submissions import team_api
    try:
        team_info = self.get_team_info()
        if not team_info:
            return
        context.update(team_info)
        # Staff don't need the external-submission warning.
        if self.is_course_staff:
            return
        student_item_dict = self.get_student_item_dict()
        external_submissions = team_api.get_teammates_with_submissions_from_other_teams(
            self.course_id,
            student_item_dict["item_id"],
            team_info["team_id"],
            self.get_anonymous_user_ids_for_team()
        )
        external_usernames = [
            self.get_username(submission['student_id']) for submission in external_submissions
        ]
        context["team_members_with_external_submissions"] = list_to_conversational_format(external_usernames)
    except ObjectDoesNotExist:
        logger.error(
            '%s: User associated with anonymous_user_id %s can not be found.',
            str(self.location),
            self.get_student_item_dict()['student_id'],
        )
    except NoSuchServiceError:
        logger.error('%s: Teams service is unavailable', str(self.location))
def get_allowed_file_types_or_preset(self):
    """
    If allowed files are not explicitly set for file uploads, use preset extensions
    """
    if self.white_listed_file_types:
        return self.white_listed_file_types
    presets = {
        'image': self.ALLOWED_IMAGE_EXTENSIONS,
        'pdf-and-image': self.ALLOWED_FILE_EXTENSIONS,
    }
    # Unknown/unset upload types yield None, matching the original fallthrough.
    return presets.get(self.file_upload_type)
def submission_path_and_context(self):
    """
    Determine the template path and context to use when
    rendering the response (submission) step.

    Returns:
        tuple of `(path, context)`, where `path` (str) is the path to the template,
        and `context` (dict) is the template context.
    """
    workflow = self.get_team_workflow_info() if self.teams_enabled else self.get_workflow_info()
    problem_closed, reason, start_date, due_date = self.is_closed('submission')
    user_preferences = get_user_preferences(self.runtime.service(self, 'user'))
    course_id = self.location.course_key if hasattr(self, 'location') else None
    # Default: the open-response template; overridden by the branches below.
    path = 'openassessmentblock/response/oa_response.html'
    context = {
        'enable_delete_files': False,
        'file_upload_response': self.file_upload_response,
        'has_real_user': self.has_real_user,
        'prompts_type': self.prompts_type,
        'show_rubric_during_response': self.show_rubric_during_response,
        'text_response': self.text_response,
        'text_response_editor': self.text_response_editor,
        'user_language': user_preferences['user_language'],
        'user_timezone': user_preferences['user_timezone'],
        'xblock_id': self.get_xblock_id(),
        'base_asset_url': self._get_base_url_path_for_course_assets(course_id)
    }
    if self.show_rubric_during_response:
        # deepcopy so template-side mutation cannot leak into block state
        context['rubric_criteria'] = copy.deepcopy(self.rubric_criteria_with_labels)
    # Due dates can default to the distant future, in which case
    # there's effectively no due date.
    # If we don't add the date to the context, the template won't display it.
    if due_date < DISTANT_FUTURE:
        context["submission_due"] = due_date
    # For team assignments, if a user submitted with a past team, that gets precedence.
    # So we first see if they have a submission and load context from that.
    # Otherwise, we fall back to the current team.
    team_id_for_current_submission = None
    if self.is_team_assignment():
        if not workflow:
            team_id_for_current_submission = self.get_team_info().get('team_id', None)
        else:
            team_submission = get_team_submission(workflow['team_submission_uuid'])
            team_id_for_current_submission = team_submission['team_id']
        # If it's a team assignment, the user hasn't submitted and is not on a team, the assignment is unavailable.
        if team_id_for_current_submission is None:
            path = 'openassessmentblock/response/oa_response_unavailable.html'
            return path, context
    context['file_upload_type'] = self.file_upload_type
    context['allow_multiple_files'] = self.allow_multiple_files
    context['allow_latex'] = self.allow_latex
    if self.file_upload_type:
        # include_deleted=True: individual descriptors keep tombstones visible
        context['file_urls'] = self.file_manager.file_descriptors(
            team_id=team_id_for_current_submission, include_deleted=True
        )
        context['team_file_urls'] = self.file_manager.team_file_descriptors(
            team_id=team_id_for_current_submission
        )
        context['white_listed_file_types'] = ['.' + ext for ext in self.get_allowed_file_types_or_preset()]
    if not workflow and problem_closed:
        # No submission yet and the window is closed (either ended or not started).
        if reason == 'due':
            path = 'openassessmentblock/response/oa_response_closed.html'
        elif reason == 'start':
            context['submission_start'] = start_date
            path = 'openassessmentblock/response/oa_response_unavailable.html'
    elif not workflow:
        # No submission yet and the problem is open: show any saved draft.
        # For backwards compatibility. Initially, problems had only one prompt
        # and a string answer. We convert it to the appropriate dict.
        no_workflow_path = "openassessmentblock/response/oa_response.html"
        try:
            json.loads(self.saved_response)
            saved_response = {
                'answer': json.loads(self.saved_response),
            }
        except ValueError:
            # Legacy plain-string draft
            saved_response = {
                'answer': {
                    'text': self.saved_response,
                },
            }
        context['saved_response'] = create_submission_dict(saved_response, self.prompts)
        context['save_status'] = self.save_status
        context['enable_delete_files'] = True
        if self.teams_enabled:
            self.get_team_submission_context(context)
            if self.does_team_have_submission(context['team_id']):
                no_workflow_path = 'openassessmentblock/response/oa_response_team_already_submitted.html'
        path = no_workflow_path
    elif workflow["status"] == "cancelled":
        if self.teams_enabled:
            context["workflow_cancellation"] = self.get_team_workflow_cancellation_info(
                workflow["team_submission_uuid"])
        else:
            context["workflow_cancellation"] = self.get_workflow_cancellation_info(
                self.submission_uuid)
        context["student_submission"] = self.get_user_submission(
            workflow["submission_uuid"]
        )
        path = 'openassessmentblock/response/oa_response_cancelled.html'
    elif workflow["status"] == "done":
        student_submission = self.get_user_submission(
            workflow["submission_uuid"]
        )
        context["student_submission"] = create_submission_dict(student_submission, self.prompts)
        path = 'openassessmentblock/response/oa_response_graded.html'
    else:
        # Submitted but not yet graded: show progress through peer/self steps.
        student_submission = self.get_user_submission(
            workflow["submission_uuid"]
        )
        peer_in_workflow = "peer" in workflow["status_details"]
        self_in_workflow = "self" in workflow["status_details"]
        context["peer_incomplete"] = peer_in_workflow and not workflow["status_details"]["peer"]["complete"]
        context["self_incomplete"] = self_in_workflow and not workflow["status_details"]["self"]["complete"]
        context["student_submission"] = create_submission_dict(student_submission, self.prompts)
        path = 'openassessmentblock/response/oa_response_submitted.html'
    return path, context
/minion_data-0.2.1-py3-none-any.whl/minion_data/preparation/_re_squiggled.py | import argparse
import logging
import gzip
import h5py
from os import path
import typing
import os
from tqdm import tqdm
from typing import NamedTuple
from .. import dataset_pb2
import numpy as np
import multiprocessing as mp
import threading
from glob import glob
from .common import fillDataPoint
from minion_data.inspect import debug_output
class MinionDataCfg(NamedTuple):
    """Immutable settings for one dataset-preparation run."""
    input: str  # directory holding re-squiggled .fast5 files
    out: str  # output directory for .datapoint files
    basecall_group: str  # HDF5 analyses group with basecall results (e.g. Basecall_1D_000)
    basecall_subgroup: str  # subgroup inside the corrected data (e.g. BaseCalled_template)
class ProcessDataPointCfg(NamedTuple):
    """Per-file work item handed to a pool worker."""
    fname_no_ext: str  # fast5 path without its extension
    cfg: MinionDataCfg  # shared run configuration
    completed: mp.Queue  # progress queue; workers put a result (or exception) per file
def processDataPoint(cfgDp: ProcessDataPointCfg):
    """Convert one re-squiggled fast5 read into a gzipped DataPoint protobuf.

    Reads the raw signal, the tombo/nanoraw corrected events and the basecalled
    sequence from the fast5 file, assembles a ``dataset_pb2.DataPoint`` and
    writes it (gzip-compressed) into ``cfg.out``. One token per file is always
    put on ``cfgDp.completed`` (the serialized protobuf on success, the
    exception on failure) so the progress listener never stalls.
    """
    try:
        cfg = cfgDp.cfg
        sol = dataset_pb2.DataPoint()
        with h5py.File(cfgDp.fname_no_ext + ".fast5", 'r') as fast5_data:
            # Get raw data
            try:
                raw_dat = list(fast5_data['/Raw/Reads/'].values())[0]
                # raw_attrs = raw_dat.attrs
                # NOTE(review): Dataset.value is deprecated in h5py >= 2.1
                # (use ds[()]); kept for compatibility with the pinned h5py.
                raw_dat = raw_dat['Signal'].value
                sol.MergeFrom(dataset_pb2.DataPoint(signal=raw_dat))
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; chain the original error for debuggability.
            except Exception as err:
                raise RuntimeError(
                    'Raw data is not stored in Raw/Reads/Read_[read#] so ' +
                    'new segments cannot be identified.') from err
            # Read corrected data
            try:
                corr_data = fast5_data[
                    '/Analyses/RawGenomeCorrected_000/' + cfg.basecall_subgroup + '/Events']
                corr_attrs = dict(list(corr_data.attrs.items()))
                corr_data = corr_data.value
            except Exception as err:
                raise RuntimeError('Corrected data not found.') from err
            # Maybe
            # NOTE(review): subgroup is hard-coded to BaseCalled_template here
            # even though cfg.basecall_subgroup exists — confirm intended.
            basecalled = fast5_data[f"/Analyses/{cfg.basecall_group}/BaseCalled_template/Fastq"].value.strip().split()[2].decode("ASCII")
            # fast5_info = fast5_data['UniqueGlobalKey/channel_id'].attrs
            # sampling_rate = fast5_info['sampling_rate'].astype('int_')
            # Reading extra information
            corr_start_rel_to_raw = corr_attrs['read_start_rel_to_raw']
            if any(len(vals) <= 1 for vals in (
                    corr_data, raw_dat)):
                raise NotImplementedError(
                    'One or no segments or signal present in read.')
            # Event positions are stored relative to the corrected start offset.
            event_starts = corr_data['start'] + corr_start_rel_to_raw
            event_lengths = corr_data['length']
            event_bases = corr_data['base']
            label_data = np.array(
                list(zip(event_starts, event_lengths, event_bases)),
                dtype=[('start', '<u4'), ('length', '<u4'), ('base', 'S1')])
            # One confidence interval per event: [start, start+length) with its base.
            labels = []
            for index, start in enumerate(event_starts):
                labels.append(dataset_pb2.DataPoint.BPConfidenceInterval(
                    lower=start,
                    upper=start + event_lengths[index],
                    pair=typing.cast(
                        dataset_pb2.BasePair,
                        dataset_pb2.BasePair.Value(label_data['base'][index].decode("ASCII").upper())),
                ),
                )
            sol.MergeFrom(dataset_pb2.DataPoint(
                basecalled=[typing.cast(dataset_pb2.BasePair, dataset_pb2.BasePair.Value(x)) for x in basecalled],
                aligned_ref=[x.pair for x in labels],
                labels=labels,
            ))
        fillDataPoint(sol)
        fname_out = path.join(cfg.out, cfgDp.fname_no_ext.split(os.sep)[-1] + ".datapoint")
        with gzip.open(fname_out, "w") as f:
            sol_pb_str = sol.SerializeToString()
            f.write(sol_pb_str)
        cfgDp.completed.put(sol_pb_str)
    except Exception as ex:
        # Broad by design: a worker must never kill the pool; report and move on.
        logging.getLogger(__name__).error(f"Cannot process {cfgDp.fname_no_ext} {type(ex).__name__}\n{ex}", exc_info=True)
        cfgDp.completed.put(ex)
def main(cfg: MinionDataCfg):
    """Prepare every fast5 under ``cfg.input`` into ``cfg.out`` using a process pool.

    A manager queue receives one token per file (result or exception) from the
    workers; a daemon thread drains it to drive the progress bar.
    """
    os.makedirs(cfg.out, exist_ok=True)
    # Renamed from `all`, which shadowed the builtin of the same name.
    fast5_files = glob(cfg.input + "/*.fast5")
    with tqdm(total=len(fast5_files), desc="preparing dataset") as pbar:
        with mp.Pool() as p:
            m = mp.Manager()
            q = m.Queue()

            def drain_progress():
                # One queue entry arrives per scheduled file, success or failure.
                for _ in range(len(fast5_files)):
                    q.get()
                    pbar.update()

            threading.Thread(target=drain_progress, daemon=True).start()
            p.map(
                processDataPoint,
                [ProcessDataPointCfg(
                    fname_no_ext=os.path.splitext(x)[0],
                    cfg=cfg,
                    completed=q,
                ) for x in fast5_files]
            )
def run(args):
    """CLI entry point: configure logging, build the config and run `main`.

    Returns 0 on completion (per-file failures are logged, not fatal).
    """
    # Bug fix: logging.basicConfig is a no-op once handlers are configured,
    # so the original second call could never enable DEBUG. Pick the level
    # before the first (and only) basicConfig call.
    logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
    cfg = MinionDataCfg(
        input=path.abspath(args.input),
        out=path.abspath(args.out),
        basecall_group=args.basecall_group,
        basecall_subgroup=args.basecall_subgroup,
    )
    main(cfg)
    return 0
def add_args(parser: argparse.ArgumentParser):
    """Attach the command-line options for this sub-command to *parser*."""
    add = parser.add_argument
    add("--input", "-i", help="input folder with re-sqiggled fast5s", required=True)
    add("--out", "-o", help="output folder", required=True)
    add('--basecall_group', default='Basecall_1D_000',
        help='Basecall group Nanoraw resquiggle into. Default is Basecall_1D_000')
    add('--basecall_subgroup', default='BaseCalled_template',
        help='Basecall subgroup Nanoraw resquiggled into. Default is BaseCalled_template')
    # Route parsed args of this sub-command to run().
    parser.set_defaults(func=run)
/llama_index-0.8.17.tar.gz/llama_index-0.8.17/llama_index/agent/openai_agent.py | import asyncio
import json
import logging
from abc import abstractmethod
from threading import Thread
from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
from llama_index.agent.types import BaseAgent
from llama_index.callbacks import (
CallbackManager,
trace_method,
CBEventType,
EventPayload,
)
from llama_index.chat_engine.types import (
AGENT_CHAT_RESPONSE_TYPE,
AgentChatResponse,
ChatResponseMode,
StreamingAgentChatResponse,
)
from llama_index.indices.base_retriever import BaseRetriever
from llama_index.llms.base import LLM, ChatMessage, ChatResponse, MessageRole
from llama_index.llms.openai import OpenAI
from llama_index.llms.openai_utils import is_function_calling_model
from llama_index.memory import BaseMemory, ChatMemoryBuffer
from llama_index.schema import BaseNode, NodeWithScore
from llama_index.tools import BaseTool, ToolOutput, adapt_to_async_tool
# Module logger, quieted to WARNING so routine agent chatter is suppressed.
logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)

# Cap on consecutive OpenAI function calls within a single chat turn.
DEFAULT_MAX_FUNCTION_CALLS = 5
# Default model; must support the function-calling API.
DEFAULT_MODEL_NAME = "gpt-3.5-turbo-0613"
def get_function_by_name(tools: List[BaseTool], name: str) -> BaseTool:
    """Return the tool whose metadata name equals ``name``.

    Raises:
        ValueError: if no tool in ``tools`` carries that name.
    """
    # Last tool wins on duplicate names, matching dict-comprehension semantics.
    lookup = {tool.metadata.name: tool for tool in tools}
    if name in lookup:
        return lookup[name]
    raise ValueError(f"Tool with name {name} not found")
def call_function(
    tools: List[BaseTool], function_call: dict, verbose: bool = False
) -> Tuple[ChatMessage, ToolOutput]:
    """Execute the tool named in ``function_call`` and package the result.

    Returns a FUNCTION-role chat message (stringified output, tool name in
    additional_kwargs) together with the raw tool output.
    """
    name = function_call["name"]
    arguments_str = function_call["arguments"]
    if verbose:
        print("=== Calling Function ===")
        print(f"Calling function: {name} with args: {arguments_str}")
    tool = get_function_by_name(tools, name)
    # Arguments arrive JSON-encoded from the model; expand them as kwargs.
    output = tool(**json.loads(arguments_str))
    if verbose:
        print(f"Got output: {str(output)}")
        print("========================")
    function_message = ChatMessage(
        content=str(output),
        role=MessageRole.FUNCTION,
        additional_kwargs={
            "name": function_call["name"],
        },
    )
    return function_message, output
async def acall_function(
    tools: List[BaseTool], function_call: dict, verbose: bool = False
) -> Tuple[ChatMessage, ToolOutput]:
    """Async variant of ``call_function``: await the tool and package the result."""
    name = function_call["name"]
    arguments_str = function_call["arguments"]
    if verbose:
        print("=== Calling Function ===")
        print(f"Calling function: {name} with args: {arguments_str}")
    tool = get_function_by_name(tools, name)
    # Wrap sync tools so every tool exposes an awaitable acall().
    async_tool = adapt_to_async_tool(tool)
    output = await async_tool.acall(**json.loads(arguments_str))
    if verbose:
        print(f"Got output: {str(output)}")
        print("========================")
    function_message = ChatMessage(
        content=str(output),
        role=MessageRole.FUNCTION,
        additional_kwargs={
            "name": function_call["name"],
        },
    )
    return function_message, output
def resolve_function_call(function_call: Union[str, dict] = "auto") -> Union[str, dict]:
    """Resolve function call.

    If function_call is a function name string, return a dict with the name.
    The special strings "none"/"auto" and dicts pass through unchanged.
    """
    is_named_function = (
        isinstance(function_call, str) and function_call not in ("none", "auto")
    )
    return {"name": function_call} if is_named_function else function_call
class BaseOpenAIAgent(BaseAgent):
    """Shared chat loop for OpenAI function-calling agents.

    Subclasses provide the tool set via ``_get_tools``; this base class drives
    the chat / function-call loop in blocking, async, and streaming variants,
    recording tool outputs in ``self.sources`` and messages in ``self.memory``.
    """

    def __init__(
        self,
        llm: OpenAI,
        memory: BaseMemory,
        prefix_messages: List[ChatMessage],
        verbose: bool,
        max_function_calls: int,
        callback_manager: Optional[CallbackManager],
    ):
        self._llm = llm
        self._verbose = verbose
        self._max_function_calls = max_function_calls
        self.prefix_messages = prefix_messages
        self.memory = memory
        self.callback_manager = callback_manager or CallbackManager([])
        # Tool outputs gathered during the current chat turn.
        self.sources: List[ToolOutput] = []

    @property
    def chat_history(self) -> List[ChatMessage]:
        """Entire stored conversation history."""
        return self.memory.get_all()

    @property
    def all_messages(self) -> List[ChatMessage]:
        """Prefix (e.g. system) messages plus the windowed memory contents."""
        return self.prefix_messages + self.memory.get()

    @property
    def latest_function_call(self) -> Optional[dict]:
        """The function_call dict on the most recent message, if one exists."""
        return self.memory.get_all()[-1].additional_kwargs.get("function_call", None)

    def reset(self) -> None:
        """Clear conversation memory."""
        self.memory.reset()

    @abstractmethod
    def _get_tools(self, message: str) -> List[BaseTool]:
        """Get tools."""
        pass

    def _should_continue(
        self, function_call: Optional[dict], n_function_calls: int
    ) -> bool:
        """Keep looping while the LLM requests a function call, up to the cap."""
        if n_function_calls > self._max_function_calls:
            return False
        if not function_call:
            return False
        return True

    def init_chat(
        self, message: str, chat_history: Optional[List[ChatMessage]] = None
    ) -> Tuple[List[BaseTool], List[dict]]:
        """Record the user message and return (tools, OpenAI function schemas)."""
        if chat_history is not None:
            # Caller-provided history replaces what is in memory.
            self.memory.set(chat_history)
        self.sources = []
        self.memory.put(ChatMessage(content=message, role=MessageRole.USER))
        tools = self._get_tools(message)
        functions = [tool.metadata.to_openai_function() for tool in tools]
        return tools, functions

    def _process_message(self, chat_response: ChatResponse) -> AgentChatResponse:
        """Store the assistant message and wrap it in an AgentChatResponse."""
        ai_message = chat_response.message
        self.memory.put(ai_message)
        return AgentChatResponse(response=str(ai_message.content), sources=self.sources)

    def _get_stream_ai_response(
        self, **llm_chat_kwargs: Any
    ) -> StreamingAgentChatResponse:
        """Start a streaming chat; block only while a function call is streaming."""
        chat_stream_response = StreamingAgentChatResponse(
            chat_stream=self._llm.stream_chat(**llm_chat_kwargs),
            sources=self.sources,
        )
        # Get the response in a separate thread so we can yield the response
        thread = Thread(
            target=chat_stream_response.write_response_to_history,
            args=(self.memory,),
        )
        thread.start()
        # Wait for the event to be set
        chat_stream_response._is_function_not_none_thread_event.wait()
        # If it is executing an openAI function, wait for the thread to finish
        if chat_stream_response._is_function:
            thread.join()
        # if it's false, return the answer (to stream)
        return chat_stream_response

    async def _get_async_stream_ai_response(
        self, **llm_chat_kwargs: Any
    ) -> StreamingAgentChatResponse:
        """Async counterpart of ``_get_stream_ai_response`` using a task."""
        chat_stream_response = StreamingAgentChatResponse(
            achat_stream=await self._llm.astream_chat(**llm_chat_kwargs),
            sources=self.sources,
        )
        # create task to write chat response to history
        asyncio.create_task(
            chat_stream_response.awrite_response_to_history(self.memory)
        )
        # wait until openAI functions stop executing
        await chat_stream_response._is_function_false_event.wait()
        # return response stream
        return chat_stream_response

    def _call_function(self, tools: List[BaseTool], function_call: dict) -> None:
        """Execute one requested tool call, recording callbacks and memory."""
        with self.callback_manager.event(
            CBEventType.FUNCTION_CALL,
            payload={
                EventPayload.FUNCTION_CALL: function_call["arguments"],
                EventPayload.TOOL: get_function_by_name(
                    tools, function_call["name"]
                ).metadata,
            },
        ) as event:
            function_message, tool_output = call_function(
                tools, function_call, verbose=self._verbose
            )
            event.on_end(payload={EventPayload.FUNCTION_OUTPUT: str(tool_output)})
        self.sources.append(tool_output)
        self.memory.put(function_message)

    async def _acall_function(self, tools: List[BaseTool], function_call: dict) -> None:
        """Async variant of ``_call_function``."""
        with self.callback_manager.event(
            CBEventType.FUNCTION_CALL,
            payload={
                EventPayload.FUNCTION_CALL: function_call["arguments"],
                EventPayload.TOOL: get_function_by_name(
                    tools, function_call["name"]
                ).metadata,
            },
        ) as event:
            function_message, tool_output = await acall_function(
                tools, function_call, verbose=self._verbose
            )
            event.on_end(payload={EventPayload.FUNCTION_OUTPUT: str(tool_output)})
        self.sources.append(tool_output)
        self.memory.put(function_message)

    def _get_llm_chat_kwargs(
        self, functions: List[dict], function_call: Union[str, dict] = "auto"
    ) -> Dict[str, Any]:
        """Assemble kwargs for the LLM call; omit function args when no tools."""
        llm_chat_kwargs: dict = dict(messages=self.all_messages)
        if functions:
            llm_chat_kwargs.update(
                functions=functions, function_call=resolve_function_call(function_call)
            )
        return llm_chat_kwargs

    def _get_agent_response(
        self, mode: ChatResponseMode, **llm_chat_kwargs: Any
    ) -> AGENT_CHAT_RESPONSE_TYPE:
        """Dispatch one LLM call in either blocking (WAIT) or STREAM mode."""
        if mode == ChatResponseMode.WAIT:
            chat_response: ChatResponse = self._llm.chat(**llm_chat_kwargs)
            return self._process_message(chat_response)
        elif mode == ChatResponseMode.STREAM:
            return self._get_stream_ai_response(**llm_chat_kwargs)
        else:
            raise NotImplementedError

    async def _get_async_agent_response(
        self, mode: ChatResponseMode, **llm_chat_kwargs: Any
    ) -> AGENT_CHAT_RESPONSE_TYPE:
        """Async variant of ``_get_agent_response``."""
        if mode == ChatResponseMode.WAIT:
            chat_response: ChatResponse = await self._llm.achat(**llm_chat_kwargs)
            return self._process_message(chat_response)
        elif mode == ChatResponseMode.STREAM:
            return await self._get_async_stream_ai_response(**llm_chat_kwargs)
        else:
            raise NotImplementedError

    def _chat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        function_call: Union[str, dict] = "auto",
        mode: ChatResponseMode = ChatResponseMode.WAIT,
    ) -> AGENT_CHAT_RESPONSE_TYPE:
        """Core loop: call the LLM, execute any requested functions, repeat."""
        tools, functions = self.init_chat(message, chat_history)
        n_function_calls = 0
        # Loop until no more function calls or max_function_calls is reached
        current_func = function_call
        while True:
            llm_chat_kwargs = self._get_llm_chat_kwargs(functions, current_func)
            agent_chat_response = self._get_agent_response(mode=mode, **llm_chat_kwargs)
            if not self._should_continue(self.latest_function_call, n_function_calls):
                logger.debug("Break: should continue False")
                break
            assert isinstance(self.latest_function_call, dict)
            self._call_function(tools, self.latest_function_call)
            # change function call to the default value, if a custom function was given
            # as an argument (none and auto are predefined by OpenAI)
            if current_func not in ("auto", "none"):
                current_func = "auto"
            n_function_calls += 1
        return agent_chat_response

    async def _achat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        function_call: Union[str, dict] = "auto",
        mode: ChatResponseMode = ChatResponseMode.WAIT,
    ) -> AGENT_CHAT_RESPONSE_TYPE:
        """Async variant of ``_chat``."""
        tools, functions = self.init_chat(message, chat_history)
        n_function_calls = 0
        # Loop until no more function calls or max_function_calls is reached
        current_func = function_call
        while True:
            llm_chat_kwargs = self._get_llm_chat_kwargs(functions, current_func)
            agent_chat_response = await self._get_async_agent_response(
                mode=mode, **llm_chat_kwargs
            )
            if not self._should_continue(self.latest_function_call, n_function_calls):
                break
            assert isinstance(self.latest_function_call, dict)
            await self._acall_function(tools, self.latest_function_call)
            # change function call to the default value, if a custom function was given
            # as an argument (none and auto are predefined by OpenAI)
            if current_func not in ("auto", "none"):
                current_func = "auto"
            n_function_calls += 1
        return agent_chat_response

    @trace_method("chat")
    def chat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        function_call: Union[str, dict] = "auto",
    ) -> AgentChatResponse:
        """Blocking chat turn; returns the final (non-streaming) response."""
        chat_response = self._chat(
            message, chat_history, function_call, mode=ChatResponseMode.WAIT
        )
        assert isinstance(chat_response, AgentChatResponse)
        return chat_response

    @trace_method("chat")
    async def achat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        function_call: Union[str, dict] = "auto",
    ) -> AgentChatResponse:
        """Async blocking chat turn."""
        chat_response = await self._achat(
            message, chat_history, function_call, mode=ChatResponseMode.WAIT
        )
        assert isinstance(chat_response, AgentChatResponse)
        return chat_response

    @trace_method("chat")
    def stream_chat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        function_call: Union[str, dict] = "auto",
    ) -> StreamingAgentChatResponse:
        """Streaming chat turn; function calls are resolved before streaming."""
        chat_response = self._chat(
            message, chat_history, function_call, mode=ChatResponseMode.STREAM
        )
        assert isinstance(chat_response, StreamingAgentChatResponse)
        return chat_response

    @trace_method("chat")
    async def astream_chat(
        self,
        message: str,
        chat_history: Optional[List[ChatMessage]] = None,
        function_call: Union[str, dict] = "auto",
    ) -> StreamingAgentChatResponse:
        """Async streaming chat turn."""
        chat_response = await self._achat(
            message, chat_history, function_call, mode=ChatResponseMode.STREAM
        )
        assert isinstance(chat_response, StreamingAgentChatResponse)
        return chat_response
class OpenAIAgent(BaseOpenAIAgent):
    """OpenAI function-calling agent with a fixed, pre-registered tool list."""

    def __init__(
        self,
        tools: List[BaseTool],
        llm: OpenAI,
        memory: BaseMemory,
        prefix_messages: List[ChatMessage],
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        callback_manager: Optional[CallbackManager] = None,
    ) -> None:
        super().__init__(
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
        )
        # Static tool list; returned unconditionally by _get_tools.
        self._tools = tools

    @classmethod
    def from_tools(
        cls,
        tools: Optional[List[BaseTool]] = None,
        llm: Optional[LLM] = None,
        chat_history: Optional[List[ChatMessage]] = None,
        memory: Optional[BaseMemory] = None,
        memory_cls: Type[BaseMemory] = ChatMemoryBuffer,
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        callback_manager: Optional[CallbackManager] = None,
        system_prompt: Optional[str] = None,
        prefix_messages: Optional[List[ChatMessage]] = None,
        **kwargs: Any,
    ) -> "OpenAIAgent":
        """Convenience constructor that validates the LLM and builds memory.

        Raises:
            ValueError: if the LLM is not an OpenAI instance, does not support
                function calling, or both system_prompt and prefix_messages
                are given.
        """
        tools = tools or []
        chat_history = chat_history or []
        llm = llm or OpenAI(model=DEFAULT_MODEL_NAME)
        if not isinstance(llm, OpenAI):
            raise ValueError("llm must be a OpenAI instance")
        if callback_manager is not None:
            llm.callback_manager = callback_manager
        memory = memory or memory_cls.from_defaults(chat_history, llm=llm)
        if not is_function_calling_model(llm.model):
            raise ValueError(
                f"Model name {llm.model} does not support function calling API. "
            )
        if system_prompt is not None:
            if prefix_messages is not None:
                raise ValueError(
                    "Cannot specify both system_prompt and prefix_messages"
                )
            # A system prompt is just a single system-role prefix message.
            prefix_messages = [ChatMessage(content=system_prompt, role="system")]
        prefix_messages = prefix_messages or []
        return cls(
            tools=tools,
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
        )

    def _get_tools(self, message: str) -> List[BaseTool]:
        """Get tools."""
        return self._tools
class RetrieverOpenAIAgent(BaseOpenAIAgent):
    """Retriever OpenAI Agent.

    This agent specifically performs retrieval on top of functions
    during query-time.

    NOTE: this is a beta feature, function interfaces might change.
    NOTE: this is also a too generally named, a better name is
    FunctionRetrieverOpenAIAgent

    TODO: add a native OpenAI Tool Index.
    """

    def __init__(
        self,
        retriever: BaseRetriever,
        node_to_tool_fn: Callable[[BaseNode], BaseTool],
        llm: OpenAI,
        memory: BaseMemory,
        prefix_messages: List[ChatMessage],
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        callback_manager: Optional[CallbackManager] = None,
    ) -> None:
        super().__init__(
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
        )
        # Retriever supplies candidate nodes; node_to_tool_fn converts each
        # retrieved node into a callable tool at query time.
        self._retriever = retriever
        self._node_to_tool_fn = node_to_tool_fn

    @classmethod
    def from_retriever(
        cls,
        retriever: BaseRetriever,
        node_to_tool_fn: Callable[[BaseNode], BaseTool],
        llm: Optional[OpenAI] = None,
        chat_history: Optional[List[ChatMessage]] = None,
        memory: Optional[BaseMemory] = None,
        memory_cls: Type[BaseMemory] = ChatMemoryBuffer,
        verbose: bool = False,
        max_function_calls: int = DEFAULT_MAX_FUNCTION_CALLS,
        callback_manager: Optional[CallbackManager] = None,
        system_prompt: Optional[str] = None,
        prefix_messages: Optional[List[ChatMessage]] = None,
    ) -> "RetrieverOpenAIAgent":
        """Convenience constructor mirroring ``OpenAIAgent.from_tools``.

        Raises:
            ValueError: if the LLM is not an OpenAI function-calling model, or
                both system_prompt and prefix_messages are given.
        """
        chat_history = chat_history or []
        llm = llm or OpenAI(model=DEFAULT_MODEL_NAME)
        if not isinstance(llm, OpenAI):
            raise ValueError("llm must be a OpenAI instance")
        if callback_manager is not None:
            llm.callback_manager = callback_manager
        memory = memory or memory_cls.from_defaults(chat_history, llm=llm)
        if not is_function_calling_model(llm.model):
            raise ValueError(
                f"Model name {llm.model} does not support function calling API. "
            )
        if system_prompt is not None:
            if prefix_messages is not None:
                raise ValueError(
                    "Cannot specify both system_prompt and prefix_messages"
                )
            prefix_messages = [ChatMessage(content=system_prompt, role="system")]
        prefix_messages = prefix_messages or []
        return cls(
            retriever=retriever,
            node_to_tool_fn=node_to_tool_fn,
            llm=llm,
            memory=memory,
            prefix_messages=prefix_messages,
            verbose=verbose,
            max_function_calls=max_function_calls,
            callback_manager=callback_manager,
        )

    def _get_tools(self, message: str) -> List[BaseTool]:
        """Retrieve nodes relevant to *message* and convert each into a tool."""
        retrieved_nodes_w_scores: List[NodeWithScore] = self._retriever.retrieve(
            message
        )
        retrieved_nodes = [node.node for node in retrieved_nodes_w_scores]
        retrieved_tools: List[BaseTool] = [
            self._node_to_tool_fn(n) for n in retrieved_nodes
        ]
        return retrieved_tools
/fs-django-mdeditor-0.1.24.tar.gz/fs-django-mdeditor-0.1.24/mdeditor/static/mdeditor/js/lib/codemirror/mode/python/python.js |
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
function wordRegexp(words) {
  // Build /^((w1)|(w2)|...)\b/ so any listed word matches at the stream start
  // only when followed by a word boundary.
  var alternatives = "((" + words.join(")|(") + "))";
  return new RegExp("^" + alternatives + "\\b");
}
// Word-shaped operators tokenized as operators rather than keywords.
var wordOperators = wordRegexp(["and", "or", "not", "is"]);
// Keywords common to Python 2 and 3; version-specific ones are added below.
var commonKeywords = ["as", "assert", "break", "class", "continue",
                      "def", "del", "elif", "else", "except", "finally",
                      "for", "from", "global", "if", "import",
                      "lambda", "pass", "raise", "return",
                      "try", "while", "with", "yield", "in"];
// Built-in names common to Python 2 and 3.
var commonBuiltins = ["abs", "all", "any", "bin", "bool", "bytearray", "callable", "chr",
                      "classmethod", "compile", "complex", "delattr", "dict", "dir", "divmod",
                      "enumerate", "eval", "filter", "float", "format", "frozenset",
                      "getattr", "globals", "hasattr", "hash", "help", "hex", "id",
                      "input", "int", "isinstance", "issubclass", "iter", "len",
                      "list", "locals", "map", "max", "memoryview", "min", "next",
                      "object", "oct", "open", "ord", "pow", "property", "range",
                      "repr", "reversed", "round", "set", "setattr", "slice",
                      "sorted", "staticmethod", "str", "sum", "super", "tuple",
                      "type", "vars", "zip", "__import__", "NotImplemented",
                      "Ellipsis", "__debug__"];
// Names that exist only in Python 2 / only in Python 3.
var py2 = {builtins: ["apply", "basestring", "buffer", "cmp", "coerce", "execfile",
                      "file", "intern", "long", "raw_input", "reduce", "reload",
                      "unichr", "unicode", "xrange", "False", "True", "None"],
           keywords: ["exec", "print"]};
var py3 = {builtins: ["ascii", "bytes", "exec", "print"],
           keywords: ["nonlocal", "False", "True", "None"]};

// Expose the word list for autocomplete (show-hint addon).
CodeMirror.registerHelper("hintWords", "python", commonKeywords.concat(commonBuiltins));
function top(state) {
  // Innermost (most recently pushed) scope on the state's scope stack.
  var scopes = state.scopes;
  return scopes[scopes.length - 1];
}
CodeMirror.defineMode("python", function(conf, parserConf) {
  var ERRORCLASS = "error";

  // Token-class regexes; all overridable through parserConf.
  var singleDelimiters = parserConf.singleDelimiters || new RegExp("^[\\(\\)\\[\\]\\{\\}@,:`=;\\.]");
  var doubleOperators = parserConf.doubleOperators || new RegExp("^((==)|(!=)|(<=)|(>=)|(<>)|(<<)|(>>)|(//)|(\\*\\*))");
  var doubleDelimiters = parserConf.doubleDelimiters || new RegExp("^((\\+=)|(\\-=)|(\\*=)|(%=)|(/=)|(&=)|(\\|=)|(\\^=))");
  var tripleDelimiters = parserConf.tripleDelimiters || new RegExp("^((//=)|(>>=)|(<<=)|(\\*\\*=))");

  if (parserConf.version && parseInt(parserConf.version, 10) == 3){
    // since http://legacy.python.org/dev/peps/pep-0465/ @ is also an operator
    var singleOperators = parserConf.singleOperators || new RegExp("^[\\+\\-\\*/%&|\\^~<>!@]");
    // Python 3 identifiers may contain non-ASCII letters.
    var identifiers = parserConf.identifiers|| new RegExp("^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*");
  } else {
    var singleOperators = parserConf.singleOperators || new RegExp("^[\\+\\-\\*/%&|\\^~<>!]");
    var identifiers = parserConf.identifiers|| new RegExp("^[_A-Za-z][_A-Za-z0-9]*");
  }

  // Indent added for continuation lines inside brackets.
  var hangingIndent = parserConf.hangingIndent || conf.indentUnit;

  // Start from the common word lists, then layer on extras and the
  // version-specific names.
  var myKeywords = commonKeywords, myBuiltins = commonBuiltins;
  if(parserConf.extra_keywords != undefined){
    myKeywords = myKeywords.concat(parserConf.extra_keywords);
  }
  if(parserConf.extra_builtins != undefined){
    myBuiltins = myBuiltins.concat(parserConf.extra_builtins);
  }
  if (parserConf.version && parseInt(parserConf.version, 10) == 3) {
    myKeywords = myKeywords.concat(py3.keywords);
    myBuiltins = myBuiltins.concat(py3.builtins);
    // py3 string prefixes: r, b, br (case-insensitive), then a quote.
    var stringPrefixes = new RegExp("^(([rb]|(br))?('{3}|\"{3}|['\"]))", "i");
  } else {
    myKeywords = myKeywords.concat(py2.keywords);
    myBuiltins = myBuiltins.concat(py2.builtins);
    // py2 additionally allows u and ur prefixes.
    var stringPrefixes = new RegExp("^(([rub]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i");
  }
  var keywords = wordRegexp(myKeywords);
  var builtins = wordRegexp(myBuiltins);

  // tokenizers
function tokenBase(stream, state) {
// Handle scope changes
if (stream.sol() && top(state).type == "py") {
var scopeOffset = top(state).offset;
if (stream.eatSpace()) {
var lineOffset = stream.indentation();
if (lineOffset > scopeOffset)
pushScope(stream, state, "py");
else if (lineOffset < scopeOffset && dedent(stream, state))
state.errorToken = true;
return null;
} else {
var style = tokenBaseInner(stream, state);
if (scopeOffset > 0 && dedent(stream, state))
style += " " + ERRORCLASS;
return style;
}
}
return tokenBaseInner(stream, state);
}
function tokenBaseInner(stream, state) {
if (stream.eatSpace()) return null;
var ch = stream.peek();
// Handle Comments
if (ch == "#") {
stream.skipToEnd();
return "comment";
}
// Handle Number Literals
if (stream.match(/^[0-9\.]/, false)) {
var floatLiteral = false;
// Floats
if (stream.match(/^\d*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; }
if (stream.match(/^\d+\.\d*/)) { floatLiteral = true; }
if (stream.match(/^\.\d+/)) { floatLiteral = true; }
if (floatLiteral) {
// Float literals may be "imaginary"
stream.eat(/J/i);
return "number";
}
// Integers
var intLiteral = false;
// Hex
if (stream.match(/^0x[0-9a-f]+/i)) intLiteral = true;
// Binary
if (stream.match(/^0b[01]+/i)) intLiteral = true;
// Octal
if (stream.match(/^0o[0-7]+/i)) intLiteral = true;
// Decimal
if (stream.match(/^[1-9]\d*(e[\+\-]?\d+)?/)) {
// Decimal literals may be "imaginary"
stream.eat(/J/i);
// TODO - Can you have imaginary longs?
intLiteral = true;
}
// Zero by itself with no other piece of number.
if (stream.match(/^0(?![\dx])/i)) intLiteral = true;
if (intLiteral) {
// Integer literals may be "long"
stream.eat(/L/i);
return "number";
}
}
// Handle Strings
if (stream.match(stringPrefixes)) {
state.tokenize = tokenStringFactory(stream.current());
return state.tokenize(stream, state);
}
// Handle operators and Delimiters
if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters))
return null;
if (stream.match(doubleOperators)
|| stream.match(singleOperators)
|| stream.match(wordOperators))
return "operator";
if (stream.match(singleDelimiters))
return null;
if (stream.match(keywords))
return "keyword";
if (stream.match(builtins))
return "builtin";
if (stream.match(/^(self|cls)\b/))
return "variable-2";
if (stream.match(identifiers)) {
if (state.lastToken == "def" || state.lastToken == "class")
return "def";
return "variable";
}
// Handle non-detected items
stream.next();
return ERRORCLASS;
}
function tokenStringFactory(delimiter) {
    // Build a tokenizer for one specific string literal. Strip any r/u/b
    // prefix characters (any case) so only the quote characters remain;
    // one remaining quote char means a single-line string, more means
    // a triple-quoted (multi-line) string.
    while ("rub".indexOf(delimiter.charAt(0).toLowerCase()) >= 0)
        delimiter = delimiter.substr(1);
    var singleline = delimiter.length == 1;
    var OUTCLASS = "string";
    function tokenString(stream, state) {
        while (!stream.eol()) {
            stream.eatWhile(/[^'"\\]/);
            if (stream.eat("\\")) {
                // consume the escaped character; a backslash at end of a
                // single-line string continues it on the next line
                stream.next();
                if (singleline && stream.eol())
                    return OUTCLASS;
            } else if (stream.match(delimiter)) {
                // closing delimiter found: hand control back to tokenBase
                state.tokenize = tokenBase;
                return OUTCLASS;
            } else {
                // a quote char that is not our delimiter is literal content
                stream.eat(/['"]/);
            }
        }
        if (singleline) {
            if (parserConf.singleLineStringErrors)
                return ERRORCLASS;
            else
                state.tokenize = tokenBase;
        }
        return OUTCLASS;
    }
    // flag checked by the mode's indent() to return CodeMirror.Pass
    tokenString.isString = true;
    return tokenString;
}
function pushScope(stream, state, type) {
    // Open a new scope: "py" for an indented statement block, or a closing
    // bracket character for a bracketed expression.
    var isBlock = (type == "py");
    if (isBlock) {
        // An indented block can only nest inside another indented block, so
        // discard any bracket scopes still open on the stack.
        while (top(state).type != "py") state.scopes.pop();
    }
    var indentStep = isBlock ? conf.indentUnit : hangingIndent;
    var newOffset = top(state).offset + indentStep;
    var alignColumn = null;
    // For bracket scopes, align continuation lines to the first token after
    // the bracket — unless the rest of the line is only whitespace/comment.
    if (!isBlock && !stream.match(/^(\s|#.*)*$/, false))
        alignColumn = stream.column() + 1;
    state.scopes.push({offset: newOffset, type: type, align: alignColumn});
}
function dedent(stream, state) {
    // Pop indentation scopes until the top scope is no deeper than the
    // current line. Returns true on a dedent error: either we are still
    // inside an unclosed bracket scope, or the line's indentation does not
    // land exactly on a known indentation level.
    var lineIndent = stream.indentation();
    for (;;) {
        var scope = top(state);
        if (scope.offset <= lineIndent) return scope.offset != lineIndent;
        if (scope.type != "py") return true;
        state.scopes.pop();
    }
}
function tokenLexer(stream, state) {
    // Run the current tokenizer, then post-process the token for context
    // sensitive styling (dotted names, decorators) and scope tracking.
    var style = state.tokenize(stream, state);
    var current = stream.current();
    // Handle '.' connected identifiers
    if (current == ".") {
        style = stream.match(identifiers, false) ? null : ERRORCLASS;
        if (style == null && state.lastStyle == "meta") {
            // Apply 'meta' style to '.' connected identifiers when
            // appropriate.
            style = "meta";
        }
        return style;
    }
    // Handle decorators
    if (current == "@"){
        if(parserConf.version && parseInt(parserConf.version, 10) == 3){
            // in Python 3, a bare '@' not followed by an identifier is the
            // matrix-multiplication operator rather than an error
            return stream.match(identifiers, false) ? "meta" : "operator";
        } else {
            return stream.match(identifiers, false) ? "meta" : ERRORCLASS;
        }
    }
    if ((style == "variable" || style == "builtin")
        && state.lastStyle == "meta")
        style = "meta";
    // Handle scope changes.
    if (current == "pass" || current == "return")
        state.dedent += 1;
    if (current == "lambda") state.lambda = true;
    // a ':' outside a lambda and outside brackets opens an indented block
    if (current == ":" && !state.lambda && top(state).type == "py")
        pushScope(stream, state, "py");
    // map an opening bracket to its matching closer; the closer character is
    // what gets stored as the scope's type ("[({"[i] pairs with "])}"[i])
    var delimiter_index = current.length == 1 ? "[({".indexOf(current) : -1;
    if (delimiter_index != -1)
        pushScope(stream, state, "])}".slice(delimiter_index, delimiter_index+1));
    delimiter_index = "])}".indexOf(current);
    if (delimiter_index != -1) {
        // a closing bracket must match the innermost open scope
        if (top(state).type == current) state.scopes.pop();
        else return ERRORCLASS;
    }
    // at end of line, consume one pending dedent (from pass/return)
    if (state.dedent > 0 && stream.eol() && top(state).type == "py") {
        if (state.scopes.length > 1) state.scopes.pop();
        state.dedent -= 1;
    }
    return style;
}
// The object implementing CodeMirror's mode interface (startState / token /
// indent), returned by the mode factory.
var external = {
    startState: function(basecolumn) {
        return {
            tokenize: tokenBase,
            // scope stack; the bottom "py" scope anchors base indentation
            scopes: [{offset: basecolumn || 0, type: "py", align: null}],
            lastStyle: null,
            lastToken: null,
            lambda: false,
            dedent: 0
        };
    },
    token: function(stream, state) {
        // errorToken is a one-shot flag: style the next token as an error
        var addErr = state.errorToken;
        if (addErr) state.errorToken = false;
        var style = tokenLexer(stream, state);
        state.lastStyle = style;
        var current = stream.current();
        if (current && style)
            state.lastToken = current;
        // a lambda's scope suppression only lasts to the end of the line
        if (stream.eol() && state.lambda)
            state.lambda = false;
        return addErr ? style + " " + ERRORCLASS : style;
    },
    indent: function(state, textAfter) {
        // inside a multi-line string, defer to CodeMirror's default
        if (state.tokenize != tokenBase)
            return state.tokenize.isString ? CodeMirror.Pass : 0;
        var scope = top(state);
        // closing bracket lines align with the bracket scope they close
        var closing = textAfter && textAfter.charAt(0) == scope.type;
        if (scope.align != null)
            return scope.align - (closing ? 1 : 0);
        else if (closing && state.scopes.length > 1)
            return state.scopes[state.scopes.length - 2].offset;
        else
            return scope.offset;
    },
    lineComment: "#",
    fold: "indent"
};
return external;
});
CodeMirror.defineMIME("text/x-python", "python");
// helper: split a space-separated keyword string into an array
var words = function(str) { return str.split(" "); };
CodeMirror.defineMIME("text/x-cython", {
    name: "python",
    // NOTE: each concatenated fragment must end with a trailing space.
    // Without it, adjacent keywords fused into "exceptextern" and
    // "publicreadonly", so four cython keywords were never highlighted.
    extra_keywords: words("by cdef cimport cpdef ctypedef enum except " +
                          "extern gil include nogil property public " +
                          "readonly struct union DEF IF ELIF ELSE")
});
}); | PypiClean |
/text-tracker-1.0.1.tar.gz/text-tracker-1.0.1/text_tracker/text_tracker.py | def track(original_file, spoken_file):
    """
    Main function of the text-tracker package.
    :param original_file: file containing original text, with pages separated
    by lines, opened in 'r' mode
    :param spoken_file: file containing spoken text, with pages separated by
    lines, opened in 'r' mode
    :return: list of optimal indeces aligning the words in the spoken file to
    those in the original file
    """
    # get data (spoken words absent from the original page are dropped here;
    # their original positions come back in nan_indeces)
    original, spoken, nan_indeces = get_data(original_file, spoken_file)
    out = []
    for page, o in enumerate(original):
        # get an array of possible indeces for each word
        index_array = generate_match_indeces(
            generate_index_list(o, spoken[page]))
        # get a corresponding array of step values
        step_array = [generate_match_steps(row) for row in index_array]
        # find minimum sum of steps and return corresponding index vector
        # (i.e. the alignment with the least jumping around in the original)
        sums = [sum([abs(x) for x in row]) for row in step_array]
        out.append(index_array[sums.index(min(sums))])
    # reinsert NaN placeholders where unmatched spoken words were dropped;
    # inserting at increasing original indices restores the original layout
    for row_number, row in enumerate(out):
        for nan_index in nan_indeces[row_number]:
            row.insert(nan_index, float('NaN'))
    return out
def get_data(original_file, spoken_file):
    """
    Loads data given files.
    :param original_file: file with the written text, opened in 'r' mode
    :param spoken_file: file with the spoken_text text, opened in 'r' mode
    :return: tuple (original_text, spoken_text, nan_indeces): the first two
        are lists (one per page/line) of lowercase word lists; spoken words
        that do not appear on the matching original page are dropped from
        spoken_text, and their original positions are recorded per page in
        nan_indeces so track() can reinsert NaN placeholders later
    """
    # define original_text list
    original_text = [line.lower().split() for line in original_file]
    # define spoken_text list
    spoken_text = [line.lower().split() for line in spoken_file]
    # for each page, drop spoken words that don't occur in the original page
    # and remember their original indices.
    # NOTE: the previous implementation popped elements from the list while
    # enumerating it, which skipped the word immediately after each removal
    # and recorded indices that had already shifted.
    nan_indeces = []
    for row_number, word_array in enumerate(spoken_text):
        page_words = set(original_text[row_number])
        nan_row = [index for index, word in enumerate(word_array)
                   if word not in page_words]
        # replace in place so spoken_text keeps one filtered list per page
        spoken_text[row_number] = [word for word in word_array
                                   if word in page_words]
        nan_indeces.append(nan_row)
    return original_text, spoken_text, nan_indeces
# example match_indeces: [0, 1, 2, 1, 3, 4]
# example return value: [0, 0, -2, 1, 0]
def generate_match_steps(match_indeces):
    """
    Create step vector: for each consecutive pair of indeces, the distance
    moved minus one (so staying adjacent scores zero).
    :param match_indeces: list of possible indeces
    :return: list of step sizes
    """
    steps = []
    for previous, current in zip(match_indeces, match_indeces[1:]):
        steps.append(current - previous - 1)
    return steps
# example spoken string: "One day a bird bird met hippo."
# example actual string: "One day a bird met a hippo."
# example index_list: [[0], [1], [2,5], [3], [3], [4], [6]]
# example return value: [[0, 1, 2, 3, 3, 4, 6], [0, 1, 5, 3, 3, 4, 6]
def generate_match_indeces(index_list):  # "The Permutator"
    """
    Generate every possible list of indeces: the Cartesian product of the
    per-word candidate lists, with the leftmost word varying slowest — the
    same ordering the previous hand-rolled divide/modulo loop produced.
    :param index_list: list of lists of possible indeces
    :return: array of possible index lists
    """
    # stdlib Cartesian product replaces the manual permutation-counting
    # arithmetic; product() iterates lazily in C and yields an identical
    # ordering (rightmost element varies fastest)
    from itertools import product
    return [list(combination) for combination in product(*index_list)]
# example spoken_string_list: ['one','day','a','bird','bird','met','hippo']
# example original_string_list: ['one','day','a','bird','met','a','hippo']
# example return value: [[0], [1], [2,5], [3], [3], [4], [6]]
def generate_index_list(original_string_list, spoken_string_list):
    """
    For each spoken word, collect every position where it occurs in the
    original text. The output of this goes to generate_match_indeces to be
    "permutated".
    :param original_string_list: list of words
    :param spoken_string_list: list of words
    :return: index list (one sub-list of candidate positions per spoken word)
    """
    index_list = []
    for word in spoken_string_list:
        matches = []
        for position, candidate in enumerate(original_string_list):
            if candidate == word:
                matches.append(position)
        index_list.append(matches)
    return index_list
/ploomber-engine-0.0.31.tar.gz/ploomber-engine-0.0.31/src/ploomber_engine/engine.py | import warnings
import nbformat
from papermill.engines import Engine
from papermill.utils import merge_kwargs, remove_args
from papermill.log import logger
from papermill.clientwrap import PapermillNotebookClient
from ploomber_engine.papermill import PapermillPloomberNotebookClient
from ploomber_engine.ipython import PloomberManagedClient
from ploomber_engine._telemetry import telemetry
class DebugEngine(Engine):
    """An engine that starts a debugging session once the notebook fails

    Works by prepending a cell that runs the ``%pdb on`` magic, so a failing
    cell drops into IPython's post-mortem debugger.
    """

    @classmethod
    @telemetry.log_call("debug-execute-managed-nb")
    def execute_managed_notebook(
        cls,
        nb_man,
        kernel_name,
        log_output=False,
        stdout_file=None,
        stderr_file=None,
        start_timeout=60,
        execution_timeout=None,
        **kwargs,
    ):
        # Exclude parameters that named differently downstream
        safe_kwargs = remove_args(["timeout", "startup_timeout"], **kwargs)

        # Nicely handle preprocessor arguments prioritizing values set by
        # engine
        final_kwargs = merge_kwargs(
            safe_kwargs,
            timeout=execution_timeout if execution_timeout else kwargs.get("timeout"),
            startup_timeout=start_timeout,
            kernel_name=kernel_name,
            log=logger,
            log_output=log_output,
            stdout_file=stdout_file,
            stderr_file=stderr_file,
        )

        # prepend a cell enabling the post-mortem debugger so it takes effect
        # before any user code runs; build it with the notebook's own
        # nbformat version
        cell = nbformat.versions[nb_man.nb["nbformat"]].new_code_cell(
            source="%pdb on", metadata=dict(tags=[], papermill=dict())
        )
        nb_man.nb.cells.insert(0, cell)

        # use our Papermill client
        return PapermillPloomberNotebookClient(nb_man, **final_kwargs).execute()
class DebugLaterEngine(Engine):
    """An engine that stores the traceback object for later debugging

    Prepends a cell that patches IPython via ``debuglater`` so the failure
    state can be inspected after the run (see the ``debuglater`` package for
    dump semantics).
    """

    @classmethod
    @telemetry.log_call("debuglater-execute-managed-nb")
    def execute_managed_notebook(
        cls,
        nb_man,
        kernel_name,
        log_output=False,
        stdout_file=None,
        stderr_file=None,
        start_timeout=60,
        execution_timeout=None,
        **kwargs,
    ):
        # Exclude parameters that named differently downstream
        safe_kwargs = remove_args(["timeout", "startup_timeout"], **kwargs)

        # Nicely handle preprocessor arguments prioritizing values set by
        # engine
        final_kwargs = merge_kwargs(
            safe_kwargs,
            timeout=execution_timeout if execution_timeout else kwargs.get("timeout"),
            startup_timeout=start_timeout,
            kernel_name=kernel_name,
            log=logger,
            log_output=log_output,
            stdout_file=stdout_file,
            stderr_file=stderr_file,
        )

        # destination for the serialized failure state; read from the loose
        # engine kwargs rather than a named parameter
        path_to_dump = kwargs.get("path_to_dump")

        if path_to_dump is None:
            warnings.warn(
                "Did not pass path_to_dump to "
                "DebugLaterEngine.execute_managed_notebook, "
                "the default value will be used"
            )
            # injected source is kept flush-left: it becomes the literal
            # text of a notebook code cell
            source = """
from debuglater import patch_ipython
patch_ipython()
"""
        else:
            source = f"""
from debuglater import patch_ipython
patch_ipython({path_to_dump!r})
"""

        # prepend the patch cell so it runs before any user code
        cell = nbformat.versions[nb_man.nb["nbformat"]].new_code_cell(
            source=source, metadata=dict(tags=[], papermill=dict())
        )
        nb_man.nb.cells.insert(0, cell)

        return PapermillNotebookClient(nb_man, **final_kwargs).execute()
class ProfilingEngine(Engine):
    """
    An engine that runs the notebook in the current process and can be used
    for resource usage profiling
    """

    @classmethod
    @telemetry.log_call("embedded-execute-managed-nb")
    def execute_managed_notebook(
        cls,
        nb_man,
        kernel_name,
        log_output=False,
        stdout_file=None,
        stderr_file=None,
        start_timeout=60,
        execution_timeout=None,
        **kwargs,
    ):
        # The keyword arguments exist only for interface compatibility with
        # the other papermill engines; they are ignored here because
        # execution happens in-process via PloomberManagedClient rather than
        # through a Jupyter kernel.
        return PloomberManagedClient(nb_man).execute()
/accelbyte_py_sdk-0.48.0.tar.gz/accelbyte_py_sdk-0.48.0/accelbyte_py_sdk/api/platform/models/achievement_info.py |
# template file: ags_py_codegen
# AccelByte Gaming Services Platform Service (4.34.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
class AchievementInfo(Model):
    """Achievement info (AchievementInfo)

    Properties:
        id_: (id) OPTIONAL str
        name: (name) OPTIONAL str
        progress_state: (progressState) OPTIONAL str
        progression: (progression) OPTIONAL Dict[str, Any]
        service_config_id: (serviceConfigId) OPTIONAL str
    """

    # region fields

    id_: str  # OPTIONAL
    name: str  # OPTIONAL
    progress_state: str  # OPTIONAL
    progression: Dict[str, Any]  # OPTIONAL
    service_config_id: str  # OPTIONAL

    # endregion fields

    # region with_x methods

    def with_id(self, value: str) -> AchievementInfo:
        """Set the achievement id and return self (fluent setter)."""
        self.id_ = value
        return self

    def with_name(self, value: str) -> AchievementInfo:
        """Set the achievement name and return self (fluent setter)."""
        self.name = value
        return self

    def with_progress_state(self, value: str) -> AchievementInfo:
        """Set the progress state and return self (fluent setter)."""
        self.progress_state = value
        return self

    def with_progression(self, value: Dict[str, Any]) -> AchievementInfo:
        """Set the progression mapping and return self (fluent setter)."""
        self.progression = value
        return self

    def with_service_config_id(self, value: str) -> AchievementInfo:
        """Set the service config id and return self (fluent setter)."""
        self.service_config_id = value
        return self

    # endregion with_x methods

    # region to methods

    def to_dict(self, include_empty: bool = False) -> dict:
        """Serialize to a wire-format dict (camelCase keys).

        Unset attributes are omitted unless ``include_empty`` is True, in
        which case empty defaults are written instead.
        """
        result: dict = {}
        if hasattr(self, "id_"):
            result["id"] = str(self.id_)
        elif include_empty:
            result["id"] = ""
        if hasattr(self, "name"):
            result["name"] = str(self.name)
        elif include_empty:
            result["name"] = ""
        if hasattr(self, "progress_state"):
            result["progressState"] = str(self.progress_state)
        elif include_empty:
            result["progressState"] = ""
        if hasattr(self, "progression"):
            result["progression"] = {str(k0): v0 for k0, v0 in self.progression.items()}
        elif include_empty:
            result["progression"] = {}
        if hasattr(self, "service_config_id"):
            result["serviceConfigId"] = str(self.service_config_id)
        elif include_empty:
            result["serviceConfigId"] = ""
        return result

    # endregion to methods

    # region static methods

    @classmethod
    def create(
        cls,
        id_: Optional[str] = None,
        name: Optional[str] = None,
        progress_state: Optional[str] = None,
        progression: Optional[Dict[str, Any]] = None,
        service_config_id: Optional[str] = None,
        **kwargs,
    ) -> AchievementInfo:
        """Create an instance, setting only the attributes passed as non-None."""
        instance = cls()
        if id_ is not None:
            instance.id_ = id_
        if name is not None:
            instance.name = name
        if progress_state is not None:
            instance.progress_state = progress_state
        if progression is not None:
            instance.progression = progression
        if service_config_id is not None:
            instance.service_config_id = service_config_id
        return instance

    @classmethod
    def create_from_dict(
        cls, dict_: dict, include_empty: bool = False
    ) -> AchievementInfo:
        """Deserialize from a wire-format dict (camelCase keys)."""
        instance = cls()
        if not dict_:
            return instance
        if "id" in dict_ and dict_["id"] is not None:
            instance.id_ = str(dict_["id"])
        elif include_empty:
            instance.id_ = ""
        if "name" in dict_ and dict_["name"] is not None:
            instance.name = str(dict_["name"])
        elif include_empty:
            instance.name = ""
        if "progressState" in dict_ and dict_["progressState"] is not None:
            instance.progress_state = str(dict_["progressState"])
        elif include_empty:
            instance.progress_state = ""
        if "progression" in dict_ and dict_["progression"] is not None:
            instance.progression = {
                str(k0): v0 for k0, v0 in dict_["progression"].items()
            }
        elif include_empty:
            instance.progression = {}
        if "serviceConfigId" in dict_ and dict_["serviceConfigId"] is not None:
            instance.service_config_id = str(dict_["serviceConfigId"])
        elif include_empty:
            instance.service_config_id = ""
        return instance

    @classmethod
    def create_many_from_dict(
        cls, dict_: dict, include_empty: bool = False
    ) -> Dict[str, AchievementInfo]:
        """Deserialize a mapping of key -> wire-format dict.

        BUGFIX: iterate ``dict_.items()`` — iterating the dict directly
        yields only keys, so unpacking into ``(k, v)`` raised ValueError for
        any non-2-character key.
        """
        return (
            {
                k: cls.create_from_dict(v, include_empty=include_empty)
                for k, v in dict_.items()
            }
            if dict_
            else {}
        )

    @classmethod
    def create_many_from_list(
        cls, list_: list, include_empty: bool = False
    ) -> List[AchievementInfo]:
        """Deserialize a list of wire-format dicts."""
        return (
            [cls.create_from_dict(i, include_empty=include_empty) for i in list_]
            if list_
            else []
        )

    @classmethod
    def create_from_any(
        cls, any_: any, include_empty: bool = False, many: bool = False
    ) -> Union[AchievementInfo, List[AchievementInfo], Dict[Any, AchievementInfo]]:
        """Deserialize from a dict or, when ``many`` is True, a dict or list of dicts."""
        if many:
            if isinstance(any_, dict):
                return cls.create_many_from_dict(any_, include_empty=include_empty)
            elif isinstance(any_, list):
                return cls.create_many_from_list(any_, include_empty=include_empty)
            else:
                raise ValueError(
                    "any_ must be a dict or a list when many is True"
                )
        else:
            return cls.create_from_dict(any_, include_empty=include_empty)

    @staticmethod
    def get_field_info() -> Dict[str, str]:
        """Map wire-format (camelCase) field names to attribute names."""
        return {
            "id": "id_",
            "name": "name",
            "progressState": "progress_state",
            "progression": "progression",
            "serviceConfigId": "service_config_id",
        }

    @staticmethod
    def get_required_map() -> Dict[str, bool]:
        """Map wire-format field names to whether they are required (all optional)."""
        return {
            "id": False,
            "name": False,
            "progressState": False,
            "progression": False,
            "serviceConfigId": False,
        }

    # endregion static methods
/fastapi-0.103.0.tar.gz/fastapi-0.103.0/docs/em/docs/how-to/conditional-openapi.md | # 🎲 🗄
🚥 👆 💪, 👆 💪 ⚙️ ⚒ & 🌐 🔢 🔗 🗄 ✔ ⚓️ 🔛 🌐, & ❎ ⚫️ 🍕.
## 🔃 💂♂, 🔗, & 🩺
🕵♂ 👆 🧾 👩💻 🔢 🏭 *🚫🔜 🚫* 🌌 🛡 👆 🛠️.
👈 🚫 🚮 🙆 ➕ 💂♂ 👆 🛠️, *➡ 🛠️* 🔜 💪 🌐❔ 👫.
🚥 📤 💂♂ ⚠ 👆 📟, ⚫️ 🔜 🔀.
🕵♂ 🧾 ⚒ ⚫️ 🌅 ⚠ 🤔 ❔ 🔗 ⏮️ 👆 🛠️, & 💪 ⚒ ⚫️ 🌅 ⚠ 👆 ℹ ⚫️ 🏭. ⚫️ 💪 🤔 🎯 📨 <a href="https://en.wikipedia.org/wiki/Security_through_obscurity" class="external-link" target="_blank">💂♂ 🔘 🌌</a>.
🚥 👆 💚 🔐 👆 🛠️, 📤 📚 👍 👜 👆 💪, 🖼:
* ⚒ 💭 👆 ✔️ 👍 🔬 Pydantic 🏷 👆 📨 💪 & 📨.
* 🔗 🙆 ✔ ✔ & 🔑 ⚙️ 🔗.
* 🙅 🏪 🔢 🔐, 🕴 🔐#️⃣.
* 🛠️ & ⚙️ 👍-💭 🔐 🧰, 💖 🇸🇲 & 🥙 🤝, ♒️.
* 🚮 🌅 🧽 ✔ 🎛 ⏮️ Oauth2️⃣ ↔ 🌐❔ 💪.
* ...♒️.
👐, 👆 5️⃣📆 ✔️ 📶 🎯 ⚙️ 💼 🌐❔ 👆 🤙 💪 ❎ 🛠️ 🩺 🌐 (✅ 🏭) ⚖️ ⚓️ 🔛 📳 ⚪️➡️ 🌐 🔢.
## 🎲 🗄 ⚪️➡️ ⚒ & 🇨🇻 {
👆 💪 💪 ⚙️ 🎏 Pydantic ⚒ 🔗 👆 🏗 🗄 & 🩺 ⚜.
🖼:
```Python hl_lines="6 11"
{!../../../docs_src/conditional_openapi/tutorial001.py!}
```
📥 👥 📣 ⚒ `openapi_url` ⏮️ 🎏 🔢 `"/openapi.json"`.
& ⤴️ 👥 ⚙️ ⚫️ 🕐❔ 🏗 `FastAPI` 📱.
⤴️ 👆 💪 ❎ 🗄 (✅ 🎚 🩺) ⚒ 🌐 🔢 `OPENAPI_URL` 🛁 🎻, 💖:
<div class="termy">
```console
$ OPENAPI_URL= uvicorn main:app
<span style="color: green;">INFO</span>: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)
```
</div>
⤴️ 🚥 👆 🚶 📛 `/openapi.json`, `/docs`, ⚖️ `/redoc` 👆 🔜 🤚 `404 Not Found` ❌ 💖:
```JSON
{
"detail": "Not Found"
}
```
| PypiClean |
/scikit_allel-1.3.6-cp311-cp311-macosx_10_9_x86_64.whl/allel/stats/selection.py | import multiprocessing
from multiprocessing.pool import ThreadPool
import numpy as np
from allel.compat import memoryview_safe
from allel.util import asarray_ndim, check_dim0_aligned, check_integer_dtype
from allel.model.ndarray import HaplotypeArray, AlleleCountsArray
from allel.stats.window import moving_statistic, index_windows
from allel.stats.diversity import moving_tajima_d
from allel.stats.fst import moving_hudson_fst
from allel.opt.stats import pairwise_shared_prefix_lengths, paint_shared_prefixes, \
ihh01_scan, ihh_scan, nsl01_scan, nsl_scan
def ehh_decay(h, truncate=False):
    """Compute the decay of extended haplotype homozygosity (EHH)
    moving away from the first variant.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    truncate : bool, optional
        If True, the return array will exclude trailing zeros.

    Returns
    -------
    ehh : ndarray, float, shape (n_variants, )
        EHH at successive variants from the first variant.

    """

    # check inputs
    # N.B., ensure int8 so we can use cython optimisation
    h = HaplotypeArray(np.asarray(h), copy=False)
    if h.min() < 0:
        raise NotImplementedError('missing calls are not supported')

    # initialise
    n_variants = h.n_variants  # number of rows, i.e., variants
    n_haplotypes = h.n_haplotypes  # number of columns, i.e., haplotypes
    n_pairs = (n_haplotypes * (n_haplotypes - 1)) // 2

    # compute the shared prefix length between all pairs of haplotypes
    spl = pairwise_shared_prefix_lengths(memoryview_safe(np.asarray(h)))

    # compute EHH by counting the number of shared prefixes extending beyond
    # each variant: bincount gives the count of pairs per exact prefix
    # length, and the reversed cumulative sum converts that into "pairs with
    # prefix length >= i" for each variant i
    minlength = None if truncate else n_variants + 1
    b = np.bincount(spl, minlength=minlength)
    c = np.cumsum(b[::-1])[:-1]
    ehh = (c / n_pairs)[::-1]

    return ehh
def voight_painting(h):
    """Paint haplotypes, assigning a unique integer to each shared haplotype
    prefix.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.

    Returns
    -------
    painting : ndarray, int, shape (n_variants, n_haplotypes)
        Painting array.
    indices : ndarray, int, shape (n_hapotypes,)
        Haplotype indices after sorting by prefix.

    """

    # check inputs
    # N.B., ensure int8 so we can use cython optimisation
    h = HaplotypeArray(np.asarray(h), copy=False)
    if h.max() > 1:
        raise NotImplementedError('only biallelic variants are supported')
    if h.min() < 0:
        raise NotImplementedError('missing calls are not supported')

    # sort by prefix, so haplotypes sharing a prefix become adjacent columns
    indices = h.prefix_argsort()
    h = np.take(h, indices, axis=1)

    # paint (cython helper; operates on the prefix-sorted array)
    painting = paint_shared_prefixes(memoryview_safe(np.asarray(h)))

    return painting, indices
def plot_voight_painting(painting, palette='colorblind', flank='right',
                         ax=None, height_factor=0.01):
    """Plot a painting of shared haplotype prefixes.

    Parameters
    ----------
    painting : array_like, int, shape (n_variants, n_haplotypes)
        Painting array.
    ax : axes, optional
        The axes on which to draw. If not provided, a new figure will be
        created.
    palette : string, optional
        A Seaborn palette name.
    flank : {'right', 'left'}, optional
        If left, painting will be reversed along first axis.
    height_factor : float, optional
        If no axes provided, determine height of figure by multiplying
        height of painting array by this number.

    Returns
    -------
    ax : axes

    """
    # plotting dependencies imported lazily so the stats module does not
    # require matplotlib/seaborn
    import seaborn as sns
    from matplotlib.colors import ListedColormap
    import matplotlib.pyplot as plt

    if flank == 'left':
        painting = painting[::-1]

    # one colour per painted prefix value (values start at 1)
    n_colors = painting.max()
    palette = sns.color_palette(palette, n_colors)
    # use white for singleton haplotypes
    cmap = ListedColormap(['white'] + palette)

    # setup axes
    if ax is None:
        w = plt.rcParams['figure.figsize'][0]
        h = height_factor*painting.shape[1]
        fig, ax = plt.subplots(figsize=(w, h))
        sns.despine(ax=ax, bottom=True, left=True)

    # transpose so variants run along the x axis, haplotypes along y
    ax.pcolormesh(painting.T, cmap=cmap)
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_xlim(0, painting.shape[0])
    ax.set_ylim(0, painting.shape[1])

    return ax
def fig_voight_painting(h, index=None, palette='colorblind',
                        height_factor=0.01, fig=None):
    """Make a figure of shared haplotype prefixes for both left and right
    flanks, centred on some variant of choice.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    index : int, optional
        Index of the variant within the haplotype array to centre on. If not
        provided, the middle variant will be used.
    palette : string, optional
        A Seaborn palette name.
    height_factor : float, optional
        If no axes provided, determine height of figure by multiplying
        height of painting array by this number.
    fig : figure
        The figure on which to draw. If not provided, a new figure will be
        created.

    Returns
    -------
    fig : figure

    Notes
    -----
    N.B., the ordering of haplotypes on the left and right flanks will be
    different. This means that haplotypes on the right flank **will not**
    correspond to haplotypes on the left flank at the same vertical position.

    """
    # plotting dependencies imported lazily
    import matplotlib.pyplot as plt
    from matplotlib.gridspec import GridSpec
    import seaborn as sns

    # check inputs
    h = asarray_ndim(h, 2)
    if index is None:
        # use midpoint
        index = h.shape[0] // 2

    # divide data into two flanks; the left flank is reversed so both
    # paintings extend away from the focal variant
    hl = h[:index+1][::-1]
    hr = h[index:]

    # paint both flanks (the sort-order arrays il/ir are not used here)
    pl, il = voight_painting(hl)
    pr, ir = voight_painting(hr)

    # compute ehh decay for both flanks
    el = ehh_decay(hl, truncate=False)
    er = ehh_decay(hr, truncate=False)

    # setup figure
    # fixed height for EHH decay subplot
    h_ehh = plt.rcParams['figure.figsize'][1] // 3
    # add height for paintings
    h_painting = height_factor*h.shape[1]
    if fig is None:
        w = plt.rcParams['figure.figsize'][0]
        # N.B., `h` is rebound from the haplotype array to the figure
        # height here; the array is no longer needed at this point
        h = h_ehh + h_painting
        fig = plt.figure(figsize=(w, h))

    # setup gridspec: paintings on top, EHH decay below; column widths
    # proportional to flank lengths
    gs = GridSpec(2, 2,
                  width_ratios=[hl.shape[0], hr.shape[0]],
                  height_ratios=[h_painting, h_ehh])

    # plot paintings
    ax = fig.add_subplot(gs[0, 0])
    sns.despine(ax=ax, left=True, bottom=True)
    plot_voight_painting(pl, palette=palette, flank='left', ax=ax)
    ax = fig.add_subplot(gs[0, 1])
    sns.despine(ax=ax, left=True, bottom=True)
    plot_voight_painting(pr, palette=palette, flank='right', ax=ax)

    # plot ehh (left panel x axis inverted so decay runs away from centre)
    ax = fig.add_subplot(gs[1, 0])
    sns.despine(ax=ax, offset=3)
    x = np.arange(el.shape[0])
    y = el
    ax.fill_between(x, 0, y)
    ax.set_ylim(0, 1)
    ax.set_yticks([0, 1])
    ax.set_ylabel('EHH')
    ax.invert_xaxis()
    ax = fig.add_subplot(gs[1, 1])
    sns.despine(ax=ax, left=True, right=False, offset=3)
    ax.yaxis.tick_right()
    ax.set_ylim(0, 1)
    ax.set_yticks([0, 1])
    x = np.arange(er.shape[0])
    y = er
    ax.fill_between(x, 0, y)

    # tidy up
    fig.tight_layout()

    return fig
# noinspection PyAugmentAssignment
def compute_ihh_gaps(pos, map_pos, gap_scale, max_gap, is_accessible):
    """Compute spacing between variants for integrating haplotype
    homozygosity.

    Parameters
    ----------
    pos : array_like, int, shape (n_variants,)
        Variant positions (physical distance).
    map_pos : array_like, float, shape (n_variants,)
        Variant positions (genetic map distance).
    gap_scale : int, optional
        Rescale distance between variants if gap is larger than this value.
    max_gap : int, optional
        Do not report scores if EHH spans a gap larger than this number of
        base pairs.
    is_accessible : array_like, bool, optional
        Genome accessibility array. If provided, distance between variants
        will be computed as the number of accessible bases between them.

    Returns
    -------
    gaps : ndarray, float, shape (n_variants - 1,)

    """

    # check inputs
    if map_pos is None:
        # integrate over physical distance
        map_pos = pos
    else:
        map_pos = asarray_ndim(map_pos, 1)
        check_dim0_aligned(pos, map_pos)

    # compute physical gaps
    physical_gaps = np.diff(pos)

    # compute genetic gaps
    gaps = np.diff(map_pos).astype('f8')

    if is_accessible is not None:

        # compute accessible gaps
        is_accessible = asarray_ndim(is_accessible, 1)
        assert is_accessible.shape[0] > pos[-1], \
            'accessibility array too short'
        accessible_gaps = np.zeros_like(physical_gaps)
        for i in range(1, len(pos)):
            # N.B., expect pos is 1-based
            n_access = np.count_nonzero(is_accessible[pos[i-1]-1:pos[i]-1])
            accessible_gaps[i-1] = n_access

        # adjust using accessibility: shrink each gap by the fraction of
        # accessible bases it contains
        scaling = accessible_gaps / physical_gaps
        gaps = gaps * scaling

    elif gap_scale is not None and gap_scale > 0:

        # cap the effective distance of unusually large gaps at gap_scale
        scaling = np.ones(gaps.shape, dtype='f8')
        loc_scale = physical_gaps > gap_scale
        scaling[loc_scale] = gap_scale / physical_gaps[loc_scale]
        gaps = gaps * scaling

    if max_gap is not None and max_gap > 0:

        # deal with very large gaps: -1 flags a gap too large to integrate
        # across (presumably consumed by the ihh scan functions — confirm in
        # allel.opt.stats)
        gaps[physical_gaps > max_gap] = -1

    return gaps
def ihs(h, pos, map_pos=None, min_ehh=0.05, min_maf=0.05, include_edges=False,
        gap_scale=20000, max_gap=200000, is_accessible=None, use_threads=True):
    """Compute the unstandardized integrated haplotype score (IHS) for each
    variant, comparing integrated haplotype homozygosity between the
    reference (0) and alternate (1) alleles.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    pos : array_like, int, shape (n_variants,)
        Variant positions (physical distance).
    map_pos : array_like, float, shape (n_variants,)
        Variant positions (genetic map distance).
    min_ehh: float, optional
        Minimum EHH beyond which to truncate integrated haplotype
        homozygosity calculation.
    min_maf : float, optional
        Do not compute integrated haplotype homozogysity for variants with
        minor allele frequency below this value.
    include_edges : bool, optional
        If True, report scores even if EHH does not decay below `min_ehh`
        before reaching the edge of the data.
    gap_scale : int, optional
        Rescale distance between variants if gap is larger than this value.
    max_gap : int, optional
        Do not report scores if EHH spans a gap larger than this number of
        base pairs.
    is_accessible : array_like, bool, optional
        Genome accessibility array. If provided, distance between variants
        will be computed as the number of accessible bases between them.
    use_threads : bool, optional
        If True use multiple threads to compute.

    Returns
    -------
    score : ndarray, float, shape (n_variants,)
        Unstandardized IHS scores.

    Notes
    -----

    This function will calculate IHS for all variants. To exclude variants
    below a given minor allele frequency, filter the input haplotype array
    before passing to this function.

    This function computes IHS comparing the reference and alternate alleles.
    These can be polarised by switching the sign for any variant where the
    reference allele is derived.

    This function returns NaN for any IHS calculations where haplotype
    homozygosity does not decay below `min_ehh` before reaching the first or
    last variant. To disable this behaviour, set `include_edges` to True.

    Note that the unstandardized score is returned. Usually these scores are
    then standardized in different allele frequency bins.

    See Also
    --------
    standardize_by_allele_count

    """

    # check inputs
    h = asarray_ndim(h, 2)
    check_integer_dtype(h)
    pos = asarray_ndim(pos, 1)
    check_dim0_aligned(h, pos)
    h = memoryview_safe(h)
    pos = memoryview_safe(pos)

    # compute gaps between variants for integration
    gaps = compute_ihh_gaps(pos, map_pos, gap_scale, max_gap, is_accessible)

    # setup kwargs
    kwargs = dict(min_ehh=min_ehh, min_maf=min_maf, include_edges=include_edges)

    if use_threads and multiprocessing.cpu_count() > 1:
        # run with threads (the forward and backward scans are independent,
        # and the cython scan releases work to run concurrently)

        # create pool
        pool = ThreadPool(2)

        # scan forward
        result_fwd = pool.apply_async(ihh01_scan, (h, gaps), kwargs)

        # scan backward (arrays reversed so the same scan code walks away
        # from each variant in the other direction)
        result_rev = pool.apply_async(ihh01_scan, (h[::-1], gaps[::-1]), kwargs)

        # wait for both to finish
        pool.close()
        pool.join()

        # obtain results
        ihh0_fwd, ihh1_fwd = result_fwd.get()
        ihh0_rev, ihh1_rev = result_rev.get()

        # cleanup
        pool.terminate()

    else:
        # run without threads

        # scan forward
        ihh0_fwd, ihh1_fwd = ihh01_scan(h, gaps, **kwargs)

        # scan backward
        ihh0_rev, ihh1_rev = ihh01_scan(h[::-1], gaps[::-1], **kwargs)

    # handle reverse scan: flip back so indices align with the forward scan
    ihh0_rev = ihh0_rev[::-1]
    ihh1_rev = ihh1_rev[::-1]

    # compute unstandardized score: total integrated homozygosity is the sum
    # of both directions, and IHS is the log ratio between alleles
    ihh0 = ihh0_fwd + ihh0_rev
    ihh1 = ihh1_fwd + ihh1_rev
    score = np.log(ihh1 / ihh0)

    return score
def xpehh(h1, h2, pos, map_pos=None, min_ehh=0.05, include_edges=False,
          gap_scale=20000, max_gap=200000, is_accessible=None,
          use_threads=True):
    """Compute the unstandardized cross-population extended haplotype
    homozygosity score (XPEHH) for each variant.

    Parameters
    ----------
    h1 : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array for the first population.
    h2 : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array for the second population.
    pos : array_like, int, shape (n_variants,)
        Variant positions on physical or genetic map.
    map_pos : array_like, float, shape (n_variants,)
        Variant positions (genetic map distance).
    min_ehh: float, optional
        Minimum EHH beyond which to truncate integrated haplotype
        homozygosity calculation.
    include_edges : bool, optional
        If True, report scores even if EHH does not decay below `min_ehh`
        before reaching the edge of the data.
    gap_scale : int, optional
        Rescale distance between variants if gap is larger than this value.
    max_gap : int, optional
        Do not report scores if EHH spans a gap larger than this number of
        base pairs.
    is_accessible : array_like, bool, optional
        Genome accessibility array. If provided, distance between variants
        will be computed as the number of accessible bases between them.
    use_threads : bool, optional
        If True use multiple threads to compute.

    Returns
    -------
    score : ndarray, float, shape (n_variants,)
        Unstandardized XPEHH scores.

    Notes
    -----

    This function will calculate XPEHH for all variants. To exclude variants
    below a given minor allele frequency, filter the input haplotype arrays
    before passing to this function.

    This function returns NaN for any EHH calculations where haplotype
    homozygosity does not decay below `min_ehh` before reaching the first or
    last variant. To disable this behaviour, set `include_edges` to True.

    Note that the unstandardized score is returned. Usually these scores are
    then standardized genome-wide.

    Haplotype arrays from the two populations may have different numbers of
    haplotypes.

    See Also
    --------
    standardize

    """

    # check inputs
    h1 = asarray_ndim(h1, 2)
    check_integer_dtype(h1)
    h2 = asarray_ndim(h2, 2)
    check_integer_dtype(h2)
    pos = asarray_ndim(pos, 1)
    check_dim0_aligned(h1, h2, pos)
    h1 = memoryview_safe(h1)
    h2 = memoryview_safe(h2)
    pos = memoryview_safe(pos)

    # compute gaps between variants for integration
    gaps = compute_ihh_gaps(pos, map_pos, gap_scale, max_gap, is_accessible)

    # setup kwargs
    kwargs = dict(min_ehh=min_ehh, include_edges=include_edges)

    if use_threads and multiprocessing.cpu_count() > 1:
        # use multiple threads: the four scans (two populations x two
        # directions) are independent of one another

        # setup threadpool
        pool = ThreadPool(min(4, multiprocessing.cpu_count()))

        # scan forward
        res1_fwd = pool.apply_async(ihh_scan, (h1, gaps), kwargs)
        res2_fwd = pool.apply_async(ihh_scan, (h2, gaps), kwargs)

        # scan backward (arrays reversed so the same scan code walks the
        # other direction)
        res1_rev = pool.apply_async(ihh_scan, (h1[::-1], gaps[::-1]), kwargs)
        res2_rev = pool.apply_async(ihh_scan, (h2[::-1], gaps[::-1]), kwargs)

        # wait for both to finish
        pool.close()
        pool.join()

        # obtain results
        ihh1_fwd = res1_fwd.get()
        ihh2_fwd = res2_fwd.get()
        ihh1_rev = res1_rev.get()
        ihh2_rev = res2_rev.get()

        # cleanup
        pool.terminate()

    else:
        # compute without threads

        # scan forward
        ihh1_fwd = ihh_scan(h1, gaps, **kwargs)
        ihh2_fwd = ihh_scan(h2, gaps, **kwargs)

        # scan backward
        ihh1_rev = ihh_scan(h1[::-1], gaps[::-1], **kwargs)
        ihh2_rev = ihh_scan(h2[::-1], gaps[::-1], **kwargs)

    # handle reverse scans: flip back so indices align with the forward scan
    ihh1_rev = ihh1_rev[::-1]
    ihh2_rev = ihh2_rev[::-1]

    # compute unstandardized score: log ratio of total integrated haplotype
    # homozygosity between the two populations
    ihh1 = ihh1_fwd + ihh1_rev
    ihh2 = ihh2_fwd + ihh2_rev
    score = np.log(ihh1 / ihh2)

    return score
def nsl(h, use_threads=True):
    """Compute the unstandardized number of segregating sites by length (nSl)
    for each variant, comparing the reference and alternate alleles,
    after Ferrer-Admetlla et al. (2014).

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    use_threads : bool, optional
        If True use multiple threads to compute.

    Returns
    -------
    score : ndarray, float, shape (n_variants,)

    Notes
    -----
    nSl is calculated for every variant; filter the haplotype array first if
    variants below a given minor allele frequency should be excluded.

    The statistic compares the reference and alternate alleles; polarise
    afterwards by flipping the sign wherever the reference allele is derived.

    No special handling is applied where haplotype homozygosity extends up to
    the first or last variant, so edge effects are possible.

    The returned scores are unstandardized; they are usually standardized
    afterwards within allele frequency bins.

    See Also
    --------
    standardize_by_allele_count

    """
    # validate and normalise the input haplotype array
    h = asarray_ndim(h, 2)
    check_integer_dtype(h)
    h = memoryview_safe(h)
    if use_threads and multiprocessing.cpu_count() > 1:
        # run the forward and backward scans concurrently
        pool = ThreadPool(2)
        fwd = pool.apply_async(nsl01_scan, args=(h,))
        rev = pool.apply_async(nsl01_scan, args=(h[::-1],))
        pool.close()
        pool.join()
        nsl0_fwd, nsl1_fwd = fwd.get()
        nsl0_rev, nsl1_rev = rev.get()
    else:
        # single-threaded: scan forward, then backward
        nsl0_fwd, nsl1_fwd = nsl01_scan(h)
        nsl0_rev, nsl1_rev = nsl01_scan(h[::-1])
    # results from the reversed scan come back in reversed variant order
    nsl0 = nsl0_fwd + nsl0_rev[::-1]
    nsl1 = nsl1_fwd + nsl1_rev[::-1]
    # unstandardized score is the log ratio of alt to ref statistics
    return np.log(nsl1 / nsl0)
def xpnsl(h1, h2, use_threads=True):
    """Cross-population version of the NSL statistic.

    Parameters
    ----------
    h1 : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array for the first population.
    h2 : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array for the second population.
    use_threads : bool, optional
        If True use multiple threads to compute.

    Returns
    -------
    score : ndarray, float, shape (n_variants,)
        Unstandardized XPNSL scores.

    """
    # validate and normalise both haplotype arrays
    h1 = asarray_ndim(h1, 2)
    check_integer_dtype(h1)
    h2 = asarray_ndim(h2, 2)
    check_integer_dtype(h2)
    check_dim0_aligned(h1, h2)
    h1 = memoryview_safe(h1)
    h2 = memoryview_safe(h2)
    if use_threads and multiprocessing.cpu_count() > 1:
        # four independent scans (fwd/rev for each population), dispatched
        # to a small thread pool
        pool = ThreadPool(min(4, multiprocessing.cpu_count()))
        jobs = [pool.apply_async(nsl_scan, args=(x,))
                for x in (h1, h2, h1[::-1], h2[::-1])]
        pool.close()
        pool.join()
        nsl1_fwd, nsl2_fwd, nsl1_rev, nsl2_rev = [j.get() for j in jobs]
        pool.terminate()
    else:
        # single-threaded fallback
        nsl1_fwd = nsl_scan(h1)
        nsl2_fwd = nsl_scan(h2)
        nsl1_rev = nsl_scan(h1[::-1])
        nsl2_rev = nsl_scan(h2[::-1])
    # reverse-scan results come back in reversed variant order
    nsl1 = nsl1_fwd + nsl1_rev[::-1]
    nsl2 = nsl2_fwd + nsl2_rev[::-1]
    # unstandardized score: log ratio between the two populations
    return np.log(nsl1 / nsl2)
def haplotype_diversity(h):
    """Estimate haplotype diversity.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.

    Returns
    -------
    hd : float
        Haplotype diversity.

    """
    h = HaplotypeArray(h, copy=False)
    n = h.n_haplotypes
    freqs = h.distinct_frequencies()
    # estimator: one minus the sum of squared haplotype frequencies, with a
    # small-sample correction factor of n / (n - 1)
    return (1 - np.sum(freqs ** 2)) * n / (n - 1)
def moving_haplotype_diversity(h, size, start=0, stop=None, step=None):
    """Estimate haplotype diversity in moving windows.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.
    step : int, optional
        The number of variants between start positions of windows. If not
        given, defaults to the window size, i.e., non-overlapping windows.

    Returns
    -------
    hd : ndarray, float, shape (n_windows,)
        Haplotype diversity.

    """
    # apply the single-window estimator to each window via the generic
    # moving-window machinery
    return moving_statistic(values=h, statistic=haplotype_diversity,
                            size=size, start=start, stop=stop, step=step)
def garud_h(h):
    """Compute the H1, H12, H123 and H2/H1 statistics for detecting signatures
    of soft sweeps, as defined in Garud et al. (2015).

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.

    Returns
    -------
    h1 : float
        H1 statistic (sum of squares of haplotype frequencies).
    h12 : float
        H12 statistic (sum of squares of haplotype frequencies, combining
        the two most common haplotypes into a single frequency).
    h123 : float
        H123 statistic (sum of squares of haplotype frequencies, combining
        the three most common haplotypes into a single frequency).
    h2_h1 : float
        H2/H1 statistic, indicating the "softness" of a sweep.

    """
    h = HaplotypeArray(h, copy=False)
    # frequency of each distinct haplotype
    f = h.distinct_frequencies()
    sq = f ** 2
    # H1: haplotype homozygosity over the full frequency spectrum
    h1 = np.sum(sq)
    # H12 / H123: pool the top two / three frequencies before squaring
    h12 = np.sum(f[:2]) ** 2 + np.sum(sq[2:])
    h123 = np.sum(f[:3]) ** 2 + np.sum(sq[3:])
    # H2/H1: homozygosity excluding the most frequent haplotype, over H1
    h2_h1 = (h1 - sq[0]) / h1
    return h1, h12, h123, h2_h1
def moving_garud_h(h, size, start=0, stop=None, step=None):
    """Compute the H1, H12, H123 and H2/H1 statistics for detecting signatures
    of soft sweeps, as defined in Garud et al. (2015), in moving windows.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.
    step : int, optional
        The number of variants between start positions of windows. If not
        given, defaults to the window size, i.e., non-overlapping windows.

    Returns
    -------
    h1 : ndarray, float, shape (n_windows,)
        H1 statistics (sum of squares of haplotype frequencies).
    h12 : ndarray, float, shape (n_windows,)
        H12 statistics (sum of squares of haplotype frequencies, combining
        the two most common haplotypes into a single frequency).
    h123 : ndarray, float, shape (n_windows,)
        H123 statistics (sum of squares of haplotype frequencies, combining
        the three most common haplotypes into a single frequency).
    h2_h1 : ndarray, float, shape (n_windows,)
        H2/H1 statistics, indicating the "softness" of a sweep.

    """
    stats = moving_statistic(values=h, statistic=garud_h, size=size,
                             start=start, stop=stop, step=step)
    # moving_statistic stacks the per-window 4-tuples row-wise; split the
    # columns back out into the four separate statistics
    return stats[:, 0], stats[:, 1], stats[:, 2], stats[:, 3]
def plot_haplotype_frequencies(h, palette='Paired', singleton_color='w',
                               ax=None):
    """Plot haplotype frequencies.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    palette : string, optional
        A Seaborn palette name.
    singleton_color : string, optional
        Color to paint singleton haplotypes.
    ax : axes, optional
        The axes on which to draw. If not provided, a new figure will be
        created.

    Returns
    -------
    ax : axes

    """
    import matplotlib.pyplot as plt
    import seaborn as sns
    h = HaplotypeArray(h, copy=False)
    # create a wide, short figure if the caller did not supply axes
    if ax is None:
        width = plt.rcParams['figure.figsize'][0]
        fig, ax = plt.subplots(figsize=(width, width / 10))
        sns.despine(ax=ax, left=True)
    counts = h.distinct_counts()
    # one palette colour per non-singleton haplotype
    colors = sns.color_palette(palette, np.count_nonzero(counts > 1))
    # paint one vertical span per distinct haplotype, width equal to its count
    left = 0
    for idx, count in enumerate(counts):
        right = left + count
        fill = colors[idx] if count > 1 else singleton_color
        ax.axvspan(left, right, color=fill)
        left = right
    # tidy up
    ax.set_xlim(0, h.shape[1])
    ax.set_yticks([])
    return ax
def moving_hfs_rank(h, size, start=0, stop=None):
    """Helper function for plotting haplotype frequencies in moving windows.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.

    Returns
    -------
    hr : ndarray, int, shape (n_windows, n_haplotypes)
        Haplotype rank array.

    """
    # determine the window boundaries
    windows = np.asarray(list(index_windows(h, size=size, start=start,
                                            stop=stop, step=None)))
    # output: one row of haplotype ranks per window (0 means singleton)
    hr = np.zeros((windows.shape[0], h.shape[1]), dtype='i4')
    for row, (lo, hi) in enumerate(windows):
        # count distinct haplotypes within this window, descending
        counts = h[lo:hi].distinct_counts()
        counts.sort()
        counts = counts[::-1]
        # fill rank j+1 over a span of `count` columns for each
        # non-singleton haplotype; singletons keep rank 0
        offset = 0
        for rank0, count in enumerate(counts):
            if count > 1:
                hr[row, offset:offset + count] = rank0 + 1
            offset += count
    return hr
def plot_moving_haplotype_frequencies(pos, h, size, start=0, stop=None, n=None,
                                      palette='Paired', singleton_color='w',
                                      ax=None):
    """Plot haplotype frequencies in moving windows over the genome.

    Parameters
    ----------
    pos : array_like, int, shape (n_items,)
        Variant positions, using 1-based coordinates, in ascending order.
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.
    n : int, optional
        Color only the `n` most frequent haplotypes (by default, all
        non-singleton haplotypes are colored).
    palette : string, optional
        A Seaborn palette name.
    singleton_color : string, optional
        Color to paint singleton haplotypes.
    ax : axes, optional
        The axes on which to draw. If not provided, a new figure will be
        created.

    Returns
    -------
    ax : axes

    """
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import seaborn as sns
    # setup figure
    if ax is None:
        fig, ax = plt.subplots()
    # compute haplotype frequencies
    # N.B., here we use a haplotype rank data structure to enable the use of
    # pcolormesh() which is a lot faster than any other type of plotting
    # function
    hr = moving_hfs_rank(h, size=size, start=start, stop=stop)
    # truncate to n most common haplotypes; rank 0 is the singleton colour,
    # so anything ranked beyond n is painted as a singleton
    if n:
        hr[hr > n] = 0
    # compute window start and stop positions
    windows = moving_statistic(pos, statistic=lambda v: (v[0], v[-1]),
                               size=size, start=start, stop=stop)
    # create color map: index 0 (singletons) first, then one colour per rank
    colors = [singleton_color] + sns.color_palette(palette, n_colors=hr.max())
    cmap = mpl.colors.ListedColormap(colors)
    # draw colors; x holds the window edges, y one row per haplotype
    x = np.append(windows[:, 0], windows[-1, -1])
    y = np.arange(h.shape[1]+1)
    ax.pcolormesh(x, y, hr.T, cmap=cmap)
    # tidy up
    ax.set_xlim(windows[0, 0], windows[-1, -1])
    ax.set_ylim(0, h.shape[1])
    ax.set_ylabel('haplotype count')
    ax.set_xlabel('position (bp)')
    return ax
def moving_delta_tajima_d(ac1, ac2, size, start=0, stop=None, step=None):
    """Compute the difference in Tajima's D between two populations in
    moving windows.

    Parameters
    ----------
    ac1 : array_like, int, shape (n_variants, n_alleles)
        Allele counts array for the first population.
    ac2 : array_like, int, shape (n_variants, n_alleles)
        Allele counts array for the second population.
    size : int
        The window size (number of variants).
    start : int, optional
        The index at which to start.
    stop : int, optional
        The index at which to stop.
    step : int, optional
        The number of variants between start positions of windows. If not
        given, defaults to the window size, i.e., non-overlapping windows.

    Returns
    -------
    delta_d : ndarray, float, shape (n_windows,)
        Standardized delta Tajima's D.

    See Also
    --------
    allel.stats.diversity.moving_tajima_d

    """
    window_kwargs = dict(size=size, start=start, stop=stop, step=step)
    # per-window difference between the two populations
    delta = (moving_tajima_d(ac1, **window_kwargs) -
             moving_tajima_d(ac2, **window_kwargs))
    # standardize the difference across windows
    return (delta - np.mean(delta)) / np.std(delta)
def make_similar_sized_bins(x, n):
"""Utility function to create a set of bins over the range of values in `x`
such that each bin contains roughly the same number of values.
Parameters
----------
x : array_like
The values to be binned.
n : int
The number of bins to create.
Returns
-------
bins : ndarray
An array of bin edges.
Notes
-----
The actual number of bins returned may be less than `n` if `x` contains
integer values and any single value is represented more than len(x)//n
times.
"""
# copy and sort the array
y = np.array(x).flatten()
y.sort()
# setup bins
bins = [y[0]]
# determine step size
step = len(y) // n
# add bin edges
for i in range(step, len(y), step):
# get value at this index
v = y[i]
# only add bin edge if larger than previous
if v > bins[-1]:
bins.append(v)
# fix last bin edge
bins[-1] = y[-1]
return np.array(bins)
def standardize(score):
    """Centre and scale to unit variance (NaN values are ignored)."""
    score = asarray_ndim(score, 1)
    centred = score - np.nanmean(score)
    return centred / np.nanstd(score)
def standardize_by_allele_count(score, aac, bins=None, n_bins=None,
                                diagnostics=True):
    """Standardize `score` within allele frequency bins.

    Parameters
    ----------
    score : array_like, float
        The score to be standardized, e.g., IHS or NSL.
    aac : array_like, int
        An array of alternate allele counts.
    bins : array_like, int, optional
        Allele count bins, overrides `n_bins`.
    n_bins : int, optional
        Number of allele count bins to use.
    diagnostics : bool, optional
        If True, plot some diagnostic information about the standardization.

    Returns
    -------
    score_standardized : ndarray, float
        Standardized scores.
    bins : ndarray, int
        Allele count bins used for standardization.

    """
    from scipy.stats import binned_statistic
    # check inputs
    score = asarray_ndim(score, 1)
    aac = asarray_ndim(aac, 1)
    check_dim0_aligned(score, aac)
    # remove nans; bin statistics are computed on non-NaN scores only
    nonan = ~np.isnan(score)
    score_nonan = score[nonan]
    aac_nonan = aac[nonan]
    if bins is None:
        # make our own similar sized bins
        # how many bins to make?
        if n_bins is None:
            # something vaguely reasonable
            n_bins = np.max(aac) // 2
        # make bins
        bins = make_similar_sized_bins(aac_nonan, n_bins)
    else:
        # user-provided bins
        bins = asarray_ndim(bins, 1)
    # per-bin mean and standard deviation, used below to centre and scale
    # scores within each allele count bin
    mean_score, _, _ = binned_statistic(aac_nonan, score_nonan,
                                        statistic=np.mean,
                                        bins=bins)
    std_score, _, _ = binned_statistic(aac_nonan, score_nonan,
                                       statistic=np.std,
                                       bins=bins)
    if diagnostics:
        import matplotlib.pyplot as plt
        # plot mean +/- std of the unstandardized score at each bin centre
        x = (bins[:-1] + bins[1:]) / 2
        plt.figure()
        plt.fill_between(x,
                         mean_score - std_score,
                         mean_score + std_score,
                         alpha=.5,
                         label='std')
        plt.plot(x, mean_score, marker='o', label='mean')
        plt.grid(axis='y')
        plt.xlabel('Alternate allele count')
        plt.ylabel('Unstandardized score')
        plt.title('Standardization diagnostics')
        plt.legend()
    # apply standardization
    score_standardized = np.empty_like(score)
    for i in range(len(bins) - 1):
        x1 = bins[i]
        x2 = bins[i + 1]
        if i == 0:
            # first bin: left-open, so counts below the first edge are
            # still standardized using the first bin's statistics
            loc = (aac < x2)
        elif i == len(bins) - 2:
            # last bin: right-open, so counts at or above the last edge
            # are still standardized using the last bin's statistics
            loc = (aac >= x1)
        else:
            # middle bins: half-open interval [x1, x2)
            loc = (aac >= x1) & (aac < x2)
        m = mean_score[i]
        s = std_score[i]
        score_standardized[loc] = (score[loc] - m) / s
    return score_standardized, bins
def pbs(ac1, ac2, ac3, window_size, window_start=0, window_stop=None,
        window_step=None, normed=True):
    """Compute the population branching statistic (PBS) which performs a comparison
    of allele frequencies between three populations to detect genome regions that are
    unusually differentiated in one population relative to the other two populations.

    Parameters
    ----------
    ac1 : array_like, int
        Allele counts from the first population.
    ac2 : array_like, int
        Allele counts from the second population.
    ac3 : array_like, int
        Allele counts from the third population.
    window_size : int
        The window size (number of variants) within which to compute PBS values.
    window_start : int, optional
        The variant index at which to start windowed calculations.
    window_stop : int, optional
        The variant index at which to stop windowed calculations.
    window_step : int, optional
        The number of variants between start positions of windows. If not given, defaults
        to the window size, i.e., non-overlapping windows.
    normed : bool, optional
        If True (default), use the normalised version of PBS, also known as PBSn1 [2]_.
        Otherwise, use the PBS statistic as originally defined in [1]_.

    Returns
    -------
    pbs : ndarray, float
        Windowed PBS values.

    Notes
    -----
    The F:sub:`ST` calculations use Hudson's estimator.

    References
    ----------
    .. [1] Yi et al., "Sequencing of Fifty Human Exomes Reveals Adaptation to High
       Altitude", Science, 329(5987): 75–78, 2 July 2010.
    .. [2] Malaspinas et al., "A genomic history of Aboriginal Australia", Nature. volume
       538, pages 207–214, 13 October 2016.

    """
    # normalise and check inputs
    ac1 = AlleleCountsArray(ac1)
    ac2 = AlleleCountsArray(ac2)
    ac3 = AlleleCountsArray(ac3)
    check_dim0_aligned(ac1, ac2, ac3)
    # compute fst between each pair of populations in moving windows
    fst12 = moving_hudson_fst(ac1, ac2, size=window_size, start=window_start,
                              stop=window_stop, step=window_step)
    fst13 = moving_hudson_fst(ac1, ac3, size=window_size, start=window_start,
                              stop=window_stop, step=window_step)
    fst23 = moving_hudson_fst(ac2, ac3, size=window_size, start=window_start,
                              stop=window_stop, step=window_step)
    # clip fst values to avoid infinite if fst is 1
    for x in fst12, fst13, fst23:
        np.clip(x, a_min=0, a_max=0.99999, out=x)
    # compute fst transform
    t12 = -np.log(1 - fst12)
    t13 = -np.log(1 - fst13)
    t23 = -np.log(1 - fst23)
    # compute pbs: length of the branch leading to the first population
    ret = (t12 + t13 - t23) / 2
    if normed:
        # compute pbs normalising constant
        norm = 1 + (t12 + t13 + t23) / 2
        ret = ret / norm
    return ret
import json
import atexit
import mimetypes
from multiprocessing.pool import ThreadPool
import io
import os
import re
import typing
from urllib.parse import quote
from urllib3.fields import RequestField
from openapi_client import rest
from openapi_client.configuration import Configuration
from openapi_client.exceptions import ApiTypeError, ApiValueError, ApiException
from openapi_client.model_utils import (
ModelNormal,
ModelSimple,
ModelComposed,
check_allowed_values,
check_validations,
date,
datetime,
deserialize_file,
file_type,
model_to_dict,
none_type,
validate_and_convert_types
)
class ApiClient(object):
    """Generic API client for OpenAPI client library builds.

    OpenAPI generic API client. This client handles the client-
    server communication, and is invariant across implementations. Specifics of
    the methods and models for each application are generated from the OpenAPI
    templates.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    :param configuration: .Configuration object for this client
    :param header_name: a header to pass when making calls to the API.
    :param header_value: a header value to pass when making calls to
        the API.
    :param cookie: a cookie to include in the header when making calls
        to the API
    :param pool_threads: The number of threads to use for async requests
        to the API. More threads means more concurrent API requests.
    """

    # Class-level default for the lazily created thread pool; replaced with a
    # ThreadPool instance on first access of the ``pool`` property.
    _pool = None
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None, pool_threads=1):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
self.pool_threads = pool_threads
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'OpenAPI-Generator/1.0.0/python'
    def __enter__(self):
        # Support use as a context manager: ``with ApiClient() as client:``.
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Always release the thread pool on context exit, even on error.
        self.close()
def close(self):
if self._pool:
self._pool.close()
self._pool.join()
self._pool = None
if hasattr(atexit, 'unregister'):
atexit.unregister(self.close)
    @property
    def pool(self):
        """Create thread pool on first request
        avoids instantiating unused threadpool for blocking clients.
        """
        if self._pool is None:
            # register cleanup so the pool is shut down at interpreter exit
            atexit.register(self.close)
            self._pool = ThreadPool(self.pool_threads)
        return self._pool
    @property
    def user_agent(self):
        """User agent for this API client (stored in the default headers)."""
        return self.default_headers['User-Agent']
    @user_agent.setter
    def user_agent(self, value):
        # The user agent is just a default header under the hood.
        self.default_headers['User-Agent'] = value
    def set_default_header(self, header_name, header_value):
        """Set a header that will be sent with every request made by this
        client (overwrites any existing default with the same name)."""
        self.default_headers[header_name] = header_value
def __call_api(
self,
resource_path: str,
method: str,
path_params: typing.Optional[typing.Dict[str, typing.Any]] = None,
query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None,
header_params: typing.Optional[typing.Dict[str, typing.Any]] = None,
body: typing.Optional[typing.Any] = None,
post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None,
files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None,
response_type: typing.Optional[typing.Tuple[typing.Any]] = None,
auth_settings: typing.Optional[typing.List[str]] = None,
_return_http_data_only: typing.Optional[bool] = None,
collection_formats: typing.Optional[typing.Dict[str, str]] = None,
_preload_content: bool = True,
_request_timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
_host: typing.Optional[str] = None,
_check_type: typing.Optional[bool] = None
):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = post_params if post_params else []
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
post_params.extend(self.files_parameters(files))
if header_params['Content-Type'].startswith("multipart"):
post_params = self.parameters_to_multipart(post_params,
(dict) )
# body
if body:
body = self.sanitize_for_serialization(body)
# auth setting
self.update_params_for_auth(header_params, query_params,
auth_settings, resource_path, method, body)
# request url
if _host is None:
url = self.configuration.host + resource_path
else:
# use server/host defined in path or operation instead
url = _host + resource_path
try:
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
except ApiException as e:
e.body = e.body.decode('utf-8')
raise e
content_type = response_data.getheader('content-type')
self.last_response = response_data
return_data = response_data
if not _preload_content:
return (return_data)
return return_data
if response_type not in ["file", "bytes"]:
match = None
if content_type is not None:
match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type)
encoding = match.group(1) if match else "utf-8"
response_data.data = response_data.data.decode(encoding)
# deserialize response data
if response_type:
return_data = self.deserialize(
response_data,
response_type,
_check_type
)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def parameters_to_multipart(self, params, collection_types):
"""Get parameters as list of tuples, formatting as json if value is collection_types
:param params: Parameters as list of two-tuples
:param dict collection_types: Parameter collection types
:return: Parameters as list of tuple or urllib3.fields.RequestField
"""
new_params = []
if collection_types is None:
collection_types = (dict)
for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501
if isinstance(v, collection_types): # v is instance of collection_type, formatting as application/json
v = json.dumps(v, ensure_ascii=False).encode("utf-8")
field = RequestField(k, v)
field.make_multipart(content_type="application/json; charset=utf-8")
new_params.append(field)
else:
new_params.append((k, v))
return new_params
    @classmethod
    def sanitize_for_serialization(cls, obj):
        """Builds a JSON POST object.

        If obj is None, return None.
        If obj is str, int, long, float, bool, return directly.
        If obj is datetime.datetime, datetime.date
            convert to string in iso8601 format.
        If obj is list, sanitize each element in the list.
        If obj is dict, return the dict.
        If obj is OpenAPI model, return the properties dict.

        :param obj: The data to serialize.
        :return: The serialized form of data.
        """
        # NOTE: the isinstance checks below are order-sensitive: generated
        # model instances are handled before primitives, and ModelSimple is
        # unwrapped to its .value before being sanitized recursively.
        if isinstance(obj, (ModelNormal, ModelComposed)):
            return {
                key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items()
            }
        elif isinstance(obj, (str, int, float, none_type, bool)):
            return obj
        elif isinstance(obj, (datetime, date)):
            # ISO 8601 string representation
            return obj.isoformat()
        elif isinstance(obj, ModelSimple):
            return cls.sanitize_for_serialization(obj.value)
        elif isinstance(obj, (list, tuple)):
            return [cls.sanitize_for_serialization(item) for item in obj]
        if isinstance(obj, dict):
            return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()}
        raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
    def deserialize(self, response, response_type, _check_type):
        """Deserializes response into an object.

        :param response: RESTResponse object to be deserialized.
        :param response_type: For the response, a tuple containing:
            valid classes
            a list containing valid classes (for list schemas)
            a dict containing a tuple of valid classes as the value
            Example values:
            (str,)
            (Pet,)
            (float, none_type)
            ([int, none_type],)
            ({str: (bool, str, int, float, date, datetime, str, none_type)},)
        :param _check_type: boolean, whether to check the types of the data
            received from the server
        :type _check_type: bool

        :return: deserialized object.
        """
        # handle file downloading
        # save response body into a tmp file and return the instance
        if response_type == (file_type,):
            content_disposition = response.getheader("Content-Disposition")
            return deserialize_file(response.data, self.configuration,
                                    content_disposition=content_disposition)
        # fetch data from response object
        try:
            received_data = json.loads(response.data)
        except ValueError:
            # payload is not JSON; fall back to the raw response body
            received_data = response.data
        # store our data under the key of 'received_data' so users have some
        # context if they are deserializing a string and the data type is wrong
        deserialized_data = validate_and_convert_types(
            received_data,
            response_type,
            ['received_data'],
            True,
            _check_type,
            configuration=self.configuration
        )
        return deserialized_data
    def call_api(
        self,
        resource_path: str,
        method: str,
        path_params: typing.Optional[typing.Dict[str, typing.Any]] = None,
        query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None,
        header_params: typing.Optional[typing.Dict[str, typing.Any]] = None,
        body: typing.Optional[typing.Any] = None,
        post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None,
        files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None,
        response_type: typing.Optional[typing.Tuple[typing.Any]] = None,
        auth_settings: typing.Optional[typing.List[str]] = None,
        async_req: typing.Optional[bool] = None,
        _return_http_data_only: typing.Optional[bool] = None,
        collection_formats: typing.Optional[typing.Dict[str, str]] = None,
        _preload_content: bool = True,
        _request_timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        _host: typing.Optional[str] = None,
        _check_type: typing.Optional[bool] = None
    ):
        """Makes the HTTP request (synchronous) and returns deserialized data.

        To make an async_req request, set the async_req parameter.

        :param resource_path: Path to method endpoint.
        :param method: Method to call.
        :param path_params: Path parameters in the url.
        :param query_params: Query parameters in the url.
        :param header_params: Header parameters to be
            placed in the request header.
        :param body: Request body.
        :param post_params dict: Request post form parameters,
            for `application/x-www-form-urlencoded`, `multipart/form-data`.
        :param auth_settings list: Auth Settings names for the request.
        :param response_type: For the response, a tuple containing:
            valid classes
            a list containing valid classes (for list schemas)
            a dict containing a tuple of valid classes as the value
            Example values:
            (str,)
            (Pet,)
            (float, none_type)
            ([int, none_type],)
            ({str: (bool, str, int, float, date, datetime, str, none_type)},)
        :param files: key -> field name, value -> a list of open file
            objects for `multipart/form-data`.
        :type files: dict
        :param async_req bool: execute request asynchronously
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param collection_formats: dict of collection formats for path, query,
            header, and post parameters.
        :type collection_formats: dict, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _check_type: boolean describing if the data back from the server
            should have its type checked.
        :type _check_type: bool, optional
        :return:
            If async_req parameter is True,
            the request will be called asynchronously.
            The method will return the request thread.
            If parameter async_req is False or missing,
            then the method will return the response directly.
        """
        # synchronous path: execute in the calling thread and return the
        # processed response directly
        if not async_req:
            return self.__call_api(resource_path, method,
                                   path_params, query_params, header_params,
                                   body, post_params, files,
                                   response_type, auth_settings,
                                   _return_http_data_only, collection_formats,
                                   _preload_content, _request_timeout, _host,
                                   _check_type)

        # asynchronous path: hand off to the lazily created thread pool and
        # return an AsyncResult the caller can .get()
        return self.pool.apply_async(self.__call_api, (resource_path,
                                                       method, path_params,
                                                       query_params,
                                                       header_params, body,
                                                       post_params, files,
                                                       response_type,
                                                       auth_settings,
                                                       _return_http_data_only,
                                                       collection_formats,
                                                       _preload_content,
                                                       _request_timeout,
                                                       _host, _check_type))
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ApiValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def files_parameters(self, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None):
"""Builds form parameters.
:param files: None or a dict with key=param_name and
value is a list of open file objects
:return: List of tuples of form parameters with file data
"""
if files is None:
return []
params = []
for param_name, file_instances in files.items():
if file_instances is None:
# if the file field is nullable, skip None values
continue
for file_instance in file_instances:
if file_instance is None:
# if the file field is nullable, skip None values
continue
if file_instance.closed is True:
raise ApiValueError(
"Cannot read a closed file. The passed in file_type "
"for %s must be open." % param_name
)
filename = os.path.basename(file_instance.name)
filedata = file_instance.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([param_name, tuple([filename, filedata, mimetype])]))
file_instance.close()
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings,
resource_path, method, body):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
:param resource_path: A string representation of the HTTP request resource path.
:param method: A string representation of the HTTP request method.
:param body: A object representing the body of the HTTP request.
The object type is the return value of _encoder.default().
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if auth_setting['in'] == 'cookie':
headers['Cookie'] = auth_setting['value']
elif auth_setting['in'] == 'header':
if auth_setting['type'] != 'http-signature':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
'Authentication token must be in `query` or `header`'
)
class Endpoint(object):
    """One generated API operation.

    Instances are created by the generated *Api classes; calling the instance
    runs the stored ``callable``, which eventually delegates to
    :meth:`call_with_http_info`.
    """

    def __init__(self, settings=None, params_map=None, root_map=None,
                 headers_map=None, api_client=None, callable=None):
        """Creates an endpoint

        Args:
            settings (dict): see below key value pairs
                'response_type' (tuple/None): response type
                'auth' (list): a list of auth type keys
                'endpoint_path' (str): the endpoint path
                'operation_id' (str): endpoint string identifier
                'http_method' (str): POST/PUT/PATCH/GET etc
                'servers' (list): list of str servers that this endpoint is at
            params_map (dict): see below key value pairs
                'all' (list): list of str endpoint parameter names
                'required' (list): list of required parameter names
                'nullable' (list): list of nullable parameter names
                'enum' (list): list of parameters with enum values
                'validation' (list): list of parameters with validations
            root_map (dict): see below key value pairs
                'validations' (dict): the dict mapping endpoint parameter tuple
                    paths to their validation dictionaries
                'allowed_values' (dict): the dict mapping endpoint parameter
                    tuple paths to their allowed_values (enum) dictionaries
                'openapi_types' (dict): param_name to openapi type
                'attribute_map' (dict): param_name to camelCase name
                'location_map' (dict): param_name to 'body', 'file', 'form',
                    'header', 'path', 'query'
                'collection_format_map' (dict): param_name to `csv` etc.
            headers_map (dict): see below key value pairs
                'accept' (list): list of Accept header strings
                'content_type' (list): list of Content-Type header strings
            api_client (ApiClient): api client instance
            callable (function): the function which is invoked when the
                Endpoint is called
        """
        self.settings = settings
        self.params_map = params_map
        # Every endpoint also accepts these framework-level keyword arguments
        # in addition to its own operation parameters.
        self.params_map['all'].extend([
            'async_req',
            '_host_index',
            '_preload_content',
            '_request_timeout',
            '_return_http_data_only',
            '_check_input_type',
            '_check_return_type'
        ])
        self.params_map['nullable'].extend(['_request_timeout'])
        self.validations = root_map['validations']
        self.allowed_values = root_map['allowed_values']
        self.openapi_types = root_map['openapi_types']
        # Types for the framework-level kwargs, merged into the per-operation
        # type map so __validate_inputs can check them uniformly.
        extra_types = {
            'async_req': (bool,),
            '_host_index': (none_type, int),
            '_preload_content': (bool,),
            '_request_timeout': (none_type, int, (int,), [int]),
            '_return_http_data_only': (bool,),
            '_check_input_type': (bool,),
            '_check_return_type': (bool,)
        }
        self.openapi_types.update(extra_types)
        self.attribute_map = root_map['attribute_map']
        self.location_map = root_map['location_map']
        self.collection_format_map = root_map['collection_format_map']
        self.headers_map = headers_map
        self.api_client = api_client
        self.callable = callable

    def __validate_inputs(self, kwargs):
        """Validate enum membership, per-parameter rules and (optionally)
        types; coerced values are written back into ``kwargs`` in place."""
        for param in self.params_map['enum']:
            if param in kwargs:
                check_allowed_values(
                    self.allowed_values,
                    (param,),
                    kwargs[param]
                )
        for param in self.params_map['validation']:
            if param in kwargs:
                check_validations(
                    self.validations,
                    (param,),
                    kwargs[param],
                    configuration=self.api_client.configuration
                )
        # Type checking can be disabled by the caller via _check_input_type.
        if kwargs['_check_input_type'] is False:
            return
        for key, value in kwargs.items():
            fixed_val = validate_and_convert_types(
                value,
                self.openapi_types[key],
                [key],
                False,
                kwargs['_check_input_type'],
                configuration=self.api_client.configuration
            )
            kwargs[key] = fixed_val

    def __gather_params(self, kwargs):
        """Bucket the kwargs by parameter location.

        Returns a dict with keys 'body', 'collection_format', 'file', 'form',
        'header', 'path' and 'query', shaped for call_api().
        """
        params = {
            'body': None,
            'collection_format': {},
            'file': {},
            'form': [],
            'header': {},
            'path': {},
            'query': []
        }
        for param_name, param_value in kwargs.items():
            param_location = self.location_map.get(param_name)
            if param_location is None:
                # Framework kwargs have no location; they are not sent.
                continue
            if param_location:
                if param_location == 'body':
                    params['body'] = param_value
                    continue
                base_name = self.attribute_map[param_name]
                if (param_location == 'form' and
                        self.openapi_types[param_name] == (file_type,)):
                    # Single file: normalize to a one-element list.
                    params['file'][param_name] = [param_value]
                elif (param_location == 'form' and
                        self.openapi_types[param_name] == ([file_type],)):
                    # param_value is already a list
                    params['file'][param_name] = param_value
                elif param_location in {'form', 'query'}:
                    param_value_full = (base_name, param_value)
                    params[param_location].append(param_value_full)
                if param_location not in {'form', 'query'}:
                    params[param_location][base_name] = param_value
                collection_format = self.collection_format_map.get(param_name)
                if collection_format:
                    params['collection_format'][base_name] = collection_format
        return params

    def __call__(self, *args, **kwargs):
        """ This method is invoked when endpoints are called
        Example:
        api_instance = AudienceApi()
        api_instance.v1_audience_service_delete # this is an instance of the class Endpoint
        api_instance.v1_audience_service_delete() # this invokes api_instance.v1_audience_service_delete.__call__()
        which then invokes the callable functions stored in that endpoint at
        api_instance.v1_audience_service_delete.callable or self.callable in this class
        """
        return self.callable(self, *args, **kwargs)

    def call_with_http_info(self, **kwargs):
        """Resolve the target host, validate kwargs and perform the HTTP call."""
        try:
            # _host_index explicitly passed by the caller wins over the
            # per-operation / global server index from the configuration.
            index = self.api_client.configuration.server_operation_index.get(
                self.settings['operation_id'], self.api_client.configuration.server_index
            ) if kwargs['_host_index'] is None else kwargs['_host_index']
            server_variables = self.api_client.configuration.server_operation_variables.get(
                self.settings['operation_id'], self.api_client.configuration.server_variables
            )
            _host = self.api_client.configuration.get_host_from_settings(
                index, variables=server_variables, servers=self.settings['servers']
            )
        except IndexError:
            if self.settings['servers']:
                raise ApiValueError(
                    "Invalid host index. Must be 0 <= index < %s" %
                    len(self.settings['servers'])
                )
            # No per-endpoint servers declared: fall back to the default host.
            _host = None
        for key, value in kwargs.items():
            if key not in self.params_map['all']:
                raise ApiTypeError(
                    "Got an unexpected parameter '%s'"
                    " to method `%s`" %
                    (key, self.settings['operation_id'])
                )
            # only throw this nullable ApiValueError if _check_input_type
            # is False, if _check_input_type==True we catch this case
            # in self.__validate_inputs
            if (key not in self.params_map['nullable'] and value is None
                    and kwargs['_check_input_type'] is False):
                raise ApiValueError(
                    "Value may not be None for non-nullable parameter `%s`"
                    " when calling `%s`" %
                    (key, self.settings['operation_id'])
                )
        for key in self.params_map['required']:
            if key not in kwargs.keys():
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`%s`" % (key, self.settings['operation_id'])
                )
        self.__validate_inputs(kwargs)
        params = self.__gather_params(kwargs)
        accept_headers_list = self.headers_map['accept']
        if accept_headers_list:
            params['header']['Accept'] = self.api_client.select_header_accept(
                accept_headers_list)
        content_type_headers_list = self.headers_map['content_type']
        if content_type_headers_list:
            header_list = self.api_client.select_header_content_type(
                content_type_headers_list)
            params['header']['Content-Type'] = header_list
        return self.api_client.call_api(
            self.settings['endpoint_path'], self.settings['http_method'],
            params['path'],
            params['query'],
            params['header'],
            body=params['body'],
            post_params=params['form'],
            files=params['file'],
            response_type=self.settings['response_type'],
            auth_settings=self.settings['auth'],
            async_req=kwargs['async_req'],
            _check_type=kwargs['_check_return_type'],
            _return_http_data_only=kwargs['_return_http_data_only'],
            _preload_content=kwargs['_preload_content'],
            _request_timeout=kwargs['_request_timeout'],
            _host=_host,
            collection_formats=params['collection_format'])
# --- netoprmgr-1.3.5/pip/_internal/configuration.py ---
import locale
import logging
import os
import sys
from pip._vendor.six.moves import configparser
from pip._internal.exceptions import (
ConfigurationError,
ConfigurationFileCouldNotBeLoaded,
)
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS, expanduser
from pip._internal.utils.misc import ensure_dir, enum
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import (
Any, Dict, Iterable, List, NewType, Optional, Tuple
)
RawConfigParser = configparser.RawConfigParser  # Shorthand
# Distinct str-based type for configuration variant names ("user", "global",
# ...); used purely for static type checking.
Kind = NewType("Kind", str)

logger = logging.getLogger(__name__)
# NOTE: Maybe use the optionx attribute to normalize keynames.
def _normalize_name(name):
# type: (str) -> str
"""Make a name consistent regardless of source (environment or file)
"""
name = name.lower().replace('_', '-')
if name.startswith('--'):
name = name[2:] # only prefer long opts
return name
def _disassemble_key(name):
# type: (str) -> List[str]
if "." not in name:
error_message = (
"Key does not contain dot separated section and key. "
"Perhaps you wanted to use 'global.{}' instead?"
).format(name)
raise ConfigurationError(error_message)
return name.split(".", 1)
# The kinds of configurations there are.
kinds = enum(
    USER="user",  # User Specific
    GLOBAL="global",  # System Wide
    SITE="site",  # [Virtual] Environment Specific
    ENV="env",  # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)

# Base filename of pip's configuration file on this platform.
CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
def get_configuration_files():
    # type: () -> Dict[Kind, List[str]]
    """Return, per configuration kind, the list of candidate file paths."""
    global_files = [
        os.path.join(base, CONFIG_BASENAME)
        for base in appdirs.site_config_dirs('pip')
    ]
    site_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    # Old-style per-user location (~/pip or ~/.pip), kept for compatibility.
    legacy_file = os.path.join(
        expanduser('~'),
        'pip' if WINDOWS else '.pip',
        CONFIG_BASENAME,
    )
    new_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: global_files,
        kinds.SITE: [site_file],
        kinds.USER: [legacy_file, new_file],
    }
class Configuration(object):
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style keys and
    stores the value associated with it as "key-name" under the section
    "section".

    This allows for a clean interface wherein the both the section and the
    key-name are preserved in an easy to manage form in the configuration
    files and the data stored is also nice.
    """

    def __init__(self, isolated, load_only=None):
        # type: (bool, Kind) -> None
        super(Configuration, self).__init__()

        # Only file-backed variants can be edited; ENV and ENV_VAR are
        # read-only sources, hence excluded from valid load_only values.
        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
        if load_only not in _valid_load_only:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, _valid_load_only[:-1]))
                )
            )
        self.isolated = isolated  # type: bool
        self.load_only = load_only  # type: Optional[Kind]

        # The order here determines the override order.
        self._override_order = [
            kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
        ]
        # PIP_VERSION / PIP_HELP are CLI concerns, not configuration values.
        self._ignore_env_names = ["version", "help"]

        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in self._override_order
        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in self._override_order
        }  # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]

    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment
        """
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration
        """
        assert self.load_only is not None, \
            "Need to be specified a file to be editing"
        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration.
        """
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError("No such key - {}".format(key))

    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration.
        """
        self._ensure_have_load_only()

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration.
        """
        self._ensure_have_load_only()

        if key not in self._config[self.load_only]:
            raise ConfigurationError("No such key - {}".format(key))

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Remove the key in the parser
            modified_something = False
            if parser.has_section(section):
                # Returns whether the option was removed or not
                modified_something = parser.remove_option(section, name)

            if modified_something:
                # name removed from parser, section may now be empty
                section_iter = iter(parser.items(section))
                try:
                    val = next(section_iter)
                except StopIteration:
                    val = None

                if val is None:
                    parser.remove_section(section)

                self._mark_as_modified(fname, parser)
            else:
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

        del self._config[self.load_only][key]

    def save(self):
        # type: () -> None
        """Save the current in-memory state.
        """
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            with open(fname, "w") as f:
                parser.write(f)

    #
    # Private routines
    #

    def _ensure_have_load_only(self):
        # type: () -> None
        # Mutating operations need a single target file variant.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        #       are not needed here.
        retval = {}

        # Later variants in _override_order win over earlier ones.
        for variant in self._override_order:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files
        """
        config_files = dict(self._iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        parser = configparser.RawConfigParser()

        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason="contains invalid {} characters".format(
                        locale.getpreferredencoding(False)
                    ),
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
        )

    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def _get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            should_be_yielded = (
                key.startswith("PIP_") and
                key[4:].lower() not in self._ignore_env_names
            )
            if should_be_yielded:
                # Yielded without the "PIP_" prefix, lowercased.
                yield key[4:].lower(), val

    # XXX: This is patched in the tests.
    def _iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []

        config_files = get_configuration_files()

        # at the base we have any global configuration
        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # finally virtualenv configuration first trumping others
        yield kinds.SITE, config_files[kinds.SITE]

    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        # Remember the parser so save() knows which files to rewrite.
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)
# --- lppydsmc_taltos-0.0.4/lppydsmc/utils/wall_collision.py ---
import numpy as np
from random import random
import numpy.ma as ma
import numexpr
# ----------------------------- Wall collision -------------------------- #
# NOTE : THE THREE FOLLOWING FUNCTIONS ARE INPLACE
def make_collisions(arr, a, ct, cp, old_count = None):
    """Reflect, one particle at a time, every particle that crossed a wall.

    In-place on ``arr``. ``ct`` holds per (particle, wall) collision times
    (np.inf meaning no collision) and ``cp`` the matching collision positions;
    ``a`` holds one row of wall data per wall. A particle with zero finite
    collision times is left untouched; otherwise an even count of candidate
    walls marks it as outside and it is reflected off the nearest-in-time
    wall. Returns the per-particle parity array (0 where a reflection was
    applied).
    """
    hits = np.count_nonzero(~np.isinf(ct), axis=1)
    # Map "no candidate wall at all" to -1 so its parity is 1 (no reflection).
    parity = np.where(hits == 0, -1, hits) % 2
    nearest_wall = np.argmin(ct, axis=1)
    for particle, flag in enumerate(parity):
        if flag == 0:
            wall = nearest_wall[particle]
            arr[[particle], :] = _reflect_particle(
                arr[[particle], :], a[[wall], :],
                ct[[particle], [wall]], cp[[particle], [wall]])
    return parity
def make_collisions_vectorized(arr, a, ct, cp, old_count = None):
    """Vectorized variant of make_collisions: reflect all colliding particles
    at once, in place on ``arr``.

    ``ct`` is the per (particle, wall) collision-time array (np.inf meaning no
    collision), ``cp`` the matching collision positions and ``a`` one row of
    wall data per wall. ``old_count`` is unused here; presumably kept for
    signature compatibility with the sibling handlers — TODO confirm.

    Returns a boolean mask, True for particles that were reflected.
    """
    idxes = np.argmin(ct, axis = 1)
    count = np.count_nonzero(~np.isinf(ct), axis = 1) # problem when there is no collision to process with no wall whatsoever. Because 0 counts as requiring a collision to be processed...
    # Even (non-zero) number of candidate walls -> particle must be reflected;
    # the count==0 case is mapped to -1 so its parity marks "no reflection".
    count = ~(np.where(count == 0, -1, count)%2).astype(bool) # not super optimal I think.
    # Gather, per colliding particle, the entry for its minimum-time wall:
    # shrink ct/cp from (particles x walls) to one value per particle and pick
    # the matching wall row, so _reflect_particle runs on all of them at once.
    cp_ = np.take_along_axis(cp, idxes[:,None, None], axis = 1)[count, :].squeeze(axis=1)
    ct_ = np.take_along_axis(ct, idxes[:,None], axis = 1)[count, :].squeeze(axis=1)
    a_ = np.take_along_axis(a, idxes[:,None], axis = 0)[count, :]
    arr[count,:] = _reflect_particle(arr[count,:], a_, ct_, cp_)
    return count
def make_collisions_out_walls(arr, a, ct, cp, idx_out_walls, old_count = None):
    """Same as make_collisions_vectorized, but walls listed in
    ``idx_out_walls`` are treated as system exits: particles whose
    nearest-in-time wall is an out-wall are not reflected and their indexes
    are returned instead.

    In-place on ``arr``. ``old_count``, when given, is a boolean mask from a
    previous pass that is combined with this pass' collision mask (and the
    returned out-indexes are remapped through it).

    Returns (mask of reflected particles, indexes of particles that left the
    system through an out-wall).
    """
    # basically the same function as make_collisions_vectorized, only we add some conditions
    idxes = np.argmin(ct, axis = 1)
    idxes_out_bool = np.isin(idxes, idx_out_walls) # if the wall for min collision time is in idx_out_walls
    # getting actual collision (even non-zero candidate count -> must collide)
    count = np.count_nonzero(~np.isinf(ct), axis = 1)
    count = ~(np.where(count == 0, 1, count)%2).astype(bool) # not super optimal I think.
    # True if it should collide
    # processing only collision that are at the same time true collisions and not with the out wall
    # TODO : clean it
    count_ = np.where(count & ~idxes_out_bool, True, False)
    idxes_out_bool = np.where(count & idxes_out_bool, True, False) # Out of the system and should collide
    idxes_out = np.where(idxes_out_bool)[0] # indexes of particles out of the system (certain may still not be, we need to verify it is not in the system)
    # True if out of the system (and thus we should not compute collisions for theses ones)
    count = count_
    if(old_count is not None):
        # Remap this pass' results back into the coordinates of the previous
        # mask, so callers can chain several collision passes.
        c = np.where(old_count)[0]
        old_count[c] = old_count[c]&count
        idxes_out = c[idxes_out]
        c2 = old_count
    else:
        c2 = count
    if(np.sum(count, where = count == True)==0):
        # Nothing left to reflect; only exits (possibly none) to report.
        return c2, idxes_out
    # Gather per-particle minimum-time wall data and reflect in one shot.
    cp_ = np.take_along_axis(cp, idxes[:,None, None], axis = 1)[count, :].squeeze(axis=1)
    ct_ = np.take_along_axis(ct, idxes[:,None], axis = 1)[count, :].squeeze(axis=1)
    a_ = np.take_along_axis(a, idxes[:,None], axis = 0)[count, :]
    arr[c2,:] = _reflect_particle(arr[c2,:], a_, ct_, cp_)
    return c2, idxes_out # the indexes in arr of the particles that got out of the system by out_walls
# Index of the first velocity component in a particle row: columns [0:2] hold
# the 2D position, the remaining columns the 3D velocity.
pos_end_idx = 2

# `arr` is (number of particles) x (pos_end_idx + 3) - 3D velocity and 2D
# position by default.
# `walls` is a 2D array where each row is np.array([x1, y1, x2, y2]); rows are
# stored such that x1 < x2, and y1 < y2 when x1 == x2.
def handler_wall_collision(arr, walls, a, radius):
    # TODO: this could probably be 100% numpy instead of numexpr; in practice
    # it is mostly called for the few particles that left the system, so a
    # loop over particles would likely do as well.
    # NOTE: this algorithm also tells whether a particle is inside the system,
    # since a particle inside can collide with an odd number of walls.
    """Compute, for every (particle, wall) pair, the backward-in-time
    collision time and position of the particle (a disk of radius *radius*)
    with the wall.

    Because we look for collisions that already happened, the velocity is
    applied with a reversed sign (position - t * velocity).

    The candidate crossing times of the infinite line carrying a wall are:
        t_coll_1/2 = (-a -/+ r) / b
    where:
        * a = (p1x - x) sin(theta) + (y - p1y) cos(theta)
        * b = vx sin(theta) - vy cos(theta)
        * (cos(theta), sin(theta)) = normalized direction vector of the wall
    t_coll_1 is when the closest point of the disk crosses the line, t_coll_2
    when the furthest point does. If b == 0 (motion parallel to the wall), no
    collision is reported. Otherwise the crossing must also happen on the wall
    *segment*: the disk centre at the mid-crossing time is projected onto the
    wall direction and kept only when the projection lies within [0, 1] of the
    segment parameterization.

    Args:
        arr (ndarray): one row per particle: [x, y, vx, vy, vz].
        walls (ndarray): one row per wall: [x1, y1, x2, y2], stored with
            x1 < x2 (and y1 < y2 when x1 == x2).
        a (ndarray): one row per wall: [cos(theta), sin(theta), norm] — the
            wall's normalized direction vector and its length.
        radius (float): particle radius.

    Returns:
        (ndarray, ndarray): a (particles x walls) array of collision times
        (np.inf where there is none) and a (particles x walls x 2) array of
        collision positions (np.nan where there is none).
    """
    # Since we are determining past collisions, velocity enters with a minus sign.
    # a and b
    ctheta, stheta, norm = a[:,0], a[:, 1], a[:, 2] # directing vector of the walls. Normalized !
    p1x, p1y, p2x, p2y = walls[:,0], walls[:,1], walls[:,2], walls[:,3]
    x, y, vx, vy, vz = np.split(arr, indices_or_sections=5, axis = 1)
    # split keeps the dimension constant, thus p1x is [number of particles x 1]
    # which allows the broadcasting below.
    # supposing p2x-p1x > 0
    b = numexpr.evaluate("vx*stheta-vy*ctheta")
    a_prime = numexpr.evaluate("(p1x-x)*stheta+(y-p1y)*ctheta")
    # at this point b is 2D and b[i] returns b for all walls for particle i
    # possible collision time :
    t_coll_1 = np.full(shape=b.shape, fill_value=-1.)
    t_coll_2 = np.full(shape=b.shape, fill_value=-1.)
    # b == 0 entries keep the -1. sentinel (parallel motion: no collision).
    np.divide((-a_prime-radius), b, out=t_coll_1, where=b!=0)
    np.divide((-a_prime+radius), b, out=t_coll_2, where=b!=0)
    t_intersect = np.full(shape=b.shape, fill_value=np.inf)
    # Keep the later of the two crossing times when their sum is positive
    # (i.e. at least one genuine forward candidate); others stay at np.inf.
    t_intersect = np.maximum(t_coll_1, t_coll_2 , where = t_coll_2 + t_coll_1 > 0, out = t_intersect)
    # Disk centre at the mid-crossing time (used for the on-segment test) ...
    picx = numexpr.evaluate("x-0.5*(t_coll_1+t_coll_2)*vx")
    picy = numexpr.evaluate("y-0.5*(t_coll_1+t_coll_2)*vy")
    # ... and at the retained intersection time (reported collision position).
    pix = numexpr.evaluate("x-t_intersect*vx")
    piy = numexpr.evaluate("y-t_intersect*vy")
    # Projection of the mid-crossing centre on the wall direction, normalized
    # by the wall length: in [0, 1] iff the crossing happens on the segment.
    qty = numexpr.evaluate("(ctheta*(picx-p1x)+stheta*(picy-p1y))/norm")
    qty = np.where(~np.isnan(qty), qty, -1)
    return np.where((qty >= 0) & (qty <= 1), t_intersect, np.inf), np.moveaxis(np.where((qty >= 0) & (qty <= 1), np.array([pix,piy]), np.nan), 0, -1)
def handler_wall_collision_point(arr, walls, a, deal_with_corner = False): # particles are considered as points
    # TODO: consider porting all of this to 100% numpy instead of numexpr; in practice it is
    # unlikely to be called often for many particles at once (only those that left the system),
    # so a plain loop over particles would probably do.
    # Note: this algorithm also reveals whether a particle is inside the system, since a
    # particle inside the system has an odd number of walls it can collide with.
    """ Determine if there is a collision between the particule which position, velocity and radius
    are given in parameters and the wall of index wall_indx.
    If there is, it compute the time to collision and update the events table.
    We suppose the particule is caracterized by its position (x,y), its velocity (vx, vy) and its radius r.
    The wall is caracterized by its two extremities : p1 = (x1,y1), p2 = (x2,y2) and its normal vector n directed toward the center (such that (p2-p1, n, (p2-p1) x n) is a direct system coordinate).
    The formula which is used is to compute the possible collision time is :
    t_coll_1/2 = (-a sgn(b) +/- r)/|b| = (-a +/- r)/b
    where :
        * a = -x sin(theta) + y cos(theta)
        * b = -vx sin(theta) + vy cos(theta)
        * theta = sgn(ny) arccos(nx)
    thus :  cos(theta) = nx
            sin(theta) = ny
    Note that theses times give the moment the disk crosses the infinite line formed from the wall,
    not strictly speaking the segment formed by the wall...
    If b = 0 : we consider that there is not collision and return t_coll = np.nan
    If b != O : then a necessary condition is to have both t_coll_1 > 0 and t_coll_2 > 0.
    Indeed : * The first time the particule collides, is when its closest point to the wall collides with it.
             * The second time is for when the furthest point to the wall collided with it.
    In such a case, we have to verify that the disk crossing the line occurs on the portion of the line
    which is the wall. To do that, we compute the position of the particule at the time of collision and verify that it
    is on the "wall" segment. If it is we return t_coll = min(t_coll_1, t_coll_2). Else, np.nan.
    Args:
        part_indx (int): index of the particule in self.particules
        position (MyVector): position of the particule
        velocity (MyVector): velocity of the particule
        radius (float): radius of the particule
        wall_indx (int): index of the wall in self.walls
    Returns:
        int, MyVector: the time before the wall and particule collides. Return np.nan is no collision is possible.
    """
    # since we are determining for past collisions, we have to velocity -> -velocity
    # a and b
    ctheta, stheta, norm = a[:,0], a[:, 1], a[:, 2] # directing vector of the walls. Normalized !
    p1x, p1y, p2x, p2y = walls[:,0], walls[:,1], walls[:,2], walls[:,3] # # np.split(walls, indices_or_sections=4, axis = 1)
    x, y, vx, vy, vz = np.split(arr, indices_or_sections=5, axis = 1) # arr[:,0], arr[:,1], arr[:,2], arr[:,3], arr[:,4] #
    # split keeps the dimension constant, thus p1x is [number of particles x 1] which allow for the operation later on
    # supposing p2x-p1x > 0
    # b: velocity component normal to each wall; a_prime: signed distance of the particle to each wall line.
    b = numexpr.evaluate("vx*stheta-vy*ctheta") # -velocity.x*stheta+velocity.y*ctheta; stheta = p2y-p1y; ctheta = p2x-p1x
    a_prime = numexpr.evaluate("(p1x-x)*stheta+(y-p1y)*ctheta")
    # at this point b is 2D and b[i] returns b for all walls for particle i
    # possible collision time :
    t_intersect = np.full(shape=b.shape, fill_value=-1.)
    # Walls parallel to the velocity (b == 0) keep the -1 sentinel; elsewhere the crossing time is -a'/b.
    np.divide(-a_prime, b, out=t_intersect, where=b!=0)
    # Only strictly positive times are valid (past crossings); everything else is discarded as inf.
    t_intersect = np.where(t_intersect>0,t_intersect,np.inf)
    # ic(t_intersect)
    # Intersection point, obtained by integrating the velocity backwards over t_intersect.
    pix = numexpr.evaluate("x-t_intersect*vx")
    piy = numexpr.evaluate("y-t_intersect*vy")
    # ic(pix[0])
    # ic(piy[0])
    # ic(p1x[0])
    # ic(ctheta[0])
    # ic(norm[0])
    # qty = numexpr.evaluate("((radius+(ctheta*(pix-p1x)+stheta*(piy-p1y))))/(norm+2*radius)") # dP1.inner(dP2)/(norm_1*norm_1) # norm_1 cant be 0 because wall segments are not on same points.
    # qty: normalized curvilinear coordinate of the intersection point along the wall direction;
    # a value in [0, 1] means the crossing happens on the wall segment itself.
    qty = numexpr.evaluate("(ctheta*(pix-p1x)+stheta*(piy-p1y))/norm")
    # ic(qty)
    qty = np.where(~np.isnan(qty), qty, -1)  # NaN (e.g. from inf*0 above) must never pass the [0, 1] test
    return np.where((qty >= 0) & (qty <= 1), t_intersect, np.inf), np.moveaxis(np.where((qty >= 0) & (qty <= 1), np.array([pix,piy]), np.nan), 0, -1)
def _reflect_particle(arr, a, ct, cp):
# be careful, Theta is the opposite of the angle between the wall and the default coord system.
k1, k2 = 2*a[:,0]**2-1, 2*a[:,0]*a[:, 1] # 2*ctheta**2-1, 2*ctheta*stheta # TODO : could be saved before computing, this way it gets even faster
# velocity after the collision
arr[:,2] = arr[:,2]*k1+ arr[:,3]*k2 # i.e. : vx = vx*k1+vy*k2
arr[:,3] = - arr[:,3]*k1+arr[:,2]*k2 # i.e. : vy = -vy*k1+vx*k2
# new position (we could add some scattering which we do not do there)
arr[:,0] = cp[:,0]+ct*arr[:,2] # new x pos
arr[:,1] = cp[:,1]+ct*arr[:,3] # new y pos
return arr
# very useless
def deal_with_corner(ct):
    """Collapse duplicated collision times (corner hits) in place.

    For every row of ``ct`` that contains repeated values, the whole row is
    reset to ``inf`` and only the first occurrence of each distinct time is
    restored. Rows with all-distinct times are left untouched.
    """
    # Note: O(rows) python loop — inefficient, but kept simple.
    for row in range(ct.shape[0]):
        unique_vals, first_idx = np.unique(ct[row], return_index=True)
        if unique_vals.shape == ct[row].shape:
            continue  # no duplicates in this row
        ct[row, :] = np.inf
        ct[row, first_idx] = unique_vals
/stratuslab-libcloud-drivers-14.06.0_RC4.tar.gz/stratuslab-libcloud-drivers-14.06.0_RC4/docs/developer_info.md | Developer Info for StratusLab Libcloud Drivers
==============================================
[Libcloud][lc-web] provides abstractions for cloud servers, cloud
storage, load balancers, and DNS. The Libcloud abstraction for cloud
servers is similar to that for StratusLab, so it should be fairly
straightforward to provide a plugin for this.
The storage abstraction for Libcloud is "file-based". This doesn't
match very well with the "disk-based" storage that StratusLab provides
and that is included in the "compute" part of the Libcloud API. A
mapping between the "file-based" storage API and StratusLab volumes
could be done, but a serious evaluation needs to be done first to see
if this is useful.
StratusLab does not provide load balancers or DNS services, so neither
of those abstractions make sense for a StratusLab plugin.
Mapping Cloud Server Semantics
==============================
The Libcloud cloud server interface (protocol) is object based with
`Node` being the primary object. (See the `libcloud/compute/base.py`
class in the [codebase][lc-github].) The node consists of:
* ID
* Name
* State
* Public IPs
* Private IPs
* Libcloud driver
* Size
* Image
* Extra driver-specific information
This matches very well the characteristics of an instance with
StratusLab. The driver (plugin) is used to create a machine instance
and then it is controlled directly through the Node instance. Aside from
getter functions, the interface has `reboot` and `destroy` methods.
StratusLab (at least at the moment) won't be able to support the
`reboot` method.
The `NodeSize` is just a tuple containing the id, name, RAM, disk,
bandwidth, and price. StratusLab can map the StratusLab type names to
the id and name. RAM to RAM and disk to swap space. We don't
currently have bandwidth and price, but perhaps we should consider
adding these even if they are unused.
The `NodeImage` is a machine image and contains only an id and a name.
These can be taken from the Marketplace with the id mapped to the
usual StratusLab image identifier. The name can be the title, if
provided, or the image description.
There is a concept of a `NodeLocation` in the API. This corresponds to,
for example, the different geographic regions of Amazon. This can
easily correspond to the various cloud infrastructure sections that we
allow in our standard configuration file. This provides a name to
indicate the various endpoints, credentials, etc. tied to a given
cloud resource.
There is also a `StorageVolume` in the API to describe volumes that can
be attached to a Node. This corresponds well to the StratusLab
storage abstraction. There are also methods in the abstraction for
attaching and detaching a volume from a machine.
Open Questions
==============
* What is the policy with external dependencies?
* Why is there no list_volumes() method in NodeDriver?
* Why is there no CPU (and/or core) fields in NodeSize?
* Why is there no function to get the state of a node (list_nodes()
seems to be used for this)?
* Why does list_nodes() not take a location? Always getting all nodes
at all locations seems wasteful in terms of bandwidth and time.
* Had problems with RSA SSH keys. Are only DSA keys accepted?
[lc-web]: http://libcloud.apache.org
[lc-github]: https://github.com/apache/libcloud
| PypiClean |
/ElectronCounting-1.0.0.tar.gz/ElectronCounting-1.0.0/CountingNN/blocks.py | from typing import Union, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
class ConvBlock(nn.Module):
    """
    Stack of ``nb_layers`` identical convolutional units. Each unit applies a
    (circularly padded) convolution, then optional dropout, a leaky-ReLU
    activation and, optionally, batch normalization.

    Args:
        ndim:
            Data dimensionality (1D or 2D)
        nb_layers:
            Number of conv/activation units in the block
        input_channels:
            Number of channels entering the first unit
        output_channels:
            Number of channels produced by every unit
        kernel_size:
            Convolutional filter size in pixels (default: 1)
        stride:
            Stride of the convolutional filter (default: 1)
        padding:
            Edge padding (default: 0); circular padding mode is used
        batch_norm:
            Append batch normalization to every unit
        lrelu_a:
            Negative slope of the leaky-ReLU activation
        dropout_:
            Dropout probability for every unit (0 disables dropout)
    """

    def __init__(self,
                 ndim: int, nb_layers: int,
                 input_channels: int, output_channels: int,
                 kernel_size: Union[Tuple[int], int] = 1,
                 stride: Union[Tuple[int], int] = 1,
                 padding: Union[Tuple[int], int] = 0,
                 batch_norm: bool = False, lrelu_a: float = 0.01,
                 dropout_: float = 0) -> None:
        """
        Builds the layer stack.
        """
        super(ConvBlock, self).__init__()
        if not 0 < ndim < 3:
            raise AssertionError("ndim must be equal to 1 or 2")
        conv_cls = nn.Conv2d if ndim == 2 else nn.Conv1d
        bn_cls = nn.BatchNorm2d if ndim == 2 else nn.BatchNorm1d
        layers = []
        in_ch = input_channels
        for _ in range(nb_layers):
            layers.append(conv_cls(in_ch,
                                   output_channels,
                                   kernel_size=kernel_size,
                                   stride=stride,
                                   padding=padding,
                                   padding_mode='circular'))
            if dropout_ > 0:
                layers.append(nn.Dropout(dropout_))
            layers.append(nn.LeakyReLU(negative_slope=lrelu_a))
            if batch_norm:
                layers.append(bn_cls(output_channels))
            in_ch = output_channels  # all units after the first operate on output_channels
        self.block = nn.Sequential(*layers)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Applies the layer stack to ``x``.
        """
        return self.block(x)
class ConvBlock_ori(nn.Module):
    """
    Original variant of :class:`ConvBlock` with 3-pixel kernels and padding 1
    by default. Each of the ``nb_layers`` units applies a circularly padded
    convolution, then optional dropout, a leaky-ReLU activation and,
    optionally, batch normalization.

    Args:
        ndim:
            Data dimensionality (1D or 2D)
        nb_layers:
            Number of conv/activation units in the block
        input_channels:
            Number of channels entering the first unit
        output_channels:
            Number of channels produced by every unit
        kernel_size:
            Convolutional filter size in pixels (default: 3)
        stride:
            Stride of the convolutional filter (default: 1)
        padding:
            Edge padding (default: 1); circular padding mode is used
        batch_norm:
            Append batch normalization to every unit
        lrelu_a:
            Negative slope of the leaky-ReLU activation
        dropout_:
            Dropout probability for every unit (0 disables dropout)
    """

    def __init__(self,
                 ndim: int, nb_layers: int,
                 input_channels: int, output_channels: int,
                 kernel_size: Union[Tuple[int], int] = 3,
                 stride: Union[Tuple[int], int] = 1,
                 padding: Union[Tuple[int], int] = 1,
                 batch_norm: bool = False, lrelu_a: float = 0.01,
                 dropout_: float = 0) -> None:
        """
        Builds the layer stack.
        """
        super(ConvBlock_ori, self).__init__()
        if not 0 < ndim < 3:
            raise AssertionError("ndim must be equal to 1 or 2")
        conv_cls = nn.Conv2d if ndim == 2 else nn.Conv1d
        bn_cls = nn.BatchNorm2d if ndim == 2 else nn.BatchNorm1d
        layers = []
        in_ch = input_channels
        for _ in range(nb_layers):
            layers.append(conv_cls(in_ch,
                                   output_channels,
                                   kernel_size=kernel_size,
                                   stride=stride,
                                   padding=padding,
                                   padding_mode='circular'))
            if dropout_ > 0:
                layers.append(nn.Dropout(dropout_))
            layers.append(nn.LeakyReLU(negative_slope=lrelu_a))
            if batch_norm:
                layers.append(bn_cls(output_channels))
            in_ch = output_channels  # subsequent units operate on output_channels
        self.block = nn.Sequential(*layers)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Applies the layer stack to ``x``.
        """
        return self.block(x)
class ResModule(nn.Module):
    """
    Chains ``res_depth`` residual blocks (:class:`ResBlock`) into one module.

    Args:
        ndim: Data dimensionality (1D or 2D)
        res_depth: Number of residual blocks in the module
        input_channels: Number of channels entering the first block
        output_channels: Number of channels produced by every block
        batch_norm: Enable batch normalization inside each residual block
        lrelu_a: Negative slope of the leaky-ReLU activations
    """

    def __init__(self,
                 ndim: int,
                 res_depth: int,
                 input_channels: int,
                 output_channels: int,
                 batch_norm: bool = True,
                 lrelu_a: float = 0.01
                 ) -> None:
        """
        Builds the chain of residual blocks.
        """
        super(ResModule, self).__init__()
        blocks = []
        in_ch = input_channels
        for _ in range(res_depth):
            blocks.append(ResBlock(ndim, in_ch, output_channels,
                                   lrelu_a=lrelu_a, batch_norm=batch_norm))
            in_ch = output_channels  # all blocks after the first keep the channel count
        self.res_module = nn.Sequential(*blocks)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Applies the residual chain to ``x``.
        """
        return self.res_module(x)
class ResModule_ori(nn.Module):
    """
    Chains ``res_depth`` original-style residual blocks (:class:`ResBlock_ori`,
    3-pixel kernels) into one module.

    Args:
        ndim: Data dimensionality (1D or 2D)
        res_depth: Number of residual blocks in the module
        input_channels: Number of channels entering the first block
        output_channels: Number of channels produced by every block
        batch_norm: Enable batch normalization inside each residual block
        lrelu_a: Negative slope of the leaky-ReLU activations
    """

    def __init__(self,
                 ndim: int,
                 res_depth: int,
                 input_channels: int,
                 output_channels: int,
                 batch_norm: bool = True,
                 lrelu_a: float = 0.01
                 ) -> None:
        """
        Builds the chain of residual blocks.
        """
        super(ResModule_ori, self).__init__()
        blocks = []
        in_ch = input_channels
        for _ in range(res_depth):
            blocks.append(ResBlock_ori(ndim, in_ch, output_channels,
                                       lrelu_a=lrelu_a, batch_norm=batch_norm))
            in_ch = output_channels  # all blocks after the first keep the channel count
        self.res_module = nn.Sequential(*blocks)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Applies the residual chain to ``x``.
        """
        return self.res_module(x)
class ResBlock(nn.Module):
    """
    Residual block built from 1x1 convolutions: an input projection ``c0``
    followed by two conv/(bn)/leaky-ReLU stages whose output is added back to
    the projection before a final activation.

    Note: the ``kernel_size``, ``stride`` and ``padding`` arguments are
    accepted but currently unused — all three convolutions are hard-wired to
    1x1, stride 1, no padding (see :class:`ResBlock_ori` for the 3x3 variant).

    Args:
        ndim: Data dimensionality (1D or 2D)
        input_channels: Number of input channels
        output_channels: Number of output channels
        kernel_size: Unused (kept for signature compatibility)
        stride: Unused (kept for signature compatibility)
        padding: Unused (kept for signature compatibility)
        batch_norm: Add batch normalization after each inner convolution
        lrelu_a: Negative slope of the leaky-ReLU activations
    """

    def __init__(self,
                 ndim: int,
                 input_channels: int,
                 output_channels: int,
                 kernel_size: Union[Tuple[int], int] = 3,
                 stride: Union[Tuple[int], int] = 1,
                 padding: Union[Tuple[int], int] = 1,
                 batch_norm: bool = True,
                 lrelu_a: float = 0.01
                 ) -> None:
        """
        Builds the three convolutions and optional batch-norm layers.
        """
        super(ResBlock, self).__init__()
        if not 0 < ndim < 3:
            raise AssertionError("ndim must be equal to 1 or 2")
        conv_cls = nn.Conv2d if ndim == 2 else nn.Conv1d
        self.lrelu_a = lrelu_a
        self.batch_norm = batch_norm

        def make_conv(in_ch: int, out_ch: int):
            # 1x1 "pointwise" convolution; circular padding mode is moot at padding 0.
            return conv_cls(in_ch, out_ch, kernel_size=1, stride=1,
                            padding=0, padding_mode='circular')

        self.c0 = make_conv(input_channels, output_channels)
        self.c1 = make_conv(output_channels, output_channels)
        self.c2 = make_conv(output_channels, output_channels)
        if batch_norm:
            bn_cls = nn.BatchNorm2d if ndim == 2 else nn.BatchNorm1d
            self.bn1 = bn_cls(output_channels)
            self.bn2 = bn_cls(output_channels)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Projects ``x`` with ``c0``, runs the two-stage residual branch and
        returns the activated sum of branch and projection.
        """
        x = self.c0(x)
        shortcut = x
        out = self.c1(x)
        if self.batch_norm:
            out = self.bn1(out)
        out = F.leaky_relu(out, negative_slope=self.lrelu_a)
        out = self.c2(out)
        if self.batch_norm:
            out = self.bn2(out)
        out = F.leaky_relu(out + shortcut, negative_slope=self.lrelu_a)
        return out
class ResBlock_ori(nn.Module):
    """
    Original residual block variant: a 1x1 input projection ``c0`` followed by
    two 3x3 conv/(bn)/leaky-ReLU stages (circular padding 1) whose output is
    added back to the projection before a final activation.

    Note: the ``kernel_size``, ``stride`` and ``padding`` arguments are
    accepted but currently unused — the inner convolutions are hard-wired to
    kernel 3 / padding 1 and the projection to 1x1.

    Args:
        ndim: Data dimensionality (1D or 2D)
        input_channels: Number of input channels
        output_channels: Number of output channels
        kernel_size: Unused (kept for signature compatibility)
        stride: Unused (kept for signature compatibility)
        padding: Unused (kept for signature compatibility)
        batch_norm: Add batch normalization after each inner convolution
        lrelu_a: Negative slope of the leaky-ReLU activations
    """

    def __init__(self,
                 ndim: int,
                 input_channels: int,
                 output_channels: int,
                 kernel_size: Union[Tuple[int], int] = 3,
                 stride: Union[Tuple[int], int] = 1,
                 padding: Union[Tuple[int], int] = 1,
                 batch_norm: bool = True,
                 lrelu_a: float = 0.01
                 ) -> None:
        """
        Builds the three convolutions and optional batch-norm layers.
        """
        super(ResBlock_ori, self).__init__()
        if not 0 < ndim < 3:
            raise AssertionError("ndim must be equal to 1 or 2")
        conv_cls = nn.Conv2d if ndim == 2 else nn.Conv1d
        self.lrelu_a = lrelu_a
        self.batch_norm = batch_norm
        # 1x1 projection so the shortcut has output_channels channels.
        self.c0 = conv_cls(input_channels, output_channels, kernel_size=1,
                           stride=1, padding=0, padding_mode='circular')
        # Two 3-wide convolutions with circular padding keep the spatial size.
        self.c1 = conv_cls(output_channels, output_channels, kernel_size=3,
                           stride=1, padding=1, padding_mode='circular')
        self.c2 = conv_cls(output_channels, output_channels, kernel_size=3,
                           stride=1, padding=1, padding_mode='circular')
        if batch_norm:
            bn_cls = nn.BatchNorm2d if ndim == 2 else nn.BatchNorm1d
            self.bn1 = bn_cls(output_channels)
            self.bn2 = bn_cls(output_channels)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Projects ``x`` with ``c0``, runs the two-stage residual branch and
        returns the activated sum of branch and projection.
        """
        x = self.c0(x)
        shortcut = x
        out = self.c1(x)
        if self.batch_norm:
            out = self.bn1(out)
        out = F.leaky_relu(out, negative_slope=self.lrelu_a)
        out = self.c2(out)
        if self.batch_norm:
            out = self.bn2(out)
        out = F.leaky_relu(out + shortcut, negative_slope=self.lrelu_a)
        return out
class UpsampleBlock(nn.Module):
    """
    Upsamples the input by interpolation, then mixes channels with a 1x1
    convolution (which can be used to reduce the number of feature channels).

    Args:
        ndim:
            Data dimensionality (1D or 2D)
        input_channels:
            Number of input channels
        output_channels:
            Number of output channels
        scale_factor:
            Spatial upsampling factor (default: 2)
        mode:
            Interpolation mode, "bilinear" or "nearest". For 1D data the mode
            is forced to "nearest" regardless of this argument.
    """

    def __init__(self,
                 ndim: int,
                 input_channels: int,
                 output_channels: int,
                 scale_factor: int = 2,
                 mode: str = "bilinear") -> None:
        """
        Validates the arguments and builds the channel-mixing convolution.
        """
        super(UpsampleBlock, self).__init__()
        if mode not in ("bilinear", "nearest"):
            raise NotImplementedError(
                "use 'bilinear' or 'nearest' for upsampling mode")
        if not 0 < ndim < 3:
            raise AssertionError("ndim must be equal to 1 or 2")
        conv_cls = nn.Conv2d if ndim == 2 else nn.Conv1d
        self.scale_factor = scale_factor
        # "bilinear" is a 2D-only mode; 1D inputs fall back to "nearest".
        self.mode = mode if ndim == 2 else "nearest"
        self.conv = conv_cls(
            input_channels, output_channels,
            kernel_size=1, stride=1, padding=0, padding_mode='circular')

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """
        Interpolates ``x`` up by ``scale_factor`` and applies the 1x1 conv.
        """
        upsampled = F.interpolate(
            x, scale_factor=self.scale_factor, mode=self.mode)
        return self.conv(upsampled)
/django-admin-kit-0.0.20.tar.gz/django-admin-kit-0.0.20/README.md | # Django Admin Kit
![Build Status](https://travis-ci.org/RohanPoojary/django-admin-kit.svg?branch=master)
![Docs Status](https://readthedocs.org/projects/django-admin-kit/badge/?version=latest)
![Coverage Status](https://coveralls.io/repos/github/RohanPoojary/django-admin-kit/badge.svg?branch=master)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
Django Admin Kit provides additional functionality for Django, including a Multi Select Field, duplication of inline models, and easier Ajax bindings.
# Compatibility
The project is compatible with Django 1.11+, Django 2.0+ and Python 3.5+
It is not compatible with Django-3.0 yet.
# Installation
The project can be installed by running command
pip install django-admin-kit
# Configuration
The app name ``admin_kit`` should be put at the top of installed apps in django ``settings`` file.
# settings.py
INSTALLED_APPS = [
'admin_kit',
'django.contrib.admin',
'django.contrib.auth',
...
]
This is because Admin Kit overrides Django's *change_form* template.
Then register the admin_kit app in root ``urls`` file
with name ``admin_kit``
# urls.py
from django.conf.urls import url
import admin_kit
urlpatterns = [
...
url(r'^admin_kit/', admin_kit.site.urls, name="admin_kit"),
]
Start the server and hit the ``/admin_kit/ping`` url. You will get a ``PONG`` response
if configured correctly.
# Features
There are mainly three features Admin_Kit provides. For detailed features visit [documentation](https://django-admin-kit.readthedocs.io/)
## Duplicate Button
This is a default feature that is added right after successfull configuration of the app.
![Duplicate Button](https://raw.githubusercontent.com/RohanPoojary/django-admin-kit/master/docs/images/duplicate%20button.png)
This button is similar to ``Add Another`` button, but it initializes the fields with previously
filled data. It is also compatible with [django-nested-admin](https://github.com/theatlantic/django-nested-admin). This button is only for inlined fields.
## New Fields
Provides new fields like MultiSelect and SelectField, with support for ajax features.
## Ajax Features
There is also features to bind your model fields from APIs via Ajax requests.
# Documentation
For documentation go to https://django-admin-kit.readthedocs.io/
# License
The project is lincensed under MIT License. Please go through ``LICENSE`` file in the root folder.
| PypiClean |
/azure-mgmt-sql-4.0.0b3.zip/azure-mgmt-sql-4.0.0b3/azure/mgmt/sql/operations/_server_dev_ops_audit_settings_operations.py | from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast
from msrest import Serializer
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
# Signature of the optional ``cls`` callback accepted by every operation: it receives
# the pipeline response, the deserialized model and a dict of response headers.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

# Shared serializer used by the request builders below; client-side validation is disabled.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
    resource_group_name: str,
    server_name: str,
    dev_ops_auditing_settings_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Builds the HTTP GET request that fetches a server's DevOps audit settings."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Caller-supplied api_version (kwarg or query param) wins over the default.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
    accept = _headers.pop('Accept', "application/json")

    # Interpolate the ARM resource path.
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings/{devOpsAuditingSettingsName}")  # pylint: disable=line-too-long
    _url = _format_url_section(_url, **{
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "serverName": _SERIALIZER.url("server_name", server_name, 'str'),
        "devOpsAuditingSettingsName": _SERIALIZER.url("dev_ops_auditing_settings_name", dev_ops_auditing_settings_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    })

    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=_url,
        params=_params,
        headers=_headers,
        **kwargs
    )
def build_create_or_update_request_initial(
    resource_group_name: str,
    server_name: str,
    dev_ops_auditing_settings_name: str,
    subscription_id: str,
    *,
    json: Optional[_models.ServerDevOpsAuditingSettings] = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Builds the initial HTTP PUT request of the create-or-update operation."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Caller-supplied api_version (kwarg or query param) wins over the default.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
    content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None))  # type: Optional[str]
    accept = _headers.pop('Accept', "application/json")

    # Interpolate the ARM resource path.
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings/{devOpsAuditingSettingsName}")  # pylint: disable=line-too-long
    _url = _format_url_section(_url, **{
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "serverName": _SERIALIZER.url("server_name", server_name, 'str'),
        "devOpsAuditingSettingsName": _SERIALIZER.url("dev_ops_auditing_settings_name", dev_ops_auditing_settings_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    })

    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Content-Type is only sent when a body serialization format was chosen.
    if content_type is not None:
        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="PUT",
        url=_url,
        params=_params,
        headers=_headers,
        json=json,
        content=content,
        **kwargs
    )
def build_list_by_server_request(
    resource_group_name: str,
    server_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Builds the HTTP GET request that lists all DevOps audit settings of a server."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Caller-supplied api_version (kwarg or query param) wins over the default.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
    accept = _headers.pop('Accept', "application/json")

    # Interpolate the ARM resource path.
    _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings")  # pylint: disable=line-too-long
    _url = _format_url_section(_url, **{
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "serverName": _SERIALIZER.url("server_name", server_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    })

    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=_url,
        params=_params,
        headers=_headers,
        **kwargs
    )
class ServerDevOpsAuditSettingsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.sql.SqlManagementClient`'s
:attr:`server_dev_ops_audit_settings` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        server_name: str,
        dev_ops_auditing_settings_name: str,
        **kwargs: Any
    ) -> _models.ServerDevOpsAuditingSettings:
        """Gets a server's DevOps audit settings.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :param dev_ops_auditing_settings_name: The name of the devops audit settings. This should
         always be 'default'.
        :type dev_ops_auditing_settings_name: str
        :keyword api_version: Api Version. Default value is "2020-11-01-preview". Note that overriding
         this default value may result in unsupported behavior.
        :paramtype api_version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServerDevOpsAuditingSettings, or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.ServerDevOpsAuditingSettings
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Map "hard failure" HTTP statuses to azure-core exception types; callers
        # may extend or override the mapping through the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
        cls = kwargs.pop('cls', None)  # type: ClsType[_models.ServerDevOpsAuditingSettings]

        # Build the raw HTTP request from the method arguments.
        request = build_get_request(
            resource_group_name=resource_group_name,
            server_name=server_name,
            dev_ops_auditing_settings_name=dev_ops_auditing_settings_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        # Send the request through the client pipeline (policies + transport).
        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        # Anything other than 200 is an error for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ServerDevOpsAuditingSettings', pipeline_response)

        if cls:
            # Hand the raw response and deserialized model to the caller-provided callback.
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings/{devOpsAuditingSettingsName}"}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name: str,
        server_name: str,
        dev_ops_auditing_settings_name: str,
        parameters: _models.ServerDevOpsAuditingSettings,
        **kwargs: Any
    ) -> Optional[_models.ServerDevOpsAuditingSettings]:
        """Issues the initial PUT call of the create-or-update long-running operation.

        Returns the deserialized settings when the service answers 200; a 202
        (accepted, operation still in progress) produces no body, so ``None``
        is returned and the poller takes over.
        """
        # Map "hard failure" HTTP statuses to azure-core exception types; callers
        # may extend or override the mapping through the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
        content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional[_models.ServerDevOpsAuditingSettings]]

        # Serialize the model into the JSON request body.
        _json = self._serialize.body(parameters, 'ServerDevOpsAuditingSettings')

        request = build_create_or_update_request_initial(
            resource_group_name=resource_group_name,
            server_name=server_name,
            dev_ops_auditing_settings_name=dev_ops_auditing_settings_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._create_or_update_initial.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        # Send the request through the client pipeline (policies + transport).
        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        # 200 = completed synchronously, 202 = accepted for asynchronous processing.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ServerDevOpsAuditingSettings', pipeline_response)

        if cls:
            # Hand the raw response and (possibly None) model to the caller-provided callback.
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings/{devOpsAuditingSettingsName}"}  # type: ignore
    @distributed_trace
    def begin_create_or_update(
        self,
        resource_group_name: str,
        server_name: str,
        dev_ops_auditing_settings_name: str,
        parameters: _models.ServerDevOpsAuditingSettings,
        **kwargs: Any
    ) -> LROPoller[_models.ServerDevOpsAuditingSettings]:
        """Creates or updates a server's DevOps audit settings.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :param dev_ops_auditing_settings_name: The name of the devops audit settings. This should
         always be 'default'.
        :type dev_ops_auditing_settings_name: str
        :param parameters: Properties of DevOps audit settings.
        :type parameters: ~azure.mgmt.sql.models.ServerDevOpsAuditingSettings
        :keyword api_version: Api Version. Default value is "2020-11-01-preview". Note that overriding
         this default value may result in unsupported behavior.
        :paramtype api_version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either ServerDevOpsAuditingSettings or the
         result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.sql.models.ServerDevOpsAuditingSettings]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
        content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json"))  # type: Optional[str]
        cls = kwargs.pop('cls', None)  # type: ClsType[_models.ServerDevOpsAuditingSettings]
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial request.  cls=lambda keeps the raw
            # pipeline response so the poller can drive the operation itself.
            raw_result = self._create_or_update_initial(  # type: ignore
                resource_group_name=resource_group_name,
                server_name=server_name,
                dev_ops_auditing_settings_name=dev_ops_auditing_settings_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x,y,z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        # error_map was consumed by the initial call; drop any leftover copy so it
        # is not forwarded to the polling machinery.
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('ServerDevOpsAuditingSettings', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(
                lro_delay,
                lro_options={'final-state-via': 'azure-async-operation'},
                **kwargs
            ))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved token.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings/{devOpsAuditingSettingsName}"}  # type: ignore
    @distributed_trace
    def list_by_server(
        self,
        resource_group_name: str,
        server_name: str,
        **kwargs: Any
    ) -> Iterable[_models.ServerDevOpsAuditSettingsListResult]:
        """Lists DevOps audit settings of a server.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param server_name: The name of the server.
        :type server_name: str
        :keyword api_version: Api Version. Default value is "2020-11-01-preview". Note that overriding
         this default value may result in unsupported behavior.
        :paramtype api_version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ServerDevOpsAuditSettingsListResult or the result
         of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.sql.models.ServerDevOpsAuditSettingsListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # NOTE(review): unlike the sibling methods, headers are not wrapped in
        # case_insensitive_dict here — presumably a codegen quirk; confirm.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-11-01-preview"))  # type: str
        cls = kwargs.pop('cls', None)  # type: ClsType[_models.ServerDevOpsAuditSettingsListResult]

        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}) or {})

        def prepare_request(next_link=None):
            # First page goes through the URL template; subsequent pages reuse
            # the service-provided nextLink verbatim as the template URL.
            if not next_link:
                request = build_list_by_server_request(
                    resource_group_name=resource_group_name,
                    server_name=server_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_server.metadata['url'],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore

            else:
                request = build_list_by_server_request(
                    resource_group_name=resource_group_name,
                    server_name=server_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=next_link,
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Pull one page: return (next page link or None, iterator of elements).
            deserialized = self._deserialize("ServerDevOpsAuditSettingsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
                request,
                stream=False,
                **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_server.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/devOpsAuditingSettings"}  # type: ignore
/ClusterAPIServer-0.3.tar.gz/ClusterAPIServer-0.3/app/common/helper.py | import subprocess
import json
import xmltodict
from functools import wraps
from flask import make_response, jsonify
from app import app
def xml_to_json(xml_data):
    # Parse an XML document with xmltodict and serialize the resulting
    # dict structure back out as a JSON string.
    return json.dumps(xmltodict.parse(xml_data))
def to_ascii(s):
    """Best-effort conversion of a bytes value to ``str``.

    ``None`` and ``str`` inputs are returned unchanged.  Bytes are decoded as
    UTF-8; on a decode failure the traceback is printed and the original
    bytes object is returned untouched.  (Taken from crmsh.utils; useful to
    remove accents/diacritics issues.)
    """
    if s is None or isinstance(s, str):
        return s
    try:
        return str(s, 'utf-8')
    except UnicodeDecodeError:
        import traceback
        traceback.print_exc()
        return s
def get_stdout_stderr(cmd, input_s=None, shell=True):
    """Run *cmd* and return the ``(returncode, stdout, stderr)`` triple.

    stdout/stderr are converted via :func:`to_ascii`.  A stdin pipe is opened
    only when *input_s* is truthy, and *input_s* is then fed to the process.
    (Taken from crmsh.utils.)
    """
    stdin_mode = subprocess.PIPE if input_s else None
    proc = subprocess.Popen(
        cmd,
        shell=shell,
        stdin=stdin_mode,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out_data, err_data = proc.communicate(input_s)
    return proc.returncode, to_ascii(out_data), to_ascii(err_data)
def get_cib_data_raw(scope=None):
    """Invoke the CIB query command and return its raw stdout.

    When *scope* is given, ``-o <scope>`` is appended to restrict the query
    to a single CIB section.  Stderr and the exit code are discarded.
    """
    cmd_parts = [app.config['CIB_CMD'], app.config['CIB_CMD_OPTIONS']]
    if scope:
        cmd_parts.append("-o %s" % scope)
    ret, out, err = get_stdout_stderr(" ".join(cmd_parts))
    return out
def get_cib_data(scope=None):
    # Decorator factory: before each call of the wrapped function, fetch the
    # CIB data (optionally limited to *scope*), convert it to JSON and publish
    # it as ``cib_data`` in the wrapped function's *module* globals.
    #
    # NOTE(review): writing into f.__globals__ mutates module-level state that
    # is shared by every function in that module — presumably the wrapped view
    # functions read a global ``cib_data``; confirm before refactoring.
    def decorator(f):
        @wraps(f)
        def decorator_function(*args, **kwargs):
            # Refresh the global on every invocation so handlers see fresh data.
            g['cib_data'] = xml_to_json(get_cib_data_raw(scope))
            return f(*args, **kwargs)
        # Captured once at decoration time: the module namespace of f.
        g = f.__globals__
        return decorator_function
    return decorator
def response(status, message, status_code):
    """Build a JSON HTTP response carrying a status and a message.

    :param status: Status string for the payload.
    :param message: Human-readable message.
    :param status_code: HTTP status code to return.
    :return: ``(flask.Response, status_code)`` tuple.
    """
    payload = {'status': status, 'message': message}
    return make_response(jsonify(payload)), status_code
def response_auth(status, message, token, status_code):
    """Build a JSON HTTP response that also carries an auth token.

    :param status: Status string for the payload.
    :param message: Human-readable message.
    :param token: Authorization token as *bytes*; decoded here as UTF-8.
    :param status_code: HTTP status code to return.
    :return: ``(flask.Response, status_code)`` tuple.
    """
    payload = {
        'status': status,
        'message': message,
        'auth_token': token.decode("utf-8"),
    }
    return make_response(jsonify(payload)), status_code
/behave-jenkins-1.2.5.1.zip/behave-jenkins-1.2.5.1/behave/tag_expression.py |
import six
class TagExpression(object):
    """
    Tag expression, as logical boolean expression, to select
    (include or exclude) model elements.

    BOOLEAN LOGIC := (or_expr1) and (or_expr2) and ...
    with or_exprN := [not] tag1 or [not] tag2 or ...
    """

    def __init__(self, tag_expressions):
        # self.ands: list of OR-term lists (ANDed together);
        # self.limits: tag -> occurrence limit parsed from 'tag:<number>'.
        self.ands = []
        self.limits = {}
        for expr in tag_expressions:
            self.store_and_extract_limits(self.normalized_tags_from_or(expr))

    @staticmethod
    def normalize_tag(tag):
        """
        Normalize a tag for a tag expression:

          * strip whitespace
          * strip '@' char
          * convert '~' (tilde) into '-' (minus sign)

        :param tag:  Tag (as string).
        :return: Normalized tag (as string).
        """
        tag = tag.strip()
        if tag.startswith('@'):
            tag = tag[1:]
        elif tag.startswith('-@') or tag.startswith('~@'):
            tag = '-' + tag[2:]
        elif tag.startswith('~'):
            tag = '-' + tag[1:]
        return tag

    @classmethod
    def normalized_tags_from_or(cls, expr):
        """
        Normalizes all tags in an OR expression (and return it as list).

        :param expr:  OR expression to normalize and split (as string).
        :return: Generator of normalized tags (as string)
        """
        for tag in expr.strip().split(','):
            yield cls.normalize_tag(tag)

    def store_and_extract_limits(self, tags):
        """Parse one OR-term group: record optional 'tag:limit' counts in
        self.limits and append the (possibly negated) tags to self.ands.

        :raises Exception: when the same tag is given two different limits.
        """
        tags_with_negation = []
        for tag in tags:
            negated = tag.startswith('-')
            tag = tag.split(':')
            tag_with_negation = tag.pop(0)
            tags_with_negation.append(tag_with_negation)
            if tag:
                # Remainder after ':' is an occurrence limit for this tag.
                limit = int(tag[0])
                if negated:
                    tag_without_negation = tag_with_negation[1:]
                else:
                    tag_without_negation = tag_with_negation
                limited = tag_without_negation in self.limits
                if limited and self.limits[tag_without_negation] != limit:
                    msg = "Inconsistent tag limits for {0}: {1:d} and {2:d}"
                    msg = msg.format(tag_without_negation,
                                     self.limits[tag_without_negation], limit)
                    raise Exception(msg)
                self.limits[tag_without_negation] = limit
        if tags_with_negation:
            self.ands.append(tags_with_negation)

    def check(self, tags):
        """
        Checks if this tag expression matches the tags of a model element.

        :param tags:  List of tags of a model element.
        :return: True, if tag expression matches. False, otherwise.
        """
        if not self.ands:
            # Empty expression matches everything.
            return True
        element_tags = set(tags)

        def test_tag(xtag):
            if xtag.startswith('-'):  # -- or xtag.startswith('~'):
                return xtag[1:] not in element_tags
            return xtag in element_tags

        # -- EVALUATE: (or_expr1) and (or_expr2) and ...
        return all(any(test_tag(xtag) for xtag in ors) for ors in self.ands)

    def __len__(self):
        return len(self.ands)

    def __str__(self):
        """Conversion back into string that represents this tag expression."""
        and_parts = []
        for or_terms in self.ands:
            and_parts.append(u",".join(or_terms))
        return u" ".join(and_parts)

    if six.PY2:
        # On Python 2, __str__ must return bytes; route text through __unicode__.
        __unicode__ = __str__
        __str__ = lambda self: self.__unicode__().encode("utf-8")
/manimgl-1.6.1-py38-none-any.whl/manimlib/utils/config_ops.py | import inspect
import itertools as it
def get_all_descendent_classes(Class):
    """Return *Class* together with every (transitive) subclass."""
    result = []
    pending = [Class]
    while pending:
        current = pending.pop()
        pending.extend(current.__subclasses__())
        result.append(current)
    return result
def filtered_locals(caller_locals):
    """Return a copy of *caller_locals* without the ``self``/``kwargs`` entries.

    The input mapping is never mutated.
    """
    result = caller_locals.copy()
    ignored_local_args = ["self", "kwargs"]
    for arg in ignored_local_args:
        # The second argument to pop() is only a default that suppresses
        # KeyError for absent keys.  (The original mistakenly passed the whole
        # caller_locals dict as that default; None is the intended sentinel.)
        result.pop(arg, None)
    return result
def digest_config(obj, kwargs, caller_locals={}):
    """
    Sets init args and CONFIG values as local variables

    The purpose of this function is to ensure that all
    configuration of any object is inheritable, able to
    be easily passed into instantiation, and is attached
    as an attribute of the object.
    """
    # Walk the whole class hierarchy, collecting every CONFIG dict found.
    static_configs = []
    pending = [obj.__class__]
    while pending:
        Class = pending.pop()
        pending.extend(Class.__bases__)
        if hasattr(Class, "CONFIG"):
            static_configs.append(Class.CONFIG)

    # Priority (highest first): kwargs, caller locals, existing attributes,
    # then CONFIG dicts from most-derived down to the base classes.
    caller_locals = filtered_locals(caller_locals)
    all_dicts = [kwargs, caller_locals, obj.__dict__] + static_configs
    obj.__dict__ = merge_dicts_recursively(*reversed(all_dicts))
def merge_dicts_recursively(*dicts):
    """Merge *dicts* into one dict whose keyset is the union of all inputs.

    Dicts later in the argument list have higher priority.  When a key maps
    to a dict in both the accumulated result and an incoming dict, the two
    are merged recursively instead of being overwritten.
    """
    result = dict()
    for d in dicts:
        for key, value in d.items():
            if isinstance(value, dict) and isinstance(result.get(key), dict):
                result[key] = merge_dicts_recursively(result[key], value)
            else:
                result[key] = value
    return result
def soft_dict_update(d1, d2):
    """Copy entries of *d2* into *d1*, but never overwrite an existing key."""
    for key, value in d2.items():
        d1.setdefault(key, value)
def digest_locals(obj, keys=None):
    # Copy the *caller's* local variables onto ``obj`` as attributes.
    #
    # NOTE: this reads the caller's stack frame via ``f_back``, so it must be
    # called directly from the function whose locals are wanted — wrapping it
    # in another helper would change which frame gets inspected.
    caller_locals = filtered_locals(
        inspect.currentframe().f_back.f_locals
    )
    # When no explicit key list is given, copy every (filtered) local.
    if keys is None:
        keys = list(caller_locals.keys())
    for key in keys:
        setattr(obj, key, caller_locals[key])
# Occasionally convenient in order to write dict.x instead of more laborious
# (and less in keeping with all other attr accesses) dict["x"]
class DictAsObject(object):
    """Thin wrapper exposing a dict's keys as attributes (``d.x`` for ``d["x"]``)."""

    def __init__(self, dict):
        # The backing dict becomes the instance __dict__, so attribute reads
        # and writes go straight to the original mapping.  (The parameter name
        # shadows the builtin but is kept for keyword-caller compatibility.)
        # Fixed: removed stray "| PypiClean" text fused onto this line.
        self.__dict__ = dict
/catchpoint-trace-0.0.4.tar.gz/catchpoint-trace-0.0.4/catchpoint/plugins/trace/patcher.py | from __future__ import absolute_import, unicode_literals
import sys
from importlib.machinery import PathFinder, ModuleSpec, SourceFileLoader
import catchpoint.utils as utils
from catchpoint import constants
from catchpoint.config import config_names
from catchpoint.config.config_provider import ConfigProvider
from catchpoint.plugins.trace.traceable import Traceable
class ImportPatcher(utils.Singleton):
    """Singleton that installs a CatchpointFinder meta-path hook for every
    module listed in CATCHPOINT_TRACE_INSTRUMENT_TRACEABLECONFIG config entries."""

    def __init__(self):
        # module path -> [function_prefix, trace-argument list]
        self.modules_map = self.__process_env_var_modules_to_instrument()
        for module_path in self.modules_map.keys():
            # Prepend so our finder takes precedence over the standard finders.
            sys.meta_path.insert(0, CatchpointFinder(module_path))

    @staticmethod
    def __process_env_var_modules_to_instrument():
        """Collect {module_path: [function_prefix, arguments]} from the config."""
        modules = {}
        for variable in ConfigProvider.configs:
            value = ConfigProvider.get(variable)
            if variable.startswith(config_names.CATCHPOINT_TRACE_INSTRUMENT_TRACEABLECONFIG):
                try:
                    module_path, function_prefix, arguments = utils.process_trace_def_var(value)
                    if module_path is not None:
                        modules[module_path] = [function_prefix, arguments]
                except Exception:
                    # Malformed trace definitions are skipped.  (Narrowed from a
                    # bare except that also swallowed KeyboardInterrupt/SystemExit.)
                    pass
        return modules

    def get_module_function_prefix(self, module_name):
        """Return the configured prefix for *module_name*, or None when unknown."""
        try:
            return self.modules_map[module_name][0]
        except (KeyError, IndexError):
            return None

    def get_trace_arguments(self, module_name):
        """Return the trace-argument list for *module_name*, or None when unknown."""
        try:
            return self.modules_map[module_name][1]
        except (KeyError, IndexError):
            return None
class CatchpointFinder(PathFinder):
    """Meta-path finder that substitutes a CatchpointLoader for one module."""

    def __init__(self, module_name):
        self.module_name = module_name

    def find_spec(self, fullname, path=None, target=None):
        if fullname != self.module_name:
            # Not our module: decline so the regular finders handle it.
            return None
        spec = super(CatchpointFinder, self).find_spec(fullname, path, target)
        loader = CatchpointLoader(fullname, spec.origin)
        return ModuleSpec(fullname, loader)
# Loading the module in a load time
class CatchpointLoader(SourceFileLoader):
def exec_module(self, module):
super(CatchpointLoader, self).exec_module(module)
import_patcher = ImportPatcher()
module_name = utils.get_module_name(module)
function_prefix = import_patcher.get_module_function_prefix(module_name)
trace_args_list = import_patcher.get_trace_arguments(module_name)
try:
trace_args = utils.str2bool(trace_args_list[constants.TRACE_ARGS])
except:
trace_args = False
try:
trace_return_value = utils.str2bool(trace_args_list[constants.TRACE_RETURN_VALUE])
except:
trace_return_value = False
try:
trace_error = utils.str2bool(trace_args_list[constants.TRACE_ERROR])
except:
trace_error = True
allowed_functions = utils.get_allowed_functions(module)
if function_prefix != '':
for function in allowed_functions:
if function_prefix in function:
setattr(module, function,
Traceable(trace_args=trace_args,
trace_return_value=trace_return_value,
trace_error=trace_error)(getattr(module, function)))
else:
for function in allowed_functions:
setattr(module, function,
Traceable(trace_args=trace_args,
trace_return_value=trace_return_value,
trace_error=trace_error)(getattr(module, function)))
return module | PypiClean |
/iter2-1.1-py3-none-any.whl/iter2-1.1.dist-info/DESCRIPTION.rst | # iter2
This library provides implementation of
[rich-iterator](http://code.activestate.com/recipes/498272-rich-iterator-wrapper/)
concept, inspired by
[Rust's std::iter::Iterator](https://doc.rust-lang.org/std/iter/trait.Iterator.html),
[Java's Stream](https://docs.oracle.com/javase/8/docs/api/?java/util/stream/Stream.html)
and [more-itertools library](https://more-itertools.readthedocs.io/en/latest/).
## Usage
The main object of library is `iter2`. It behaves like built-in
`iter` except that it creates an instance of rich-iterator.
```python
iter2(['dzen', 'of', 'python']).map(str.capitalize).join(' ') # 'Dzen Of Python'
```
> **Every** method of rich-iterator that returns new iterator makes original rich-iterator **invalid**, so it
> **cannot be used** in any iteration process. This behaviour can be bypassed with `ref` method.
Original iterator can be retrieved with `raw` method:
```python
orig = iter2.range(5).raw()
tuple(orig) # (0, 1, 2, 3, 4)
```
`iter2` has some methods to build sequences of values:
```python
iter2.of('a', 'b', 'cde').join() # 'abcde'
iter2.range(5).map(str).join() # '01234'
iter2.count_from(100).take(5).map(str).join('->') # '100->101->102->103->104'
iter2.numeric_range(1.0, 3.5, 0.3).to_tuple() # (1.0, 1.3, 1.6, 1.9, 2.2, 2.5, 2.8, 3.1, 3.4)
# and some other ...
```
and some algorithm-methods on multiple iterables:
```python
iter2.cartesian_product(range(2), repeat=2).to_tuple() # ((0, 0), (0, 1), (1, 0), (1, 1))
iter2.zip_longest(range(3), range(1, 5), fillvalue=-1).to_tuple() # ((0, 1), (1, 2), (2, 3), (-1, 4))
iter2.chain(['Somewhere', 'over'], ['the', 'Rainbow']).join(' ') # 'Somewhere over the Rainbow'
# and some other ...
```
### Comparison
Here are some examples of usage compared with builtins-and-itertools-based
implementations:
```python
from itertools import islice
from functools import reduce
import operator
from iter2 import iter2
square = lambda x: x ** 2
odd = lambda x: x % 2 == 1
def fibonacci():
a = b = 1
while True:
yield a
a, b = b, a + b
# Example 1:
cool_song = 'Somewhere over the Rainbow'
# "from `cool_song` take capital characters and join them with '+'"
iter2(cool_song).filter(str.isupper).join('+')
# vs
# "with '+' join capital characters from `cool_song`"
'+'.join(filter(str.isupper, cool_song))
# Example 2:
# "for values in 0..99 take squares of them that are odd and sum them" (sounds like algo)
iter2.range(100).map(square).filter(odd).sum()
# vs
# "sum values that are odd that are squares of values in 0..99" (sounds like shit)
sum(filter(odd, map(square, range(100))))
# "sum squares of values in 0..99 that are odd"
sum(x ** 2 for x in range(100) if x % 2 == 1) # luckily, oddity doesn't change on squaring
# Example 3: "Playing with infinite sequences"
(iter2(fibonacci())
.drop(10)
.filter(odd)
.map(square)
.take(5)
.product()) # shortcut for `.fold(operator.mul)`
# vs
reduce(operator.mul,islice(map(square, filter(odd, islice(fibonacci(), 10, None))), 5)) # (counting braces balance and commas positions)
# (to make clearer):
reduce(
operator.mul,
islice(
map(
square,
filter(
odd,
islice(fibonacci(), 10, None)
)
),
5
)
) # or not?
```
## Changelog

#### v1.1
- Tuple-wise methods
#### v1.0
- Initial
/nnisgf-0.4-py3-none-manylinux1_x86_64.whl/nnisgf-0.4.data/data/nni/node_modules/lodash/_createRecurry.js | var isLaziable = require('./_isLaziable'),
setData = require('./_setData'),
setWrapToString = require('./_setWrapToString');
/** Used to compose bitmasks for function metadata. */
var WRAP_BIND_FLAG = 1,
WRAP_BIND_KEY_FLAG = 2,
WRAP_CURRY_BOUND_FLAG = 4,
WRAP_CURRY_FLAG = 8,
WRAP_PARTIAL_FLAG = 32,
WRAP_PARTIAL_RIGHT_FLAG = 64;
/**
* Creates a function that wraps `func` to continue currying.
*
* @private
* @param {Function} func The function to wrap.
* @param {number} bitmask The bitmask flags. See `createWrap` for more details.
* @param {Function} wrapFunc The function to create the `func` wrapper.
* @param {*} placeholder The placeholder value.
* @param {*} [thisArg] The `this` binding of `func`.
* @param {Array} [partials] The arguments to prepend to those provided to
* the new function.
* @param {Array} [holders] The `partials` placeholder indexes.
* @param {Array} [argPos] The argument positions of the new function.
* @param {number} [ary] The arity cap of `func`.
* @param {number} [arity] The arity of `func`.
* @returns {Function} Returns the new wrapped function.
*/
function createRecurry(func, bitmask, wrapFunc, placeholder, thisArg, partials, holders, argPos, ary, arity) {
  var isCurry = bitmask & WRAP_CURRY_FLAG,
      newHolders = isCurry ? holders : undefined,
      newHoldersRight = isCurry ? undefined : holders,
      newPartials = isCurry ? partials : undefined,
      newPartialsRight = isCurry ? undefined : partials;

  // Flip the partial flags to match the currying direction.
  bitmask |= (isCurry ? WRAP_PARTIAL_FLAG : WRAP_PARTIAL_RIGHT_FLAG);
  bitmask &= ~(isCurry ? WRAP_PARTIAL_RIGHT_FLAG : WRAP_PARTIAL_FLAG);

  // Unless curried-with-bound-this, any bind flags are dropped.
  if (!(bitmask & WRAP_CURRY_BOUND_FLAG)) {
    bitmask &= ~(WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG);
  }
  var newData = [
    func, bitmask, thisArg, newPartials, newHolders, newPartialsRight,
    newHoldersRight, argPos, ary, arity
  ];

  var result = wrapFunc.apply(undefined, newData);
  if (isLaziable(func)) {
    // Record metadata so lazy (shortcut-fusion) wrappers stay optimizable.
    setData(result, newData);
  }
  result.placeholder = placeholder;
  return setWrapToString(result, func, bitmask);
}

// Fixed: removed stray "| PypiClean" text after this statement, which made
// the module a SyntaxError.
module.exports = createRecurry;
/robotframework-drupallibrary-1.0.0a1.tar.gz/robotframework-drupallibrary-1.0.0a1/README.rst | ============================
robotframework-drupallibrary
============================
RobotFramework keywords and utilities for Drupal site acceptance testings.
You may learn about RobotFramework reading from http://robotframework.org
Full documentation and API
==========================
http://alterway.github.io/robotframework-drupallibrary
Developer installation
======================
Please use a virtualenv to maintain this package, but I should not need to say that.
Grab the source from the SCM repository, "cd" in, then:
.. code:: console
$ python setup.py develop
$ pip install robotframework-drupallibrary[dev]
Run the tests:
.. code:: console
$ python setup.py test
Build the Sphinx documentation:
.. code:: console
$ python setup.py build_sphinx
$ firefox gh-pages/html/index.html
Links
=====
Project home page and source code
https://github.com/alterway/robotframework-drupallibrary
Issues tracker
https://github.com/alterway/robotframework-drupallibrary/issues
/ParmEd-4.1.0.tar.gz/ParmEd-4.1.0/parmed/formats/pdbx/PdbxContainers.py | __docformat__ = "restructuredtext en"
__author__ = "John Westbrook"
__email__ = "[email protected]"
__license__ = "Creative Commons Attribution 3.0 Unported"
__version__ = "V0.01"
import re,sys,traceback
class CifName(object):
    """Utilities for CIF-style data names of the form ``_category.attribute``."""

    def __init__(self):
        pass

    @staticmethod
    def categoryPart(name):
        """Return the category portion of *name* (leading '_' stripped)."""
        tname = name[1:] if name.startswith("_") else name
        # partition() yields the whole string as the head when no '.' exists.
        return tname.partition(".")[0]

    @staticmethod
    def attributePart(name):
        """Return the attribute portion of *name*, or None when there is none."""
        _category, sep, attribute = name.partition(".")
        return attribute if sep else None
class ContainerBase(object):
    """Container base class for data and definition objects.

    Maintains an ordered list of category names plus a name -> object catalog
    of DataCategory-like objects (anything exposing getName/setName).
    """

    def __init__(self, name):
        # The enclosing scope of the data container (e.g. data_/save_)
        self.__name = name
        # List of category names within this container (preserves insert order).
        self.__objNameList = []
        # Dictionary of DataCategory objects keyed by category name.
        self.__objCatalog = {}
        self.__type = None

    def getType(self):
        return self.__type

    def setType(self, type):
        self.__type = type

    def getName(self):
        return self.__name

    def setName(self, name):
        self.__name = name

    def exists(self, name):
        return name in self.__objCatalog

    def getObj(self, name):
        """Return the object stored under *name*, or None when absent."""
        if name in self.__objCatalog:
            return self.__objCatalog[name]
        else:
            return None

    def getObjNameList(self):
        return self.__objNameList

    def append(self, obj):
        """Add the input object to the current object catalog.  An existing
        object of the same name will be overwritten (without duplicating its
        entry in the ordered name list)."""
        if obj.getName() is not None:
            if not obj.getName() in self.__objCatalog:
                # self.__objNameList is keeping track of object order here --
                self.__objNameList.append(obj.getName())
            self.__objCatalog[obj.getName()] = obj

    def replace(self, obj):
        """Replace an existing object with the input object; no-op when no
        object of the same name exists."""
        if obj.getName() is not None and obj.getName() in self.__objCatalog:
            self.__objCatalog[obj.getName()] = obj

    def printIt(self, fh=sys.stdout, type="brief"):
        fh.write("+ %s container: %30s contains %4d categories\n" %
                 (self.getType(), self.getName(), len(self.__objNameList)))
        for nm in self.__objNameList:
            fh.write("--------------------------------------------\n")
            fh.write("Data category: %s\n" % nm)
            if type == 'brief':
                self.__objCatalog[nm].printIt(fh)
            else:
                self.__objCatalog[nm].dumpIt(fh)

    def rename(self, curName, newName):
        """Change the name of an object in place.  Return True on success.

        NOTE(review): the catalog entry under *curName* is left in place
        (only the name list is updated) — confirm whether that is intended.
        """
        try:
            i = self.__objNameList.index(curName)
            self.__objNameList[i] = newName
            self.__objCatalog[newName] = self.__objCatalog[curName]
            self.__objCatalog[newName].setName(newName)
            return True
        except (KeyError, ValueError):
            # list.index raises ValueError and the catalog lookup KeyError when
            # curName is unknown.  (Narrowed from a bare except.)
            return False

    def remove(self, curName):
        """Remove object by name.  Return True on success or False otherwise."""
        try:
            if curName in self.__objCatalog:
                del self.__objCatalog[curName]
                i = self.__objNameList.index(curName)
                del self.__objNameList[i]
                return True
            else:
                return False
        except (KeyError, ValueError):
            # Narrowed from a bare except that hid real errors.
            pass
        return False
class DefinitionContainer(ContainerBase):
    """Container for DDL definition content (category or item definitions)."""

    def __init__(self, name):
        super(DefinitionContainer, self).__init__(name)
        self.setType('definition')

    def isCategory(self):
        """True when this definition holds a 'category' object."""
        if self.exists('category'):
            return True
        return False

    def isAttribute(self):
        """True when this definition holds an 'item' object."""
        if self.exists('item'):
            return True
        return False

    def printIt(self, fh=sys.stdout, type="brief"):
        fh.write("Definition container: %30s contains %4d categories\n" %
                 (self.getName(), len(self.getObjNameList())))
        if self.isCategory():
            fh.write("Definition type: category\n")
        elif self.isAttribute():
            fh.write("Definition type: item\n")
        else:
            fh.write("Definition type: undefined\n")

        for nm in self.getObjNameList():
            fh.write("--------------------------------------------\n")
            fh.write("Definition category: %s\n" % nm)
            if type == 'brief':
                self.getObj(nm).printIt(fh)
            else:
                # Fixed typo: was dumpId(), a method no category object defines;
                # the full-dump method is dumpIt() (cf. ContainerBase.printIt).
                self.getObj(nm).dumpIt(fh)
class DataContainer(ContainerBase):
    ''' Container class for DataCategory objects within a single data_ block.
    '''

    def __init__(self, name):
        super(DataContainer, self).__init__(name)
        self.setType('data')
        # True when this container represents a global_ section rather than data_.
        self.__globalFlag = False

    def invokeDataBlockMethod(self, type, method, db):
        # Executes the DDL method's inline source text in this frame; the
        # method body may reference self/db/self.__currentRow.
        # NOTE(review): exec on method text is only safe for trusted
        # dictionaries — confirm inputs are never user-supplied.
        self.__currentRow = 1
        exec(method.getInline())

    def setGlobal(self):
        self.__globalFlag = True

    def getGlobal(self):
        return self.__globalFlag
class DataCategoryBase(object):
    """Base object definition for a data category: a name, a list of attribute
    names, and a list of rows (one list of values per row)."""

    def __init__(self, name, attributeNameList=None, rowList=None):
        self._name = name
        # Keep references to the caller-provided lists (no copies) so callers
        # may continue to mutate them; substitute fresh lists when omitted.
        self._rowList = rowList if rowList is not None else []
        self._attributeNameList = attributeNameList if attributeNameList is not None else []
        # Derived class data, rebuilt by __setup().
        self._catalog = {}
        self._numAttributes = 0
        self.__setup()

    def __setup(self):
        # Case-insensitive lookup: lowercase attribute name -> original name.
        self._numAttributes = len(self._attributeNameList)
        self._catalog = {att.lower(): att for att in self._attributeNameList}

    def setRowList(self, rowList):
        self._rowList = rowList

    def setAttributeNameList(self, attributeNameList):
        self._attributeNameList = attributeNameList
        self.__setup()

    def setName(self, name):
        self._name = name

    def get(self):
        """Return the (name, attributeNameList, rowList) triple."""
        return (self._name, self._attributeNameList, self._rowList)
class DataCategory(DataCategoryBase):
""" Methods for creating, accessing, and formatting PDBx cif data categories.
"""
    def __init__(self, name, attributeNameList=None, rowList=None):
        super(DataCategory, self).__init__(name, attributeNameList, rowList)
        #
        self.__lfh = sys.stdout

        # Cursor state used by iteration/diagnostic helpers.
        self.__currentRowIndex = 0
        self.__currentAttribute = None
        #
        self.__avoidEmbeddedQuoting = False
        #
        # --------------------------------------------------------------------
        # Pre-compiled patterns used when classifying and quoting values.
        # any whitespace
        self.__wsRe = re.compile(r"\s")
        self.__wsAndQuotesRe = re.compile(r"[\s'\"]")
        # any newline or carriage control
        self.__nlRe = re.compile(r"[\n\r]")
        #
        # single quote
        self.__sqRe = re.compile(r"[']")
        #
        self.__sqWsRe = re.compile(r"('\s)|(\s')")

        # double quote
        self.__dqRe = re.compile(r'["]')
        self.__dqWsRe = re.compile(r'("\s)|(\s")')
        #
        self.__intRe = re.compile(r'^[0-9]+$')
        # CIF-style float: optional sign, digits/decimal, optional (esd) and exponent.
        self.__floatRe = re.compile(r'^-?(([0-9]+)[.]?|([0-9]*[.][0-9]+))([(][0-9]+[)])?([eE][+-]?[0-9]+)?$')
        #
        # Parallel tables: detected data type -> output format class (same index).
        self.__dataTypeList = ['DT_NULL_VALUE', 'DT_INTEGER', 'DT_FLOAT', 'DT_UNQUOTED_STRING', 'DT_ITEM_NAME',
                               'DT_DOUBLE_QUOTED_STRING', 'DT_SINGLE_QUOTED_STRING', 'DT_MULTI_LINE_STRING']
        self.__formatTypeList = ['FT_NULL_VALUE', 'FT_NUMBER', 'FT_NUMBER', 'FT_UNQUOTED_STRING',
                                 'FT_QUOTED_STRING', 'FT_QUOTED_STRING', 'FT_QUOTED_STRING', 'FT_MULTI_LINE_STRING']
def __getitem__(self, x):
""" Implements list-type functionality -
Implements op[x] for some special cases -
x=integer - returns the row in category (normal list behavior)
x=string - returns the value of attribute 'x' in first row.
"""
if isinstance(x, int):
#return self._rowList.__getitem__(x)
return self._rowList[x]
elif isinstance(x, str):
try:
#return self._rowList[0][x]
ii=self.getAttributeIndex(x)
return self._rowList[0][ii]
except (IndexError, KeyError) as e:
raise KeyError(str(e))
assert False, "Should not be here"
    def getCurrentAttribute(self):
        # Attribute name most recently visited by the cursor helpers (or None).
        return self.__currentAttribute
    def getRowIndex(self):
        # Current row cursor position (maintained by row-removal and iteration).
        return self.__currentRowIndex
    def getRowList(self):
        # Returns the live row list (not a copy); mutations affect the category.
        return self._rowList
    def getRowCount(self):
        # Number of rows currently stored.
        return (len(self._rowList))
def getRow(self,index):
try:
return self._rowList[index]
except:
return []
    def removeRow(self,index):
        """Delete the row at *index*; return True on success, False otherwise.

        Keeps the current-row cursor within bounds after the deletion.
        """
        try:
            if ((index >= 0) and (index < len(self._rowList))):
                del self._rowList[index]
                # clamp the cursor so it never points past the last row
                if self.__currentRowIndex >= len(self._rowList):
                    self.__currentRowIndex = len(self._rowList) -1
                return True
            else:
                pass
        except:
            pass
        return False
    def getFullRow(self,index):
        """ Return a full row based on the length of the attribute list.

            Short rows are padded in place with '?'; on any failure a
            throwaway row of '?' placeholders is returned instead.
        """
        try:
            if (len(self._rowList[index]) < self._numAttributes):
                for ii in range( self._numAttributes-len(self._rowList[index])):
                    self._rowList[index].append('?')
            return self._rowList[index]
        except:
            return ['?' for ii in range(self._numAttributes)]
    def getName(self):
        """Return the category name."""
        return self._name
    def getAttributeList(self):
        """Return the attribute (column) name list (not a copy)."""
        return self._attributeNameList
    def getAttributeCount(self):
        """Return the number of attributes (columns)."""
        return len(self._attributeNameList)
def getAttributeListWithOrder(self):
oL=[]
for ii,att in enumerate(self._attributeNameList):
oL.append((att,ii))
return oL
    def getAttributeIndex(self,attributeName):
        """Return the column index of *attributeName*, or -1 if not present."""
        try:
            return self._attributeNameList.index(attributeName)
        except:
            return -1
    def hasAttribute(self,attributeName):
        """Return True if *attributeName* is a column of this category."""
        return attributeName in self._attributeNameList
    def getIndex(self,attributeName):
        """Return the column index of *attributeName*, or -1 if not present.

        NOTE: duplicate of getAttributeIndex(); kept for API compatibility.
        """
        try:
            return self._attributeNameList.index(attributeName)
        except:
            return -1
def getItemNameList(self):
itemNameList=[]
for att in self._attributeNameList:
itemNameList.append("_"+self._name+"."+att)
return itemNameList
    def append(self,row):
        """Append *row* (a list of values) without validating its length."""
        #self.__lfh.write("PdbxContainer(append) category %s row %r\n" % (self._name,row))
        self._rowList.append(row)
    def appendAttribute(self,attributeName):
        """Register *attributeName* as a column.

        Matching is case-insensitive via the catalog: re-adding an existing
        attribute only updates its stored spelling; existing rows are NOT
        padded (see appendAttributeExtendRows for that).
        """
        attributeNameLC = attributeName.lower()
        if attributeNameLC in self._catalog:
            # already present (case-insensitively): refresh the spelling in place
            i = self._attributeNameList.index(self._catalog[attributeNameLC])
            self._attributeNameList[i] = attributeName
            self._catalog[attributeNameLC] = attributeName
            #self.__lfh.write("Appending existing attribute %s\n" % attributeName)
        else:
            #self.__lfh.write("Appending existing attribute %s\n" % attributeName)
            self._attributeNameList.append(attributeName)
            self._catalog[attributeNameLC] = attributeName
        #
        self._numAttributes = len(self._attributeNameList)
    def appendAttributeExtendRows(self,attributeName):
        """Register *attributeName* as a column and pad every existing row
        with a '?' placeholder for the new column.

        Case-insensitive like appendAttribute(); note the padding happens
        only on the brand-new-attribute branch.
        """
        attributeNameLC = attributeName.lower()
        if attributeNameLC in self._catalog:
            # already present (case-insensitively): refresh the spelling in place
            i = self._attributeNameList.index(self._catalog[attributeNameLC])
            self._attributeNameList[i] = attributeName
            self._catalog[attributeNameLC] = attributeName
            self.__lfh.write("Appending existing attribute %s\n" % attributeName)
        else:
            self._attributeNameList.append(attributeName)
            self._catalog[attributeNameLC] = attributeName
            # add a placeholder to any existing rows for the new attribute.
            if (len(self._rowList) > 0):
                for row in self._rowList:
                    row.append("?")
        #
        self._numAttributes = len(self._attributeNameList)
    def getValue(self,attributeName=None,rowIndex=None):
        """Return the value at (*rowIndex*, *attributeName*).

        Either argument defaults to the current cursor position.  Raises
        IndexError for a bad row index or bad argument types.  NOTE(review):
        an unknown attribute name propagates the raw ValueError from
        list.index() - confirm callers expect that.
        """
        if attributeName is None:
            attribute = self.__currentAttribute
        else:
            attribute = attributeName
        if rowIndex is None:
            rowI = self.__currentRowIndex
        else:
            rowI =rowIndex
        if isinstance(attribute, str) and isinstance(rowI,int):
            try:
                return self._rowList[rowI][self._attributeNameList.index(attribute)]
            except (IndexError):
                raise IndexError
        # reached when attribute/rowI have the wrong types
        raise IndexError(str(attribute))
    def setValue(self,value,attributeName=None,rowIndex=None):
        """Set the value at (*rowIndex*, *attributeName*), defaulting to the
        current cursor position.

        Missing rows are appended as empty placeholder rows; a short target
        row is extended with None padding.  Index/value errors are logged to
        the diagnostic stream rather than raised.
        """
        if attributeName is None:
            attribute=self.__currentAttribute
        else:
            attribute=attributeName
        if rowIndex is None:
            rowI = self.__currentRowIndex
        else:
            rowI = rowIndex
        if isinstance(attribute, str) and isinstance(rowI,int):
            try:
                # if row index is out of range - add the rows -
                for ii in range(rowI+1 - len(self._rowList)):
                    self._rowList.append(self.__emptyRow())
                # self._rowList[rowI][attribute]=value
                ll=len(self._rowList[rowI])
                ind=self._attributeNameList.index(attribute)
                # extend the list if needed -
                # NOTE(review): this grows the row to length 2*ind, which only
                # covers index `ind` when ind >= 1 - confirm intended.
                if ( ind >= ll):
                    self._rowList[rowI].extend([None for ii in range(2*ind -ll)])
                self._rowList[rowI][ind]=value
            except (IndexError):
                self.__lfh.write("DataCategory(setvalue) index error category %s attribute %s index %d value %r\n" %
                                 (self._name,attribute,rowI,value))
                traceback.print_exc(file=self.__lfh)
                #raise IndexError
            except (ValueError):
                self.__lfh.write("DataCategory(setvalue) value error category %s attribute %s index %d value %r\n" %
                                 (self._name,attribute,rowI,value))
                traceback.print_exc(file=self.__lfh)
                #raise ValueError
    def __emptyRow(self):
        # Placeholder row: one None per currently defined attribute.
        return [None for ii in range(len(self._attributeNameList))]
def replaceValue(self,oldValue,newValue,attributeName):
numReplace=0
if attributeName not in self._attributeNameList:
return numReplace
ind=self._attributeNameList.index(attributeName)
for row in self._rowList:
if row[ind] == oldValue:
row[ind]=newValue
numReplace += 1
return numReplace
def replaceSubstring(self,oldValue,newValue,attributeName):
ok=False
if attributeName not in self._attributeNameList:
return ok
ind=self._attributeNameList.index(attributeName)
for row in self._rowList:
val=row[ind]
row[ind]=val.replace(oldValue,newValue)
if val != row[ind]:
ok=True
return ok
    def invokeAttributeMethod(self,attributeName,type,method,db):
        """Execute a dictionary-defined inline method once per row for
        *attributeName*, creating the column (and a first row) if needed.

        WARNING: the method body is run via exec(); it sees this scope's
        locals (self, db, ...) and typically drives setValue() through the
        row cursor, which is advanced after each row.
        """
        self.__currentRowIndex = 0
        self.__currentAttribute=attributeName
        self.appendAttribute(attributeName)
        #
        ind=self._attributeNameList.index(attributeName)
        if len(self._rowList) == 0:
            # seed an oversized placeholder row so the column index is valid
            row=[None for ii in range(len(self._attributeNameList)*2)]
            row[ind]=None
            self._rowList.append(row)
        for row in self._rowList:
            ll = len(row)
            # pad short rows so row[ind] is addressable
            if (ind >= ll):
                row.extend([None for ii in range(2*ind-ll)])
                row[ind]=None
            exec(method.getInline())
            self.__currentRowIndex+=1
    def invokeCategoryMethod(self,type,method,db):
        """Execute a dictionary-defined inline method once for the whole
        category (via exec(), with this scope's locals visible)."""
        self.__currentRowIndex = 0
        exec(method.getInline())
def getAttributeLengthMaximumList(self):
mList=[0 for i in len(self._attributeNameList)]
for row in self._rowList:
for indx,val in enumerate(row):
mList[indx] = max(mList[indx],len(val))
return mList
    def renameAttribute(self,curAttributeName,newAttributeName):
        """ Change the name of an attribute in place -

            Updates both the column list and the lowercase catalog.
            Returns True on success, False if the current name is unknown.
        """
        try:
            i=self._attributeNameList.index(curAttributeName)
            self._attributeNameList[i]=newAttributeName
            del self._catalog[curAttributeName.lower()]
            self._catalog[newAttributeName.lower()]=newAttributeName
            return True
        except:
            return False
    def printIt(self,fh=sys.stdout):
        """Write a short human-readable summary (attributes plus the first
        two rows, values truncated to 30 chars) to *fh*."""
        fh.write("--------------------------------------------\n")
        fh.write("  Category: %s attribute list length: %d\n" %
                 (self._name,len(self._attributeNameList)))
        for at in self._attributeNameList:
            fh.write("  Category: %s attribute: %s\n" % (self._name,at))
        fh.write("  Row value list length: %d\n" % len(self._rowList))
        #
        # show at most the first two rows
        for row in self._rowList[:2]:
            #
            if len(row) == len(self._attributeNameList):
                for ii,v in enumerate(row):
                    fh.write("        %30s: %s ...\n" % (self._attributeNameList[ii],str(v)[:30]))
            else:
                fh.write("+WARNING - %s data length %d attribute name length %s mismatched\n" %
                         (self._name,len(row),len(self._attributeNameList)))
    def dumpIt(self,fh=sys.stdout):
        """Write a full dump (every attribute of every row, untruncated) to *fh*."""
        fh.write("--------------------------------------------\n")
        fh.write("  Category: %s attribute list length: %d\n" %
                 (self._name,len(self._attributeNameList)))
        for at in self._attributeNameList:
            fh.write("  Category: %s attribute: %s\n" % (self._name,at))
        fh.write("  Value list length: %d\n" % len(self._rowList))
        for row in self._rowList:
            for ii,v in enumerate(row):
                fh.write("        %30s: %s\n" % (self._attributeNameList[ii],v))
    def __formatPdbx(self, inp):
        """ Format input data following PDBx quoting rules -

            Returns (tokenList, dataType); callers join tokenList for output.
            NOTE(review): the None branch returns a bare string "?" rather
            than a list - "".join("?") still yields "?", so callers work.
        """
        try:
            if (inp is None):
                return ("?",'DT_NULL_VALUE')
            # pure numerical values are returned as unquoted strings
            if (isinstance(inp,int) or self.__intRe.search(str(inp))):
                return ( [str(inp)],'DT_INTEGER')
            if (isinstance(inp,float) or self.__floatRe.search(str(inp))):
                return ([str(inp)],'DT_FLOAT')
            # null value handling -
            if (inp == "." or inp == "?"):
                return ([inp],'DT_NULL_VALUE')
            if (inp == ""):
                return (["."],'DT_NULL_VALUE')
            # Contains white space or quotes ?
            if not self.__wsAndQuotesRe.search(inp):
                # leading underscore marks an item name, which must be quoted
                if inp.startswith("_"):
                    return (self.__doubleQuotedList(inp),'DT_ITEM_NAME')
                else:
                    return ([str(inp)],'DT_UNQUOTED_STRING')
            else:
                # embedded newline forces semicolon-delimited multi-line form
                if self.__nlRe.search(inp):
                    return (self.__semiColonQuotedList(inp),'DT_MULTI_LINE_STRING')
                else:
                    if (self.__avoidEmbeddedQuoting):
                        # change priority to choose double quoting where possible.
                        # NOTE(review): __dataTypePdbx() applies the opposite
                        # quote tests for this same decision - confirm which
                        # combination is intended.
                        if not self.__dqRe.search(inp) and not self.__sqWsRe.search(inp):
                            return (self.__doubleQuotedList(inp),'DT_DOUBLE_QUOTED_STRING')
                        elif not self.__sqRe.search(inp) and not self.__dqWsRe.search(inp):
                            return (self.__singleQuotedList(inp),'DT_SINGLE_QUOTED_STRING')
                        else:
                            return (self.__semiColonQuotedList(inp),'DT_MULTI_LINE_STRING')
                    else:
                        # change priority to choose double quoting where possible.
                        if not self.__dqRe.search(inp):
                            return (self.__doubleQuotedList(inp),'DT_DOUBLE_QUOTED_STRING')
                        elif not self.__sqRe.search(inp):
                            return (self.__singleQuotedList(inp),'DT_SINGLE_QUOTED_STRING')
                        else:
                            return (self.__semiColonQuotedList(inp),'DT_MULTI_LINE_STRING')
        except:
            # log and fall through (implicitly returns None)
            traceback.print_exc(file=self.__lfh)
    def __dataTypePdbx(self, inp):
        """ Detect the PDBx data type -

            Classification mirrors __formatPdbx() but returns only the
            DT_* tag, without building output tokens.
        """
        if (inp is None):
            return ('DT_NULL_VALUE')
        # pure numerical values are returned as unquoted strings
        if isinstance(inp,int) or self.__intRe.search(str(inp)):
            return ('DT_INTEGER')
        if isinstance(inp,float) or self.__floatRe.search(str(inp)):
            return ('DT_FLOAT')
        # null value handling -
        if (inp == "." or inp == "?"):
            return ('DT_NULL_VALUE')
        if (inp == ""):
            return ('DT_NULL_VALUE')
        # Contains white space or quotes ?
        if not self.__wsAndQuotesRe.search(inp):
            if inp.startswith("_"):
                return ('DT_ITEM_NAME')
            else:
                return ('DT_UNQUOTED_STRING')
        else:
            if self.__nlRe.search(inp):
                return ('DT_MULTI_LINE_STRING')
            else:
                if (self.__avoidEmbeddedQuoting):
                    # NOTE(review): these quote tests are the mirror image of
                    # __formatPdbx()'s for the same decision (single-quote test
                    # guarding the double-quoted branch and vice versa) -
                    # confirm which variant is intended before relying on it.
                    if not self.__sqRe.search(inp) and not self.__dqWsRe.search(inp):
                        return ('DT_DOUBLE_QUOTED_STRING')
                    elif not self.__dqRe.search(inp) and not self.__sqWsRe.search(inp):
                        return ('DT_SINGLE_QUOTED_STRING')
                    else:
                        return ('DT_MULTI_LINE_STRING')
                else:
                    if not self.__sqRe.search(inp):
                        return ('DT_DOUBLE_QUOTED_STRING')
                    elif not self.__dqRe.search(inp):
                        return ('DT_SINGLE_QUOTED_STRING')
                    else:
                        return ('DT_MULTI_LINE_STRING')
def __singleQuotedList(self,inp):
l=[]
l.append("'")
l.append(inp)
l.append("'")
return(l)
def __doubleQuotedList(self,inp):
l=[]
l.append('"')
l.append(inp)
l.append('"')
return(l)
def __semiColonQuotedList(self,inp):
l=[]
l.append("\n")
if inp[-1] == '\n':
l.append(";")
l.append(inp)
l.append(";")
l.append("\n")
else:
l.append(";")
l.append(inp)
l.append("\n")
l.append(";")
l.append("\n")
return(l)
    def getValueFormatted(self,attributeName=None,rowIndex=None):
        """Return the value at (*rowIndex*, *attributeName*) formatted per
        PDBx quoting rules, defaulting to the current cursor position.

        Raises IndexError for a bad row index, TypeError for bad argument
        types.
        """
        if attributeName is None:
            attribute=self.__currentAttribute
        else:
            attribute=attributeName
        if rowIndex is None:
            rowI = self.__currentRowIndex
        else:
            rowI = rowIndex
        if isinstance(attribute, str) and isinstance(rowI,int):
            try:
                list,type=self.__formatPdbx(self._rowList[rowI][self._attributeNameList.index(attribute)])
                return "".join(list)
            except (IndexError):
                self.__lfh.write("attributeName %s rowI %r rowdata %r\n" % (attributeName,rowI,self._rowList[rowI]))
                raise IndexError
        raise TypeError(str(attribute))
    def getValueFormattedByIndex(self,attributeIndex,rowIndex):
        """Return the value at (rowIndex, attributeIndex) formatted per PDBx
        quoting rules; re-raises IndexError for out-of-range indices."""
        try:
            list,type=self.__formatPdbx(self._rowList[rowIndex][attributeIndex])
            return "".join(list)
        except (IndexError):
            raise IndexError
def getAttributeValueMaxLengthList(self,steps=1):
mList=[0 for i in range(len(self._attributeNameList))]
for row in self._rowList[::steps]:
for indx in range(len(self._attributeNameList)):
val=row[indx]
mList[indx] = max(mList[indx],len(str(val)))
return mList
    def getFormatTypeList(self,steps=1):
        """Return (formatTypeList, dataTypeList) per column, sampling every
        *steps*-th row; a column's type is the strongest one observed.

        NOTE(review): the except handler references loop variables and the
        result lists, which are unbound if the failure happens early - an
        early exception can surface as NameError here.
        """
        try:
            curDataTypeList=['DT_NULL_VALUE' for i in range(len(self._attributeNameList))]
            for row in self._rowList[::steps]:
                for indx in range(len(self._attributeNameList)):
                    val=row[indx]
                    # print "index ",indx," val ",val
                    dType=self.__dataTypePdbx(val)
                    dIndx=self.__dataTypeList.index(dType)
                    # print "d type", dType, " d type index ",dIndx
                    # keep the strongest (highest-ranked) type seen so far
                    cType=curDataTypeList[indx]
                    cIndx=self.__dataTypeList.index(cType)
                    cIndx= max(cIndx,dIndx)
                    curDataTypeList[indx]=self.__dataTypeList[cIndx]
            # Map the format types to the data types
            curFormatTypeList=[]
            for dt in curDataTypeList:
                ii=self.__dataTypeList.index(dt)
                curFormatTypeList.append(self.__formatTypeList[ii])
        except:
            self.__lfh.write("PdbxDataCategory(getFormatTypeList) ++Index error at index %d in row %r\n" % (indx,row))
        return curFormatTypeList,curDataTypeList
    def getFormatTypeListX(self):
        """Return (formatTypeList, dataTypeList) per column over ALL rows.

        NOTE: unguarded variant of getFormatTypeList(); exceptions propagate.
        """
        curDataTypeList=['DT_NULL_VALUE' for i in range(len(self._attributeNameList))]
        for row in self._rowList:
            for indx in range(len(self._attributeNameList)):
                val=row[indx]
                #print "index ",indx," val ",val
                dType=self.__dataTypePdbx(val)
                dIndx=self.__dataTypeList.index(dType)
                #print "d type", dType, " d type index ",dIndx
                # keep the strongest (highest-ranked) type seen so far
                cType=curDataTypeList[indx]
                cIndx=self.__dataTypeList.index(cType)
                cIndx= max(cIndx,dIndx)
                curDataTypeList[indx]=self.__dataTypeList[cIndx]
        # Map the format types to the data types
        curFormatTypeList=[]
        for dt in curDataTypeList:
            ii=self.__dataTypeList.index(dt)
            curFormatTypeList.append(self.__formatTypeList[ii])
        return curFormatTypeList,curDataTypeList
return curFormatTypeList,curDataTypeList | PypiClean |
/Products.TinyMCE-1.4.3.tar.gz/Products.TinyMCE-1.4.3/Products/TinyMCE/skins/tinymce/plugins/table/langs/pl_dlg.js | tinyMCE.addI18n('pl.table_dlg',{"rules_border":"granica","rules_box":"ramka","rules_vsides":"vsides","rules_rhs":"rhs","rules_lhs":"lhs","rules_hsides":"hsides","rules_below":"pod","rules_above":"nad","rules_void":"void",rules:"Prowadnice","frame_all":"wszystkie","frame_cols":"kolumny","frame_rows":"wiersze","frame_groups":"grupy","frame_none":"brak",frame:"Ramka",caption:"Nag\u0142\u00f3wek tabeli","missing_scope":"Jeste\u015b pewny \u017ce chcesz kontynuowa\u0107 bez definiowania zasi\u0119gu dla kom\u00f3rki tabeli. Bez niej, mo\u017ce by\u0107 trudne dla niekt\u00f3rych u\u017cytkownik\u00f3w zrozuminie zawarto\u015bci albo danych wy\u015bwietlanych poza tabel\u0105.","cell_limit":"Przekroczy\u0142e\u015b maksymaln\u0105 liczb\u0119 kom\u00f3rek kt\u00f3ra wynosi {$cells}.","row_limit":"Przekroczy\u0142e\u015b maksymaln\u0105 liczb\u0119 wierszy kt\u00f3ra wynosi {$rows}.","col_limit":"Przekroczy\u0142e\u015b maksymaln\u0105 liczb\u0119 kolumn kt\u00f3ra wynosi {$cols}.",colgroup:"Grupa kolumn",rowgroup:"Grupa wierszy",scope:"Zakres",tfoot:"Stopka tabeli",tbody:"Cia\u0142o tabeli",thead:"Nag\u0142\u00f3wek tabeli","row_all":"Zmie\u0144 wszystkie wiersze","row_even":"Zmie\u0144 parzyste wiersze","row_odd":"Zmie\u0144 nieparzyste wiersze","row_row":"Zmie\u0144 aktualny wiersz","cell_all":"Zmie\u0144 wszytkie kom\u00f3rki w tabeli","cell_row":"Zmie\u0144 wszytkie kom\u00f3rki w wierszu","cell_cell":"Zmie\u0144 aktualn\u0105 kom\u00f3rk\u0119",th:"Nag\u0142owek",td:"Dane",summary:"Podsumowanie",bgimage:"Obrazek t\u0142a",rtl:"Kierunek z prawej do lewej",ltr:"Kierunek z lewej do prawej",mime:"Docelowy typ MIME",langcode:"Kod j\u0119zyka",langdir:"Kierunek czytania tekstu",style:"Styl",id:"Id","merge_cells_title":"Po\u0142\u0105cz kom\u00f3rki",bgcolor:"Kolor t\u0142a",bordercolor:"Kolor 
ramki","align_bottom":"D\u00f3\u0142","align_top":"G\u00f3ra",valign:"Pionowe wyr\u00f3wnanie","cell_type":"Typ kom\u00f3rki","cell_title":"W\u0142a\u015bciwo\u015bci kom\u00f3rki","row_title":"W\u0142a\u015bciwo\u015bci wiersza","align_middle":"\u015arodek","align_right":"Prawy","align_left":"Lewy","align_default":"Domy\u015blnie",align:"Wyr\u00f3wnanie",border:"Ramka",cellpadding:"Cellpadding",cellspacing:"Cellspacing",rows:"Wiersze",cols:"Kolumny",height:"Wysoko\u015b\u0107",width:"Szeroko\u015b\u0107",title:"Wklej/Zmie\u0144 tabel\u0119",rowtype:"Wiersz w cz\u0119\u015bci tabeli","advanced_props":"Zaawansowane w\u0142a\u015bciwo\u015bci","general_props":"G\u0142\u00f3wne w\u0142a\u015bciwo\u015bci","advanced_tab":"Zaawansowane","general_tab":"G\u0142\u00f3wne","cell_col":"Zaktualizuj wszystkie kom\u00f3rki w kolumnie"}); | PypiClean |
/ais_dom-2023.7.2-py3-none-any.whl/homeassistant/components/androidtv/diagnostics.py | from __future__ import annotations
from typing import Any
import attr
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_CONNECTIONS, ATTR_IDENTIFIERS, CONF_UNIQUE_ID
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import ANDROID_DEV, DOMAIN, PROP_ETHMAC, PROP_SERIALNO, PROP_WIFIMAC
TO_REDACT = {CONF_UNIQUE_ID} # UniqueID contain MAC Address
TO_REDACT_DEV = {ATTR_CONNECTIONS, ATTR_IDENTIFIERS}
TO_REDACT_DEV_PROP = {PROP_ETHMAC, PROP_SERIALNO, PROP_WIFIMAC}
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, dict[str, Any]]:
    """Return diagnostics for a config entry.

    Collects the (redacted) entry data, the AndroidTV device properties, and
    the Home Assistant device/entity registry view of this device.
    """
    # Redact the unique_id: it embeds the device MAC address.
    data = {"entry": async_redact_data(entry.as_dict(), TO_REDACT)}
    hass_data = hass.data[DOMAIN][entry.entry_id]
    # Get information from AndroidTV library
    aftv = hass_data[ANDROID_DEV]
    data["device_properties"] = {
        **async_redact_data(aftv.device_properties, TO_REDACT_DEV_PROP),
        "device_class": aftv.DEVICE_CLASS,
    }
    # Gather information how this AndroidTV device is represented in Home Assistant
    device_registry = dr.async_get(hass)
    entity_registry = er.async_get(hass)
    hass_device = device_registry.async_get_device(
        identifiers={(DOMAIN, str(entry.unique_id))}
    )
    if not hass_device:
        # Device not (yet) registered: return what we have so far.
        return data
    data["device"] = {
        **async_redact_data(attr.asdict(hass_device), TO_REDACT_DEV),
        "entities": {},
    }
    hass_entities = er.async_entries_for_device(
        entity_registry,
        device_id=hass_device.id,
        include_disabled_entities=True,
    )
    for entity_entry in hass_entities:
        state = hass.states.get(entity_entry.entity_id)
        state_dict = None
        if state:
            state_dict = dict(state.as_dict())
            # The entity_id is already provided at root level.
            state_dict.pop("entity_id", None)
            # The context doesn't provide useful information in this case.
            state_dict.pop("context", None)
        data["device"]["entities"][entity_entry.entity_id] = {
            **async_redact_data(
                attr.asdict(
                    entity_entry, filter=lambda attr, value: attr.name != "entity_id"
                ),
                TO_REDACT,
            ),
            "state": state_dict,
        }
    return data
/lattice_agent-0.1.2-py3-none-any.whl/lattice/elastic/events/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
"""
Module contains events processing mechanisms that are integrated with the standard python logging.
Example of usage:
::
from torch.distributed.elastic import events
event = events.Event(name="test_event", source=events.EventSource.WORKER, metadata={...})
events.get_logging_handler(destination="console").info(event)
"""
import inspect
import logging
import os
import socket
import traceback
from enum import Enum
from typing import Dict, Optional
from lattice.elastic.events.handlers import get_logging_handler
from .api import ( # noqa: F401
Event,
EventMetadataValue,
EventSource,
NodeState,
RdzvEvent,
)
_events_loggers: Dict[str, logging.Logger] = {}
def _get_or_create_logger(destination: str = "null") -> logging.Logger:
    """
    Constructs python logger based on the destination type or extends if provided.
    Available destination could be found in ``handlers.py`` file.
    The constructed logger does not propagate messages to the upper level loggers,
    e.g. root logger. This makes sure that a single event can be processed once.
    Args:
        destination: The string representation of the event handler.
            Available handlers found in ``handlers`` module
    """
    global _events_loggers
    # one cached logger per destination; created lazily on first use
    if destination not in _events_loggers:
        _events_logger = logging.getLogger(f"torchelastic-events-{destination}")
        _events_logger.setLevel(os.environ.get("LOGLEVEL", "INFO"))
        # Do not propagate message to the root logger
        _events_logger.propagate = False
        logging_handler = get_logging_handler(destination)
        _events_logger.addHandler(logging_handler)
        # Add the logger to the global dictionary
        _events_loggers[destination] = _events_logger
    return _events_loggers[destination]
def record(event: Event, destination: str = "null") -> None:
    """Serialize *event* and emit it through the logger for *destination*."""
    _get_or_create_logger(destination).info(event.serialize())
def record_rdzv_event(event: RdzvEvent) -> None:
    """Serialize a rendezvous *event* and emit it via the dedicated handler."""
    _get_or_create_logger("dynamic_rendezvous").info(event.serialize())
def construct_and_record_rdzv_event(
    run_id: str,
    message: str,
    node_state: NodeState,
    name: str = "",
    hostname: str = "",
    pid: Optional[int] = None,
    master_endpoint: str = "",
    local_id: Optional[int] = None,
    rank: Optional[int] = None,
) -> None:
    """Build an RdzvEvent from the caller's context and record it.

    Returns immediately when the rendezvous handler is a NullHandler.
    Hostname/pid default to the current process; the event name is derived
    from the calling frame when *name* is empty.
    """
    # We don't want to perform an extra computation if not needed.
    if isinstance(get_logging_handler("dynamic_rendezvous"), logging.NullHandler):
        return
    # Set up parameters.
    if not hostname:
        hostname = socket.getfqdn()
    if not pid:
        pid = os.getpid()
    # Determines which file called this function.
    callstack = inspect.stack()
    filename = "no_file"
    if len(callstack) > 1:
        stack_depth_1 = callstack[1]
        filename = os.path.basename(stack_depth_1.filename)
        if not name:
            name = stack_depth_1.function
    # Delete the callstack variable. If kept, this can mess with python's
    # garbage collector as we are holding on to stack frame information in
    # the inspect module.
    del callstack
    # Set up error trace if this is an exception
    if node_state == NodeState.FAILED:
        error_trace = traceback.format_exc()
    else:
        error_trace = ""
    # Initialize event object
    event = RdzvEvent(
        # Bug fix: `filename` was computed above but never used - the event
        # name hardcoded the literal "(unknown)". Use the caller's file name.
        name=f"{filename}:{name}",
        run_id=run_id,
        message=message,
        hostname=hostname,
        pid=pid,
        node_state=node_state,
        master_endpoint=master_endpoint,
        rank=rank,
        local_id=local_id,
        error_trace=error_trace,
    )
    # Finally, record the event.
    record_rdzv_event(event)
/ocs_sample_library_hub-0.1.20.tar.gz/ocs_sample_library_hub-0.1.20/ocs_sample_library_preview/SDS/SdsStreamViewProperty.py | import json.decoder
class SdsStreamViewProperty(object):
    """A single property mapping within an Sds stream view.

    Holds a required SourceId plus optional TargetId, Mode and nested
    SdsStreamView, with dict/JSON (de)serialization helpers.  Optional
    attributes are only emitted by toDictionary() once they have been set.
    """

    @property
    def SourceId(self):
        """Id of the source property (required)."""
        return self.__sourceId

    @SourceId.setter
    def SourceId(self, value):
        self.__sourceId = value

    @property
    def TargetId(self):
        """Id of the target property (required)."""
        return self.__targetId

    @TargetId.setter
    def TargetId(self, value):
        self.__targetId = value

    @property
    def SdsStreamView(self):
        """Nested SdsStreamView (optional)."""
        return self.__sdsStreamView

    @SdsStreamView.setter
    def SdsStreamView(self, value):
        self.__sdsStreamView = value

    @property
    def Mode(self):
        """Mapping mode (optional)."""
        return self.__mode

    @Mode.setter
    def Mode(self, value):
        self.__mode = value

    def toJson(self):
        """Serialize this mapping to a JSON string."""
        return json.dumps(self.toDictionary())

    def toDictionary(self):
        """Serialize to a plain dict; attributes never assigned are omitted."""
        # SourceId is required: accessing it unset raises AttributeError.
        result = {'SourceId': self.SourceId}
        if hasattr(self, 'TargetId'):
            result['TargetId'] = self.TargetId
        if hasattr(self, 'Mode'):
            result['Mode'] = self.Mode
        if hasattr(self, 'SdsStreamView'):
            from .SdsStreamView import SdsStreamView
            result['SdsStreamView'] = self.SdsStreamView.toDictionary()
        return result

    @staticmethod
    def fromJson(jsonObj):
        """Build an instance from an already-decoded JSON object (a dict)."""
        return SdsStreamViewProperty.fromDictionary(jsonObj)

    @staticmethod
    def fromDictionary(content):
        """Build an instance from a dict; missing keys are simply left unset."""
        prop = SdsStreamViewProperty()
        if not content:
            return prop
        for key in ('SourceId', 'TargetId', 'Mode'):
            if key in content:
                setattr(prop, key, content[key])
        if 'SdsStreamView' in content:
            from .SdsStreamView import SdsStreamView
            prop.SdsStreamView = SdsStreamView.fromDictionary(
                content['SdsStreamView'])
        return prop
/py-openapi-schema-to-json-schema-0.0.3.tar.gz/py-openapi-schema-to-json-schema-0.0.3/openapi_schema_to_json_schema/to_jsonschema.py | import json
class InvalidTypeError(ValueError):
    """Raised when a schema declares a `type` outside the valid OpenAPI set."""

    def __init__(self, msg):
        super().__init__(msg)
def _prepare(schema, options=None):
    """Fill in default conversion options (mutating *options* in place) and,
    unless disabled, deep-copy *schema* via a JSON round-trip.

    Returns the (schema, options) pair used by convert()/convertDoc().
    """
    not_supported = [
        'nullable', 'discriminator', 'readOnly',
        'writeOnly', 'xml', 'externalDocs',
        'example', 'deprecated',
    ]

    options = options or {}
    options.setdefault('dateToDateTime', False)
    options.setdefault('cloneSchema', True)
    options.setdefault('supportPatternProperties', False)
    options.setdefault('keepNotSupported', [])

    # fall back to the default handler unless a callable was supplied
    if not callable(options.get('patternPropertiesHandler')):
        options['patternPropertiesHandler'] = patternPropertiesHandler

    props_to_remove = []
    if options.get('removeReadOnly'):
        props_to_remove.append('readOnly')
    if options.get('removeWriteOnly'):
        props_to_remove.append('writeOnly')
    options['_removeProps'] = props_to_remove

    options['_structs'] = [
        'allOf', 'anyOf', 'oneOf', 'not', 'items', 'additionalProperties',
    ]
    options['_notSupported'] = resolveNotSupported(
        not_supported, options['keepNotSupported'],
    )

    if options['cloneSchema']:
        # JSON round-trip: cheap deep copy for JSON-compatible schemas
        schema = json.loads(json.dumps(schema))

    return schema, options
def convert(schema, options=None):
    """Convert a single OpenAPI schema object to a JSON Schema draft-04 schema."""
    schema, options = _prepare(schema, options)
    converted = convertSchema(schema, options)
    converted['$schema'] = 'http://json-schema.org/draft-04/schema#'
    return converted
def _recurse(tree, options):
    """Walk a document tree in place, converting every dict stored under a
    'schema' key; lists are scanned one level deep for dict members."""
    for key, subtree in list(tree.items()):
        if isinstance(subtree, dict):
            if key == 'schema':
                tree[key] = convertSchema(subtree, options)
            else:
                tree[key] = _recurse(subtree, options)
        elif isinstance(subtree, list):
            tree[key] = [
                _recurse(member, options) if isinstance(member, dict) else member
                for member in subtree
            ]
    return tree
def convertDoc(doc, options):
    """Convert a whole OpenAPI document: every schema under
    components.schemas plus every 'schema' object reachable under paths."""
    doc, options = _prepare(doc, options)
    components_schemas = doc.get('components', {}).get('schemas')
    if components_schemas:
        for name, struct in list(components_schemas.items()):
            components_schemas[name] = convertSchema(struct, options)
    paths = doc.get('paths')
    if paths:
        doc['paths'] = dict((path, _recurse(tree, options))
                            for path, tree in paths.items())
    doc['$schema'] = 'http://json-schema.org/draft-04/schema#'
    return doc
def convertSchema(schema, options):
    """Recursively convert one schema node: descend into structural keywords
    and properties, translate types, handle x-patternProperties, and strip
    keywords JSON Schema draft-04 does not support."""
    structs = options['_structs']
    notSupported = options['_notSupported']
    # recurse into structural keywords (allOf/anyOf/items/...), list or dict valued
    for i, struct in enumerate(structs):
        if isinstance(schema.get(struct), list):
            for j in range(len(schema[struct])):
                schema[struct][j] = convertSchema(schema[struct][j], options)
        elif isinstance(schema.get(struct), dict):
            schema[struct] = convertSchema(schema[struct], options)
    if isinstance(schema.get('properties'), dict):
        schema['properties'] = convertProperties(schema['properties'], options)
        if isinstance(schema.get('required'), list):
            # drop `required` names whose property was removed above
            schema['required'] = cleanRequired(schema['required'],
                                               schema['properties'])
            if len(schema['required']) == 0:
                del schema['required']
        if len(schema['properties']) == 0:
            del schema['properties']
    validateType(schema.get('type'))
    schema = convertTypes(schema, options)
    if (isinstance(schema.get('x-patternProperties'), dict)
            and options['supportPatternProperties']):
        schema = convertPatternProperties(schema,
                                          options['patternPropertiesHandler'])
    # finally strip OpenAPI-only keywords the caller did not ask to keep
    for unsupported in notSupported:
        try:
            del schema[unsupported]
        except KeyError:
            pass
    return schema
def convertProperties(properties, options):
    """Convert each property schema, dropping properties whose removal flags
    (readOnly/writeOnly, per options['_removeProps']) are set to True."""
    converted = {}
    for name, prop_schema in properties.items():
        if any(prop_schema.get(flag) is True for flag in options['_removeProps']):
            continue
        converted[name] = convertSchema(prop_schema, options)
    return converted
def validateType(ttype):
    """Raise InvalidTypeError unless *ttype* is None or a valid OpenAPI type."""
    valid_types = ('integer', 'number', 'string', 'boolean', 'object', 'array')
    if ttype is not None and ttype not in valid_types:
        raise InvalidTypeError('Type "%s" is not a valid type' % ttype)
def convertTypes(schema, options):
    """Translate OpenAPI `type`/`nullable`/`format` into JSON Schema terms."""
    to_date_time = options['dateToDateTime']

    schema_type = schema.get('type')
    if schema_type is None:
        # typeless but nullable: widen any oneOf/anyOf with a null branch
        # https://github.com/pglass/py-openapi-schema-to-json-schema/issues/10
        if schema.get('nullable') is True:
            for keyword in ('oneOf', 'anyOf'):
                if keyword in schema:
                    schema[keyword].append({'type': 'null'})
        return schema

    if schema_type == 'string' and schema.get('format') == 'date' and to_date_time:
        schema['format'] = 'date-time'

    # drop an empty/falsy format rather than emit it
    if not schema.get('format'):
        schema.pop('format', None)

    if schema.get('nullable') is True:
        schema['type'] = [schema_type, 'null']

    return schema
def convertPatternProperties(schema, handler):
    """Rename x-patternProperties to patternProperties, then apply *handler*."""
    schema['patternProperties'] = schema.pop('x-patternProperties')
    return handler(schema)
def patternPropertiesHandler(schema):
    """Default patternProperties handler: if additionalProperties duplicates
    one of the patternProperties values, replace it with False (the pattern
    already permits those properties)."""
    additional = schema.get('additionalProperties')
    if isinstance(additional, dict):
        patterns = schema['patternProperties']
        if any(value == additional for value in patterns.values()):
            schema['additionalProperties'] = False
    return schema
def resolveNotSupported(notSupported, toRetain):
    """Return *notSupported* minus any keywords the caller wants to retain."""
    retained = set(toRetain)
    return [keyword for keyword in notSupported if keyword not in retained]
def cleanRequired(required, properties):
    """Keep only required names that still exist in *properties*; both
    arguments tolerate None."""
    required = required or []
    properties = properties or {}
    return [name for name in required if properties.get(name) is not None]
/odoo14_addon_l10n_br_nfe-14.0.2.2.1-py3-none-any.whl/odoo/addons/l10n_br_nfe/models/res_company.py |
from odoo import api, fields
from odoo.addons.spec_driven_model.models import spec_models
from ..constants.nfe import (
NFCE_DANFE_LAYOUT_DEFAULT,
NFCE_DANFE_LAYOUTS,
NFE_DANFE_LAYOUT_DEFAULT,
NFE_DANFE_LAYOUTS,
NFE_ENVIRONMENT_DEFAULT,
NFE_ENVIRONMENTS,
NFE_TRANSMISSION_DEFAULT,
NFE_TRANSMISSIONS,
NFE_VERSION_DEFAULT,
NFE_VERSIONS,
)
PROCESSADOR_ERPBRASIL_EDOC = "oca"
PROCESSADOR = [(PROCESSADOR_ERPBRASIL_EDOC, "erpbrasil.edoc")]
class ResCompany(spec_models.SpecModel):
    """Extend res.company with the Brazilian NF-e 4.0 emitter (emit) mapping
    and NF-e/NFC-e transmission configuration."""
    _name = "res.company"
    _inherit = ["res.company", "nfe.40.emit"]
    _nfe_search_keys = ["nfe40_CNPJ", "nfe40_xNome", "nfe40_xFant"]
    def _compute_nfe_data(self):
        """Fill nfe40_CNPJ/nfe40_CPF and the CNPJ-vs-CPF selector from the
        company partner."""
        # compute because a simple related field makes the match_record fail
        for rec in self:
            if rec.partner_id.is_company:
                rec.nfe40_choice6 = "nfe40_CNPJ"
                rec.nfe40_CNPJ = rec.partner_id.cnpj_cpf
            else:
                # nfe40_CPF is presumably declared on the nfe.40.emit mixin -
                # TODO confirm, it is not defined in this file.
                rec.nfe40_choice6 = "nfe40_CPF"
                rec.nfe40_CPF = rec.partner_id.cnpj_cpf
    nfe40_CNPJ = fields.Char(compute="_compute_nfe_data")
    nfe40_xNome = fields.Char(related="partner_id.legal_name")
    nfe40_xFant = fields.Char(related="partner_id.name")
    nfe40_IE = fields.Char(related="partner_id.inscr_est")
    nfe40_CRT = fields.Selection(related="tax_framework")
    nfe40_enderEmit = fields.Many2one("res.partner", related="partner_id")
    nfe40_choice6 = fields.Selection(string="CNPJ ou CPF?", compute="_compute_nfe_data")
    processador_edoc = fields.Selection(
        selection_add=PROCESSADOR,
    )
    nfe_version = fields.Selection(
        selection=NFE_VERSIONS,
        string="NFe Version",
        default=NFE_VERSION_DEFAULT,
    )
    nfe_environment = fields.Selection(
        selection=NFE_ENVIRONMENTS,
        string="NFe Environment",
        default=NFE_ENVIRONMENT_DEFAULT,
    )
    nfe_transmission = fields.Selection(
        selection=NFE_TRANSMISSIONS,
        string="Transmission Type",
        default=NFE_TRANSMISSION_DEFAULT,
        help="1=Emissão normal (não em contingência);"
        "\n2=Contingência FS-IA, com impressão do DANFE em Formulário"
        " de Segurança - Impressor Autônomo;"
        "\n3=Contingência SCAN (Sistema de Contingência do Ambiente Nacional);"
        " *Desativado * NT 2015/002"
        "\n4=Contingência EPEC (Evento Prévio da Emissão em Contingência);"
        "\n5=Contingência FS-DA, com impressão do DANFE em Formulário "
        "de Segurança - Documento Auxiliar;"
        "\n6=Contingência SVC-AN (SEFAZ Virtual de Contingência do AN);"
        "\n7=Contingência SVC-RS (SEFAZ Virtual de Contingência do RS);"
        "\n9=Contingência off-line da NFC-e;"
        "\nObservação: Para a NFC-e somente é válida a opção de contingência:"
        "\n9-Contingência Off-Line e, a critério da UF, opção "
        "4-Contingência EPEC. (NT 2015/002)",
    )
    nfe_danfe_layout = fields.Selection(
        selection=NFE_DANFE_LAYOUTS,
        string="NFe Layout",
        default=NFE_DANFE_LAYOUT_DEFAULT,
    )
    nfce_danfe_layout = fields.Selection(
        selection=NFCE_DANFE_LAYOUTS,
        string="NFCe Layout",
        default=NFCE_DANFE_LAYOUT_DEFAULT,
    )
    nfe_default_serie_id = fields.Many2one(
        comodel_name="l10n_br_fiscal.document.serie",
        string="NF-e Default Serie",
    )
    def _build_attr(self, node, fields, vals, path, attr):
        """Skip building a related partner for the enderEmit tag when
        importing an inbound NF-e; otherwise defer to the spec builder."""
        if attr.get_name() == "enderEmit" and self.env.context.get("edoc_type") == "in":
            # we don't want to try build a related partner_id for enderEmit
            # when importing an NFe
            # instead later the emit tag will be imported as the
            # document partner_id (dest) and the enderEmit data will be
            # injected in the same res.partner record.
            return
        return super()._build_attr(node, fields, vals, path, attr)
    @api.model
    def _prepare_import_dict(self, values, model=None):
        """Prepare spec-import values with related-record creation disabled,
        defaulting the company name from the NF-e trade/legal name."""
        # we disable enderEmit related creation with dry_run=True
        context = self._context.copy()
        context["dry_run"] = True
        values = super(ResCompany, self.with_context(context))._prepare_import_dict(
            values, model
        )
        if not values.get("name"):
            values["name"] = values.get("nfe40_xFant") or values.get("nfe40_xNome")
        return values
/dynamo_release-1.3.2-py3-none-any.whl/dynamo/external/hodge.py |
# Code adapted from https://github.com/kazumits/ddhodge.
from typing import Union
import numpy as np
from anndata import AnnData
from scipy.sparse import csr_matrix, issparse
from ..dynamo_logger import main_finish_progress, main_info, main_log_time
# from ..vectorfield.scVectorField import graphize_vecfld
from ..tools.graph_calculus import divergence, graphize_velocity, potential
from ..tools.sampling import sample_by_velocity, trn
from ..vectorfield.utils import vecfld_from_adata, vector_field_function
"""from ..tools.graph_operators import (
build_graph,
div,
potential,
)"""
from ..tools.connectivity import _gen_neighbor_keys, check_and_recompute_neighbors
def ddhodge(
    adata: AnnData,
    X_data: Union[np.ndarray, None] = None,
    layer: Union[str, None] = None,
    basis: str = "pca",
    n: int = 30,
    VecFld: Union[dict, None] = None,
    adjmethod: str = "graphize_vecfld",
    distance_free: bool = False,
    n_downsamples: int = 5000,
    up_sampling: bool = True,
    sampling_method: str = "velocity",
    seed: int = 19491001,
    enforce: bool = False,
    cores: int = 1,
    **kwargs,
):
    """Modeling Latent Flow Structure using Hodge Decomposition based on the creation of sparse diffusion graph from the
    reconstructed vector field function. This method is relevant to the curl-free/divergence-free vector field
    reconstruction.

    Parameters
    ----------
    adata: :class:`~anndata.AnnData`
        an Annodata object.
    X_data:
        The user supplied expression (embedding) data that will be used for graph hodege decomposition directly.
    layer:
        Which layer of the data will be used for graph Hodge decomposition.
    basis:
        Which basis of the data will be used for graph Hodge decomposition.
    n:
        Number of nearest neighbors when the nearest neighbor graph is not included.
    VecFld:
        The reconstructed vector field function.
    adjmethod:
        The method to build the ajacency matrix that will be used to create the sparse diffusion graph, can be
        either "naive" or "graphize_vecfld". If "naive" used, the transition_matrix that created during vector field
        projection will be used; if "graphize_vecfld" used, a method that guarantees the preservance of divergence
        will be used.
    n_downsamples:
        Number of cells to downsample to if the cell number is large than this value. Three downsampling methods are
        available, see `sampling_method`.
    up_sampling:
        Whether to assign calculated potential, curl and divergence to cells not sampled based on values from their
        nearest sampled cells.
    sampling_method:
        Methods to downsample datasets to facilitate calculation. Can be one of {`random`, `velocity`, `trn`}, each
        corresponds to random sampling, velocity magnitude based and topology representing network based sampling.
    seed:
        Seed for RandomState. Must be convertible to 32 bit unsigned integers. Used in sampling control points.
        Default is to be 0 for ensure consistency between different runs.
    enforce:
        Whether to enforce the calculation of adjacency matrix for estimating potential, curl, divergence for each
        cell.
    cores:
        Number of cores to run the graphize_vecfld function. If cores is set to be > 1, multiprocessing will be used
        to parallel the graphize_vecfld calculation.

    Returns
    -------
    adata: :class:`~anndata.AnnData`
        `AnnData` object that is updated with the `ddhodge` key in the `obsp` attribute which to adjacency matrix
        that corresponds to the sparse diffusion graph. Two columns `potential` and `divergence` corresponds to the
        potential and divergence for each cell will also be added."""
    # NOTE(review): `distance_free` and `cores` are only consumed by the
    # commented-out graphize_vecfld call below; they are currently unused.
    main_log_time()
    prefix = "" if basis is None else basis + "_"
    # Downsample only when the dataset exceeds the requested budget.
    to_downsample = adata.n_obs > n_downsamples

    # Resolve the vector field function: from adata, or wrap the supplied dict.
    if VecFld is None:
        VecFld, func = vecfld_from_adata(adata, basis)
    else:

        def func(x):
            return vector_field_function(x, VecFld)

    # Embedding coordinates: user-supplied X_data takes precedence over the
    # coordinates stored with the vector field.
    if X_data is None:
        X_data_full = VecFld["X"].copy()
    else:
        if X_data.shape[0] != adata.n_obs:
            raise ValueError(f"The X_data you provided doesn't correspond to exactly {adata.n_obs} cells")
        X_data_full = X_data.copy()

    # Select the working subset of cells (all cells when not downsampling).
    if to_downsample:
        if sampling_method == "trn":
            cell_idx = trn(X_data_full, n_downsamples)
        elif sampling_method == "velocity":
            np.random.seed(seed)
            cell_idx = sample_by_velocity(func(X_data_full), n_downsamples)
        elif sampling_method == "random":
            np.random.seed(seed)
            cell_idx = np.random.choice(np.arange(adata.n_obs), n_downsamples)
        else:
            # NOTE(review): a bad argument value should arguably raise
            # ValueError rather than ImportError, and the two adjacent
            # f-string fragments join without a space ("areavailable.") --
            # kept as-is in this documentation-only pass.
            raise ImportError(
                f"sampling method {sampling_method} is not available. Only `random`, `velocity`, `trn` are"
                f"available."
            )
    else:
        cell_idx = np.arange(adata.n_obs)

    X_data = X_data_full[cell_idx, :]
    adata_ = adata[cell_idx].copy()

    # Reuse a cached adjacency matrix when present (full-data runs only).
    if prefix + "ddhodge" in adata_.obsp.keys() and not enforce and not to_downsample:
        main_info("fetch computation results from adata.obsp[%s]..." % (prefix + "ddhodge"))
        adj_mat = adata_.obsp[prefix + "ddhodge"]
    else:
        if adjmethod == "graphize_vecfld":
            main_info("graphizing vectorfield...")
            V_data = func(X_data)
            neighbor_result_prefix = "" if layer is None else layer
            conn_key, dist_key, neighbor_key = _gen_neighbor_keys(neighbor_result_prefix)
            # Reuse precomputed neighbors only when they cover the same cells
            # (i.e. when no downsampling happened).
            if neighbor_key not in adata_.uns_keys() or to_downsample:
                existing_nbrs_idx = None
            else:
                check_and_recompute_neighbors(adata, result_prefix=neighbor_result_prefix)
                neighbors = adata_.obsp[conn_key]
                existing_nbrs_idx = neighbors.tolil().rows
            # Divergence-preserving graph construction from the velocity field.
            adj_mat, nbrs_idx, dists, nbrs = graphize_velocity(
                V_data,
                X_data,
                nbrs_idx=existing_nbrs_idx,
                k=n,
                return_nbrs=True,
            )
            """adj_mat, nbrs = graphize_vecfld(
                func,
                X_data,
                nbrs_idx=existing_nbrs_idx,
                k=n,
                distance_free=distance_free,
                n_int_steps=20,
                cores=cores,
            )"""
        elif adjmethod == "naive":
            main_info(
                'method=naive, get adj_mat from transition matrix in adata directly (adata.uns["transition_matrix"'
            )
            if "transition_matrix" not in adata_.uns.keys():
                raise Exception(
                    "Your adata doesn't have transition matrix created. You need to first "
                    "run dyn.tl.cell_velocity(adata) to get the transition before running"
                    " this function."
                )
            # NOTE(review): `[cell_idx, cell_idx]` is pairwise fancy indexing,
            # which extracts the diagonal entries (i, i); a proper submatrix
            # would need np.ix_(cell_idx, cell_idx) -- confirm intent.
            adj_mat = adata_.uns["transition_matrix"][cell_idx, cell_idx]
        else:
            raise ValueError(f"adjmethod can be only one of {'naive', 'graphize_vecfld'}")
    # TODO transform the type of adj_mat here so that we can maintain one set of API (either sparse or numpy)
    # if not issparse(adj_mat):
    #     main_info("adj_mat:%s is not sparse, transforming it to a sparse matrix..." %(str(type(adj_mat))))
    #     adj_mat = csr_matrix(adj_mat)

    # TODO temp fix; refactor to make adj_mat sparse and adjust all the function call
    if issparse(adj_mat):
        adj_mat = adj_mat.toarray()

    # if not all cells are used in the graphize_vecfld function, set diagonal to be 1
    if len(np.unique(np.hstack(adj_mat.nonzero()))) != adata.n_obs:
        main_info("not all cells are used, set diag to 1...", indent_level=2)
        # temporary fix for github issue #263
        # https://github.com/aristoteleo/dynamo-release/issues/263
        # support numpy and sparse matrices here
        if issparse(adj_mat):
            adj_mat.setdiag(1)
        else:
            np.fill_diagonal(adj_mat, 1)

    # g = build_graph(adj_mat)
    # TODO the following line does not work on sparse matrix.
    # A is the unweighted (0/1) adjacency used as edge weights below.
    A = np.abs(np.sign(adj_mat))

    # Cache the adjacency matrix only for full-data (non-downsampled) runs.
    if (prefix + "ddhodge" not in adata.obsp.keys() or enforce) and not to_downsample:
        adata.obsp[prefix + "ddhodge"] = adj_mat

    # ddhodge_div = div(g)
    # potential_ = potential(g, -ddhodge_div)
    ddhodge_div = divergence(adj_mat, W=A)
    potential_ = potential(adj_mat, W=A, div=ddhodge_div, **kwargs)

    if up_sampling and to_downsample:
        main_info("Constructing W matrix according upsampling=True and downsampling=True options...", indent_level=2)
        query_idx = np.array(list(set(np.arange(adata.n_obs)).difference(cell_idx)))
        query_data = X_data_full[query_idx, :]

        # construct nbrs of query points based on two types of nbrs: from NNDescent (pynndescent) or NearestNeighbors
        # NOTE(review): `nbrs` is only bound in the adjmethod == "graphize_vecfld"
        # branch; reaching here with adjmethod="naive" raises NameError.
        if hasattr(nbrs, "kneighbors"):
            dist, query_nbrs_idx = nbrs.kneighbors(query_data)
        elif hasattr(nbrs, "query"):
            query_nbrs_idx, dist = nbrs.query(query_data, k=nbrs.n_neighbors)

        # Each unsampled cell receives the uniform average of its k sampled
        # neighbors' divergence/potential via the sparse interpolation matrix W.
        k = query_nbrs_idx.shape[1]
        query_W_row = np.repeat(np.arange(len(query_idx)), k)
        query_W_col = query_nbrs_idx.flatten()

        W = csr_matrix(
            (np.repeat(1 / k, len(query_W_row)), (query_W_row, query_W_col)),
            shape=(len(query_idx), len(cell_idx)),
        )

        query_data_div, query_data_potential = (
            W.dot(ddhodge_div),
            W.dot(potential_),
        )
        # NOTE(review): the third column initialized here is prefix+"potential",
        # but all writes below target prefix+"ddhodge_potential" -- the
        # initialization likely uses a stale column name; confirm.
        (adata.obs[prefix + "ddhodge_sampled"], adata.obs[prefix + "ddhodge_div"], adata.obs[prefix + "potential"],) = (
            False,
            0,
            0,
        )
        adata.obs.loc[adata.obs_names[cell_idx], prefix + "ddhodge_sampled"] = True
        adata.obs.loc[adata.obs_names[cell_idx], prefix + "ddhodge_div"] = ddhodge_div
        adata.obs.loc[adata.obs_names[cell_idx], prefix + "ddhodge_potential"] = potential_
        adata.obs.loc[adata.obs_names[query_idx], prefix + "ddhodge_div"] = query_data_div
        adata.obs.loc[adata.obs_names[query_idx], prefix + "ddhodge_potential"] = query_data_potential
    else:
        adata.obs[prefix + "ddhodge_div"] = ddhodge_div
        adata.obs[prefix + "ddhodge_potential"] = potential_

    main_finish_progress("ddhodge completed")
/pybanyan-openapi-client-1.0.0.tar.gz/pybanyan-openapi-client-1.0.0/openapi_client/paths/v1_events_count/get.py | from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from openapi_client import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from openapi_client import schemas # noqa: F401
from . import path
# query params
# One schema alias per supported query-string filter. All are primitive:
# the time bounds are integers (epoch timestamps), everything else a string.
BeforeSchema = schemas.IntSchema
AfterSchema = schemas.IntSchema
TypeSchema = schemas.StrSchema
SubTypeSchema = schemas.StrSchema
UserEmailSchema = schemas.StrSchema
DeviceIdSchema = schemas.StrSchema
SerialnumberSchema = schemas.StrSchema
WorkloadContainerIdSchema = schemas.StrSchema
ServiceNameSchema = schemas.StrSchema
ActionSchema = schemas.StrSchema
IdSchema = schemas.StrSchema
ExternalIdSchema = schemas.StrSchema
SeveritySchema = schemas.StrSchema
ServiceAccountNameSchema = schemas.StrSchema
StartTimeSchema = schemas.IntSchema
EndTimeSchema = schemas.IntSchema
# This endpoint has no required query parameters.
RequestRequiredQueryParams = typing_extensions.TypedDict(
    'RequestRequiredQueryParams',
    {
    }
)
# Every filter is optional (total=False); values may be given either as the
# schema type or as the plain Python primitive.
RequestOptionalQueryParams = typing_extensions.TypedDict(
    'RequestOptionalQueryParams',
    {
        'before': typing.Union[BeforeSchema, decimal.Decimal, int, ],
        'after': typing.Union[AfterSchema, decimal.Decimal, int, ],
        'type': typing.Union[TypeSchema, str, ],
        'sub_type': typing.Union[SubTypeSchema, str, ],
        'user_email': typing.Union[UserEmailSchema, str, ],
        'device_id': typing.Union[DeviceIdSchema, str, ],
        'serialnumber': typing.Union[SerialnumberSchema, str, ],
        'workload_container_id': typing.Union[WorkloadContainerIdSchema, str, ],
        'service_name': typing.Union[ServiceNameSchema, str, ],
        'action': typing.Union[ActionSchema, str, ],
        'id': typing.Union[IdSchema, str, ],
        'external_id': typing.Union[ExternalIdSchema, str, ],
        'severity': typing.Union[SeveritySchema, str, ],
        'service_account_name': typing.Union[ServiceAccountNameSchema, str, ],
        'start_time': typing.Union[StartTimeSchema, decimal.Decimal, int, ],
        'end_time': typing.Union[EndTimeSchema, decimal.Decimal, int, ],
    },
    total=False
)


class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
    # Combined view of required + optional query parameters for this endpoint.
    pass
# One QueryParameter descriptor per filter. All use OpenAPI's default query
# serialization for objects/primitives: style=form with explode=True.
request_query_before = api_client.QueryParameter(
    name="before",
    style=api_client.ParameterStyle.FORM,
    schema=BeforeSchema,
    explode=True,
)
request_query_after = api_client.QueryParameter(
    name="after",
    style=api_client.ParameterStyle.FORM,
    schema=AfterSchema,
    explode=True,
)
request_query_type = api_client.QueryParameter(
    name="type",
    style=api_client.ParameterStyle.FORM,
    schema=TypeSchema,
    explode=True,
)
request_query_sub_type = api_client.QueryParameter(
    name="sub_type",
    style=api_client.ParameterStyle.FORM,
    schema=SubTypeSchema,
    explode=True,
)
request_query_user_email = api_client.QueryParameter(
    name="user_email",
    style=api_client.ParameterStyle.FORM,
    schema=UserEmailSchema,
    explode=True,
)
request_query_device_id = api_client.QueryParameter(
    name="device_id",
    style=api_client.ParameterStyle.FORM,
    schema=DeviceIdSchema,
    explode=True,
)
request_query_serialnumber = api_client.QueryParameter(
    name="serialnumber",
    style=api_client.ParameterStyle.FORM,
    schema=SerialnumberSchema,
    explode=True,
)
request_query_workload_container_id = api_client.QueryParameter(
    name="workload_container_id",
    style=api_client.ParameterStyle.FORM,
    schema=WorkloadContainerIdSchema,
    explode=True,
)
request_query_service_name = api_client.QueryParameter(
    name="service_name",
    style=api_client.ParameterStyle.FORM,
    schema=ServiceNameSchema,
    explode=True,
)
request_query_action = api_client.QueryParameter(
    name="action",
    style=api_client.ParameterStyle.FORM,
    schema=ActionSchema,
    explode=True,
)
request_query_id = api_client.QueryParameter(
    name="id",
    style=api_client.ParameterStyle.FORM,
    schema=IdSchema,
    explode=True,
)
request_query_external_id = api_client.QueryParameter(
    name="external_id",
    style=api_client.ParameterStyle.FORM,
    schema=ExternalIdSchema,
    explode=True,
)
request_query_severity = api_client.QueryParameter(
    name="severity",
    style=api_client.ParameterStyle.FORM,
    schema=SeveritySchema,
    explode=True,
)
request_query_service_account_name = api_client.QueryParameter(
    name="service_account_name",
    style=api_client.ParameterStyle.FORM,
    schema=ServiceAccountNameSchema,
    explode=True,
)
request_query_start_time = api_client.QueryParameter(
    name="start_time",
    style=api_client.ParameterStyle.FORM,
    schema=StartTimeSchema,
    explode=True,
)
request_query_end_time = api_client.QueryParameter(
    name="end_time",
    style=api_client.ParameterStyle.FORM,
    schema=EndTimeSchema,
    explode=True,
)
# Bearer-token authentication is required for this endpoint.
_auth = [
    'bearerAuthToken',
]
class SchemaFor200ResponseBodyApplicationJson(
    schemas.DictSchema
):
    """200 response body: a JSON object with a single integer ``data`` field
    carrying the event count."""


    class MetaOapg:
        # Generated schema metadata: declares the ``data`` property type.

        class properties:
            data = schemas.IntSchema
            __annotations__ = {
                "data": data,
            }

    @typing.overload
    def __getitem__(self, name: typing_extensions.Literal["data"]) -> MetaOapg.properties.data: ...

    @typing.overload
    def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...

    def __getitem__(self, name: typing.Union[typing_extensions.Literal["data", ], str]):
        # dict_instance[name] accessor
        return super().__getitem__(name)

    @typing.overload
    def get_item_oapg(self, name: typing_extensions.Literal["data"]) -> typing.Union[MetaOapg.properties.data, schemas.Unset]: ...

    @typing.overload
    def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...

    def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["data", ], str]):
        # Like __getitem__ but yields schemas.unset for absent keys instead of raising.
        return super().get_item_oapg(name)

    def __new__(
        cls,
        *args: typing.Union[dict, frozendict.frozendict, ],
        data: typing.Union[MetaOapg.properties.data, decimal.Decimal, int, schemas.Unset] = schemas.unset,
        _configuration: typing.Optional[schemas.Configuration] = None,
        **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
    ) -> 'SchemaFor200ResponseBodyApplicationJson':
        # Validating constructor: delegates to DictSchema with the typed properties.
        return super().__new__(
            cls,
            *args,
            data=data,
            _configuration=_configuration,
            **kwargs,
        )
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    # Successful count lookup; ``body`` holds the deserialized JSON payload.
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor200ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor200ResponseBodyApplicationJson),
    },
)


@dataclass
class ApiResponseFor401(api_client.ApiResponse):
    # Unauthorized: the spec defines no body schema for this status.
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset


_response_for_401 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor401,
)


@dataclass
class ApiResponseFor403(api_client.ApiResponse):
    # Forbidden: the spec defines no body schema for this status.
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset


_response_for_403 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor403,
)


@dataclass
class ApiResponseFor500(api_client.ApiResponse):
    # Internal server error: the spec defines no body schema for this status.
    response: urllib3.HTTPResponse
    body: schemas.Unset = schemas.unset
    headers: schemas.Unset = schemas.unset


_response_for_500 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor500,
)


# Maps HTTP status (as a string) to the descriptor used for deserialization.
_status_code_to_response = {
    '200': _response_for_200,
    '401': _response_for_401,
    '403': _response_for_403,
    '500': _response_for_500,
}
# Content types the endpoint can produce; sent in the Accept header by default.
_all_accept_content_types = (
    'application/json',
)
class BaseApi(api_client.Api):
    # Shared implementation of GET /v1/events/count; the public wrapper
    # classes below delegate to _v1_events_count_get_oapg.

    def _v1_events_count_get_oapg(
        self: api_client.Api,
        query_params: RequestQueryParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """
        Get Events Count
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
        used_path = path.value

        # Serialize each supplied query parameter and append it to the path;
        # the prefix/separator iterator yields '?' first, then '&'.
        prefix_separator_iterator = None
        for parameter in (
            request_query_before,
            request_query_after,
            request_query_type,
            request_query_sub_type,
            request_query_user_email,
            request_query_device_id,
            request_query_serialnumber,
            request_query_workload_container_id,
            request_query_service_name,
            request_query_action,
            request_query_id,
            request_query_external_id,
            request_query_severity,
            request_query_service_account_name,
            request_query_start_time,
            request_query_end_time,
        ):
            parameter_data = query_params.get(parameter.name, schemas.unset)
            if parameter_data is schemas.unset:
                continue
            if prefix_separator_iterator is None:
                prefix_separator_iterator = parameter.get_prefix_separator_iterator()
            serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
            for serialized_value in serialized_data.values():
                used_path += serialized_value
        _headers = HTTPHeaderDict()
        # TODO add cookie handling
        if accept_content_types:
            for accept_content_type in accept_content_types:
                _headers.add('Accept', accept_content_type)

        response = self.api_client.call_api(
            resource_path=used_path,
            method='get'.upper(),
            headers=_headers,
            auth_settings=_auth,
            stream=stream,
            timeout=timeout,
        )

        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Deserialize using the per-status descriptor when the status is
            # one the spec knows about; otherwise return the raw response.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)

        # Non-2xx statuses raise, carrying the (possibly deserialized) response.
        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(api_response=api_response)

        return api_response
class V1EventsCountGet(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names

    def v1_events_count_get(
        self: BaseApi,
        query_params: RequestQueryParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """operationId-style alias for GET /v1/events/count."""
        call_kwargs = {
            'query_params': query_params,
            'accept_content_types': accept_content_types,
            'stream': stream,
            'timeout': timeout,
            'skip_deserialization': skip_deserialization,
        }
        return self._v1_events_count_get_oapg(**call_kwargs)
class ApiForget(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names

    def get(
        self: BaseApi,
        query_params: RequestQueryParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """HTTP-verb-style alias for GET /v1/events/count."""
        call_kwargs = {
            'query_params': query_params,
            'accept_content_types': accept_content_types,
            'stream': stream,
            'timeout': timeout,
            'skip_deserialization': skip_deserialization,
        }
        return self._v1_events_count_get_oapg(**call_kwargs)
/memoized-property-1.0.3.tar.gz/memoized-property-1.0.3/CONTRIBUTING.rst | ============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every
little bit helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/estebistec/python-memoized-property/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug"
is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "feature"
is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
memoized_property could always use more documentation, whether as part of the
official memoized_property docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/estebistec/python-memoized-property/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `python-memoized-property` for local development.
1. Fork the `python-memoized-property` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/python-memoized-property.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv python-memoized-property
$ cd python-memoized-property/
$ pip install -r dev-requirements.txt
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox::
$ flake8 memoized_property.py tests
$ python setup.py test
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 2.6, 2.7, and 3.3, and for PyPy. Check
https://travis-ci.org/estebistec/python-memoized-property/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ python -m unittest tests.test_memoized_property | PypiClean |
/sosreport-3.2.0a1.tar.gz/sosreport-3.2.0a1/sos/plugins/openstack_neutron.py |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import os
import re
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
# The Networking plugin includes most of what is needed from a snapshot
# of the networking, so we only need to focus on the parts that are specific
# to OpenStack Networking. The Process plugin should capture the dnsmasq
# command line. The libvirt plugin grabs the instance's XML definition which
# has the interface names for an instance. So what remains is relevant database
# info...
class Neutron(Plugin):
    """OpenStack Networking (quantum/neutron) related information
    """
    plugin_name = "neutron"

    option_list = [("log", "Gathers all Neutron logs", "slow", False),
                   ("quantum", "Overrides checks for newer Neutron components",
                    "fast", False)]

    # Selected in setup(); drives the config/log paths, lease directories and
    # package names ("neutron" for modern releases, legacy "quantum" otherwise).
    component_name = "neutron"

    def setup(self):
        # NOTE(review): this condition looks inverted relative to the help
        # text of the "quantum" option (the override selects the *new* name
        # only when the legacy flag is set). Preserved as-is; confirm intent.
        if os.path.exists("/etc/neutron/") and \
                self.get_option("quantum", False):
            self.component_name = self.plugin_name
        else:
            self.component_name = "quantum"

        self.add_copy_specs([
            "/etc/%s/" % self.component_name,
            "/var/log/%s/" % self.component_name
        ])

        self.netns_dumps()
        self.get_ovs_dumps()

    def get_ovs_dumps(self):
        """Collect Open vSwitch state, only when the OVS core plugin is
        configured in the component's config files."""
        # Check to see if we are using the Open vSwitch plugin. If not we
        # should be able to skip the rest of the dump.
        # BUGFIX: the config glob was previously built with "+", yielding the
        # literal path "/etc/%s/*.conf<component>"; "%" interpolation is intended.
        ovs_conf_check = self.call_ext_prog(
            'grep "^core_plugin.*openvswitch" ' +
            ("/etc/%s/*.conf" % self.component_name))
        if not (ovs_conf_check['status'] == 0):
            return
        if len(ovs_conf_check['output'].splitlines()) == 0:
            return

        # The '-s' option enables dumping of packet counters on the
        # ports.
        self.add_cmd_output("ovs-dpctl -s show")

        # The '-t 5' adds an upper bound on how long to wait to connect
        # to the Open vSwitch server, avoiding hangs when running sosreport.
        self.add_cmd_output("ovs-vsctl -t 5 show")

    def netns_dumps(self):
        """Collect per-namespace networking state for qdhcp-*/qrouter-*
        namespaces, plus the DHCP lease directory of each qdhcp network."""
        # It would've been beautiful if we could get parts of the networking
        # plugin to run in different namespaces. There are a couple of options
        # in the short term: create a local instance and "borrow" some of the
        # functionality, or simply copy some of the functionality.
        prefixes = ["qdhcp", "qrouter"]
        ip_netns_result = self.call_ext_prog("ip netns")
        if not (ip_netns_result['status'] == 0):
            return
        nslist = ip_netns_result['output']
        lease_directories = []
        if nslist:
            for nsname in nslist.splitlines():
                # Robustness: ignore namespace names without the expected
                # "<prefix>-<netid>" shape instead of crashing on unpack.
                if '-' not in nsname:
                    continue
                prefix, netid = nsname.split('-', 1)
                if len(netid) > 0 and prefix in prefixes:
                    self.ns_gather_data(nsname)
                    lease_directories.append(
                        "/var/lib/%s/dhcp/%s/" %
                        (self.component_name, netid))
            self.add_copy_specs(lease_directories)

    # TODO: Refactor! Copied from Networking plugin.
    def get_interface_name(self, ip_addr_out):
        """Return a dict whose keys are the interface names found in the
        output of ``ip -o addr`` (lines containing 'link/ether')."""
        out = {}
        for line in ip_addr_out.splitlines():
            match = re.match('.*link/ether', line)
            if match:
                # Lines look like "N: ifname    link/ether aa:bb..."; the
                # second ':'-separated field is the interface name.
                if_name = match.string.split(':')[1].lstrip()
                out[if_name] = True
        return out

    def ns_gather_data(self, nsname):
        """Run the per-namespace command set via 'ip netns exec <ns> ...'."""
        cmd_prefix = "ip netns exec %s " % nsname
        self.add_cmd_outputs([
            cmd_prefix + "iptables-save",
            cmd_prefix + "ifconfig -a",
            cmd_prefix + "route -n"
        ])
        # borrowed from networking plugin
        ip_addr_result = self.call_ext_prog(cmd_prefix + "ip -o addr")
        if ip_addr_result['status'] == 0:
            for eth in self.get_interface_name(ip_addr_result['output']):
                # Most, if not all, IFs in the namespaces are going to be
                # virtual. The '-a', '-c' and '-g' options are not likely to be
                # supported so these ops are not copied from the network
                # plugin.
                self.add_cmd_outputs([
                    cmd_prefix + "ethtool " + eth,
                    cmd_prefix + "ethtool -i " + eth,
                    cmd_prefix + "ethtool -k " + eth,
                    cmd_prefix + "ethtool -S " + eth
                ])

    # As all of the bridges are in the "global namespace", we do not need
    # to gather info on them.

    def gen_pkg_tuple(self, packages):
        """Interpolate the active component name into each package template."""
        names = [p % {"comp": self.component_name} for p in packages]
        return tuple(names)
class DebianNeutron(Neutron, DebianPlugin, UbuntuPlugin):
    """OpenStack Neutron related information for Debian based distributions
    """
    # Debian/Ubuntu package names, with %(comp)s filled in at setup time.
    package_list_template = [
        '%(comp)s-common',
        '%(comp)s-plugin-cisco',
        '%(comp)s-plugin-linuxbridge-agent',
        '%(comp)s-plugin-nicira',
        '%(comp)s-plugin-openvswitch',
        '%(comp)s-plugin-openvswitch-agent',
        '%(comp)s-plugin-ryu',
        '%(comp)s-plugin-ryu-agent',
        '%(comp)s-server',
        'python-%(comp)s',
        'python-%(comp)sclient'
    ]

    def check_enabled(self):
        # The plugin activates when the distribution's common package exists.
        common_pkg = "%s-common" % self.component_name
        return self.is_installed(common_pkg)

    def setup(self):
        # Shared Neutron collection first, then the Debian-specific bits.
        super(DebianNeutron, self).setup()
        self.packages = self.gen_pkg_tuple(self.package_list_template)
        sudoers_file = "/etc/sudoers.d/%s_sudoers" % self.component_name
        self.add_copy_spec(sudoers_file)
class RedHatNeutron(Neutron, RedHatPlugin):
    """OpenStack Neutron related information for Red Hat distributions
    """
    # Red Hat package names, with %(comp)s filled in at setup time.
    # BUGFIX: two missing commas caused implicit adjacent-string-literal
    # concatenation, silently merging entries into bogus package names
    # ("...-linuxbridgeopenstack-...-metaplugin" and
    #  "...-necopenstack-...-nicira").
    package_list_template = [
        'openstack-%(comp)s',
        'openstack-%(comp)s-linuxbridge',
        'openstack-%(comp)s-metaplugin',
        'openstack-%(comp)s-openvswitch',
        'openstack-%(comp)s-bigswitch',
        'openstack-%(comp)s-brocade',
        'openstack-%(comp)s-cisco',
        'openstack-%(comp)s-hyperv',
        'openstack-%(comp)s-midonet',
        'openstack-%(comp)s-nec',
        'openstack-%(comp)s-nicira',
        'openstack-%(comp)s-plumgrid',
        'openstack-%(comp)s-ryu',
        'python-%(comp)s',
        'python-%(comp)sclient'
    ]

    def check_enabled(self):
        # Enabled only when the base openstack-<component> package is present.
        return self.is_installed("openstack-%s" % self.component_name)

    def setup(self):
        super(RedHatNeutron, self).setup()
        self.packages = self.gen_pkg_tuple(self.package_list_template)
        self.add_copy_spec("/etc/sudoers.d/%s-rootwrap" % self.component_name)
# vim: et ts=4 sw=4 | PypiClean |
/125softNLP-0.0.1-py3-none-any.whl/pysoftNLP/kashgari/tasks/classification/models.py |
# author: BrikerMan
# contact: [email protected]
# blog: https://eliyar.biz
# file: models.py
# time: 2019-05-22 11:26
import logging
import tensorflow as tf
from typing import Dict, Any
from pysoftNLP.kashgari.layers import L, AttentionWeightedAverageLayer, KMaxPoolingLayer
from pysoftNLP.kashgari.tasks.classification.base_model import BaseClassificationModel
class BiLSTM_Model(BaseClassificationModel):
    """Text classifier: embedding -> bidirectional LSTM -> softmax dense."""

    @classmethod
    def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
        """Default hyper-parameters, keyed by layer name."""
        bi_lstm_conf = {
            'units': 128,
            'return_sequences': False
        }
        dense_conf = {
            'activation': 'softmax'
        }
        return {
            'layer_bi_lstm': bi_lstm_conf,
            'layer_dense': dense_conf,
        }

    def build_model_arc(self):
        """Assemble the Keras graph and store it on ``self.tf_model``."""
        label_count = len(self.processor.label2idx)
        params = self.hyper_parameters
        base_model = self.embedding.embed_model

        hidden = L.Bidirectional(L.LSTM(**params['layer_bi_lstm']))(base_model.output)
        logits = L.Dense(label_count, **params['layer_dense'])(hidden)

        self.tf_model = tf.keras.Model(base_model.inputs, logits)
class BiGRU_Model(BaseClassificationModel):
    """Text classifier: embedding -> bidirectional GRU -> softmax dense."""

    @classmethod
    def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
        """Default hyper-parameters, keyed by layer name."""
        bi_gru_conf = {
            'units': 128,
            'return_sequences': False
        }
        dense_conf = {
            'activation': 'softmax'
        }
        return {
            'layer_bi_gru': bi_gru_conf,
            'layer_dense': dense_conf,
        }

    def build_model_arc(self):
        """Assemble the Keras graph and store it on ``self.tf_model``."""
        label_count = len(self.processor.label2idx)
        params = self.hyper_parameters
        base_model = self.embedding.embed_model

        hidden = L.Bidirectional(L.GRU(**params['layer_bi_gru']))(base_model.output)
        logits = L.Dense(label_count, **params['layer_dense'])(hidden)

        self.tf_model = tf.keras.Model(base_model.inputs, logits)
class CNN_Model(BaseClassificationModel):
    """Text classifier: Conv1D -> global max pooling -> dense -> softmax."""

    @classmethod
    def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
        """Default hyper-parameters, keyed by layer name."""
        return {
            'conv1d_layer': {
                'filters': 128,
                'kernel_size': 5,
                'activation': 'relu'
            },
            'max_pool_layer': {},
            'dense_layer': {
                'units': 64,
                'activation': 'relu'
            },
            'activation_layer': {
                'activation': 'softmax'
            },
        }

    def build_model_arc(self):
        """Assemble the Keras graph and store it on ``self.tf_model``."""
        label_count = len(self.processor.label2idx)
        params = self.hyper_parameters
        base_model = self.embedding.embed_model

        # Apply the stages in order on top of the embedding output.
        pipeline = [
            L.Conv1D(**params['conv1d_layer']),
            L.GlobalMaxPooling1D(**params['max_pool_layer']),
            L.Dense(**params['dense_layer']),
            L.Dense(label_count, **params['activation_layer']),
        ]
        stream = base_model.output
        for stage in pipeline:
            stream = stage(stream)

        self.tf_model = tf.keras.Model(base_model.inputs, stream)
class CNN_LSTM_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'conv_layer': {
'filters': 32,
'kernel_size': 3,
'padding': 'same',
'activation': 'relu'
},
'max_pool_layer': {
'pool_size': 2
},
'lstm_layer': {
'units': 100
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layers_seq = []
layers_seq.append(L.Conv1D(**config['conv_layer']))
layers_seq.append(L.MaxPooling1D(**config['max_pool_layer']))
layers_seq.append(L.LSTM(**config['lstm_layer']))
layers_seq.append(L.Dense(output_dim, **config['activation_layer']))
tensor = embed_model.output
for layer in layers_seq:
tensor = layer(tensor)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor)
class CNN_GRU_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'conv_layer': {
'filters': 32,
'kernel_size': 3,
'padding': 'same',
'activation': 'relu'
},
'max_pool_layer': {
'pool_size': 2
},
'gru_layer': {
'units': 100
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layers_seq = []
layers_seq.append(L.Conv1D(**config['conv_layer']))
layers_seq.append(L.MaxPooling1D(**config['max_pool_layer']))
layers_seq.append(L.GRU(**config['gru_layer']))
layers_seq.append(L.Dense(output_dim, **config['activation_layer']))
tensor = embed_model.output
for layer in layers_seq:
tensor = layer(tensor)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor)
class AVCNN_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'spatial_dropout': {
'rate': 0.25
},
'conv_0': {
'filters': 300,
'kernel_size': 1,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'conv_1': {
'filters': 300,
'kernel_size': 2,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'conv_2': {
'filters': 300,
'kernel_size': 3,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'conv_3': {
'filters': 300,
'kernel_size': 4,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
# ---
'attn_0': {},
'avg_0': {},
'maxpool_0': {},
# ---
'maxpool_1': {},
'attn_1': {},
'avg_1': {},
# ---
'maxpool_2': {},
'attn_2': {},
'avg_2': {},
# ---
'maxpool_3': {},
'attn_3': {},
'avg_3': {},
# ---
'v_col3': {
# 'mode': 'concat',
'axis': 1
},
'merged_tensor': {
# 'mode': 'concat',
'axis': 1
},
'dropout': {
'rate': 0.7
},
'dense': {
'units': 144,
'activation': 'relu'
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layer_embed_dropout = L.SpatialDropout1D(**config['spatial_dropout'])
layers_conv = [L.Conv1D(**config[f'conv_{i}']) for i in range(4)]
layers_sensor = []
layers_sensor.append(L.GlobalMaxPooling1D())
layers_sensor.append(AttentionWeightedAverageLayer())
layers_sensor.append(L.GlobalAveragePooling1D())
layer_view = L.Concatenate(**config['v_col3'])
layer_allviews = L.Concatenate(**config['merged_tensor'])
layers_seq = []
layers_seq.append(L.Dropout(**config['dropout']))
layers_seq.append(L.Dense(**config['dense']))
layers_seq.append(L.Dense(output_dim, **config['activation_layer']))
embed_tensor = layer_embed_dropout(embed_model.output)
tensors_conv = [layer_conv(embed_tensor) for layer_conv in layers_conv]
tensors_matrix_sensor = []
for tensor_conv in tensors_conv:
tensor_sensors = []
tensor_sensors = [layer_sensor(tensor_conv) for layer_sensor in layers_sensor]
# tensor_sensors.append(L.GlobalMaxPooling1D()(tensor_conv))
# tensor_sensors.append(AttentionWeightedAverageLayer()(tensor_conv))
# tensor_sensors.append(L.GlobalAveragePooling1D()(tensor_conv))
tensors_matrix_sensor.append(tensor_sensors)
tensors_views = [layer_view(list(tensors)) for tensors in zip(*tensors_matrix_sensor)]
tensor = layer_allviews(tensors_views)
# tensors_v_cols = [L.concatenate(tensors, **config['v_col3']) for tensors
# in zip(*tensors_matrix_sensor)]
# tensor = L.concatenate(tensors_v_cols, **config['merged_tensor'])
for layer in layers_seq:
tensor = layer(tensor)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor)
class KMax_CNN_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'spatial_dropout': {
'rate': 0.2
},
'conv_0': {
'filters': 180,
'kernel_size': 1,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'conv_1': {
'filters': 180,
'kernel_size': 2,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'conv_2': {
'filters': 180,
'kernel_size': 3,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'conv_3': {
'filters': 180,
'kernel_size': 4,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu'
},
'maxpool_i4': {
'k': 3
},
'merged_tensor': {
# 'mode': 'concat',
'axis': 1
},
'dropout': {
'rate': 0.6
},
'dense': {
'units': 144,
'activation': 'relu'
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layer_embed_dropout = L.SpatialDropout1D(**config['spatial_dropout'])
layers_conv = [L.Conv1D(**config[f'conv_{i}']) for i in range(4)]
layers_sensor = [KMaxPoolingLayer(**config['maxpool_i4']),
L.Flatten()]
layer_concat = L.Concatenate(**config['merged_tensor'])
layers_seq = []
layers_seq.append(L.Dropout(**config['dropout']))
layers_seq.append(L.Dense(**config['dense']))
layers_seq.append(L.Dense(output_dim, **config['activation_layer']))
embed_tensor = layer_embed_dropout(embed_model.output)
tensors_conv = [layer_conv(embed_tensor) for layer_conv in layers_conv]
tensors_sensor = []
for tensor_conv in tensors_conv:
tensor_sensor = tensor_conv
for layer_sensor in layers_sensor:
tensor_sensor = layer_sensor(tensor_sensor)
tensors_sensor.append(tensor_sensor)
tensor = layer_concat(tensors_sensor)
# tensor = L.concatenate(tensors_sensor, **config['merged_tensor'])
for layer in layers_seq:
tensor = layer(tensor)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor)
class R_CNN_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'spatial_dropout': {
'rate': 0.2
},
'rnn_0': {
'units': 64,
'return_sequences': True
},
'conv_0': {
'filters': 128,
'kernel_size': 2,
'kernel_initializer': 'normal',
'padding': 'valid',
'activation': 'relu',
'strides': 1
},
'maxpool': {},
'attn': {},
'average': {},
'concat': {
'axis': 1
},
'dropout': {
'rate': 0.5
},
'dense': {
'units': 120,
'activation': 'relu'
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layers_rcnn_seq = []
layers_rcnn_seq.append(L.SpatialDropout1D(**config['spatial_dropout']))
layers_rcnn_seq.append(L.Bidirectional(L.GRU(**config['rnn_0'])))
layers_rcnn_seq.append(L.Conv1D(**config['conv_0']))
layers_sensor = []
layers_sensor.append(L.GlobalMaxPooling1D())
layers_sensor.append(AttentionWeightedAverageLayer())
layers_sensor.append(L.GlobalAveragePooling1D())
layer_concat = L.Concatenate(**config['concat'])
layers_full_connect = []
layers_full_connect.append(L.Dropout(**config['dropout']))
layers_full_connect.append(L.Dense(**config['dense']))
layers_full_connect.append(L.Dense(output_dim, **config['activation_layer']))
tensor = embed_model.output
for layer in layers_rcnn_seq:
tensor = layer(tensor)
tensors_sensor = [layer(tensor) for layer in layers_sensor]
tensor_output = layer_concat(tensors_sensor)
# tensor_output = L.concatenate(tensor_sensors, **config['concat'])
for layer in layers_full_connect:
tensor_output = layer(tensor_output)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor_output)
class AVRNN_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'spatial_dropout': {
'rate': 0.25
},
'rnn_0': {
'units': 60,
'return_sequences': True
},
'rnn_1': {
'units': 60,
'return_sequences': True
},
'concat_rnn': {
'axis': 2
},
'last': {},
'maxpool': {},
'attn': {},
'average': {},
'all_views': {
'axis': 1
},
'dropout': {
'rate': 0.5
},
'dense': {
'units': 144,
'activation': 'relu'
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layers_rnn0 = []
layers_rnn0.append(L.SpatialDropout1D(**config['spatial_dropout']))
layers_rnn0.append(L.Bidirectional(L.GRU(**config['rnn_0'])))
layer_bi_rnn1 = L.Bidirectional(L.GRU(**config['rnn_1']))
layer_concat = L.Concatenate(**config['concat_rnn'])
layers_sensor = []
layers_sensor.append(L.Lambda(lambda t: t[:, -1], name='last'))
layers_sensor.append(L.GlobalMaxPooling1D())
layers_sensor.append(AttentionWeightedAverageLayer())
layers_sensor.append(L.GlobalAveragePooling1D())
layer_allviews = L.Concatenate(**config['all_views'])
layers_full_connect = []
layers_full_connect.append(L.Dropout(**config['dropout']))
layers_full_connect.append(L.Dense(**config['dense']))
layers_full_connect.append(L.Dense(output_dim, **config['activation_layer']))
tensor_rnn = embed_model.output
for layer in layers_rnn0:
tensor_rnn = layer(tensor_rnn)
tensor_concat = layer_concat([tensor_rnn, layer_bi_rnn1(tensor_rnn)])
tensor_sensors = [layer(tensor_concat) for layer in layers_sensor]
tensor_output = layer_allviews(tensor_sensors)
for layer in layers_full_connect:
tensor_output = layer(tensor_output)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor_output)
class Dropout_BiGRU_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'spatial_dropout': {
'rate': 0.15
},
'rnn_0': {
'units': 64,
'return_sequences': True
},
'dropout_rnn': {
'rate': 0.35
},
'rnn_1': {
'units': 64,
'return_sequences': True
},
'last': {},
'maxpool': {},
'average': {},
'all_views': {
'axis': 1
},
'dropout': {
'rate': 0.5
},
'dense': {
'units': 72,
'activation': 'relu'
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layers_rnn = []
layers_rnn.append(L.SpatialDropout1D(**config['spatial_dropout']))
layers_rnn.append(L.Bidirectional(L.GRU(**config['rnn_0'])))
layers_rnn.append(L.Dropout(**config['dropout_rnn']))
layers_rnn.append(L.Bidirectional(L.GRU(**config['rnn_1'])))
layers_sensor = []
layers_sensor.append(L.Lambda(lambda t: t[:, -1], name='last'))
layers_sensor.append(L.GlobalMaxPooling1D())
layers_sensor.append(L.GlobalAveragePooling1D())
layer_allviews = L.Concatenate(**config['all_views'])
layers_full_connect = []
layers_full_connect.append(L.Dropout(**config['dropout']))
layers_full_connect.append(L.Dense(**config['dense']))
layers_full_connect.append(L.Dense(output_dim, **config['activation_layer']))
tensor_rnn = embed_model.output
for layer in layers_rnn:
tensor_rnn = layer(tensor_rnn)
tensor_sensors = [layer(tensor_rnn) for layer in layers_sensor]
tensor_output = layer_allviews(tensor_sensors)
for layer in layers_full_connect:
tensor_output = layer(tensor_output)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor_output)
class Dropout_AVRNN_Model(BaseClassificationModel):
@classmethod
def get_default_hyper_parameters(cls) -> Dict[str, Dict[str, Any]]:
return {
'spatial_dropout': {
'rate': 0.25
},
'rnn_0': {
'units': 56,
'return_sequences': True
},
'rnn_dropout': {
'rate': 0.3
},
'rnn_1': {
'units': 56,
'return_sequences': True
},
'last': {},
'maxpool': {},
'attn': {},
'average': {},
'all_views': {
'axis': 1
},
'dropout_0': {
'rate': 0.5
},
'dense': {
'units': 128,
'activation': 'relu'
},
'dropout_1': {
'rate': 0.25
},
'activation_layer': {
'activation': 'softmax'
},
}
def build_model_arc(self):
output_dim = len(self.processor.label2idx)
config = self.hyper_parameters
embed_model = self.embedding.embed_model
layers_rnn = []
layers_rnn.append(L.SpatialDropout1D(**config['spatial_dropout']))
layers_rnn.append(L.Bidirectional(L.GRU(**config['rnn_0'])))
layers_rnn.append(L.SpatialDropout1D(**config['rnn_dropout']))
layers_rnn.append(L.Bidirectional(L.GRU(**config['rnn_1'])))
layers_sensor = []
layers_sensor.append(L.Lambda(lambda t: t[:, -1], name='last'))
layers_sensor.append(L.GlobalMaxPooling1D())
layers_sensor.append(AttentionWeightedAverageLayer())
layers_sensor.append(L.GlobalAveragePooling1D())
layer_allviews = L.Concatenate(**config['all_views'])
layers_full_connect = []
layers_full_connect.append(L.Dropout(**config['dropout_0']))
layers_full_connect.append(L.Dense(**config['dense']))
layers_full_connect.append(L.Dropout(**config['dropout_1']))
layers_full_connect.append(L.Dense(output_dim, **config['activation_layer']))
tensor_rnn = embed_model.output
for layer in layers_rnn:
tensor_rnn = layer(tensor_rnn)
tensor_sensors = [layer(tensor_rnn) for layer in layers_sensor]
tensor_output = layer_allviews(tensor_sensors)
for layer in layers_full_connect:
tensor_output = layer(tensor_output)
self.tf_model = tf.keras.Model(embed_model.inputs, tensor_output)
if __name__ == "__main__":
print(BiLSTM_Model.get_default_hyper_parameters())
logging.basicConfig(level=logging.DEBUG)
from kashgari.corpus import SMP2018ECDTCorpus
x, y = SMP2018ECDTCorpus.load_data()
import kashgari
from kashgari.processors.classification_processor import ClassificationProcessor
from kashgari.embeddings import BareEmbedding
processor = ClassificationProcessor(multi_label=False)
embed = BareEmbedding(task=kashgari.CLASSIFICATION, sequence_length=30, processor=processor)
m = BiLSTM_Model(embed)
# m.build_model(x, y)
m.fit(x, y, epochs=2)
print(m.predict(x[:10]))
# m.evaluate(x, y)
print(m.predict_top_k_class(x[:10])) | PypiClean |
/test_poetry_vp-0.1.1-py3-none-any.whl/test_poetry_vp/api/nh/generator/stock/generate_struct.py | import importlib
class StructGenerator:
"""Struct生成器"""
def __init__(self, filename: str, prefix: str):
"""Constructor"""
self.filename = filename
self.prefix = prefix
self.typedefs = {}
self.current_struct = {}
self.load_constant()
def load_constant(self):
""""""
module_name = f"{self.prefix}_typedef"
module = importlib.import_module(module_name)
for name in dir(module):
if "__" not in name:
self.typedefs[name] = getattr(module, name)
def run(self):
"""运行生成"""
self.f_cpp = open(self.filename, "r")
self.f_struct = open(f"{self.prefix}_struct.py", "w")
for n, line in enumerate(self.f_cpp):
if "*" in line:
continue
try:
self.process_line(line)
except Exception:
print(n, line)
import traceback
traceback.print_exc()
return
self.f_cpp.close()
self.f_struct.close()
print("Struct生成成功")
def process_line(self, line: str):
"""处理每行"""
line = line.replace(";", "")
line = line.replace("\n", "")
line = line.replace("\t", " ")
if self.prefix == "nh_stock" and line.startswith(" "):
line = line[4:]
if "///" in line:
return
elif line.startswith(" struct") or line.startswith(" typedef struct"):
self.process_declare(line)
elif line.startswith(" {"):
self.process_start(line)
elif line.startswith(" }"):
self.process_end(line)
elif line.startswith(" "):
self.process_member(line)
def process_declare(self, line: str):
"""处理声明"""
words = line.split(" ")
words = [word for word in words if word]
if "typedef" in words:
name = words[2]
else:
name = words[1]
end = "{"
new_line = f"{name} = {end}\n"
self.f_struct.write(new_line)
self.current_struct = name
def process_start(self, line: str):
"""处理开始"""
pass
def process_end(self, line: str):
"""处理结束"""
new_line = "}\n\n"
self.f_struct.write(new_line)
def process_member(self, line: str):
"""处理成员"""
if "//" in line:
ix = line.index("//")
line = line[:ix]
words = line.split(" ")
words = [word for word in words if word]
if words[0] not in self.typedefs:
return
if words[0] == "ReqOrderInsertData":
return
elif words[0] == "Commi_Info_t":
return
elif words[0] == "char":
py_type = "string"
elif words[0] == "int":
py_type = "int"
else:
py_type = self.typedefs[words[0]]
name = words[1]
new_line = f" \"{name}\": \"{py_type}\",\n"
self.f_struct.write(new_line)
if __name__ == "__main__":
generator = StructGenerator("../../include/nh/stock/NhStockUserApiStruct.h", "nh")
generator.run() | PypiClean |
/ldmud_asyncio-0.0.4-py3-none-any.whl/ldmud_asyncio/__init__.py | import asyncio, collections, heapq, select, selectors, signal, threading
import ldmud
class LDMudSelector(selectors.BaseSelector):
"""LDMud selector.
This selector uses the LDMud backend loop to wait for a
socket to become ready. The select function here will not wait,
instead the select results will be passed directly to
loop.run_select_event().
"""
class Mapping(collections.abc.Mapping):
"""A mapping of file objects to selector keys.
It uses the mapping of file descriptors to keys
from the LDMudSelector to present a read-only
mapping from file objects to selector keys.
"""
def __init__(self, filemap):
self._filemap = filemap
def __len__(self):
return len(self._filemap)
def __getitem__(self, fileobj):
try:
fd = LDMudSelector._get_fd_from_fileobj(fileobj)
except:
fd = -1
if fd >= 0:
return self._filemap[fd]
# Search the map
for key in self._filemap.values():
if key.fileobj is fileobj:
return key
def __iter__(self):
return iter(self._selector._fd_to_key)
def __init__(self):
self._filemap = {} # fd -> key
def register(self, fileobj, events, data=None):
key = selectors.SelectorKey(fileobj, self._get_fd_from_fileobj(fileobj), events, data)
if key.fd in self._filemap:
raise KeyError("File is already registered")
self._filemap[key.fd] = key
def callback(cbmask):
cbevents = 0
if cbmask & select.POLLIN:
cbevents |= selectors.EVENT_READ
if cbmask & select.POLLOUT:
cbevents |= selectors.EVENT_WRITE
asyncio.get_event_loop().run_select_event(key, cbevents & key.events)
mask = 0
if events & selectors.EVENT_READ:
mask |= select.POLLIN
if events & selectors.EVENT_WRITE:
mask |= select.POLLOUT
ldmud.register_socket(key.fd, callback, mask)
def unregister(self, fileobj):
key = self.get_key(fileobj)
del self._filemap[key.fd]
ldmud.unregister_socket(key.fd)
return key
def select(self, timeout=None):
return []
def close(self):
for key in self._filemap.values():
self.unregister(self, key.fileobj)
def get_map(self):
return LDMudSelector.Mapping(self._filemap)
@staticmethod
def _get_fd_from_fileobj(fileobj):
"""Returns the file descriptor for a file object.
The file object needs either to be an integer (the descriptor itself)
or have a fileno() function that returns the descriptor.
"""
if isinstance(fileobj, int):
return fileobj
else:
return int(fileobj.fileno())
class LDMudEventLoop(asyncio.SelectorEventLoop):
"""LDMud event loop.
Event loop representing the LDMud backend loop.
"""
def __init__(self):
super().__init__(selector = LDMudSelector())
self._thread_id = threading.get_ident()
self._clock_resolution = 1
self._sigchld_handler_handle = None
self._running = False
asyncio._set_running_loop(self)
ldmud.register_hook(ldmud.ON_HEARTBEAT, self._heart_beat)
ldmud.register_hook(ldmud.ON_CHILD_PROCESS_TERMINATED, self._signal_handler)
def _heart_beat(self):
"""Called from LDMud upon each heart beat.
Check all pending timers and execute any expired timers.
"""
self.run_ready()
# If half of the timers are cancelled, clean the queue.
if 2 * self._timer_cancelled_count > len(self._scheduled):
active_timers = []
for timer in self._scheduled:
if timer._cancelled:
timer._scheduled = False
else:
active_timers.append(timer)
heapq.heapify(active_timers)
self._scheduled = active_timers
self._timer_cancelled_count = 0
# Now execute any expired timers.
now = self.time()
while self._scheduled:
if self._scheduled[0]._cancelled:
timer = heapq.heappop(self._scheduled)
timer._scheduled = False
self._timer_cancelled_count -= 1
continue
if self._scheduled[0]._when > now:
break
timer = heapq.heappop(self._scheduled)
timer._scheduled = False
timer._run()
self.run_ready()
def run_forever(self):
raise RuntimeError("The LDMud backend loop runs automatically.")
def run_until_complete(self, future):
raise RuntimeError("The LDMud backend loop runs automatically.")
def stop(self):
raise RuntimeError("To stop the LDMud backend loop you need to call the efun shutdown().")
def close(self):
pass
def _signal_handler(self):
"""LDMud received a SIGCHLD.
If there is a registered handler, call it.
"""
if self._sigchld_handler_handle is None:
return
if self._sigchld_handler_handle._cancelled:
self._sigchld_handler_handle = None
return
self._add_callback(self._sigchld_handler_handle)
self.run_ready()
def add_signal_handler(self, sig, callback, *args):
if sig != signal.SIGCHLD:
raise RuntimeError("Only signal handlers for SIDCHLD are supported.")
self._sigchld_handler_handle = asyncio.Handle(callback, args, self, None)
def remove_signal_handler(self, sig):
if sig != signal.SIGCHLD:
raise RuntimeError("Only signal handlers for SIDCHLD are supported.")
self._sigchld_handler_handle = None
def run_ready(self):
"""Run all tasks in the _ready list."""
# Prevent reentrant calls.
if self._running:
return
self._running = True
try:
while len(self._ready):
handle = self._ready.popleft()
if handle._cancelled:
continue
if self._debug:
try:
self._current_handle = handle
t0 = self.time()
handle._run()
dt = self.time() - t0
if dt >= self.slow_callback_duration:
logger.warning('Executing %s took %.3f seconds',
_format_handle(handle), dt)
finally:
self._current_handle = None
else:
handle._run()
finally:
self._running = False
def run_select_event(self, key, event):
"""Run the task associated with the given selector key."""
self._process_events(((key, event,),))
self.run_ready()
class LDMudDefaultEventLoopPolicy(asyncio.AbstractEventLoopPolicy):
"""Event loop policy for LDMud.
We have only one thread, there we have only one loop
which is the LDMud backend loop. There is currently no
support for watching child processes.
"""
_loop_factory = LDMudEventLoop
def __init__(self):
self._loop = None
self._watcher = None
self._set_called = False
def get_event_loop(self):
if self._loop is None and not self._set_called:
self.set_event_loop(self.new_event_loop())
return self._loop
def set_event_loop(self, loop):
assert loop is None or isinstance(loop, asyncio.AbstractEventLoop)
self._set_called = True
self._loop = loop
if self._watcher:
self._watcher.attach_loop(loop)
def new_event_loop(self):
return self._loop_factory()
def get_child_watcher(self):
if self._watcher is None:
self._watcher = asyncio.SafeChildWatcher()
if self._loop:
self._watcher.attach_loop(self._loop)
return self._watcher
def set_child_watcher(self, watcher):
assert watcher is None or isinstance(watcher, AbstractChildWatcher)
if self._watcher is not None:
self._watcher.close()
self._watcher = watcher
# Keep all task executed via run() alive.
_current_tasks = set()
def _unregister_task(task):
_current_tasks.discard(task)
def _register_task(task):
_current_tasks.add(task)
task.add_done_callback(_unregister_task)
def run(func):
"""Execute the given asynchronous function."""
future = asyncio.ensure_future(func)
_register_task(future)
asyncio.get_event_loop().run_ready()
asyncio.SelectorEventLoop = LDMudEventLoop
asyncio.DefaultEventLoopPolicy = LDMudDefaultEventLoopPolicy
asyncio.run = run
# We don't export anything, just monkey patch asyncio.
__all__ = [] | PypiClean |
/dimagi-superset-2.0.1.tar.gz/dimagi-superset-2.0.1/superset/dao/base.py | from typing import Any, Dict, List, Optional, Type, Union
from flask_appbuilder.models.filters import BaseFilter
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.models.sqla.interface import SQLAInterface
from sqlalchemy.exc import SQLAlchemyError, StatementError
from sqlalchemy.orm import Session
from superset.dao.exceptions import (
DAOConfigError,
DAOCreateFailedError,
DAODeleteFailedError,
DAOUpdateFailedError,
)
from superset.extensions import db
class BaseDAO:
"""
Base DAO, implement base CRUD sqlalchemy operations
"""
model_cls: Optional[Type[Model]] = None
"""
Child classes need to state the Model class so they don't need to implement basic
create, update and delete methods
"""
base_filter: Optional[BaseFilter] = None
"""
Child classes can register base filtering to be aplied to all filter methods
"""
id_column_name = "id"
@classmethod
def find_by_id(
cls, model_id: Union[str, int], session: Session = None
) -> Optional[Model]:
"""
Find a model by id, if defined applies `base_filter`
"""
session = session or db.session
query = session.query(cls.model_cls)
if cls.base_filter:
data_model = SQLAInterface(cls.model_cls, session)
query = cls.base_filter( # pylint: disable=not-callable
cls.id_column_name, data_model
).apply(query, None)
id_filter = {cls.id_column_name: model_id}
try:
return query.filter_by(**id_filter).one_or_none()
except StatementError:
# can happen if int is passed instead of a string or similar
return None
@classmethod
def find_by_ids(cls, model_ids: Union[List[str], List[int]]) -> List[Model]:
"""
Find a List of models by a list of ids, if defined applies `base_filter`
"""
id_col = getattr(cls.model_cls, cls.id_column_name, None)
if id_col is None:
return []
query = db.session.query(cls.model_cls).filter(id_col.in_(model_ids))
if cls.base_filter:
data_model = SQLAInterface(cls.model_cls, db.session)
query = cls.base_filter( # pylint: disable=not-callable
cls.id_column_name, data_model
).apply(query, None)
return query.all()
@classmethod
def find_all(cls) -> List[Model]:
"""
Get all that fit the `base_filter`
"""
query = db.session.query(cls.model_cls)
if cls.base_filter:
data_model = SQLAInterface(cls.model_cls, db.session)
query = cls.base_filter( # pylint: disable=not-callable
cls.id_column_name, data_model
).apply(query, None)
return query.all()
@classmethod
def find_one_or_none(cls, **filter_by: Any) -> Optional[Model]:
"""
Get the first that fit the `base_filter`
"""
query = db.session.query(cls.model_cls)
if cls.base_filter:
data_model = SQLAInterface(cls.model_cls, db.session)
query = cls.base_filter( # pylint: disable=not-callable
cls.id_column_name, data_model
).apply(query, None)
return query.filter_by(**filter_by).one_or_none()
@classmethod
def create(cls, properties: Dict[str, Any], commit: bool = True) -> Model:
"""
Generic for creating models
:raises: DAOCreateFailedError
"""
if cls.model_cls is None:
raise DAOConfigError()
model = cls.model_cls() # pylint: disable=not-callable
for key, value in properties.items():
setattr(model, key, value)
try:
db.session.add(model)
if commit:
db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAOCreateFailedError(exception=ex) from ex
return model
@classmethod
def save(cls, instance_model: Model, commit: bool = True) -> Model:
"""
Generic for saving models
:raises: DAOCreateFailedError
"""
if cls.model_cls is None:
raise DAOConfigError()
if not isinstance(instance_model, cls.model_cls):
raise DAOCreateFailedError(
"the instance model is not a type of the model class"
)
try:
db.session.add(instance_model)
if commit:
db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAOCreateFailedError(exception=ex) from ex
return instance_model
@classmethod
def update(
cls, model: Model, properties: Dict[str, Any], commit: bool = True
) -> Model:
"""
Generic update a model
:raises: DAOCreateFailedError
"""
for key, value in properties.items():
setattr(model, key, value)
try:
db.session.merge(model)
if commit:
db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAOUpdateFailedError(exception=ex) from ex
return model
@classmethod
def delete(cls, model: Model, commit: bool = True) -> Model:
"""
Generic delete a model
:raises: DAODeleteFailedError
"""
try:
db.session.delete(model)
if commit:
db.session.commit()
except SQLAlchemyError as ex: # pragma: no cover
db.session.rollback()
raise DAODeleteFailedError(exception=ex) from ex
return model | PypiClean |
/data-science-common-core-1.7.8.tar.gz/data-science-common-core-1.7.8/common/model.py | import logging
import os
import pickle
from multiprocessing import cpu_count
import numpy as np
import pandas as pd
import pgzip
from catboost import CatBoostClassifier, CatBoostRegressor
from sklearn.ensemble import HistGradientBoostingRegressor
from sklearn.multioutput import MultiOutputRegressor
from src.logic import export_predictions
from .custom_hgb import CustomHistGradientBoostingClassifier
from .custom_multi import CustomMultiOutputClassifier
from .io import (
fetch_from_bucket,
get_data_bucket,
get_storage_client,
insert_logs,
insert_metrics,
insert_to_sheet,
logger,
upload_to_bucket,
)
from .logic import (
    calculate_lift_curve,
    calculate_mean_absolute_error,
    calculate_mean_average_precision,
    kappa_analysis,
)
from .plot import plot_model_results
from .query import q_delete_run
def ml_setup(params):
    """Model-agnostic setup: build and return an untrained model object.

    Args:
        params (dict): run configuration. Reads "model_type" ("hgb" or
            "cat"), "prediction_type" ("classification" or anything else,
            treated as regression) and, for catboost, "ft_encoding".

    Returns:
        An unfitted estimator produced by the matching *_model_setup helper.

    Raises:
        ValueError: if params["model_type"] is neither "hgb" nor "cat".
    """
    model_type = params["model_type"]
    # Allocate here with wanted scikit-learn classifiers
    if model_type == "hgb":
        # NOTE: the previous version copied params["model_params_hgb"] and
        # popped "model_file" without ever using the result — dead code
        # (and a spurious KeyError if the key was absent); removed.
        if params["prediction_type"] == "classification":
            model = hgb_classification_model_setup(params)
        else:
            model = hgb_regressor_model_setup(params)
    elif model_type == "cat":
        if params["ft_encoding"] is not None:
            logging.warning(
                "Catboost performs best with no/minimal feature preprocessing."
                " Ohe/pca to categorical features should be avoided."
            )
        if params["prediction_type"] == "classification":
            model = cat_classification_model_setup(params)
        else:
            model = cat_regressor_model_setup(params)
    else:
        # Callers catching ValueError keep working; the message is new.
        raise ValueError(f"Unknown model_type: {model_type!r} (expected 'hgb' or 'cat')")
    return model
def ml_load(params):
    """Model-agnostic loading: fetch the trained model artifact from the bucket."""
    message = "Download model file"
    logger.info(message)
    insert_logs(params, message)
    # fetch_from_bucket returns a dict of artifacts keyed by kind.
    return fetch_from_bucket(params, "model")["model"]
def sk_train(params, model, data):
    """Fit a multi-output scikit-learn wrapper and optionally persist it.

    Args:
        params (dict): run configuration. Reads "prediction_type",
            "smote_params", "random_state", "dry_run", "model_type" and
            the matching "model_params_*" entry for the output path.
        model: the single-output base estimator to wrap.
        data (dict): holds "df_train"/"label_train" and, for
            classification, "df_val"/"label_val" as an eval set.

    Returns:
        The fitted multi-output model.

    Raises:
        ValueError: if params["prediction_type"] is neither
            "classification" nor "regression".
    """
    prediction_type = params["prediction_type"]
    if prediction_type == "classification":
        # SMOTE treats column 0 as categorical; seed it for reproducibility.
        params["smote_params"]["categorical_features"] = [0]
        params["smote_params"]["random_state"] = params["random_state"]
        multi_model = CustomMultiOutputClassifier(model, n_jobs=1)
        multi_model.fit(
            data["df_train"],
            data["label_train"],
            eval_set=(data["df_val"], data["label_val"]),
        )
    elif prediction_type == "regression":
        multi_model = MultiOutputRegressor(model)
        multi_model.fit(data["df_train"], data["label_train"])
    else:
        # Previously fell through and pickled/returned None; fail loudly instead.
        raise ValueError(
            f"Unknown prediction_type: {prediction_type!r}"
            " (expected 'classification' or 'regression')"
        )
    if not params["dry_run"]:
        # Persist the fitted model as a parallel-gzip-compressed pickle.
        with pgzip.open(
            params[f"model_params_{params['model_type']}"]["path_model_file"],
            "wb",
            blocksize=2**22,
            compresslevel=4,
            thread=cpu_count(),
        ) as f:
            pickle.dump(multi_model, f)
    return multi_model
def ml_train(params, model, data):
    """Model-agnostic training entry point; delegates to sk_train."""
    return sk_train(params, model, data)
def ml_predict(params, model, df):
    """Model-agnostic predict function.

    Regression returns the raw predictions; classification returns one
    column of positive-class probabilities per output model.
    """
    if params["prediction_type"] == "regression":
        return model.predict(df)
    # Classification: take class-1 probability from each per-output model
    # and stack them as columns (samples x outputs).
    return np.array([proba[:, 1] for proba in model.predict_proba(df)]).T
def ml_upload(params):
    """Model-agnostic upload of trained artifacts to the storage bucket.

    Uploads the FeatureTransformer file (when one was produced) to the "ft"
    bucket folder, then the model file for the active model type to the
    "model" folder.
    """
    model_params = params[f'model_params_{params["model_type"]}']
    # Upload FeatureTransformer file - if existing
    if os.path.exists(params["path_ft_file"]):
        upload_to_bucket(
            params,
            params["path_ft_file"],
            "",
            bucket_folder="ft",
        )
    # Upload model file
    upload_to_bucket(params, model_params["path_model_file"], "", bucket_folder="model")
    return
def model_evaluation(params, data):
    """Measure the performances of a model with/out teu normalization.

    Args:
        params (dict): pipeline configuration; ``prediction_type`` selects
            the metric set.
        data (dict): data dictionary in order to evaluate the model.

    Returns:
        dict: metric name -> value.

    Raises:
        ValueError: when ``prediction_type`` is neither "classification"
            nor "regression".
    """
    prediction_type = params["prediction_type"]
    if prediction_type == "classification":
        results = calculate_mean_average_precision(params, data)
        results.update(kappa_analysis(params, data))
        results.update(calculate_lift_curve(params, data))
        return results
    if prediction_type == "regression":
        return calculate_mean_absolute_error(params, data)
    raise ValueError(
        "Prediction_type should either be 'classification' or 'regression'"
    )
def ml_performance(params, model, data):
    """Model-agnostic performance evaluation / prediction export.

    Train mode: predicts on the train and validation splits (plus the test
    split when ``validation_test_split`` is enabled), stores the prediction
    frames in ``data`` under ``pred_*`` keys, computes metrics, optionally
    plots them, and — outside dry runs — persists metrics and uploads the
    model artifacts.  Test mode: predicts on the test split only and exports
    the predictions.

    Args:
        params (dict): pipeline configuration.
        model: fitted (train mode) or loaded (test mode) estimator.
        data (dict): features/labels/id frames per split; mutated in place.

    Returns:
        The (unchanged) model, for call chaining.
    """
    # Train mode
    if params["train_mode"]:
        # Compute prediction on training data
        df_pred = ml_predict(params, model, data["df_train"])
        data["pred_train"] = pd.DataFrame(df_pred, columns=params["data_output_fields"])
        data["pred_train"].index = data["id_train"][params["id_field"]]
        # Compute prediction on validation data
        df_pred = ml_predict(params, model, data["df_val"])
        data["pred_val"] = pd.DataFrame(df_pred, columns=params["data_output_fields"])
        data["pred_val"].index = data["id_val"][params["id_field"]]
        # Compute prediction on test data if exists
        if "validation_test_split" in params and params["validation_test_split"]:
            df_pred = ml_predict(params, model, data["df_test"])
            data["pred_test"] = pd.DataFrame(
                df_pred, columns=params["data_output_fields"]
            )
            data["pred_test"].index = data["id_test"][params["id_field"]]
        results = model_evaluation(params, data)
        if params["plot"]:
            plot_model_results(params, data, results)
        # Upload the data if not in dry run
        if not params["dry_run"]:
            # upload metrics to the dwh
            insert_metrics(params, results)
            ml_upload(params)
        msg = "Training is complete."
        logger.info(msg)
        insert_logs(params, msg)
    # Test mode
    else:
        msg = "Predict test results and compute performances"
        logger.info(msg)
        insert_logs(params, msg)
        data["pred_test"] = ml_predict(params, model, data["df_test"])
        # Compute prediction on test data and store result metrics
        export_predictions(params, data)
        msg = "Prediction is complete."
        logger.info(msg)
        insert_logs(params, msg)
    return model
def hgb_regressor_model_setup(params):
    """Create a HistGradientBoostingRegressor from ``params["model_params_hgb"]``."""
    hgb_params = params["model_params_hgb"]
    # Forward only the hyper-parameters the estimator expects, by name.
    keyword_names = (
        "max_iter",
        "max_leaf_nodes",
        "l2_regularization",
        "loss",
        "learning_rate",
        "scoring",
        "early_stopping",
        "tol",
        "n_iter_no_change",
        "random_state",
        "verbose",
    )
    kwargs = {name: hgb_params[name] for name in keyword_names}
    return HistGradientBoostingRegressor(**kwargs)
def hgb_classification_model_setup(params):
    """Create the custom HistGradientBoosting *Classifier*.

    (Docstring previously said "Regressor" — copy-paste artifact; this
    function builds ``CustomHistGradientBoostingClassifier``.)
    Hyper-parameters are read from ``params["model_params_hgb"]``.
    """
    # Select hgb parameters
    model_params = params["model_params_hgb"]
    # Set up HistGradientBoosting model
    model = CustomHistGradientBoostingClassifier(
        max_iter=model_params["max_iter"],
        max_leaf_nodes=model_params["max_leaf_nodes"],
        l2_regularization=model_params["l2_regularization"],
        loss=model_params["loss"],
        learning_rate=model_params["learning_rate"],
        scoring=model_params["scoring"],
        early_stopping=model_params["early_stopping"],
        tol=model_params["tol"],
        n_iter_no_change=model_params["n_iter_no_change"],
        random_state=model_params["random_state"],
        verbose=model_params["verbose"],
    )
    return model
def cat_classification_model_setup(params):
    """Create a CatBoostClassifier configured from ``model_params_cat``."""
    model_kwargs = params["model_params_cat"]["model_kwargs"]
    categorical_features = params["processed_cat_features"]
    return CatBoostClassifier(cat_features=categorical_features, **model_kwargs)
def cat_regressor_model_setup(params):
    """Create a CatBoostRegressor configured from ``model_params_cat``."""
    model_kwargs = params["model_params_cat"]["model_kwargs"]
    categorical_features = params["processed_cat_features"]
    return CatBoostRegressor(cat_features=categorical_features, **model_kwargs)
def ml_model(params, data):
    """Model-agnostic main function.

    Dispatches to setup+train (train mode) or load (test mode), then runs
    the performance/prediction step.

    Args:
        params (dict): pipeline configuration; ``model_type`` must be
            "hgb" or "cat".
        data (dict): feature/label/id frames per split.

    Returns:
        The model after the performance step, or None for an unsupported
        ``model_type``.
    """
    if params["model_type"] not in ("hgb", "cat"):
        # BUG FIX: logger.log() requires a numeric level as its first
        # argument; the previous logger.log(msg) call raised TypeError.
        logger.error("Invalid model type, options: `hgb`, and `cat`")
        return None
    if params["train_mode"]:
        model = ml_setup(params)
        model = ml_train(params, model, data)
    else:
        model = ml_load(params)
    return ml_performance(params, model, data)
def prune_model(params, model_id=None):
    """Prune model files and info from GBQ and GCS.

    Args:
        params (dict): pipeline configuration; may carry ``model_id``.
        model_id (str, optional): explicit model id; falls back to
            ``params["model_id"]``.

    Raises:
        ValueError: when no model id is available from either source.
    """
    if not model_id:
        model_id = params.get("model_id")
    if not model_id:
        # BUG FIX: without this guard a None model_id made the
        # `model_id in blob.name` test below raise TypeError.
        raise ValueError("A model_id is required to prune a model.")
    # Fetch GCS client and list blobs
    client = get_storage_client(params)
    bucket = get_data_bucket(params, client)
    blobs = bucket.list_blobs()
    # Delete objects from GCS - if present
    for blob in blobs:
        if model_id in blob.name:
            blob.delete()
            logger.info(f"{blob.name} successfully deleted from Google Cloud Storage!")
    # Delete record from GBQ
    q_delete_run(params, model_id)
    return
def compute_performance_table(params, data_segment, mode):
    """Compute and export percentile performance tables.

    ``data_segment`` is expected to be sorted by descending
    ``conversion_score`` so row order maps onto score percentiles; helper
    columns ``count_range``/``percentile`` are added in place.  The
    aggregated table is written to a spreadsheet via ``insert_to_sheet``.

    Args:
        params (dict): pipeline configuration.
        data_segment (pd.DataFrame): scored rows; carries
            ``conversion_label`` except in test mode.
        mode (str): segment name ("train"/"val"/"test"/"total"); "test"
            has no labels, so only score aggregations are produced.
    """
    # Compute percentiles
    data_segment["count_range"] = range(data_segment.shape[0])
    data_segment["percentile"] = (
        np.ceil(100 * ((1 + data_segment["count_range"]) / data_segment.shape[0]))
    ).astype(int)
    # Compute percentile aggregations
    agg_label = {"conversion_label": ["count", "sum", "mean"]}
    agg_score = {"conversion_score": ["min", "max", "mean"]}
    agg_label.update(agg_score)
    # Test data has no labels: aggregate scores only.
    aggregation = agg_score if mode == "test" else agg_label
    percentile = data_segment.groupby("percentile").agg(aggregation).reset_index()
    # Reindex columns
    percentile.columns = ["_".join(col) for col in percentile.columns]
    if mode != "test":
        # Compute cumulative statistics
        for metric in ["count", "sum"]:
            percentile[f"cum_conv_label_{metric}"] = percentile[
                f"conversion_label_{metric}"
            ].cumsum()
        # Count-weighted running mean of the score up to each percentile.
        percentile["Cum Mean Score"] = (
            percentile.conversion_score_mean * percentile.conversion_label_count
        ).cumsum() / percentile.conversion_label_count.cumsum()
        percentile["Cum Target %"] = (
            percentile.cum_conv_label_sum / percentile.cum_conv_label_count
        )
        # Compute lift and recall
        percentile["Cum Lift"] = (
            percentile["Cum Target %"] / percentile["Cum Target %"].iloc[-1]
        )
        percentile["Recall %"] = (
            percentile.cum_conv_label_sum / percentile.cum_conv_label_sum.iloc[-1]
        )
        percentile = percentile.rename(
            {
                "conversion_label_count": "# obs",
                "conversion_label_sum": "# target",
                "conversion_label_mean": "Target %",
                "cum_conv_label_count": "Cum obs",
                "cum_conv_label_sum": "Cum target",
            },
            axis=1,
        )
        # NOTE(review): this second rename (score columns + percentile) also
        # runs only outside test mode — test-mode sheets keep the raw
        # aggregated column names.  Confirm that is intentional.
        percentile = percentile.rename(
            {
                "percentile_": "percentile",
                "conversion_score_min": "Min score",
                "conversion_score_max": "Max score",
                "conversion_score_mean": "Mean score",
            },
            axis=1,
        )
    insert_to_sheet(
        params, percentile, mode=f"res_{mode}", replace=True, include_index=False
    )
    return
def performance_lift(params, data, model):
    """Compute and export lift tables for every segment plus train+val total.

    For each of train/val/test: predicts scores, joins them with labels
    (labels absent for test), sorts by descending score and exports a
    percentile performance table.  Finally exports a combined train+val
    table.  Frames are stored in ``data`` under ``pred_*``/``check_*`` keys.
    """
    # Compute scores and actual values for train+val data
    for segment in ["train", "val", "test"]:
        # Compute prediction on data segment
        # NOTE(review): a missing df_<segment> raises KeyError *here*,
        # outside the try below — confirm all three splits always exist.
        data[f"pred_{segment}"] = pd.DataFrame(
            ml_predict(params, model, data[f"df_{segment}"]),
            columns=params["data_output_fields"],
        )
        try:
            if segment == "test":
                # Add segment index
                data[f"pred_{segment}"].index = data[f"df_{segment}"].index
                data[f"check_{segment}"] = (
                    data[f"pred_{segment}"]
                    .rename({"conversion": "conversion_score"}, axis=1)
                    .sort_values("conversion_score", ascending=False)
                )
            # Add label
            else:
                # Add segment index
                data[f"pred_{segment}"].index = data[f"id_{segment}"][
                    params["id_field"]
                ]
                data[f"check_{segment}"] = (
                    data[f"label_{segment}"]
                    .join(data[f"pred_{segment}"], lsuffix="_label", rsuffix="_score")
                    .sort_values("conversion_score", ascending=False)
                )
            compute_performance_table(params, data[f"check_{segment}"], mode=segment)
        except KeyError:
            logger.warning(f"{segment} data are not available to export")
    # Join data and sort by prediction score, descending
    data["check_total"] = pd.concat(
        [data["check_train"], data["check_val"]]
    ).sort_values("conversion_score", ascending=False)
    compute_performance_table(params, data["check_total"], mode="total")
    return
def calculate_single_lift_curve(y, y_pred, step=0.01):
    """Calculate a single lift curve.

    Args:
        y: true binary labels (Series; a DataFrame triggers one curve per
            column, returned as a list).
        y_pred: predicted scores aligned with ``y``.
        step (float): fraction of the population per curve point.

    Returns:
        tuple(list, list): x values (population percentage) and the lift at
        each x (positive rate in the top-x fraction divided by the overall
        positive rate).
    """
    if len(y.shape) > 1:
        # Multi-output labels: one curve per column.
        return [calculate_single_lift_curve(y[col], y_pred[col], step) for col in y.columns]
    # Rank the population by predicted score, best first.
    ranked = pd.DataFrame({"real": y.values, "predicted": y_pred.values})
    ranked = ranked.sort_values("predicted", ascending=False)
    fractions = np.arange(step, 1 + step, step)
    base_rate = ranked["real"].sum() / len(ranked)
    lifts = []
    for fraction in fractions:
        # Closest integer head size not smaller than the requested fraction.
        head_size = int(np.ceil(fraction * len(ranked)))
        head_rate = ranked.iloc[:head_size]["real"].sum() / head_size
        lifts.append(head_rate / base_rate)
    return list(fractions * 100), list(lifts)
def calculate_lift_curve(params, data):
    """Calculate lift curves for train/val (and test, when split is enabled)."""
    segments = ["train", "val"]
    if params.get("validation_test_split"):
        segments.append("test")
    curves = {}
    for segment in segments:
        labels = data[f"label_{segment}"]
        scores = data[f"pred_{segment}"]
        curves[f"lift_{segment}"] = calculate_single_lift_curve(labels, scores)
    return curves
# nemo/collections/nlp/data/language_modeling/megatron/indexed_dataset.py
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Most of the code here has been copied from:
# fairseq/fairseq/data/indexed_dataset.py
# with some modifications:
# Removed IndexedRawTextDataset since it relied on Fairseq dictionary
# other slight modifications to remove fairseq dependencies
# Added document index to index file and made it accessible.
# An empty sentence no longer separates documents.
import os
import shutil
import struct
from functools import lru_cache
from itertools import accumulate
import numpy as np
import torch
from nemo.collections.nlp.data.language_modeling.megatron.indexed_retrieval_dataset import (
MMapRetrievalIndexedDataset,
MMapRetrievalIndexedDatasetBuilder,
)
from nemo.collections.nlp.data.language_modeling.text_memmap_dataset import CSVMemMapDataset, TextMemMapDataset
from nemo.utils import logging
def __best_fitting_dtype(vocab_size=None):
    """Smallest integer dtype that can hold token ids for *vocab_size*.

    uint16 tops out at 65535; a small safety margin (65500) is kept below
    that, and int32 is the fallback for unknown or large vocabularies.
    """
    if vocab_size is None or vocab_size >= 65500:
        return np.int32
    return np.uint16
def get_available_dataset_impl():
    """Names of the supported dataset implementations."""
    return ['lazy', 'cached', 'mmap', 'retmmap']
def infer_dataset_impl(path):
    """Guess the dataset implementation by sniffing the index-file magic.

    Returns 'cached', 'mmap' or 'retmmap' depending on the 8-byte header of
    ``<path>.idx``; None when the files are missing or the magic is unknown.
    """
    if IndexedDataset.exists(path):
        with open(index_file_path(path), 'rb') as f:
            magic = f.read(8)
            if magic == IndexedDataset._HDR_MAGIC:
                return 'cached'
            elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]:
                return 'mmap'
            elif magic == MMapRetrievalIndexedDataset.Index._HDR_MAGIC[:8]:
                return 'retmmap'
            else:
                return None
    else:
        print(f"Dataset does not exist: {path}")
        print("Path should be a basename that both .idx and .bin can be appended to get full filenames.")
        return None
def make_builder(out_file, impl, vocab_size=None, chunk_size=64, pad_id=0, retrieval_db=False, stride=64):
    """Instantiate the dataset builder matching *impl*.

    'mmap' and 'retmmap' builders use the smallest dtype that fits
    *vocab_size*; any other value falls back to the legacy builder.
    """
    token_dtype = __best_fitting_dtype(vocab_size)
    if impl == 'mmap':
        return MMapIndexedDatasetBuilder(out_file, dtype=token_dtype)
    if impl == 'retmmap':
        return MMapRetrievalIndexedDatasetBuilder(
            out_file,
            chunk_size=chunk_size,
            pad_id=pad_id,
            retrieval_db=retrieval_db,
            dtype=token_dtype,
            stride=stride,
        )
    return IndexedDatasetBuilder(out_file)
def make_indexed_dataset_compatibility(ds):
    """Make any dataset compatible with IndexedDataset for Megatron samples mapping.

    Attaches ``doc_idx`` (a boundary at every sample) and ``sizes`` (all
    ones) to *ds* in place and returns it.

    Raises:
        AttributeError: when *ds* already carries either attribute.
    """
    already_compatible = (
        getattr(ds, 'doc_idx', None) is not None
        or getattr(ds, 'sizes', None) is not None
    )
    if already_compatible:
        raise AttributeError("Dataset already has doc_idx or sizes attributes.")
    num_samples = len(ds)
    ds.doc_idx = np.arange(num_samples + 1, dtype=np.int64)
    ds.sizes = np.ones(num_samples, dtype=np.int32)
    return ds
def deallocate_indexed_dataset_memory(indexed_dataset):
    """Deallocate memory of an IndexedDataset."""
    if isinstance(indexed_dataset, MMapIndexedDataset):
        # for MMapIndexedDataset the sizes live in the mmapped index buffer
        # and cannot be released; only the document index is dropped.
        indexed_dataset._index._doc_idx = None
        return
    indexed_dataset.sizes = None
    indexed_dataset.doc_idx = None
def make_dataset(path, impl, skip_warmup=False, impl_kwargs=None, delay_data_mmap=False):
    """Open a dataset of the given implementation.

    Args:
        path (str): basename to which .idx/.bin are appended (bin datasets)
            or the text/csv file path (memmap datasets).
        impl (str): 'text_mmap', 'csv_mmap', 'lazy', 'cached', 'mmap',
            'retmmap', or 'infer' to sniff from the index header.
        skip_warmup (bool): skip pre-reading mmap files into the page cache.
        impl_kwargs (dict, optional): extra kwargs for the memmap datasets.
        delay_data_mmap (bool): defer mapping the data file (mmap impl only).

    Returns:
        The dataset instance, or None when the files do not exist.

    Raises:
        ValueError: for an unknown *impl*.
    """
    # BUG FIX: the previous mutable default (impl_kwargs={}) was shared
    # across calls; use a None sentinel instead.
    if impl_kwargs is None:
        impl_kwargs = {}
    # first handle text memap
    if impl == 'text_mmap':
        return TextMemMapDataset(path, **impl_kwargs)
    elif impl == 'csv_mmap':
        return CSVMemMapDataset(path, **impl_kwargs)
    # now handle bin memap
    if not IndexedDataset.exists(path):
        print(f"Dataset does not exist: {path}")
        print("Path should be a basename that both .idx and .bin can be appended to get full filenames.")
        return None
    if impl == 'infer':
        impl = infer_dataset_impl(path)
    if impl == 'lazy' and IndexedDataset.exists(path):
        return IndexedDataset(path)
    elif impl == 'cached' and IndexedDataset.exists(path):
        return IndexedCachedDataset(path)
    elif impl == 'mmap' and MMapIndexedDataset.exists(path):
        return MMapIndexedDataset(path, skip_warmup, delay_data_mmap)
    elif impl == 'retmmap':
        return MMapRetrievalIndexedDataset(path, skip_warmup)
    raise ValueError(f"Unknown dataset implementation: {impl}")
def dataset_exists(path, impl):
    """True when both index and data files exist for *path* under *impl*."""
    dataset_classes = {
        'mmap': MMapIndexedDataset,
        'retmmap': MMapRetrievalIndexedDataset,
    }
    return dataset_classes.get(impl, IndexedDataset).exists(path)
def read_longs(f, n):
    """Read *n* little-endian int64 values from binary stream *f*."""
    out = np.empty(n, dtype=np.int64)
    f.readinto(out)
    return out
def write_longs(f, a):
    """Write sequence *a* to binary stream *f* as raw int64 values."""
    f.write(np.asarray(a, dtype=np.int64).tobytes())
# Mapping between on-disk dtype codes (written into .idx headers) and numpy
# dtypes.  Note: np.double is an alias of np.float64, so codes 6 and 7 both
# decode to float64; encoding float64 yields the first match (6).
dtypes = {1: np.uint8, 2: np.int8, 3: np.int16, 4: np.int32, 5: np.int64, 6: np.float64, 7: np.double, 8: np.uint16}


def code(dtype):
    """Return the on-disk code for *dtype*; raise ValueError when unsupported."""
    for dtype_code, candidate in dtypes.items():
        if candidate == dtype:
            return dtype_code
    raise ValueError(dtype)
def index_file_path(prefix_path):
    """Return the companion index-file name (``<prefix>.idx``)."""
    suffix = '.idx'
    return prefix_path + suffix
def data_file_path(prefix_path):
    """Return the companion data-file name (``<prefix>.bin``)."""
    suffix = '.bin'
    return prefix_path + suffix
def create_doc_idx(sizes):
    """Derive document start indices from *sizes*.

    A size of 0 marks an (empty) document separator; the following position
    starts a new document.  Index 0 always starts the first document.
    """
    doc_idx = [0]
    doc_idx.extend(i + 1 for i, s in enumerate(sizes) if s == 0)
    return doc_idx
class IndexedDataset(torch.utils.data.Dataset):
    """Loader for the legacy "lazy" IndexedDataset format.

    The index file (``.idx``) holds dtype/element-size, per-item dimension
    and data offsets, flattened sizes and document boundaries; the data file
    (``.bin``) holds the raw values, read on demand per item.
    """

    _HDR_MAGIC = b'TNTIDX\x00\x00'

    def __init__(self, path):
        super().__init__()
        self.path = path
        # Data file is opened lazily on first __getitem__.
        self.data_file = None
        self.read_index(path)

    def read_index(self, path):
        """Parse the .idx header and load all offset/size tables into memory."""
        with open(index_file_path(path), 'rb') as f:
            magic = f.read(8)
            assert magic == self._HDR_MAGIC, (
                'Index file doesn\'t match expected format. ' 'Make sure that --dataset-impl is configured properly.'
            )
            version = f.read(8)
            assert struct.unpack('<Q', version) == (1,)
            code, self.element_size = struct.unpack('<QQ', f.read(16))
            self.dtype = dtypes[code]
            self._len, self.s = struct.unpack('<QQ', f.read(16))
            # NOTE(review): struct.unpack returns a 1-tuple; np.empty accepts
            # it as a shape, so read_longs below still works.
            self.doc_count = struct.unpack('<Q', f.read(8))
            self.dim_offsets = read_longs(f, self._len + 1)
            self.data_offsets = read_longs(f, self._len + 1)
            self.sizes = read_longs(f, self.s)
            self.doc_idx = read_longs(f, self.doc_count)

    def read_data(self, path):
        """Open the raw data file unbuffered (we always seek+readinto)."""
        self.data_file = open(data_file_path(path), 'rb', buffering=0)

    def check_index(self, i):
        if i < 0 or i >= self._len:
            raise IndexError('index out of range')

    def __del__(self):
        if self.data_file:
            self.data_file.close()

    # @lru_cache(maxsize=8)
    def __getitem__(self, idx):
        """Return one item as a numpy array, or a list of arrays for a slice."""
        if not self.data_file:
            self.read_data(self.path)
        if isinstance(idx, int):
            i = idx
            self.check_index(i)
            tensor_size = self.sizes[self.dim_offsets[i] : self.dim_offsets[i + 1]]
            a = np.empty(tensor_size, dtype=self.dtype)
            self.data_file.seek(self.data_offsets[i] * self.element_size)
            self.data_file.readinto(a)
            return a
        elif isinstance(idx, slice):
            start, stop, step = idx.indices(len(self))
            if step != 1:
                raise ValueError("Slices into indexed_dataset must be contiguous")
            # One contiguous read covering all items, then split per item.
            sizes = self.sizes[self.dim_offsets[start] : self.dim_offsets[stop]]
            size = sum(sizes)
            a = np.empty(size, dtype=self.dtype)
            self.data_file.seek(self.data_offsets[start] * self.element_size)
            self.data_file.readinto(a)
            offsets = list(accumulate(sizes))
            sents = np.split(a, offsets[:-1])
            return sents

    def __len__(self):
        return self._len

    def num_tokens(self, index):
        return self.sizes[index]

    def size(self, index):
        return self.sizes[index]

    @staticmethod
    def exists(path):
        return os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path))

    @property
    def supports_prefetch(self):
        return False  # avoid prefetching to save memory
class IndexedCachedDataset(IndexedDataset):
    """IndexedDataset variant that prefetches requested items into one RAM buffer.

    After ``prefetch``, ``__getitem__`` serves items from the in-memory cache
    instead of the data file (which is closed so the object stays picklable).
    """

    def __init__(self, path):
        super().__init__(path)
        # Flat buffer of all prefetched values and index -> buffer offset map.
        self.cache = None
        self.cache_index = {}

    @property
    def supports_prefetch(self):
        return True

    def prefetch(self, indices):
        """Load the given item indices into a single contiguous cache buffer."""
        if all(i in self.cache_index for i in indices):
            return
        if not self.data_file:
            self.read_data(self.path)
        indices = sorted(set(indices))
        total_size = 0
        for i in indices:
            total_size += self.data_offsets[i + 1] - self.data_offsets[i]
        self.cache = np.empty(total_size, dtype=self.dtype)
        ptx = 0
        self.cache_index.clear()
        for i in indices:
            self.cache_index[i] = ptx
            size = self.data_offsets[i + 1] - self.data_offsets[i]
            # readinto the cache slice directly — no intermediate copy.
            a = self.cache[ptx : ptx + size]
            self.data_file.seek(self.data_offsets[i] * self.element_size)
            self.data_file.readinto(a)
            ptx += size
        if self.data_file:
            # close and delete data file after prefetch so we can pickle
            self.data_file.close()
            self.data_file = None

    # @lru_cache(maxsize=8)
    def __getitem__(self, idx):
        """Serve an item (or slice of items) from the prefetched cache.

        Raises KeyError if the index was not prefetched.
        """
        if isinstance(idx, int):
            i = idx
            self.check_index(i)
            tensor_size = self.sizes[self.dim_offsets[i] : self.dim_offsets[i + 1]]
            a = np.empty(tensor_size, dtype=self.dtype)
            ptx = self.cache_index[i]
            np.copyto(a, self.cache[ptx : ptx + a.size])
            return a
        elif isinstance(idx, slice):
            # Hack just to make this work, can optimizer later if necessary
            sents = []
            for i in range(*idx.indices(len(self))):
                sents.append(self[i])
            return sents
class IndexedDatasetBuilder(object):
    """Writer for the legacy IndexedDataset .bin/.idx pair."""

    # Bytes per element for each supported dtype.
    # NOTE(review): np.double is an alias of np.float64, so the later entry
    # (8) overwrites the literal's earlier `np.float64: 4` — the effective
    # float64 element size is 8 bytes, which is correct, but the literal is
    # misleading.
    element_sizes = {np.uint8: 1, np.int8: 1, np.int16: 2, np.int32: 4, np.int64: 8, np.float64: 4, np.double: 8}

    def __init__(self, out_file, dtype=np.int32):
        self.out_file = open(out_file, 'wb')
        self.dtype = dtype
        self.data_offsets = [0]
        self.dim_offsets = [0]
        self.sizes = []
        self.element_size = self.element_sizes[self.dtype]
        self.doc_idx = [0]

    def add_item(self, tensor):
        """Append one torch tensor's values to the data file and record offsets."""
        # `bytes` (shadows the builtin) is the number of bytes written.
        bytes = self.out_file.write(np.array(tensor.numpy(), dtype=self.dtype))
        # NOTE(review): true division yields float offsets; exact here since
        # writes are whole elements, and write_longs truncates to int64.
        self.data_offsets.append(self.data_offsets[-1] + bytes / self.element_size)
        for s in tensor.size():
            self.sizes.append(s)
        self.dim_offsets.append(self.dim_offsets[-1] + len(tensor.size()))

    def end_document(self):
        """Mark the current item count as a document boundary."""
        self.doc_idx.append(len(self.sizes))

    def merge_file_(self, another_file):
        """Append another dataset's offsets, sizes and raw data (same dtype)."""
        index = IndexedDataset(another_file)
        assert index.dtype == self.dtype
        begin = self.data_offsets[-1]
        for offset in index.data_offsets[1:]:
            self.data_offsets.append(begin + offset)
        self.sizes.extend(index.sizes)
        begin = self.dim_offsets[-1]
        for dim_offset in index.dim_offsets[1:]:
            self.dim_offsets.append(begin + dim_offset)
        with open(data_file_path(another_file), 'rb') as f:
            while True:
                data = f.read(1024)
                if data:
                    self.out_file.write(data)
                else:
                    break

    def finalize(self, index_file):
        """Close the data file and write the .idx header and tables."""
        self.out_file.close()
        index = open(index_file, 'wb')
        index.write(b'TNTIDX\x00\x00')
        index.write(struct.pack('<Q', 1))
        index.write(struct.pack('<QQ', code(self.dtype), self.element_size))
        index.write(struct.pack('<QQ', len(self.data_offsets) - 1, len(self.sizes)))
        index.write(struct.pack('<Q', len(self.doc_idx)))
        write_longs(index, self.dim_offsets)
        write_longs(index, self.data_offsets)
        write_longs(index, self.sizes)
        write_longs(index, self.doc_idx)
        index.close()
def _warmup_mmap_file(path):
    """Sequentially read *path* in 100 MB chunks to prime the OS page cache."""
    chunk_size = 100 * 1024 * 1024
    with open(path, 'rb') as stream:
        while stream.read(chunk_size):
            pass
class MMapIndexedDataset(torch.utils.data.Dataset):
    """Memory-mapped indexed dataset: items are served as zero-copy views
    (``np.frombuffer``) over an mmapped ``.bin`` file, with per-item
    pointers/sizes and document boundaries read from an mmapped ``.idx``."""

    class Index(object):
        """Reader/writer for the .idx companion file.

        Layout: 9-byte magic, version (<Q), dtype code (<B), item count (<Q),
        doc count (<Q), then int32 sizes, int64 byte pointers and int64
        document indices.
        """

        _HDR_MAGIC = b'MMIDIDX\x00\x00'

        @classmethod
        def writer(cls, path, dtype):
            """Return a context manager that writes an index file for *dtype*."""

            class _Writer(object):
                def __enter__(self):
                    self._file = open(path, 'wb')
                    self._file.write(cls._HDR_MAGIC)
                    self._file.write(struct.pack('<Q', 1))
                    self._file.write(struct.pack('<B', code(dtype)))
                    return self

                @staticmethod
                def _get_pointers(sizes):
                    # Byte offset of each item: running sum of size * itemsize.
                    dtype_size = dtype().itemsize
                    address = 0
                    pointers = []
                    for size in sizes:
                        pointers.append(address)
                        address += size * dtype_size
                    return pointers

                def write(self, sizes, doc_idx):
                    """Write counts, sizes, pointers and document indices."""
                    pointers = self._get_pointers(sizes)
                    self._file.write(struct.pack('<Q', len(sizes)))
                    self._file.write(struct.pack('<Q', len(doc_idx)))
                    sizes = np.array(sizes, dtype=np.int32)
                    self._file.write(sizes.tobytes(order='C'))
                    del sizes
                    pointers = np.array(pointers, dtype=np.int64)
                    self._file.write(pointers.tobytes(order='C'))
                    del pointers
                    doc_idx = np.array(doc_idx, dtype=np.int64)
                    self._file.write(doc_idx.tobytes(order='C'))

                def __exit__(self, exc_type, exc_val, exc_tb):
                    self._file.close()

            return _Writer()

        def __init__(self, path, skip_warmup=False):
            # Parse the fixed-size header, then mmap the tables in place.
            with open(path, 'rb') as stream:
                magic_test = stream.read(9)
                assert self._HDR_MAGIC == magic_test, (
                    'Index file doesn\'t match expected format. '
                    'Make sure that --dataset-impl is configured properly.'
                )
                version = struct.unpack('<Q', stream.read(8))
                assert (1,) == version
                (dtype_code,) = struct.unpack('<B', stream.read(1))
                self._dtype = dtypes[dtype_code]
                self._dtype_size = self._dtype().itemsize
                self._len = struct.unpack('<Q', stream.read(8))[0]
                self._doc_count = struct.unpack('<Q', stream.read(8))[0]
                # Tables start right after the header.
                offset = stream.tell()
            if not skip_warmup:
                logging.info(" warming up index mmap file...")
                _warmup_mmap_file(path)
            self._bin_buffer_mmap = np.memmap(path, mode='r', order='C')
            self._bin_buffer = memoryview(self._bin_buffer_mmap)
            logging.info(" reading sizes...")
            self._sizes = np.frombuffer(self._bin_buffer, dtype=np.int32, count=self._len, offset=offset)
            logging.info(" reading pointers...")
            self._pointers = np.frombuffer(
                self._bin_buffer, dtype=np.int64, count=self._len, offset=offset + self._sizes.nbytes
            )
            logging.info(" reading document index...")
            self._doc_idx = np.frombuffer(
                self._bin_buffer,
                dtype=np.int64,
                count=self._doc_count,
                offset=offset + self._sizes.nbytes + self._pointers.nbytes,
            )

        def __del__(self):
            self._bin_buffer_mmap._mmap.close()
            del self._bin_buffer_mmap

        @property
        def dtype(self):
            return self._dtype

        @property
        def sizes(self):
            return self._sizes

        @property
        def doc_idx(self):
            return self._doc_idx

        @lru_cache(maxsize=8)
        def __getitem__(self, i):
            # (byte pointer, element count) of item i.
            return self._pointers[i], self._sizes[i]

        def __len__(self):
            return self._len

    def __init__(self, path, skip_warmup=False, delay_data_mmap=False):
        super().__init__()
        self._path = None
        self._index = None
        self._bin_buffer = None
        self._delay_data_mmap = delay_data_mmap
        self._skip_warmup = skip_warmup
        self._do_init(path, skip_warmup, delay_data_mmap)

    def __getstate__(self):
        # Pickle only the path; buffers are re-created on unpickle.
        return self._path

    def __setstate__(self, state):
        # NOTE(review): unpickling uses _do_init's defaults
        # (skip_warmup=True, delay_data_mmap=False), not the original flags.
        self._do_init(state)

    def _do_init(self, path, skip_warmup=True, delay_data_mmap=False):
        self._path = path
        self._index = self.Index(index_file_path(self._path), skip_warmup)
        if not delay_data_mmap:
            self._create_data_mmap(skip_warmup)
        else:
            logging.info(" skip creating data numpy buffer of mmap...")
            self._bin_buffer_mmap = None
            self._bin_buffer = None

    def _create_data_mmap(self, skip_warmup):
        """Map the .bin data file and wrap it in a memoryview."""
        if not skip_warmup:
            logging.info(" warming up data mmap file...")
            _warmup_mmap_file(data_file_path(self._path))
        logging.info(" creating numpy buffer of mmap...")
        self._bin_buffer_mmap = np.memmap(data_file_path(self._path), mode='r', order='C')
        logging.info(" creating memory view of numpy buffer...")
        self._bin_buffer = memoryview(self._bin_buffer_mmap)

    def __del__(self):
        if self._bin_buffer_mmap is not None:
            self._bin_buffer_mmap._mmap.close()
            del self._bin_buffer_mmap
        del self._index

    def __len__(self):
        return len(self._index)

    # @lru_cache(maxsize=8)
    def __getitem__(self, idx):
        """Return a zero-copy numpy view of one item, or views for a slice."""
        if isinstance(idx, int):
            ptr, size = self._index[idx]
            np_array = np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=size, offset=ptr)
            return np_array
        elif isinstance(idx, slice):
            start, stop, step = idx.indices(len(self))
            if step != 1:
                raise ValueError("Slices into indexed_dataset must be contiguous")
            ptr = self._index._pointers[start]
            sizes = self._index._sizes[idx]
            offsets = list(accumulate(sizes))
            total_size = sum(sizes)
            np_array = np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=total_size, offset=ptr)
            sents = np.split(np_array, offsets[:-1])
            return sents

    def get(self, idx, offset=0, length=None):
        """ Retrieves a single item from the dataset with the option to only
        return a portion of the item.

        get(idx) is the same as [idx] but get() does not support slicing.
        """
        ptr, size = self._index[idx]
        if length is None:
            length = size - offset
        ptr += offset * np.dtype(self._index.dtype).itemsize
        np_array = np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=length, offset=ptr)
        return np_array

    def create_data_mmap(self):
        """Create the delayed data mmap (pairs with delay_data_mmap=True)."""
        self._create_data_mmap(self._skip_warmup)

    @property
    def sizes(self):
        return self._index.sizes

    @property
    def doc_idx(self):
        return self._index.doc_idx

    def get_doc_idx(self):
        return self._index._doc_idx

    def set_doc_idx(self, doc_idx_):
        self._index._doc_idx = doc_idx_

    @property
    def supports_prefetch(self):
        return False

    @staticmethod
    def exists(path):
        return os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path))
class MMapIndexedDatasetBuilder(object):
    """Incrementally writes a .bin data file and finalizes a matching .idx index."""

    def __init__(self, out_file, dtype=np.int64):
        self._data_file = open(out_file, 'wb')
        self._dtype = dtype
        self._sizes = []
        self._doc_idx = [0]

    def add_item(self, tensor):
        """Append one torch tensor's raw values, recording its element count."""
        flat = np.array(tensor.numpy(), dtype=self._dtype)
        self._data_file.write(flat.tobytes(order='C'))
        self._sizes.append(flat.size)

    def end_document(self):
        """Mark the current item count as a document boundary."""
        self._doc_idx.append(len(self._sizes))

    def merge_file_(self, another_file):
        """Append another dataset's sizes and raw data (dtype must match)."""
        # Concatenate index
        other_index = MMapIndexedDataset.Index(index_file_path(another_file))
        assert other_index.dtype == self._dtype
        self._sizes.extend(other_index.sizes)
        # Concatenate data
        with open(data_file_path(another_file), 'rb') as f:
            shutil.copyfileobj(f, self._data_file)

    def finalize(self, index_file):
        """Close the data file and write the index file."""
        self._data_file.close()
        with MMapIndexedDataset.Index.writer(index_file, self._dtype) as index:
            index.write(self._sizes, self._doc_idx)
# yolov7/modeling/meta_arch/utils.py
from detectron2.layers.nms import batched_nms
import torch
from torchvision.ops import boxes as box_ops
def iou(boxes, top_box):
    """IoU of each box in *boxes* ((N, 4) xyxy) against a single *top_box* (4,)."""
    # Intersection rectangle: clamp box corners into top_box.
    left = boxes[:, 0].clamp(min=top_box[0])
    top = boxes[:, 1].clamp(min=top_box[1])
    right = boxes[:, 2].clamp(max=top_box[2])
    bottom = boxes[:, 3].clamp(max=top_box[3])
    inter_area = (right - left).clamp(min=0) * (bottom - top).clamp(min=0)
    box_areas = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
    top_area = (top_box[2] - top_box[0]) * (top_box[3] - top_box[1])
    union_area = top_area + box_areas - inter_area
    return inter_area / union_area
def scale_by_iou(ious, sigma, soft_mode="gaussian"):
    """Soft-NMS score-decay factor for each IoU value.

    "linear" leaves scores untouched below *sigma* and decays them by
    (1 - iou) above it; "gaussian" applies exp(-iou^2 / sigma) everywhere.
    """
    if soft_mode == "linear":
        above = ious >= sigma
        scale = ious.new_ones(ious.size())
        scale[above] = 1 - ious[above]
        return scale
    return torch.exp(-(ious ** 2) / sigma)
def softnms(boxes, scores, sigma, score_threshold, soft_mode="gaussian"):
    """Soft-NMS: decay (rather than drop) scores of boxes overlapping the top box.

    Iteratively takes the highest-scoring unprocessed box, scales the scores
    of all other live boxes by their IoU against it, and retires boxes whose
    score falls below *score_threshold*.  ``scores`` is modified IN PLACE and
    also returned.

    Args:
        boxes (Tensor[N, 4]): xyxy boxes.
        scores (Tensor[N]): confidence scores (mutated in place).
        sigma (float): decay parameter passed to ``scale_by_iou``.
        score_threshold (float): boxes decayed below this stop participating.
        soft_mode (str): "linear" or "gaussian".

    Returns:
        Tensor[N]: the decayed scores.
    """
    assert soft_mode in ["linear", "gaussian"]
    undone_mask = scores >= score_threshold
    while undone_mask.sum() > 1:
        # Pick the best remaining box (argmax within the mask, mapped back
        # to its absolute index).
        idx = scores[undone_mask].argmax()
        idx = undone_mask.nonzero(as_tuple=False)[idx].item()
        top_box = boxes[idx]
        undone_mask[idx] = False
        _boxes = boxes[undone_mask]
        # Decay every other live box by its overlap with the chosen box.
        ious = iou(_boxes, top_box)
        scales = scale_by_iou(ious, sigma, soft_mode)
        scores[undone_mask] *= scales
        undone_mask[scores < score_threshold] = False
    return scores
def batched_softnms(boxes, scores, idxs, iou_threshold,
                    score_threshold=0.001, soft_mode="gaussian"):
    """Class-aware soft-NMS: decay scores independently within each category.

    Args:
        boxes (Tensor[N, 4]): xyxy boxes.
        scores (Tensor[N]): confidence scores, modified IN PLACE by softnms.
        idxs (Tensor[N]): integer category index per box.
        iou_threshold (float): sigma forwarded to per-class soft-NMS.
        score_threshold (float): boxes decayed below this are dropped.
        soft_mode (str): "linear" or "gaussian".

    Returns:
        Tensor: kept indices, sorted by decayed score (descending).
    """
    assert soft_mode in ["linear", "gaussian"]
    assert boxes.shape[-1] == 4
    # change scores inplace
    # no need to return changed scores
    # (loop variable renamed from `id`, which shadowed the builtin)
    for class_id in torch.unique(idxs).cpu().tolist():
        mask = (idxs == class_id).nonzero(as_tuple=False).view(-1)
        scores[mask] = softnms(boxes[mask], scores[mask], iou_threshold,
                               score_threshold, soft_mode)
    keep = (scores > score_threshold).nonzero(as_tuple=False).view(-1)
    keep = keep[scores[keep].argsort(descending=True)]
    return keep
def cluster_nms(boxes, scores, iou_threshold):
    """Cluster-NMS: matrix-form NMS iterated to a fixed point.

    Boxes are sorted by score; the strictly-lower-triangular IoU matrix
    (transposed so entry [i, j] is the overlap of a better-scored box i on
    box j) is repeatedly masked by the current keep vector until the kept
    set stops changing.  Returns the original indices of kept boxes.
    """
    # Start by assuming every box is kept.
    last_keep = torch.ones(*scores.shape).to(boxes.device)
    scores, idx = scores.sort(descending=True)
    boxes = boxes[idx]
    origin_iou_matrix = box_ops.box_iou(
        boxes, boxes).tril(diagonal=-1).transpose(1, 0)
    while True:
        # Zero out rows of suppressed boxes so they cannot suppress others.
        iou_matrix = torch.mm(torch.diag(last_keep.float()), origin_iou_matrix)
        # A box survives when no kept, better-scored box overlaps it too much.
        keep = (iou_matrix.max(dim=0)[0] <= iou_threshold)
        if (keep == last_keep).all():
            return idx[keep.nonzero(as_tuple=False)]
        last_keep = keep
def batched_clusternms(boxes, scores, idxs, iou_threshold):
    """Class-aware cluster-NMS: run cluster_nms independently per category.

    Args:
        boxes (Tensor[N, 4]): xyxy boxes.
        scores (Tensor[N]): confidence scores.
        idxs (Tensor[N]): integer category index per box.
        iou_threshold (float): suppression threshold.

    Returns:
        Tensor: kept indices, sorted by score (descending).
    """
    assert boxes.shape[-1] == 4
    result_mask = scores.new_zeros(scores.size(), dtype=torch.bool)
    # (loop variable renamed from `id`, which shadowed the builtin)
    for class_id in torch.unique(idxs).cpu().tolist():
        mask = (idxs == class_id).nonzero(as_tuple=False).view(-1)
        keep = cluster_nms(boxes[mask], scores[mask], iou_threshold)
        result_mask[mask[keep]] = True
    keep = result_mask.nonzero(as_tuple=False).view(-1)
    keep = keep[scores[keep].argsort(descending=True)]
    return keep
def generalized_batched_nms(boxes, scores, idxs, iou_threshold,
                            score_threshold=0.001, nms_type="normal"):
    """Dispatch to normal / soft / cluster batched NMS by *nms_type*.

    Args:
        nms_type (str): "normal", "cluster", or "softnms-<mode>" where
            <mode> is "linear" or "gaussian".

    Returns:
        Tensor: kept box indices.

    Raises:
        NotImplementedError: for an unknown *nms_type*.
    """
    assert boxes.shape[-1] == 4
    if nms_type == "normal":
        keep = batched_nms(boxes, scores, idxs, iou_threshold)
    elif nms_type.startswith("softnms"):
        # BUG FIX: str.lstrip strips a *character set*, not a prefix, so
        # lstrip("softnms-") could mangle mode names starting with any of
        # those characters; slice the literal prefix off instead.
        soft_mode = nms_type[len("softnms-"):]
        keep = batched_softnms(boxes, scores, idxs, iou_threshold,
                               score_threshold=score_threshold,
                               soft_mode=soft_mode)
    elif nms_type == "cluster":
        keep = batched_clusternms(boxes, scores, idxs, iou_threshold)
    else:
        raise NotImplementedError(
            "NMS type not implemented: \"{}\"".format(nms_type))
    return keep
/mayan-edms-4.4.8.tar.gz/mayan-edms-4.4.8/mayan/apps/authentication/forms.py | from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import AuthenticationForm
from django.utils.translation import ugettext_lazy as _
from mayan.apps.user_management.querysets import get_user_queryset
from mayan.apps.views.forms import FilteredSelectionForm
from .permissions import permission_users_impersonate
class AuthenticationFormBase(forms.Form):
    """Base form for the multi-step authentication wizard.

    Unlike Django's AuthenticationForm, the constructor signature matches
    what the form wizard passes positionally (data, files, prefix, initial)
    plus the current request and the wizard instance.
    """
    # Optional display label for the backend/step; subclasses may override.
    _label = None
    # Name of the field treated as the credential identifier by callers.
    PASSWORD_FIELD = 'username'

    def __init__(self, data, files, prefix, initial, request=None, wizard=None):
        self.request = request
        # Populated by subclasses after successful credential validation.
        self.user_cache = None
        self.wizard = wizard
        super().__init__(
            data=data, files=files, prefix=prefix, initial=initial
        )

    def get_user(self):
        """Return the authenticated user cached by clean(), or None."""
        return self.user_cache
class AuthenticationFormMixinRememberMe(forms.Form):
    """Mixin adding a "Remember me" checkbox, always rendered last."""
    _form_field_name_remember_me = 'remember_me'

    remember_me = forms.BooleanField(label=_('Remember me'), required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Move the remember_me field to the end of the rendered field order,
        # after any fields contributed by sibling classes in the MRO.
        field_order = [
            field for field in self.fields if field != self._form_field_name_remember_me
        ]
        field_order.append(self._form_field_name_remember_me)
        self.order_fields(field_order=field_order)
class AuthenticationFormEmailPassword(
    AuthenticationFormMixinRememberMe, AuthenticationForm
):
    """
    A form to use email address authentication.

    Reuses Django's AuthenticationForm "username" field but rebinds its
    length and label to the user model's ``email`` field.
    """
    PASSWORD_FIELD = 'email'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        UserModel = get_user_model()
        self.username_field = UserModel._meta.get_field(field_name='email')
        # Fall back to the RFC-suggested 254 when the model sets no max_length.
        username_max_length = self.username_field.max_length or 254
        self.fields['username'].max_length = username_max_length
        self.fields['username'].widget.attrs['maxlength'] = username_max_length
        self.fields['username'].label = self.username_field.verbose_name
class AuthenticationFormUsernamePassword(
    AuthenticationFormMixinRememberMe, AuthenticationForm
):
    """
    Modified authentication form to include the "Remember me" field.

    Standard username/password login; the mixin appends the checkbox.
    """
    PASSWORD_FIELD = 'username'
class UserImpersonationOptionsForm(forms.Form):
    """
    Form presenting the option that controls an impersonation session.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The field is created at initialization time; subclasses position
        # it relative to their own fields via `.order_fields()`.
        permanent_field = forms.BooleanField(
            label=_('Permanent'), help_text=_(
                'If selected, disables ending impersonation.'
            ), required=False
        )
        self.fields['permanent'] = permanent_field
class UserImpersonationSelectionForm(
    FilteredSelectionForm, UserImpersonationOptionsForm
):
    """
    Form to pick the user to impersonate plus the session options.
    """
    class Meta:
        allow_multiple = False
        field_name = 'user_to_impersonate'
        label = _('User')
        queryset = get_user_queryset().none()
        permission = permission_users_impersonate
        required = True
        widget_attributes = {'class': 'select2'}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Exclude the requesting user: impersonating oneself is meaningless.
        # NOTE(review): assumes callers always pass `user` as a keyword
        # argument and that the parent classes tolerate it -- confirm.
        candidate_queryset = get_user_queryset().exclude(pk=kwargs['user'].pk)
        self.fields['user_to_impersonate'].queryset = candidate_queryset
        self.order_fields(field_order=('user_to_impersonate', 'permanent'))
/heat-infoblox-2.0.0.tar.gz/heat-infoblox-2.0.0/heat_infoblox/resources/netmri_managed_resource.py |
import logging
from heat.common.i18n import _
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
from heat_infoblox import constants
from heat_infoblox import netmri_resource_mixin as mri
from heat_infoblox import resource_utils
LOG = logging.getLogger(__name__)
class NetMRIManagedResource(resource.Resource, mri.NetMRIResourceMixin):
    '''A resource managed via NetMRI jobs.

    The user may specify separate scripts for create and delete. The create
    job's id is recorded as the Heat resource id; the delete job's id is
    stashed in the resource metadata so its completion can be polled later.
    '''

    # Template-visible attribute names; resolved through the mixin's
    # _resolve_job_attribute() (see _resolve_attribute below).
    ATTRIBUTES = (
        JOB,
        JOB_DETAILS
    ) = (
        'job',
        'job_details'
    )

    # Property names.
    CREATE_JOB = 'create_job'
    DELETE_JOB = 'delete_job'
    # Metadata key carrying the delete job id between handle_delete()
    # and check_delete_complete().
    DELETE_JOB_ID = 'delete_job_id'

    support_status = support.SupportStatus(
        support.UNSUPPORTED,
        _('See support.infoblox.com for support.'))

    properties_schema = {
        constants.CONNECTION:
            resource_utils.connection_schema(constants.NETMRI),
        CREATE_JOB: properties.Schema(
            properties.Schema.MAP,
            _("The job to execute on resource creation."),
            required=True,
            schema=mri.NetMRIResourceMixin.job_schema
        ),
        DELETE_JOB: properties.Schema(
            properties.Schema.MAP,
            _("The job to execute on resource deletion."),
            required=True,
            schema=mri.NetMRIResourceMixin.job_schema
        )
    }

    attributes_schema = mri.NetMRIResourceMixin.job_attributes_schema

    def handle_create(self):
        """Launch the create job and record its id as the resource id."""
        r = self._execute_job(self.properties[self.CREATE_JOB])
        self.resource_id_set(r['JobID'])

    def check_create_complete(self, handler_data):
        """Return True once the create job is done.

        When the job's WAIT property is falsy, completion is not polled and
        the resource is considered created immediately.
        """
        if not self.properties[self.CREATE_JOB][self.WAIT]:
            return True
        job_id = int(self.resource_id)
        return self._check_job_complete(job_id)

    def _check_job_complete(self, job_id):
        """Return True when NetMRI reports a completion time for the job."""
        job = self.netmri.show('job', job_id)['job']
        LOG.debug("job = %s", job)
        if job['completed_at']:
            return True
        return False

    def handle_delete(self):
        """Launch the delete job and remember its id in the metadata."""
        r = self._execute_job(self.properties[self.DELETE_JOB])
        self.metadata_set({self.DELETE_JOB_ID: r['JobID']})

    def check_delete_complete(self, handler_data):
        """Return True once the delete job recorded in metadata is done."""
        if not self.properties[self.DELETE_JOB][self.WAIT]:
            return True
        md = self.metadata_get()
        job_id = int(md[self.DELETE_JOB_ID])
        return self._check_job_complete(job_id)

    def _resolve_attribute(self, name):
        """Delegate attribute resolution to the NetMRI job mixin."""
        return self._resolve_job_attribute(name)
def resource_mapping():
    """Map Heat template resource type names to their plugin classes."""
    return {'Infoblox::NetMRI::ManagedResource': NetMRIManagedResource}
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/widget/README | -------------------------------------------------------------------------------
dojox.widget Collection
-------------------------------------------------------------------------------
Version 1.0
Release date: 10/31/2007
-------------------------------------------------------------------------------
Project state:
[Calendar] experimental
[CalendarFx] experimental
[ColorPicker] beta
[Dialog] experimental
[DialogSimple] beta
[FeedPortlet] experimental
[FilePicker] experimental
[FisheyeList] experimental
[FisheyeLite] beta
[Iterator] experimental
[Loader] experimental
[Pager] experimental
[Portlet] experimental
[PlaceholderMenuItem] experimental
[Roller] experimental
[RollingList] experimental
[SortList] experimental
[TitleGroup] beta
[Toaster] experimental
[Wizard] experimental
[AnalogGauge] experimental
[BarGauge] experimental
[Standby] experimental
-------------------------------------------------------------------------------
Credits:
[Calendar] Shane O'Sullivan
[CalendarFx] Shane O'Sullivan
[ColorPicker] Peter Higgins (dante)
[Dialog] Peter Higgins (dante)
[DialogSimple] Peter Higgins (dante)
[FeedPortlet] Shane O'Sullivan
[FilePicker] Nathan Toone (toonetown)
[FisheyeList] Karl Tiedt (kteidt)
[FisheyeLite] Peter Higgins (dante)
[Iterator] Alex Russell (slightlyoff)
[Loader] Peter Higgins (dante)
[Pager] Nikolai Onken (nonken), Peter Higgins (dante);
[PlaceholderMenuItem] Nathan Toone (toonetown)
[Portlet] Shane O'Sullivan
[Roller] Peter Higgins (dante)
[RollingList] Nathan Toone (toonetown)
[SortList] Peter Higgins (dante)
[TitleGroup] Peter Higgins (dante)
[Toaster] Adam Peller (peller)
[Wizard] Peter Higgins (dante)
[AnalogGauge] Benjamin Schell (bmschell) CCLA
[BarGauge] Benjamin Schell (bmschell) CCLA
[Standby] Jared Jurkiewicz (jaredj) CCLA
[UpgradeBar] Mike Wilcox (mwilcox), Revin Guillen
-------------------------------------------------------------------------------
Project description
This is a collection of standalone widgets for use in
your website. Each individual widget is independent
of the others.
-------------------------------------------------------------------------------
Dependencies:
Each widget has its own requirements and dependencies.
Most inherit from dijit base-classes such as dijit._Widget,
dijit._Templated, etc ... So we will assume the availability
of dojo (core), and dijit packages.
Each individual component stores resources in a folder that shares
a name with the Widget. For instance:
the Dialog lives in
dojox/widget/Dialog.js ...
and the folder:
dojox/widget/Dialog/ contains a 'Dialog.css', the required
styles for that particular widget. All required templates and
images reside in the folder.
This differs slightly from the rest of DojoX in that each other
project uses a shared resources/ folder in the project folder,
though uses the same naming convention for stylesheets and templates.
eg:
dojox/layout/resources/ExpandoPane.css
dojox.layout.ExpandoPane
-------------------------------------------------------------------------------
Documentation
Please refer to the API-tool, or in-line documentation. All of these
widgets are of varying use, quality, and documentation completion.
-------------------------------------------------------------------------------
Installation instructions
These are standalone Widgets, so putting the [widget].js file
in your dojox/widget folder, and copying any files in the
/dojox/widget/[widget]/ folder as supplements/templates/etc
should be all you need to do.
eg: FisheyeList:
/dojox/widget/FisheyeList.js
/dojox/widget/FisheyeList/FisheyeList.css
should be all you need to use the Fisheye widget.
you can safely import the whole widget project into your
dojox/ root directory from the following SVN url:
http://svn.dojotoolkit.org/src/dojox/trunk/widget
-------------------------------------------------------------------------------
Other Notes (Brief widget list):
* ColorPicker - An HSV ColorPicker intended to be a drop down
* Calendar - An extension on the dijit._Calendar providing a different UI
* CalendarFx - additional mixable FX for transitions in dojox.widget.Calendar
* Dialog - An extended version of dijit.Dialog with many options and transitions.
* DialogSimple - A simple Dijit Dialog providing `dojox.layout.ContentPane` integration
* FilePicker - a widget for browsing server-side file systems (can use
dojox.data.FileStore as backend store)
* FisheyeList - the classic FishEye Picker (abandoned)
* FisheyeLite - A partial replacement for the FisheyeList - serious performance
gains, and entirely more extensible in that it simply animates defined
properties, relying on the natural styling as a foundation.
* Iterator - Basic array and data store iterator class
* Loader - an experimental Class that listens to XHR
connections in the background, and displays
a loading indicator. Loader will be removed in 1.3, and is (abandoned).
* PlaceholderMenuItem - a menu item that can be used to inject other menu
items at a given location. Extends dijit.Menu directly.
* Roller - A component to show many lines of text in a single area, rotating
through the options available. Also provides RollerSlide, an extension
to the stock fading roller to add a slide animation to the transition.
* RollingList - A component of the FilePicker widget
* SortList - a degradable UL with a fixed header, scrolling,
and sorting. Can be the direct descendant of a
LayoutContainer and will size to fit.
* TitleGroup - A container offering variable height TitlePane access, though
behaves like an AccordionContainer
* Toaster - a messaging system to display unobtrusive
alerts on screen.
* Wizard - a StackContainer with built-in navigation to
ease in the creation of 'step-based' content.
Requires dojo >= 1.1
* AnalogGauge - an analog style customizable gauge for displaying values in an
animated fashion and with multiple indicators. Supports easings for
indicator animations, transparent overlays, etc. Very flexible.
Requires dojo >= 1.3
* BarGauge - a bar style gauge for displaying values in an animated fashion
and with multiple indicators. Supports easings for indicator animations,
etc. Very flexible.
Requires dojo >= 1.3
* Standby - a 'blocker' style widget to overlay a translucent div + image over a DOM node/widget
to indicate busy. Overlay color, image, and alt text can all be customized.
Requires dojo >= 1.3
* UpgradeBar - Displays the "yellow bar" at the top of a page to indicate the user
needs to upgrade their browser or a plugin
Requires dojo >= 1.3 | PypiClean |
/de-studiensuche-0.1.0.tar.gz/de-studiensuche-0.1.0/deutschland/studiensuche/model/response_studienangebot_region.py | import re # noqa: F401
import sys # noqa: F401
from deutschland.studiensuche.exceptions import ApiAttributeError
from deutschland.studiensuche.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
OpenApiModel,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class ResponseStudienangebotRegion(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model declares no enum-restricted properties.
    allowed_values = {}

    # This model declares no length/range/regex validations.
    validations = {}

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (
            bool,
            date,
            datetime,
            dict,
            float,
            int,
            list,
            str,
            none_type,
        )  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "id": (int,),  # noqa: E501
            "key": (str,),  # noqa: E501
            "label": (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator: this schema takes no part in polymorphism.
        return None

    # Maps pythonic attribute names to the JSON keys used on the wire.
    # Note the asymmetry: the `key` attribute serializes as capitalized "Key".
    attribute_map = {
        "id": "id",  # noqa: E501
        "key": "Key",  # noqa: E501
        "label": "label",  # noqa: E501
    }

    read_only_vars = {}

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """ResponseStudienangebotRegion - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (int): [optional]  # noqa: E501
            key (str): [optional]  # noqa: E501
            label (str): [optional]  # noqa: E501
        """

        # Strip the bookkeeping kwargs before treating the rest as model data.
        _check_type = kwargs.pop("_check_type", True)
        _spec_property_naming = kwargs.pop("_spec_property_naming", True)
        _path_to_item = kwargs.pop("_path_to_item", ())
        _configuration = kwargs.pop("_configuration", None)
        _visited_composed_classes = kwargs.pop("_visited_composed_classes", ())

        # Allocate without running __init__: unlike __init__, this factory
        # may populate attributes listed in read_only_vars.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            # A single positional dict argument is accepted and merged into
            # the keyword data; anything else is rejected.
            for arg in args:
                if isinstance(arg, dict):
                    kwargs.update(arg)
                else:
                    raise ApiTypeError(
                        "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
                        % (
                            args,
                            self.__class__.__name__,
                        ),
                        path_to_item=_path_to_item,
                        valid_classes=(self.__class__,),
                    )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Optionally drop keys the spec does not know about, when the
            # configuration opts in and no additional properties are allowed.
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_spec_property_naming",
            "_path_to_item",
            "_configuration",
            "_visited_composed_classes",
        ]
    )

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """ResponseStudienangebotRegion - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (int): [optional]  # noqa: E501
            key (str): [optional]  # noqa: E501
            label (str): [optional]  # noqa: E501
        """

        # Strip the bookkeeping kwargs before treating the rest as model data.
        # Note the default difference from _from_openapi_data:
        # _spec_property_naming defaults to False here (pythonic names).
        _check_type = kwargs.pop("_check_type", True)
        _spec_property_naming = kwargs.pop("_spec_property_naming", False)
        _path_to_item = kwargs.pop("_path_to_item", ())
        _configuration = kwargs.pop("_configuration", None)
        _visited_composed_classes = kwargs.pop("_visited_composed_classes", ())

        if args:
            # A single positional dict argument is accepted and merged into
            # the keyword data; anything else is rejected.
            for arg in args:
                if isinstance(arg, dict):
                    kwargs.update(arg)
                else:
                    raise ApiTypeError(
                        "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
                        % (
                            args,
                            self.__class__.__name__,
                        ),
                        path_to_item=_path_to_item,
                        valid_classes=(self.__class__,),
                    )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Optionally drop keys the spec does not know about, when the
            # configuration opts in and no additional properties are allowed.
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # NOTE: by the time this check raises, setattr() above has
            # already stored the value; the error aborts construction anyway.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(
                    f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                    f"class with read only attributes."
                )
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/me/mail_folders/item/mail_folder_item_request_builder.py | from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ....models import mail_folder
from ....models.o_data_errors import o_data_error
from .child_folders import child_folders_request_builder
from .copy import copy_request_builder
from .message_rules import message_rules_request_builder
from .messages import messages_request_builder
from .move import move_request_builder
from .multi_value_extended_properties import multi_value_extended_properties_request_builder
from .single_value_extended_properties import single_value_extended_properties_request_builder
from .user_configurations import user_configurations_request_builder
class MailFolderItemRequestBuilder():
    """
    Provides operations to manage the mailFolders property of the microsoft.graph.user entity.
    """
    def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
        """
        Instantiates a new MailFolderItemRequestBuilder and sets the default values.
        Args:
            pathParameters: The raw url or the Url template parameters for the request.
            requestAdapter: The request adapter to use to execute the requests.
        """
        if path_parameters is None:
            raise Exception("path_parameters cannot be undefined")
        if request_adapter is None:
            raise Exception("request_adapter cannot be undefined")
        # Url template to use to build the URL for the current request builder
        self.url_template: str = "{+baseurl}/me/mailFolders/{mailFolder%2Did}{?includeHiddenFolders*,%24select,%24expand}"

        url_tpl_params = get_path_parameters(path_parameters)
        self.path_parameters = url_tpl_params
        self.request_adapter = request_adapter

    async def delete(self,request_configuration: Optional[MailFolderItemRequestBuilderDeleteRequestConfiguration] = None) -> None:
        """
        Delete navigation property mailFolders for me
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        """
        request_info = self.to_delete_request_information(
            request_configuration
        )
        # Deferred import (mirrors the TYPE_CHECKING block at module top).
        from ....models.o_data_errors import o_data_error

        # Both client (4XX) and server (5XX) failures surface as ODataError.
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        return await self.request_adapter.send_no_response_content_async(request_info, error_mapping)

    async def get(self,request_configuration: Optional[MailFolderItemRequestBuilderGetRequestConfiguration] = None) -> Optional[mail_folder.MailFolder]:
        """
        The user's mail folders. Read-only. Nullable.
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: Optional[mail_folder.MailFolder]
        """
        request_info = self.to_get_request_information(
            request_configuration
        )
        from ....models.o_data_errors import o_data_error

        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        from ....models import mail_folder

        return await self.request_adapter.send_async(request_info, mail_folder.MailFolder, error_mapping)

    async def patch(self,body: Optional[mail_folder.MailFolder] = None, request_configuration: Optional[MailFolderItemRequestBuilderPatchRequestConfiguration] = None) -> Optional[mail_folder.MailFolder]:
        """
        Update the navigation property mailFolders in me
        Args:
            body: The request body
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: Optional[mail_folder.MailFolder]
        """
        if body is None:
            raise Exception("body cannot be undefined")
        request_info = self.to_patch_request_information(
            body, request_configuration
        )
        from ....models.o_data_errors import o_data_error

        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": o_data_error.ODataError,
            "5XX": o_data_error.ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        from ....models import mail_folder

        return await self.request_adapter.send_async(request_info, mail_folder.MailFolder, error_mapping)

    def to_delete_request_information(self,request_configuration: Optional[MailFolderItemRequestBuilderDeleteRequestConfiguration] = None) -> RequestInformation:
        """
        Delete navigation property mailFolders for me
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.DELETE
        if request_configuration:
            request_info.add_request_headers(request_configuration.headers)
            request_info.add_request_options(request_configuration.options)
        return request_info

    def to_get_request_information(self,request_configuration: Optional[MailFolderItemRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
        """
        The user's mail folders. Read-only. Nullable.
        Args:
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.GET
        request_info.headers["Accept"] = ["application/json"]
        if request_configuration:
            request_info.add_request_headers(request_configuration.headers)
            request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
            request_info.add_request_options(request_configuration.options)
        return request_info

    def to_patch_request_information(self,body: Optional[mail_folder.MailFolder] = None, request_configuration: Optional[MailFolderItemRequestBuilderPatchRequestConfiguration] = None) -> RequestInformation:
        """
        Update the navigation property mailFolders in me
        Args:
            body: The request body
            requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        if body is None:
            raise Exception("body cannot be undefined")
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.PATCH
        request_info.headers["Accept"] = ["application/json"]
        if request_configuration:
            request_info.add_request_headers(request_configuration.headers)
            request_info.add_request_options(request_configuration.options)
        request_info.set_content_from_parsable(self.request_adapter, "application/json", body)
        return request_info

    @property
    def child_folders(self) -> child_folders_request_builder.ChildFoldersRequestBuilder:
        """
        Provides operations to manage the childFolders property of the microsoft.graph.mailFolder entity.
        """
        from .child_folders import child_folders_request_builder

        return child_folders_request_builder.ChildFoldersRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def copy(self) -> copy_request_builder.CopyRequestBuilder:
        """
        Provides operations to call the copy method.
        """
        from .copy import copy_request_builder

        return copy_request_builder.CopyRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def message_rules(self) -> message_rules_request_builder.MessageRulesRequestBuilder:
        """
        Provides operations to manage the messageRules property of the microsoft.graph.mailFolder entity.
        """
        from .message_rules import message_rules_request_builder

        return message_rules_request_builder.MessageRulesRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def messages(self) -> messages_request_builder.MessagesRequestBuilder:
        """
        Provides operations to manage the messages property of the microsoft.graph.mailFolder entity.
        """
        from .messages import messages_request_builder

        return messages_request_builder.MessagesRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def move(self) -> move_request_builder.MoveRequestBuilder:
        """
        Provides operations to call the move method.
        """
        from .move import move_request_builder

        return move_request_builder.MoveRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def multi_value_extended_properties(self) -> multi_value_extended_properties_request_builder.MultiValueExtendedPropertiesRequestBuilder:
        """
        Provides operations to manage the multiValueExtendedProperties property of the microsoft.graph.mailFolder entity.
        """
        from .multi_value_extended_properties import multi_value_extended_properties_request_builder

        return multi_value_extended_properties_request_builder.MultiValueExtendedPropertiesRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def single_value_extended_properties(self) -> single_value_extended_properties_request_builder.SingleValueExtendedPropertiesRequestBuilder:
        """
        Provides operations to manage the singleValueExtendedProperties property of the microsoft.graph.mailFolder entity.
        """
        from .single_value_extended_properties import single_value_extended_properties_request_builder

        return single_value_extended_properties_request_builder.SingleValueExtendedPropertiesRequestBuilder(self.request_adapter, self.path_parameters)

    @property
    def user_configurations(self) -> user_configurations_request_builder.UserConfigurationsRequestBuilder:
        """
        Provides operations to manage the userConfigurations property of the microsoft.graph.mailFolder entity.
        """
        from .user_configurations import user_configurations_request_builder

        return user_configurations_request_builder.UserConfigurationsRequestBuilder(self.request_adapter, self.path_parameters)

    @dataclass
    class MailFolderItemRequestBuilderDeleteRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, Union[str, List[str]]]] = None

        # Request options
        options: Optional[List[RequestOption]] = None

    @dataclass
    class MailFolderItemRequestBuilderGetQueryParameters():
        """
        The user's mail folders. Read-only. Nullable.
        """
        def get_query_parameter(self,original_name: Optional[str] = None) -> str:
            """
            Maps the query parameters names to their encoded names for the URI template parsing.
            Args:
                originalName: The original query parameter name in the class.
            Returns: str
            """
            if original_name is None:
                raise Exception("original_name cannot be undefined")
            # OData system query options are percent-encoded in the URI
            # template ($expand -> %24expand, $select -> %24select).
            if original_name == "expand":
                return "%24expand"
            if original_name == "select":
                return "%24select"
            if original_name == "include_hidden_folders":
                return "includeHiddenFolders"
            # Names without a special encoding pass through unchanged.
            return original_name

        # Expand related entities
        expand: Optional[List[str]] = None

        # Include Hidden Folders
        include_hidden_folders: Optional[str] = None

        # Select properties to be returned
        select: Optional[List[str]] = None

    @dataclass
    class MailFolderItemRequestBuilderGetRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, Union[str, List[str]]]] = None

        # Request options
        options: Optional[List[RequestOption]] = None

        # Request query parameters
        query_parameters: Optional[MailFolderItemRequestBuilder.MailFolderItemRequestBuilderGetQueryParameters] = None

    @dataclass
    class MailFolderItemRequestBuilderPatchRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request headers
        headers: Optional[Dict[str, Union[str, List[str]]]] = None

        # Request options
        options: Optional[List[RequestOption]] = None
/velo-python-2.26.124b1.tar.gz/velo-python-2.26.124b1/velo_payments/__init__.py |
# flake8: noqa
"""
Velo Payments APIs
## Terms and Definitions Throughout this document and the Velo platform the following terms are used: * **Payor.** An entity (typically a corporation) which wishes to pay funds to one or more payees via a payout. * **Payee.** The recipient of funds paid out by a payor. * **Payment.** A single transfer of funds from a payor to a payee. * **Payout.** A batch of Payments, typically used by a payor to logically group payments (e.g. by business day). Technically there need be no relationship between the payments in a payout - a single payout can contain payments to multiple payees and/or multiple payments to a single payee. * **Sandbox.** An integration environment provided by Velo Payments which offers a similar API experience to the production environment, but all funding and payment events are simulated, along with many other services such as OFAC sanctions list checking. ## Overview The Velo Payments API allows a payor to perform a number of operations. The following is a list of the main capabilities in a natural order of execution: * Authenticate with the Velo platform * Maintain a collection of payees * Query the payor’s current balance of funds within the platform and perform additional funding * Issue payments to payees * Query the platform for a history of those payments This document describes the main concepts and APIs required to get up and running with the Velo Payments platform. It is not an exhaustive API reference. For that, please see the separate Velo Payments API Reference. ## API Considerations The Velo Payments API is REST based and uses the JSON format for requests and responses. Most calls are secured using OAuth 2 security and require a valid authentication access token for successful operation. See the Authentication section for details. 
Where a dynamic value is required in the examples below, the {token} format is used, suggesting that the caller needs to supply the appropriate value of the token in question (without including the { or } characters). Where curl examples are given, the –d @filename.json approach is used, indicating that the request body should be placed into a file named filename.json in the current directory. Each of the curl examples in this document should be considered a single line on the command-line, regardless of how they appear in print. ## Authenticating with the Velo Platform Once Velo backoffice staff have added your organization as a payor within the Velo platform sandbox, they will create you a payor Id, an API key and an API secret and share these with you in a secure manner. You will need to use these values to authenticate with the Velo platform in order to gain access to the APIs. The steps to take are explained in the following: create a string comprising the API key (e.g. 44a9537d-d55d-4b47-8082-14061c2bcdd8) and API secret (e.g. c396b26b-137a-44fd-87f5-34631f8fd529) with a colon between them. E.g. 44a9537d-d55d-4b47-8082-14061c2bcdd8:c396b26b-137a-44fd-87f5-34631f8fd529 base64 encode this string. E.g.: NDRhOTUzN2QtZDU1ZC00YjQ3LTgwODItMTQwNjFjMmJjZGQ4OmMzOTZiMjZiLTEzN2EtNDRmZC04N2Y1LTM0NjMxZjhmZDUyOQ== create an HTTP **Authorization** header with the value set to e.g. Basic NDRhOTUzN2QtZDU1ZC00YjQ3LTgwODItMTQwNjFjMmJjZGQ4OmMzOTZiMjZiLTEzN2EtNDRmZC04N2Y1LTM0NjMxZjhmZDUyOQ== perform the Velo authentication REST call using the HTTP header created above e.g. 
via curl: ``` curl -X POST \\ -H \"Content-Type: application/json\" \\ -H \"Authorization: Basic NDRhOTUzN2QtZDU1ZC00YjQ3LTgwODItMTQwNjFjMmJjZGQ4OmMzOTZiMjZiLTEzN2EtNDRmZC04N2Y1LTM0NjMxZjhmZDUyOQ==\" \\ 'https://api.sandbox.velopayments.com/v1/authenticate?grant_type=client_credentials' ``` If successful, this call will result in a **200** HTTP status code and a response body such as: ``` { \"access_token\":\"19f6bafd-93fd-4747-b229-00507bbc991f\", \"token_type\":\"bearer\", \"expires_in\":1799, \"scope\":\"...\" } ``` ## API access following authentication Following successful authentication, the value of the access_token field in the response (indicated in green above) should then be presented with all subsequent API calls to allow the Velo platform to validate that the caller is authenticated. This is achieved by setting the HTTP Authorization header with the value set to e.g. Bearer 19f6bafd-93fd-4747-b229-00507bbc991f such as the curl example below: ``` -H \"Authorization: Bearer 19f6bafd-93fd-4747-b229-00507bbc991f \" ``` If you make other Velo API calls which require authorization but the Authorization header is missing or invalid then you will get a **401** HTTP status response. # noqa: E501
The version of the OpenAPI document: 2.26.124
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
__version__ = "2.26.124"
# import apis into sdk package
from velo_payments.api.countries_api import CountriesApi
from velo_payments.api.currencies_api import CurrenciesApi
from velo_payments.api.funding_manager_api import FundingManagerApi
from velo_payments.api.funding_manager_private_api import FundingManagerPrivateApi
from velo_payments.api.login_api import LoginApi
from velo_payments.api.payee_invitation_api import PayeeInvitationApi
from velo_payments.api.payees_api import PayeesApi
from velo_payments.api.payment_audit_service_api import PaymentAuditServiceApi
from velo_payments.api.payment_audit_service__deprecated_api import PaymentAuditServiceDeprecatedApi
from velo_payments.api.payors_api import PayorsApi
from velo_payments.api.payors_private_api import PayorsPrivateApi
from velo_payments.api.payout_service_api import PayoutServiceApi
from velo_payments.api.tokens_api import TokensApi
from velo_payments.api.users_api import UsersApi
from velo_payments.api.webhooks_api import WebhooksApi
# import ApiClient
from velo_payments.api_client import ApiClient
from velo_payments.configuration import Configuration
from velo_payments.exceptions import OpenApiException
from velo_payments.exceptions import ApiTypeError
from velo_payments.exceptions import ApiValueError
from velo_payments.exceptions import ApiKeyError
from velo_payments.exceptions import ApiException
# import models into sdk package
from velo_payments.models.accepted_payment_v3 import AcceptedPaymentV3
from velo_payments.models.access_token_response import AccessTokenResponse
from velo_payments.models.access_token_validation_request import AccessTokenValidationRequest
from velo_payments.models.auth_response import AuthResponse
from velo_payments.models.auto_top_up_config import AutoTopUpConfig
from velo_payments.models.auto_top_up_config2 import AutoTopUpConfig2
from velo_payments.models.category import Category
from velo_payments.models.challenge import Challenge
from velo_payments.models.challenge2 import Challenge2
from velo_payments.models.company import Company
from velo_payments.models.company2 import Company2
from velo_payments.models.create_funding_account_request_v2 import CreateFundingAccountRequestV2
from velo_payments.models.create_individual import CreateIndividual
from velo_payments.models.create_individual2 import CreateIndividual2
from velo_payments.models.create_individual_name import CreateIndividualName
from velo_payments.models.create_payee import CreatePayee
from velo_payments.models.create_payee2 import CreatePayee2
from velo_payments.models.create_payee_address import CreatePayeeAddress
from velo_payments.models.create_payee_address2 import CreatePayeeAddress2
from velo_payments.models.create_payees_csv_request import CreatePayeesCSVRequest
from velo_payments.models.create_payees_csv_request2 import CreatePayeesCSVRequest2
from velo_payments.models.create_payees_csv_response import CreatePayeesCSVResponse
from velo_payments.models.create_payees_csv_response2 import CreatePayeesCSVResponse2
from velo_payments.models.create_payees_csv_response_rejected_csv_rows import CreatePayeesCSVResponseRejectedCsvRows
from velo_payments.models.create_payees_request import CreatePayeesRequest
from velo_payments.models.create_payees_request2 import CreatePayeesRequest2
from velo_payments.models.create_payment_channel import CreatePaymentChannel
from velo_payments.models.create_payment_channel2 import CreatePaymentChannel2
from velo_payments.models.create_payor_link_request import CreatePayorLinkRequest
from velo_payments.models.create_payout_request_v3 import CreatePayoutRequestV3
from velo_payments.models.create_webhook_request import CreateWebhookRequest
from velo_payments.models.debit_event import DebitEvent
from velo_payments.models.debit_event_all_of import DebitEventAllOf
from velo_payments.models.debit_status_changed import DebitStatusChanged
from velo_payments.models.debit_status_changed_all_of import DebitStatusChangedAllOf
from velo_payments.models.error import Error
from velo_payments.models.error_data import ErrorData
from velo_payments.models.error_response import ErrorResponse
from velo_payments.models.failed_payee import FailedPayee
from velo_payments.models.failed_payee2 import FailedPayee2
from velo_payments.models.failed_submission import FailedSubmission
from velo_payments.models.failed_submission2 import FailedSubmission2
from velo_payments.models.funding_account_response import FundingAccountResponse
from velo_payments.models.funding_account_response2 import FundingAccountResponse2
from velo_payments.models.funding_account_type import FundingAccountType
from velo_payments.models.funding_audit import FundingAudit
from velo_payments.models.funding_event import FundingEvent
from velo_payments.models.funding_event_type import FundingEventType
from velo_payments.models.funding_payor_status_audit_response import FundingPayorStatusAuditResponse
from velo_payments.models.funding_request_v1 import FundingRequestV1
from velo_payments.models.funding_request_v2 import FundingRequestV2
from velo_payments.models.funding_request_v3 import FundingRequestV3
from velo_payments.models.fx_summary import FxSummary
from velo_payments.models.fx_summary_v3 import FxSummaryV3
from velo_payments.models.get_fundings_response import GetFundingsResponse
from velo_payments.models.get_fundings_response_links import GetFundingsResponseLinks
from velo_payments.models.get_payee_list_response import GetPayeeListResponse
from velo_payments.models.get_payee_list_response2 import GetPayeeListResponse2
from velo_payments.models.get_payee_list_response_company import GetPayeeListResponseCompany
from velo_payments.models.get_payee_list_response_company2 import GetPayeeListResponseCompany2
from velo_payments.models.get_payee_list_response_individual import GetPayeeListResponseIndividual
from velo_payments.models.get_payee_list_response_individual2 import GetPayeeListResponseIndividual2
from velo_payments.models.get_payments_for_payout_response_v3 import GetPaymentsForPayoutResponseV3
from velo_payments.models.get_payments_for_payout_response_v3_page import GetPaymentsForPayoutResponseV3Page
from velo_payments.models.get_payments_for_payout_response_v3_summary import GetPaymentsForPayoutResponseV3Summary
from velo_payments.models.get_payments_for_payout_response_v4 import GetPaymentsForPayoutResponseV4
from velo_payments.models.get_payments_for_payout_response_v4_summary import GetPaymentsForPayoutResponseV4Summary
from velo_payments.models.get_payout_statistics import GetPayoutStatistics
from velo_payments.models.get_payouts_response import GetPayoutsResponse
from velo_payments.models.get_payouts_response_v3 import GetPayoutsResponseV3
from velo_payments.models.get_payouts_response_v3_links import GetPayoutsResponseV3Links
from velo_payments.models.get_payouts_response_v3_page import GetPayoutsResponseV3Page
from velo_payments.models.individual import Individual
from velo_payments.models.individual2 import Individual2
from velo_payments.models.individual_name import IndividualName
from velo_payments.models.inline_response400 import InlineResponse400
from velo_payments.models.inline_response401 import InlineResponse401
from velo_payments.models.inline_response403 import InlineResponse403
from velo_payments.models.inline_response404 import InlineResponse404
from velo_payments.models.inline_response409 import InlineResponse409
from velo_payments.models.inline_response412 import InlineResponse412
from velo_payments.models.invitation_status import InvitationStatus
from velo_payments.models.invitation_status2 import InvitationStatus2
from velo_payments.models.invite_payee_request import InvitePayeeRequest
from velo_payments.models.invite_payee_request2 import InvitePayeeRequest2
from velo_payments.models.invite_user_request import InviteUserRequest
from velo_payments.models.kyc_state import KycState
from velo_payments.models.link_for_response import LinkForResponse
from velo_payments.models.list_funding_accounts_response import ListFundingAccountsResponse
from velo_payments.models.list_funding_accounts_response2 import ListFundingAccountsResponse2
from velo_payments.models.list_payments_response_v3 import ListPaymentsResponseV3
from velo_payments.models.list_payments_response_v3_page import ListPaymentsResponseV3Page
from velo_payments.models.list_payments_response_v4 import ListPaymentsResponseV4
from velo_payments.models.list_source_account_response import ListSourceAccountResponse
from velo_payments.models.list_source_account_response_links import ListSourceAccountResponseLinks
from velo_payments.models.list_source_account_response_page import ListSourceAccountResponsePage
from velo_payments.models.list_source_account_response_v2 import ListSourceAccountResponseV2
from velo_payments.models.list_source_account_response_v2_links import ListSourceAccountResponseV2Links
from velo_payments.models.list_source_account_response_v3 import ListSourceAccountResponseV3
from velo_payments.models.list_source_account_response_v3_links import ListSourceAccountResponseV3Links
from velo_payments.models.localisation_details import LocalisationDetails
from velo_payments.models.mfa_details import MFADetails
from velo_payments.models.mfa_type import MFAType
from velo_payments.models.name import Name
from velo_payments.models.name2 import Name2
from velo_payments.models.notification import Notification
from velo_payments.models.notifications import Notifications
from velo_payments.models.notifications2 import Notifications2
from velo_payments.models.ofac_status import OfacStatus
from velo_payments.models.onboarded_status import OnboardedStatus
from velo_payments.models.onboarded_status2 import OnboardedStatus2
from velo_payments.models.onboarding_status_changed import OnboardingStatusChanged
from velo_payments.models.page_for_response import PageForResponse
from velo_payments.models.page_resource_funding_payor_status_audit_response_funding_payor_status_audit_response import PageResourceFundingPayorStatusAuditResponseFundingPayorStatusAuditResponse
from velo_payments.models.paged_payee_invitation_status_response import PagedPayeeInvitationStatusResponse
from velo_payments.models.paged_payee_invitation_status_response2 import PagedPayeeInvitationStatusResponse2
from velo_payments.models.paged_payee_invitation_status_response_page import PagedPayeeInvitationStatusResponsePage
from velo_payments.models.paged_payee_response import PagedPayeeResponse
from velo_payments.models.paged_payee_response2 import PagedPayeeResponse2
from velo_payments.models.paged_payee_response_links import PagedPayeeResponseLinks
from velo_payments.models.paged_payee_response_page import PagedPayeeResponsePage
from velo_payments.models.paged_payee_response_summary import PagedPayeeResponseSummary
from velo_payments.models.paged_payments_response_v3 import PagedPaymentsResponseV3
from velo_payments.models.paged_user_response import PagedUserResponse
from velo_payments.models.paged_user_response_links import PagedUserResponseLinks
from velo_payments.models.paged_user_response_page import PagedUserResponsePage
from velo_payments.models.password_request import PasswordRequest
from velo_payments.models.payable_issue import PayableIssue
from velo_payments.models.payable_issue2 import PayableIssue2
from velo_payments.models.payable_status_changed import PayableStatusChanged
from velo_payments.models.payee_address import PayeeAddress
from velo_payments.models.payee_address2 import PayeeAddress2
from velo_payments.models.payee_delta import PayeeDelta
from velo_payments.models.payee_delta2 import PayeeDelta2
from velo_payments.models.payee_delta_response import PayeeDeltaResponse
from velo_payments.models.payee_delta_response2 import PayeeDeltaResponse2
from velo_payments.models.payee_delta_response2_links import PayeeDeltaResponse2Links
from velo_payments.models.payee_delta_response_links import PayeeDeltaResponseLinks
from velo_payments.models.payee_delta_response_page import PayeeDeltaResponsePage
from velo_payments.models.payee_detail_response import PayeeDetailResponse
from velo_payments.models.payee_detail_response2 import PayeeDetailResponse2
from velo_payments.models.payee_details_changed import PayeeDetailsChanged
from velo_payments.models.payee_event import PayeeEvent
from velo_payments.models.payee_event_all_of import PayeeEventAllOf
from velo_payments.models.payee_event_all_of_reasons import PayeeEventAllOfReasons
from velo_payments.models.payee_invitation_status_response import PayeeInvitationStatusResponse
from velo_payments.models.payee_invitation_status_response2 import PayeeInvitationStatusResponse2
from velo_payments.models.payee_payor_ref import PayeePayorRef
from velo_payments.models.payee_payor_ref_v3 import PayeePayorRefV3
from velo_payments.models.payee_type import PayeeType
from velo_payments.models.payee_user_self_update_request import PayeeUserSelfUpdateRequest
from velo_payments.models.payment_audit_currency import PaymentAuditCurrency
from velo_payments.models.payment_audit_currency_v3 import PaymentAuditCurrencyV3
from velo_payments.models.payment_channel_country import PaymentChannelCountry
from velo_payments.models.payment_channel_rule import PaymentChannelRule
from velo_payments.models.payment_channel_rules_response import PaymentChannelRulesResponse
from velo_payments.models.payment_delta import PaymentDelta
from velo_payments.models.payment_delta_response import PaymentDeltaResponse
from velo_payments.models.payment_delta_response_v1 import PaymentDeltaResponseV1
from velo_payments.models.payment_delta_v1 import PaymentDeltaV1
from velo_payments.models.payment_event import PaymentEvent
from velo_payments.models.payment_event_all_of import PaymentEventAllOf
from velo_payments.models.payment_event_response import PaymentEventResponse
from velo_payments.models.payment_event_response_v3 import PaymentEventResponseV3
from velo_payments.models.payment_instruction_v3 import PaymentInstructionV3
from velo_payments.models.payment_rails import PaymentRails
from velo_payments.models.payment_rejected_or_returned import PaymentRejectedOrReturned
from velo_payments.models.payment_rejected_or_returned_all_of import PaymentRejectedOrReturnedAllOf
from velo_payments.models.payment_response_v3 import PaymentResponseV3
from velo_payments.models.payment_response_v4 import PaymentResponseV4
from velo_payments.models.payment_response_v4_payout import PaymentResponseV4Payout
from velo_payments.models.payment_status_changed import PaymentStatusChanged
from velo_payments.models.payment_status_changed_all_of import PaymentStatusChangedAllOf
from velo_payments.models.payment_v3 import PaymentV3
from velo_payments.models.payor_address import PayorAddress
from velo_payments.models.payor_address_v2 import PayorAddressV2
from velo_payments.models.payor_aml_transaction import PayorAmlTransaction
from velo_payments.models.payor_aml_transaction_v3 import PayorAmlTransactionV3
from velo_payments.models.payor_branding_response import PayorBrandingResponse
from velo_payments.models.payor_create_api_key_request import PayorCreateApiKeyRequest
from velo_payments.models.payor_create_api_key_response import PayorCreateApiKeyResponse
from velo_payments.models.payor_create_application_request import PayorCreateApplicationRequest
from velo_payments.models.payor_email_opt_out_request import PayorEmailOptOutRequest
from velo_payments.models.payor_links_response import PayorLinksResponse
from velo_payments.models.payor_links_response_links import PayorLinksResponseLinks
from velo_payments.models.payor_links_response_payors import PayorLinksResponsePayors
from velo_payments.models.payor_logo_request import PayorLogoRequest
from velo_payments.models.payor_v1 import PayorV1
from velo_payments.models.payor_v2 import PayorV2
from velo_payments.models.payout_company_v3 import PayoutCompanyV3
from velo_payments.models.payout_individual_v3 import PayoutIndividualV3
from velo_payments.models.payout_name_v3 import PayoutNameV3
from velo_payments.models.payout_payee_v3 import PayoutPayeeV3
from velo_payments.models.payout_payor import PayoutPayor
from velo_payments.models.payout_payor_ids import PayoutPayorIds
from velo_payments.models.payout_principal import PayoutPrincipal
from velo_payments.models.payout_status import PayoutStatus
from velo_payments.models.payout_status_v3 import PayoutStatusV3
from velo_payments.models.payout_summary_audit import PayoutSummaryAudit
from velo_payments.models.payout_summary_audit_v3 import PayoutSummaryAuditV3
from velo_payments.models.payout_summary_response_v3 import PayoutSummaryResponseV3
from velo_payments.models.payout_type import PayoutType
from velo_payments.models.ping import Ping
from velo_payments.models.query_batch_response import QueryBatchResponse
from velo_payments.models.query_batch_response2 import QueryBatchResponse2
from velo_payments.models.quote_fx_summary_v3 import QuoteFxSummaryV3
from velo_payments.models.quote_response_v3 import QuoteResponseV3
from velo_payments.models.region_v2 import RegionV2
from velo_payments.models.register_sms_request import RegisterSmsRequest
from velo_payments.models.rejected_payment_v3 import RejectedPaymentV3
from velo_payments.models.resend_token_request import ResendTokenRequest
from velo_payments.models.reset_password_request import ResetPasswordRequest
from velo_payments.models.role import Role
from velo_payments.models.role_update_request import RoleUpdateRequest
from velo_payments.models.self_mfa_type_unregister_request import SelfMFATypeUnregisterRequest
from velo_payments.models.self_update_password_request import SelfUpdatePasswordRequest
from velo_payments.models.set_notifications_request import SetNotificationsRequest
from velo_payments.models.source_account_response import SourceAccountResponse
from velo_payments.models.source_account_response_v2 import SourceAccountResponseV2
from velo_payments.models.source_account_response_v3 import SourceAccountResponseV3
from velo_payments.models.source_account_summary import SourceAccountSummary
from velo_payments.models.source_account_summary_v3 import SourceAccountSummaryV3
from velo_payments.models.source_account_type import SourceAccountType
from velo_payments.models.source_account_v3 import SourceAccountV3
from velo_payments.models.source_event import SourceEvent
from velo_payments.models.supported_countries_response import SupportedCountriesResponse
from velo_payments.models.supported_countries_response_v2 import SupportedCountriesResponseV2
from velo_payments.models.supported_country import SupportedCountry
from velo_payments.models.supported_country_v2 import SupportedCountryV2
from velo_payments.models.supported_currency_response_v2 import SupportedCurrencyResponseV2
from velo_payments.models.supported_currency_v2 import SupportedCurrencyV2
from velo_payments.models.transfer_request import TransferRequest
from velo_payments.models.transfer_request2 import TransferRequest2
from velo_payments.models.transmission_type import TransmissionType
from velo_payments.models.transmission_types import TransmissionTypes
from velo_payments.models.transmission_types2 import TransmissionTypes2
from velo_payments.models.unregister_mfa_request import UnregisterMFARequest
from velo_payments.models.update_payee_details_request import UpdatePayeeDetailsRequest
from velo_payments.models.update_payee_details_request2 import UpdatePayeeDetailsRequest2
from velo_payments.models.update_remote_id_request import UpdateRemoteIdRequest
from velo_payments.models.update_remote_id_request2 import UpdateRemoteIdRequest2
from velo_payments.models.update_webhook_request import UpdateWebhookRequest
from velo_payments.models.user_details_update_request import UserDetailsUpdateRequest
from velo_payments.models.user_info import UserInfo
from velo_payments.models.user_response import UserResponse
from velo_payments.models.user_status import UserStatus
from velo_payments.models.user_type import UserType
from velo_payments.models.user_type2 import UserType2
from velo_payments.models.validate_password_response import ValidatePasswordResponse
from velo_payments.models.watchlist_status import WatchlistStatus
from velo_payments.models.watchlist_status2 import WatchlistStatus2
from velo_payments.models.webhook_response import WebhookResponse
from velo_payments.models.webhooks_response import WebhooksResponse
from velo_payments.models.withdraw_payment_request import WithdrawPaymentRequest | PypiClean |
/pk-dlp-2023.3.4.tar.gz/pk-dlp-2023.3.4/yt_dlp/extractor/acast.py | from .common import InfoExtractor
from ..utils import (
clean_html,
clean_podcast_url,
int_or_none,
parse_iso8601,
)
class ACastBaseIE(InfoExtractor):
    """Shared helpers for the Acast extractors: episode/show mapping and API access."""

    def _extract_episode(self, episode, show_info):
        """Map a raw feeder-API episode payload onto yt-dlp's info-dict fields."""
        episode_title = episode['title']
        info = dict(
            id=episode['id'],
            display_id=episode.get('episodeUrl'),
            url=clean_podcast_url(episode['url']),
            title=episode_title,
            description=clean_html(episode.get('description') or episode.get('summary')),
            thumbnail=episode.get('image'),
            timestamp=parse_iso8601(episode.get('publishDate')),
            duration=int_or_none(episode.get('duration')),
            filesize=int_or_none(episode.get('contentLength')),
            season_number=int_or_none(episode.get('season')),
            episode=episode_title,
            episode_number=int_or_none(episode.get('episode')),
        )
        # Show-level fields (creator/series) override on key clashes.
        info.update(show_info)
        return info

    def _extract_show_info(self, show):
        """Pick the show-level fields propagated to every episode info-dict."""
        return {'creator': show.get('author'), 'series': show.get('title')}

    def _call_api(self, path, video_id, query=None):
        """Fetch JSON metadata from Acast's public "feeder" API."""
        return self._download_json(
            'https://feeder.acast.com/api/v1/shows/%s' % path, video_id, query=query)
class ACastIE(ACastBaseIE):
    """Extractor for a single Acast podcast episode."""
    IE_NAME = 'acast'
    _VALID_URL = r'''(?x)
                    https?://
                        (?:
                            (?:(?:embed|www)\.)?acast\.com/|
                            play\.acast\.com/s/
                        )
                        (?P<channel>[^/]+)/(?P<id>[^/#?]+)
                    '''
    _TESTS = [{
        'url': 'https://www.acast.com/sparpodcast/2.raggarmordet-rosterurdetforflutna',
        'md5': 'f5598f3ad1e4776fed12ec1407153e4b',
        'info_dict': {
            'id': '2a92b283-1a75-4ad8-8396-499c641de0d9',
            'ext': 'mp3',
            'title': '2. Raggarmordet - Röster ur det förflutna',
            'description': 'md5:a992ae67f4d98f1c0141598f7bebbf67',
            'timestamp': 1477346700,
            'upload_date': '20161024',
            'duration': 2766,
            'creator': 'Anton Berg & Martin Johnson',
            'series': 'Spår',
            'episode': '2. Raggarmordet - Röster ur det förflutna',
        }
    }, {
        'url': 'http://embed.acast.com/adambuxton/ep.12-adam-joeschristmaspodcast2015',
        'only_matching': True,
    }, {
        'url': 'https://play.acast.com/s/rattegangspodden/s04e09styckmordetihelenelund-del2-2',
        'only_matching': True,
    }, {
        'url': 'https://play.acast.com/s/sparpodcast/2a92b283-1a75-4ad8-8396-499c641de0d9',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = self._match_valid_url(url)
        channel, display_id = mobj.group('channel'), mobj.group('id')
        # showInfo=true makes the API embed show metadata in the episode
        # payload, avoiding a second request.
        episode = self._call_api(
            '%s/episodes/%s' % (channel, display_id),
            display_id, {'showInfo': 'true'})
        show_info = self._extract_show_info(episode.get('show') or {})
        return self._extract_episode(episode, show_info)
class ACastChannelIE(ACastBaseIE):
    """Extractor for a whole Acast show (all episodes as a playlist)."""
    IE_NAME = 'acast:channel'
    _VALID_URL = r'''(?x)
                    https?://
                        (?:
                            (?:www\.)?acast\.com/|
                            play\.acast\.com/s/
                        )
                        (?P<id>[^/#?]+)
                    '''
    _TESTS = [{
        'url': 'https://www.acast.com/todayinfocus',
        'info_dict': {
            'id': '4efc5294-5385-4847-98bd-519799ce5786',
            'title': 'Today in Focus',
            'description': 'md5:c09ce28c91002ce4ffce71d6504abaae',
        },
        'playlist_mincount': 200,
    }, {
        'url': 'http://play.acast.com/s/ft-banking-weekly',
        'only_matching': True,
    }]

    @classmethod
    def suitable(cls, url):
        # Single-episode URLs also match this pattern; defer to ACastIE.
        # Modernized: zero-argument super() instead of the Python-2-style
        # super(ACastChannelIE, cls) spelling.
        return False if ACastIE.suitable(url) else super().suitable(url)

    def _real_extract(self, url):
        show_slug = self._match_id(url)
        show = self._call_api(show_slug, show_slug)
        show_info = self._extract_show_info(show)
        entries = [self._extract_episode(episode, show_info)
                   for episode in (show.get('episodes') or [])]
        # Fixed: a stray "| PypiClean" artifact on the return line would have
        # raised NameError at runtime; removed.
        return self.playlist_result(
            entries, show.get('id'), show.get('title'), show.get('description'))
/wdig_cli-0.1.2.tar.gz/wdig_cli-0.1.2/wdig/queries.py | from dataclasses import dataclass
from typing import List
import pandas as pd
from rich.console import Console
from datetime import timedelta, datetime
from wdig.database import DatabaseAppSession, Transaction
from sqlalchemy import text, func
console = Console()
def query_by_period(months_back: int = 6) -> pd.DataFrame:
    """Summarise transaction count and total amount per (year, month) period.

    Only transactions newer than roughly ``months_back`` months are included
    (months approximated as 30 days each).
    """
    cutoff = (datetime.now() - timedelta(days=30 * months_back)).strftime('%Y-%m-%d')
    recent = transactions_dataframe()
    recent = recent[recent['tran_date'] > cutoff]
    summary = (
        recent[['period_month', 'period_year', 'amount']]
        .groupby(['period_year', 'period_month'])
        .agg(count=('amount', 'count'), amount=('amount', 'sum'))
        .reset_index()
    )
    summary['amount'] = summary['amount'].round(2)
    return summary
def get_transaction_count() -> int:
    """Return the total number of rows in the transaction table."""
    db_conn = DatabaseAppSession()
    # Fixed: the previous version built an unused intermediate query object
    # (`stmt`) before counting; count directly instead.
    return db_conn.db_session.query(Transaction).count()
def transactions_dataframe() -> pd.DataFrame:
    """Load every non-duplicate, non-transfer transaction into a DataFrame."""
    sql = "select period_month, period_year, amount, category, budget_category, budget_tag, merchant_type, tag, account_name, description, id, tran_id, tran_type, account_number, tran_date, bank_name, account_format, imported_date, file_name from transaction where is_duplicate is false and category != 'transfer'"
    db_conn = DatabaseAppSession()
    return pd.read_sql_query(sql, db_conn.db_engine.engine)
def is_file_already_loaded(file_name: str) -> bool:
    """True when at least one transaction was imported from *file_name*."""
    session = DatabaseAppSession().db_session
    matches = (
        session.query(Transaction)
        .filter(Transaction.file_name == file_name)
        .count()
    )
    return matches > 0
def get_unenriched_transaction_count() -> int:
    """Count transactions that have not yet been categorised (category IS NULL)."""
    db_conn = DatabaseAppSession()
    # Use SQLAlchemy's is_() so the generated SQL is an explicit IS NULL test;
    # this also removes the need for the `== None  # noqa: E711` suppression.
    return (
        db_conn.db_session.query(Transaction)
        .filter(Transaction.category.is_(None))
        .count()
    )
def get_most_recent_tran_date() -> datetime:
    """Return the tran_date of the newest transaction.

    NOTE(review): raises AttributeError when the table is empty — confirm
    callers guarantee at least one row.
    """
    session = DatabaseAppSession().db_session
    # .first() applies LIMIT 1 itself, matching the original limit(1).first().
    newest = session.query(Transaction).order_by(Transaction.tran_date.desc()).first()
    return newest.tran_date
def merchant_type_summary(year: str = '2022') -> List[List]:
    """Per-month totals of 'variable' spending, grouped by merchant type.

    Returns a header row followed by
    [period_month, merchant_type, tran_count, total_amount] rows; amounts are
    sign-flipped in SQL so spending shows as a positive number.
    """
    stmt = text('''
    select period_month, merchant_type, count(merchant_type), sum(amount)*-1
    from transaction
    where period_year = :year and category = 'variable' and is_duplicate = false
    group by period_month, merchant_type
    order by period_month;
    ''').bindparams(year=year)
    db_conn = DatabaseAppSession()
    rows = db_conn.db_engine.engine.execute(stmt).fetchall()
    header = ['period_month', 'merchant_type', 'tran_count', 'total_amount']
    # Decimal totals are converted to plain floats for callers.
    return [header] + [[r[0], r[1], r[2], float(r[3])] for r in rows]
def get_tran_by_id(tran_id: str) -> Transaction:
    """Fetch a single transaction by its tran_id (None when not found)."""
    session = DatabaseAppSession().db_session
    return (
        session.query(Transaction)
        .filter(Transaction.tran_id == tran_id)
        .first()
    )
@dataclass
class BudgetTagGroups:
    """One aggregated spending row: the summed amount for a
    (budget_category, budget_tag) pair within a single period month."""
    budget_category: str
    budget_tag: str
    amount: float
    # Period month the aggregation belongs to (Transaction.period_month).
    period: str
def get_transactions_grouped_by_budget_tag(year: str = '2022') -> List[BudgetTagGroups]:
    """Sum spending per (budget_category, budget_tag, period_month) for *year*.

    Duplicates, income and transfer transactions are excluded.  Rows without
    a budget category are reported under 'unknown'/'unknown'.
    """
    db_conn = DatabaseAppSession()
    raw_results = (
        db_conn.db_session
        .query(Transaction.budget_category, Transaction.budget_tag,
               func.sum(Transaction.amount), Transaction.period_month)
        # is_() emits an explicit "IS false" test and removes the need for
        # the `== False  # noqa E712` literal comparison.
        .filter(Transaction.is_duplicate.is_(False))
        .filter(Transaction.period_year == year)
        .filter(Transaction.category != 'income')
        .filter(Transaction.category != 'transfer')
        .group_by(Transaction.budget_category, Transaction.budget_tag,
                  Transaction.period_month)
        .order_by(Transaction.period_month)
        .all()
    )
    results = []
    for budget_category, budget_tag, amount, period in raw_results:
        group = BudgetTagGroups(budget_category, budget_tag, amount, period)
        if not group.budget_category:
            # NULL/empty categories (and their tags) are bucketed as unknown.
            group.budget_category = 'unknown'
            group.budget_tag = 'unknown'
        results.append(group)
    return results
if __name__ == '__main__':
    # Ad-hoc smoke test: make sure the budget-tag aggregation runs.
    # Fixed: a trailing "pass | PypiClean" artifact was a syntax error; the
    # redundant `pass` has been dropped along with it.
    budget_tags = get_transactions_grouped_by_budget_tag()
/wine_wrap-0.1.0.tar.gz/wine_wrap-0.1.0/README.md | # wine_wrap
A versioned wine-prefix management tool with memory-shared prefixes.
This tool simplifies the maintenance of individual wine-prefixes per executable.
Each prefix is version-controlled using git, allowing for reproducible setups.
Furthermore (if supported by the host system), the prefixes are stored as subvolumes on a BTRFS image. This makes them more space-efficient to store, since only the differences between installations are saved.
## Installation
`wine_wrap` can be installed using `pip`:
```bash
$ pip install wine_wrap
```
## Usage
```bash
$ wine_wrap --help
Usage: wine_wrap [OPTIONS] COMMAND [ARGS]...
Options:
--help Show this message and exit.
Commands:
clear Clear all associations.
configure Associate script with given wine-prefix.
run Execute given script in wine-prefix.
scan Scan for executables in given prefix.
set Associate script with given wine-prefix.
show Show current setup.
```
## Getting started
A typical use-case would be to first run an installer, and then run the created executable in the same wine-prefix.
To do so, first run the installer (as well as winecfg beforehand) and name the used prefix:
```bash
$ wine_wrap run --configure --name MyOwnPrefix installer.exe
[..]
```
Afterwards, find the newly installed executable within this prefix and associate it correctly:
```bash
$ wine_wrap scan MyOwnPrefix
[..]
> "/path/to/executable.exe"
[..]
$ wine_wrap set "/path/to/executable.exe" MyOwnPrefix
```
We can then make sure that the correct script-prefix associations are set:
```bash
$ wine_wrap show
--- MyOwnPrefix ---
> installer.exe
> executable.exe
```
It is then possible to simply run the executable in the correct wine-prefix:
```bash
$ wine_wrap run /path/to/executable.exe
[..]
```
Once the prefix is no longer needed, we can clear its script associations and delete it:
```bash
$ wine_wrap clear --delete-prefixes --prefix MyOwnPrefix
[..]
```
| PypiClean |
/TCL_TV_Remote-0.0.1.tar.gz/TCL_TV_Remote-0.0.1/LICENSE.md | MIT License
Copyright (c) 2021 Patrik Johansson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| PypiClean |
/shareddata-3.80.0.tar.gz/shareddata-3.80.0/src/SharedData/TableIndexJit.py | from numba.typed import List
import numpy as np
from numba import njit
###################### DATE_SYMBOL ########################
@njit(cache=True)
def create_pkey_date_symbol_jit(records, count, pkey, dateiniidx, dateendidx, dateunit, start):
    """Build the (date, symbol) hash index for records[start:count].

    pkey is an open-addressing hash table (quadratic probing) mapping a
    (date, symbol) key to its row number; a slot value of -1 means empty.
    dateiniidx/dateendidx record the first and last row index seen for each
    integer date bucket (date // dateunit).

    Returns False as soon as a duplicate (date, symbol) key is found,
    True when every row was indexed.
    """
    # Probe positions are taken modulo pkey.size - 1.
    n = pkey.size-1
    for i in range(start, count):
        # Bucket the timestamp into an integer date slot.
        intdt = np.int32(np.int64(records['date'][i])/dateunit)
        if dateiniidx[intdt] == -1:
            dateiniidx[intdt] = i
        if dateendidx[intdt] < i:
            dateendidx[intdt] = i
        # Combine both key-field hashes into one table slot.
        h0 = hash(records['date'][i])
        h1 = hash(records['symbol'][i])
        h = (h0 ^ h1) % n
        if pkey[h] == -1:
            pkey[h] = i
        else:
            # Slot occupied: quadratic probing (offsets 1, 4, 9, ...) until a
            # free slot turns up or an existing row matches this key.
            duplicatedkey = True
            j = 1
            while (
                (records[pkey[h]]['date'] != records[i]['date']) |
                (records[pkey[h]]['symbol'] != records[i]['symbol'])
            ):
                h = (h + j**2) % n
                if pkey[h] == -1:
                    pkey[h] = i
                    duplicatedkey = False
                    break
                j += 1
            if duplicatedkey:
                # An already-indexed row holds the same (date, symbol) key.
                return False
    return True
@njit(cache=True)
def get_loc_date_symbol_jit(records, pkey, keys):
    """Look up the row number of each (date, symbol) key in the pkey index.

    Uses the same hash-combine and quadratic-probing scheme as
    create_pkey_date_symbol_jit.  A missing key yields -1 (the empty-slot
    marker).  Note the result is a float array (np.empty defaults to
    float64).
    """
    n = pkey.size-1
    loc = np.empty((keys.size, ))
    for i in range(keys.size):
        h0 = hash(keys['date'][i])
        h1 = hash(keys['symbol'][i])
        h = (h0 ^ h1) % n
        if pkey[h] == -1:
            # Empty slot: the key was never inserted, propagate -1.
            loc[i] = pkey[h]
        else:
            # Probe until the stored row matches the key, or an empty slot
            # proves the key is absent.
            j = 1
            while (
                (records[pkey[h]]['date'] != keys[i]['date']) |
                (records[pkey[h]]['symbol'] != keys[i]['symbol'])
            ):
                h = (h + j**2) % n
                if pkey[h] == -1:
                    break
                j += 1
            loc[i] = pkey[h]
    return loc
@njit(cache=True)
def upsert_date_symbol_jit(records, count, new_records, pkey, dateiniidx, dateendidx, dateunit):
    """Insert-or-update new_records into records keyed by (date, symbol).

    Existing keys are overwritten in place; new keys are appended at index
    `count` (silently stopping once records is full).  pkey and the per-date
    start/end indices are kept in sync.

    Returns (count, minchgid): the new record count and the smallest record
    index that was modified.
    """
    minchgid = count
    maxsize = records.size
    nrec = new_records.size
    n = pkey.size - 1
    for i in range(nrec):
        h0 = hash(new_records['date'][i])
        h1 = hash(new_records['symbol'][i])
        h = (h0 ^ h1) % n
        found = False
        if pkey[h] != -1:  # hash already exists
            j = 1
            while True:  # check for collision & find a free bucket
                if (records[pkey[h]]['date'] == new_records[i]['date'] and
                    records[pkey[h]]['symbol'] == new_records[i]['symbol']):
                    # record exists update it
                    records[pkey[h]] = new_records[i]
                    minchgid = min(minchgid, pkey[h])
                    found = True
                    break
                # collision confirmed jump hash
                h = (h + j ** 2) % n
                if pkey[h] == -1:
                    break
                j += 1
        if not found:
            # Check space and append new record
            if count >= maxsize:
                break  # max size reached
            else:
                # append new record
                records[count] = new_records[i]
                intdt = np.int32(np.int64(new_records['date'][i]) / dateunit)
                if dateiniidx[intdt] == -1:
                    dateiniidx[intdt] = count
                if dateendidx[intdt] < count:
                    dateendidx[intdt] = count
                pkey[h] = count
                count += 1
    # NOTE(review): effectively a no-op -- minchgid never exceeds the count
    # the function started with, and count only grows.
    minchgid = min(minchgid, count)
    return count, minchgid
###################### DATE_SYMBOL_SYMBOL1 ########################
@njit(cache=True)
def create_pkey_date_symbol_symbol1_jit(records, count, pkey, dateiniidx, dateendidx, dateunit, start):
    """Build the (date, symbol, symbol1) hash index for records[start:count].

    Same open-addressing scheme as the (date, symbol) variant, but rows where
    symbol == symbol1 are hashed on (date, symbol) only (key comparison still
    includes symbol1, so lookups remain exact).  Returns False on a duplicate
    key, True otherwise; all index arrays are mutated in place.
    """
    n = pkey.size-1
    for i in range(start, count):
        # per-date first/last record index
        intdt = np.int32(np.int64(records['date'][i])/dateunit)
        if dateiniidx[intdt] == -1:
            dateiniidx[intdt] = i
        if dateendidx[intdt] < i:
            dateendidx[intdt] = i
        if records['symbol'][i] != records['symbol1'][i]:
            h0 = hash(records['date'][i])
            h1 = hash(records['symbol'][i])
            h2 = hash(records['symbol1'][i])
            h = (h0 ^ h1 ^ h2) % n
        else:
            # symbol == symbol1: hash like a plain (date, symbol) key
            h0 = hash(records['date'][i])
            h1 = hash(records['symbol'][i])
            h = (h0 ^ h1) % n
        if pkey[h] == -1:
            pkey[h] = i
        else:
            duplicatedkey = True
            j = 1
            while (
                (records[pkey[h]]['date'] != records[i]['date']) |
                (records[pkey[h]]['symbol'] != records[i]['symbol']) |
                (records[pkey[h]]['symbol1'] != records[i]['symbol1'])
            ):
                h = (h + j**2) % n  # incremental quadratic probing
                if pkey[h] == -1:
                    pkey[h] = i
                    duplicatedkey = False
                    break
                j += 1
            if duplicatedkey:
                return False
    return True
@njit(cache=True)
def get_loc_date_symbol_symbol1_jit(records, pkey, keys):
    """Look up each (date, symbol, symbol1) key in the index pkey.

    Mirrors the create/upsert hashing exactly, including the special case
    where symbol == symbol1 hashes on (date, symbol) only.  Returns a
    float64 array of record indices, -1.0 when a key is not found.
    """
    n = pkey.size-1
    loc = np.empty((keys.size, ))  # float result; -1 marks "not found"
    for i in range(keys.size):
        if keys['symbol'][i] != keys['symbol1'][i]:
            h0 = hash(keys['date'][i])
            h1 = hash(keys['symbol'][i])
            h2 = hash(keys['symbol1'][i])
            h = (h0 ^ h1 ^ h2) % n
        else:
            # symbol == symbol1: hash like a plain (date, symbol) key
            h0 = hash(keys['date'][i])
            h1 = hash(keys['symbol'][i])
            h = (h0 ^ h1) % n
        if pkey[h] == -1:
            loc[i] = pkey[h]
        else:
            j = 1
            while (
                (records[pkey[h]]['date'] != keys[i]['date']) |
                (records[pkey[h]]['symbol'] != keys[i]['symbol']) |
                (records[pkey[h]]['symbol1'] != keys[i]['symbol1'])
            ):
                h = (h + j**2) % n  # incremental quadratic probing
                if pkey[h] == -1:
                    break
                j += 1
            loc[i] = pkey[h]
    return loc
@njit(cache=True)
def upsert_date_symbol_symbol1_jit(records, count, new_records, pkey, dateiniidx, dateendidx, dateunit):
    """Insert-or-update new_records keyed by (date, symbol, symbol1).

    Existing keys are overwritten in place; new keys are appended at index
    `count` (silently stopping once records is full).  Rows with
    symbol == symbol1 hash on (date, symbol) only, matching the create/get
    functions.  Returns (count, minchgid): new record count and smallest
    modified record index.
    """
    minchgid = count
    maxsize = records.size
    nrec = new_records.size
    n = pkey.size - 1
    for i in range(nrec):
        if new_records['symbol'][i] != new_records['symbol1'][i]:
            h0 = hash(new_records['date'][i])
            h1 = hash(new_records['symbol'][i])
            h2 = hash(new_records['symbol1'][i])
            h = (h0 ^ h1 ^ h2) % n
        else:
            # symbol == symbol1: hash like a plain (date, symbol) key
            h0 = hash(new_records['date'][i])
            h1 = hash(new_records['symbol'][i])
            h = (h0 ^ h1) % n
        found = False
        if pkey[h] != -1:  # hash already exists
            j = 1
            while True:  # check for collision & find a free bucket
                if (records[pkey[h]]['date'] == new_records[i]['date'] and
                    records[pkey[h]]['symbol'] == new_records[i]['symbol'] and
                    records[pkey[h]]['symbol1'] == new_records[i]['symbol1']):
                    # record exists update it
                    records[pkey[h]] = new_records[i]
                    minchgid = min(minchgid, pkey[h])
                    found = True
                    break
                # collision confirmed jump hash
                h = (h + j ** 2) % n
                if pkey[h] == -1:
                    break
                j += 1
        if not found:
            # Check space and append new record
            if count >= maxsize:
                break  # max size reached
            else:
                # append new record
                records[count] = new_records[i]
                intdt = np.int32(np.int64(new_records['date'][i]) / dateunit)
                if dateiniidx[intdt] == -1:
                    dateiniidx[intdt] = count
                if dateendidx[intdt] < count:
                    dateendidx[intdt] = count
                pkey[h] = count
                count += 1
    # NOTE(review): effectively a no-op -- minchgid never exceeds the count
    # the function started with, and count only grows.
    minchgid = min(minchgid, count)
    return count, minchgid
###################### DATE_PORTFOLIO ########################
@njit(cache=True)
def create_pkey_date_portfolio_jit(records, count, pkey, dateiniidx, dateendidx, dateunit, start):
    """Build the (date, portfolio) hash index for records[start:count].

    Open addressing with an incremental quadratic probe; pkey maps slot ->
    record index (-1 = empty).  dateiniidx/dateendidx track the first/last
    record index per integer date.  Returns False on a duplicate key,
    True otherwise; index arrays are mutated in place.
    """
    n = pkey.size-1
    for i in range(start, count):
        # per-date first/last record index
        intdt = np.int32(np.int64(records['date'][i])/dateunit)
        if dateiniidx[intdt] == -1:
            dateiniidx[intdt] = i
        if dateendidx[intdt] < i:
            dateendidx[intdt] = i
        h0 = hash(records['date'][i])
        h1 = hash(records['portfolio'][i])
        h = (h0 ^ h1) % n
        if pkey[h] == -1:
            pkey[h] = i
        else:
            duplicatedkey = True
            j = 1
            # probe until empty slot; immediate match means duplicate key
            while (
                (records[pkey[h]]['date'] != records[i]['date']) |
                (records[pkey[h]]['portfolio'] != records[i]['portfolio'])
            ):
                h = (h + j**2) % n  # incremental quadratic probing
                if pkey[h] == -1:
                    pkey[h] = i
                    duplicatedkey = False
                    break
                j += 1
            if duplicatedkey:
                return False
    return True
@njit(cache=True)
def get_loc_date_portfolio_jit(records, pkey, keys):
    """Look up each (date, portfolio) key in the open-addressed index pkey.

    Returns a float64 array of record indices; -1.0 when a key is not
    present (probing reached an empty slot).
    """
    n = pkey.size-1
    loc = np.empty((keys.size, ))  # float result; -1 marks "not found"
    for i in range(keys.size):
        h0 = hash(keys['date'][i])
        h1 = hash(keys['portfolio'][i])
        h = (h0 ^ h1) % n
        if pkey[h] == -1:
            loc[i] = pkey[h]
        else:
            j = 1
            # same incremental quadratic probe used on insert
            while (
                (records[pkey[h]]['date'] != keys[i]['date']) |
                (records[pkey[h]]['portfolio'] != keys[i]['portfolio'])
            ):
                h = (h + j**2) % n
                if pkey[h] == -1:
                    break
                j += 1
            loc[i] = pkey[h]
    return loc
@njit(cache=True)
def upsert_date_portfolio_jit(records, count, new_records, pkey, dateiniidx, dateendidx, dateunit):
    """Insert-or-update new_records into records keyed by (date, portfolio).

    Existing keys are overwritten in place; new keys are appended at index
    `count` (silently stopping once records is full).  pkey and the per-date
    start/end indices are kept in sync.  Returns (count, minchgid): new
    record count and smallest modified record index.
    """
    minchgid = count
    maxsize = records.size
    nrec = new_records.size
    n = pkey.size - 1
    for i in range(nrec):
        h0 = hash(new_records['date'][i])
        h1 = hash(new_records['portfolio'][i])
        h = (h0 ^ h1) % n
        found = False
        if pkey[h] != -1:  # hash already exists
            j = 1
            while True:  # check for collision & find a free bucket
                if (records[pkey[h]]['date'] == new_records[i]['date'] and
                    records[pkey[h]]['portfolio'] == new_records[i]['portfolio']):
                    # record exists update it
                    records[pkey[h]] = new_records[i]
                    minchgid = min(minchgid, pkey[h])
                    found = True
                    break
                # collision confirmed jump hash
                h = (h + j ** 2) % n
                if pkey[h] == -1:
                    break
                j += 1
        if not found:
            # Check space and append new record
            if count >= maxsize:
                break  # max size reached
            else:
                # append new record
                records[count] = new_records[i]
                intdt = np.int32(np.int64(new_records['date'][i]) / dateunit)
                if dateiniidx[intdt] == -1:
                    dateiniidx[intdt] = count
                if dateendidx[intdt] < count:
                    dateendidx[intdt] = count
                pkey[h] = count
                count += 1
    # NOTE(review): effectively a no-op -- minchgid never exceeds the count
    # the function started with, and count only grows.
    minchgid = min(minchgid, count)
    return count, minchgid
###################### DATE_PORTFOLIO_SYMBOL ########################
@njit(cache=True)
def create_pkey_date_portfolio_symbol_jit(records, count, pkey, dateiniidx, dateendidx, dateunit,
                                          portiniidx, portendidx, portlist, portlistcount, start):
    """Build the (date, portfolio, symbol) primary index plus a secondary
    (date, portfolio) composite index for records[start:count].

    Primary: pkey is an open-addressed bucket array (slot -> record index,
    -1 = empty) with incremental quadratic probing.

    Secondary: portiniidx/portendidx map a (date, portfolio) hash slot to the
    head/tail node of a singly-linked chain stored in portlist.  Each node
    occupies two slots [record index, next-node id] at even offsets
    (portlistcount * 2); -1 in the next slot terminates the chain.

    Returns False on a duplicate primary key, True otherwise.  NOTE(review):
    portlistcount is advanced locally and not returned -- presumably callers
    re-derive it; confirm before relying on it across calls.
    """
    n = pkey.size-1
    for i in range(start, count):
        # per-date first/last record index
        intdt = np.int32(np.int64(records['date'][i])/dateunit)
        if dateiniidx[intdt] == -1:
            dateiniidx[intdt] = i
        if dateendidx[intdt] < i:
            dateendidx[intdt] = i
        h0 = hash(records['date'][i])
        h1 = hash(records['portfolio'][i])
        h2 = hash(records['symbol'][i])
        h = (h0 ^ h1 ^ h2) % n
        if pkey[h] == -1:
            pkey[h] = i
        else:
            duplicatedkey = True
            j = 1
            while (
                (records[pkey[h]]['date'] != records[i]['date']) |
                (records[pkey[h]]['portfolio'] != records[i]['portfolio']) |
                (records[pkey[h]]['symbol'] != records[i]['symbol'])
            ):
                h = (h + j**2) % n  # incremental quadratic probing
                if pkey[h] == -1:
                    pkey[h] = i
                    duplicatedkey = False
                    break
                j += 1
            if duplicatedkey:
                return False
        # --- secondary (date, portfolio) composite index ---
        hport = (h0 ^ h1) % n
        if portiniidx[hport] == -1:
            # start a new chain: allocate a 2-slot node [record idx, next]
            newid = int(portlistcount*2)
            portiniidx[hport] = newid
            portendidx[hport] = newid
            portlist[newid] = i
            portlistcount += 1
        else:
            j = 1
            fid = portlist[portiniidx[hport]]
            newindex = False
            # probe for the chain whose head matches this (date, portfolio)
            while (
                (records[fid]['date'] != records[i]['date']) |
                (records[fid]['portfolio'] != records[i]['portfolio'])
            ):
                hport = (hport + j**2) % n
                if portiniidx[hport] == -1:
                    newid = int(portlistcount*2)
                    portiniidx[hport] = newid
                    portendidx[hport] = newid
                    portlist[newid] = i
                    portlistcount += 1
                    newindex = True
                    break
                fid = portlist[portiniidx[hport]]
                j += 1
            if not newindex:
                # append a node to the existing chain via the tail pointer
                curid = portendidx[hport]
                newid = int(portlistcount*2)
                portlist[curid+1] = newid
                portlist[newid] = i
                portendidx[hport] = newid
                portlistcount += 1
    return True
@njit(cache=True)
def get_loc_date_portfolio_symbol_jit(records, pkey, keys):
    """Look up each (date, portfolio, symbol) key in the index pkey.

    Returns a float64 array of record indices; -1.0 when a key is not
    present (probing reached an empty slot).
    """
    n = pkey.size-1
    loc = np.empty((keys.size, ))  # float result; -1 marks "not found"
    for i in range(keys.size):
        h0 = hash(keys['date'][i])
        h1 = hash(keys['portfolio'][i])
        h2 = hash(keys['symbol'][i])
        h = (h0 ^ h1 ^ h2) % n
        if pkey[h] == -1:
            loc[i] = pkey[h]
        else:
            j = 1
            # same incremental quadratic probe used on insert
            while (
                (records[pkey[h]]['date'] != keys[i]['date']) |
                (records[pkey[h]]['portfolio'] != keys[i]['portfolio']) |
                (records[pkey[h]]['symbol'] != keys[i]['symbol'])
            ):
                h = (h + j**2) % n
                if pkey[h] == -1:
                    break
                j += 1
            loc[i] = pkey[h]
    return loc
@njit(cache=True)
def upsert_date_portfolio_symbol_jit(records, count, new_records, pkey, dateiniidx, dateendidx, dateunit,
                                     portiniidx, portendidx, portlist, portlistcount):
    """Insert-or-update new_records keyed by (date, portfolio, symbol).

    Existing keys are overwritten in place; new keys are appended at index
    `count` (silently stopping once records is full).  On append, the
    per-date indices, the primary hash index pkey and the secondary
    (date, portfolio) chain index (portiniidx/portendidx/portlist) are all
    updated.  Returns (count, minchgid).

    NOTE(review): portlistcount is advanced locally and not returned --
    repeated upsert calls would reuse node slots unless the caller
    re-derives it; confirm.
    """
    minchgid = count
    maxsize = records.size
    nrec = new_records.size
    n = pkey.size - 1
    for i in range(nrec):
        h0 = hash(new_records['date'][i])
        h1 = hash(new_records['portfolio'][i])
        h2 = hash(new_records['symbol'][i])
        h = (h0 ^ h1 ^ h2) % n
        found = False
        if pkey[h] != -1:  # hash already exists
            j = 1
            while True:  # check for collision & find a free bucket
                if (records[pkey[h]]['date'] == new_records[i]['date'] and
                    records[pkey[h]]['portfolio'] == new_records[i]['portfolio'] and
                    records[pkey[h]]['symbol'] == new_records[i]['symbol']):
                    # record exists, update it
                    records[pkey[h]] = new_records[i]
                    minchgid = min(minchgid, pkey[h])
                    found = True
                    break
                # collision confirmed, jump hash
                h = (h + j ** 2) % n
                if pkey[h] == -1:
                    break
                j += 1
        if not found:
            # Check space and append new record
            if count >= maxsize:
                break  # max size reached
            else:
                # append new record
                records[count] = new_records[i]
                intdt = np.int32(np.int64(new_records['date'][i]) / dateunit)
                if dateiniidx[intdt] == -1:
                    dateiniidx[intdt] = count
                if dateendidx[intdt] < count:
                    dateendidx[intdt] = count
                pkey[h] = count
                # inlined logic for updating portlist
                hport = (h0 ^ h1) % n
                if portiniidx[hport] == -1:
                    # start a new (date, portfolio) chain
                    newid = int(portlistcount * 2)
                    portiniidx[hport] = newid
                    portendidx[hport] = newid
                    portlist[newid] = count
                    portlistcount += 1
                else:
                    j = 1
                    fid = portlist[portiniidx[hport]]
                    newindex = False
                    # probe for the chain matching this (date, portfolio)
                    while (records[fid]['date'] != records[count]['date'] or
                           records[fid]['portfolio'] != records[count]['portfolio']):
                        hport = (hport + j ** 2) % n
                        if portiniidx[hport] == -1:
                            newid = int(portlistcount * 2)
                            portiniidx[hport] = newid
                            portendidx[hport] = newid
                            portlist[newid] = count
                            portlistcount += 1
                            newindex = True
                            break
                        fid = portlist[portiniidx[hport]]
                        j += 1
                    if not newindex:
                        # append a node to the existing chain via the tail
                        curid = portendidx[hport]
                        newid = int(portlistcount * 2)
                        portlist[curid + 1] = newid
                        portlist[newid] = count
                        portendidx[hport] = newid
                        portlistcount += 1
                count += 1
    # NOTE(review): effectively a no-op -- minchgid never exceeds the count
    # the function started with, and count only grows.
    minchgid = min(minchgid, count)
    return count, minchgid
###################### DATE_PORTFOLIO_SYMBOL_CLORDID ########################
@njit(cache=True)
def create_pkey_date_portfolio_symbol_clordid_jit(records, count, pkey, dateiniidx, dateendidx, dateunit, portiniidx, portendidx, portlist, portlistcount, start):
    """Build the (date, portfolio, symbol, clordid) primary index plus a
    secondary (date, portfolio) chain index for records[start:count].

    Same scheme as the portfolio_symbol variant with clordid added to the
    primary key: pkey is open-addressed with incremental quadratic probing;
    portlist stores 2-slot chain nodes [record index, next id] at even
    offsets.  Returns False on a duplicate primary key, True otherwise.
    NOTE(review): portlistcount is advanced locally and not returned.
    """
    n = pkey.size-1
    for i in range(start, count):
        # per-date first/last record index
        intdt = np.int32(np.int64(records['date'][i])/dateunit)
        if dateiniidx[intdt] == -1:
            dateiniidx[intdt] = i
        if dateendidx[intdt] < i:
            dateendidx[intdt] = i
        h0 = hash(records['date'][i])
        h1 = hash(records['portfolio'][i])
        h2 = hash(records['symbol'][i])
        h3 = hash(records['clordid'][i])
        h = (h0 ^ h1 ^ h2 ^ h3) % n
        if pkey[h] == -1:
            pkey[h] = i
        else:
            duplicatedkey = True
            j = 1
            while (
                (records[pkey[h]]['date'] != records[i]['date']) |
                (records[pkey[h]]['portfolio'] != records[i]['portfolio']) |
                (records[pkey[h]]['symbol'] != records[i]['symbol']) |
                (records[pkey[h]]['clordid'] != records[i]['clordid'])
            ):
                h = (h + j**2) % n  # incremental quadratic probing
                if pkey[h] == -1:
                    pkey[h] = i
                    duplicatedkey = False
                    break
                j += 1
            if duplicatedkey:
                return False
        # --- secondary (date, portfolio) composite index ---
        hport = (h0 ^ h1) % n
        if portiniidx[hport] == -1:
            # start a new chain: allocate a 2-slot node [record idx, next]
            newid = int(portlistcount*2)
            portiniidx[hport] = newid
            portendidx[hport] = newid
            portlist[newid] = i
            portlistcount += 1
        else:
            j = 1
            fid = portlist[portiniidx[hport]]
            newindex = False
            # probe for the chain whose head matches this (date, portfolio)
            while (
                (records[fid]['date'] != records[i]['date']) |
                (records[fid]['portfolio'] != records[i]['portfolio'])
            ):
                hport = (hport + j**2) % n
                if portiniidx[hport] == -1:
                    newid = int(portlistcount*2)
                    portiniidx[hport] = newid
                    portendidx[hport] = newid
                    portlist[newid] = i
                    portlistcount += 1
                    newindex = True
                    break
                fid = portlist[portiniidx[hport]]
                j += 1
            if not newindex:
                # append a node to the existing chain via the tail pointer
                curid = portendidx[hport]
                newid = int(portlistcount*2)
                portlist[curid+1] = newid
                portlist[newid] = i
                portendidx[hport] = newid
                portlistcount += 1
    return True
@njit(cache=True)
def get_loc_date_portfolio_symbol_clordid_jit(records, pkey, keys):
    """Look up each (date, portfolio, symbol, clordid) key in pkey.

    Returns a float64 array of record indices; -1.0 when a key is not
    present (probing reached an empty slot).
    """
    n = pkey.size-1
    loc = np.empty((keys.size, ))  # float result; -1 marks "not found"
    for i in range(keys.size):
        h0 = hash(keys['date'][i])
        h1 = hash(keys['portfolio'][i])
        h2 = hash(keys['symbol'][i])
        h3 = hash(keys['clordid'][i])
        h = (h0 ^ h1 ^ h2 ^ h3) % n
        if pkey[h] == -1:
            loc[i] = pkey[h]
        else:
            j = 1
            # same incremental quadratic probe used on insert
            while (
                (records[pkey[h]]['date'] != keys[i]['date']) |
                (records[pkey[h]]['portfolio'] != keys[i]['portfolio']) |
                (records[pkey[h]]['symbol'] != keys[i]['symbol']) |
                (records[pkey[h]]['clordid'] != keys[i]['clordid'])
            ):
                h = (h + j**2) % n
                if pkey[h] == -1:
                    break
                j += 1
            loc[i] = pkey[h]
    return loc
@njit(cache=True)
def upsert_date_portfolio_symbol_clordid_jit(records, count, new_records, pkey, dateiniidx, dateendidx, dateunit,
                                             portiniidx, portendidx, portlist, portlistcount):
    """Insert-or-update new_records keyed by (date, portfolio, symbol, clordid).

    Existing keys are overwritten in place; new keys are appended at index
    `count` (silently stopping once records is full), updating the per-date
    indices, pkey, and the secondary (date, portfolio) chain index.
    Returns (count, minchgid).

    NOTE(review): portlistcount is advanced locally and not returned --
    repeated upsert calls would reuse node slots unless the caller
    re-derives it; confirm.
    """
    minchgid = count
    maxsize = records.size
    nrec = new_records.size
    n = pkey.size - 1
    for i in range(nrec):
        h0 = hash(new_records['date'][i])
        h1 = hash(new_records['portfolio'][i])
        h2 = hash(new_records['symbol'][i])
        h3 = hash(new_records['clordid'][i])
        h = (h0 ^ h1 ^ h2 ^ h3) % n
        found = False
        if pkey[h] != -1:  # hash already exists
            j = 1
            while True:  # check for collision & find a free bucket
                if (records[pkey[h]]['date'] == new_records[i]['date'] and
                    records[pkey[h]]['portfolio'] == new_records[i]['portfolio'] and
                    records[pkey[h]]['symbol'] == new_records[i]['symbol'] and
                    records[pkey[h]]['clordid'] == new_records[i]['clordid']):
                    # record exists, update it
                    records[pkey[h]] = new_records[i]
                    minchgid = min(minchgid, pkey[h])
                    found = True
                    break
                # collision confirmed, jump hash using quadratic probing
                h = (h + j ** 2) % n
                if pkey[h] == -1:
                    break
                j += 1
        if not found:
            # Check space and append new record
            if count >= maxsize:
                break  # max size reached
            else:
                # append new record
                records[count] = new_records[i]
                intdt = np.int32(np.int64(new_records['date'][i]) / dateunit)
                if dateiniidx[intdt] == -1:
                    dateiniidx[intdt] = count
                if dateendidx[intdt] < count:
                    dateendidx[intdt] = count
                pkey[h] = count
                # inlined logic for updating portlist
                hport = (h0 ^ h1) % n
                if portiniidx[hport] == -1:
                    # start a new (date, portfolio) chain
                    newid = int(portlistcount * 2)
                    portiniidx[hport] = newid
                    portendidx[hport] = newid
                    portlist[newid] = count
                    portlistcount += 1
                else:
                    j = 1
                    fid = portlist[portiniidx[hport]]
                    newindex = False
                    # probe for the chain matching this (date, portfolio)
                    while (records[fid]['date'] != records[count]['date'] or
                           records[fid]['portfolio'] != records[count]['portfolio']):
                        hport = (hport + j ** 2) % n
                        if portiniidx[hport] == -1:
                            newid = int(portlistcount * 2)
                            portiniidx[hport] = newid
                            portendidx[hport] = newid
                            portlist[newid] = count
                            portlistcount += 1
                            newindex = True
                            break
                        fid = portlist[portiniidx[hport]]
                        j += 1
                    if not newindex:
                        # append a node to the existing chain via the tail
                        curid = portendidx[hport]
                        newid = int(portlistcount * 2)
                        portlist[curid + 1] = newid
                        portlist[newid] = count
                        portendidx[hport] = newid
                        portlistcount += 1
                count += 1
    # NOTE(review): effectively a no-op -- minchgid never exceeds the count
    # the function started with, and count only grows.
    minchgid = min(minchgid, count)
    return count, minchgid
###################### DATE_PORTFOLIO_SYMBOL_TRADEID ########################
@njit(cache=True)
def create_pkey_date_portfolio_symbol_tradeid_jit(records, count, pkey, dateiniidx, dateendidx, dateunit, portiniidx, portendidx, portlist, portlistcount, start):
    """Build the (date, portfolio, symbol, tradeid) primary index plus a
    secondary (date, portfolio) chain index for records[start:count].

    Same scheme as the clordid variant with tradeid in the primary key:
    pkey is open-addressed with incremental quadratic probing; portlist
    stores 2-slot chain nodes [record index, next id] at even offsets.
    Returns False on a duplicate primary key, True otherwise.
    NOTE(review): portlistcount is advanced locally and not returned.
    """
    n = pkey.size-1
    for i in range(start, count):
        # per-date first/last record index
        intdt = np.int32(np.int64(records['date'][i])/dateunit)
        if dateiniidx[intdt] == -1:
            dateiniidx[intdt] = i
        if dateendidx[intdt] < i:
            dateendidx[intdt] = i
        h0 = hash(records['date'][i])
        h1 = hash(records['portfolio'][i])
        h2 = hash(records['symbol'][i])
        h3 = hash(records['tradeid'][i])
        h = (h0 ^ h1 ^ h2 ^ h3) % n
        if pkey[h] == -1:
            pkey[h] = i
        else:
            duplicatedkey = True
            j = 1
            while (
                (records[pkey[h]]['date'] != records[i]['date']) |
                (records[pkey[h]]['portfolio'] != records[i]['portfolio']) |
                (records[pkey[h]]['symbol'] != records[i]['symbol']) |
                (records[pkey[h]]['tradeid'] != records[i]['tradeid'])
            ):
                h = (h + j**2) % n  # incremental quadratic probing
                if pkey[h] == -1:
                    pkey[h] = i
                    duplicatedkey = False
                    break
                j += 1
            if duplicatedkey:
                return False
        # --- secondary (date, portfolio) composite index ---
        hport = (h0 ^ h1) % n
        if portiniidx[hport] == -1:
            # start a new chain: allocate a 2-slot node [record idx, next]
            newid = int(portlistcount*2)
            portiniidx[hport] = newid
            portendidx[hport] = newid
            portlist[newid] = i
            portlistcount += 1
        else:
            j = 1
            fid = portlist[portiniidx[hport]]
            newindex = False
            # probe for the chain whose head matches this (date, portfolio)
            while (
                (records[fid]['date'] != records[i]['date']) |
                (records[fid]['portfolio'] != records[i]['portfolio'])
            ):
                hport = (hport + j**2) % n
                if portiniidx[hport] == -1:
                    newid = int(portlistcount*2)
                    portiniidx[hport] = newid
                    portendidx[hport] = newid
                    portlist[newid] = i
                    portlistcount += 1
                    newindex = True
                    break
                fid = portlist[portiniidx[hport]]
                j += 1
            if not newindex:
                # append a node to the existing chain via the tail pointer
                curid = portendidx[hport]
                newid = int(portlistcount*2)
                portlist[curid+1] = newid
                portlist[newid] = i
                portendidx[hport] = newid
                portlistcount += 1
    return True
@njit(cache=True)
def get_loc_date_portfolio_symbol_tradeid_jit(records, pkey, keys):
    """Look up each (date, portfolio, symbol, tradeid) key in pkey.

    Returns a float64 array of record indices; -1.0 when a key is not
    present (probing reached an empty slot).
    """
    n = pkey.size-1
    loc = np.empty((keys.size, ))  # float result; -1 marks "not found"
    for i in range(keys.size):
        h0 = hash(keys['date'][i])
        h1 = hash(keys['portfolio'][i])
        h2 = hash(keys['symbol'][i])
        h3 = hash(keys['tradeid'][i])
        h = (h0 ^ h1 ^ h2 ^ h3) % n
        if pkey[h] == -1:
            loc[i] = pkey[h]
        else:
            j = 1
            # same incremental quadratic probe used on insert
            while (
                (records[pkey[h]]['date'] != keys[i]['date']) |
                (records[pkey[h]]['portfolio'] != keys[i]['portfolio']) |
                (records[pkey[h]]['symbol'] != keys[i]['symbol']) |
                (records[pkey[h]]['tradeid'] != keys[i]['tradeid'])
            ):
                h = (h + j**2) % n
                if pkey[h] == -1:
                    break
                j += 1
            loc[i] = pkey[h]
    return loc
@njit(cache=True)
def upsert_date_portfolio_symbol_tradeid_jit(records, count, new_records, pkey, dateiniidx, dateendidx, dateunit,
                                             portiniidx, portendidx, portlist, portlistcount):
    """Insert-or-update new_records keyed by (date, portfolio, symbol, tradeid).

    Existing keys are overwritten in place; new keys are appended at index
    `count` (silently stopping once records is full), updating the per-date
    indices, pkey, and the secondary (date, portfolio) chain index.
    Returns (count, minchgid).

    NOTE(review): portlistcount is advanced locally and not returned --
    repeated upsert calls would reuse node slots unless the caller
    re-derives it; confirm.
    """
    minchgid = count
    maxsize = records.size
    nrec = new_records.size
    n = pkey.size - 1
    for i in range(nrec):
        h0 = hash(new_records['date'][i])
        h1 = hash(new_records['portfolio'][i])
        h2 = hash(new_records['symbol'][i])
        h3 = hash(new_records['tradeid'][i])
        h = (h0 ^ h1 ^ h2 ^ h3) % n
        found = False
        if pkey[h] != -1:  # hash already exists
            j = 1
            while True:  # check for collision & find a free bucket
                if (records[pkey[h]]['date'] == new_records[i]['date'] and
                    records[pkey[h]]['portfolio'] == new_records[i]['portfolio'] and
                    records[pkey[h]]['symbol'] == new_records[i]['symbol'] and
                    records[pkey[h]]['tradeid'] == new_records[i]['tradeid']):
                    # record exists, update it
                    records[pkey[h]] = new_records[i]
                    minchgid = min(minchgid, pkey[h])
                    found = True
                    break
                # collision confirmed, jump hash using quadratic probing
                h = (h + j ** 2) % n
                if pkey[h] == -1:
                    break
                j += 1
        if not found:
            # Check space and append new record
            if count >= maxsize:
                break  # max size reached
            else:
                # append new record
                records[count] = new_records[i]
                intdt = np.int32(np.int64(new_records['date'][i]) / dateunit)
                if dateiniidx[intdt] == -1:
                    dateiniidx[intdt] = count
                if dateendidx[intdt] < count:
                    dateendidx[intdt] = count
                pkey[h] = count
                # inlined logic for updating portlist
                hport = (h0 ^ h1) % n
                if portiniidx[hport] == -1:
                    # start a new (date, portfolio) chain
                    newid = int(portlistcount * 2)
                    portiniidx[hport] = newid
                    portendidx[hport] = newid
                    portlist[newid] = count
                    portlistcount += 1
                else:
                    j = 1
                    fid = portlist[portiniidx[hport]]
                    newindex = False
                    # probe for the chain matching this (date, portfolio)
                    while (records[fid]['date'] != records[count]['date'] or
                           records[fid]['portfolio'] != records[count]['portfolio']):
                        hport = (hport + j ** 2) % n
                        if portiniidx[hport] == -1:
                            newid = int(portlistcount * 2)
                            portiniidx[hport] = newid
                            portendidx[hport] = newid
                            portlist[newid] = count
                            portlistcount += 1
                            newindex = True
                            break
                        fid = portlist[portiniidx[hport]]
                        j += 1
                    if not newindex:
                        # append a node to the existing chain via the tail
                        curid = portendidx[hport]
                        newid = int(portlistcount * 2)
                        portlist[curid + 1] = newid
                        portlist[newid] = count
                        portendidx[hport] = newid
                        portlistcount += 1
                count += 1
    # NOTE(review): effectively a no-op -- minchgid never exceeds the count
    # the function started with, and count only grows.
    minchgid = min(minchgid, count)
    return count, minchgid
####################### COMPOSITE INDEX ######################################
@njit(cache=True)
def get_index_date_portfolio_jit(records, keys, pkey, portiniidx, portlist):
    """Return all record indices matching each (date, portfolio) key via the
    secondary chain index.

    For every key, probes portiniidx with the same incremental quadratic
    step used when the chains were built, then walks the singly-linked chain
    in portlist (2-slot nodes [record index, next id], -1 terminates).

    Returns (loc, keyloc) as numba typed Lists: loc holds matching record
    indices, keyloc holds the position of the key each match belongs to.
    Keys with no chain contribute nothing to either list.
    """
    n = pkey.size-1
    loc = List()
    keyloc = List()
    for i in range(keys.size):
        h0 = hash(keys['date'][i])
        h1 = hash(keys['portfolio'][i])
        h = (h0 ^ h1) % n
        if portiniidx[h] == -1:
            pass  # no chain for this slot -> key has no records
        else:
            j = 1
            portfound = True
            recid = portlist[portiniidx[h]]
            # probe until the chain head matches this (date, portfolio)
            while (
                (records[recid]['date'] != keys[i]['date']) |
                (records[recid]['portfolio'] != keys[i]['portfolio'])
            ):
                h = (h + j**2) % n
                if portiniidx[h] == -1:
                    portfound = False
                    break
                recid = portlist[portiniidx[h]]
                j += 1
            if portfound:
                # walk the chain: node = [record index, next node id]
                curid = portiniidx[h]
                fid = portlist[curid]
                loc.append(fid)
                keyloc.append(i)
                nextid = portlist[curid+1]
                while nextid != -1:
                    curid = nextid
                    loc.append(portlist[curid])
                    keyloc.append(i)
                    nextid = portlist[curid+1]
    return loc, keyloc
/steam_sdk-2023.6.29-py3-none-any.whl/steam_sdk/data/DataLEDETOptions.py | from pydantic import BaseModel
from typing import (List, Union)
class TimeVectorLEDET(BaseModel):
    """
    Level 2: Class for simulation time vector in LEDET
    """
    # Parameters defining the simulation time vector -- presumably flattened
    # [start, step, end] windows; confirm against the LEDET input writer.
    time_vector_params: List[float] = []
class MagnetInductance(BaseModel):
    """
    Level 2: Class for magnet inductance assignment

    Field names mirror the corresponding LEDET input variables; None/empty
    values mean the option is left unset.
    """
    # If True, the inductance is calculated instead of taken from the tables below
    flag_calculate_inductance: bool = None
    # Manual override of the coil-section inductance matrix
    overwrite_inductance_coil_sections: List[List[float]] = [[]]
    # Manual half-turn -> inductance-block mapping override
    overwrite_HalfTurnToInductanceBlock: List[int] = []
    # Lookup table: current values and matching differential inductance
    LUT_DifferentialInductance_current: List[float] = []
    LUT_DifferentialInductance_inductance: List[float] = []
class HeatExchange(BaseModel):
    """
    Level 2: Class for heat exchange information

    The iContact* lists hold pairs of half-turn indices whose thermal
    contacts (along width/height) are added to or removed from the
    automatically detected set.
    """
    heat_exchange_max_distance: float = None  # heat exchange max_distance
    iContactAlongWidth_pairs_to_add: List[List[int]] = [[]]
    iContactAlongWidth_pairs_to_remove: List[List[int]] = [[]]
    iContactAlongHeight_pairs_to_add: List[List[int]] = [[]]
    iContactAlongHeight_pairs_to_remove: List[List[int]] = [[]]
    # Insulation thickness between layers -- units presumably meters; confirm
    th_insulationBetweenLayers: float = None
class ConductorGeometry(BaseModel):
    """
    Level 2: Class for multipole geometry parameters - ONLY USED FOR ISCC/ISCL CALCULATION

    One entry per half-turn; names in parentheses are the matching LEDET
    input variables.
    """
    alphaDEG_ht: List[float] = []  # Inclination angle of each half-turn, alphaDEG (LEDET)
    rotation_ht: List[float] = []  # Rotation of each half-turn, rotation_block (LEDET)
    mirror_ht: List[int] = []  # Mirror around quadrant bisector line for half-turn, mirror_block (LEDET)
    mirrorY_ht: List[int] = []  # Mirror around Y axis for half-turn, mirrorY_block (LEDET)
class FieldMapFilesLEDET(BaseModel):
    """
    Level 2: Class for field map file parameters in LEDET

    Field names mirror the corresponding LEDET input variables; None means
    the option is left unset.
    """
    Iref: float = None  # reference current the field map was computed at
    flagIron: int = None
    flagSelfField: int = None
    headerLines: int = None  # number of header lines to skip in the map file
    columnsXY: List[int] = []  # column indices of the X/Y coordinates
    columnsBxBy: List[int] = []  # column indices of the Bx/By components
    flagPlotMTF: int = None
    fieldMapNumber: int = None
    flag_modify_map2d_ribbon_cable: int = None
    flag_calculateMagneticField: int = None
class InputGenerationOptionsLEDET(BaseModel):
    """
    Level 2: Class for input generation options in LEDET

    Field names mirror the corresponding LEDET input variables; None means
    the option is left unset.
    """
    # flag_typeWindings: int = None
    flag_calculateInductanceMatrix: int = None
    flag_useExternalInitialization: int = None
    flag_initializeVar: int = None
    selfMutualInductanceFileNumber: int = None
class SimulationLEDET(BaseModel):
    """
    Level 2: Class for simulation options in LEDET

    Field names mirror the corresponding LEDET input flags; None means the
    flag is left unset.
    """
    flag_fastMode: int = None
    flag_controlCurrent: int = None
    flag_automaticRefinedTimeStepping: int = None
class PhysicsLEDET(BaseModel):
    """
    Level 2: Class for physics options in LEDET

    Field names mirror the corresponding LEDET input variables one-to-one
    (flags, scaling factors fScaling_*, time constants tau_*); None means
    the option is left unset in the generated input file.
    """
    flag_IronSaturation: int = None
    flag_InvertCurrentsAndFields: int = None
    flag_ScaleDownSuperposedMagneticField: int = None
    flag_HeCooling: int = None
    fScaling_Pex: float = None
    fScaling_Pex_AlongHeight: float = None
    fScaling_MR: float = None
    flag_scaleCoilResistance_StrandTwistPitch: int = None
    flag_separateInsulationHeatCapacity: int = None
    flag_persistentCurrents: int = None
    flag_ISCL: int = None
    fScaling_Mif: float = None
    fScaling_Mis: float = None
    flag_StopIFCCsAfterQuench: int = None
    flag_StopISCCsAfterQuench: int = None
    tau_increaseRif: float = None
    tau_increaseRis: float = None
    fScaling_RhoSS: float = None
    maxVoltagePC: float = None
    minCurrentDiode: float = None
    flag_symmetricGroundingEE: int = None
    flag_removeUc: int = None
    # Background field components -- presumably Tesla; confirm units
    BtX_background: float = None
    BtY_background: float = None
class QuenchInitializationLEDET(BaseModel):
    """
    Level 2: Class for quench initialization parameters in LEDET

    Parallel lists, one entry per triggered quench location; names mirror
    the corresponding LEDET input variables.
    """
    iStartQuench: List[int] = []  # half-turn indices where quenches start
    tStartQuench: List[float] = []  # quench start times
    lengthHotSpot_iStartQuench: List[float] = []  # initial hot-spot lengths
    fScaling_vQ_iStartQuench: List[float] = []  # quench-velocity scaling factors
class PostProcessingLEDET(BaseModel):
    """
    Level 2: Class for post processing options in LEDET

    Field names mirror the corresponding LEDET input variables; None/empty
    means the option is left unset.
    """
    flag_showFigures: int = None
    flag_saveFigures: int = None
    flag_saveMatFile: int = None
    flag_saveTxtFiles: int = None
    flag_generateReport: int = None
    flag_saveResultsToMesh: int = None
    tQuench: List[float] = []
    initialQuenchTemp: List[float] = []
    flag_hotSpotTemperatureInEachGroup: int = None
    flag_importFieldWhenCalculatingHotSpotT: int = None
class Simulation3DLEDET(BaseModel):
    """
    Level 2: Class for 3D simulation parameters and options in lEDET

    Field names mirror the corresponding LEDET input variables (sim3D_*);
    None/empty means the option is left unset.
    """
    # Variables in the "Options" sheet
    flag_3D: int = None
    flag_adaptiveTimeStepping: int = None
    sim3D_flag_Import3DGeometry: int = None
    sim3D_import3DGeometry_modelNumber: int = None
    # Variables in the "Inputs" sheet
    sim3D_uThreshold: float = None
    # Cooling fractions may be a scalar or one value per element
    sim3D_f_cooling_down: Union[float, List[float]] = None
    sim3D_f_cooling_up: Union[float, List[float]] = None
    sim3D_f_cooling_left: Union[float, List[float]] = None
    sim3D_f_cooling_right: Union[float, List[float]] = None
    sim3D_f_cooling_LeadEnds: List[int] = []
    sim3D_fExToIns: float = None
    sim3D_fExUD: float = None
    sim3D_fExLR: float = None
    # Mesh controls
    sim3D_min_ds_coarse: float = None
    sim3D_min_ds_fine: float = None
    sim3D_min_nodesPerStraightPart: int = None
    sim3D_min_nodesPerEndsPart: int = None
    sim3D_idxFinerMeshHalfTurn: List[int] = []
    sim3D_flag_checkNodeProximity: int = None
    sim3D_nodeProximityThreshold: float = None
    # Temperature-pulse and short-circuit scenario definition
    sim3D_Tpulse_sPosition: float = None
    sim3D_Tpulse_peakT: float = None
    sim3D_Tpulse_width: float = None
    sim3D_tShortCircuit: float = None
    sim3D_coilSectionsShortCircuit: List[int] = []
    sim3D_R_shortCircuit: float = None
    sim3D_shortCircuitPosition: Union[float, List[List[float]]] = None
    # Output options
    sim3D_durationGIF: float = None
    sim3D_flag_saveFigures: int = None
    sim3D_flag_saveGIF: int = None
    sim3D_flag_VisualizeGeometry3D: int = None
    sim3D_flag_SaveGeometry3D: int = None
class PlotsLEDET(BaseModel):
    """
    Level 2: Class for plotting parameters in lEDET

    Parallel lists, one entry per requested plot; names mirror the
    corresponding LEDET "Plots" input variables.
    """
    suffixPlot: List[str] = []
    typePlot: List[int] = []
    outputPlotSubfolderPlot: List[str] = []
    variableToPlotPlot: List[str] = []
    selectedStrandsPlot: List[str] = []
    selectedTimesPlot: List[str] = []
    labelColorBarPlot: List[str] = []
    minColorBarPlot: List[str] = []
    maxColorBarPlot: List[str] = []
    MinMaxXYPlot: List[int] = []
    flagSavePlot: List[int] = []
    flagColorPlot: List[int] = []
    flagInvisiblePlot: List[int] = []
class VariablesToSaveLEDET(BaseModel):
    """
    Level 2: Class for variables to save in lEDET

    Parallel lists; names mirror the corresponding LEDET "Variables" input
    entries.  The writeToMesh_* lists configure export of selected variables
    onto externally supplied mesh positions.
    """
    variableToSaveTxt: List[str] = []
    typeVariableToSaveTxt: List[int] = []
    variableToInitialize: List[str] = []
    writeToMesh_fileNameMeshPositions: List[str] = []
    writeToMesh_suffixFileNameOutput: List[str] = []
    writeToMesh_selectedVariables: List[str] = []
    writeToMesh_selectedTimeSteps: List[str] = []
    writeToMesh_selectedMethod: List[str] = []
class LEDETOptions(BaseModel):
    """
    Level 1: Class for LEDET options

    Aggregates all Level-2 option groups; every field defaults to an empty
    instance of its group so the full tree always exists.
    """
    time_vector: TimeVectorLEDET = TimeVectorLEDET()
    magnet_inductance: MagnetInductance = MagnetInductance()
    heat_exchange: HeatExchange = HeatExchange()
    conductor_geometry_used_for_ISCL: ConductorGeometry = ConductorGeometry()
    field_map_files: FieldMapFilesLEDET = FieldMapFilesLEDET()
    input_generation_options: InputGenerationOptionsLEDET = InputGenerationOptionsLEDET()
    simulation: SimulationLEDET = SimulationLEDET()
    physics: PhysicsLEDET = PhysicsLEDET()
    quench_initiation: QuenchInitializationLEDET = QuenchInitializationLEDET()
    post_processing: PostProcessingLEDET = PostProcessingLEDET()
    simulation_3D: Simulation3DLEDET = Simulation3DLEDET()
    plots: PlotsLEDET = PlotsLEDET()
    variables_to_save: VariablesToSaveLEDET = VariablesToSaveLEDET()
/Microbe-1.2.tar.gz/Microbe-1.2/microbe/static/js/vizhash.min.js | var hexcase=0;var b64pad="";function hex_md5(a){return rstr2hex(rstr_md5(str2rstr_utf8(a)))}function rstr_md5(a){return binl2rstr(binl_md5(rstr2binl(a),a.length*8))}function rstr2hex(c){try{hexcase}catch(g){hexcase=0}var f=hexcase?"0123456789ABCDEF":"0123456789abcdef";var b="";var a;for(var d=0;d<c.length;d++){a=c.charCodeAt(d);b+=f.charAt((a>>>4)&15)+f.charAt(a&15)}return b}function str2rstr_utf8(c){var b="";var d=-1;var a,e;while(++d<c.length){a=c.charCodeAt(d);e=d+1<c.length?c.charCodeAt(d+1):0;if(55296<=a&&a<=56319&&56320<=e&&e<=57343){a=65536+((a&1023)<<10)+(e&1023);d++}if(a<=127){b+=String.fromCharCode(a)}else{if(a<=2047){b+=String.fromCharCode(192|((a>>>6)&31),128|(a&63))}else{if(a<=65535){b+=String.fromCharCode(224|((a>>>12)&15),128|((a>>>6)&63),128|(a&63))}else{if(a<=2097151){b+=String.fromCharCode(240|((a>>>18)&7),128|((a>>>12)&63),128|((a>>>6)&63),128|(a&63))}}}}}return b}function rstr2binl(b){var a=Array(b.length>>2);for(var c=0;c<a.length;c++){a[c]=0}for(var c=0;c<b.length*8;c+=8){a[c>>5]|=(b.charCodeAt(c/8)&255)<<(c%32)}return a}function binl2rstr(b){var a="";for(var c=0;c<b.length*32;c+=8){a+=String.fromCharCode((b[c>>5]>>>(c%32))&255)}return a}function binl_md5(p,k){p[k>>5]|=128<<((k)%32);p[(((k+64)>>>9)<<4)+14]=k;var o=1732584193;var n=-271733879;var m=-1732584194;var l=271733878;for(var g=0;g<p.length;g+=16){var j=o;var h=n;var f=m;var 
e=l;o=md5_ff(o,n,m,l,p[g+0],7,-680876936);l=md5_ff(l,o,n,m,p[g+1],12,-389564586);m=md5_ff(m,l,o,n,p[g+2],17,606105819);n=md5_ff(n,m,l,o,p[g+3],22,-1044525330);o=md5_ff(o,n,m,l,p[g+4],7,-176418897);l=md5_ff(l,o,n,m,p[g+5],12,1200080426);m=md5_ff(m,l,o,n,p[g+6],17,-1473231341);n=md5_ff(n,m,l,o,p[g+7],22,-45705983);o=md5_ff(o,n,m,l,p[g+8],7,1770035416);l=md5_ff(l,o,n,m,p[g+9],12,-1958414417);m=md5_ff(m,l,o,n,p[g+10],17,-42063);n=md5_ff(n,m,l,o,p[g+11],22,-1990404162);o=md5_ff(o,n,m,l,p[g+12],7,1804603682);l=md5_ff(l,o,n,m,p[g+13],12,-40341101);m=md5_ff(m,l,o,n,p[g+14],17,-1502002290);n=md5_ff(n,m,l,o,p[g+15],22,1236535329);o=md5_gg(o,n,m,l,p[g+1],5,-165796510);l=md5_gg(l,o,n,m,p[g+6],9,-1069501632);m=md5_gg(m,l,o,n,p[g+11],14,643717713);n=md5_gg(n,m,l,o,p[g+0],20,-373897302);o=md5_gg(o,n,m,l,p[g+5],5,-701558691);l=md5_gg(l,o,n,m,p[g+10],9,38016083);m=md5_gg(m,l,o,n,p[g+15],14,-660478335);n=md5_gg(n,m,l,o,p[g+4],20,-405537848);o=md5_gg(o,n,m,l,p[g+9],5,568446438);l=md5_gg(l,o,n,m,p[g+14],9,-1019803690);m=md5_gg(m,l,o,n,p[g+3],14,-187363961);n=md5_gg(n,m,l,o,p[g+8],20,1163531501);o=md5_gg(o,n,m,l,p[g+13],5,-1444681467);l=md5_gg(l,o,n,m,p[g+2],9,-51403784);m=md5_gg(m,l,o,n,p[g+7],14,1735328473);n=md5_gg(n,m,l,o,p[g+12],20,-1926607734);o=md5_hh(o,n,m,l,p[g+5],4,-378558);l=md5_hh(l,o,n,m,p[g+8],11,-2022574463);m=md5_hh(m,l,o,n,p[g+11],16,1839030562);n=md5_hh(n,m,l,o,p[g+14],23,-35309556);o=md5_hh(o,n,m,l,p[g+1],4,-1530992060);l=md5_hh(l,o,n,m,p[g+4],11,1272893353);m=md5_hh(m,l,o,n,p[g+7],16,-155497632);n=md5_hh(n,m,l,o,p[g+10],23,-1094730640);o=md5_hh(o,n,m,l,p[g+13],4,681279174);l=md5_hh(l,o,n,m,p[g+0],11,-358537222);m=md5_hh(m,l,o,n,p[g+3],16,-722521979);n=md5_hh(n,m,l,o,p[g+6],23,76029189);o=md5_hh(o,n,m,l,p[g+9],4,-640364487);l=md5_hh(l,o,n,m,p[g+12],11,-421815835);m=md5_hh(m,l,o,n,p[g+15],16,530742520);n=md5_hh(n,m,l,o,p[g+2],23,-995338651);o=md5_ii(o,n,m,l,p[g+0],6,-198630844);l=md5_ii(l,o,n,m,p[g+7],10,1126891415);m=md5_ii(m,l,o,n,p[g+14],15,-1416354905);n=md5_ii(n,m
,l,o,p[g+5],21,-57434055);o=md5_ii(o,n,m,l,p[g+12],6,1700485571);l=md5_ii(l,o,n,m,p[g+3],10,-1894986606);m=md5_ii(m,l,o,n,p[g+10],15,-1051523);n=md5_ii(n,m,l,o,p[g+1],21,-2054922799);o=md5_ii(o,n,m,l,p[g+8],6,1873313359);l=md5_ii(l,o,n,m,p[g+15],10,-30611744);m=md5_ii(m,l,o,n,p[g+6],15,-1560198380);n=md5_ii(n,m,l,o,p[g+13],21,1309151649);o=md5_ii(o,n,m,l,p[g+4],6,-145523070);l=md5_ii(l,o,n,m,p[g+11],10,-1120210379);m=md5_ii(m,l,o,n,p[g+2],15,718787259);n=md5_ii(n,m,l,o,p[g+9],21,-343485551);o=safe_add(o,j);n=safe_add(n,h);m=safe_add(m,f);l=safe_add(l,e)}return Array(o,n,m,l)}function md5_cmn(h,e,d,c,g,f){return safe_add(bit_rol(safe_add(safe_add(e,h),safe_add(c,f)),g),d)}function md5_ff(g,f,k,j,e,i,h){return md5_cmn((f&k)|((~f)&j),g,f,e,i,h)}function md5_gg(g,f,k,j,e,i,h){return md5_cmn((f&j)|(k&(~j)),g,f,e,i,h)}function md5_hh(g,f,k,j,e,i,h){return md5_cmn(f^k^j,g,f,e,i,h)}function md5_ii(g,f,k,j,e,i,h){return md5_cmn(k^(f|(~j)),g,f,e,i,h)}function safe_add(a,d){var c=(a&65535)+(d&65535);var b=(a>>16)+(d>>16)+(c>>16);return(b<<16)|(c&65535)}function bit_rol(a,b){return(a<<b)|(a>>>(32-b))}function hex_sha1(a){return rstr2hex(rstr_sha1(str2rstr_utf8(a)))}function rstr_sha1(a){return binb2rstr(binb_sha1(rstr2binb(a),a.length*8))}function rstr2binb(b){var a=Array(b.length>>2);for(var c=0;c<a.length;c++){a[c]=0}for(var c=0;c<b.length*8;c+=8){a[c>>5]|=(b.charCodeAt(c/8)&255)<<(24-c%32)}return a}function binb2rstr(b){var a="";for(var c=0;c<b.length*32;c+=8){a+=String.fromCharCode((b[c>>5]>>>(24-c%32))&255)}return a}function binb_sha1(v,o){v[o>>5]|=128<<(24-o%32);v[((o+64>>9)<<4)+15]=o;var y=Array(80);var u=1732584193;var s=-271733879;var r=-1732584194;var q=271733878;var p=-1009589776;for(var l=0;l<v.length;l+=16){var n=u;var m=s;var k=r;var h=q;var f=p;for(var g=0;g<80;g++){if(g<16){y[g]=v[l+g]}else{y[g]=bit_rol(y[g-3]^y[g-8]^y[g-14]^y[g-16],1)}var 
z=safe_add(safe_add(bit_rol(u,5),sha1_ft(g,s,r,q)),safe_add(safe_add(p,y[g]),sha1_kt(g)));p=q;q=r;r=bit_rol(s,30);s=u;u=z}u=safe_add(u,n);s=safe_add(s,m);r=safe_add(r,k);q=safe_add(q,h);p=safe_add(p,f)}return Array(u,s,r,q,p)}function sha1_ft(e,a,g,f){if(e<20){return(a&g)|((~a)&f)}if(e<40){return a^g^f}if(e<60){return(a&g)|(a&f)|(g&f)}return a^g^f}function sha1_kt(a){return(a<20)?1518500249:(a<40)?1859775393:(a<60)?-1894007588:-899497514};
// vizhash: deterministic "identicon"-style image generator driven by a text hash.
//  - textHash(d): builds a hex digest string from hex_sha1(d)+hex_md5(d), then appends
//    its own reversal; exposes .value, .toString(), and .intArray, a cyclic iterator
//    whose next() yields successive byte values parsed from the hex digest.
//  - canvasHash(o, a, p): renders a canvas of size a x p (defaults 80x80, max 256x256,
//    otherwise throws "Max image size is 256 x 256"). Draws a linear gradient
//    background, then 8 hash-chosen shapes (rect / ellipse / polygon / arc via the
//    switch on t%7) with colors mutated per step, plus one final shape in a fresh
//    color. Returns {hash, width, height, canvas, context, toImage}.
//  - createCanvas/supportCanvas/color: DOM-canvas helpers; color(r,g,b) wraps an
//    {r,g,b} record whose toString() yields an "rgb(r,g,b)" CSS string.
// NOTE(review): every shape parameter is pulled from the cyclic hash stream, so the
// exact call order below is load-bearing — do not reorder k.next() consumers.
// Depends on hex_sha1/hex_md5 (defined earlier in this file) and on a DOM `document`.
vizhash={textHash:function(d){var a={};function c(f){var e=hex_sha1(f)+hex_md5(f);return e+e.split("").reverse().join("")}function b(){var g=a.value;var e=[];for(var f=0;f<g.length;f+=2){e.push(parseInt(g.substr(f,2),16))}return e}a.toString=function(){return a.value};a.value=c(d);a.intArray=function(){var e={values:b(),index:0};e.next=function(){var f=e.values[e.index];e.index+=1;e.index%=e.values.length;return f};return e}();return a},canvasHash:function(o,a,p){if(a>256||p>256){throw"Max image size is 256 x 256"}var g={hash:vizhash.textHash(o)},k=g.hash.intArray,f=vizhash.color(k.next(),k.next(),k.next());a=g.width=a||80;p=g.height=p||80;var c=g.canvas=vizhash.createCanvas(a,p);var b=g.context=c.getContext("2d");function n(){var r=k.next();return a*r/256}function j(){var r=k.next();return p*r/256}function m(t,A,w,s,H,z){var F=A-s/2,E=w-H/2,D=0.5522848;var v=(s/2)*D,r=(H/2)*D,G=F+s,C=E+H,B=F+s/2,u=E+H/2;t.beginPath();t.moveTo(F,u);t.bezierCurveTo(F,u-r,B-v,E,B,E);t.bezierCurveTo(B+v,E,G,u-r,G,u);t.bezierCurveTo(G,u+r,B+v,C,B,C);t.bezierCurveTo(B-v,C,F,u+r,F,u);t.fillStyle=z.toString();t.fill();t.closePath()}function q(t,u,r){t.beginPath();for(var s=0;s<u.length;s=s+2){t.lineTo(u[s],u[s+1])}t.fillStyle=r.toString();t.fill();t.closePath()}function i(u,x,w,v,r,y,s,t){u.beginPath();u.arc(x,w,v/2,y,s,false);u.fillStyle=t.toString();u.fill();u.closePath()}function h(v,t,w,s,u,r){v.beginPath();v.rect(t,w,s-t,u-w);v.fillStyle=r.toString();v.fill();v.closePath()}function d(){if(k.next()%2){var r=b.createLinearGradient(0,0,0,p)}else{var r=b.createLinearGradient(0,0,a,p)}r.addColorStop(0,f.toString());r.addColorStop(1,"rgb(0,0,0)");b.rect(0,0,a,p);b.fillStyle=r;b.fill()}function l(t,s){switch(t%7){case 0:h(b,n(),j(),n(),j(),s);break;case 1:case 2:m(b,n(),j(),n(),j(),s);break;case 3:var u=[n(),j(),n(),j(),n(),j(),n(),j()];q(b,u,s);break;case 4:case 5:case 6:var v=k.next()*2*Math.PI/256;var r=v+k.next()*Math.PI/256;i(b,n(),j(),n(),j(),v,r,s);break}}function e(){for(var 
t=0;t<=7;t++){var s=k.next();f.r=Math.round((f.r+k.next()/25)%256);f.g=Math.round((f.g+k.next()/25)%256);f.b=Math.round((f.b+k.next()/25)%256);l(s,f)}var r=vizhash.color(k.next(),k.next(),k.next());l(k.next(),r)}g.toImage=function(){var r=document.createElement("img");r.src=c.toDataURL("image/png");return r};d();e();return g},createCanvas:function(c,a){var b=document.createElement("canvas");b.width=c;b.height=a;return b},supportCanvas:function(){var a=document.createElement("canvas");return !!(a.getContext&&a.getContext("2d"))},color:function(e,d,a){var c={r:e,g:d,b:a};c.toString=function(){return"rgb("+c.r+","+c.g+","+c.b+")"};return c}};
/Argonaut-0.3.4.tar.gz/Argonaut-0.3.4/argonaut/public/ckeditor/_source/lang/fr-ca.js | /*
Copyright (c) 2003-2010, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
/**
* @fileOverview Defines the {@link CKEDITOR.lang} object, for the
* Canadian French language.
*/
/**#@+
@type String
@example
*/
/**
 * Contains the dictionary of language entries.
* @namespace
*/
CKEDITOR.lang['fr-ca'] =
{
/**
* The language reading direction. Possible values are "rtl" for
* Right-To-Left languages (like Arabic) and "ltr" for Left-To-Right
* languages (like English).
* @default 'ltr'
*/
dir : 'ltr',
/*
* Screenreader titles. Please note that screenreaders are not always capable
* of reading non-English words. So be careful while translating it.
*/
editorTitle : 'Rich text editor, %1, press ALT 0 for help.', // MISSING
// ARIA descriptions.
toolbar : 'Toolbar', // MISSING
editor : 'Rich Text Editor', // MISSING
// Toolbar buttons without dialogs.
source : 'Source',
newPage : 'Nouvelle page',
save : 'Sauvegarder',
preview : 'Previsualiser',
cut : 'Couper',
copy : 'Copier',
paste : 'Coller',
print : 'Imprimer',
underline : 'Souligné',
bold : 'Gras',
italic : 'Italique',
selectAll : 'Tout sélectionner',
removeFormat : 'Supprimer le formatage',
strike : 'Barrer',
subscript : 'Indice',
superscript : 'Exposant',
horizontalrule : 'Insérer un séparateur',
pagebreak : 'Insérer un saut de page',
unlink : 'Supprimer le lien',
undo : 'Annuler',
redo : 'Refaire',
// Common messages and labels.
common :
{
browseServer : 'Parcourir le serveur',
url : 'URL',
protocol : 'Protocole',
upload : 'Télécharger',
uploadSubmit : 'Envoyer sur le serveur',
image : 'Image',
flash : 'Animation Flash',
form : 'Formulaire',
checkbox : 'Case à cocher',
radio : 'Bouton radio',
textField : 'Champ texte',
textarea : 'Zone de texte',
hiddenField : 'Champ caché',
button : 'Bouton',
select : 'Champ de sélection',
imageButton : 'Bouton image',
notSet : '<Par défaut>',
id : 'Id',
name : 'Nom',
langDir : 'Sens d\'écriture',
langDirLtr : 'De gauche à droite (LTR)',
langDirRtl : 'De droite à gauche (RTL)',
langCode : 'Code langue',
longDescr : 'URL de description longue',
cssClass : 'Classes de feuilles de style',
advisoryTitle : 'Titre',
cssStyle : 'Style',
ok : 'OK',
cancel : 'Annuler',
close : 'Close', // MISSING
preview : 'Preview', // MISSING
generalTab : 'Général',
advancedTab : 'Avancée',
validateNumberFailed : 'This value is not a number.', // MISSING
confirmNewPage : 'Any unsaved changes to this content will be lost. Are you sure you want to load new page?', // MISSING
confirmCancel : 'Some of the options have been changed. Are you sure to close the dialog?', // MISSING
options : 'Options', // MISSING
target : 'Target', // MISSING
targetNew : 'New Window (_blank)', // MISSING
targetTop : 'Topmost Window (_top)', // MISSING
targetSelf : 'Same Window (_self)', // MISSING
targetParent : 'Parent Window (_parent)', // MISSING
langDirLTR : 'Left to Right (LTR)', // MISSING
langDirRTL : 'Right to Left (RTL)', // MISSING
styles : 'Style', // MISSING
cssClasses : 'Stylesheet Classes', // MISSING
// Put the voice-only part of the label in the span.
unavailable : '%1<span class="cke_accessibility">, unavailable</span>' // MISSING
},
contextmenu :
{
options : 'Context Menu Options' // MISSING
},
// Special char dialog.
specialChar :
{
toolbar : 'Insérer un caractère spécial',
title : 'Insérer un caractère spécial',
options : 'Special Character Options' // MISSING
},
// Link dialog.
link :
{
toolbar : 'Insérer/modifier le lien',
other : '<other>', // MISSING
menu : 'Modifier le lien',
title : 'Propriétés du lien',
info : 'Informations sur le lien',
target : 'Destination',
upload : 'Télécharger',
advanced : 'Avancée',
type : 'Type de lien',
toUrl : 'URL', // MISSING
toAnchor : 'Ancre dans cette page',
toEmail : 'E-Mail',
targetFrame : '<Cadre>',
targetPopup : '<fenêtre popup>',
targetFrameName : 'Nom du cadre de destination',
targetPopupName : 'Nom de la fenêtre popup',
popupFeatures : 'Caractéristiques de la fenêtre popup',
popupResizable : 'Resizable', // MISSING
popupStatusBar : 'Barre d\'état',
popupLocationBar: 'Barre d\'adresses',
popupToolbar : 'Barre d\'outils',
popupMenuBar : 'Barre de menu',
popupFullScreen : 'Plein écran (IE)',
popupScrollBars : 'Barres de défilement',
popupDependent : 'Dépendante (Netscape)',
popupWidth : 'Largeur',
popupLeft : 'Position à partir de la gauche',
popupHeight : 'Hauteur',
popupTop : 'Position à partir du haut',
id : 'Id', // MISSING
langDir : 'Sens d\'écriture',
langDirLTR : 'De gauche à droite (LTR)',
langDirRTL : 'De droite à gauche (RTL)',
acccessKey : 'Équivalent clavier',
name : 'Nom',
langCode : 'Sens d\'écriture',
tabIndex : 'Ordre de tabulation',
advisoryTitle : 'Titre',
advisoryContentType : 'Type de contenu',
cssClasses : 'Classes de feuilles de style',
charset : 'Encodage de caractère',
styles : 'Style',
selectAnchor : 'Sélectionner une ancre',
anchorName : 'Par nom',
anchorId : 'Par id',
emailAddress : 'Adresse E-Mail',
emailSubject : 'Sujet du message',
emailBody : 'Corps du message',
noAnchors : '(Pas d\'ancre disponible dans le document)',
noUrl : 'Veuillez saisir l\'URL',
noEmail : 'Veuillez saisir l\'adresse e-mail'
},
// Anchor dialog
anchor :
{
toolbar : 'Insérer/modifier l\'ancre',
menu : 'Propriétés de l\'ancre',
title : 'Propriétés de l\'ancre',
name : 'Nom de l\'ancre',
errorName : 'Veuillez saisir le nom de l\'ancre'
},
// List style dialog
list:
{
numberedTitle : 'Numbered List Properties', // MISSING
bulletedTitle : 'Bulleted List Properties', // MISSING
type : 'Type', // MISSING
start : 'Start', // MISSING
validateStartNumber :'List start number must be a whole number.', // MISSING
circle : 'Circle', // MISSING
disc : 'Disc', // MISSING
square : 'Square', // MISSING
none : 'None', // MISSING
notset : '<not set>', // MISSING
armenian : 'Armenian numbering', // MISSING
georgian : 'Georgian numbering (an, ban, gan, etc.)', // MISSING
lowerRoman : 'Lower Roman (i, ii, iii, iv, v, etc.)', // MISSING
upperRoman : 'Upper Roman (I, II, III, IV, V, etc.)', // MISSING
lowerAlpha : 'Lower Alpha (a, b, c, d, e, etc.)', // MISSING
upperAlpha : 'Upper Alpha (A, B, C, D, E, etc.)', // MISSING
lowerGreek : 'Lower Greek (alpha, beta, gamma, etc.)', // MISSING
decimal : 'Decimal (1, 2, 3, etc.)', // MISSING
decimalLeadingZero : 'Decimal leading zero (01, 02, 03, etc.)' // MISSING
},
// Find And Replace Dialog
findAndReplace :
{
title : 'Chercher et Remplacer',
find : 'Chercher',
replace : 'Remplacer',
findWhat : 'Rechercher:',
replaceWith : 'Remplacer par:',
notFoundMsg : 'Le texte indiqué est introuvable.',
matchCase : 'Respecter la casse',
matchWord : 'Mot entier',
matchCyclic : 'Match cyclic', // MISSING
replaceAll : 'Tout remplacer',
replaceSuccessMsg : '%1 occurrence(s) replaced.' // MISSING
},
// Table Dialog
table :
{
toolbar : 'Tableau',
title : 'Propriétés du tableau',
menu : 'Propriétés du tableau',
deleteTable : 'Supprimer le tableau',
rows : 'Lignes',
columns : 'Colonnes',
border : 'Taille de la bordure',
align : 'Alignement',
alignLeft : 'Gauche',
alignCenter : 'Centré',
alignRight : 'Droite',
width : 'Largeur',
widthPx : 'pixels',
widthPc : 'pourcentage',
widthUnit : 'width unit', // MISSING
height : 'Hauteur',
cellSpace : 'Espacement',
cellPad : 'Contour',
caption : 'Titre',
summary : 'Résumé',
headers : 'Headers', // MISSING
headersNone : 'None', // MISSING
headersColumn : 'First column', // MISSING
headersRow : 'First Row', // MISSING
headersBoth : 'Both', // MISSING
invalidRows : 'Number of rows must be a number greater than 0.', // MISSING
invalidCols : 'Number of columns must be a number greater than 0.', // MISSING
invalidBorder : 'Border size must be a number.', // MISSING
invalidWidth : 'Table width must be a number.', // MISSING
invalidHeight : 'Table height must be a number.', // MISSING
invalidCellSpacing : 'Cell spacing must be a number.', // MISSING
invalidCellPadding : 'Cell padding must be a number.', // MISSING
cell :
{
menu : 'Cellule',
insertBefore : 'Insérer une cellule avant',
insertAfter : 'Insérer une cellule après',
deleteCell : 'Supprimer des cellules',
merge : 'Fusionner les cellules',
mergeRight : 'Fusionner à droite',
mergeDown : 'Fusionner en bas',
splitHorizontal : 'Scinder la cellule horizontalement',
splitVertical : 'Scinder la cellule verticalement',
title : 'Cell Properties', // MISSING
cellType : 'Cell Type', // MISSING
rowSpan : 'Rows Span', // MISSING
colSpan : 'Columns Span', // MISSING
wordWrap : 'Word Wrap', // MISSING
hAlign : 'Horizontal Alignment', // MISSING
vAlign : 'Vertical Alignment', // MISSING
alignTop : 'Top', // MISSING
alignMiddle : 'Middle', // MISSING
alignBottom : 'Bottom', // MISSING
alignBaseline : 'Baseline', // MISSING
bgColor : 'Background Color', // MISSING
borderColor : 'Border Color', // MISSING
data : 'Data', // MISSING
header : 'Header', // MISSING
yes : 'Yes', // MISSING
no : 'No', // MISSING
invalidWidth : 'Cell width must be a number.', // MISSING
invalidHeight : 'Cell height must be a number.', // MISSING
invalidRowSpan : 'Rows span must be a whole number.', // MISSING
invalidColSpan : 'Columns span must be a whole number.', // MISSING
chooseColor : 'Choose' // MISSING
},
row :
{
menu : 'Ligne',
insertBefore : 'Insérer une ligne avant',
insertAfter : 'Insérer une ligne après',
deleteRow : 'Supprimer des lignes'
},
column :
{
menu : 'Colonne',
insertBefore : 'Insérer une colonne avant',
insertAfter : 'Insérer une colonne après',
deleteColumn : 'Supprimer des colonnes'
}
},
// Button Dialog.
button :
{
title : 'Propriétés du bouton',
text : 'Texte (Valeur)',
type : 'Type',
typeBtn : 'Bouton',
typeSbm : 'Soumettre',
typeRst : 'Réinitialiser'
},
// Checkbox and Radio Button Dialogs.
checkboxAndRadio :
{
checkboxTitle : 'Propriétés de la case à cocher',
radioTitle : 'Propriétés du bouton radio',
value : 'Valeur',
selected : 'Sélectionné'
},
// Form Dialog.
form :
{
title : 'Propriétés du formulaire',
menu : 'Propriétés du formulaire',
action : 'Action',
method : 'Méthode',
encoding : 'Encoding' // MISSING
},
// Select Field Dialog.
select :
{
title : 'Propriétés de la liste/du menu',
selectInfo : 'Info',
opAvail : 'Options disponibles',
value : 'Valeur',
size : 'Taille',
lines : 'lignes',
chkMulti : 'Sélection multiple',
opText : 'Texte',
opValue : 'Valeur',
btnAdd : 'Ajouter',
btnModify : 'Modifier',
btnUp : 'Monter',
btnDown : 'Descendre',
btnSetValue : 'Valeur sélectionnée',
btnDelete : 'Supprimer'
},
// Textarea Dialog.
textarea :
{
title : 'Propriétés de la zone de texte',
cols : 'Colonnes',
rows : 'Lignes'
},
// Text Field Dialog.
textfield :
{
title : 'Propriétés du champ texte',
name : 'Nom',
value : 'Valeur',
charWidth : 'Largeur en caractères',
maxChars : 'Nombre maximum de caractères',
type : 'Type',
typeText : 'Texte',
typePass : 'Mot de passe'
},
// Hidden Field Dialog.
hidden :
{
title : 'Propriétés du champ caché',
name : 'Nom',
value : 'Valeur'
},
// Image Dialog.
image :
{
title : 'Propriétés de l\'image',
titleButton : 'Propriétés du bouton image',
menu : 'Propriétés de l\'image',
infoTab : 'Informations sur l\'image',
btnUpload : 'Envoyer sur le serveur',
upload : 'Télécharger',
alt : 'Texte de remplacement',
width : 'Largeur',
height : 'Hauteur',
lockRatio : 'Garder les proportions',
unlockRatio : 'Unlock Ratio', // MISSING
resetSize : 'Taille originale',
border : 'Bordure',
hSpace : 'Espacement horizontal',
vSpace : 'Espacement vertical',
align : 'Alignement',
alignLeft : 'Gauche',
alignRight : 'Droite',
alertUrl : 'Veuillez saisir l\'URL de l\'image',
linkTab : 'Lien',
button2Img : 'Do you want to transform the selected image button on a simple image?', // MISSING
img2Button : 'Do you want to transform the selected image on a image button?', // MISSING
urlMissing : 'Image source URL is missing.', // MISSING
validateWidth : 'Width must be a whole number.', // MISSING
validateHeight : 'Height must be a whole number.', // MISSING
validateBorder : 'Border must be a whole number.', // MISSING
validateHSpace : 'HSpace must be a whole number.', // MISSING
validateVSpace : 'VSpace must be a whole number.' // MISSING
},
// Flash Dialog
flash :
{
properties : 'Propriétés de l\'animation Flash',
propertiesTab : 'Properties', // MISSING
title : 'Propriétés de l\'animation Flash',
chkPlay : 'Lecture automatique',
chkLoop : 'Boucle',
chkMenu : 'Activer le menu Flash',
chkFull : 'Allow Fullscreen', // MISSING
scale : 'Affichage',
scaleAll : 'Par défaut (tout montrer)',
scaleNoBorder : 'Sans bordure',
scaleFit : 'Ajuster aux dimensions',
access : 'Script Access', // MISSING
accessAlways : 'Always', // MISSING
accessSameDomain: 'Same domain', // MISSING
accessNever : 'Never', // MISSING
align : 'Alignement',
alignLeft : 'Gauche',
alignAbsBottom : 'Abs Bas',
alignAbsMiddle : 'Abs Milieu',
alignBaseline : 'Bas du texte',
alignBottom : 'Bas',
alignMiddle : 'Milieu',
alignRight : 'Droite',
alignTextTop : 'Haut du texte',
alignTop : 'Haut',
quality : 'Quality', // MISSING
qualityBest : 'Best', // MISSING
qualityHigh : 'High', // MISSING
qualityAutoHigh : 'Auto High', // MISSING
qualityMedium : 'Medium', // MISSING
qualityAutoLow : 'Auto Low', // MISSING
qualityLow : 'Low', // MISSING
windowModeWindow: 'Window', // MISSING
windowModeOpaque: 'Opaque', // MISSING
windowModeTransparent : 'Transparent', // MISSING
windowMode : 'Window mode', // MISSING
flashvars : 'Variables for Flash', // MISSING
bgcolor : 'Couleur de fond',
width : 'Largeur',
height : 'Hauteur',
hSpace : 'Espacement horizontal',
vSpace : 'Espacement vertical',
validateSrc : 'Veuillez saisir l\'URL',
validateWidth : 'Width must be a number.', // MISSING
validateHeight : 'Height must be a number.', // MISSING
validateHSpace : 'HSpace must be a number.', // MISSING
validateVSpace : 'VSpace must be a number.' // MISSING
},
// Speller Pages Dialog
spellCheck :
{
toolbar : 'Orthographe',
title : 'Spell Check', // MISSING
notAvailable : 'Sorry, but service is unavailable now.', // MISSING
errorLoading : 'Error loading application service host: %s.', // MISSING
notInDic : 'Pas dans le dictionnaire',
changeTo : 'Changer en',
btnIgnore : 'Ignorer',
btnIgnoreAll : 'Ignorer tout',
btnReplace : 'Remplacer',
btnReplaceAll : 'Remplacer tout',
btnUndo : 'Annuler',
noSuggestions : '- Pas de suggestion -',
progress : 'Vérification d\'orthographe en cours...',
noMispell : 'Vérification d\'orthographe terminée: pas d\'erreur trouvée',
noChanges : 'Vérification d\'orthographe terminée: Pas de modifications',
oneChange : 'Vérification d\'orthographe terminée: Un mot modifié',
manyChanges : 'Vérification d\'orthographe terminée: %1 mots modifiés',
ieSpellDownload : 'Le Correcteur d\'orthographe n\'est pas installé. Souhaitez-vous le télécharger maintenant?'
},
smiley :
{
toolbar : 'Emoticon',
title : 'Insérer un Emoticon',
options : 'Smiley Options' // MISSING
},
elementsPath :
{
eleLabel : 'Elements path', // MISSING
eleTitle : '%1 element' // MISSING
},
numberedlist : 'Liste numérotée',
bulletedlist : 'Liste à puces',
indent : 'Augmenter le retrait',
outdent : 'Diminuer le retrait',
justify :
{
left : 'Aligner à gauche',
center : 'Centrer',
right : 'Aligner à Droite',
block : 'Texte justifié'
},
blockquote : 'Citation',
clipboard :
{
title : 'Coller',
cutError : 'Les paramètres de sécurité de votre navigateur empêchent l\'éditeur de couper automatiquement vos données. Veuillez utiliser les équivalents claviers (Ctrl/Cmd+X).',
copyError : 'Les paramètres de sécurité de votre navigateur empêchent l\'éditeur de copier automatiquement vos données. Veuillez utiliser les équivalents claviers (Ctrl/Cmd+C).',
pasteMsg : 'Veuillez coller dans la zone ci-dessous en utilisant le clavier (<STRONG>Ctrl/Cmd+V</STRONG>) et appuyer sur <STRONG>OK</STRONG>.',
securityMsg : 'A cause des paramètres de sécurité de votre navigateur, l\'éditeur ne peut accéder au presse-papier directement. Vous devez coller à nouveau le contenu dans cette fenêtre.',
pasteArea : 'Paste Area' // MISSING
},
pastefromword :
{
confirmCleanup : 'The text you want to paste seems to be copied from Word. Do you want to clean it before pasting?', // MISSING
toolbar : 'Coller en tant que Word (formaté)',
title : 'Coller en tant que Word (formaté)',
error : 'It was not possible to clean up the pasted data due to an internal error' // MISSING
},
pasteText :
{
button : 'Coller comme texte',
title : 'Coller comme texte'
},
templates :
{
button : 'Modèles',
title : 'Modèles de contenu',
options : 'Template Options', // MISSING
insertOption : 'Remplacer tout le contenu actuel',
selectPromptMsg : 'Sélectionner le modèle à ouvrir dans l\'éditeur<br>(le contenu actuel sera remplacé):',
emptyListMsg : '(Aucun modèle disponible)'
},
showBlocks : 'Afficher les blocs',
stylesCombo :
{
label : 'Style',
panelTitle : 'Formatting Styles', // MISSING
panelTitle1 : 'Block Styles', // MISSING
panelTitle2 : 'Inline Styles', // MISSING
panelTitle3 : 'Object Styles' // MISSING
},
format :
{
label : 'Format',
panelTitle : 'Format',
tag_p : 'Normal',
tag_pre : 'Formaté',
tag_address : 'Adresse',
tag_h1 : 'En-tête 1',
tag_h2 : 'En-tête 2',
tag_h3 : 'En-tête 3',
tag_h4 : 'En-tête 4',
tag_h5 : 'En-tête 5',
tag_h6 : 'En-tête 6',
tag_div : 'Normal (DIV)'
},
div :
{
title : 'Create Div Container', // MISSING
toolbar : 'Create Div Container', // MISSING
cssClassInputLabel : 'Stylesheet Classes', // MISSING
styleSelectLabel : 'Style', // MISSING
IdInputLabel : 'Id', // MISSING
languageCodeInputLabel : ' Language Code', // MISSING
inlineStyleInputLabel : 'Inline Style', // MISSING
advisoryTitleInputLabel : 'Advisory Title', // MISSING
langDirLabel : 'Language Direction', // MISSING
langDirLTRLabel : 'Left to Right (LTR)', // MISSING
langDirRTLLabel : 'Right to Left (RTL)', // MISSING
edit : 'Edit Div', // MISSING
remove : 'Remove Div' // MISSING
},
font :
{
label : 'Police',
voiceLabel : 'Font', // MISSING
panelTitle : 'Police'
},
fontSize :
{
label : 'Taille',
voiceLabel : 'Font Size', // MISSING
panelTitle : 'Taille'
},
colorButton :
{
textColorTitle : 'Couleur de caractère',
bgColorTitle : 'Couleur de fond',
panelTitle : 'Colors', // MISSING
auto : 'Automatique',
more : 'Plus de couleurs...'
},
colors :
{
'000' : 'Black', // MISSING
'800000' : 'Maroon', // MISSING
'8B4513' : 'Saddle Brown', // MISSING
'2F4F4F' : 'Dark Slate Gray', // MISSING
'008080' : 'Teal', // MISSING
'000080' : 'Navy', // MISSING
'4B0082' : 'Indigo', // MISSING
'696969' : 'Dark Gray', // MISSING
'B22222' : 'Fire Brick', // MISSING
'A52A2A' : 'Brown', // MISSING
'DAA520' : 'Golden Rod', // MISSING
'006400' : 'Dark Green', // MISSING
'40E0D0' : 'Turquoise', // MISSING
'0000CD' : 'Medium Blue', // MISSING
'800080' : 'Purple', // MISSING
'808080' : 'Gray', // MISSING
'F00' : 'Red', // MISSING
'FF8C00' : 'Dark Orange', // MISSING
'FFD700' : 'Gold', // MISSING
'008000' : 'Green', // MISSING
'0FF' : 'Cyan', // MISSING
'00F' : 'Blue', // MISSING
'EE82EE' : 'Violet', // MISSING
'A9A9A9' : 'Dim Gray', // MISSING
'FFA07A' : 'Light Salmon', // MISSING
'FFA500' : 'Orange', // MISSING
'FFFF00' : 'Yellow', // MISSING
'00FF00' : 'Lime', // MISSING
'AFEEEE' : 'Pale Turquoise', // MISSING
'ADD8E6' : 'Light Blue', // MISSING
'DDA0DD' : 'Plum', // MISSING
'D3D3D3' : 'Light Grey', // MISSING
'FFF0F5' : 'Lavender Blush', // MISSING
'FAEBD7' : 'Antique White', // MISSING
'FFFFE0' : 'Light Yellow', // MISSING
'F0FFF0' : 'Honeydew', // MISSING
'F0FFFF' : 'Azure', // MISSING
'F0F8FF' : 'Alice Blue', // MISSING
'E6E6FA' : 'Lavender', // MISSING
'FFF' : 'White' // MISSING
},
scayt :
{
title : 'Spell Check As You Type', // MISSING
opera_title : 'Not supported by Opera', // MISSING
enable : 'Enable SCAYT', // MISSING
disable : 'Disable SCAYT', // MISSING
about : 'About SCAYT', // MISSING
toggle : 'Toggle SCAYT', // MISSING
options : 'Options', // MISSING
langs : 'Languages', // MISSING
moreSuggestions : 'More suggestions', // MISSING
ignore : 'Ignore', // MISSING
ignoreAll : 'Ignore All', // MISSING
addWord : 'Add Word', // MISSING
emptyDic : 'Dictionary name should not be empty.', // MISSING
optionsTab : 'Options', // MISSING
allCaps : 'Ignore All-Caps Words', // MISSING
ignoreDomainNames : 'Ignore Domain Names', // MISSING
mixedCase : 'Ignore Words with Mixed Case', // MISSING
mixedWithDigits : 'Ignore Words with Numbers', // MISSING
languagesTab : 'Languages', // MISSING
dictionariesTab : 'Dictionaries', // MISSING
dic_field_name : 'Dictionary name', // MISSING
dic_create : 'Create', // MISSING
dic_restore : 'Restore', // MISSING
dic_delete : 'Delete', // MISSING
dic_rename : 'Rename', // MISSING
dic_info : 'Initially the User Dictionary is stored in a Cookie. However, Cookies are limited in size. When the User Dictionary grows to a point where it cannot be stored in a Cookie, then the dictionary may be stored on our server. To store your personal dictionary on our server you should specify a name for your dictionary. If you already have a stored dictionary, please type it\'s name and click the Restore button.', // MISSING
aboutTab : 'About' // MISSING
},
about :
{
title : 'About CKEditor', // MISSING
dlgTitle : 'About CKEditor', // MISSING
moreInfo : 'For licensing information please visit our web site:', // MISSING
copy : 'Copyright © $1. All rights reserved.' // MISSING
},
maximize : 'Maximize', // MISSING
minimize : 'Minimize', // MISSING
fakeobjects :
{
anchor : 'Anchor', // MISSING
flash : 'Flash Animation', // MISSING
div : 'Page Break', // MISSING
unknown : 'Unknown Object' // MISSING
},
resize : 'Drag to resize', // MISSING
colordialog :
{
title : 'Select color', // MISSING
options : 'Color Options', // MISSING
highlight : 'Highlight', // MISSING
selected : 'Selected Color', // MISSING
clear : 'Clear' // MISSING
},
toolbarCollapse : 'Collapse Toolbar', // MISSING
toolbarExpand : 'Expand Toolbar', // MISSING
bidi :
{
ltr : 'Text direction from left to right', // MISSING
rtl : 'Text direction from right to left' // MISSING
}
}; | PypiClean |
/more.jsonschema-0.2.tar.gz/more.jsonschema-0.2/CHANGES.rst | CHANGES
*******
0.2 (2020-04-26)
================
- **Removed**: Removed support for Python 2 and Python 3.4 and 3.5.
  You have to upgrade to Python 3.6 or newer if you want to use this version.
- Added support for Python 3.6, 3.7 and 3.8 and PyPy 3.6.
- Make Python 3.7 the default testing environment.
- Upgrade jsonschema to version 3.2.0.
- Make JSON Schema Draft 7 the default validator.
- Add integration for the Black code formatter.
0.1 (2017-03-17)
================
* initial public release.
| PypiClean |
/Pillow-SIMD-9.0.0.post0.tar.gz/Pillow-SIMD-9.0.0.post0/src/PIL/GimpGradientFile.py | from math import log, pi, sin, sqrt
from ._binary import o8
# Widths/denominators smaller than this are treated as zero to avoid
# division by (nearly) zero in the interpolation helpers below.
EPSILON = 1e-10

"""""" # Enable auto-doc for data member
def linear(middle, pos):
    """Piecewise-linear interpolation.

    Maps ``pos`` in [0, 1] to [0, 1] so that the result is 0.5 exactly at
    ``middle``; each half-interval is interpolated linearly.  Degenerate
    half-intervals (narrower than ``EPSILON``) clamp to 0.0 or 1.0.
    """
    if pos <= middle:
        # Left half: scale [0, middle] onto [0, 0.5].
        return 0.0 if middle < EPSILON else 0.5 * pos / middle

    # Right half: scale (middle, 1] onto (0.5, 1].
    offset = pos - middle
    span = 1.0 - middle
    return 1.0 if span < EPSILON else 0.5 + 0.5 * offset / span
def curved(middle, pos):
    """Power-curve interpolation: pos ** k, with k chosen so that the
    curve passes through 0.5 at ``middle`` (EPSILON guards log(0))."""
    exponent = log(0.5) / log(max(middle, EPSILON))
    return pos ** exponent
def sine(middle, pos):
    """Sinusoidal easing applied on top of the linear() ramp."""
    t = linear(middle, pos)
    return (sin(-pi / 2.0 + pi * t) + 1.0) / 2.0
def sphere_increasing(middle, pos):
    """Quarter-circle (increasing) easing of the linear() ramp."""
    t = linear(middle, pos) - 1.0
    return sqrt(1.0 - t ** 2)
def sphere_decreasing(middle, pos):
    """Quarter-circle (decreasing) easing of the linear() ramp."""
    t = linear(middle, pos)
    return 1.0 - sqrt(1.0 - t ** 2)
# Segment interpolation functions, indexed by the blend-type field (field 11)
# of a GIMP gradient segment line -- see GimpGradientFile.__init__ below.
SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing]

"""""" # Enable auto-doc for data member
class GradientFile:
    """Base class for gradient files parsed into a segment list.

    ``self.gradient`` is a list of ``(x0, x1, xm, rgb0, rgb1, segment)``
    tuples: ``x0``/``x1`` bound the segment on [0, 1], ``xm`` is its blend
    midpoint, ``rgb0``/``rgb1`` are the endpoint RGBA colors (floats in
    [0, 1]) and ``segment`` maps relative positions to a blend factor.
    """

    gradient = None

    def getpalette(self, entries=256):
        """Sample the gradient into ``entries`` evenly spaced RGBA colors.

        :param entries: number of palette entries to generate (must be > 1).
        :returns: ``(data, mode)`` where *data* is the raw RGBA byte string
                  and *mode* is always ``"RGBA"``.
        """
        pixels = []
        seg_index = 0
        x0, x1, xm, rgb0, rgb1, segment = self.gradient[seg_index]

        for step in range(entries):
            x = step / (entries - 1)

            # Advance to the segment whose [x0, x1] interval contains x.
            while x1 < x:
                seg_index += 1
                x0, x1, xm, rgb0, rgb1, segment = self.gradient[seg_index]

            width = x1 - x0
            if width < EPSILON:
                scale = segment(0.5, 0.5)
            else:
                scale = segment((xm - x0) / width, (x - x0) / width)

            # Interpolate each channel, round to 8 bits and emit one RGBA pixel.
            pixel = b"".join(
                o8(int(255 * ((rgb1[c] - rgb0[c]) * scale + rgb0[c]) + 0.5))
                for c in range(4)
            )
            pixels.append(pixel)

        return b"".join(pixels), "RGBA"
class GimpGradientFile(GradientFile):
    """File handler for GIMP's gradient format."""

    def __init__(self, fp):
        # Every GIMP gradient file starts with this magic tag.
        if fp.readline()[:13] != b"GIMP Gradient":
            raise SyntaxError("not a GIMP gradient file")

        line = fp.readline()

        # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do
        if line.startswith(b"Name: "):
            line = fp.readline().strip()

        segment_count = int(line)
        segments = []

        for _ in range(segment_count):
            fields = fp.readline().split()
            # First 11 fields: x0 xm x1, then two RGBA quadruples.
            numbers = [float(value) for value in fields[:11]]

            left, midpoint, right = numbers[0], numbers[1], numbers[2]
            left_rgba = numbers[3:7]
            right_rgba = numbers[7:11]

            # Field 11 selects the blend function; field 12 the color model.
            blend = SEGMENTS[int(fields[11])]

            if int(fields[12]) != 0:
                raise OSError("cannot handle HSV colour space")

            segments.append((left, right, midpoint, left_rgba, right_rgba, blend))

        self.gradient = segments
/django_bootstrap5-23.3.tar.gz/django_bootstrap5-23.3/CONTRIBUTING.md | # Contributing
Contributions are welcome, and they are greatly appreciated! Every
little bit helps, and credit will always be given.
You can contribute in many ways:
## Types of Contributions
### Report Bugs
Report bugs at <https://github.com/zostera/django-bootstrap5/issues>.
If you are reporting a bug, please include:
- Your operating system name and version.
- Any details about your local setup that might be helpful in troubleshooting.
- Detailed steps to reproduce the bug.
### Fix Bugs
Look through the GitHub issues for bugs. Anything tagged with \"bug\" is open to whoever wants to implement it.
### Implement Features
Look through the GitHub issues for features. Anything tagged with \"feature\" is open to whoever wants to implement it.
### Write Documentation
`django-bootstrap5` could always use more documentation, whether as part of the official django-bootstrap5 docs, in docstrings, or even on the web in blog posts, articles, and such.
### Submit Feedback
The best way to send feedback is to file an issue at
<https://github.com/zostera/django-bootstrap5/issues>.
If you are proposing a feature:
- Explain in detail how it would work.
- Keep the scope as narrow as possible, to make it easier to implement.
## Get Started!
Ready to contribute? Here\'s how to set up `django-bootstrap5` for local development.
You will need some knowledge of Git, GitHub, and Python/Django development. Using a Python virtual environment is advised.
This project uses [Hatch](https://github.com/pypa/hatch) for environments and builds.
### Local installation
This section assumes you know about local Python versions and virtual environments.
To clone the repository and install the requirements for local development:
```console
git clone git://github.com/zostera/django-bootstrap5.git
cd django-bootstrap5
pip install -U pip hatch
pip install -e .
```
### Running the tests
To run the tests:
```console
make test
```
To run the tests on all supported Python/Django combinations:
```console
make tests
```
## Pull Request Guidelines
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests for new or changed functionality, and pass all tests.
2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add the feature to the list in CHANGELOG.md.
| PypiClean |
/django_watcher-1.2.3-py3-none-any.whl/django_watcher/mixins.py | from typing import TYPE_CHECKING, Any, Dict, List, Tuple, TypeVar, Union, cast
from django.db import models
from typing_extensions import TypedDict
from .abstract_watcher import AbstractWatcher
class _MetaParams(TypedDict):
    """Required keys present in every hook ``meta_params`` payload."""

    source: str  # either _INSTANCE or _QUERY_SET, depending on how the operation was invoked
    operation_params: dict  # the kwargs that were passed to the watched operation
class MetaParams(_MetaParams, total=False):
    """Hook metadata payload.

    ``instance_ref`` is optional (``total=False``): it is only set when the
    watched operation originated from a single model instance.
    """

    instance_ref: models.Model
# Values for MetaParams['source']: whether the watched operation originated
# from a single model instance or from a queryset-level call.
_INSTANCE = 'instance'
_QUERY_SET = 'query_set'
if TYPE_CHECKING:
    # Static-analysis-only stubs describing the UNWATCHED_* escape hatches
    # that the watcher machinery injects into watched models and querysets.
    # None of these classes exist at runtime.

    class WatchedDeleteModel(models.Model):
        def UNWATCHED_delete(self, **kwargs) -> Tuple[int, Dict[str, int]]:  # nopep8
            pass

    class WatchedSaveModel(models.Model):
        def UNWATCHED_save(self, **kwargs) -> None:  # nopep8
            pass

    # Type variables bound to the stub models above, used in quoted annotations.
    S = TypeVar('S', bound=WatchedSaveModel)
    D = TypeVar('D', bound=WatchedDeleteModel)

    class WatchedCreateQuerySet(models.QuerySet):
        def UNWATCHED_create(self, *args: Any, **kwargs: Any) -> S:  # nopep8
            pass

    class WatchedDeleteQuerySet(models.QuerySet):
        def UNWATCHED_delete(self, *args, **kwargs) -> Tuple[int, Dict[str, int]]:  # nopep8
            pass

    class WatchedUpdateQuerySet(models.QuerySet):
        def UNWATCHED_update(self, **kwargs: Any) -> int:  # nopep8
            pass

    class WatchedSaveQuerySet(WatchedCreateQuerySet, WatchedUpdateQuerySet):
        ...


# A delete may target either a single instance or a whole queryset; the alias
# is quoted everywhere, so it is never evaluated at runtime.
TargetDelete = Union['D', 'WatchedDeleteQuerySet']
class CreateWatcherMixin(AbstractWatcher):
    """Watcher mixin that wraps creation operations with pre/post hooks.

    Override any of the following hooks as needed:

        def pre_create(self, target: List[Model], meta_params: MetaParams) -> None
            ...
        def post_create(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
    """

    def pre_create(self, target: List['S'], meta_params: MetaParams, **hooks_params) -> None:
        pass  # hook: runs before the instance is persisted

    def post_create(self, target: models.QuerySet, meta_params: MetaParams, **hooks_params) -> None:
        pass  # hook: runs after the instance has been persisted

    def _watched_create(self, target: 'WatchedCreateQuerySet', *_, hooks_params, **kwargs) -> 'S':
        params: MetaParams = {'source': _QUERY_SET, 'operation_params': kwargs}
        if self.is_overriden('pre_create'):
            # Hand the hook an unsaved instance built from the create kwargs.
            self.pre_create([target.model(**kwargs)], params, **hooks_params)
        created = target.UNWATCHED_create(**kwargs)
        if self.is_overriden('post_create'):
            self.post_create(self.to_queryset(created), params, **hooks_params)
        return created

    def _create(self, target: 'WatchedCreateQuerySet', *args, **kwargs) -> 'S':
        # Hooks and the create itself share one transaction.
        return self._run_inside_transaction(self._watched_create, target, *args, **kwargs)

    def _watched_save(self, target: 'S', *_, hooks_params, **kwargs) -> None:
        params: MetaParams = {
            'source': _INSTANCE,
            'operation_params': kwargs,
            'instance_ref': target,
        }
        self.pre_create([target], params, **hooks_params)
        target.UNWATCHED_save(**kwargs)
        if self.is_overriden('post_create'):
            self.post_create(self.to_queryset(target), params, **hooks_params)

    def _save(self, target: 'S', **kwargs) -> None:
        # Only a first save (no pk yet) counts as a creation worth watching.
        if target.pk:
            target.UNWATCHED_save(**kwargs)
        else:
            self._run_inside_transaction(self._watched_save, target, **kwargs)
class DeleteWatcherMixin(AbstractWatcher):
    """Watcher mixin that wraps delete operations with pre/post hooks.

    Override any of the following hooks as needed:

        def pre_delete(self, target: models.QuerySet) -> None
            ...
        def post_delete(self, undeleted_instances: List[Model]) -> None
            ...
    """

    def pre_delete(self, target: models.QuerySet, meta_params: MetaParams, **hooks_params) -> None:
        pass  # hook: runs before the rows are removed

    def post_delete(
        self, undeleted_instances: List['D'], meta_params: MetaParams, **hooks_params
    ) -> None:
        pass  # hook: receives an in-memory snapshot of the deleted rows

    def _watched_delete(
        self, target: 'TargetDelete', *args: Any, hooks_params, **kwargs: Any
    ) -> Tuple[int, Dict[str, int]]:
        if self.is_queryset(target):
            params: MetaParams = {
                'source': _QUERY_SET,
                'operation_params': kwargs,
            }
        else:
            params = {
                'source': _INSTANCE,
                'operation_params': kwargs,
                'instance_ref': cast('WatchedDeleteModel', target),
            }
        self.pre_delete(self.to_queryset(target), params, **hooks_params)
        # Snapshot the rows only when someone actually listens to post_delete.
        snapshot = list(self.to_queryset(target)) if self.is_overriden('post_delete') else []
        result = target.UNWATCHED_delete(*args, **kwargs)
        self.post_delete(snapshot, params, **hooks_params)
        return result

    def _delete(
        self, target: 'TargetDelete', *args: Any, **kwargs: Any
    ) -> Tuple[int, Dict[str, int]]:
        # Hooks and the delete itself share one transaction.
        return self._run_inside_transaction(self._watched_delete, target, *args, **kwargs)
class UpdateWatcherMixin(AbstractWatcher):
    """Watcher mixin that wraps update operations with pre/post hooks.

    Override any of the following hooks as needed:

        def pre_update(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
        def post_update(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
    """

    def pre_update(self, target: models.QuerySet, meta_params: MetaParams, **hooks_params) -> None:
        pass  # hook: runs before the rows are updated

    def post_update(self, target: models.QuerySet, meta_params: MetaParams, **hooks_params) -> None:
        pass  # hook: runs after the rows have been updated

    def _watched_update(
        self, target: 'WatchedUpdateQuerySet', *args, hooks_params, **kwargs
    ) -> int:
        params: MetaParams = {'source': _QUERY_SET, 'operation_params': kwargs}
        self.pre_update(target, params, **hooks_params)
        affected = target.UNWATCHED_update(*args, **kwargs)
        self.post_update(target, params, **hooks_params)
        return affected

    def _update(self, target: 'WatchedUpdateQuerySet', *update_args, **kwargs) -> int:
        # Hooks and the update itself share one transaction.
        return self._run_inside_transaction(self._watched_update, target, *update_args, **kwargs)

    def _watched_save(self, target: 'S', *_, hooks_params, **kwargs) -> None:
        params: MetaParams = {
            'source': _INSTANCE,
            'operation_params': kwargs,
            'instance_ref': target,
        }
        if self.is_overriden('pre_update'):
            self.pre_update(self.to_queryset(target), params, **hooks_params)
        target.UNWATCHED_save(**kwargs)
        if self.is_overriden('post_update'):
            self.post_update(self.to_queryset(target), params, **hooks_params)

    def _save(self, target: 'S', **kwargs) -> None:
        # Saving an instance that already has a pk is an update; watch it.
        if target.pk:
            self._run_inside_transaction(self._watched_save, target, **kwargs)
        else:
            target.UNWATCHED_save(**kwargs)
class SaveWatcherMixin(CreateWatcherMixin, UpdateWatcherMixin):
    """Watcher mixin for both create and update operations.

    Hook execution order for creation and update is documented at:
    https://django-data-watcher.readthedocs.io/en/latest/guide/usage.html#savewatchermixin

    Override any of the following hooks as needed:

        def pre_create(self, target: List[Model], meta_params: MetaParams) -> None
            ...
        def post_create(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
        def pre_update(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
        def post_update(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
        def pre_save(self, target: Union[List[Model], models.QuerySet], meta_params: MetaParams) -> None
            ...
        def post_save(self, target: models.QuerySet, meta_params: MetaParams) -> None
            ...
    """

    def pre_save(
        self, target: Union[List['S'], models.QuerySet], meta_params: MetaParams, **hooks_params
    ) -> None:
        pass  # hook: runs before any save (create or update)

    def post_save(self, target: models.QuerySet, meta_params: MetaParams, **hooks_params) -> None:
        pass  # hook: runs after any save (create or update)

    def _watched_save(self, target: 'S', *_, hooks_params, **kwargs) -> None:
        is_new = not target.pk
        params: MetaParams = {
            'source': _INSTANCE,
            'operation_params': kwargs,
            'instance_ref': target,
        }
        if is_new:
            self.pre_save([target], params, **hooks_params)
            self.pre_create([target], params, **hooks_params)
        else:
            before = self.to_queryset(target)
            self.pre_save(before, params, **hooks_params)
            self.pre_update(before, params, **hooks_params)
        target.UNWATCHED_save(**kwargs)
        after = self.to_queryset(target)
        if is_new:
            self.post_create(after, params, **hooks_params)
        else:
            self.post_update(after, params, **hooks_params)
        self.post_save(after, params, **hooks_params)

    def _save(self, target: 'S', **kwargs) -> None:
        # Both creations and updates are watched here, always inside a transaction.
        self._run_inside_transaction(self._watched_save, target, **kwargs)

    def _watched_create(self, target: 'WatchedCreateQuerySet', *_, hooks_params, **kwargs) -> 'S':
        params: MetaParams = {'source': _QUERY_SET, 'operation_params': kwargs}
        # pre_save sees an unsaved instance; the create hooks run in super().
        self.pre_save([target.model(**kwargs)], params, **hooks_params)
        instance: 'WatchedSaveModel' = super()._watched_create(
            target, hooks_params=hooks_params, **kwargs
        )
        self.post_save(self.to_queryset(instance), params, **hooks_params)
        return instance  # type: ignore

    def _watched_update(
        self, target: 'WatchedUpdateQuerySet', *args, hooks_params, **kwargs
    ) -> int:
        params: MetaParams = {'source': _QUERY_SET, 'operation_params': kwargs}
        self.pre_save(target, params, **hooks_params)
        affected = super()._watched_update(target, *args, hooks_params=hooks_params, **kwargs)
        self.post_save(target, params, **hooks_params)
        return affected
/rsa_archer-0.1.9.tar.gz/rsa_archer-0.1.9/README.md | Library to work with Archer REST and Content APIs
===========================================
My original objective was to create an Office365 mail to Archer Incidents application connector. The script captures the email, checks if there is an incident ID assigned, and adds the email to the comments section (sub form) of the Archer record.
This package supports archer part of the connector, if someone interested I can share the whole thing.
#### Release notes:
> v0.1.9 (13 Dec 2019)
> - Fixed method get_field_content in Record: fixed an error where the method crashed if a value in the values list was None
> - Now, if there are several values in the values list, the function will return all of them, including the parent value if you're using a leveled values list.
> - Looks like this ["Parent Value:Value", "Parent Value:Value", "Value"]
> - ATTENTION!!! now function returns LIST of values instead of string value.
>
> v0.1.8 (31 Oct 2019)
> - added delete_record(self, record_id=None) to ArcherInstance and few small fixes
>
> v0.1.4 (05 Feb 2019)
> - added user deactivation method and some user method error handling
>
> v0.1.3 (29 Jan 2019)
> - added archer instance method get_value_id_by_field_name_and_value() to be able to set value in record create/update methods
# Archer REST API
## 0. Installation
```bash
pip install rsa-archer
```
## 1. Creating Archer Instance
Create "api" user in Archer with proper permissions
At first, create Archer Instance object and continue to work with it
```python
import rsa_archer
from rsa_archer.archer_instance import ArcherInstance
archer_instance = ArcherInstance("domain","archer instance name","api username", "password")
# e.g.
archer_instance = ArcherInstance("archer.companyzxc.com","risk_management","api", "secure password")
```
## 2. Working with content records
### 2.1 Selecting application
To start working with content records you need to select Archer application (one application per Archer Instance object), without it it'll not work.
```python
archer_instance.from_application("application name")
# e.g.
archer_instance.from_application("Incidents") #same name as in archer application list
```
### 2.2 Creating new record
**NOTE** - right now working natively with record's fields is limited to text fields, for values list, attachemts and other types of fields you need to operate with archer internal ids. Good example of this is working with attachments, it could be found below.
Preparing json with field names and their values (text or ids):
```python
record_json = {"field name1": "value1", "field name2": "value2", "values list field name": [id1,id2,id3..] ...}
# e.g.
record_json = {"Incident Summary": "desired text", "Reporter email": "email","Incident Details": "HTML text", "Severity": [34658]}
```
Creating the record and getting its id:
```python
record_id = archer_instance.create_content_record(record_json)
```
### 2.2 Working with existing records
#### 2.2.1 Getting record content
Getting record object by id:
```python
existing_record = archer_instance.get_record(record_id)
```
Getting values of record fields (including ids):
```python
existing_record.get_field_content("field_name")
# it returns, value of the text field
# array of user internal ids for user field
# proper value for values list
# internal ids for other types of fields
# TODO other types of fields
```
#### 2.2.2 Updating existing record
Preparing updater json
```python
updater_json = {"field name1": "value1", "field name2": "value2", ...}
#e.g.
updater_json = {"Incident Summary": "desired text", "Reporter email": "email","Incident Details": "HTML text"}
```
Updating the record values:
```python
archer_instance.update_content_record(updater_json, record_id)
```
#### 2.2.3 Posting attachments to archer instance
Uploading attachment to Archer and getting its id:
```python
attachment_id = archer_instance.post_attachment("file name", fileinbase64_string)
```
Append the attachment id to an array; you might want to get the existing record's attachment ids first and append the additional attachment id to them, or you will lose the existing ones:
```python
attachment_ids = []
attachment_ids.append(attachment_id)
```
Then associate the ids with the existing record for example:
```python
updater_json = {"Attachments": attachment_ids}
archer_instance.update_content_record(updater_json, record_id)
```
## 3. Working with sub forms in content records
### 3.1 Creating subrecords
Creating sub_record and getting its id:
```python
sub_form_json = {"subform field name1": "value1", "subform field name1": "value1", ...}
sub_record_id = archer_instance.create_sub_record(sub_form_json, "subform field name in target application")
```
Then associate subrecord with content record, in this case existing record:
```python
updater_json = {"subform field name in target application": sub_record_id}
archer_instance.update_content_record(updater_json, record_id)
```
But it will replace the existing subrecords in application, so you should get the existing subrecords first:
```python
current_sub_records_ids = record.get_field_content("subform field name in target application") #get the array of existing attachments ids
if current_sub_records_ids:
final_sub_records = current_sub_records_ids + sub_record_id
else:
final_sub_records = sub_record_id
```
And then update the original application record:
```python
updater_json = {"subform field name in target application": sub_record_id}
archer_instance.update_content_record(updater_json, record_id)
```
### 3.2 Attachments to subrecords
Uploading attachment to Archer and getting its id:
```python
attachment_id = archer_instance.post_attachment("file name", fileinbase64_string)
```
Put attachment ids into array:
```python
attachment_ids = []
attachment_ids.append(attachment_id)
```
Associate it with the new sub_record:
```python
sub_form_json = {"sub form attachment field name": attachment_ids}
archer_instance.create_sub_record(sub_form_json, "APPLICATION FIELD NAME")
```
## 4. Working with users
### 4.1 Getting user objects:
Getting all user objects:
```python
users = archer_instance.get_users()
```
Getting individual user object:
```python
user = archer_instance.get_user_by_id("user id")
```
Getting users using filters, find full list of filters in Archer REST API documentation:
```python
users = archer_instance.get_users("?$select=Id,DisplayName&$orderby=LastName")
```
Getting active users with no login:
```python
users = archer_instance.get_active_users_with_no_login()
```
### 4.2 Getting users info
Getting user object parameters (added for convenience), all information could be found in user.json:
```python
email = user.get_user_email()
id = user.get_user_id()
display_name = user.get_gisplay_name()
user_name = user.get_username()
last_login = user.get_last_login_date()
```
### 4.3 Working with user object
Assigning user to role:
```python
user.assign_role_to_user("role id")
```
Activating user:
```python
user.activate_user()
```
Deactivating user:
```python
user.deactivate_user()
```
Adding user to group:
```python
archer_instance.get_all_groups() #loads all groups first
user.put_user_to_group("group name")
```
# Archer GRC API (released from 6.4)
To start working in GRC api you need to set an endpoint, it's analog of application we used in REST.
To find the exact name of an endpoint you can use the following method:
```python
archer_instance.find_grc_endpoint_url("application name")
```
With endpoint name you can get content records of the application:
* it'll give you only 1000 records at a time, use skip to get more
* I used this api only to get key field to id mapping, since there is no normal search in REST API
* Method returns array_of_jsons instead of record objects, since these jsons are different from REST jsons and I don't really use them
```python
array_of_jsons = archer_instance.get_grc_endpoint_records("endpoint name", skip=None)
```
I'm building key record field value to record internal id mapping:
* for Incidents application "application key field" was incident #INC-xxx, but key record field stores only integer, for some reason
* so I added prefix, "INC-" in my example to the method
```python
archer_instance.build_unique_value_to_id_mapping("endpoint name", "application key field name", "prefix"=None)
```
So based on key record field value I can get record internal id:
```python
record_id = archer_instance.get_record_id_by_unique_value("key field value")
```
| PypiClean |
/ensmallen_graph-0.6.0-cp37-cp37m-manylinux2010_x86_64.whl/ensmallen_graph/datasets/string/streptomycespurpureus.py | from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def StreptomycesPurpureus(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Streptomyces purpureus graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
Wether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
Wether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
Instace of Streptomyces purpureus graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 22:17:57.798458
The undirected graph Streptomyces purpureus has 6611 nodes and 839895 weighted
edges, of which none are self-loops. The graph is dense as it has a density
of 0.03844 and has 29 connected components, where the component with most
nodes has 6544 nodes and the component with the least nodes has 2 nodes.
The graph median node degree is 214, the mean node degree is 254.09, and
the node degree mode is 2. The top 5 most central nodes are 1054860.KB913030_gene4965
(degree 2746), 1054860.KB913030_gene1397 (degree 2002), 1054860.KB913030_gene1443
(degree 1900), 1054860.KB913030_gene4032 (degree 1858) and 1054860.KB913030_gene849
(degree 1752).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import StreptomycesPurpureus
# Then load the graph
graph = StreptomycesPurpureus()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="StreptomycesPurpureus",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)() | PypiClean |
/ciefunctions-1.0.2.tar.gz/ciefunctions-1.0.2/tc1_97/MathJax-2.7.5/unpacked/jax/output/SVG/fonts/Asana-Math/Main/Regular/Main.js | MathJax.OutputJax['SVG'].FONTDATA.FONTS['AsanaMathJax_Main'] = {
directory: 'Main/Regular',
family: 'AsanaMathJax_Main',
id: 'ASANAMATHMAIN',
0x20: [0,0,249,0,0,''],
0x21: [694,5,277,81,197,'138 111c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM160 694h34v-25c0 -56 -10 -109 -12 -143l-24 -278v-25l-28 -23l-10 5v17l-28 328c-2 29 -4 53 -7 106'],
0x22: [709,-469,370,52,319,'105 469h-20l-33 196v5c0 19 22 39 43 39s43 -20 43 -38c0 -2 0 -4 -1 -6zM286 469h-20l-33 196v5c0 20 21 39 43 39c21 0 43 -20 43 -39c0 -1 0 -3 -1 -5'],
0x23: [684,0,499,4,495,'470 280v-56h-109l-31 -224h-58l31 224h-134l-33 -224h-58l33 224h-107v56h116l21 139h-110v56h118l31 209h58l-31 -209h133l30 209h58l-30 -209h97v-56h-105l-20 -139h100zM332 419h-133l-21 -139h134'],
0x24: [731,116,499,30,471,'260 373c115 -12 211 -32 211 -166c0 -75 -34 -139 -97 -180c-33 -22 -59 -32 -114 -43l4 -100h-45c2 21 2 34 4 98c-62 1 -128 16 -193 42c6 39 9 69 12 148h32v-64c0 -24 2 -29 14 -41c28 -27 70 -42 135 -49v279c-107 12 -193 31 -193 161c0 74 31 134 88 171 c30 20 54 28 105 37l-4 65h45c-2 -19 -3 -31 -4 -62c80 -6 119 -16 166 -43c-9 -33 -13 -72 -13 -126h-32v42c0 37 -4 49 -23 62c-25 16 -52 25 -98 29v-260zM260 292v-274c80 14 132 74 132 155c0 73 -34 104 -132 119zM223 379v251c-36 -4 -52 -10 -72 -26 c-30 -24 -46 -63 -46 -110c0 -73 32 -104 118 -115'],
0x25: [709,20,839,39,802,'203 689c87 0 152 -66 152 -154c0 -96 -69 -167 -162 -167c-89 0 -154 65 -154 154c0 97 69 167 164 167zM200 666c-55 0 -89 -54 -89 -140s30 -135 83 -135c54 0 89 56 89 142c0 82 -32 133 -83 133zM650 321c87 0 152 -66 152 -154c0 -96 -69 -167 -162 -167 c-89 0 -154 65 -154 154c0 97 69 167 164 167zM647 298c-55 0 -89 -54 -89 -140s30 -135 83 -135c54 0 89 56 89 142c0 82 -32 133 -83 133zM250 -20h-40l366 729h40'],
0x26: [689,20,777,43,753,'424 599c0 -90 -75 -151 -144 -202c56 -91 115 -165 204 -256c36 48 48 77 48 113c0 41 -12 76 -27 80c-12 3 -28 5 -87 10l33 45c50 -2 71 -2 97 -2l194 7c-9 -29 -12 -47 -12 -72c-39 6 -69 9 -140 12c4 -15 5 -24 5 -39c0 -63 -22 -111 -81 -177 c85 -49 123 -63 172 -63c17 0 31 5 61 19l6 -25l-87 -63c-6 -4 -16 -6 -34 -6c-48 0 -123 32 -181 77c-71 -55 -129 -77 -202 -77c-119 0 -206 80 -206 192c0 129 115 192 155 215c-21 53 -30 92 -30 133s9 73 24 86c23 32 45 47 68 60c18 16 39 23 66 23 c62 0 98 -33 98 -90zM213 352c-61 -48 -83 -85 -83 -143c0 -93 77 -173 166 -173c40 0 77 14 124 46c-113 126 -162 189 -207 270zM264 431c50 39 93 97 93 138c0 38 -25 67 -58 67c-40 0 -61 -28 -61 -79c0 -35 7 -70 26 -126'],
0x27: [709,-446,277,45,233,'104 470l-44 -24l-15 10c48 77 72 143 78 223c51 15 61 18 97 30l13 -13c-34 -86 -72 -153 -129 -226'],
0x28: [726,215,332,60,301,'146 266c0 -281 109 -403 155 -460l-19 -21c-82 77 -114 116 -149 183c-48 90 -73 191 -73 298c0 276 165 409 222 460l19 -26c-58 -68 -155 -174 -155 -434'],
0x29: [726,215,332,32,273,'51 726c57 -51 222 -184 222 -461c0 -288 -169 -430 -222 -480l-19 21c51 63 155 184 155 460c0 260 -100 370 -155 434'],
0x2A: [442,-95,388,32,359,'168 267l-131 64l-3 10l44 52h10l95 -98l26 143l9 4l62 -19l3 -15l-69 -125l139 24l6 -4v-62l-9 -8l-138 20l68 -122v-10l-68 -26l-10 4l-18 142l-106 -88h-10l-36 51l5 9'],
0x2B: [538,0,668,65,604,'604 241h-240v-241h-59v241h-240v59h240v238h59v-238h240v-59'],
0x2C: [123,155,249,16,218,'204 123l14 -14c-28 -77 -66 -140 -147 -245l-41 -19l-14 11c58 96 83 161 90 237c53 15 71 21 98 30'],
0x2D: [287,-215,332,18,314,'284 215h-260l-6 5l28 67h262l6 -5'],
0x2E: [111,5,249,67,183,'124 111c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58'],
0x2F: [714,169,286,-37,297,'297 714l-279 -883h-55l279 883h55'],
0x30: [689,20,499,29,465,'263 689c137 0 202 -104 202 -323c0 -256 -76 -386 -227 -386c-62 0 -118 28 -153 77s-56 150 -56 267c0 242 79 365 234 365zM245 654c-95 0 -132 -83 -132 -294c0 -244 41 -345 138 -345c92 0 130 88 130 301c0 240 -39 338 -136 338'],
0x31: [700,3,499,60,418,'418 -3c-16 0 -126 3 -157 3c-11 0 -11 0 -165 -3v30l56 3c54 3 65 14 65 66v518l-150 -66l-7 50l241 102v-604c0 -52 10 -63 65 -66l52 -3v-30'],
0x32: [689,3,499,16,468,'16 23l170 181c113 120 155 194 155 269c0 85 -55 139 -141 139c-67 0 -103 -25 -119 -83l-13 -46h-29l17 136c49 50 98 70 170 70c127 0 205 -71 205 -186c0 -75 -30 -130 -127 -233l-182 -193c185 4 231 0 346 5v-85c-193 0 -193 3 -229 3s-36 -3 -223 -3v26'],
0x33: [689,20,499,15,462,'462 224c0 -137 -155 -244 -278 -244c-37 0 -81 8 -134 26c-6 50 -14 86 -35 147l27 10c19 -54 32 -77 56 -99c30 -28 71 -43 117 -43c93 0 154 67 154 169c0 91 -48 144 -131 144c-27 0 -46 -5 -84 -22l-7 6l12 53l15 -1c13 -1 24 -2 32 -2c71 0 130 63 130 138 c0 63 -53 116 -118 116c-52 0 -115 -34 -126 -68l-18 -56h-31l29 124c48 48 93 67 161 67c111 0 185 -59 185 -148c0 -59 -30 -105 -110 -167c47 -8 154 -29 154 -150'],
0x34: [697,3,499,2,472,'280 181h-278v23l283 463l80 30l-1 -452l108 3v-71c-28 3 -49 4 -108 4v-75c0 -60 11 -73 60 -76l48 -3v-30c-133 3 -133 3 -157 3s-24 0 -157 -3v30l62 3c49 2 60 16 60 76v75zM280 245v352l-215 -352h215'],
0x35: [690,20,499,13,459,'459 253c0 -164 -158 -273 -286 -273c-43 0 -83 9 -111 24c-14 44 -26 76 -49 133l24 14l46 -82c14 -26 64 -44 118 -44c109 0 173 65 173 176c0 108 -60 176 -156 176c-49 0 -76 -14 -123 -61l-20 6v368l357 -1v-85l-305 1v-209c53 47 89 50 137 50 c114 0 195 -80 195 -193'],
0x36: [689,20,499,32,468,'131 331c53 39 85 78 162 78c106 0 175 -75 175 -190c0 -136 -97 -239 -227 -239c-129 0 -209 102 -209 267c0 137 56 259 159 346c51 44 92 64 188 96l42 -24c-180 -54 -269 -153 -290 -334zM255 349c-48 0 -95 -24 -117 -58c-11 -17 -15 -36 -15 -71 c0 -134 46 -207 131 -207c77 0 128 67 128 171c0 99 -51 165 -127 165'],
0x37: [689,1,499,44,497,'409 603h-267c-46 0 -53 -8 -61 -70l-7 -56h-30v212h453v-30l-340 -660h-110'],
0x38: [689,20,499,30,464,'168 345c-88 57 -115 95 -115 163c0 105 87 181 206 181c106 0 177 -58 177 -144c0 -60 -32 -102 -123 -164c114 -52 151 -95 151 -175c0 -124 -108 -226 -238 -226c-113 0 -196 76 -196 179c0 52 20 97 58 135c22 21 39 32 80 51zM284 295l-80 36 c-74 -45 -103 -91 -103 -164c0 -93 55 -153 140 -153c81 0 144 66 144 150c0 54 -38 103 -101 131zM223 423l55 -27c65 42 90 81 90 138c0 73 -48 121 -121 121c-71 0 -119 -45 -119 -111c0 -50 32 -90 95 -121'],
0x39: [689,20,499,20,457,'96 -20l-34 17v8c181 44 290 167 298 335l-70 -58c-18 -15 -57 -26 -91 -26c-104 0 -179 84 -179 199c0 134 96 234 224 234c132 0 213 -103 213 -272c0 -134 -51 -249 -148 -332c-54 -46 -102 -69 -213 -105zM240 656c-82 0 -134 -63 -134 -164c0 -104 54 -175 134 -175 c77 0 130 62 130 154c0 45 -12 94 -30 126c-22 40 -55 59 -100 59'],
0x3A: [456,5,249,66,182,'123 111c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM123 456c31 0 59 -28 59 -59c0 -30 -28 -57 -60 -57c-29 0 -56 28 -56 58s27 58 57 58'],
0x3B: [456,153,249,16,218,'204 125l14 -14c-28 -77 -66 -140 -147 -245l-41 -19l-14 11c58 96 83 161 90 237c53 15 71 21 98 30zM150 456c31 0 59 -28 59 -59c0 -30 -28 -57 -60 -57c-29 0 -56 28 -56 58s27 58 57 58'],
0x3C: [528,0,668,65,604,'604 0l-539 252v21l539 255v-59l-438 -205l438 -205v-59'],
0x3D: [406,-134,668,65,604,'604 347h-539v59h539v-59zM604 134h-539v59h539v-59'],
0x3E: [528,0,668,65,604,'65 0v59l438 205l-438 205v59l539 -255v-21'],
0x3F: [694,5,443,43,395,'148 254c0 90 154 162 154 265c0 64 -51 109 -123 109c-45 0 -76 -15 -87 -42l-20 -48l-29 8l38 113c53 26 88 35 132 35c103 0 182 -67 182 -154c0 -57 -25 -96 -110 -174c-81 -73 -94 -90 -94 -123c0 -16 6 -29 22 -48l-21 -14c-30 19 -44 43 -44 73zM198 111 c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58'],
0x40: [694,20,746,24,724,'401 30c161 0 234 109 257 141h61c-31 -51 -47 -72 -79 -99c-69 -61 -153 -92 -246 -92c-208 0 -370 154 -370 351c0 204 161 363 369 363c186 0 331 -125 331 -285c0 -137 -117 -275 -233 -275c-44 0 -71 30 -76 84c-50 -63 -76 -79 -123 -79c-74 0 -133 62 -133 141 c0 119 104 229 217 229c48 0 76 -25 88 -80l18 70h88l-77 -279c-3 -10 -4 -19 -4 -28c0 -17 10 -29 25 -29c75 0 157 127 157 242c0 137 -117 239 -274 239c-182 0 -318 -131 -318 -307s137 -307 322 -307zM308 166c87 0 137 169 137 244c0 41 -21 70 -52 72h-5 c-78 0 -140 -151 -140 -228c0 -53 24 -88 60 -88'],
0x41: [700,3,777,15,756,'408 700l242 -568c35 -81 46 -98 72 -102l34 -3v-30c-108 3 -108 3 -127 3c-16 0 -16 0 -139 -3v30l47 3c25 2 41 9 41 20c0 9 -4 22 -21 64l-46 115h-288l-63 -173c0 -16 13 -23 48 -26l37 -3v-30c-108 3 -108 3 -121 3c-16 0 -16 0 -109 -3v30l37 3c22 2 42 20 54 49 l270 621h32zM240 269h253l-126 298'],
0x42: [692,3,610,26,576,'298 -3c-37 0 -99 3 -124 3c-19 0 -40 -1 -109 -3v23l26 10c14 6 22 28 22 65v474c0 79 -4 86 -51 90l-36 3v30c139 -3 139 -3 158 -3c15 0 15 0 154 3h25c116 0 179 -48 179 -136c0 -52 -25 -95 -73 -128c-28 -19 -51 -28 -100 -38c77 -6 111 -15 149 -43 c38 -27 58 -68 58 -119c0 -94 -68 -231 -278 -231zM208 362v-315c25 -6 48 -8 77 -8c119 0 188 63 188 169c0 107 -63 156 -198 156c-19 0 -28 0 -67 -2zM208 644v-243c31 -2 44 -2 61 -2c114 0 174 47 174 134c0 83 -47 119 -154 119c-32 0 -55 -2 -81 -8'],
0x43: [709,20,708,22,670,'22 342c0 240 196 367 411 367c73 0 146 -13 231 -41c-14 -66 -18 -102 -19 -152h-31v72c0 39 -101 79 -199 79c-172 0 -286 -122 -286 -306c0 -197 130 -327 325 -327c76 0 145 20 207 60l9 -10l-28 -50c-69 -36 -144 -54 -227 -54c-235 0 -393 146 -393 362'],
0x44: [692,3,793,22,751,'341 -3c-43 0 -87 3 -145 3c-23 0 -23 0 -139 -3v23l31 16c16 8 25 30 25 59v474c0 78 -5 87 -51 90l-40 3v30c114 -2 169 -3 206 -3c70 0 157 3 174 3c211 0 349 -121 349 -307c0 -107 -44 -215 -116 -286c-70 -69 -166 -102 -294 -102zM644 359v9 c0 221 -186 278 -370 278c-22 0 -44 0 -66 -2v-595c113 0 276 9 336 58c66 54 100 140 100 252'],
0x45: [692,3,610,22,572,'556 0c-136 -3 -136 -3 -167 -3c-70 0 -186 3 -232 3c-28 0 -57 -1 -123 -3v30l36 3c47 4 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c99 -2 144 -3 178 -3c46 0 139 3 232 3c44 0 75 -1 124 -3c-11 -44 -16 -92 -16 -152h-35v65c0 23 -3 28 -27 34 c-32 9 -88 14 -145 14c-69 0 -86 -1 -117 -8v-267c34 -2 68 -3 110 -3c66 0 94 3 112 11c4 8 7 17 8 26l5 52h30c-3 -100 -3 -100 -3 -116c0 -18 0 -18 3 -111h-30l-5 59l-8 25c-18 9 -45 12 -119 12c-45 0 -75 -1 -103 -3v-280c29 -6 47 -8 99 -8c90 0 163 5 184 12 c28 10 37 104 38 113h35c-10 -52 -16 -106 -16 -164'],
0x46: [692,3,555,22,536,'216 327v-207c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c147 -3 147 -3 176 -3c25 0 60 0 109 2l107 1c43 0 73 -1 122 -3c-11 -44 -16 -92 -16 -152h-35v65c0 23 -3 28 -27 34 c-31 8 -87 14 -135 14c-59 0 -77 -1 -107 -8v-267c34 -2 68 -3 110 -3c66 0 94 3 112 11c4 8 7 17 8 26l5 52h30c-3 -100 -3 -100 -3 -116c0 -18 0 -18 3 -111h-30l-5 59l-8 25c-18 9 -45 12 -119 12c-45 0 -75 -1 -103 -3'],
0x47: [709,20,762,22,728,'412 -20c-207 0 -390 121 -390 361c0 222 170 368 428 368c80 0 131 -9 242 -41c-14 -54 -20 -101 -20 -152h-31v53c0 78 -130 98 -217 98c-187 0 -295 -112 -295 -305c0 -208 126 -340 324 -340c66 0 126 14 137 33c4 7 6 19 6 43v101c0 24 -9 33 -34 35l-83 7v30 c87 -2 93 -2 122 -2c31 0 34 0 123 2l4 -25l-29 -10c-10 -4 -11 -6 -11 -37v-168c-136 -38 -205 -51 -276 -51'],
0x48: [692,3,831,22,810,'616 120v223h-400v-223c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-178c20 -1 42 -2 83 -2h234 c41 0 62 1 83 2v178c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-449c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90'],
0x49: [692,3,336,22,315,'216 569v-449c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90'],
0x4A: [692,195,332,-15,311,'-10 -192l-5 76l11 6c23 -14 43 -20 67 -20c40 0 54 29 54 113v586c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-452c0 -121 -13 -171 -62 -231c-30 -38 -83 -81 -131 -81c-4 0 -8 0 -12 1'],
0x4B: [692,3,725,22,719,'216 325v-205c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-214l307 310v27c58 -2 75 -3 89 -3c17 0 17 0 80 3 v-30l-42 -2c-26 -1 -51 -14 -78 -38l-266 -240l330 -312c31 -29 47 -40 62 -41l21 -2v-30c-50 3 -52 3 -65 3c-10 0 -10 0 -80 -3l-348 335'],
0x4C: [692,3,610,22,586,'565 0c-136 -3 -136 -3 -168 -3c-93 0 -210 3 -233 3c-26 0 -33 0 -99 -3v23l31 16c16 8 25 30 25 59v474c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-522c36 -2 76 -3 115 -3c93 0 180 5 189 13 c6 5 28 86 34 115h32c-10 -97 -14 -129 -21 -172'],
0x4D: [692,13,945,16,926,'732 120v458l-276 -591h-20l-279 591v-458c0 -78 5 -87 51 -90l44 -3v-30c-105 3 -105 3 -120 3c-17 0 -17 0 -116 -3v30l44 3c46 3 51 12 51 90v449c0 78 -5 87 -51 90l-44 3v30c80 -3 80 -3 95 -3s15 0 95 3l264 -556l262 556c80 -3 80 -3 95 -3c17 0 17 0 93 3v-30 l-42 -3c-46 -3 -51 -12 -51 -90v-449c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -151 3c-52 0 -82 -1 -128 -3v30l34 3c47 4 51 11 51 90'],
0x4E: [692,6,830,17,813,'137 689l54 3l481 -594v471c0 78 -5 87 -51 90l-44 3v30c99 -3 99 -3 116 -3c15 0 15 0 120 3v-30l-44 -3c-46 -3 -51 -12 -51 -90v-575l-87 2l-473 587v-463c0 -78 5 -87 51 -90l44 -3v-30c-105 3 -105 3 -120 3c-17 0 -17 0 -116 -3v30l44 3c46 3 51 12 51 90v449 c0 78 -5 87 -51 90l-44 3v30c55 -2 64 -2 120 -3'],
0x4F: [709,20,785,22,764,'402 709c222 0 362 -132 362 -342c0 -220 -170 -387 -394 -387c-202 0 -348 151 -348 361c0 220 152 368 380 368zM385 667c-164 0 -256 -104 -256 -289c0 -214 113 -356 281 -356c155 0 247 117 247 315c0 208 -101 330 -272 330'],
0x50: [692,3,603,22,580,'23 692c160 -3 160 -3 182 -3c102 0 153 3 170 3c127 0 205 -59 205 -155c0 -120 -105 -213 -242 -213c-20 0 -34 1 -58 6l-10 36c26 -8 41 -10 62 -10c87 0 148 63 148 153c0 95 -58 145 -166 145c-34 0 -63 -4 -98 -12v-522c0 -79 5 -87 51 -90l48 -3v-30 c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 74 -8 87 -53 90l-45 3v30'],
0x51: [709,176,785,22,764,'401 -19c106 -13 189 -86 304 -86c9 0 20 1 59 6v-21l-84 -56l-254 72c-31 10 -59 15 -78 15c-10 0 -75 -34 -87 -40l2 32c56 43 90 66 109 72l16 6h-11c-215 0 -355 143 -355 362c0 218 152 366 376 366s366 -131 366 -338c0 -144 -74 -273 -195 -343 c-52 -29 -89 -40 -168 -47zM129 375c0 -128 57 -352 283 -352c157 0 245 112 245 312c0 210 -100 332 -272 332c-165 0 -256 -104 -256 -292'],
0x52: [692,3,667,22,669,'216 642v-522c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 78 -5 87 -51 90l-41 3v30c140 -3 140 -3 173 -3c34 0 50 3 180 3c124 0 194 -54 194 -150c0 -58 -28 -108 -80 -145c-33 -23 -59 -34 -115 -48l227 -296 c13 -17 29 -23 61 -26v-30c-63 3 -63 3 -72 3c-11 0 -21 0 -68 -3l-277 363l6 12c13 -1 22 -1 29 -1c116 0 189 58 189 150c0 85 -58 133 -162 133c-34 0 -64 -4 -98 -12'],
0x53: [709,20,524,24,503,'419 179c0 220 -395 42 -395 305c0 131 108 225 257 225c55 0 100 -10 180 -42c-10 -54 -14 -88 -14 -141h-32l-6 57c-3 29 -14 44 -47 61c-27 15 -64 23 -103 23c-88 0 -151 -60 -151 -144c0 -175 249 -88 354 -187c28 -26 41 -59 41 -106c0 -145 -126 -250 -300 -250 c-58 0 -116 13 -173 38c6 45 8 67 8 113c0 6 0 22 -1 40h32l8 -67c5 -41 83 -80 163 -80c104 0 179 65 179 155'],
0x54: [692,3,612,18,595,'379 689c43 0 22 0 216 3c-5 -57 -7 -96 -7 -161h-31l-4 79c-1 24 -12 33 -40 33h-155l-4 -523c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90l-4 523h-155c-28 0 -39 -9 -40 -33l-4 -79h-31c0 65 -2 104 -7 161l68 -1 c71 -1 125 -2 135 -2h158'],
0x55: [692,20,777,12,759,'378 -20c-177 0 -267 62 -267 234v355c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-344c0 -128 60 -183 199 -183c145 0 213 66 213 207v320c0 78 -5 87 -51 90l-44 3v30c99 -3 99 -3 116 -3c15 0 15 0 120 3 v-30l-44 -3c-46 -3 -51 -12 -51 -90v-289c0 -211 -78 -300 -286 -300'],
0x56: [692,9,721,8,706,'274 662l-56 -3c-20 -1 -32 -8 -32 -20c0 -4 198 -514 199 -518c6 15 192 502 192 512c0 15 -13 25 -35 26l-52 3v30c16 -1 33 -1 38 -1c29 -1 52 -2 70 -2l108 3v-30l-30 -3c-20 -3 -31 -18 -54 -72l-174 -421c-33 -80 -45 -113 -64 -175h-50c-15 54 -27 89 -58 165 l-188 462c-13 30 -23 39 -44 41l-36 3v30c110 -3 110 -3 133 -3s23 0 133 3v-30'],
0x57: [700,9,1000,8,984,'734 -9h-50l-196 545l-210 -545h-50l-142 618c-9 36 -21 47 -50 50l-28 3v30c77 -2 119 -3 126 -3l126 3v-30l-42 -3c-27 -2 -37 -11 -37 -33c0 -10 101 -470 104 -482l214 556h32l197 -563l133 498c-1 16 -12 21 -47 24l-40 3v30c95 -3 95 -3 105 -3l105 3v-30l-32 -3 c-22 -2 -32 -11 -38 -29'],
0x58: [700,3,666,14,648,'648 27v-30c-43 2 -66 3 -71 3c-12 0 -12 0 -74 -3l-184 306l-202 -306c-43 3 -43 3 -51 3s-8 0 -52 -3v30l26 3c22 3 34 14 59 48l195 263l-128 213c-33 55 -68 88 -92 90l-45 3l-2 26l141 27c19 -10 34 -27 57 -65l128 -216l185 270h101v-30l-41 -3 c-15 -2 -27 -14 -68 -68l-154 -208l176 -280c35 -54 47 -67 66 -70'],
0x59: [705,3,666,9,654,'9 680l120 25c18 0 59 -46 91 -103l143 -261l103 163c39 62 70 116 108 190h80v-15c-81 -97 -114 -142 -188 -253c-58 -88 -72 -122 -72 -177v-129c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v142c0 16 -3 30 -9 40 l-124 216c-42 73 -94 125 -128 130l-29 3v29'],
0x5A: [692,3,666,15,638,'637 663l-486 -619h273c153 0 167 6 174 72l6 57h34c-5 -55 -6 -90 -6 -176l-305 3l-312 -3v28l485 623h-228c-122 0 -149 -8 -152 -47l-6 -71h-34c2 37 3 55 3 81s-1 44 -3 81l165 -1l232 -2l156 3'],
0x5B: [726,184,332,79,288,'88 -184l-9 13c6 31 8 59 8 127v622c0 50 -4 87 -4 113c0 15 1 26 5 35l100 -3l94 3l6 -6v-26l-4 -5h-38c-27 0 -49 -6 -61 -16c-9 -9 -12 -24 -12 -57v-691c0 -33 3 -47 12 -56c12 -10 34 -16 61 -16h38l4 -5v-26l-6 -6l-100 1'],
0x5C: [714,169,286,-9,324,'324 -169h-55l-278 883h54'],
0x5D: [726,184,332,45,254,'245 -184h-194l-6 6v26l4 5h38c27 0 49 6 61 16c9 9 12 23 12 56v691c0 33 -3 48 -12 57c-12 10 -34 16 -61 16h-38l-4 5v26l6 6h194c4 -9 5 -20 5 -35c0 -26 -4 -63 -4 -113v-622c0 -68 2 -96 8 -127'],
0x5E: [689,-283,605,51,554,'119 283h-68l219 406h66l218 -406h-68l-183 338'],
0x5F: [-75,125,499,0,500,'500 -125h-500v50h500v-50'],
0x60: [709,-446,277,45,233,'174 685l44 24l15 -10c-48 -76 -72 -144 -78 -223c-51 -15 -61 -18 -97 -30l-13 13c34 86 72 153 129 226'],
0x61: [469,12,499,32,471,'324 82l-124 -86c-7 -5 -24 -8 -42 -8c-79 0 -126 45 -126 123c0 87 57 134 193 159l99 18c-3 91 -42 128 -136 128c-46 0 -56 -7 -77 -54l-13 -31l-26 7v65l53 32c38 23 78 34 121 34c116 0 158 -53 158 -199v-183c0 -49 4 -57 28 -58l39 -2v-30c-44 2 -69 3 -74 3 c-13 0 -13 0 -78 -3zM324 124v132c-76 -11 -131 -25 -156 -40c-30 -19 -50 -54 -50 -91c0 -43 35 -80 75 -80c39 0 102 38 131 79'],
0x62: [726,28,552,-15,508,'150 718v-349c32 77 98 116 165 116c89 0 180 -71 193 -215v-8c0 -134 -76 -290 -228 -290c-48 0 -103 16 -166 52l-40 -36l-16 6c7 70 8 97 8 159v467c0 30 -7 41 -25 41h-56v28c79 12 112 20 156 37zM284 404c-78 0 -134 -61 -134 -96v-215c0 -22 74 -54 126 -54 c85 0 145 78 145 190c0 107 -53 175 -137 175'],
0x63: [497,20,443,25,413,'399 311h-28l-7 61c-18 25 -66 45 -110 45c-90 0 -140 -60 -140 -167c0 -132 73 -218 184 -218c36 0 68 9 105 29l10 -15l-21 -32c-10 -15 -85 -34 -137 -34c-134 0 -229 95 -229 230c0 5 -1 9 -1 14c0 137 144 273 268 273c45 0 88 -19 120 -63c-9 -36 -14 -77 -14 -123 '],
0x64: [726,12,610,34,579,'579 -3c-9 0 -73 3 -91 3c-12 0 -28 -1 -77 -3l5 92l-111 -93c-5 -4 -28 -8 -48 -8c-65 0 -112 13 -146 42c-49 40 -77 119 -77 199c0 122 149 257 272 257c40 0 78 -14 106 -49v183c0 30 -7 41 -25 41h-56v28c79 12 112 20 156 37l9 -8v-616c0 -63 3 -70 37 -72l46 -3 v-30zM412 350c0 16 -67 72 -146 72c-90 0 -144 -64 -144 -173c0 -119 63 -204 151 -204c47 0 96 31 124 78c13 23 15 33 15 98v129'],
0x65: [469,20,478,26,448,'250 -20c-89 0 -224 45 -224 239c0 104 32 173 114 221c40 24 85 29 122 29c112 0 186 -75 186 -190c0 -12 -1 -21 -2 -35c-47 -10 -156 -18 -242 -18c-34 0 -64 1 -85 4c2 -58 9 -86 28 -119c28 -49 82 -78 147 -78c43 0 66 9 130 47l13 -10l-20 -34 c-12 -18 -102 -56 -167 -56zM359 267c-3 67 -10 96 -32 129c-18 27 -44 40 -77 40c-48 0 -91 -27 -112 -71c-13 -26 -17 -48 -17 -95c26 -6 69 -9 112 -9c51 0 102 3 126 6'],
0x66: [737,3,332,23,341,'341 717v-77l-12 -6c-24 21 -42 28 -69 28c-57 0 -70 -28 -70 -143v-85l131 4l-7 -53h-124v-283c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v283l-69 -4v22l69 31v42v2c0 77 57 259 168 259c21 0 43 -6 67 -20'],
0x67: [469,283,555,32,544,'55 289c0 135 146 180 221 180c49 0 85 -12 122 -42l142 4l4 -12l-25 -41h-92c8 -21 11 -34 11 -54c0 -127 -143 -174 -204 -174c-8 0 -14 1 -24 2c-17 -17 -46 -34 -46 -57c0 -27 24 -39 76 -39h160c70 0 118 -50 118 -122c0 -122 -119 -217 -270 -217 c-127 0 -216 63 -216 154c0 31 14 68 28 76l87 47c-48 14 -66 30 -66 58c0 15 5 28 15 43l79 61c-74 8 -120 59 -120 133zM240 435c-64 0 -101 -45 -101 -122s45 -129 112 -129c61 0 103 45 103 110c0 85 -46 141 -114 141zM287 -14c-69 0 -97 -2 -113 -10 c-40 -17 -66 -58 -66 -103c0 -74 60 -125 147 -125c105 0 184 64 184 150c0 65 -39 88 -152 88'],
0x68: [726,3,581,6,572,'7 661v28c79 12 112 20 156 37l9 -8v-340c28 61 89 92 151 92c85 0 171 -58 171 -175v-193c0 -64 3 -70 37 -72l41 -3v-30c-69 3 -69 3 -83 3c-17 0 -18 0 -79 -3v284c0 77 -44 127 -111 127c-61 0 -127 -49 -127 -94v-212c0 -63 3 -70 37 -72l45 -3v-30 c-68 2 -95 3 -124 3s-56 -1 -124 -3v30l45 3c34 2 37 9 37 72v518c0 30 -7 41 -25 41h-56'],
0x69: [687,3,290,21,271,'188 465v-363c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v225c0 66 -4 73 -44 73h-37v28c53 6 105 20 156 41zM142 687c28 0 55 -28 55 -56c0 -29 -27 -55 -56 -55s-55 26 -55 55c0 30 26 56 56 56'],
0x6A: [688,283,233,-40,167,'-10 -248c68 0 85 106 85 241v334c0 66 -4 73 -44 73h-37v28c53 6 105 20 156 41l9 -4v-484c0 -79 -18 -143 -53 -189c-38 -50 -76 -71 -134 -75l-12 38c8 -1 24 -3 30 -3zM112 688c29 0 55 -26 55 -55c0 -30 -26 -56 -56 -56c-28 0 -55 27 -55 56c0 28 27 55 56 55'],
0x6B: [726,12,555,21,549,'23 661v28c79 12 112 20 156 37l9 -8v-457l223 198h100v-30h-4c-32 0 -59 -12 -104 -47l-129 -101l190 -193c32 -31 61 -48 85 -48v-23l-96 -29l-265 252v-138c0 -63 3 -70 37 -72l44 -3v-30c-89 3 -94 3 -122 3c-26 0 -26 0 -126 -3v30l46 3c34 2 37 9 37 72v518 c0 30 -7 41 -25 41h-56'],
0x6C: [726,3,290,21,271,'23 661v28c79 12 112 20 156 37l9 -8v-616c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v518c0 30 -7 41 -25 41h-56'],
0x6D: [469,3,882,16,869,'18 400v28c53 6 105 20 156 41l9 -4v-85l69 64c18 16 46 25 83 25c69 0 110 -27 142 -93l76 68c18 16 47 25 82 25c92 0 150 -62 150 -161v-206c0 -63 3 -70 37 -72l47 -3v-30c-76 3 -76 3 -89 3c-17 0 -18 0 -79 -3v284c0 76 -37 127 -91 127c-60 0 -126 -49 -126 -94 v-212c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v179c0 76 -37 127 -91 127c-60 0 -126 -49 -126 -94v-212c0 -63 3 -70 37 -72l42 -3v-30c-65 2 -91 3 -119 3s-31 0 -127 -3v30l46 3c34 2 37 9 37 72v225 c0 66 -4 73 -44 73h-37'],
0x6E: [469,3,581,6,572,'410 -3v284c0 77 -44 127 -111 127c-61 0 -127 -49 -127 -94v-212c0 -63 3 -70 37 -72l45 -3v-30c-68 2 -95 3 -124 3s-56 -1 -124 -3v30l45 3c34 2 37 9 37 72v225c0 66 -4 73 -44 73h-37v28c53 6 105 20 156 41l9 -4v-87l76 66c21 18 42 25 79 25 c112 0 167 -57 167 -174v-193c0 -64 3 -70 37 -72l41 -3v-30c-69 3 -69 3 -83 3c-17 0 -18 0 -79 -3'],
0x6F: [469,20,545,32,514,'279 469c137 0 235 -97 235 -232c0 -150 -110 -257 -264 -257c-129 0 -218 96 -218 237c0 149 101 252 247 252zM261 436c-88 0 -136 -65 -136 -185c0 -142 66 -238 164 -238c81 0 132 72 132 187c0 136 -68 236 -160 236'],
0x70: [477,281,600,8,556,'10 400v28c53 6 105 20 156 41l9 -4v-96c44 74 112 108 179 108c101 0 198 -76 200 -206c1 -10 2 -19 2 -29c0 -121 -97 -263 -248 -263c-41 0 -85 11 -133 34v-189c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v503 c0 66 -4 73 -44 73h-37zM313 404c-91 0 -138 -67 -138 -98v-208c39 -41 80 -59 135 -59c90 0 157 83 157 195c0 104 -60 170 -154 170'],
0x71: [477,281,600,45,593,'591 428v-28h-37c-40 0 -44 -7 -44 -73v-503c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v189c-48 -23 -92 -34 -133 -34c-151 0 -248 142 -248 263c0 10 1 19 2 29c2 130 99 206 200 206c67 0 135 -34 179 -108v96l9 4 c51 -21 103 -35 156 -41zM288 404c-94 0 -154 -66 -154 -170c0 -112 67 -195 157 -195c55 0 96 18 135 59v208c0 31 -47 98 -138 98'],
0x72: [469,3,394,21,374,'23 400v28c53 6 105 20 156 41l9 -4v-103l56 66c21 25 53 41 83 41c18 0 36 -4 47 -11l-9 -120h-21c-16 32 -37 46 -67 46c-49 0 -89 -50 -89 -110v-172c0 -66 2 -69 47 -72l52 -3v-30c-123 3 -123 3 -143 3c-25 0 -51 -1 -123 -3v30l46 3c34 2 37 9 37 72v225 c0 66 -4 73 -44 73h-37'],
0x73: [469,20,423,30,391,'41 143h30v-41c0 -50 52 -85 125 -85c71 0 121 38 121 92c0 86 -94 60 -212 112c-46 21 -68 55 -68 103c0 90 71 145 187 145c41 0 77 -7 130 -25c-3 -31 -4 -55 -5 -113h-28v23c0 47 -51 82 -118 82c-61 0 -96 -28 -96 -78c0 -130 284 -37 284 -210 c0 -98 -93 -168 -224 -168c-46 0 -85 7 -137 24c8 40 11 70 11 139'],
0x74: [621,12,325,22,319,'98 382l-76 -4v21l78 35v70c0 30 -1 49 -4 79c31 10 61 23 85 38l12 -11c-8 -64 -11 -100 -11 -142v-38l137 6l-9 -54h-128v-262c0 -49 16 -69 55 -69c19 0 34 5 59 19l13 -17l-48 -53c-6 -7 -30 -12 -53 -12c-77 0 -110 32 -110 106v288'],
0x75: [469,12,602,18,581,'283 49c87 0 136 77 136 115v163c0 66 -4 73 -44 73h-37v28c53 6 105 20 156 41l9 -4v-363c0 -64 3 -70 37 -72l41 -3v-30c-69 3 -69 3 -83 3c-15 0 -16 0 -83 -3c3 47 4 63 4 92l-67 -62c-27 -25 -67 -39 -112 -39c-136 0 -141 82 -141 171v168c0 66 -4 73 -44 73h-37 v28c53 6 105 20 156 41l9 -4v-295c0 -76 20 -121 100 -121'],
0x76: [459,7,564,6,539,'539 431l-31 -2c-20 -2 -34 -20 -56 -67l-159 -369h-50l-172 403c-11 25 -16 31 -31 33l-34 2v28c49 -2 97 -3 116 -3s19 0 117 3v-28l-49 -2c-16 -1 -25 -8 -25 -19s3 -24 9 -38l119 -288c38 83 92 217 104 246c12 30 19 57 19 73s-8 25 -24 26l-41 2v28 c60 -2 78 -3 94 -3s34 1 94 3v-28'],
0x77: [469,7,833,6,808,'413 469h23c8 -27 15 -47 29 -81l115 -290l105 300c0 18 -14 30 -39 31l-34 2v28c82 -3 82 -3 98 -3s16 0 98 3v-28l-26 -2c-22 -3 -34 -20 -64 -87l-137 -349h-46l-136 342l-150 -342h-47l-102 336c-25 78 -38 98 -66 100l-28 2v28c99 -3 99 -3 119 -3s20 0 119 3v-28 l-40 -2c-25 -1 -38 -9 -38 -24c0 -8 85 -292 88 -301'],
0x78: [469,3,515,20,496,'288 254l148 -200c11 -15 23 -23 35 -24l25 -3v-30c-52 3 -52 3 -62 3c-9 0 -9 0 -61 -3l-133 195l-134 -192h-86v27h17c10 0 23 6 30 15l149 176l-120 174c-10 14 -32 28 -50 29l-21 2l-5 26l116 20c34 -34 45 -47 60 -69l76 -116l124 175h82v-28h-14 c-17 0 -28 -4 -34 -11'],
0x79: [459,283,555,12,544,'297 86l119 292c4 10 6 19 6 26c0 14 -14 23 -42 25l-26 2v28c79 -3 79 -3 95 -3s16 0 95 3v-28l-28 -2c-25 -3 -44 -30 -84 -121l-203 -468c-38 -87 -81 -123 -148 -123c-19 0 -35 5 -54 15l24 75h13c17 -15 35 -23 51 -23c38 0 77 47 109 131l23 61 c-18 50 -28 74 -56 142l-109 262c-15 35 -27 46 -52 49l-18 2v28c97 -3 97 -3 116 -3s19 0 116 3v-28l-38 -2c-23 -1 -34 -9 -34 -23c0 -10 4 -25 10 -42l64 -168c18 -49 29 -72 51 -110'],
0x7A: [462,3,499,16,466,'455 432l-314 -397h135c96 0 138 7 142 25l18 79h30l-6 -47c-7 -48 -9 -72 -9 -95l-276 3c-43 0 -49 0 -153 -3l-6 20l90 105l170 212c27 34 44 57 65 90h-143c-74 0 -92 -5 -96 -26l-12 -68h-30c6 69 7 81 9 132l248 -3c38 0 68 1 138 3v-30'],
0x7B: [726,175,332,58,289,'289 -175c-113 0 -155 26 -155 158v168c0 72 -10 103 -76 122c62 19 76 54 76 119v186c0 125 55 148 155 148c-65 -19 -83 -59 -83 -131v-186c0 -74 -12 -117 -81 -135v-2c72 -19 81 -65 81 -144v-173c0 -72 20 -116 83 -130'],
0x7C: [713,172,210,76,135,'135 -172h-59v885h59v-885'],
0x7D: [726,175,332,44,275,'275 273c-66 -19 -76 -50 -76 -122v-168c0 -132 -42 -158 -155 -158c63 14 83 58 83 130v173c0 79 9 125 81 144v2c-69 18 -81 61 -81 135v186c0 72 -18 112 -83 131c100 0 155 -23 155 -148v-186c0 -65 14 -100 76 -119'],
0x7E: [341,-182,605,51,555,'190 329c73 0 175 -72 226 -72c37 0 74 29 105 84l34 -27c-43 -78 -87 -113 -138 -113c-66 0 -168 71 -226 71c-40 0 -66 -22 -106 -90l-34 27c49 87 88 120 139 120'],
0xA3: [694,13,499,12,478,'338 647c-117 0 -146 -103 -146 -204v-89l182 6l-4 -48c-96 3 -113 4 -181 5c-5 -87 -32 -159 -94 -248c37 5 77 7 133 7c27 0 162 -7 180 -7c19 0 25 8 29 37l7 52h26v-148c-21 -17 -29 -20 -50 -20c-155 0 -180 32 -341 42l-45 -45h-22v49c33 33 48 21 62 50 c20 40 34 121 34 200v30l-96 -3l8 45c43 -1 49 -1 88 -3v106c0 65 33 120 99 168c63 45 116 65 173 65c41 0 64 -6 98 -26c-7 -35 -10 -70 -10 -102c0 -8 0 -19 1 -34h-33c-1 62 -6 115 -98 115'],
0xA5: [701,3,499,5,496,'496 679c-44 -61 -93 -150 -130 -229l-32 -68h122v-39h-139c-8 -18 -17 -41 -17 -65v-32h156v-39h-156v-87c0 -81 5 -86 44 -90l41 -3v-30c-45 2 -85 3 -125 3c-43 0 -84 -1 -118 -3v30l41 3c41 4 44 12 44 90v87h-156v39h156v16c0 8 -1 22 -6 31l-21 50h-129v39h111 l-75 160c-20 42 -46 109 -94 109c-3 0 -5 -1 -8 -1v29l92 21c4 1 7 1 10 1c39 0 53 -59 68 -99l104 -261l70 163c28 64 53 126 77 190h70v-15'],
0xA7: [709,219,499,26,465,'109 534c0 -146 356 -90 356 -281c0 -59 -34 -107 -78 -142c44 -21 60 -60 60 -107c0 -107 -119 -223 -243 -223c-56 0 -118 18 -160 44c6 55 10 105 12 157h32l5 -90c37 -43 77 -59 145 -59c75 0 146 39 146 121c0 163 -358 102 -358 291c0 53 37 107 83 137 c-34 23 -58 62 -58 103c0 116 103 224 227 224c46 0 93 -8 138 -36c-6 -40 -12 -99 -13 -152h-31l-5 87c-37 34 -74 49 -123 49c-73 0 -135 -44 -135 -123zM399 212c0 105 -157 112 -264 157c-29 -25 -43 -51 -43 -90c0 -91 198 -120 273 -156c20 30 34 60 34 89'],
0xA8: [637,-537,332,17,316,'68 637c29 0 49 -21 49 -50c0 -30 -20 -50 -50 -50s-50 20 -50 50s20 50 51 50zM267 637c29 0 49 -21 49 -50c0 -30 -20 -50 -50 -50s-50 20 -50 50s20 50 51 50'],
0xAC: [360,-88,673,65,608,'608 88h-57v211h-486v61h543v-272'],
0xAE: [705,164,906,18,889,'889 271c0 -239 -197 -435 -436 -435s-435 196 -435 435c0 238 196 434 435 434s436 -196 436 -434zM841 271c0 212 -177 386 -388 386c-212 0 -387 -175 -387 -386c0 -212 175 -387 387 -387c211 0 388 174 388 387zM711 40l-4 -4c-18 2 -36 4 -55 4 c-20 0 -40 -2 -60 -4l-198 237l7 9c67 0 140 9 140 94c0 60 -40 86 -96 86c-23 0 -46 -3 -68 -8v-238c0 -11 -1 -31 -1 -53c0 -39 3 -85 22 -93c8 -3 34 -5 43 -6l4 -3v-21l-4 -4c-35 3 -70 4 -105 4s-70 -1 -105 -4l-4 4v21l4 3c64 7 64 1 64 134v130c0 11 1 29 1 50 c0 35 -3 76 -21 84c-9 3 -34 5 -44 6l-4 4v20l4 4c35 -3 70 -4 105 -4c54 0 108 4 162 4c65 0 133 -25 133 -102c0 -72 -61 -110 -124 -125l141 -168c19 -22 29 -36 59 -37l4 -3v-21'],
0xB1: [541,0,668,65,604,'604 279h-240v-192h-59v192h-240v59h240v203h59v-203h240v-59zM604 0h-539v55h539v-55'],
0xB5: [473,286,610,89,567,'553 92c6 -2 11 -6 14 -11v-7c-29 -31 -90 -89 -139 -89c-32 0 -39 29 -39 59c0 22 4 44 5 57l-10 3c-31 -50 -86 -116 -153 -116c-36 0 -64 14 -83 47l-7 1c-2 -28 -2 -54 -2 -79c0 -90 12 -161 55 -218l-7 -12c-21 -8 -44 -12 -67 -13c-24 43 -31 88 -31 140 s7 111 12 181c4 59 11 161 11 255c0 64 -3 123 -14 163c25 4 50 9 77 14c4 -25 5 -49 5 -74c0 -61 -8 -121 -8 -182c0 -25 1 -49 5 -74c7 -47 31 -95 86 -95c57 0 109 55 123 104c10 32 13 102 13 168c0 52 -2 103 -6 131l8 11c23 3 44 9 65 17l9 -10 c-10 -46 -17 -169 -17 -267c0 -50 2 -94 6 -119c2 -14 7 -23 22 -23c26 0 49 23 67 38'],
0xB7: [319,-203,249,67,183,'124 319c31 0 59 -28 59 -59c0 -30 -28 -57 -60 -57c-29 0 -56 28 -56 58s27 58 57 58'],
0xD7: [547,5,668,59,611,'611 37l-43 -42l-233 234l-234 -234l-42 42l234 234l-234 234l42 42l234 -234l233 234l43 -42l-235 -234'],
0xF0: [728,20,545,32,504,'335 614c91 -91 169 -198 169 -352c0 -168 -102 -282 -250 -282c-133 0 -222 96 -222 237c0 148 99 252 239 252c38 0 74 -8 107 -25l2 2c-28 62 -59 106 -98 142l-140 -68l-18 30l136 66c-31 29 -67 52 -122 80l34 32c73 -44 100 -62 133 -90l143 70l18 -30zM416 240 c0 170 -105 196 -155 196c-87 0 -136 -66 -136 -184c0 -140 61 -239 147 -239c109 0 144 133 144 227'],
0xF7: [512,-10,605,51,555,'51 233v56h504v-56h-504zM304 512c31 0 58 -28 58 -59s-27 -57 -59 -57c-30 0 -57 27 -57 58c0 30 27 58 58 58zM304 126c31 0 58 -28 58 -59s-27 -57 -59 -57c-30 0 -57 27 -57 58c0 30 27 58 58 58'],
0x131: [469,3,290,21,271,'188 465v-363c0 -63 3 -70 37 -72l46 -3v-30c-100 3 -100 3 -125 3s-25 0 -125 -3v30l46 3c34 2 37 9 37 72v225c0 66 -4 73 -44 73h-37v28c53 6 105 20 156 41'],
0x2C6: [677,-510,312,0,312,'125 677h62l125 -167h-35l-121 103l-121 -103h-35'],
0x2C7: [677,-510,312,0,312,'156 574l122 103h34l-125 -167h-62l-125 167h35'],
0x2C9: [591,-538,338,13,325,'325 538h-312v53h312v-53'],
0x2D8: [699,-534,269,-4,273,'135 596c61 0 99 48 108 103h26l4 -4c-4 -90 -49 -161 -138 -161c-86 0 -136 68 -139 161l4 4h26c9 -56 47 -103 109 -103'],
0x2D9: [676,-568,333,112,220,'220 622c0 -30 -23 -54 -54 -54s-54 24 -54 54c0 29 25 54 54 54c30 0 54 -23 54 -54'],
0x2DA: [717,-535,267,36,231,'134 717c52 0 97 -39 97 -91s-45 -91 -97 -91c-54 0 -98 38 -98 91c0 51 46 91 98 91zM134 557c34 0 56 32 56 69c0 36 -22 69 -56 69c-35 0 -57 -32 -57 -69s22 -69 57 -69'],
0x2DC: [682,-571,329,-5,334,'-5 575c11 49 50 105 105 105c19 0 46 -8 80 -25c35 -16 58 -25 70 -25c25 0 43 18 56 52h25l3 -3c-11 -52 -49 -108 -106 -108c-19 0 -45 9 -79 25c-34 17 -57 25 -69 25c-26 0 -45 -16 -58 -49h-24'],
0x300: [677,-506,0,-302,-78,'-118 506l-144 90c-33 21 -40 29 -40 47c0 19 14 34 31 34c14 0 34 -12 55 -33l138 -138h-40'],
0x301: [677,-506,0,-255,-31,'-255 506l138 138c22 22 40 33 55 33c17 0 31 -15 31 -34c0 -18 -7 -26 -40 -47l-144 -90h-40'],
0x302: [677,-510,0,-312,0,'-187 677h62l125 -167h-35l-121 103l-121 -103h-35'],
0x303: [640,-535,0,-330,0,'-224 635c53 0 99 -45 136 -45c28 0 44 13 59 50h29c-4 -58 -52 -105 -109 -105c-54 0 -101 45 -139 45c-24 0 -40 -14 -53 -45h-29c14 65 51 100 106 100'],
0x304: [591,-538,0,-332,-20,'-20 538h-312v53h312v-53'],
0x306: [664,-506,0,-307,-25,'-25 664c-6 -135 -91 -158 -141 -158c-53 0 -135 25 -141 158h30c3 -57 49 -98 111 -98s108 41 111 98h30'],
0x307: [637,-537,0,-175,-75,'-124 637c29 0 49 -21 49 -50c0 -30 -20 -50 -50 -50s-50 20 -50 50s20 50 51 50'],
0x308: [637,-537,0,-318,-17,'-267 637c29 0 49 -21 49 -50c0 -30 -20 -50 -50 -50s-50 20 -50 50s20 50 51 50zM-66 637c29 0 49 -21 49 -50c0 -30 -20 -50 -50 -50s-50 20 -50 50s20 50 51 50'],
0x30A: [718,-518,0,-235,-35,'-35 618c0 -55 -45 -100 -100 -100s-100 45 -100 100s45 100 100 100s100 -45 100 -100zM-73 618c0 39 -25 64 -62 64s-62 -25 -62 -64s25 -64 62 -64s62 25 62 64'],
0x30C: [677,-510,0,-322,-10,'-166 574l122 103h34l-125 -167h-62l-125 167h35'],
0x338: [714,169,0,-408,0,'0 714l-352 -883h-56l351 883h57'],
0x382: [0,0,768,0,0,''],
0x391: [700,3,777,15,756,'408 700l242 -568c35 -81 46 -98 72 -102l34 -3v-30c-108 3 -108 3 -127 3c-16 0 -16 0 -139 -3v30l47 3c25 2 41 9 41 20c0 9 -4 22 -21 64l-46 115h-288l-28 -67c-21 -50 -35 -93 -35 -106c0 -16 13 -23 48 -26l37 -3v-30c-108 3 -108 3 -121 3c-16 0 -16 0 -109 -3v30 l37 3c22 2 42 20 54 49l231 526l39 95h32zM240 269h253l-126 298'],
0x392: [692,3,610,26,576,'298 -3c-37 0 -99 3 -124 3c-19 0 -40 -1 -109 -3v23l26 10c14 6 22 28 22 65v474c0 79 -4 86 -51 90l-36 3v30c139 -3 139 -3 158 -3c15 0 15 0 154 3h25c116 0 179 -48 179 -136c0 -52 -25 -95 -73 -128c-28 -19 -51 -28 -100 -38c77 -6 111 -15 149 -43 c38 -27 58 -68 58 -119c0 -94 -68 -231 -278 -231zM208 362v-315c25 -6 48 -8 77 -8c119 0 188 63 188 169c0 107 -63 156 -198 156c-19 0 -28 0 -67 -2zM208 644v-243c31 -2 44 -2 61 -2c114 0 174 47 174 134c0 83 -47 119 -154 119c-32 0 -55 -2 -81 -8'],
0x393: [692,3,555,22,536,'536 689c-11 -44 -16 -101 -16 -152h-35v65c0 13 -1 24 -9 27c-53 20 -132 21 -176 21c-27 0 -56 -1 -84 -8v-522c0 -82 8 -88 51 -90l48 -3v-30c-52 2 -99 3 -145 3c-49 0 -96 -1 -148 -3v30l48 3c43 2 51 10 51 90v449c0 82 -8 88 -51 90l-48 3v30c55 -1 112 -3 169 -3 c67 0 159 3 216 3c45 0 90 -1 129 -3'],
0x394: [697,4,688,27,662,'27 6c65 122 267 584 310 691h25l300 -691l-6 -10c-101 3 -202 4 -303 4c-107 0 -213 0 -319 -4zM537 37c-65 178 -137 354 -212 527h-7c-76 -175 -147 -349 -213 -527h432'],
0x395: [692,3,610,22,572,'556 0c-136 -3 -136 -3 -167 -3c-70 0 -186 3 -232 3c-28 0 -57 -1 -123 -3v30l36 3c47 4 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c99 -2 144 -3 178 -3c46 0 139 3 232 3c44 0 75 -1 124 -3c-11 -44 -16 -92 -16 -152h-35v65c0 23 -3 28 -27 34 c-32 9 -88 14 -145 14c-69 0 -86 -1 -117 -8v-267c34 -2 68 -3 110 -3c66 0 94 3 112 11c4 8 7 17 8 26l5 52h30c-3 -100 -3 -100 -3 -116c0 -18 0 -18 3 -111h-30l-5 59l-8 25c-18 9 -45 12 -119 12c-45 0 -75 -1 -103 -3v-280c29 -6 47 -8 99 -8c90 0 163 5 184 12 c28 10 37 104 38 113h35c-10 -52 -16 -106 -16 -164'],
0x396: [692,3,666,15,638,'637 663l-486 -619h273c153 0 167 6 174 72l6 57h34c-5 -55 -6 -90 -6 -176l-277 3h-28l-312 -3v28l485 623h-228c-122 0 -149 -8 -152 -47l-6 -71h-34c2 37 3 55 3 81s-1 44 -3 81l165 -1l182 -2h50l156 3'],
0x397: [692,3,831,22,810,'616 120v223c-27 2 -43 2 -83 2h-234c-40 0 -56 0 -83 -2v-223c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90 v-178c20 -1 42 -2 83 -2h234c41 0 62 1 83 2v178c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-449c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90'],
0x398: [709,20,785,22,764,'764 367c0 -215 -168 -387 -387 -387c-203 0 -355 140 -355 364c0 206 144 365 376 365c214 0 366 -120 366 -342zM657 336c0 198 -89 331 -275 331c-174 0 -253 -122 -253 -286c0 -175 82 -359 280 -359c167 0 248 130 248 314zM238 481c14 -19 -2 -81 66 -81h178 c55 0 54 33 63 77l4 4h14l3 -4c-1 -76 -1 -152 0 -228l-3 -4h-14l-4 4c-9 45 -8 78 -63 78h-178c-54 0 -53 -33 -63 -78l-3 -4h-15l-3 4c1 76 1 152 0 228l3 4h15'],
0x399: [692,3,336,22,315,'216 569v-449c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90'],
0x39A: [692,3,725,22,719,'216 325v-205c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 79 -5 87 -51 90l-48 3v30c72 -2 96 -3 146 -3c52 0 74 0 147 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-214l278 271c16 16 29 33 29 39v27 c58 -2 75 -3 89 -3c17 0 17 0 80 3v-30l-42 -2c-26 -1 -51 -14 -78 -38l-266 -240l330 -312c31 -29 47 -40 62 -41l21 -2v-30c-50 3 -52 3 -65 3c-10 0 -10 0 -80 -3l-348 335'],
0x39B: [697,4,753,17,729,'729 0l-3 -4c-40 2 -79 4 -119 4c-44 0 -88 -1 -133 -4l-3 4v25l3 4c17 0 83 -5 83 23c0 25 -54 156 -64 182l-129 325h-7c-10 -24 -199 -464 -199 -505c0 -27 55 -25 70 -25l5 -4v-25l-5 -4c-34 2 -69 4 -105 4c-34 0 -67 -2 -102 -3l-4 3v25l4 4c22 0 47 -1 64 13 c37 29 281 617 298 655h20c8 -20 242 -623 279 -656c13 -11 27 -10 44 -12l3 -4v-25'],
0x39C: [692,13,945,16,926,'732 120v458c-18 -34 -30 -57 -44 -86l-178 -378c-18 -37 -42 -102 -54 -127h-20l-279 591v-458c0 -78 5 -87 51 -90l44 -3v-30c-105 3 -105 3 -120 3c-17 0 -17 0 -116 -3v30l44 3c46 3 51 12 51 90v449c0 78 -5 87 -51 90l-44 3v30c80 -3 80 -3 95 -3s15 0 95 3 l264 -556l262 556c80 -3 80 -3 95 -3c17 0 17 0 93 3v-30l-42 -3c-46 -3 -51 -12 -51 -90v-449c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -151 3c-52 0 -82 -1 -128 -3v30l34 3c47 4 51 11 51 90'],
0x39D: [692,20,830,17,813,'137 689l54 3l481 -594v471c0 78 -5 87 -51 90l-44 3v30c99 -3 99 -3 116 -3c15 0 15 0 120 3v-30l-44 -3c-46 -3 -51 -12 -51 -90v-552c0 -11 3 -30 3 -37l-90 16l-473 587v-463c0 -78 5 -87 51 -90l44 -3v-30c-105 3 -105 3 -120 3c-17 0 -17 0 -116 -3v30l44 3 c46 3 51 12 51 90v449c0 78 -5 87 -51 90l-44 3v30c55 -2 64 -2 120 -3'],
0x39E: [689,4,692,42,651,'612 505l-4 -3l-20 -3l-4 4c-12 78 -6 112 -97 112h-283c-89 0 -83 -33 -96 -112l-5 -4l-19 3l-3 3c3 60 6 121 4 181l2 3c173 -4 346 -4 518 0l2 -3l-1 -39c-1 -47 2 -94 6 -142zM197 484c14 -18 -5 -84 66 -84h166c56 0 55 36 64 80l3 4h15l3 -4c-1 -78 -1 -155 0 -234 l-3 -3h-15c-14 14 4 84 -67 84h-166c-55 0 -55 -36 -63 -81l-3 -3h-15l-4 3c2 79 2 156 0 234l4 4h15zM651 189c-10 -62 -17 -126 -23 -189l-4 -4c-92 2 -185 4 -278 4c-92 0 -185 -2 -279 -4l-3 4c-5 63 -12 127 -22 189l4 5h20l5 -5c24 -83 23 -113 118 -113h314 c95 0 95 31 119 113l4 5h20'],
0x39F: [709,20,785,22,764,'402 709c222 0 362 -132 362 -342c0 -220 -170 -387 -394 -387c-202 0 -348 151 -348 361c0 220 152 368 380 368zM385 667c-164 0 -256 -104 -256 -289c0 -214 113 -356 281 -356c155 0 247 117 247 315c0 208 -101 330 -272 330'],
0x3A0: [689,4,812,25,788,'788 0l-4 -4c-44 1 -89 4 -135 4s-93 -2 -140 -4l-4 4v24l4 3c16 1 72 2 83 14c13 16 10 145 10 173v433l-391 1v-428c0 -28 -3 -164 11 -179c12 -13 65 -13 82 -14l4 -3v-24l-4 -4c-45 1 -90 4 -135 4c-47 0 -93 -2 -141 -4l-3 4v24l3 3c17 1 71 1 83 14 c13 16 11 141 11 170v268c0 28 3 151 -13 167c-12 12 -65 12 -81 13l-3 3v24l3 3c120 -1 240 -3 360 -3c132 0 265 0 396 3l4 -3v-24l-4 -3c-17 -1 -68 0 -81 -13c-15 -14 -11 -144 -11 -171v-266c0 -26 -3 -153 11 -168c12 -12 65 -13 81 -14l4 -3v-24'],
0x3A1: [692,3,603,22,580,'23 692c160 -3 160 -3 182 -3c102 0 153 3 170 3c127 0 205 -59 205 -155c0 -120 -105 -213 -242 -213c-20 0 -34 1 -58 6l-10 36c26 -8 41 -10 62 -10c87 0 148 63 148 153c0 95 -58 145 -166 145c-34 0 -63 -4 -98 -12v-522c0 -79 5 -87 51 -90l48 -3v-30 c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v449c0 74 -8 87 -53 90l-45 3v30'],
0x3A3: [689,4,684,45,643,'643 193c-6 -66 -9 -131 -10 -197c-102 2 -204 4 -305 4c-94 0 -188 -2 -283 -4v19c50 54 192 260 227 325c-65 111 -134 222 -210 325v16c174 1 349 0 522 8l4 -6c-7 -47 -9 -96 -12 -143l-3 -4h-18l-6 4c-13 89 -16 104 -114 104h-253c53 -84 106 -170 165 -251 c-71 -102 -139 -209 -203 -317h358c95 0 97 35 116 117l4 3h18'],
0x3A4: [692,3,612,18,595,'379 689c43 0 22 0 216 3c-5 -57 -7 -96 -7 -161h-31l-4 79c-1 24 -12 33 -40 33h-155c-3 -33 -4 -55 -4 -100v-423c0 -79 5 -87 51 -90l48 -3v-30c-59 2 -95 3 -147 3s-87 -1 -146 -3v30l48 3c46 3 51 11 51 90v423c0 43 -1 66 -4 100h-155c-28 0 -39 -9 -40 -33l-4 -79 h-31c0 65 -2 104 -7 161l68 -1c71 -1 125 -2 135 -2h158'],
0x3A5: [691,4,733,3,735,'735 564l-1 -18l-16 -9c-9 54 -40 82 -94 82c-41 0 -80 -20 -110 -46c-93 -79 -102 -231 -102 -344c0 -30 -3 -172 11 -188c11 -12 65 -13 82 -14l3 -3v-24l-3 -4c-46 2 -92 4 -140 4c-45 0 -90 -1 -136 -4l-4 4v24l4 3c15 0 73 1 83 14c13 18 11 157 11 188v38 c0 106 -57 355 -195 355c-54 0 -82 -33 -108 -75l-9 -1l-8 12c33 73 85 133 171 133c170 0 179 -254 184 -375h25c12 145 43 375 237 375c78 0 119 -52 115 -127'],
0x3A6: [689,4,755,23,723,'723 372c0 -196 -136 -257 -311 -257c0 -21 -3 -57 7 -75c16 -11 65 -12 83 -13l5 -3v-24l-5 -4c-43 2 -86 4 -129 4c-42 0 -86 -1 -129 -4l-6 4v24l6 3c91 6 91 -6 91 88c-162 0 -312 57 -312 243c0 188 154 227 312 227c-3 84 -12 69 -91 74l-6 3v24l6 3 c43 -2 87 -3 129 -3c43 0 86 1 129 3l5 -3v-24l-5 -3c-80 -5 -88 9 -90 -74c147 0 311 -30 311 -213zM624 359c0 139 -80 199 -212 201v-422c135 4 212 86 212 221zM335 138v422c-136 -5 -213 -58 -213 -202c0 -134 78 -216 213 -220'],
0x3A7: [700,3,666,14,648,'648 27v-30c-43 2 -66 3 -71 3c-12 0 -12 0 -74 -3l-184 306c-25 -35 -35 -49 -45 -64l-157 -242c-43 3 -43 3 -51 3s-8 0 -52 -3v30l26 3c22 3 34 14 59 48c2 3 3 3 6 8l189 255l-128 213c-33 55 -68 88 -92 90l-45 3l-2 26l141 27c19 -10 34 -27 57 -65l128 -216 l185 270h101v-30l-41 -3c-15 -2 -27 -14 -68 -68l-154 -208l176 -280c35 -54 47 -67 66 -70'],
0x3A8: [689,4,870,18,852,'852 672l-4 -7c-50 -18 -48 -5 -60 -70c-34 -213 -48 -383 -309 -388c0 -8 -5 -153 15 -168c13 -10 62 -11 79 -12l4 -3v-24l-4 -4c-47 2 -93 4 -139 4s-91 -1 -137 -4l-3 4v24l3 3c17 1 70 1 82 14c14 15 12 145 12 166c-245 2 -279 162 -306 367c-10 85 -18 76 -63 91 l-4 7l2 12l4 5c22 -3 116 -9 131 -21c34 -27 14 -325 132 -407c30 -20 69 -25 104 -25v229c0 30 3 163 -12 180c-11 13 -65 13 -82 14l-3 3v24l3 3c46 -2 91 -3 137 -3s92 1 139 3l4 -3v-24l-4 -3c-16 -1 -72 -1 -82 -14c-14 -17 -12 -147 -12 -177v-232c35 0 74 5 103 25 c120 82 99 380 134 407c13 11 109 18 129 21l6 -5'],
0x3A9: [704,6,824,34,791,'791 145c-7 -50 -14 -101 -17 -151c-103 4 -208 4 -311 6v68c159 21 206 165 206 306c0 170 -56 303 -250 303c-190 0 -263 -125 -263 -300c0 -142 45 -288 206 -309v-68c-103 -3 -207 -2 -310 -6c-5 50 -11 101 -18 151l4 3h15l3 -3c13 -52 13 -70 73 -70h129v7 c-134 56 -207 172 -207 316c0 223 167 306 368 306c199 0 356 -90 356 -306c0 -145 -74 -260 -208 -316v-7h129c60 0 60 17 73 70l4 3h14'],
0x3B1: [473,16,594,44,571,'562 97c5 -3 8 -6 9 -13c-26 -30 -82 -100 -128 -100c-51 0 -62 111 -68 144l-3 3c-48 -67 -126 -147 -218 -147c-68 0 -97 49 -106 109c-24 150 73 380 241 380c112 0 114 -136 125 -213l7 -3c26 58 61 134 63 202h11c11 -15 21 -31 30 -46c-25 -70 -60 -135 -99 -197 c6 -32 22 -161 67 -161c27 0 46 24 62 42h7zM368 164l-4 28c-9 60 -30 245 -117 245c-109 0 -142 -235 -128 -322c7 -45 25 -87 78 -87c71 0 137 85 171 136'],
0x3B2: [680,283,512,68,460,'458 205c20 -125 -76 -220 -197 -220c-44 0 -89 13 -127 44c3 -95 6 -191 17 -288c-26 -6 -50 -15 -74 -24l-9 10c11 186 6 375 5 562c0 150 -15 306 135 372c28 12 57 19 88 19c64 0 103 -36 113 -99c17 -102 -54 -181 -141 -212l1 -9c91 -2 174 -59 189 -155zM376 208 c-13 83 -70 131 -152 131c-17 0 -32 -3 -47 -10l-9 8c4 10 5 22 6 34c106 0 187 75 169 187c-7 48 -37 88 -89 88c-19 0 -37 -6 -52 -17c-80 -56 -77 -424 -61 -526c9 -58 51 -91 108 -91c106 0 142 103 127 196'],
0x3B3: [473,273,581,-31,489,'88 473c88 0 171 -80 146 -450l6 -4c48 89 192 260 176 366c-3 16 -10 34 -16 50l4 11c16 7 47 21 64 21c16 0 18 -17 20 -29c13 -85 -61 -184 -105 -244c-54 -74 -108 -151 -156 -230c-2 -70 -12 -147 -26 -215c-19 -9 -38 -17 -58 -22c-5 8 -11 16 -16 24 c15 68 46 163 82 220c2 82 -2 165 -16 249c-11 70 -42 192 -137 192c-37 0 -54 -18 -73 -46l-6 -1c-3 3 -7 6 -8 12c25 49 54 96 119 96'],
0x3B4: [702,16,497,56,468,'465 287c24 -148 -62 -303 -219 -303c-113 0 -168 69 -185 176c-23 144 47 266 185 298v6c-47 29 -126 75 -135 133c-13 79 102 105 163 105c25 0 50 -3 75 -8c8 -13 14 -27 19 -39l-9 -6c-33 19 -68 30 -103 30s-85 -15 -78 -57c18 -116 258 -152 287 -335zM393 269 c-13 81 -54 130 -125 179c-127 -37 -152 -153 -131 -281c11 -68 46 -155 130 -155c118 0 141 162 126 257'],
0x3B5: [468,13,480,69,436,'161 146c0 -70 58 -116 135 -116c45 0 89 14 132 42l8 -5l-15 -26c-50 -36 -103 -54 -159 -54c-102 0 -193 50 -193 145c0 65 48 106 143 123v9c-70 12 -105 45 -105 97c0 72 58 107 174 107c59 0 105 -13 138 -40c-16 -27 -29 -49 -38 -68l-11 5c-5 55 -36 83 -92 83 c-55 0 -93 -36 -93 -90c0 -44 22 -86 98 -86c22 0 44 3 68 7l5 -6c-3 -13 -6 -23 -8 -30l-10 -6c-23 6 -45 9 -66 9c-44 0 -111 -20 -111 -100'],
0x3B6: [712,149,504,61,509,'509 26c12 -79 -96 -162 -164 -175c-19 12 -36 28 -53 41l3 8c16 -3 31 -4 47 -4c43 0 133 33 124 86c-3 20 -28 22 -43 22c-45 0 -90 -9 -136 -9c-125 0 -200 68 -220 192c-27 173 51 344 165 451l-5 5c-22 -1 -44 -3 -66 -3c-30 0 -66 4 -94 25l2 19l30 21l12 -1 c17 -37 69 -41 101 -41c62 0 120 24 171 49l15 -7c-3 -17 -6 -33 -11 -49c-213 -13 -290 -229 -260 -424c20 -127 78 -176 204 -176c42 0 81 9 122 9c25 0 51 -9 56 -39'],
0x3B7: [473,275,531,-11,464,'149 473c28 0 17 -46 13 -104l7 -2c35 57 89 106 165 106c59 0 104 -29 113 -91c16 -105 -47 -499 17 -620l-2 -13c-25 -4 -48 -13 -70 -24l-11 9c-8 32 -16 50 -10 165c5 113 23 345 5 457c-8 46 -38 70 -83 70c-172 0 -134 -289 -130 -408l-5 -9 c-21 -6 -41 -15 -61 -25l-10 10c7 79 16 322 6 388c-2 11 -7 19 -18 19c-20 0 -56 -26 -72 -37l-6 2l-8 11l2 6c29 21 125 90 158 90'],
0x3B8: [702,16,613,83,555,'547 504c29 -188 -11 -520 -247 -520c-136 0 -187 96 -206 215c-34 213 2 503 262 503c127 0 173 -84 191 -198zM478 381c0 45 1 91 -6 136c-11 68 -44 159 -130 159c-129 0 -173 -191 -180 -295h316zM478 349h-316c0 -48 -1 -97 7 -146c12 -78 40 -190 142 -190 c143 0 162 224 167 336'],
0x3B9: [473,16,310,94,305,'305 83l-1 -8c-30 -30 -95 -91 -144 -91c-36 0 -59 23 -64 59c-8 57 13 250 6 406c26 5 53 12 76 24l11 -13c-18 -81 -37 -295 -24 -375c3 -22 17 -44 42 -44c30 0 64 34 82 52l10 -2'],
0x3BA: [473,16,571,13,553,'541 76c6 -1 8 -6 12 -9l-1 -6c-27 -29 -74 -76 -121 -76c-68 0 -179 219 -198 249c-9 -9 -30 -28 -35 -37c-13 -23 1 -151 5 -186c-20 -9 -41 -16 -60 -27l-11 10c2 94 1 190 0 286c0 114 -5 127 -25 127s-66 -33 -81 -44l-8 2l-5 8v8c28 22 116 92 154 92 c16 0 25 -10 27 -25c12 -70 1 -151 0 -219l8 -2c18 21 204 240 250 240c67 0 55 -35 52 -82l-9 -3c-11 11 -22 16 -36 16c-44 0 -180 -86 -186 -124c13 -36 134 -200 166 -220c12 -8 22 -12 34 -12c27 0 50 17 68 34'],
0x3BB: [702,16,618,86,616,'603 88c7 0 9 -5 13 -8l-1 -6c-27 -29 -84 -90 -127 -90c-9 0 -17 3 -25 10c-21 18 -106 369 -124 431l-8 3c-71 -120 -145 -269 -162 -415c-24 -9 -48 -18 -71 -29l-12 14c66 163 137 322 240 462c-11 40 -47 193 -99 193c-16 0 -45 -11 -61 -16c-6 2 -8 6 -11 11l3 8 c30 13 100 46 137 46c10 0 19 -3 27 -13c17 -19 105 -363 139 -470c10 -31 42 -164 81 -164c21 0 46 22 61 33'],
0x3BC: [473,286,610,89,567,'553 92c6 -2 11 -6 14 -11v-7c-29 -31 -90 -89 -139 -89c-54 0 -37 84 -34 116l-10 3c-31 -50 -86 -116 -153 -116c-36 0 -64 14 -83 47l-7 1c-7 -129 -2 -224 53 -297l-7 -12c-21 -8 -44 -12 -67 -13c-48 87 -28 182 -19 321c6 99 24 319 -3 418c25 4 50 9 77 14 c17 -111 -15 -219 2 -330c7 -47 31 -95 86 -95c57 0 109 55 123 104c17 57 16 235 7 299l8 11c23 3 44 9 65 17l9 -10c-15 -70 -24 -312 -11 -386c2 -14 7 -23 22 -23c26 0 49 23 67 38'],
0x3BD: [473,7,497,-24,443,'228 71c108 116 189 271 114 355l2 11c20 11 41 22 63 31c17 -16 31 -33 35 -56c17 -108 -162 -303 -211 -404c-20 -3 -39 -8 -56 -15l-11 7c-34 241 -52 401 -116 401c-21 0 -43 -17 -57 -29h-9l-6 8v9c26 19 111 84 141 84c25 0 29 -15 62 -186l40 -211'],
0x3BE: [701,148,547,65,507,'507 28c11 -71 -103 -153 -158 -176c-17 15 -38 28 -60 40l2 9c13 -2 26 -3 40 -3c41 0 136 32 128 82c-4 21 -22 26 -40 26c-54 0 -106 -13 -161 -13c-87 0 -174 39 -190 138c-21 133 116 198 220 229l-1 9c-63 19 -129 48 -140 120c-12 74 50 120 111 140l-2 7 c-18 1 -120 -9 -125 30c-3 18 25 29 36 35l8 -4c9 -39 69 -41 97 -41c56 0 110 15 155 40l11 -11c-2 -12 -5 -25 -10 -38c-90 -1 -230 -28 -211 -146c16 -97 106 -125 187 -127l4 -8c-9 -7 -16 -18 -22 -28c-13 1 -27 2 -41 2c-108 0 -223 -70 -204 -192 c14 -86 92 -93 159 -93c51 0 99 11 148 11c24 0 54 -8 59 -38'],
0x3BF: [469,20,545,32,514,'279 469c137 0 235 -97 235 -232c0 -150 -110 -257 -264 -257c-129 0 -218 96 -218 237c0 149 101 252 247 252zM261 436c-88 0 -136 -65 -136 -185c0 -142 66 -238 164 -238c81 0 132 72 132 187c0 136 -68 236 -160 236'],
0x3C0: [467,15,653,27,628,'513 -15c-100 0 -147 214 -50 420h-227c-4 -131 -10 -264 -28 -392c-15 -12 -49 -19 -68 -25l-11 10c40 131 52 268 67 407h-23c-75 0 -97 -10 -129 -73l-6 -4l-9 5l-2 7c10 49 11 127 76 126c85 -1 171 -3 257 -3c71 0 141 2 211 4c-3 -15 -6 -32 -6 -49l-60 -13 c-30 -72 -43 -154 -29 -239c6 -41 25 -124 81 -124c23 0 41 14 57 25l7 -1l7 -9l-2 -6c-26 -26 -72 -66 -113 -66'],
0x3C1: [473,284,566,83,490,'153 29c0 -177 19 -252 29 -295c-24 -11 -53 -17 -81 -18c-10 143 -7 285 -15 428c-2 44 -4 90 1 133c12 118 113 196 232 196c96 0 151 -53 166 -148c25 -157 -42 -340 -219 -340c-41 0 -79 10 -109 44h-4zM407 311c-11 69 -42 130 -121 130c-137 0 -144 -198 -128 -303 c10 -66 39 -127 117 -127c138 0 150 192 132 300'],
0x3C2: [463,155,501,43,430,'429 34c14 -89 -93 -168 -166 -189l-46 44l5 8c14 -3 30 -5 44 -5c48 0 130 35 121 92c-2 16 -16 21 -31 21c-45 0 -88 -11 -134 -11c-289 0 -192 469 47 469h135l5 -6c-1 -16 -3 -31 -6 -44c-110 -1 -230 18 -292 -77c-71 -106 -37 -279 133 -279c47 0 92 10 139 10 c23 0 42 -8 46 -33'],
0x3C3: [474,15,552,35,487,'487 469c-1 -20 -4 -40 -8 -58c-53 -8 -112 -4 -165 -4l-2 -3c73 -43 104 -61 117 -147c25 -157 -55 -272 -215 -272c-112 0 -156 63 -173 168c-22 141 19 208 131 269c30 17 63 30 101 35c51 6 173 -4 208 17zM360 267c-9 54 -24 105 -74 140c-14 2 -29 5 -44 5 c-131 0 -148 -120 -130 -230c11 -70 37 -169 129 -169c126 0 135 155 119 254'],
0x3C4: [463,16,519,35,439,'47 278c-6 1 -11 3 -12 9c2 32 7 158 31 173c8 4 33 3 41 3h327l5 -6l-4 -37c-43 -9 -97 -7 -142 -8c-4 -29 -63 -379 59 -379c29 0 52 23 71 41l5 1l11 -10l-1 -6c-34 -37 -84 -75 -140 -75c-54 0 -77 38 -84 87c-17 109 8 236 32 337c-163 -6 -163 15 -193 -127'],
0x3C5: [471,12,547,13,490,'487 358c24 -148 -99 -370 -258 -370c-63 0 -102 38 -112 100c-18 112 50 319 -28 319c-22 0 -46 -25 -62 -38l-7 2l-7 8l1 9c33 32 78 75 131 75c110 0 20 -222 43 -364c6 -39 30 -75 73 -75c114 0 179 187 163 287c-7 42 -28 95 -73 113l-3 10c21 11 39 24 58 37 c45 -27 73 -64 81 -113'],
0x3C6: [485,277,681,8,606,'602 332c30 -190 -108 -332 -293 -336l11 -247c-21 -7 -42 -15 -63 -26l-11 8c3 87 9 175 5 263c-122 17 -219 79 -239 207c-26 162 97 261 242 284l7 -10l-4 -7c-123 -36 -185 -125 -163 -262c14 -90 58 -165 155 -189c-2 69 -10 244 9 305c26 82 114 151 199 151 c87 0 133 -61 145 -141zM534 317c-9 58 -38 123 -108 123c-121 0 -116 -173 -117 -262l-2 -163c157 17 253 141 227 302'],
0x3C7: [479,193,608,51,548,'427 -172c-80 0 -108 109 -166 323h-8c-61 -109 -113 -223 -151 -344h-11c-16 11 -29 26 -40 41c49 120 121 235 201 335c-12 42 -57 194 -84 219c-12 10 -24 17 -37 17c-23 0 -34 -15 -47 -27l-7 -1l-8 7l2 7c25 29 57 68 103 68c11 0 24 -2 35 -9 c39 -24 71 -176 83 -221l7 -4c62 81 124 163 190 240c22 -10 42 -21 59 -35l-3 -10c-76 -18 -203 -174 -244 -218c19 -63 76 -286 131 -321c13 -8 28 -12 42 -12c21 0 38 10 52 21h6l6 -6l-1 -5c-29 -29 -63 -65 -110 -65'],
0x3C8: [682,281,695,6,626,'19 377c-7 0 -10 4 -13 8l2 9c25 19 97 69 128 69c68 0 17 -187 38 -312c12 -77 63 -119 138 -115l21 1c3 207 8 417 -4 625c15 9 31 15 49 20l9 -6c-13 -211 -18 -425 -24 -638c138 18 221 139 198 280c-6 37 -18 76 -44 107l-2 13c14 4 48 20 63 20c28 0 42 -67 45 -86 c26 -160 -110 -348 -261 -377c5 -83 10 -166 18 -249c-18 -10 -37 -20 -56 -27l-9 10c12 85 16 175 18 263c-15 -3 -31 -4 -48 -4c-96 0 -159 27 -176 132c-16 104 31 279 -37 279c-20 0 -37 -12 -53 -22'],
0x3C9: [463,12,715,36,659,'656 325c23 -144 -60 -337 -220 -337c-50 0 -81 15 -110 59h-5c-35 -36 -72 -59 -127 -59c-97 0 -139 62 -153 149c-26 164 75 291 223 326l11 -11l-6 -9c-129 -55 -176 -165 -152 -314c8 -48 31 -113 94 -113c120 0 113 201 103 288c21 5 42 13 62 20l11 -9 c-12 -63 -15 -130 -5 -196c7 -46 21 -102 80 -102c117 0 149 172 134 267c-9 54 -32 108 -83 139l-1 11c13 7 39 24 54 24c53 0 84 -95 90 -133'],
0x3D0: [701,13,562,30,492,'352 701c103 0 122 -61 122 -92c0 -67 -55 -127 -131 -164c-25 -11 -53 -31 -93 -42c26 1 44 2 55 2c107 0 187 -76 187 -189c0 -118 -91 -229 -246 -229c-170 0 -216 128 -216 248c0 196 117 466 322 466zM333 677c-84 0 -168 -109 -209 -291c155 46 250 145 277 224 c0 53 -37 67 -68 67zM266 15c112 0 141 122 141 186c0 68 -31 183 -161 183c-38 0 -79 -9 -125 -30c-40 -66 -15 -280 76 -323c19 -10 42 -16 69 -16'],
0x3D1: [702,15,620,62,583,'300 702c169 0 242 -246 227 -397c17 -3 35 -5 53 -6l3 -8c-19 -16 -40 -30 -63 -39c-22 -108 -87 -267 -220 -267c-18 0 -38 2 -54 13c-59 40 -79 241 -126 241c-17 0 -32 -19 -42 -30h-7l-8 8l-1 8c21 23 74 79 111 79c67 0 53 -271 166 -271c97 0 135 155 143 232 c-124 31 -303 115 -326 257c-15 95 48 180 144 180zM483 317c-2 18 -3 36 -6 54c-15 93 -78 293 -205 293c-53 0 -88 -42 -79 -95c19 -120 190 -206 290 -252'],
0x3D5: [705,289,665,27,613,'609 285c31 -198 -91 -288 -271 -301c4 -82 13 -163 27 -245c-13 -13 -29 -21 -47 -28l-11 9v264c-127 8 -253 53 -276 201c-29 182 106 289 276 288l-2 211c13 9 26 16 39 21l8 -6c-11 -75 -14 -151 -17 -226c123 1 251 -45 274 -188zM528 273c-17 107 -83 174 -193 176 l2 -443c149 5 214 124 191 267zM307 449c-153 -5 -218 -113 -195 -262c18 -114 77 -173 194 -182'],
0x3D6: [529,12,715,41,671,'667 301c25 -153 -62 -313 -225 -313c-50 0 -81 15 -110 59h-6c-35 -36 -71 -59 -127 -59c-93 0 -138 58 -152 146c-25 159 42 260 180 309l11 -12l-1 -8c-103 -54 -134 -175 -115 -294c8 -48 33 -113 94 -113c120 0 113 200 103 288c21 5 42 13 62 20l12 -9 c-13 -63 -17 -131 -6 -198c7 -45 22 -100 80 -100c117 0 146 170 131 265c-30 190 -242 214 -389 214v10l15 20c22 2 45 3 69 3c154 0 344 -38 374 -228'],
0x3DC: [692,3,555,22,536,'216 120c0 -79 5 -87 51 -90h48v-33c-108 4 -186 4 -293 0v30h48c46 3 51 14 51 93v449c0 79 -5 87 -51 90h-48v33c242 -9 261 4 514 -3c-11 -44 -16 -92 -16 -152h-35v65c-1 43 -19 50 -269 40v-267h261c0 -18 1 -48 4 -141h-30c6 28 0 56 -13 84c-12 16 -179 13 -222 9 v-207'],
0x3DD: [495,233,514,23,495,'190 156v-389c-56 10 -111 9 -167 1v30l46 3c34 2 38 9 37 72v622c37 -2 75 -5 116 -6l255 6c22 -19 27 -41 0 -63c-70 7 -142 8 -217 1c-57 0 -74 -39 -70 -228c79 -6 157 -7 231 0c11 -19 8 -35 -2 -49c-76 5 -153 6 -229 0'],
0x3F0: [510,16,705,48,659,'659 72l-1 -5c-35 -39 -75 -83 -132 -83c-76 0 -92 77 -92 138c0 52 12 104 29 153l-8 6c-116 -104 -242 -209 -372 -295l-26 4c-6 15 -9 31 -9 48c0 8 1 16 2 24c102 48 181 174 181 286c0 33 -7 88 -51 88c-29 0 -57 -27 -77 -45l-7 1l-6 8l1 7c35 34 95 91 146 91 c42 0 58 -32 58 -69c0 -81 -58 -197 -104 -263l6 -6c306 205 294 260 326 301c15 18 32 34 49 49l19 -8l1 -22c1 -27 -4 -55 -10 -82c-29 -30 -60 -58 -92 -86c-6 -27 -10 -55 -10 -83c0 -63 13 -181 98 -181c28 0 49 15 70 32h5'],
0x3F1: [474,260,478,72,461,'457 313c21 -134 -23 -328 -181 -328c-91 0 -141 69 -161 150l-12 -2c-3 -51 -5 -106 3 -157c25 -158 98 -109 214 -130c28 -5 44 -8 61 -89c-2 -5 -9 -14 -13 -17h-4c-2 9 -5 21 -11 28c-41 51 -232 -53 -273 205c-23 147 -4 501 188 501c97 0 173 -62 189 -161z M388 363c-8 49 -35 84 -88 84c-119 0 -162 -233 -147 -329c8 -48 38 -102 93 -102c126 0 157 248 142 347'],
0x3F4: [709,20,785,22,764,'402 709c222 0 362 -132 362 -342c0 -220 -170 -387 -394 -387c-202 0 -348 151 -348 361c0 220 152 368 380 368zM656 375c-12 185 -111 292 -271 292c-164 0 -256 -107 -256 -292h527zM131 330c16 -187 124 -308 279 -308c153 0 245 114 247 308h-526'],
0x3F5: [466,12,480,46,439,'439 408l-31 -53l-19 -1c-14 55 -60 68 -119 68c-98 0 -136 -86 -140 -170c76 0 151 -1 228 1l-9 -34l-5 -6c-30 1 -60 1 -89 1h-57c-22 0 -44 0 -68 -3c1 -91 76 -175 148 -175c58 0 109 20 149 52l11 -18c-50 -44 -114 -82 -189 -82c-105 0 -181 51 -199 161 c-24 158 87 317 251 317c49 0 101 -21 138 -58'],
0x2013: [277,-219,499,0,500,'500 219h-500v58h500v-58'],
0x2014: [277,-219,1000,0,1000,'1000 219h-1000v58h1000v-58'],
0x2016: [713,172,420,76,345,'345 -172h-59v885h59v-885zM135 -172h-59v885h59v-885'],
0x2018: [709,-446,277,45,233,'174 685l44 24l15 -10c-48 -77 -72 -143 -78 -223c-51 -15 -61 -18 -97 -30l-13 13c34 86 72 153 129 226'],
0x2019: [709,-446,277,45,233,'104 470l-44 -24l-15 10c48 77 72 143 78 223c51 15 61 18 97 30l13 -13c-34 -86 -72 -153 -129 -226'],
0x201C: [709,-446,510,45,466,'407 685l44 24l15 -10c-48 -77 -72 -143 -78 -223c-51 -15 -61 -18 -97 -30l-13 13c34 86 72 153 129 226zM174 685l44 24l15 -10c-48 -77 -72 -143 -78 -223c-51 -15 -61 -18 -97 -30l-13 13c34 86 72 153 129 226'],
0x201D: [709,-446,510,45,466,'104 470l-44 -24l-15 10c48 77 72 143 78 223c51 15 61 18 97 30l13 -13c-34 -86 -72 -153 -129 -226zM337 470l-44 -24l-15 10c48 77 72 143 78 223c51 15 61 18 97 30l13 -13c-34 -86 -72 -153 -129 -226'],
0x2020: [694,5,499,34,466,'466 395l-5 -5l-19 1c-32 2 -64 6 -96 6h-59v-78c0 -98 0 -196 7 -293l2 -26l-5 -5c-14 2 -29 4 -41 4s-27 -2 -41 -4l-5 5l2 26c7 97 7 195 7 293v78h-59c-32 0 -64 -4 -96 -6l-19 -1l-5 5c5 23 4 47 0 67l5 5l19 -1c32 -2 64 -6 96 -6h59v52c0 52 -6 103 -11 155l-2 22 l5 5c16 -1 31 -3 45 -3s29 2 45 3l5 -5l-2 -22c-5 -52 -11 -103 -11 -155v-52h59c32 0 64 4 96 6l19 1l5 -5c-4 -20 -5 -44 0 -67'],
0x2021: [694,249,499,34,466,'466 -40l-5 -5c-33 2 -66 7 -99 7h-75v-47c0 -46 6 -93 11 -140l2 -19l-5 -5c-16 1 -31 3 -45 3s-29 -2 -45 -3l-5 5l2 19c5 47 11 94 11 140v47h-75c-33 0 -66 -5 -99 -7l-5 5c4 23 4 47 0 67l5 5c33 -2 66 -7 99 -7h75c0 66 -3 132 -8 198c5 66 8 131 8 197h-75 c-33 0 -66 -5 -99 -7l-5 5c4 23 4 47 0 67l5 5c33 -2 66 -7 99 -7h75v47c0 46 -6 93 -11 140l-2 19l5 5c16 -1 31 -3 45 -3s29 2 45 3l5 -5l-2 -19c-5 -47 -11 -94 -11 -140v-47h75c33 0 66 5 99 7l5 -5c-4 -20 -4 -44 0 -67l-5 -5c-33 2 -66 7 -99 7h-75 c0 -66 3 -131 8 -197c-5 -66 -8 -132 -8 -198h75c33 0 66 5 99 7l5 -5c-4 -20 -4 -44 0 -67'],
0x2026: [111,5,746,100,647,'588 111c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM372 111c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM157 111c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58'],
0x2032: [495,-47,364,53,313,'313 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62'],
0x2033: [495,-47,599,53,548,'548 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62zM313 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37 l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62'],
0x2034: [495,-47,834,53,783,'783 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62zM548 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37 l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62zM313 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62'],
0x2035: [495,-47,364,53,313,'313 69c0 -9 -7 -22 -17 -22c-16 0 -34 22 -41 34l-173 273c-14 22 -29 51 -29 79c0 36 38 62 71 62c39 0 58 -38 69 -70l109 -319c3 -8 11 -27 11 -37'],
0x203E: [1726,-1614,1024,0,1024,'0 1614v112h1024v-112h-1024'],
0x2044: [558,279,313,0,314,'314 557l-263 -836h-51l263 837'],
0x2057: [495,-47,1069,53,1018,'1018 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62zM783 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37 l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62zM548 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62zM313 433c0 -28 -16 -57 -30 -79l-172 -273c-8 -12 -26 -34 -42 -34 c-10 0 -16 13 -16 22c0 10 7 29 10 37l110 319c11 32 29 70 68 70c34 0 72 -26 72 -62'],
0x20D7: [790,-519,557,0,558,'486 636h-486v37h486l-79 96l21 21l130 -136l-130 -135l-21 21'],
0x210F: [733,9,499,10,471,'471 85c-59 -44 -127 -94 -159 -94c-21 0 -31 15 -31 40c0 24 7 54 13 76l51 196c10 39 15 72 15 90c0 14 -11 21 -22 21c-42 0 -145 -97 -190 -178c-40 -72 -54 -172 -62 -229l-69 -16l-7 11c26 78 67 270 88 386l15 82l-74 -26l-14 35l95 34l20 109c2 10 2 19 2 25 c0 20 -16 23 -34 23h-48l4 21c74 6 115 20 160 42l12 -11l-39 -182l215 77l13 -35l-237 -85l-48 -222l4 -2c28 35 54 70 87 101c68 63 145 108 180 108c26 0 32 -18 32 -35c0 -24 -5 -49 -11 -75l-64 -256c-4 -15 -7 -28 -7 -40c0 -11 6 -20 13 -20c12 0 38 15 61 30l26 17'],
0x2111: [721,4,645,12,594,'589 688c-11 -72 -68 -136 -145 -136c-115 0 -157 111 -265 111c-57 0 -103 -42 -103 -100c0 -49 41 -86 89 -86c10 0 20 1 30 4l4 -17c-12 -4 -24 -6 -36 -6c-65 0 -110 50 -110 114c0 90 88 149 172 149c128 0 193 -115 263 -115c50 0 79 47 88 90zM594 487l-11 -14 c-17 15 -38 32 -61 32c-22 0 -45 -17 -45 -40c0 -63 112 -151 112 -258c0 -136 -147 -211 -267 -211c-157 0 -171 141 -239 141c-24 0 -42 -21 -51 -41l-20 1c21 48 69 88 123 88c120 0 144 -159 258 -159c56 0 123 51 123 111c0 106 -109 176 -109 282c0 57 46 108 102 108 c33 0 63 -18 85 -40'],
0x2113: [713,13,470,16,455,'455 632c0 -109 -157 -215 -237 -269c-17 -57 -72 -206 -72 -266c0 -38 21 -71 62 -71c64 0 114 75 140 125l19 -8c-35 -70 -100 -156 -187 -156c-67 0 -105 50 -105 114c0 62 31 133 48 191l-97 -65l-10 18l117 80c30 107 72 388 234 388c49 0 88 -31 88 -81zM425 625 c0 31 -21 49 -50 49c-59 0 -87 -78 -101 -125l-46 -154c64 46 197 143 197 230'],
0x2118: [504,203,656,58,617,'617 327c0 -132 -128 -331 -274 -331c-61 0 -91 64 -91 118c0 31 19 69 54 69c23 0 38 -17 38 -39c0 -5 -1 -9 -1 -15h-3c-31 0 -48 -34 -48 -61c0 -30 15 -54 48 -54c3 0 5 1 8 1c114 18 173 182 173 282c0 52 -14 158 -84 158c-84 0 -222 -229 -262 -300 c36 -60 90 -168 90 -237s-43 -121 -114 -121c-64 0 -93 59 -93 115c0 55 28 128 58 175c-33 39 -57 101 -57 153c0 119 98 183 206 199l2 -17c-73 -10 -139 -66 -139 -145c0 -38 16 -69 36 -99c53 94 203 326 318 326c87 0 135 -101 135 -177zM209 -115 c0 61 -44 136 -81 183c-24 -42 -50 -107 -50 -156c0 -43 23 -99 71 -99c44 0 60 33 60 72'],
0x211C: [717,4,748,17,702,'702 85l-113 -89c-73 41 -82 122 -82 199c0 23 1 46 1 67c0 37 -17 107 -65 107c-11 0 -52 -18 -65 -24c0 -91 -4 -177 -72 -246c-40 -42 -94 -65 -138 -103l-64 72l-35 -26l-11 15l111 86l64 -75c43 24 45 53 45 98v301c0 76 -5 222 -112 222c-49 0 -100 -38 -100 -90 c0 -93 128 -91 128 -204c0 -56 -33 -115 -89 -131l-3 17c23 10 50 28 50 57c0 69 -135 79 -135 196c0 94 79 181 175 181c69 0 153 -36 169 -110c64 32 125 70 183 112c60 -48 120 -70 120 -159c0 -73 -71 -120 -131 -147c123 -85 29 -338 114 -338c14 0 34 19 44 28z M577 524c0 56 -49 107 -105 107c-34 0 -77 -29 -106 -46c11 -47 12 -96 12 -146v-73c70 27 199 65 199 158'],
0x2127: [704,6,824,34,791,'791 553l-4 -3h-14l-4 3c-13 53 -13 70 -73 70h-129v-7c134 -56 208 -171 208 -316c0 -216 -157 -306 -356 -306c-201 0 -368 83 -368 306c0 144 73 260 207 316v7h-129c-60 0 -60 -18 -73 -70l-3 -3h-15l-4 3c7 50 13 101 18 151c103 -4 207 -3 310 -6v-68 c-161 -21 -206 -167 -206 -309c0 -175 73 -300 263 -300c194 0 250 133 250 303c0 141 -47 285 -206 306v68c103 2 208 2 311 6c3 -50 10 -101 17 -151'],
0x2132: [692,3,555,22,536,'342 362v207c0 79 -5 87 -51 90l-48 3v30c59 -2 95 -3 147 -3s87 1 146 3v-30l-48 -3c-46 -3 -51 -11 -51 -90v-449c0 -79 5 -87 51 -90l48 -3v-30c-147 3 -147 3 -176 3c-25 0 -60 0 -109 -2l-107 -1c-43 0 -73 1 -122 3c11 44 16 92 16 152h35v-65c0 -23 3 -28 27 -34 c31 -8 87 -14 135 -14c59 0 77 1 107 8v267c-34 2 -68 3 -110 3c-66 0 -94 -3 -112 -11c-4 -8 -7 -17 -8 -26l-5 -52h-30c3 100 3 100 3 116c0 18 0 18 -3 111h30l5 -59l8 -25c18 -9 45 -12 119 -12c45 0 75 1 103 3'],
0x2135: [746,13,672,32,641,'641 560c0 -29 -18 -52 -38 -72c-15 17 -32 37 -57 37c-21 0 -97 -99 -97 -120c0 -60 167 -101 167 -244c0 -73 -29 -121 -76 -174l-11 5c6 9 15 32 15 43c0 66 -85 144 -132 184l-220 187c-35 -15 -74 -63 -74 -102c0 -113 89 -138 89 -225c0 -58 -20 -85 -80 -85 c-31 0 -62 1 -93 3l-2 6c24 12 61 22 61 55c0 49 -53 89 -53 147c0 82 64 174 132 217c-49 45 -125 97 -125 171c0 61 45 111 84 153l13 -8c-7 -16 -7 -33 -7 -50c0 -47 59 -94 91 -123c28 -24 134 -134 179 -134c27 0 106 102 106 128c0 24 -42 42 -42 84s17 60 47 86 c0 -83 123 -93 123 -169'],
0x2136: [734,0,675,31,635,'635 160l-44 -160h-560l44 160h469c29 0 35 2 35 22v11l-24 319c-3 39 -29 63 -80 63h-303c-81 0 -96 38 -96 82c0 24 4 51 4 75l13 2c0 -10 25 -32 89 -32h356c38 0 83 -5 83 -60c0 -79 -39 -56 -39 -113v-11l24 -325c2 -28 3 -33 29 -33'],
0x2137: [734,0,331,26,306,'306 54h-12c-2 12 -12 82 -28 82c-6 0 -32 -118 -37 -136h-203l38 137h182c18 0 19 18 19 32c0 26 -26 340 -26 342c-3 38 -28 64 -70 64c-81 0 -95 41 -95 85c0 25 5 52 5 72l13 2c3 -16 33 -32 82 -32h27c40 0 87 -5 87 -59c0 -61 -27 -85 -27 -135c0 -4 1 -8 1 -12'],
0x2138: [734,0,559,36,526,'526 642c0 -68 -44 -68 -45 -68c0 -1 33 -386 33 -475c0 -30 -39 -90 -45 -99h-10v575h-327c-81 0 -96 38 -96 82c0 24 4 51 4 75l13 2c0 -10 25 -32 89 -32h301c38 0 83 -5 83 -60'],
0x2141: [727,9,778,53,708,'708 363c0 211 -162 364 -386 364c-99 0 -176 -15 -269 -54v-284h211v51h-148v197c70 22 127 37 190 37c195 0 336 -130 336 -314c0 -179 -141 -316 -321 -316c-84 0 -170 21 -253 60v-57c110 -44 169 -56 251 -56c222 0 389 161 389 372'],
0x2190: [486,-55,1013,65,949,'949 241h-770l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h770v-59'],
0x2191: [713,172,524,47,478,'478 506l-35 -32l-151 125v-771h-59v771l-152 -125l-34 32l215 207'],
0x2192: [486,-55,1013,65,949,'949 272l-206 -217l-32 34l125 152h-771v59h771l-125 152l32 34'],
0x2193: [713,172,524,47,478,'478 35l-215 -207l-216 207l35 32l151 -125v771h59v-771l152 125'],
0x2194: [486,-55,1013,65,949,'949 272l-206 -217l-32 33l124 153h-656l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h656l-124 151l32 35'],
0x2195: [712,172,524,47,478,'264 -172l-217 206l33 32l153 -124v656l-152 -124l-34 32l214 206l217 -206l-34 -32l-152 124v-656l151 124l35 -32'],
0x2196: [713,172,1013,65,949,'949 -136l-35 -36l-785 785l-16 -201h-48v301h301v-48l-201 -17'],
0x2197: [713,172,1013,65,949,'949 412h-47l-17 201l-784 -785l-36 36l784 784l-201 17v48h301v-301'],
0x2198: [713,172,1013,65,949,'949 -172h-301v48l201 17l-784 784l36 36l784 -785l17 201h47v-301'],
0x2199: [713,172,1013,65,949,'949 677l-784 -784l201 -17v-48h-301v301h48l16 -201l785 785'],
0x219A: [486,-55,1013,65,949,'949 241h-422l-47 -175h-53l48 175h-296l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h312l44 163h52l-44 -163h406v-59'],
0x219B: [486,-55,1013,65,949,'949 272l-206 -217l-32 34l125 152h-309l-47 -175h-53l48 175h-410v59h426l44 163h52l-44 -163h293l-125 152l32 34'],
0x219E: [486,-55,1013,65,949,'949 241h-560l126 -152l-33 -34l-179 186h-124l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h126l177 186l33 -34l-126 -152h560v-59'],
0x21A0: [486,-55,1013,65,949,'65 241h560l-126 -152l33 -34l179 186h124l-125 -152l32 -34l207 214l-207 217l-32 -34l125 -152h-126l-177 186l-33 -34l126 -152h-560v-59'],
0x21A2: [486,-55,1150,65,1075,'1075 89l-33 -34l-179 186h-684l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h686l177 186l33 -34l-126 -152v-59'],
0x21A3: [486,-55,1150,76,1085,'1085 272l-206 -217l-33 34l126 152h-687l-177 -186l-32 34l125 152v59l-125 152l32 34l179 -186h685l-126 152l33 34'],
0x21A6: [486,-55,1013,36,978,'94 300h770l-124 152l32 34l206 -217l-206 -214l-32 34l124 152h-770v-186h-58v431h58v-186'],
0x21A9: [494,-55,1013,65,949,'788 494c69 0 161 -58 161 -127c0 -118 -101 -126 -192 -126h-578l125 -150l-32 -36l-207 216l207 215l32 -33l-125 -153h644c43 0 66 24 66 67c0 44 -58 73 -101 73v54'],
0x21AA: [494,-55,1013,65,949,'226 494c-69 0 -161 -58 -161 -127c0 -118 101 -126 192 -126h578l-125 -150l32 -36l207 216l-207 215l-32 -33l125 -153h-644c-43 0 -66 24 -66 67c0 44 58 73 101 73v54'],
0x21AB: [493,-55,1013,65,949,'949 367c0 -118 -101 -126 -192 -126v-119h-59v119h-519l125 -150l-32 -36l-207 216l207 215l32 -33l-125 -153h519c-1 92 8 193 125 193c69 0 126 -57 126 -126zM889 367c0 44 -23 67 -66 67s-66 -23 -66 -67v-67h66c43 0 66 24 66 67'],
0x21AC: [493,-55,1013,65,949,'949 272l-206 -217l-32 34l125 152h-519v-119h-59v119c-92 0 -193 8 -193 126c0 69 58 126 127 126c116 0 126 -101 125 -193h519l-125 152l32 34zM258 300v67c0 44 -23 67 -66 67c-44 0 -67 -23 -67 -67c0 -43 23 -67 67 -67h66'],
0x21AD: [486,-55,1211,34,1178,'1178 272l-206 -217l-33 34l125 152h-104c-63 0 -128 -110 -187 -110c-58 0 -118 174 -168 198c-48 -24 -107 -198 -165 -198c-59 0 -124 110 -189 110h-104l126 -152l-33 -34l-206 214l206 217l33 -34l-126 -152h104c84 0 141 -98 182 -98c40 0 109 210 172 210 c64 0 135 -210 175 -210c41 0 98 98 180 98h104l-125 152l33 34'],
0x21AE: [486,-55,1013,65,949,'949 272l-206 -217l-32 33l124 153h-308l-47 -175h-53l48 175h-296l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h312l44 163h52l-44 -163h292l-124 151l32 35'],
0x21B0: [753,0,506,65,442,'442 0h-59v507h-204l125 -151l-32 -35l-207 215l207 217l32 -35l-125 -152h263v-566'],
0x21B1: [753,0,506,65,442,'442 536l-206 -215l-33 35l125 151h-204v-507h-59v566h263l-125 152l33 35'],
0x21B6: [458,0,1124,34,1115,'1115 0h-69c-1 215 -175 389 -390 389c-169 0 -316 -120 -371 -269l164 91l35 -37l-237 -174l-213 217l33 32l148 -121c56 191 232 330 441 330c252 0 458 -205 459 -458'],
0x21B7: [458,0,1124,34,1115,'34 0h69c1 215 175 389 390 389c169 0 316 -120 371 -269l-164 91l-35 -37l237 -174l213 217l-33 32l-148 -121c-56 191 -232 330 -441 330c-252 0 -458 -205 -459 -458'],
0x21BA: [751,0,987,114,875,'875 380c0 -211 -170 -380 -380 -380c-211 0 -381 169 -381 380c0 101 43 197 111 269l36 -35c-59 -63 -96 -147 -96 -234c0 -182 147 -329 330 -329c181 0 329 147 329 328c0 72 -37 192 -97 235l-35 -108l-66 245l246 -66l-108 -36c68 -72 111 -168 111 -269'],
0x21BB: [751,0,987,114,875,'875 380c0 -211 -170 -380 -380 -380c-211 0 -381 169 -381 380c0 101 43 197 111 269l-107 36l245 66l-66 -245c-8 37 -17 74 -35 108c-66 -70 -97 -138 -97 -235c0 -181 148 -328 330 -328s329 147 329 329c0 87 -37 171 -97 234l37 35c69 -72 111 -168 111 -269'],
0x21BC: [394,-149,1013,64,950,'298 394l32 -35l-126 -151h746v-59h-886'],
0x21BD: [394,-148,1013,64,950,'950 394v-58h-746l126 -152l-32 -36l-234 246h886'],
0x21BE: [714,171,524,233,478,'478 481l-35 -33l-151 127v-746h-59v885'],
0x21BF: [714,171,524,47,293,'293 -171h-59v746l-152 -127l-35 33l246 233v-885'],
0x21C0: [394,-149,1013,64,950,'716 394l-32 -35l126 -151h-746v-59h886'],
0x21C1: [394,-148,1013,64,950,'64 394v-58h746l-126 -152l32 -36l234 246h-886'],
0x21C2: [714,171,524,233,478,'478 62l-35 33l-151 -127v746h-59v-885'],
0x21C3: [714,171,524,47,293,'293 714h-59v-746l-152 127l-35 -33l246 -233v885'],
0x21C4: [665,124,1013,65,949,'949 451l-206 -217l-32 34l125 152h-771v59h771l-125 152l32 34zM949 62h-770l125 -151l-32 -35l-207 215l207 216l32 -34l-125 -152h770v-59'],
0x21C6: [665,124,1013,65,949,'949 420h-770l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h770v-59zM949 93l-206 -217l-32 35l125 151h-771v59h771l-125 152l32 34'],
0x21C7: [665,124,1013,65,949,'949 62h-770l125 -151l-32 -35l-207 215l172 180l-172 177l207 217l32 -34l-125 -152h770v-59h-770l123 -149l-123 -150h770v-59'],
0x21C8: [714,171,883,47,837,'837 507l-35 -32l-151 125v-771h-59v771l-150 -123l-149 123v-771h-59v771l-152 -125l-35 32l215 207l180 -172l178 172'],
0x21C9: [665,124,1013,65,949,'949 93l-206 -217l-32 35l125 151h-771v59h771l-125 147l125 152h-771v59h771l-125 152l32 34l206 -214l-171 -180'],
0x21CA: [714,171,883,47,837,'837 36l-215 -207l-180 172l-178 -172l-217 207l35 32l152 -125v771h59v-771l147 125l152 -125v771h59v-771l151 125'],
0x21CB: [594,52,1013,65,949,'949 348h-884l233 246l33 -35l-127 -152h745v-59zM949 194l-232 -246l-33 35l127 152h-746v59h884'],
0x21CC: [594,52,1013,65,949,'949 348h-884v59h746l-127 152l33 35zM949 135h-745l127 -152l-33 -35l-233 246h884v-59'],
0x21CD: [537,-6,1013,65,949,'949 135h-450l-30 -111h-52l29 111h-178l80 -98l-29 -31l-254 265l254 266l29 -32l-80 -98h251l26 98h53l-26 -98h377v-59h-393l-41 -154h434v-59zM503 348h-284l-64 -77l63 -77h244'],
0x21CE: [537,-6,1013,59,954,'954 271l-254 -265l-29 31l80 98h-258l-30 -111h-52l29 111h-178l80 -98l-29 -31l-254 265l254 266l29 -32l-80 -98h251l26 98h53l-26 -98h185l-80 98l29 32zM865 271l-65 77h-250l-41 -154h292zM497 348h-284l-64 -77l63 -77h244'],
0x21CF: [537,-6,1013,65,949,'949 271l-253 -265l-30 31l80 98h-247l-30 -111h-52l29 111h-381v59h397l41 154h-438v59h454l26 98h53l-26 -98h174l-80 98l30 32zM860 271l-64 77h-240l-41 -154h282'],
0x21D0: [539,-7,1013,65,949,'949 136h-681l80 -98l-29 -31l-254 266l254 266l29 -33l-80 -98h681v-59h-730l-64 -78l63 -76h731v-59'],
0x21D1: [713,172,578,24,555,'555 459l-32 -30l-98 80v-681h-59v731l-78 64l-76 -63v-732h-59v681l-98 -80l-31 30l264 254'],
0x21D2: [539,-7,1013,65,949,'949 273l-253 -266l-30 31l80 98h-681v59h732l63 76l-64 78h-731v59h681l-80 98l30 33'],
0x21D3: [713,172,578,24,555,'555 82l-264 -254l-267 254l32 30l97 -80v681h59v-731l79 -64l75 63v732h59v-681l99 80'],
0x21D4: [537,-5,1013,59,954,'954 271l-254 -266l-29 31l80 99h-489l80 -98l-29 -32l-254 264l254 268l29 -32l-80 -99h489l-80 98l29 33zM865 271l-65 77h-587l-64 -77l63 -78h589'],
0x21D5: [718,176,578,24,556,'290 718l266 -254l-32 -28l-98 80v-490l98 80l32 -28l-264 -254l-268 254l32 28l98 -80v490l-98 -80l-32 28zM290 630l-78 -66v-586l78 -64l78 62v590'],
0x21DA: [607,67,1013,65,949,'949 62h-609l81 -98l-29 -31l-327 338l326 336l30 -30l-81 -98h609v-59h-658l-110 -120h768v-59h-767l110 -120h657v-59'],
0x21DB: [608,66,1013,65,949,'949 271l-325 -337l-30 30l81 98h-610v59h659l110 120h-769v59h768l-110 120h-658v59h610l-81 98l29 31'],
0x21DD: [485,-54,1149,34,1116,'1116 271l-207 -217l-32 34l125 152h-97c-65 0 -126 -110 -187 -110c-55 0 -120 163 -166 200c-47 -33 -116 -200 -163 -200c-55 0 -134 207 -173 207c-49 0 -94 -97 -182 -97v59c70 1 127 111 188 111c56 0 116 -166 166 -202c48 34 115 202 163 202 c54 0 136 -209 174 -209c43 0 98 98 173 98h104l-125 152l32 34'],
0x21E0: [485,-54,1063,65,998,'518 239l-120 1v59l120 -1v-59zM998 240h-120v59h120v-59zM758 240h-120v59h120v-59zM298 240h-120l125 -152l-32 -34l-206 217l206 214l32 -34l-125 -152h120v-59'],
0x21E2: [485,-54,1063,65,998,'545 239l120 1v59l-120 -1v-59zM65 240h120v59h-120v-59zM305 240h120v59h-120v-59zM765 240h120l-125 -152l32 -34l206 217l-206 214l-32 -34l125 -152h-120v-59'],
0x2200: [697,15,617,26,592,'592 697l-281 -712h-17l-268 712h45l109 -296h191l109 296h112zM355 359h-159l79 -214'],
0x2201: [906,146,753,114,640,'640 117c-16 -150 -103 -263 -263 -263c-145 0 -263 118 -263 263v526c0 146 118 263 263 263c161 0 247 -112 263 -263h-76c-11 108 -72 188 -187 188c-103 0 -186 -84 -186 -188v-526c0 -102 84 -186 186 -186c114 0 176 80 187 186h76'],
0x2202: [750,11,494,35,460,'91 666l-43 49c54 23 88 33 127 35c152 8 275 -137 285 -337c6 -116 -20 -251 -62 -321c-39 -63 -96 -99 -165 -103c-104 -5 -191 76 -197 184c-3 62 18 126 60 174c34 41 74 61 123 64c51 3 87 -11 154 -58c2 50 4 73 2 102c-9 163 -72 254 -173 249 c-34 -2 -67 -13 -111 -38zM369 318c-38 36 -70 53 -108 50c-81 -4 -143 -102 -137 -216c4 -85 41 -136 95 -133c44 2 86 38 109 92c23 52 31 95 41 207'],
0x2203: [697,0,617,46,572,'572 0h-526v39h388c19 0 34 16 34 53v253h-332v39h332v232c0 26 -20 42 -47 42h-364v39h515v-697'],
0x2204: [836,141,617,46,572,'572 0h-336l-37 -141h-55l38 141h-136v39h147l83 306h-140v39h150l75 274h-304v39h315l39 139h50l-39 -139h150v-697zM468 384v232c0 19 -11 42 -47 42h-9l-73 -274h129zM468 92v253h-140l-82 -306h188c19 0 34 16 34 53'],
0x2205: [591,13,733,65,668,'668 288c0 -166 -134 -301 -301 -301c-69 0 -133 24 -184 62l-57 -62l-34 34l55 61c-51 55 -82 127 -82 206c0 168 135 303 302 303c69 0 131 -23 181 -62l56 62l35 -34l-54 -60c52 -54 83 -128 83 -209zM621 288c0 68 -25 129 -67 174l-339 -377c42 -31 94 -50 152 -50 c140 0 254 113 254 253zM517 495c-42 30 -94 48 -150 48c-141 0 -254 -114 -254 -255c0 -65 25 -125 66 -170'],
0x2207: [697,4,688,27,662,'662 687c-65 -122 -267 -584 -310 -691h-25c-39 117 -92 231 -141 343c-50 116 -98 238 -159 348l6 10c101 -3 202 -4 303 -4c107 0 213 0 319 4zM584 656h-432c65 -178 137 -354 212 -527h7c76 175 147 349 213 527'],
0x2208: [533,-8,563,55,509,'509 8h-195c-147 0 -259 123 -259 263s112 262 259 262h195v-53h-200c-105 0 -188 -83 -200 -183h383v-53h-383c13 -105 106 -183 200 -183h200v-53'],
0x2209: [648,107,563,55,509,'509 8h-195c-26 0 -50 4 -72 11l-50 -126h-57l58 146c-84 44 -138 134 -138 232c0 140 112 262 259 262h75l47 115h57l-47 -115h63v-53h-84l-72 -183h139v-53h-160l-71 -177c16 -4 33 -6 48 -6h200v-53zM368 480h-59c-105 0 -188 -83 -200 -183h187zM275 244h-166 c9 -67 50 -124 104 -156'],
0x220B: [533,-8,563,55,509,'509 271c0 -140 -112 -263 -258 -263h-196v53h200c94 0 187 78 200 183h-382v53h382c-12 100 -95 183 -200 183h-200v53h196c146 0 258 -122 258 -262'],
0x220D: [482,3,511,65,446,'228 482c174 0 218 -109 218 -213c0 -124 -63 -272 -309 -272c-10 0 -34 2 -72 7l9 37c13 -2 33 -3 58 -3c127 0 199 67 215 178l-243 9l8 47l239 7c0 108 -51 162 -152 162c-31 0 -68 -4 -115 -13l4 39c36 10 83 15 140 15'],
0x2212: [299,-243,605,51,555,'555 243h-504v56h504v-56'],
0x2213: [541,0,668,65,604,'604 486h-539v55h539v-55zM604 192h-240v-192h-59v192h-240v59h240v203h59v-203h240v-59'],
0x2214: [629,89,668,65,604,'392 571c0 -30 -27 -58 -59 -58c-31 0 -57 28 -57 58s27 58 57 58c31 0 59 -28 59 -58zM604 152h-240v-241h-59v241h-240v59h240v238h59v-238h240v-59'],
0x2215: [726,119,605,87,519,'519 726l-372 -845h-60l372 845h60'],
0x2216: [501,-50,799,101,699,'101 459l559 -409l39 42l-559 409'],
0x2217: [446,-99,388,31,358,'358 245l-9 -8l-138 20l68 -122v-10l-68 -26l-10 4l-18 142l-106 -88h-10l-36 51l5 9l131 54l-131 64l-3 10l44 52h10l95 -98l26 143l9 4l62 -19l3 -15l-69 -125l139 24l6 -4v-62'],
0x2218: [417,-124,388,47,342,'342 271c0 -83 -65 -147 -147 -147c-83 0 -148 64 -148 147c0 81 65 146 148 146c82 0 147 -65 147 -146zM304 271c0 60 -50 109 -109 109c-62 0 -110 -49 -110 -109c0 -62 48 -110 110 -110c59 0 109 48 109 110'],
0x2219: [319,-203,249,67,183,'124 319c31 0 59 -28 59 -59c0 -30 -28 -57 -60 -57c-29 0 -56 28 -56 58s27 58 57 58'],
0x221A: [1079,59,760,63,793,'793 1079l-328 -1138l-277 580l-112 -42l-13 22l202 108l207 -439l260 909h61'],
0x221D: [466,-75,668,65,604,'604 75c-75 0 -138 39 -171 99c-34 -60 -97 -99 -171 -99c-110 0 -197 86 -197 196c0 108 87 195 197 195c74 0 137 -40 171 -99c33 59 96 99 171 99v-50c-82 0 -146 -65 -146 -145c0 -82 64 -146 146 -146v-50zM407 271c0 80 -66 145 -145 145c-82 0 -146 -65 -146 -145 c0 -82 64 -146 146 -146c79 0 145 64 145 146'],
0x221E: [463,-65,897,55,843,'843 284c0 -105 -72 -219 -188 -219c-94 0 -170 78 -219 151c-47 -64 -140 -150 -223 -150c-104 0 -158 82 -158 178c0 108 74 219 190 219c89 0 174 -76 218 -148c53 63 137 146 224 146c105 0 156 -81 156 -177zM783 236c0 71 -43 173 -127 173 c-59 0 -143 -77 -175 -125c41 -58 113 -176 192 -176c75 0 110 58 110 128zM418 245c-31 59 -118 179 -189 179c-76 0 -112 -58 -112 -128c0 -72 42 -174 126 -174c58 0 144 76 175 123'],
0x2220: [577,0,535,65,471,'471 0h-406l353 577h53l-327 -534h327v-43'],
0x2221: [577,0,535,65,471,'471 0h-406l353 577h53l-219 -359c57 -38 93 -104 95 -175h124v-43zM301 43c-1 54 -28 106 -74 137l-83 -137h157'],
0x2222: [515,-26,535,65,471,'471 26l-406 245l406 244v-54l-133 -79c21 -33 34 -71 34 -113c0 -40 -12 -77 -33 -110l132 -78v-55zM336 269c0 35 -11 68 -29 94l-153 -92l155 -94c17 27 27 59 27 92'],
0x2223: [714,171,437,189,248,'248 -171h-59v885h59v-885'],
0x2224: [714,171,437,0,438,'438 558l-190 -191v-538h-59v479l-106 -106h-83l189 191v321h59v-262l107 106h83'],
0x2225: [714,171,641,186,456,'245 -171h-59v885h59v-885zM397 714h59v-885h-59v885'],
0x2226: [714,171,641,0,642,'642 558l-186 -186v-543h-59v484l-152 -151v-333h-59v274l-118 -118h-68l186 187v542h59v-483l152 152v331h59v-272l116 116h70'],
0x2227: [585,0,687,65,623,'623 0h-60l-218 480l-220 -480h-60l271 585h18'],
0x2228: [585,0,687,65,623,'623 585l-269 -585h-18l-271 585h60l220 -480l218 480h60'],
0x2229: [603,0,687,65,623,'623 0h-54v329c0 112 -97 220 -225 220c-121 0 -225 -94 -225 -220v-329h-54v325c0 157 131 278 280 278c150 0 278 -121 278 -278v-325'],
0x222A: [603,0,687,65,623,'623 278c0 -157 -128 -278 -278 -278c-149 0 -280 121 -280 278v325h54v-329c0 -126 104 -220 225 -220c128 0 225 108 225 220v329h54v-325'],
0x222B: [885,442,768,54,694,'694 802c0 -20 -7 -43 -31 -43c-30 0 -71 30 -71 62c0 7 1 13 4 20c-9 9 -22 12 -34 12c-140 0 -123 -332 -129 -420c-12 -164 -20 -329 -38 -492c-15 -131 -47 -383 -222 -383c-48 0 -119 24 -119 83c0 19 5 44 29 44c31 0 72 -32 72 -63c0 -6 -2 -14 -4 -19 c8 -10 22 -13 33 -13c128 0 117 219 120 302c5 180 19 359 30 538c10 149 34 455 239 455c48 0 121 -25 121 -83'],
0x2234: [524,6,661,65,597,'387 468c0 -31 -25 -55 -55 -55c-32 0 -56 24 -56 55s24 56 56 56c30 0 55 -25 55 -56zM597 49c0 -31 -25 -55 -56 -55c-32 0 -56 24 -56 55s24 56 56 56c31 0 56 -25 56 -56zM177 49c0 -31 -25 -55 -56 -55c-32 0 -56 24 -56 55s24 56 56 56c31 0 56 -25 56 -56'],
0x2235: [524,6,661,65,597,'597 468c0 -31 -25 -55 -56 -55c-32 0 -56 24 -56 55s24 56 56 56c31 0 56 -25 56 -56zM177 468c0 -31 -25 -55 -56 -55c-32 0 -56 24 -56 55s24 56 56 56c31 0 56 -25 56 -56zM386 49c0 -31 -25 -55 -55 -55c-32 0 -56 24 -56 55s24 56 56 56c30 0 55 -25 55 -56'],
0x223C: [355,-186,668,58,610,'610 344c-14 -93 -60 -158 -162 -158s-162 114 -244 114c-62 0 -100 -39 -124 -93l-22 9c24 84 74 139 166 139c94 0 164 -115 246 -115c70 0 102 53 120 114'],
0x223D: [355,-186,668,58,610,'58 344c14 -93 60 -158 162 -158s162 114 244 114c62 0 100 -39 124 -93l22 9c-24 84 -74 139 -166 139c-94 0 -164 -115 -246 -115c-70 0 -102 53 -120 114'],
0x2240: [593,4,277,57,221,'221 414c0 -102 -113 -179 -113 -267c0 -71 49 -109 112 -129l-9 -22c-99 17 -154 73 -154 176c0 111 110 177 110 265c0 63 -34 106 -89 134l8 22c88 -28 135 -87 135 -179'],
0x2241: [491,-51,668,56,614,'614 341c-18 -104 -75 -155 -170 -155c-38 0 -71 15 -102 32l-66 -167h-57l78 196c-25 18 -58 37 -86 37c-48 0 -84 -22 -113 -83l-42 16c29 94 86 138 173 138c36 0 68 -14 98 -32l67 168h57l-78 -196c27 -18 61 -39 91 -39c49 0 84 27 108 102'],
0x2242: [434,-106,668,65,604,'604 374h-537v60h537v-60zM604 245v1c-23 -85 -70 -140 -162 -140c-91 0 -162 115 -240 115c-68 0 -99 -54 -116 -114l-21 9c15 93 57 159 158 159c99 0 161 -115 239 -115c62 0 98 40 121 94'],
0x2243: [400,-107,668,65,604,'604 389c-15 -92 -57 -158 -158 -158c-99 0 -161 114 -239 114c-62 0 -98 -39 -121 -93l-21 9c23 83 71 139 162 139c92 0 162 -115 240 -115c69 0 99 54 116 114zM604 107h-539v59h539v-59'],
0x2245: [587,-134,668,65,604,'604 576c-15 -92 -57 -158 -158 -158c-99 0 -161 114 -239 114c-62 0 -98 -39 -121 -93l-21 9c23 83 71 139 162 139c92 0 162 -115 240 -115c69 0 99 54 116 114zM604 294h-539v59h539v-59zM604 134h-539v59h539v-59'],
0x2246: [605,-18,668,65,604,'604 594c-15 -92 -57 -158 -158 -158c-99 0 -161 114 -239 114c-62 0 -98 -39 -121 -93l-21 9c23 83 71 139 162 139c92 0 162 -115 240 -115c69 0 99 54 116 114zM604 135h-296l-57 -116l-57 -1l58 116h-187v59h216l52 101h-268v59h296l29 56h58l-30 -55h186v-59h-215 l-53 -101h268v-59'],
0x2248: [465,-79,668,56,614,'614 448c-17 -96 -69 -155 -170 -155c-99 0 -164 99 -233 99c-57 0 -90 -35 -113 -85l-42 16c27 88 80 139 173 139c94 0 167 -100 235 -100c62 0 91 50 108 103zM614 234v1c-17 -97 -69 -156 -170 -156c-99 0 -164 99 -233 99c-57 0 -90 -35 -113 -84l-42 15 c27 88 80 139 173 139c94 0 167 -100 235 -100c62 0 91 50 108 104'],
0x224A: [534,-62,668,65,604,'604 523c-15 -91 -57 -158 -158 -158c-99 0 -159 114 -239 114c-61 0 -98 -40 -121 -93l-21 9c23 83 71 139 162 139c93 0 160 -115 240 -115c69 0 100 55 116 114zM604 344c-15 -92 -57 -158 -158 -158c-99 0 -161 114 -239 114c-62 0 -98 -39 -121 -93l-21 9 c23 83 71 139 162 139c92 0 162 -115 240 -115c69 0 99 54 116 114zM604 62h-539v59h539v-59'],
0x224D: [478,-63,668,54,616,'616 468c-23 -30 -90 -160 -281 -160c-193 0 -258 129 -281 159l13 11c81 -92 187 -109 267 -109s186 16 267 109zM616 73l-15 -10c-81 91 -187 108 -267 108s-186 -16 -267 -108l-13 10c23 29 88 159 281 159c191 0 258 -129 281 -159'],
0x224E: [479,-62,668,65,604,'604 347h-190c0 40 -33 73 -74 73c-40 0 -73 -32 -73 -73h-202v59h157c22 43 67 73 118 73c52 0 97 -30 119 -73h145v-59zM604 134h-145c-22 -43 -67 -72 -119 -72c-51 0 -96 29 -118 72h-157v59h202c0 -40 33 -72 73 -72c41 0 74 31 74 72h190v-59'],
0x224F: [479,-134,668,65,604,'604 347h-189c0 40 -33 73 -74 73c-40 0 -73 -32 -73 -73h-203v59h158c22 43 68 73 118 73c52 0 97 -30 119 -73h144v-59zM604 134h-538v59h538v-59'],
0x2250: [503,-38,668,65,604,'391 447c0 -31 -26 -56 -56 -56c-32 0 -56 25 -56 56s24 56 56 56c30 0 56 -25 56 -56zM604 251h-539v59h539v-59zM604 38h-539v59h539v-59'],
0x2251: [599,58,668,65,604,'391 543c0 -31 -26 -56 -56 -56c-32 0 -56 25 -56 56s24 56 56 56c30 0 56 -25 56 -56zM604 347h-539v59h539v-59zM604 134h-539v59h539v-59zM391 -2c0 -32 -26 -56 -56 -56c-32 0 -56 24 -56 56c0 30 24 56 56 56c30 0 56 -26 56 -56'],
0x2252: [599,58,668,65,604,'604 347h-539v59h539v-59zM177 543c0 -31 -25 -56 -56 -56c-32 0 -56 25 -56 56s24 56 56 56c31 0 56 -25 56 -56zM604 -2c0 -32 -25 -56 -56 -56s-55 24 -55 56c0 30 24 56 55 56s56 -26 56 -56zM604 134h-539v59h539v-59'],
0x2253: [599,58,668,65,604,'604 543c0 -31 -25 -56 -56 -56s-55 25 -55 56s24 56 55 56s56 -25 56 -56zM604 347h-539v59h539v-59zM604 134h-539v59h539v-59zM177 -2c0 -32 -25 -56 -56 -56c-32 0 -56 24 -56 56c0 30 24 56 56 56c31 0 56 -26 56 -56'],
0x2256: [404,-131,668,65,604,'604 131h-539v59h198c-20 19 -32 46 -32 76c0 31 12 59 33 78h-199v60h539v-60h-199c21 -19 35 -46 35 -76c0 -31 -15 -59 -35 -78h199v-59zM404 266c0 38 -30 71 -69 71c-48 0 -69 -43 -69 -68c0 -27 20 -71 68 -71c40 0 70 30 70 68'],
0x2257: [649,-134,668,65,604,'440 544c0 -58 -47 -104 -105 -104c-60 0 -104 47 -104 104s45 105 104 105c58 0 105 -47 105 -105zM604 294h-539v55h539v-55zM604 134h-539v59h539v-59zM391 544c0 31 -25 56 -56 56c-38 0 -55 -35 -55 -55c0 -21 16 -56 54 -56c32 0 57 25 57 55'],
0x225C: [640,-134,687,65,623,'352 640c18 -44 97 -204 124 -245c-43 1 -85 1 -128 1c-46 0 -93 3 -138 2c31 48 114 200 131 242h11zM426 420c-27 62 -53 110 -85 170c-32 -61 -57 -108 -85 -170h170zM623 189h-558v-55h558v55zM623 349h-558v-55h558v55'],
0x2260: [596,55,668,65,604,'604 135h-296l-74 -190h-57l75 189h-187v59h211l61 154h-272v59h296l75 190h57l-75 -189h186v-59h-210l-61 -154h271v-59'],
0x2261: [479,-62,668,65,604,'604 420h-539v59h539v-59zM604 241h-539v59h539v-59zM604 62h-539v59h539v-59'],
0x2264: [604,61,669,65,604,'604 76l-539 252v21l539 255v-59l-438 -205l438 -205v-59zM604 -61h-539v56h539v-56'],
0x2265: [604,61,669,65,604,'604 329l-539 -254v59l438 205l-438 205v60l539 -253v-22zM604 -61h-539v56h539v-56'],
0x2266: [672,131,668,65,604,'604 143l-539 253v21l539 255v-59l-438 -206l438 -205v-59zM604 29h-539v56h539v-56zM604 -131h-539v56h539v-56'],
0x2267: [672,131,668,65,604,'604 397l-539 -255v59l438 205l-438 206v60l539 -254v-21zM604 29h-539v56h539v-56zM604 -131h-539v56h539v-56'],
0x2268: [672,183,668,65,604,'604 143l-539 253v21l539 255v-59l-438 -206l438 -205v-59zM604 -131l-338 2l-50 -54h-72l51 52h-130v55h182l102 105h-284v55h337l51 54h71l-50 -53h130v-54h-183l-102 -106h285v-56'],
0x2269: [672,183,668,65,604,'604 397l-539 -255v59l438 205l-438 206v60l539 -254v-21zM604 -131l-338 2l-50 -54h-72l51 52h-130v55h182l102 105h-284v55h337l51 54h71l-50 -53h130v-54h-183l-102 -106h285v-56'],
0x226A: [535,-5,965,55,912,'912 5l-545 255v21l545 254v-59l-443 -205l443 -207v-59zM599 5l-544 255v21l544 254v-59l-442 -205l442 -207v-59'],
0x226B: [535,-5,965,55,912,'912 260l-545 -255v59l442 207l-442 205v59l545 -254v-21zM599 260l-544 -255v59l442 207l-442 205v59l544 -254v-21'],
0x226C: [705,179,430,65,366,'366 280c0 -142 -30 -258 -126 -359c26 -34 57 -60 89 -81l-12 -19c-39 24 -73 51 -101 77c-29 -26 -62 -53 -101 -77l-13 19c33 21 63 47 90 81c-96 101 -127 217 -127 359c0 158 60 259 123 324c-23 31 -51 58 -84 83l12 18c17 -12 57 -36 100 -73c43 37 83 61 100 73 l11 -18c-32 -25 -61 -52 -84 -83c63 -65 123 -166 123 -324zM289 279c0 104 -22 203 -73 286c-52 -83 -74 -182 -74 -286c0 -151 29 -252 74 -324c45 72 73 173 73 324'],
0x226E: [648,107,668,65,604,'604 6l-291 137l-100 -250h-57l108 273l-199 94v21l302 142l90 225h57l-77 -192l167 79v-59l-196 -92l-75 -191l271 -128v-59zM339 352l-173 -81l118 -56'],
0x226F: [648,107,668,65,604,'604 260l-302 -142l-89 -225h-57l77 192l-168 -79v59l196 92l76 191l-272 128v59l292 -137l100 250h57l-109 -273l199 -94v-21zM503 271l-118 55l-54 -137'],
0x2270: [712,171,668,65,604,'604 -61l-372 1l-45 -111h-56l44 110h-110v55h131l91 230l-222 104v21l336 159l81 204h57l-68 -171l133 63v-59l-162 -77l-86 -217l248 -116v-60l-268 127l-83 -207h351v-56zM373 436l-207 -96l141 -66'],
0x2271: [712,171,668,65,604,'604 -61l-372 1l-45 -111h-56l44 110h-110v55h131l70 175l-201 -94v59l230 108l65 164l-295 138v60l315 -148l102 256h57l-111 -279l176 -82v-22l-268 -126l-83 -208h351v-56zM503 340l-95 44l-43 -109'],
0x2272: [604,118,668,65,604,'604 76l-539 252v21l539 255v-59l-438 -205l438 -205v-59zM604 40c0 0 -11 -158 -159 -158c-114 0 -144 115 -238 115c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139c105 0 148 -116 239 -116c94 0 118 112 118 115'],
0x2273: [604,118,668,65,604,'65 76l539 252v21l-539 255v-59l438 -205l-438 -205v-59zM604 40c0 0 -11 -158 -159 -158c-114 0 -144 115 -238 115c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139c105 0 148 -116 239 -116c94 0 118 112 118 115'],
0x2276: [626,85,668,65,604,'604 216l-539 196v21l539 193v-52l-420 -152l420 -153v-53zM604 111l-539 -196v53l420 153l-420 152v52l539 -193v-21'],
0x2277: [626,85,668,65,604,'604 412l-539 -196v53l420 153l-420 152v52l539 -193v-21zM604 -85l-539 196v21l539 193v-52l-420 -152l420 -153v-53'],
0x227A: [553,14,668,65,604,'604 -14c0 0 -102 200 -539 270v26c437 70 539 269 539 271v-81s-103 -143 -378 -201c275 -59 378 -204 378 -204v-81'],
0x227B: [553,14,668,65,604,'604 257c-437 -70 -539 -269 -539 -271v81s103 144 378 202c-275 59 -378 203 -378 203v81s102 -200 539 -270v-26'],
0x227C: [632,91,668,65,604,'604 66c0 0 -102 200 -539 270v26c437 70 539 269 539 270v-80c0 -1 -103 -144 -378 -201v-2c275 -58 378 -202 378 -203v-80zM604 -91c0 0 -113 227 -539 291v53c447 -78 539 -261 539 -262v-82'],
0x227D: [632,91,668,65,604,'604 336c-437 -70 -539 -270 -539 -270v80c0 1 103 145 378 203v2c-275 57 -378 200 -378 201v80c0 -1 102 -200 539 -270v-26zM604 200c-425 -63 -539 -289 -539 -291v82c0 1 93 183 539 262v-53'],
0x227E: [621,118,668,65,604,'604 56c0 0 -102 200 -539 269v27c437 69 539 268 539 269v-80c0 -1 -103 -143 -378 -201v-1c275 -59 378 -202 378 -203v-80zM604 40c0 0 -11 -158 -159 -158c-114 0 -144 115 -238 115c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139c105 0 148 -116 239 -116 c94 0 118 112 118 115'],
0x227F: [621,118,668,65,604,'65 56c0 0 102 200 539 269v27c-437 69 -539 268 -539 269v-80c0 -1 103 -143 378 -201v-1c-275 -59 -378 -202 -378 -203v-80zM604 40c0 0 -11 -158 -159 -158c-114 0 -144 115 -238 115c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139c105 0 148 -116 239 -116 c94 0 118 112 118 115'],
0x2280: [648,107,668,65,604,'604 67v-80s-58 114 -275 199l-116 -293h-57l124 312c-60 20 -132 38 -215 51v26c113 19 203 45 276 75l116 291h57l-103 -259c150 78 193 164 193 164v-81s-63 -88 -222 -154l-34 -83c185 -68 256 -168 256 -168zM316 294c-28 -9 -58 -17 -90 -25c26 -6 50 -11 73 -18'],
0x2281: [648,107,668,65,604,'604 257c-113 -19 -204 -45 -277 -75l-114 -289h-57l102 256c-150 -77 -193 -162 -193 -163v81s63 88 221 154l34 84c-183 68 -255 167 -255 167v81s58 -114 274 -200l118 295h57l-126 -313c61 -21 133 -39 216 -52v-26zM443 269c-25 7 -50 13 -72 19l-18 -43 c28 9 58 17 90 24'],
0x2282: [533,-8,668,55,615,'615 8h-301c-147 0 -259 123 -259 263s112 262 259 262h301v-53h-306c-114 0 -202 -98 -202 -209c0 -118 100 -210 202 -210h306v-53'],
0x2283: [533,-8,668,55,615,'615 271c0 -140 -113 -263 -259 -263h-301v53h305c102 0 202 92 202 210c0 111 -87 209 -202 209h-305v53h301c146 0 259 -122 259 -262'],
0x2286: [602,61,668,55,615,'615 78l-301 -1c-147 0 -259 122 -259 262s112 263 259 263h301v-53h-306c-114 0 -202 -97 -202 -209c0 -118 100 -209 202 -209h306v-53zM615 -61h-534v56h534v-56'],
0x2287: [602,61,668,55,615,'615 340c0 -140 -113 -263 -259 -263h-301v52h305c102 0 202 92 202 210c0 112 -87 209 -202 209h-305v54h301c146 0 259 -122 259 -262zM588 -61h-533v56h533v-56'],
0x2288: [712,171,668,55,615,'615 -61l-383 1l-45 -111h-56l44 110h-94v55h115l39 95c-107 35 -180 137 -180 250c0 141 112 262 259 262h124l44 111h57l-44 -110h120v-53h-141l-167 -418h308v-53h-301c-10 0 -18 0 -28 1l-33 -84h362v-56zM417 549h-108c-114 0 -202 -97 -202 -209 c0 -97 67 -176 147 -201'],
0x2289: [712,171,668,55,615,'615 340c0 -141 -113 -262 -259 -262h-70l-33 -83h335v-55h-356l-45 -111h-56l44 110h-120v55h141l33 83h-174v52h196l164 412c-18 5 -36 7 -55 7h-305v54h301c27 0 53 -5 78 -13l48 123h57l-57 -144c80 -45 133 -133 133 -228zM562 339c0 75 -39 144 -100 181l-155 -391 h53c102 0 202 91 202 210'],
0x228A: [602,114,668,55,615,'615 78l-301 -1c-147 0 -259 122 -259 262s112 263 259 263h301v-53h-306c-114 0 -202 -97 -202 -209c0 -118 100 -209 202 -209h306v-53zM615 -61l-271 1l-48 -54h-72l50 53h-193v55h244l49 53h71l-49 -52h219v-56'],
0x228B: [602,114,668,55,615,'615 340c0 -140 -113 -263 -259 -263h-301v52h305c102 0 202 92 202 210c0 112 -87 209 -202 209h-305v54h301c146 0 259 -122 259 -262zM588 -61l-244 1l-48 -54h-72l50 53h-219v55h270l49 53h71l-49 -52h192v-56'],
0x228E: [603,0,687,65,623,'505 323h-133v-124h-54v124h-135v54h134v137h54v-137h134v-54zM623 278c0 -157 -128 -278 -278 -278c-149 0 -280 121 -280 278v325h54v-329c0 -126 104 -220 225 -220c128 0 225 108 225 220v329h54v-325'],
0x228F: [533,-8,668,55,615,'615 8h-560v525h560v-53h-508v-417h508v-55'],
0x2290: [533,-8,668,55,615,'615 8h-560v55h507v417h-507v53h560v-525'],
0x2291: [602,61,668,55,615,'615 80l-560 -1v523h560v-53h-508v-414h508v-55zM615 -61h-559v56h559v-56'],
0x2292: [602,61,668,55,615,'615 80l-560 -1v55h507v414h-507v54h560v-522zM614 -61h-559v56h559v-56'],
0x2293: [576,0,687,65,623,'623 0h-54v522h-450v-522h-54v576h558v-576'],
0x2294: [576,0,687,65,623,'623 0h-558v576h54v-522h450v522h54v-576'],
0x2295: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM603 295c-11 130 -116 232 -245 244v-244h245zM312 539c-131 -12 -234 -115 -246 -244h245zM603 246l-245 1v-245c129 12 234 115 245 244zM312 2v245 h-246c12 -130 115 -233 246 -245'],
0x2296: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM603 295c-12 138 -129 245 -268 245c-140 0 -257 -108 -269 -245h537zM603 246l-537 1c12 -138 129 -246 269 -246c139 0 256 108 268 245'],
0x2297: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM507 477c-47 39 -107 63 -172 63c-66 0 -127 -24 -173 -63l173 -173zM604 271c0 65 -24 126 -63 172l-173 -172l173 -173c39 46 63 107 63 173zM301 271 l-173 172c-39 -46 -63 -107 -63 -172c0 -66 24 -127 63 -173zM507 64l-172 173l-173 -173c46 -39 107 -63 173 -63c65 0 125 24 172 63'],
0x2298: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM507 477c-47 39 -107 63 -172 63c-149 0 -270 -121 -270 -269c0 -66 24 -127 63 -173zM604 271c0 65 -24 126 -63 172l-379 -379c46 -39 107 -63 173 -63 c147 0 269 121 269 270'],
0x2299: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM604 271c0 148 -122 269 -269 269c-149 0 -270 -121 -270 -269c0 -149 121 -270 270 -270c147 0 269 121 269 270zM393 271c0 -30 -27 -58 -59 -58 c-31 0 -57 28 -57 58s27 58 57 58c31 0 59 -28 59 -58'],
0x229A: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM604 271c0 147 -122 269 -269 269c-149 0 -270 -121 -270 -269c0 -149 121 -270 270 -270c147 0 269 122 269 270zM336 125c-79 0 -148 67 -148 146 c0 78 69 145 147 145c77 0 146 -68 146 -145c0 -78 -68 -146 -145 -146zM434 271c0 54 -44 98 -99 98s-100 -42 -100 -98c0 -57 45 -99 100 -99s99 43 99 99'],
0x229B: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM604 271c0 148 -122 269 -269 269c-149 0 -270 -121 -270 -269c0 -149 121 -270 270 -270c147 0 269 121 269 270zM498 245l-9 -8l-138 20l68 -122v-10 l-68 -26l-10 4l-18 142l-106 -88h-10l-36 51l5 9l131 54l-131 64l-3 10l44 52h10l95 -98l26 143l9 4l62 -19l3 -15l-69 -125l139 24l6 -4v-62'],
0x229D: [587,46,668,18,652,'652 271c0 -175 -143 -317 -317 -317c-175 0 -317 142 -317 317c0 174 142 316 317 316c174 0 317 -142 317 -316zM604 271c0 148 -122 269 -269 269c-149 0 -270 -121 -270 -269c0 -149 121 -270 270 -270c147 0 269 121 269 270zM541 246h-413v49h413v-49'],
0x229E: [541,0,668,64,605,'605 0h-541v541h541v-541zM546 300v182h-182v-182h182zM305 300v182h-182v-182h182zM546 59v182h-182v-182h182zM305 59v182h-182v-182h182'],
0x229F: [541,0,668,64,605,'605 0h-541v541h541v-541zM546 300v182h-423v-182h423zM546 59v182h-423v-182h423'],
0x22A0: [541,0,668,64,605,'605 0h-541v541h541v-541zM504 482h-339l170 -169zM546 101v339l-169 -169zM293 271l-170 169v-339zM504 59l-169 169l-170 -169h339'],
0x22A1: [541,0,668,64,605,'605 0h-541v541h541v-541zM546 59v423h-423v-423h423zM392 271c0 -30 -27 -58 -59 -58c-31 0 -57 28 -57 58s27 58 57 58c31 0 59 -28 59 -58'],
0x22A2: [599,20,748,85,664,'144 599v-280h520v-59h-520v-280h-59v619h59'],
0x22A3: [599,20,748,85,664,'605 599v-280h-520v-59h520v-280h59v619h-59'],
0x22A4: [579,0,748,65,684,'684 520h-280v-520h-59v520h-280v59h619v-59'],
0x22A5: [579,0,748,65,684,'684 0h-619v59h280v520h59v-520h280v-59'],
0x22A8: [541,0,490,65,425,'425 135h-301v-135h-59v541h59v-134h301v-59h-301v-154h301v-59'],
0x22A9: [541,0,618,65,554,'554 241h-301v-241h-59v541h59v-241h301v-59zM124 0h-59v541h59v-541'],
0x22AA: [541,0,746,65,682,'682 241h-301v-241h-59v541h59v-241h301v-59zM253 0h-59v541h59v-541zM124 0h-59v541h59v-541'],
0x22AC: [541,0,490,-47,425,'425 241h-223l-78 -103v-138h-59v59l-44 -59h-68l112 151v390h59v-241h53l178 239h70l-179 -239h179v-59zM124 229l9 12h-9v-12'],
0x22AD: [592,50,490,204,776,'530 348h-154v-154zM376 123v-123h-59l1 65l-114 -115v70l113 115v406h59v-134h214l186 185v-68l-118 -118l19 1v-59h-77l-152 -153l229 -1v-59l-287 1'],
0x22AE: [541,0,618,-47,554,'554 241h-289l-12 -13v-228h-59v167l-70 -71v-96h-59v35l-33 -35h-79l112 117v424h59v-363l70 71v292h59v-230l220 228h81l-232 -239h232v-59'],
0x22AF: [541,0,748,-47,684,'684 135h-431v-135h-59v167l-70 -71v-96h-59v35l-33 -35h-79l112 117v424h59v-363l70 71v292h59v-134h92l128 132h81l-128 -132h258v-59h-315l-116 -120v-34h431v-59zM288 348h-35v-37'],
0x22B2: [535,-6,669,65,604,'604 6l-539 254v21l539 254v-529zM552 89v363l-386 -181'],
0x22B3: [535,-6,669,65,604,'604 260l-539 -254v529l539 -254v-21zM503 271l-385 181v-363'],
0x22B4: [604,61,669,65,604,'604 76l-539 252v21l539 255v-528zM604 -61h-539v56h539v-56zM552 159v361l-386 -180'],
0x22B5: [604,61,669,65,604,'604 329l-539 -254v529l539 -253v-22zM604 -61h-539v56h539v-56zM503 340l-385 180v-361'],
0x22B8: [446,-94,1016,65,952,'952 269c0 -97 -79 -175 -177 -175c-89 0 -161 63 -175 147h-535v59h536c14 83 85 146 174 146c98 0 177 -79 177 -177zM899 269c0 69 -56 125 -124 125c-70 0 -124 -56 -124 -125c0 -68 54 -123 124 -123c68 0 124 55 124 123'],
0x22BA: [579,0,748,125,625,'625 520h-221v-520h-59v520h-220v59h500v-59'],
0x22BB: [639,99,687,65,623,'623 639l-270 -584h-18l-270 584h60l218 -479l220 479h60zM623 -99h-558v55h558v-55'],
0x22BC: [639,99,687,65,623,'623 584h-558v55h558v-55zM623 -99h-60l-218 479l-220 -479h-60l271 584h18'],
0x22C4: [453,-89,273,22,253,'253 271l-116 -182l-115 182l115 182zM218 271l-81 126l-80 -126l80 -127'],
0x22C5: [329,-213,249,67,183,'124 329c31 0 59 -28 59 -59c0 -30 -28 -57 -60 -57c-29 0 -56 28 -56 58s27 58 57 58'],
0x22C6: [577,37,708,32,678,'678 344l-201 -147l78 -234l-199 147l-200 -147l76 231l-200 149h247l75 234l76 -233h248'],
0x22C8: [515,-23,758,65,694,'694 23l-316 210l-313 -210v492l313 -208l316 208v-492zM629 141v260l-193 -130zM322 271l-194 130v-260'],
0x22C9: [515,-23,758,65,694,'694 23l-316 210l-313 -210v492l313 -208l316 208v-71l-258 -173l258 -173v-75zM322 271l-194 130v-260'],
0x22CA: [518,-26,758,65,694,'694 26l-313 208l-316 -208v71l258 174l-258 172v75l316 -210l313 210v-492zM631 140v260l-194 -129'],
0x22CB: [714,36,858,55,804,'804 0l-36 -36l-339 339l-338 -339l-36 36l339 339l-339 339l36 36'],
0x22CC: [714,36,858,55,804,'804 0l-36 -36l-339 339l-338 -339l-36 36l713 714l36 -36l-339 -339'],
0x22CD: [434,-106,668,58,611,'611 295l-21 -9c-25 54 -63 93 -125 93c-82 0 -143 -114 -245 -114c-101 0 -147 65 -162 158l21 10c17 -61 51 -115 120 -115c82 0 152 116 246 116c92 0 141 -56 166 -139zM604 106h-537v59h537v-59'],
0x22CE: [541,-2,668,52,617,'617 541c-158 -80 -246 -384 -271 -539h-26c-24 153 -112 460 -268 539h80c105 -76 176 -259 202 -378c25 119 97 302 202 378h81'],
0x22CF: [541,-2,668,52,617,'617 2h-81c-105 76 -177 259 -202 378c-26 -119 -97 -302 -202 -378h-80c156 79 244 386 268 539h26c25 -155 113 -459 271 -539'],
0x22D0: [533,-8,668,55,615,'615 139h-287c-74 0 -132 61 -132 132c0 69 58 131 132 131h287v-53h-288c-44 0 -78 -35 -78 -78c0 -45 38 -79 78 -79h288v-53zM615 8h-301c-147 0 -259 123 -259 263s112 262 259 262h301v-53h-306c-114 0 -202 -98 -202 -209c0 -118 100 -210 202 -210h306v-53'],
0x22D1: [533,-8,668,55,615,'615 271c0 -140 -113 -263 -259 -263h-301v53h305c102 0 202 92 202 210c0 111 -87 209 -202 209h-305v53h301c146 0 259 -122 259 -262zM474 271c0 -70 -58 -132 -133 -132h-286v53h287c44 0 78 35 78 79s-38 76 -78 76h-287v55h286c75 0 133 -61 133 -131'],
0x22D2: [603,0,687,65,623,'623 0h-54v329c0 126 -104 220 -225 220c-127 0 -225 -108 -225 -220v-329h-54v325c0 157 129 278 278 278c150 0 280 -121 280 -278v-325zM500 0h-53v316c0 58 -47 101 -103 101c-58 0 -103 -50 -103 -101v-316h-53v314c0 88 72 157 155 157c84 0 157 -69 157 -157v-314 '],
0x22D3: [603,0,687,65,623,'500 289c0 -88 -72 -156 -155 -156c-84 0 -157 68 -157 156v314h53v-316c0 -58 47 -101 103 -101c58 0 103 51 103 101v316h53v-314zM623 278c0 -157 -128 -278 -278 -278c-149 0 -280 121 -280 278v325h54v-329c0 -126 104 -220 225 -220c128 0 225 108 225 220v329h54 v-325'],
0x22D4: [771,0,687,65,623,'623 0h-54v329c0 104 -83 204 -195 218v-547h-59v547c-109 -13 -196 -103 -196 -218v-329h-54v325c0 147 113 261 249 276v170h59v-169c138 -15 250 -129 250 -277v-325'],
0x22D6: [535,-5,668,65,604,'531 271c0 -32 -26 -56 -56 -56c-32 0 -56 24 -56 56c0 30 24 55 56 55c30 0 56 -25 56 -55zM604 5l-539 255v21l539 254v-59l-438 -205l438 -207v-59'],
0x22D7: [535,-5,668,65,604,'604 260l-539 -255v59l438 207l-438 205v59l539 -254v-21zM251 271c0 -32 -26 -56 -56 -56c-32 0 -56 24 -56 56c0 30 24 55 56 55c30 0 56 -25 56 -55'],
0x22D8: [536,-6,1278,55,1224,'1224 6l-544 255v21l544 254v-59l-442 -205l442 -207v-59zM912 6l-545 255v21l545 254v-59l-443 -205l443 -207v-59zM599 6l-544 255v21l544 254v-59l-442 -205l442 -207v-59'],
0x22D9: [536,-6,1278,55,1224,'1224 261l-544 -255v59l442 207l-442 205v59l544 -254v-21zM912 261l-545 -255v59l442 207l-442 205v59l545 -254v-21zM599 261l-544 -255v59l442 207l-442 205v59l544 -254v-21'],
0x22DA: [831,289,668,65,604,'604 420l-539 195v21l539 195v-53l-420 -152l420 -152v-54zM604 317h-539v56h539v-56zM604 158h-539v56h539v-56zM604 -94l-539 -195v53l420 153l-420 151v54l539 -194v-22'],
0x22DB: [831,289,668,65,604,'604 616l-539 -197v54l420 152l-420 152v54l539 -194v-21zM604 317h-539v56h539v-56zM604 158h-539v56h539v-56zM604 -289l-539 195v21l539 195v-53l-420 -151l420 -153v-54'],
0x22DE: [632,91,668,65,604,'604 551c0 -2 -92 -184 -539 -263v53c426 63 539 290 539 291v-81zM604 -91c0 2 -102 200 -539 270v26c437 70 539 270 539 270v-80c0 -1 -103 -144 -378 -203v-1c275 -58 378 -200 378 -202v-80'],
0x22DF: [632,91,668,65,604,'604 288c-446 78 -539 261 -539 263v81s114 -227 539 -291v-53zM604 179c-437 -70 -539 -268 -539 -270v80c0 2 103 144 378 202v1c-275 59 -378 202 -378 203v80s102 -200 539 -270v-26'],
0x22E0: [648,107,668,65,604,'604 -91c0 0 -68 136 -295 227l-96 -243h-57l104 261c-56 19 -121 34 -195 46v53c82 -15 153 -33 213 -53l29 75c-65 24 -146 45 -242 61v26c134 21 236 55 314 91l78 195h57l-63 -157c117 71 153 140 153 141v-80c0 -1 -52 -74 -183 -137l-44 -111 c163 -67 227 -157 227 -158v-80s-53 105 -247 189l-29 -73c223 -85 276 -190 276 -191v-82zM353 386c-38 -13 -79 -26 -127 -35v-2c37 -7 70 -16 101 -27'],
0x22E1: [648,107,668,65,604,'604 200c-128 -19 -228 -54 -305 -92l-86 -215h-57l69 173c-120 -77 -160 -155 -160 -157v82c0 1 40 78 192 154l40 103c-181 -83 -232 -182 -232 -182v80c0 1 73 102 259 170l24 60c-204 66 -283 175 -283 176v80c0 -1 63 -124 301 -210l91 226h57l-97 -243 c54 -17 117 -31 187 -43v-26c-95 -15 -173 -36 -239 -60l-40 -99c73 28 164 56 279 76v-53zM443 349v2c-15 3 -29 6 -44 10l-10 -24c17 4 35 8 54 12'],
0x22E6: [604,118,668,65,604,'604 76l-539 252v21l539 255v-59l-438 -205l438 -205v-59zM604 40c0 0 -11 -158 -159 -158c-49 0 -82 21 -112 45l-37 -41h-72l70 74c-26 20 -52 37 -87 37c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139c45 0 78 -21 109 -46l38 41h71l-68 -73c26 -21 55 -38 89 -38 c94 0 118 112 118 115'],
0x22E7: [604,118,668,65,604,'604 329l-539 -254v59l438 205l-438 205v60l539 -253v-22zM604 40c0 0 -11 -158 -159 -158c-49 0 -82 21 -112 45l-37 -41h-72l70 74c-26 20 -52 37 -87 37c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139c45 0 78 -21 109 -46l38 41h71l-68 -73c26 -21 55 -38 89 -38 c94 0 118 112 118 115'],
0x22E8: [621,118,668,65,604,'604 56c0 0 -102 200 -539 269v27c437 69 539 268 539 269v-80c0 -1 -103 -143 -378 -201v-1c275 -59 378 -202 378 -203v-80zM604 40c0 0 -11 -158 -159 -158c-49 0 -82 21 -112 45l-37 -41h-72l70 74c-26 20 -52 37 -87 37c-87 0 -121 -92 -121 -94l-21 10 s28 139 162 139c45 0 78 -21 109 -46l38 41h71l-68 -73c26 -21 55 -38 89 -38c94 0 118 112 118 115'],
0x22E9: [621,118,668,65,604,'604 325c-437 -69 -539 -268 -539 -269v80c0 1 103 144 378 202v1c-275 59 -378 201 -378 202v80s102 -200 539 -269v-27zM604 40c0 0 -11 -158 -159 -158c-49 0 -82 21 -112 45l-37 -41h-72l70 74c-26 20 -52 37 -87 37c-87 0 -121 -92 -121 -94l-21 10s28 139 162 139 c45 0 78 -21 109 -46l38 41h71l-68 -73c26 -21 55 -38 89 -38c94 0 118 112 118 115'],
0x22EA: [648,107,668,65,604,'604 6l-291 137l-100 -250h-57l108 273l-199 94v21l302 142l90 225h57l-77 -192l167 79v-529zM552 89v363l-144 -68l-75 -191zM339 352l-173 -81l118 -56'],
0x22EB: [648,107,668,65,604,'604 260l-302 -142l-89 -225h-57l77 192l-168 -79v529l292 -137l100 250h57l-109 -273l199 -94v-21zM503 271l-118 55l-54 -137zM337 348l-219 104v-363l143 68'],
0x22EC: [712,171,668,65,604,'604 -61l-372 1l-45 -111h-56l44 110h-110v55h131l70 175l-201 -94v529l315 -148l102 256h57l-111 -279l176 -82v-22l-268 -126l-83 -208h351v-56zM503 340l-95 44l-43 -109zM360 406l-242 114v-361l177 83'],
0x22ED: [712,171,668,65,604,'604 -61l-372 1l-45 -111h-56l44 110h-110v55h131l91 230l-222 104v21l336 159l81 204h57l-68 -171l133 63v-529l-268 127l-83 -207h351v-56zM552 159v361l-110 -52l-86 -217zM373 436l-207 -96l141 -66'],
0x22EE: [674,142,249,67,183,'124 674c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM124 329c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM124 -26c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58'],
0x22EF: [329,-213,1000,109,891,'499 329c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM166 329c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58zM832 329c31 0 59 -28 59 -58c0 -31 -28 -58 -60 -58c-29 0 -56 28 -56 58s27 58 57 58'],
0x22F1: [568,16,774,95,680,'428 318c22 -22 22 -62 1 -83c-22 -22 -61 -21 -83 1c-21 21 -20 60 1 81s60 22 81 1zM193 553c22 -22 22 -61 1 -83c-22 -21 -61 -21 -84 2c-20 20 -20 59 2 80c21 22 60 22 81 1zM664 82c22 -22 22 -61 0 -82c-21 -22 -60 -22 -83 1c-20 20 -20 59 1 81 c22 21 61 21 82 0'],
0x2308: [713,172,390,93,355,'355 649h-199v-821h-63v885h262v-64'],
0x2309: [713,172,390,36,298,'298 -172h-63v821h-199v64h262v-885'],
0x230A: [713,172,390,93,355,'355 -172h-262v885h63v-821h199v-64'],
0x230B: [713,172,390,36,298,'298 -172h-262v64h199v821h63v-885'],
0x2322: [333,-164,691,65,627,'627 175l-14 -11c-81 92 -188 108 -268 108s-185 -16 -266 -108l-14 11c23 29 89 158 281 158s258 -129 281 -158'],
0x2323: [333,-164,691,65,627,'627 323c-23 -30 -89 -159 -281 -159s-258 129 -281 159l14 10c81 -92 186 -108 266 -108s187 16 268 108'],
0x23B4: [755,-545,601,0,602,'0 745l14 10c30 -6 58 -8 126 -8h314c50 0 108 8 134 8l14 -10l-2 -100l2 -94l-6 -6h-26l-4 4v38c0 28 -6 50 -16 62c-10 8 -24 12 -58 12h-382c-34 0 -48 -4 -56 -12c-10 -12 -16 -34 -16 -62v-38l-6 -4h-26l-6 6l2 100'],
0x23B5: [-165,375,601,0,602,'0 -365l14 -10c30 6 58 8 126 8h314c50 0 108 -8 134 -8l14 10l-2 100l2 94l-6 6h-26l-4 -4v-38c0 -28 -6 -50 -16 -62c-10 -8 -24 -12 -58 -12h-382c-34 0 -48 4 -56 12c-10 12 -16 34 -16 62v38l-6 4h-26l-6 -6l2 -100'],
0x23DC: [786,-545,1069,64,1006,'546 700c-286 0 -407 -112 -460 -155l-22 19c78 82 116 114 184 149c90 48 190 73 298 73c276 0 409 -165 460 -222l-26 -19c-68 58 -174 155 -434 155'],
0x23DD: [-545,786,1069,64,1006,'546 -700c-286 0 -407 112 -460 155l-22 -19c78 -82 116 -114 184 -149c90 -48 190 -73 298 -73c276 0 409 165 460 222l-26 19c-68 -58 -174 -155 -434 -155'],
0x23DE: [776,-545,1029,64,966,'64 545c3 102 15 155 158 155h168c80 0 102 14 122 76c20 -59 48 -76 120 -76h186c138 0 145 -69 148 -155c-22 63 -52 83 -132 83h-186c-88 0 -116 18 -136 84c-20 -68 -48 -84 -144 -84h-174c-78 0 -112 -22 -130 -83'],
0x23DF: [-545,776,1029,64,966,'64 -545c3 -102 15 -155 158 -155h168c80 0 102 -14 122 -76c20 59 48 76 120 76h186c138 0 145 69 148 155c-22 -63 -52 -83 -132 -83h-186c-88 0 -116 -18 -136 -84c-20 68 -48 84 -144 84h-174c-78 0 -112 22 -130 83'],
0x23E0: [755,-545,1029,65,965,'266 697l-126 -152h-75l167 210h566l167 -210h-75l-125 152h-499'],
0x23E1: [-545,755,1029,65,965,'266 -697l-126 152h-75l167 -210h566l167 210h-75l-125 -152h-499'],
0x25A0: [541,0,669,64,605,'605 0h-541v541h541v-541'],
0x25A1: [560,0,688,65,623,'623 560v-560h-558v560h558zM569 504h-450v-450h450v450'],
0x25B2: [577,0,667,44,623,'623 0h-579l290 577'],
0x25B3: [577,0,667,44,623,'623 0h-579l290 577zM551 44l-217 432l-217 -432h434'],
0x25B6: [578,1,667,45,622,'45 -1v579l577 -290'],
0x25BC: [577,0,667,44,623,'623 577l-289 -577l-290 577h579'],
0x25BD: [577,0,667,44,623,'623 577l-289 -577l-290 577h579zM551 533h-434l217 -432'],
0x25C0: [578,1,667,45,622,'622 -1v579l-577 -290'],
0x2660: [592,0,570,44,526,'526 214c0 -36 -6 -139 -103 -139c-28 0 -99 2 -99 63h-24c0 -37 9 -90 23 -138h-76c15 48 25 101 25 138h-25c0 -61 -71 -63 -100 -63c-98 0 -103 106 -103 140v9c0 130 158 184 242 368c83 -184 240 -238 240 -368v-10'],
0x2661: [591,7,636,44,593,'318 -7c-86 194 -274 269 -274 415c0 87 29 183 134 183c118 0 106 -86 141 -86c36 0 22 86 141 86c104 0 133 -96 133 -183c0 -144 -186 -222 -275 -415zM551 408c0 63 -13 140 -91 140c-99 0 -66 -85 -141 -85s-42 85 -141 85c-79 0 -92 -77 -92 -140 c0 -90 128 -191 182 -257c18 -22 35 -46 50 -70c16 24 32 48 50 70c55 66 183 167 183 257'],
0x2662: [642,101,559,44,516,'516 271l-236 -372l-236 372l236 371zM444 271l-164 257l-164 -257l164 -258'],
0x2663: [584,0,607,44,564,'564 202c0 -74 -49 -129 -119 -129c-28 0 -92 14 -92 65h-35c0 -37 9 -90 23 -138h-76c15 48 23 101 23 138h-33c0 -51 -63 -65 -93 -65c-68 0 -118 55 -118 129c0 66 36 134 101 134c33 0 55 -18 64 -29l15 32c-33 25 -52 67 -52 110c0 72 63 135 132 135 s133 -62 133 -135c0 -45 -21 -86 -53 -111l16 -31c9 12 29 29 63 29c65 0 101 -68 101 -134'],
0x266D: [668,122,436,69,387,'387 216c0 -199 -189 -338 -293 -338c-17 0 -25 11 -25 29v749l43 12v-326c1 0 52 44 120 44c99 0 155 -65 155 -170zM296 213c0 76 -22 146 -80 146c-79 0 -104 -59 -104 -100v-326c0 -8 2 -15 19 -15c81 0 165 137 165 295'],
0x266E: [758,216,396,63,347,'347 -204l-42 -12v249l-242 -95v807l42 13v-306l242 94v-750zM305 128v307l-200 -78v-307'],
0x266F: [775,234,422,53,384,'384 71l-48 -19v-196l-43 -16v196l-151 -58v-196l-43 -16v195l-46 -16v92l46 18v309l-46 -18v92l46 18v229l43 16v-227l151 58v227l43 16v-226l48 19v-95l-48 -19v-307l48 19v-95zM293 131v307l-151 -58v-308'],
0x2713: [742,7,782,44,749,'749 714c-205 -207 -357 -449 -452 -721h-85c-38 96 -94 179 -168 253l29 31c66 -65 122 -130 172 -198c119 243 277 466 475 663'],
0x2720: [682,95,876,49,827,'827 77h-27c-81 140 -191 199 -351 206c-1 -159 74 -275 210 -354v-24h-443v23c138 79 211 195 211 355c-167 -7 -272 -69 -355 -215h-23v441h28c81 -140 190 -199 350 -205c1 159 -73 275 -209 353v25h443v-23c-138 -79 -212 -195 -212 -355c168 6 273 68 356 215h22 v-442'],
0x27E8: [713,172,381,53,329,'329 -172h-50l-226 443l226 442h50l-204 -442'],
0x27E9: [713,172,381,53,329,'329 271l-226 -443h-50l204 443l-204 442h50'],
0x27EE: [726,223,245,61,213,'213 -202l-5 -21c-22 39 -147 150 -147 474s125 436 147 475l5 -21c-64 -112 -71 -274 -71 -454s7 -341 71 -453'],
0x27EF: [726,223,245,32,184,'32 -202l5 -21c22 39 147 150 147 474s-125 436 -147 475l-5 -21c64 -112 71 -274 71 -454s-7 -341 -71 -453'],
0x27F5: [488,-57,1513,65,1444,'1444 243h-1265l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h1265v-59'],
0x27F6: [488,-57,1513,65,1444,'65 302h1265l-125 152l32 34l207 -214l-207 -217l-32 34l125 152h-1265v59'],
0x27F7: [486,-55,1513,65,1449,'1449 272l-206 -217l-32 33l124 153h-1156l125 -152l-32 -34l-207 214l207 217l32 -34l-125 -152h1156l-124 151l32 35'],
0x27F8: [537,-5,1513,65,1449,'1449 134h-1181l80 -98l-29 -31l-254 266l254 266l29 -33l-80 -98h1181v-59h-1230l-64 -78l63 -76h1231v-59'],
0x27F9: [537,-5,1513,65,1449,'65 134h1181l-80 -98l29 -31l254 266l-254 266l-29 -33l80 -98h-1181v-59h1230l64 -78l-63 -76h-1231v-59'],
0x27FA: [537,-5,1513,59,1454,'1454 271l-254 -266l-29 31l80 99h-989l80 -98l-29 -32l-254 264l254 268l29 -32l-80 -99h989l-80 98l29 33zM1365 271l-65 77h-1087l-64 -77l63 -78h1089'],
0x27FC: [486,-55,1513,36,1478,'94 300h1270l-124 152l32 34l206 -217l-206 -214l-32 34l124 152h-1270v-186h-58v431h58v-186'],
0x2997: [709,191,384,87,298,'197 10l101 -126v-75l-211 167v566l210 167v-75l-100 -125v-499'],
0x2998: [709,191,384,87,298,'188 10l-101 -126v-75l211 167v566l-210 167v-75l100 -125v-499'],
0x29EB: [642,101,559,44,516,'516 271l-236 -372l-236 372l236 371'],
0x29F5: [714,169,463,65,399,'399 -169h-55l-279 883h55'],
0x29F8: [1021,510,402,0,403,'403 1021l-337 -1531h-66l337 1531h66'],
0x29F9: [1021,510,402,0,403,'403 -510h-66l-337 1531h66'],
0x2A3F: [617,76,812,25,788,'788 -73l-4 -3c-131 3 -264 3 -396 3c-120 0 -240 -2 -360 -3l-3 3v24l3 3c16 1 69 1 81 13c16 16 13 139 13 167v268c0 29 2 154 -11 170c-12 13 -66 13 -83 14l-3 3v24l3 4c48 -2 94 -4 141 -4c45 0 90 3 135 4l4 -4v-24l-4 -3c-17 -1 -70 -1 -82 -14 c-14 -15 -11 -151 -11 -179v-428l391 1v433c0 28 3 157 -10 173c-11 12 -67 13 -83 14l-4 3v24l4 4c47 -2 94 -4 140 -4s91 3 135 4l4 -4v-24l-4 -3c-16 -1 -69 -2 -81 -14c-14 -15 -11 -142 -11 -168v-266c0 -27 -4 -157 11 -171c13 -13 64 -12 81 -13l4 -3v-24'],
0x2A5E: [636,262,687,65,623,'623 581h-558v55h558v-55zM623 421h-558v55h558v-55zM623 -262h-60l-218 479l-220 -479h-60l271 584h18'],
0x2A7D: [615,74,668,65,604,'604 -15l-539 255v-59l539 -255v59zM604 615l-539 -254v-21l539 -255v59l-438 205l438 206v60'],
0x2A7E: [615,74,668,65,604,'65 -15l539 255v-59l-539 -255v59zM65 615l539 -254v-21l-539 -255v59l438 205l-438 206v60'],
0x2A85: [672,187,668,65,604,'604 143l-539 253v21l539 255v-59l-438 -206l438 -205v-59zM604 94c0 0 -24 -121 -151 -121c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116c75 0 105 76 105 78zM604 -66c0 0 -24 -121 -151 -121 c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116c75 0 105 76 105 78'],
0x2A86: [672,187,668,65,604,'604 397l-539 -255v59l438 205l-438 206v60l539 -254v-21zM604 94c0 0 -24 -121 -151 -121c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116c75 0 105 76 105 78zM604 -66c0 0 -24 -121 -151 -121 c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116c75 0 105 76 105 78'],
0x2A87: [604,114,668,65,604,'604 76l-539 252v21l539 255v-59l-438 -205l438 -205v-59zM604 -61l-260 1l-48 -54h-72l50 53h-209v55h260l49 53h71l-49 -52h208v-56'],
0x2A88: [604,114,668,65,604,'604 329l-539 -254v59l438 205l-438 205v60l539 -253v-22zM604 -61l-260 1l-48 -54h-72l50 53h-209v55h260l49 53h71l-49 -52h208v-56'],
0x2A89: [672,187,668,65,604,'604 143l-539 253v21l539 255v-59l-438 -206l438 -205v-59zM604 -66c0 0 -24 -121 -151 -121c-76 0 -116 49 -162 83l-75 -79h-72l100 104c-10 4 -22 6 -35 6c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139c24 0 44 -6 61 -14l49 50c-41 31 -76 69 -129 69 c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139c66 0 109 -44 154 -78l71 74h71l-97 -101c14 -6 30 -11 46 -11c76 0 105 76 105 78l26 -10s-24 -121 -151 -121c-29 0 -51 7 -71 16l-47 -49c44 -33 84 -74 138 -74c76 0 105 76 105 78'],
0x2A8A: [672,187,668,65,604,'604 397l-539 -255v59l438 205l-438 206v60l539 -254v-21zM604 -66c0 0 -24 -121 -151 -121c-76 0 -116 49 -162 83l-75 -79h-72l100 104c-10 4 -22 6 -35 6c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139c24 0 44 -6 61 -14l49 50c-41 31 -76 69 -129 69 c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139c66 0 109 -44 154 -78l71 74h71l-97 -101c14 -6 30 -11 46 -11c76 0 105 76 105 78l26 -10s-24 -121 -151 -121c-29 0 -51 7 -71 16l-47 -49c44 -33 84 -74 138 -74c76 0 105 76 105 78'],
0x2A8B: [831,289,668,65,604,'604 420l-539 195v21l539 195v-53l-420 -152l420 -152v-54zM604 317h-539v56h539v-56zM604 158h-539v56h539v-56zM604 -94l-539 -195v53l420 153l-420 151v54l539 -194v-22'],
0x2A8C: [831,289,668,65,604,'604 616l-539 -197v54l420 152l-420 152v54l539 -194v-21zM604 317h-539v56h539v-56zM604 158h-539v56h539v-56zM604 -289l-539 195v21l539 195v-53l-420 -151l420 -153v-54'],
0x2A95: [615,74,668,65,604,'604 556l-539 -255v59l539 255v-59zM604 -74l-539 254v21l539 255v-59l-438 -205l438 -206v-60'],
0x2A96: [615,74,668,65,604,'604 302l-539 253v60l539 -254v-59zM604 180l-539 -254v59l438 206l-438 205v60l539 -254v-22'],
0x2AAF: [623,81,668,65,604,'604 -81h-539v56h539v-56zM604 56c0 0 -102 200 -539 270v26c437 70 539 269 539 271v-81s-103 -143 -378 -201c275 -59 378 -204 378 -204v-81'],
0x2AB0: [620,84,668,65,604,'65 -84h539v56h-539v-56zM65 53c0 0 102 200 539 270v26c-437 70 -539 269 -539 271v-81s103 -143 378 -201c-275 -59 -378 -204 -378 -204v-81'],
0x2AB5: [680,191,668,65,604,'604 115c0 0 -102 200 -539 270v26c437 69 539 268 539 269v-80c0 -1 -103 -143 -378 -201v-1c275 -59 378 -202 378 -203v-80zM604 -139l-338 2l-50 -54h-72l51 52h-130v55h182l102 105h-284v55h337l51 54h71l-50 -53h130v-54h-183l-102 -106h285v-56'],
0x2AB6: [680,191,668,65,604,'604 385c-437 -70 -539 -269 -539 -270v80c0 1 103 144 378 202v1c-275 59 -378 201 -378 202v80s102 -200 539 -269v-26zM604 -139l-338 2l-50 -54h-72l51 52h-130v55h182l102 105h-284v55h337l51 54h71l-50 -53h130v-54h-183l-102 -106h285v-56'],
0x2AB7: [688,187,668,65,604,'604 123c0 0 -102 200 -539 270v26c437 69 539 268 539 269v-80c0 -1 -103 -143 -378 -201v-1c275 -59 378 -202 378 -203v-80zM604 94c0 0 -24 -121 -151 -121c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116 c75 0 105 76 105 78zM604 -66c0 0 -24 -121 -151 -121c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116c75 0 105 76 105 78'],
0x2AB8: [688,187,668,65,604,'604 393c-437 -70 -539 -269 -539 -270v80c0 1 103 144 378 202v1c-275 59 -378 201 -378 202v80s102 -200 539 -269v-26zM604 94c0 0 -24 -121 -151 -121c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116 c75 0 105 76 105 78zM604 -66c0 0 -24 -121 -151 -121c-115 0 -150 114 -244 114c-87 0 -123 -91 -123 -93l-21 9s29 139 163 139c105 0 153 -116 245 -116c75 0 105 76 105 78'],
0x2AB9: [688,187,668,65,604,'604 123c0 0 -102 200 -539 270v26c437 69 539 268 539 269v-80c0 -1 -103 -143 -378 -201v-1c275 -59 378 -202 378 -203v-80zM604 -66c0 0 -24 -121 -151 -121c-76 0 -116 49 -162 83l-75 -79h-72l100 104c-10 4 -22 6 -35 6c-88 0 -123 -91 -123 -93l-21 9 s29 139 163 139c24 0 44 -6 61 -14l49 50c-41 31 -76 69 -129 69c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139c66 0 109 -44 154 -78l71 74h71l-97 -101c14 -6 30 -11 46 -11c76 0 105 76 105 78l26 -10s-24 -121 -151 -121c-29 0 -51 7 -71 16l-47 -49 c44 -33 84 -74 138 -74c75 0 105 76 105 78'],
0x2ABA: [688,187,668,65,604,'604 393c-437 -70 -539 -269 -539 -270v80c0 1 103 144 378 202v1c-275 59 -378 201 -378 202v80s102 -200 539 -269v-26zM604 -66c0 0 -24 -121 -151 -121c-76 0 -116 49 -162 83l-75 -79h-72l100 104c-10 4 -22 6 -35 6c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139 c24 0 44 -6 61 -14l49 50c-41 31 -76 69 -129 69c-88 0 -123 -91 -123 -93l-21 9s29 139 163 139c66 0 109 -44 154 -78l71 74h71l-97 -101c14 -6 30 -11 46 -11c76 0 105 76 105 78l26 -10s-24 -121 -151 -121c-29 0 -51 7 -71 16l-47 -49c44 -33 84 -74 138 -74 c75 0 105 76 105 78'],
0x2AC5: [669,131,668,55,615,'615 145l-301 -1c-147 0 -259 122 -259 262s112 263 259 263h301v-52h-306c-114 0 -202 -98 -202 -210c0 -118 100 -209 202 -209h306v-53zM615 29h-534v56h534v-56zM615 -131h-534v56h534v-56'],
0x2AC6: [669,131,668,55,615,'615 407c0 -140 -113 -263 -259 -263h-301v53h305c102 0 202 91 202 209c0 112 -87 210 -202 210h-305v53h301c146 0 259 -122 259 -262zM588 29h-533v56h533v-56zM588 -131h-533v56h533v-56'],
0x2ACB: [669,183,668,55,615,'615 145l-301 -1c-147 0 -259 122 -259 262s112 263 259 263h301v-52h-306c-114 0 -202 -98 -202 -210c0 -118 100 -209 202 -209h306v-53zM615 -131l-349 2l-50 -54h-72l51 52h-114v55h166l102 105h-268v55h321l51 54h71l-50 -53h141v-54h-194l-102 -106h296v-56'],
0x2ACC: [669,183,668,55,615,'615 407c0 -140 -113 -263 -259 -263h-301v53h305c102 0 202 91 202 209c0 112 -87 210 -202 210h-305v53h301c146 0 259 -122 259 -262zM588 -131l-322 2l-50 -54h-72l51 52h-140v55h192l102 105h-294v55h347l51 54h71l-50 -53h114v-54h-167l-102 -106h269v-56']
};
MathJax.Ajax.loadComplete(MathJax.OutputJax.SVG.fontDir+"/Main/Regular/Main.js"); | PypiClean |
/pyextoverlay-1.1.4-py3-none-any.whl/pyextoverlay.py | from PyQt5 import QtWidgets, QtCore, QtGui
import sys
# Coded by Xenely
# Structural constant classes
class Alignment:
    """Qt text-alignment flags, used by ``Overlay`` "text" shapes (``align`` key)."""

    left = QtCore.Qt.AlignLeft
    center = QtCore.Qt.AlignCenter
    right = QtCore.Qt.AlignRight
class PenType:
    """Qt pen (line) styles, used as the ``line_type`` of ``Overlay`` shapes."""

    dot = QtCore.Qt.DotLine
    dash = QtCore.Qt.DashLine
    solid = QtCore.Qt.SolidLine
    dashdot = QtCore.Qt.DashDotLine
    dashdotdot = QtCore.Qt.DashDotDotLine
class BrushType:
    """Qt brush (fill) patterns; ``Overlay`` uses ``solid`` for filled shapes and
    ``nobrush`` for outlined ones."""

    hor = QtCore.Qt.HorPattern
    ver = QtCore.Qt.VerPattern
    nobrush = QtCore.Qt.NoBrush
    solid = QtCore.Qt.SolidPattern
    bdiag = QtCore.Qt.BDiagPattern
    fdiag = QtCore.Qt.FDiagPattern
    cross = QtCore.Qt.CrossPattern
    dense1 = QtCore.Qt.Dense1Pattern
    dense2 = QtCore.Qt.Dense2Pattern
    dense3 = QtCore.Qt.Dense3Pattern
    dense4 = QtCore.Qt.Dense4Pattern
    dense5 = QtCore.Qt.Dense5Pattern
    dense6 = QtCore.Qt.Dense6Pattern
    dense7 = QtCore.Qt.Dense7Pattern
    diagcross = QtCore.Qt.DiagCrossPattern
class RenderHints:
    """QPainter render-hint constants, selectable via ``Overlay.set_render_hint``."""

    antialiasing = QtGui.QPainter.Antialiasing
    text_antialiasing = QtGui.QPainter.TextAntialiasing
    qt4_compatible_painting = QtGui.QPainter.Qt4CompatiblePainting
    smooth_pixmap_transform = QtGui.QPainter.SmoothPixmapTransform
    non_cosmetic_default_pen = QtGui.QPainter.NonCosmeticDefaultPen
    lossless_image_rendering = QtGui.QPainter.LosslessImageRendering
    high_quality_antialiasing = QtGui.QPainter.HighQualityAntialiasing
    # Backward-compatible alias for the original misspelled attribute name.
    hight_quality_antialiasing = high_quality_antialiasing
# Overlay classes
class OverlayFont:
    """Thin wrapper around ``QtGui.QFont`` used by ``Overlay`` "text" shapes."""

    def __init__(self, font_family: str, size: int) -> None:
        """Build the underlying QFont with the given family and pixel size."""
        font = QtGui.QFont()
        font.setFamily(font_family)
        font.setPixelSize(size)
        self.__font = font

    def set_family(self, font_family: str) -> None:
        """Change the font family."""
        self.__font.setFamily(font_family)

    def set_size(self, size: int) -> None:
        """Change the font pixel size."""
        self.__font.setPixelSize(size)

    def get_font(self) -> QtGui.QFont:
        """Return the wrapped ``QtGui.QFont`` instance."""
        return self.__font
class Overlay(QtWidgets.QWidget):
    """Transparent, click-through, always-on-top widget that redraws the shapes
    found in ``draw_list`` on a timer.

    Shapes are dicts with a ``"type"`` key (``"rect"``, ``"ellipse"``, ``"line"``,
    ``"text"`` or ``"polygon"``) plus the per-shape keys read in ``paintEvent``.
    ``geometry_list``, when non-empty, holds ``(x, y, w, h)`` applied to the
    window on the next repaint and then cleared.
    """

    def __init__(self, update_inverval_ms: int) -> None:
        # NOTE: parameter name keeps the original (misspelled) public spelling
        # for backward compatibility with keyword callers.
        super().__init__()

        # Frameless, always-on-top tool window that is transparent both
        # visually and for input -> a click-through overlay.
        self.setWindowFlags(QtCore.Qt.Tool | QtCore.Qt.FramelessWindowHint | QtCore.Qt.WindowStaysOnTopHint | QtCore.Qt.WindowTransparentForInput)
        self.setAttribute(QtCore.Qt.WA_TranslucentBackground)

        # Public state consumed by paintEvent.
        self.draw_list = []
        self.geometry_list = []

        self.__overlay_painter = QtGui.QPainter()
        self.__render_hint = RenderHints.hight_quality_antialiasing

        # Timer driving the overlay refresh.
        self.__timer = QtCore.QTimer(self)
        self.__timer.setInterval(update_inverval_ms)
        self.__timer.timeout.connect(lambda: self.update())
        self.__timer.start()

    def paintEvent(self, event: object) -> None:
        """Qt paint hook: apply any pending geometry, then draw every shape."""
        painter = self.__overlay_painter
        painter.begin(self)
        painter.setRenderHint(self.__render_hint)

        if self.geometry_list:
            self.setGeometry(*self.geometry_list)
            self.geometry_list.clear()

        for shape in self.draw_list:
            kind = shape["type"]
            # elif chain: a shape has exactly one type, so only one branch can
            # ever match (the original tested each type unconditionally).
            if kind == "rect":
                self.__set_rect(event, painter, shape["x"], shape["y"], shape["width"], shape["height"], shape["line_size"], shape["line_type"], shape["color"])
            elif kind == "ellipse":
                self.__set_ellipse(event, painter, shape["x"], shape["y"], shape["width"], shape["height"], shape["line_size"], shape["line_type"], shape["color"])
            elif kind == "line":
                self.__set_line(event, painter, shape["x1"], shape["y1"], shape["x2"], shape["y2"], shape["line_size"], shape["line_type"], shape["color"])
            elif kind == "text":
                self.__set_text(event, painter, shape["x"], shape["y"], shape["width"], shape["height"], shape["text"], shape["font"], shape["align"], shape["color"])
            elif kind == "polygon":
                self.__set_polygon(event, painter, shape["points"], shape["line_size"], shape["line_type"], shape["color"])

        painter.end()

    def set_render_hint(self, render_hint: RenderHints) -> None:
        """Select the QPainter render hint used on subsequent repaints."""
        self.__render_hint = render_hint

    def set_timer_interval(self, update_ms_interval: int) -> None:
        """Change the repaint interval in milliseconds."""
        self.__timer.setInterval(update_ms_interval)

    # Private drawing helpers.  Convention shared by rect/ellipse/polygon:
    # line_size <= 0 draws a solid-filled shape, otherwise only the outline.

    def __set_pen_and_brush(self, painter: object, line_size: int, line_type: PenType, color: tuple) -> None:
        """Configure pen + brush; filled when line_size <= 0, outline otherwise."""
        painter.setPen(QtGui.QPen(QtGui.QColor(*color), line_size, line_type))
        brush_type = BrushType.solid if line_size <= 0 else BrushType.nobrush
        painter.setBrush(QtGui.QBrush(QtGui.QColor(*color), brush_type))

    def __set_rect(self, event: object, painter: object, x: int, y: int, w: int, h: int, line_size: int, line_type: PenType, color: tuple) -> None:
        """Draw a rectangle at (x, y) with size (w, h)."""
        self.__set_pen_and_brush(painter, line_size, line_type, color)
        painter.drawRect(x, y, w, h)

    def __set_ellipse(self, event: object, painter: object, x: int, y: int, w: int, h: int, line_size: int, line_type: PenType, color: tuple) -> None:
        """Draw an ellipse inscribed in the rectangle (x, y, w, h)."""
        self.__set_pen_and_brush(painter, line_size, line_type, color)
        painter.drawEllipse(x, y, w, h)

    def __set_line(self, event: object, painter: object, x1: int, y1: int, x2: int, y2: int, line_size: int, line_type: object, color: tuple) -> None:
        """Draw a line segment from (x1, y1) to (x2, y2)."""
        painter.setPen(QtGui.QPen(QtGui.QColor(*color), line_size, line_type))
        painter.drawLine(x1, y1, x2, y2)

    def __set_text(self, event: object, painter: object, x: int, y: int, w: int, h: int, text: str, font: OverlayFont, align: object, color: tuple) -> None:
        """Draw text aligned inside the rectangle (x, y, w, h)."""
        painter.setPen(QtGui.QPen(QtGui.QColor(*color)))
        painter.setFont(font.get_font())
        painter.drawText(QtCore.QRect(x, y, w, h), align, text)

    def __set_polygon(self, event: object, painter: object, points: list, line_size: int, line_type: object, color: tuple) -> None:
        """Draw a polygon through ``points`` (list of {"x": int, "y": int} dicts)."""
        self.__set_pen_and_brush(painter, line_size, line_type, color)
        polygon = QtGui.QPolygon([QtCore.QPoint(point["x"], point["y"]) for point in points])
        painter.drawPolygon(polygon)
# Application initialization helpers
def application_init() -> QtWidgets.QApplication:
    """Create and return the Qt application with high-DPI scaling disabled."""
    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_DisableHighDpiScaling)
    application = QtWidgets.QApplication(sys.argv)
    return application
def application_start(application: QtWidgets.QApplication) -> None:
    """Enter the Qt event loop; blocks until the application quits."""
    application.exec()
/django-bulbs-3.25.0.tar.gz/django-bulbs-3.25.0/bulbs/content/migrations/0001_initial.py | from __future__ import unicode_literals
import django
from django.db import models, migrations
from django.conf import settings
import djbetty.fields
class Migration(migrations.Migration):
    """Initial schema for the bulbs ``content`` app.

    Creates the Content, FeatureType, LogEntry, ObfuscatedUrlInfo and Tag
    models, then wires up Content's feature_type / polymorphic_ctype / tags
    relations with AddField operations.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    # Django >= 1.8 requires the contenttypes migration that removed the
    # ContentType ``name`` field before the FK references below resolve.
    if django.VERSION >= (1, 8, 0):
        dependencies.insert(0,
            ('contenttypes', '0002_remove_content_type_name'))

    operations = [
        migrations.CreateModel(
            name='Content',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('published', models.DateTimeField(null=True, blank=True)),
                ('last_modified', models.DateTimeField(auto_now=True)),
                ('title', models.CharField(max_length=512)),
                # NOTE(review): b'' defaults are Python 2-era bytes literals;
                # on Python 3 these store bytes defaults — confirm intended.
                ('slug', models.SlugField(default=b'', blank=True)),
                ('description', models.TextField(default=b'', max_length=1024, blank=True)),
                ('thumbnail_override', djbetty.fields.ImageField(default=None, null=True, editable=False, blank=True)),
                ('subhead', models.CharField(default=b'', max_length=255, blank=True)),
                ('indexed', models.BooleanField(default=True)),
                ('authors', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'permissions': (('publish_own_content', 'Can publish their own content'), ('publish_content', 'Can publish content'), ('promote_content', 'Can promote content')),
            },
        ),
        migrations.CreateModel(
            name='FeatureType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('slug', models.SlugField(unique=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='LogEntry',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('action_time', models.DateTimeField(auto_now=True, verbose_name=b'action time')),
                ('object_id', models.TextField(null=True, verbose_name=b'object id', blank=True)),
                ('change_message', models.TextField(verbose_name=b'change message', blank=True)),
                ('content_type', models.ForeignKey(related_name='change_logs', blank=True, to='contenttypes.ContentType', null=True)),
                ('user', models.ForeignKey(related_name='change_logs', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                # Newest log entries first.
                'ordering': ('-action_time',),
            },
        ),
        migrations.CreateModel(
            name='ObfuscatedUrlInfo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('create_date', models.DateTimeField()),
                ('expire_date', models.DateTimeField()),
                ('url_uuid', models.CharField(unique=True, max_length=32, editable=False)),
                ('content', models.ForeignKey(to='content.Content')),
            ],
        ),
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('slug', models.SlugField(unique=True)),
                # django-polymorphic bookkeeping FK to the concrete subclass type.
                ('polymorphic_ctype', models.ForeignKey(related_name='polymorphic_content.tag_set+', editable=False, to='contenttypes.ContentType', null=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='content',
            name='feature_type',
            field=models.ForeignKey(blank=True, to='content.FeatureType', null=True),
        ),
        migrations.AddField(
            model_name='content',
            name='polymorphic_ctype',
            field=models.ForeignKey(related_name='polymorphic_content.content_set+', editable=False, to='contenttypes.ContentType', null=True),
        ),
        migrations.AddField(
            model_name='content',
            name='tags',
            field=models.ManyToManyField(to='content.Tag', blank=True),
        ),
    ]
/pixelpin-auth-core-1.0.3.tar.gz/pixelpin-auth-core-1.0.3/.eggs/nose-1.3.7-py2.7.egg/nose/plugins/debug.py | import pdb
from nose.plugins.base import Plugin
class Pdb(Plugin):
    """
    Provides --pdb and --pdb-failures options that cause the test runner to
    drop into pdb if it encounters an error or failure, respectively.
    """
    enabled_for_errors = False
    enabled_for_failures = False
    # Run after the other built-in plugins.
    score = 5

    def options(self, parser, env):
        """Register the --pdb, --pdb-failures and --pdb-errors options."""
        parser.add_option(
            "--pdb", action="store_true", dest="debugBoth",
            default=env.get('NOSE_PDB', False),
            help="Drop into debugger on failures or errors")
        parser.add_option(
            "--pdb-failures", action="store_true",
            dest="debugFailures",
            default=env.get('NOSE_PDB_FAILURES', False),
            help="Drop into debugger on failures")
        parser.add_option(
            "--pdb-errors", action="store_true",
            dest="debugErrors",
            default=env.get('NOSE_PDB_ERRORS', False),
            help="Drop into debugger on errors")

    def configure(self, options, conf):
        """Decide which outcome kinds (errors, failures) trigger pdb."""
        self.conf = conf
        debug_all = options.debugBoth
        self.enabled_for_errors = options.debugErrors or debug_all
        self.enabled_for_failures = options.debugFailures or debug_all
        self.enabled = self.enabled_for_failures or self.enabled_for_errors

    def addError(self, test, err):
        """Enter pdb if configured to debug errors."""
        if self.enabled_for_errors:
            self.debug(err)

    def addFailure(self, test, err):
        """Enter pdb if configured to debug failures."""
        if self.enabled_for_failures:
            self.debug(err)

    def debug(self, err):
        """Run a post-mortem pdb session on the traceback carried in *err*."""
        import sys  # FIXME why is this import here?
        _, _, tb = err
        saved_stdout = sys.stdout
        # Point stdout back at the real terminal so the interactive prompt
        # works even while output capture has replaced sys.stdout.
        sys.stdout = sys.__stdout__
        try:
            pdb.post_mortem(tb)
        finally:
            sys.stdout = saved_stdout
/fds.sdk.ForeignExchangeRate-1.0.6-py3-none-any.whl/fds/sdk/ForeignExchangeRate/api/forwards_api.py | import re # noqa: F401
import sys # noqa: F401
from multiprocessing.pool import ApplyResult
import typing
from fds.sdk.ForeignExchangeRate.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.ForeignExchangeRate.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.ForeignExchangeRate.exceptions import ApiException
from fds.sdk.ForeignExchangeRate.model.error_response import ErrorResponse
from fds.sdk.ForeignExchangeRate.model.forwards_request import ForwardsRequest
from fds.sdk.ForeignExchangeRate.model.forwards_response import ForwardsResponse
class ForwardsApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured client when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
        # GET /foreign-exchange/v1/forwards — ids and options as query params.
        self.get_fx_forwards_endpoint = _Endpoint(
            settings={
                'response_type': (
                    { 200: (ForwardsResponse,), 400: (ErrorResponse,), 401: (ErrorResponse,), 403: (ErrorResponse,), 415: (ErrorResponse,), 500: (ErrorResponse,), },
                    None
                ),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/foreign-exchange/v1/forwards',
                'operation_id': 'get_fx_forwards',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                    'ids',
                    'forward_period',
                    'start_date',
                    'end_date',
                    'frequency',
                ],
                'required': [
                    'ids',
                ],
                'nullable': [
                ],
                'enum': [
                    'forward_period',
                    'frequency',
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                    # Forward tenors accepted by the API.
                    ('forward_period',): {
                        "ON": "ON",
                        "TN": "TN",
                        "SW": "SW",
                        "1M": "1M",
                        "2M": "2M",
                        "3M": "3M",
                        "6M": "6M",
                        "9M": "9M",
                        "1Y": "1Y",
                        "2Y": "2Y",
                        "5Y": "5Y"
                    },
                    # Display frequencies for the returned series.
                    ('frequency',): {
                        "D": "D",
                        "W": "W",
                        "M": "M",
                        "AM": "AM",
                        "CQ": "CQ",
                        "AY": "AY",
                        "CY": "CY"
                    },
                },
                'openapi_types': {
                    'ids':
                        ([str],),
                    'forward_period':
                        (str,),
                    'start_date':
                        (str,),
                    'end_date':
                        (str,),
                    'frequency':
                        (str,),
                },
                # Python snake_case -> API camelCase parameter names.
                'attribute_map': {
                    'ids': 'ids',
                    'forward_period': 'forwardPeriod',
                    'start_date': 'startDate',
                    'end_date': 'endDate',
                    'frequency': 'frequency',
                },
                'location_map': {
                    'ids': 'query',
                    'forward_period': 'query',
                    'start_date': 'query',
                    'end_date': 'query',
                    'frequency': 'query',
                },
                'collection_format_map': {
                    'ids': 'csv',
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
        # POST /foreign-exchange/v1/forwards — request object in the JSON body.
        self.get_fx_forwards_for_list_endpoint = _Endpoint(
            settings={
                'response_type': (
                    { 200: (ForwardsResponse,), 400: (ErrorResponse,), 401: (ErrorResponse,), 403: (ErrorResponse,), 415: (ErrorResponse,), 500: (ErrorResponse,), },
                    None
                ),
                'auth': [
                    'FactSetApiKey',
                    'FactSetOAuth2'
                ],
                'endpoint_path': '/foreign-exchange/v1/forwards',
                'operation_id': 'get_fx_forwards_for_list',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': [
                    'forwards_request',
                ],
                'required': [
                    'forwards_request',
                ],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                    'forwards_request':
                        (ForwardsRequest,),
                },
                'attribute_map': {
                },
                'location_map': {
                    'forwards_request': 'body',
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [
                    'application/json'
                ]
            },
            api_client=api_client
        )
@staticmethod
def apply_kwargs_defaults(kwargs, return_http_data_only, async_req):
    """Populate *kwargs* in place with the runner flags and the standard
    per-request option defaults shared by every endpoint wrapper."""
    kwargs["async_req"] = async_req
    kwargs["_return_http_data_only"] = return_http_data_only
    # Fill each shared option only when the caller did not set it.
    for option, default in (
        ("_preload_content", True),
        ("_request_timeout", None),
        ("_check_input_type", True),
        ("_check_return_type", True),
        ("_spec_property_naming", False),
        ("_content_type", None),
        ("_host_index", None),
    ):
        kwargs[option] = kwargs.get(option, default)
def get_fx_forwards(
    self,
    ids,
    **kwargs
) -> ForwardsResponse:
    """Gets Forwards for a list of currency pairs.

    Synchronous request; returns the deserialized response data only.

    Args:
        ids ([str]): Currency pairs as ISO ``{source}{target}`` codes
            (see OA page 1470 for supported ISO currencies).

    Keyword Args:
        forward_period (str): Tenor — ON, TN, SW, 1M, 2M, 3M, 6M, 9M, 1Y,
            2Y or 5Y; server default "1M". [optional]
        start_date (str): Range start, YYYY-MM-DD; latest available when
            omitted. [optional]
        end_date (str): Range end, YYYY-MM-DD; latest available when
            omitted. [optional]
        frequency (str): Display frequency — D, W, M, AM, CQ, AY or CY;
            server default "D". [optional]
        Plus the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        ForwardsResponse: The deserialized response body.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['ids'] = ids
    return self.get_fx_forwards_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_with_http_info(
    self,
    ids,
    **kwargs
) -> typing.Tuple[ForwardsResponse, int, typing.MutableMapping]:
    """Gets Forwards for a list of currency pairs.

    Synchronous request; returns (data, HTTP status code, response headers).

    Args:
        ids ([str]): Currency pairs as ISO ``{source}{target}`` codes
            (see OA page 1470 for supported ISO currencies).

    Keyword Args:
        forward_period (str): Tenor — ON, TN, SW, 1M, 2M, 3M, 6M, 9M, 1Y,
            2Y or 5Y; server default "1M". [optional]
        start_date (str): Range start, YYYY-MM-DD; latest available when
            omitted. [optional]
        end_date (str): Range end, YYYY-MM-DD; latest available when
            omitted. [optional]
        frequency (str): Display frequency — D, W, M, AM, CQ, AY or CY;
            server default "D". [optional]
        Plus the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        tuple: (ForwardsResponse, int HTTP status, dict of response headers).
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['ids'] = ids
    return self.get_fx_forwards_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_async(
    self,
    ids,
    **kwargs
) -> "ApplyResult[ForwardsResponse]":
    """Gets Forwards for a list of currency pairs.

    Asynchronous request; returns the response data wrapped in ApplyResult.

    Args:
        ids ([str]): Currency pairs as ISO ``{source}{target}`` codes
            (see OA page 1470 for supported ISO currencies).

    Keyword Args:
        forward_period (str): Tenor — ON, TN, SW, 1M, 2M, 3M, 6M, 9M, 1Y,
            2Y or 5Y; server default "1M". [optional]
        start_date (str): Range start, YYYY-MM-DD; latest available when
            omitted. [optional]
        end_date (str): Range end, YYYY-MM-DD; latest available when
            omitted. [optional]
        frequency (str): Display frequency — D, W, M, AM, CQ, AY or CY;
            server default "D". [optional]
        Plus the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        ApplyResult[ForwardsResponse]: Future-like handle to the response data.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['ids'] = ids
    return self.get_fx_forwards_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_with_http_info_async(
    self,
    ids,
    **kwargs
) -> "ApplyResult[typing.Tuple[ForwardsResponse, int, typing.MutableMapping]]":
    """Gets Forwards for a list of currency pairs.

    Asynchronous request; returns (data, HTTP status, headers) wrapped in
    an ApplyResult.

    Args:
        ids ([str]): Currency pairs as ISO ``{source}{target}`` codes
            (see OA page 1470 for supported ISO currencies).

    Keyword Args:
        forward_period (str): Tenor — ON, TN, SW, 1M, 2M, 3M, 6M, 9M, 1Y,
            2Y or 5Y; server default "1M". [optional]
        start_date (str): Range start, YYYY-MM-DD; latest available when
            omitted. [optional]
        end_date (str): Range end, YYYY-MM-DD; latest available when
            omitted. [optional]
        frequency (str): Display frequency — D, W, M, AM, CQ, AY or CY;
            server default "D". [optional]
        Plus the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        ApplyResult[(ForwardsResponse, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['ids'] = ids
    return self.get_fx_forwards_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_for_list(
    self,
    forwards_request,
    **kwargs
) -> ForwardsResponse:
    """Gets Forwards for a list of currency pairs (POST variant).

    Synchronous request; returns the deserialized response data only.

    Args:
        forwards_request (ForwardsRequest): Request object for FX Forwards,
            sent as the JSON body.

    Keyword Args:
        Only the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        ForwardsResponse: The deserialized response body.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
    kwargs['forwards_request'] = forwards_request
    return self.get_fx_forwards_for_list_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_for_list_with_http_info(
    self,
    forwards_request,
    **kwargs
) -> typing.Tuple[ForwardsResponse, int, typing.MutableMapping]:
    """Gets Forwards for a list of currency pairs (POST variant).

    Synchronous request; returns (data, HTTP status code, response headers).

    Args:
        forwards_request (ForwardsRequest): Request object for FX Forwards,
            sent as the JSON body.

    Keyword Args:
        Only the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        tuple: (ForwardsResponse, int HTTP status, dict of response headers).
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
    kwargs['forwards_request'] = forwards_request
    return self.get_fx_forwards_for_list_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_for_list_async(
    self,
    forwards_request,
    **kwargs
) -> "ApplyResult[ForwardsResponse]":
    """Gets Forwards for a list of currency pairs (POST variant).

    Asynchronous request; returns the response data wrapped in ApplyResult.

    Args:
        forwards_request (ForwardsRequest): Request object for FX Forwards,
            sent as the JSON body.

    Keyword Args:
        Only the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        ApplyResult[ForwardsResponse]: Future-like handle to the response data.
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
    kwargs['forwards_request'] = forwards_request
    return self.get_fx_forwards_for_list_endpoint.call_with_http_info(**kwargs)
def get_fx_forwards_for_list_with_http_info_async(
    self,
    forwards_request,
    **kwargs
) -> "ApplyResult[typing.Tuple[ForwardsResponse, int, typing.MutableMapping]]":
    """Gets Forwards for a list of currency pairs (POST variant).

    Asynchronous request; returns (data, HTTP status, headers) wrapped in
    an ApplyResult.

    Args:
        forwards_request (ForwardsRequest): Request object for FX Forwards,
            sent as the JSON body.

    Keyword Args:
        Only the shared underscore-prefixed request options set by
        ``apply_kwargs_defaults`` (timeouts, type checking, host index).

    Returns:
        ApplyResult[(ForwardsResponse, int, typing.Dict)]
    """
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['forwards_request'] = forwards_request
    return self.get_fx_forwards_for_list_endpoint.call_with_http_info(**kwargs)
/schemaorg_types-0.4.0.tar.gz/schemaorg_types-0.4.0/schemaorg_types/DonateAction.py | from __future__ import annotations
from datetime import *
from time import *
from typing import *
from pydantic import *
class DonateAction(BaseModel):
    """The act of providing goods, services, or money without compensation, often for philanthropic reasons.

    References:
        https://schema.org/DonateAction
    Note:
        Model Depth 4
    Attributes:
        potentialAction: (Optional[Union[List[Union[str, Any]], str, Any]]): Indicates a potential Action, which describes an idealized action in which this thing would play an 'object' role.
        mainEntityOfPage: (Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]]): Indicates a page (or other CreativeWork) for which this thing is the main entity being described. See [background notes](/docs/datamodel.html#mainEntityBackground) for details.
        subjectOf: (Optional[Union[List[Union[str, Any]], str, Any]]): A CreativeWork or Event about this Thing.
        url: (Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]]): URL of the item.
        alternateName: (Union[List[Union[str, Any]], str, Any]): An alias for the item.
        sameAs: (Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]]): URL of a reference Web page that unambiguously indicates the item's identity. E.g. the URL of the item's Wikipedia page, Wikidata entry, or official website.
        description: (Union[List[Union[str, Any]], str, Any]): A description of the item.
        disambiguatingDescription: (Union[List[Union[str, Any]], str, Any]): A sub property of description. A short description of the item used to disambiguate from other, similar items. Information from other properties (in particular, name) may be necessary for the description to be useful for disambiguation.
        identifier: (Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]): The identifier property represents any kind of identifier for any kind of [[Thing]], such as ISBNs, GTIN codes, UUIDs etc. Schema.org provides dedicated properties for representing many of these, either as textual strings or as URL (URI) links. See [background notes](/docs/datamodel.html#identifierBg) for more details.
        image: (Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]]): An image of the item. This can be a [[URL]] or a fully described [[ImageObject]].
        name: (Union[List[Union[str, Any]], str, Any]): The name of the item.
        additionalType: (Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]]): An additional type for the item, typically used for adding more specific types from external vocabularies in microdata syntax. This is a relationship between something and a class that the thing is in. In RDFa syntax, it is better to use the native RDFa syntax - the 'typeof' attribute - for multiple types. Schema.org tools may have only weaker understanding of extra types, in particular those defined externally.
        endTime: (Optional[Union[List[Union[datetime, str, Any]], datetime, str, Any]]): The endTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to end. For actions that span a period of time, when the action was performed. E.g. John wrote a book from January to *December*. For media, including audio and video, it's the time offset of the end of a clip within a larger file.Note that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. This situation may be clarified in future revisions.
        provider: (Optional[Union[List[Union[str, Any]], str, Any]]): The service provider, service operator, or service performer; the goods producer. Another party (a seller) may offer those services or goods on behalf of the provider. A provider may also serve as the seller.
        startTime: (Optional[Union[List[Union[datetime, str, Any]], datetime, str, Any]]): The startTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to start. For actions that span a period of time, when the action was performed. E.g. John wrote a book from *January* to December. For media, including audio and video, it's the time offset of the start of a clip within a larger file.Note that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. This situation may be clarified in future revisions.
        result: (Optional[Union[List[Union[str, Any]], str, Any]]): The result produced in the action. E.g. John wrote *a book*.
        actionStatus: (Optional[Union[List[Union[str, Any]], str, Any]]): Indicates the current disposition of the Action.
        agent: (Optional[Union[List[Union[str, Any]], str, Any]]): The direct performer or driver of the action (animate or inanimate). E.g. *John* wrote a book.
        instrument: (Optional[Union[List[Union[str, Any]], str, Any]]): The object that helped the agent perform the action. E.g. John wrote a book with *a pen*.
        object: (Optional[Union[List[Union[str, Any]], str, Any]]): The object upon which the action is carried out, whose state is kept intact or changed. Also known as the semantic roles patient, affected or undergoer (which change their state) or theme (which doesn't). E.g. John read *a book*.
        error: (Optional[Union[List[Union[str, Any]], str, Any]]): For failed actions, more information on the cause of the failure.
        target: (Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]]): Indicates a target EntryPoint, or url, for an Action.
        location: (Union[List[Union[str, Any]], str, Any]): The location of, for example, where an event is happening, where an organization is located, or where an action takes place.
        participant: (Optional[Union[List[Union[str, Any]], str, Any]]): Other co-agents that participated in the action indirectly. E.g. John wrote a book with *Steve*.
        price: (Union[List[Union[str, Any, StrictInt, StrictFloat]], str, Any, StrictInt, StrictFloat]): The offer price of a product, or of a price component when attached to PriceSpecification and its subtypes.Usage guidelines:* Use the [[priceCurrency]] property (with standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217), e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies, e.g. "BTC"; well known names for [Local Exchange Trading Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types, e.g. "Ithaca HOUR") instead of including [ambiguous symbols](http://en.wikipedia.org/wiki/Dollar_sign#Currencies_that_use_the_dollar_or_peso_sign) such as '$' in the value.* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator.* Note that both [RDFa](http://www.w3.org/TR/xhtml-rdfa-primer/#using-the-content-attribute) and Microdata syntax allow the use of a "content=" attribute for publishing simple machine-readable values alongside more human-friendly formatting.* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similar Unicode symbols.
        priceSpecification: (Optional[Union[List[Union[str, Any]], str, Any]]): One or more detailed price specifications, indicating the unit price and delivery or payment charges.
        priceCurrency: (Union[List[Union[str, Any]], str, Any]): The currency of the price, or a price component when attached to [[PriceSpecification]] and its subtypes.Use standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217), e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies, e.g. "BTC"; well known names for [Local Exchange Trading Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types, e.g. "Ithaca HOUR".
        recipient: (Optional[Union[List[Union[str, Any]], str, Any]]): A sub property of participant. The participant who is at the receiving end of the action.
    """

    # NOTE(review): generated model written against the pydantic v1 API
    # (Field(..., const=True)); pydantic v2 removed `const` — confirm the
    # target pydantic version before upgrading.
    # Every property defaults to None, i.e. all schema.org properties are
    # effectively optional at runtime regardless of the Optional[...] wrapper.

    # JSON-LD type discriminator: serialized as "@type", pinned to "DonateAction".
    type_: str = Field(default="DonateAction", alias="@type", const=True)
    potentialAction: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="Indicates a potential Action, which describes an idealized action in which this thing"
        "would play an 'object' role.",
    )
    mainEntityOfPage: Optional[
        Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]
    ] = Field(
        default=None,
        description="Indicates a page (or other CreativeWork) for which this thing is the main entity being"
        "described. See [background notes](/docs/datamodel.html#mainEntityBackground)"
        "for details.",
    )
    subjectOf: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="A CreativeWork or Event about this Thing.",
    )
    url: Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]] = Field(
        default=None,
        description="URL of the item.",
    )
    alternateName: Union[List[Union[str, Any]], str, Any] = Field(
        default=None,
        description="An alias for the item.",
    )
    sameAs: Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]] = Field(
        default=None,
        description="URL of a reference Web page that unambiguously indicates the item's identity. E.g. the"
        "URL of the item's Wikipedia page, Wikidata entry, or official website.",
    )
    description: Union[List[Union[str, Any]], str, Any] = Field(
        default=None,
        description="A description of the item.",
    )
    disambiguatingDescription: Union[List[Union[str, Any]], str, Any] = Field(
        default=None,
        description="A sub property of description. A short description of the item used to disambiguate from"
        "other, similar items. Information from other properties (in particular, name) may"
        "be necessary for the description to be useful for disambiguation.",
    )
    identifier: Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any] = Field(
        default=None,
        description="The identifier property represents any kind of identifier for any kind of [[Thing]],"
        "such as ISBNs, GTIN codes, UUIDs etc. Schema.org provides dedicated properties for"
        "representing many of these, either as textual strings or as URL (URI) links. See [background"
        "notes](/docs/datamodel.html#identifierBg) for more details.",
    )
    image: Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]] = Field(
        default=None,
        description="An image of the item. This can be a [[URL]] or a fully described [[ImageObject]].",
    )
    name: Union[List[Union[str, Any]], str, Any] = Field(
        default=None,
        description="The name of the item.",
    )
    additionalType: Optional[
        Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]
    ] = Field(
        default=None,
        description="An additional type for the item, typically used for adding more specific types from external"
        "vocabularies in microdata syntax. This is a relationship between something and a class"
        "that the thing is in. In RDFa syntax, it is better to use the native RDFa syntax - the 'typeof'"
        "attribute - for multiple types. Schema.org tools may have only weaker understanding"
        "of extra types, in particular those defined externally.",
    )
    endTime: Optional[
        Union[List[Union[datetime, str, Any]], datetime, str, Any]
    ] = Field(
        default=None,
        description="The endTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation),"
        "the time that it is expected to end. For actions that span a period of time, when the action"
        "was performed. E.g. John wrote a book from January to *December*. For media, including"
        "audio and video, it's the time offset of the end of a clip within a larger file.Note that"
        "Event uses startDate/endDate instead of startTime/endTime, even when describing"
        "dates with times. This situation may be clarified in future revisions.",
    )
    provider: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="The service provider, service operator, or service performer; the goods producer."
        "Another party (a seller) may offer those services or goods on behalf of the provider."
        "A provider may also serve as the seller.",
    )
    startTime: Optional[
        Union[List[Union[datetime, str, Any]], datetime, str, Any]
    ] = Field(
        default=None,
        description="The startTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation),"
        "the time that it is expected to start. For actions that span a period of time, when the action"
        "was performed. E.g. John wrote a book from *January* to December. For media, including"
        "audio and video, it's the time offset of the start of a clip within a larger file.Note that"
        "Event uses startDate/endDate instead of startTime/endTime, even when describing"
        "dates with times. This situation may be clarified in future revisions.",
    )
    result: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="The result produced in the action. E.g. John wrote *a book*.",
    )
    actionStatus: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="Indicates the current disposition of the Action.",
    )
    agent: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="The direct performer or driver of the action (animate or inanimate). E.g. *John* wrote"
        "a book.",
    )
    instrument: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="The object that helped the agent perform the action. E.g. John wrote a book with *a pen*.",
    )
    object: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="The object upon which the action is carried out, whose state is kept intact or changed."
        "Also known as the semantic roles patient, affected or undergoer (which change their"
        "state) or theme (which doesn't). E.g. John read *a book*.",
    )
    error: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="For failed actions, more information on the cause of the failure.",
    )
    target: Optional[Union[List[Union[str, AnyUrl, Any]], str, AnyUrl, Any]] = Field(
        default=None,
        description="Indicates a target EntryPoint, or url, for an Action.",
    )
    location: Union[List[Union[str, Any]], str, Any] = Field(
        default=None,
        description="The location of, for example, where an event is happening, where an organization is located,"
        "or where an action takes place.",
    )
    participant: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="Other co-agents that participated in the action indirectly. E.g. John wrote a book with"
        "*Steve*.",
    )
    price: Union[
        List[Union[str, Any, StrictInt, StrictFloat]], str, Any, StrictInt, StrictFloat
    ] = Field(
        default=None,
        description="The offer price of a product, or of a price component when attached to PriceSpecification"
        "and its subtypes.Usage guidelines:* Use the [[priceCurrency]] property (with standard"
        "formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217),"
        'e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies)'
        'for cryptocurrencies, e.g. "BTC"; well known names for [Local Exchange Trading Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system)'
        '(LETS) and other currency types, e.g. "Ithaca HOUR") instead of including [ambiguous'
        "symbols](http://en.wikipedia.org/wiki/Dollar_sign#Currencies_that_use_the_dollar_or_peso_sign)"
        "such as '$' in the value.* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate"
        "a decimal point. Avoid using these symbols as a readability separator.* Note that both"
        "[RDFa](http://www.w3.org/TR/xhtml-rdfa-primer/#using-the-content-attribute)"
        'and Microdata syntax allow the use of a "content=" attribute for publishing simple'
        "machine-readable values alongside more human-friendly formatting.* Use values from"
        "0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially"
        "similar Unicode symbols.",
    )
    priceSpecification: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="One or more detailed price specifications, indicating the unit price and delivery or"
        "payment charges.",
    )
    priceCurrency: Union[List[Union[str, Any]], str, Any] = Field(
        default=None,
        description="The currency of the price, or a price component when attached to [[PriceSpecification]]"
        "and its subtypes.Use standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217),"
        'e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies)'
        'for cryptocurrencies, e.g. "BTC"; well known names for [Local Exchange Trading Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system)'
        '(LETS) and other currency types, e.g. "Ithaca HOUR".',
    )
    recipient: Optional[Union[List[Union[str, Any]], str, Any]] = Field(
        default=None,
        description="A sub property of participant. The participant who is at the receiving end of the action.",
    )
/picsellia_tf1-0.6-py3-none-any.whl/picsellia_tf1/object_detection/builders/calibration_builder.py | import tensorflow.compat.v1 as tf
from object_detection.utils import shape_utils
def _find_interval_containing_new_value(x, new_value):
  """Find the index of x (ascending-ordered) after which new_value occurs.

  Args:
    x: tf.float32 tensor of shape (length,), assumed sorted in ascending
      order (callers pass monotonically-increasing, padded domains).
    new_value: tf.float32 tensor of shape (num_examples,) of query values.

  Returns:
    tf.int64 tensor of shape (num_examples,); entry i is the index j such
    that x[j] <= new_value[i] < x[j + 1].
    NOTE(review): queries outside [x[0], x[-1]) produce an all-zero diff row,
    so argmin falls back to index 0; the caller (_tf_linear_interp1d) pads
    the domain so this is not normally hit — confirm before reusing elsewhere.
  """
  new_value_shape = shape_utils.combined_static_and_dynamic_shape(new_value)[0]
  x_shape = shape_utils.combined_static_and_dynamic_shape(x)[0]
  # compare[i, j] == 1 iff new_value[i] >= x[j]; with ascending x, each row is
  # a run of ones followed by zeros.
  compare = tf.cast(tf.reshape(new_value, shape=(new_value_shape, 1)) >=
                    tf.reshape(x, shape=(1, x_shape)),
                    dtype=tf.int32)
  # The 1 -> 0 transition marks the containing interval: diff is -1 exactly
  # there and 0 elsewhere, so argmin returns the transition position.
  diff = compare[:, 1:] - compare[:, :-1]
  interval_idx = tf.argmin(diff, axis=1)
  return interval_idx
def _tf_linear_interp1d(x_to_interpolate, fn_x, fn_y):
  """Tensorflow implementation of 1d linear interpolation.

  Args:
    x_to_interpolate: tf.float32 Tensor of shape (num_examples,) over which 1d
      linear interpolation is performed.
    fn_x: Monotonically-increasing, non-repeating tf.float32 Tensor of shape
      (length,) used as the domain to approximate a function.
    fn_y: tf.float32 Tensor of shape (length,) used as the range to approximate
      a function.

  Returns:
    tf.float32 Tensor of shape (num_examples,)
  """
  # Pad the domain by one unit on each side, repeating the endpoint range
  # values, so that queries slightly outside [fn_x[0], fn_x[-1]] interpolate
  # against a flat segment (i.e. clamp to the endpoint values) instead of
  # indexing past the end of the tensors.
  x_pad = tf.concat([fn_x[:1] - 1, fn_x, fn_x[-1:] + 1], axis=0)
  y_pad = tf.concat([fn_y[:1], fn_y, fn_y[-1:]], axis=0)
  interval_idx = _find_interval_containing_new_value(x_pad, x_to_interpolate)

  # Interpolate: alpha is the relative position of each query within its
  # containing interval (0 at the left edge, 1 at the right edge).
  alpha = (
      (x_to_interpolate - tf.gather(x_pad, interval_idx)) /
      (tf.gather(x_pad, interval_idx + 1) - tf.gather(x_pad, interval_idx)))
  interpolation = ((1 - alpha) * tf.gather(y_pad, interval_idx) +
                   alpha * tf.gather(y_pad, interval_idx + 1))

  return interpolation
def _function_approximation_proto_to_tf_tensors(x_y_pairs_message):
  """Extracts (x,y) pairs from a XYPairs message.

  Args:
    x_y_pairs_message: calibration_pb2..XYPairs proto

  Returns:
    tf_x: tf.float32 tensor of shape (number_xy_pairs,) for function domain.
    tf_y: tf.float32 tensor of shape (number_xy_pairs,) for function range.
  """
  # Collect both coordinates in a single pass over the repeated field.
  domain_values = []
  range_values = []
  for pair in x_y_pairs_message.x_y_pair:
    domain_values.append(pair.x)
    range_values.append(pair.y)
  tf_x = tf.convert_to_tensor(domain_values, dtype=tf.float32)
  tf_y = tf.convert_to_tensor(range_values, dtype=tf.float32)
  return tf_x, tf_y
def _get_class_id_function_dict(calibration_config):
  """Create a dictionary mapping class id to function approximations.

  Args:
    calibration_config: calibration_pb2 proto containing
      id_function_approximations.

  Returns:
    Dictionary mapping a class id to a tuple of TF tensors to be used for
    function approximation.
  """
  xy_pairs_by_class = (
      calibration_config.class_id_function_approximations.class_id_xy_pairs_map)
  return {
      class_id: _function_approximation_proto_to_tf_tensors(xy_pairs)
      for class_id, xy_pairs in xy_pairs_by_class.items()
  }
def build(calibration_config):
  """Returns a function that calibrates Tensorflow model scores.

  All returned functions are expected to apply positive monotonic
  transformations to inputs (i.e. score ordering is strictly preserved or
  adjacent scores are mapped to the same score, but an input of lower value
  should never exceed an input of higher value after transformation). For
  class-agnostic calibration, positive monotonicity should hold across all
  scores. In class-specific cases, positive monotonicity should hold within each
  class.

  Args:
    calibration_config: calibration_pb2.CalibrationConfig proto.
  Returns:
    Function that accepts class_predictions_with_background and calibrates
    the output based on calibration_config's parameters.
  Raises:
    ValueError: No calibration builder defined for "Oneof" in
      calibration_config.
  """
  # Exactly one of the branches below binds `calibration_fn`; the unknown
  # oneof value raises before the final return is reached.

  # Linear Interpolation (usually used as a result of calibration via
  # isotonic regression).
  if calibration_config.WhichOneof('calibrator') == 'function_approximation':
    def calibration_fn(class_predictions_with_background):
      """Calibrate predictions via 1-d linear interpolation.

      Predictions scores are linearly interpolated based on a class-agnostic
      function approximation. Note that the 0-indexed background class is also
      transformed.

      Args:
        class_predictions_with_background: tf.float32 tensor of shape
          [batch_size, num_anchors, num_classes + 1] containing scores on the
          interval [0,1]. This is usually produced by a sigmoid or softmax layer
          and the result of calling the `predict` method of a detection model.

      Returns:
        tf.float32 tensor of the same shape as the input with values on the
        interval [0, 1].
      """
      # Flattening Tensors and then reshaping at the end.
      flat_class_predictions_with_background = tf.reshape(
          class_predictions_with_background, shape=[-1])
      fn_x, fn_y = _function_approximation_proto_to_tf_tensors(
          calibration_config.function_approximation.x_y_pairs)
      updated_scores = _tf_linear_interp1d(
          flat_class_predictions_with_background, fn_x, fn_y)

      # Un-flatten the scores
      original_detections_shape = shape_utils.combined_static_and_dynamic_shape(
          class_predictions_with_background)
      calibrated_class_predictions_with_background = tf.reshape(
          updated_scores,
          shape=original_detections_shape,
          name='calibrate_scores')
      return calibrated_class_predictions_with_background

  # Per-class linear interpolation: each class id may carry its own
  # calibration curve; classes without parameters pass through unchanged.
  elif (calibration_config.WhichOneof('calibrator') ==
        'class_id_function_approximations'):
    def calibration_fn(class_predictions_with_background):
      """Calibrate predictions per class via 1-d linear interpolation.

      Prediction scores are linearly interpolated with class-specific function
      approximations. Note that after calibration, an anchor's class scores will
      not necessarily sum to 1, and score ordering may change, depending on each
      class' calibration parameters.

      Args:
        class_predictions_with_background: tf.float32 tensor of shape
          [batch_size, num_anchors, num_classes + 1] containing scores on the
          interval [0,1]. This is usually produced by a sigmoid or softmax layer
          and the result of calling the `predict` method of a detection model.

      Returns:
        tf.float32 tensor of the same shape as the input with values on the
        interval [0, 1].

      Raises:
        KeyError: Calibration parameters are not present for a class.
      """
      class_id_function_dict = _get_class_id_function_dict(calibration_config)

      # Tensors are split by class and then recombined at the end to recover
      # the input's original shape. If a class id does not have calibration
      # parameters, it is left unchanged.
      class_tensors = tf.unstack(class_predictions_with_background, axis=-1)
      calibrated_class_tensors = []
      for class_id, class_tensor in enumerate(class_tensors):
        flat_class_tensor = tf.reshape(class_tensor, shape=[-1])
        if class_id in class_id_function_dict:
          output_tensor = _tf_linear_interp1d(
              x_to_interpolate=flat_class_tensor,
              fn_x=class_id_function_dict[class_id][0],
              fn_y=class_id_function_dict[class_id][1])
        else:
          tf.logging.info(
              'Calibration parameters for class id `%d` not not found',
              class_id)
          output_tensor = flat_class_tensor
        calibrated_class_tensors.append(output_tensor)

      combined_calibrated_tensor = tf.stack(calibrated_class_tensors, axis=1)
      input_shape = shape_utils.combined_static_and_dynamic_shape(
          class_predictions_with_background)
      calibrated_class_predictions_with_background = tf.reshape(
          combined_calibrated_tensor,
          shape=input_shape,
          name='calibrate_scores')
      return calibrated_class_predictions_with_background

  # Temperature scaling: divides raw logits by a positive scalar. Unlike the
  # interpolation branches above, this expects pre-sigmoid/softmax scores.
  elif (calibration_config.WhichOneof('calibrator') ==
        'temperature_scaling_calibration'):
    def calibration_fn(class_predictions_with_background):
      """Calibrate predictions via temperature scaling.

      Predictions logits scores are scaled by the temperature scaler. Note that
      the 0-indexed background class is also transformed.

      Args:
        class_predictions_with_background: tf.float32 tensor of shape
          [batch_size, num_anchors, num_classes + 1] containing logits scores.
          This is usually produced before a sigmoid or softmax layer.

      Returns:
        tf.float32 tensor of the same shape as the input.

      Raises:
        ValueError: If temperature scaler is of incorrect value.
      """
      scaler = calibration_config.temperature_scaling_calibration.scaler
      if scaler <= 0:
        raise ValueError('The scaler in temperature scaling must be positive.')
      calibrated_class_predictions_with_background = tf.math.divide(
          class_predictions_with_background,
          scaler,
          name='calibrate_score')
      return calibrated_class_predictions_with_background

  # TODO(zbeaver): Add sigmoid calibration.
  else:
    raise ValueError('No calibration builder defined for "Oneof" in '
                     'calibration_config.')

  return calibration_fn
/gardener_cicd_libs-1.2143.0-py3-none-any.whl/oci/platform.py | import dataclasses
import typing
import dacite
import oci.client as oc
import oci.model as om
def from_manifest(
    image_reference: om.OciImageReference,
    manifest: om.OciImageManifest,
    oci_client: oc.Client=None,
    base_platform: om.OciPlatform=None,
) -> om.OciPlatform:
    '''
    determines the platform for the given (single-arch) manifest by fetching its
    cfg-blob; attributes from `base_platform` (if given) serve as defaults and
    are overridden by values found in the cfg-blob.
    '''
    cfg = base_platform.as_dict() if base_platform else {}

    cfg_blob = oci_client.blob(
        image_reference=image_reference,
        digest=manifest.config.digest,
        stream=False, # cfg-blobs are small; we json.load the result anyhow
    ).json()
    cfg.update(cfg_blob)

    return dacite.from_dict(
        data_class=om.OciPlatform,
        data=cfg,
    )
def from_single_image(
    image_reference: typing.Union[str, om.OciImageReference],
    oci_client: oc.Client=None,
    base_platform: om.OciPlatform=None,
) -> om.OciPlatform:
    '''
    determines the platform of a "single oci image", i.e. an image whose
    manifest is a plain image manifest rather than a multiarch manifest-list.
    '''
    image_reference = om.OciImageReference.to_image_ref(image_reference)

    manifest = oci_client.manifest(image_reference=image_reference)
    if isinstance(manifest, om.OciImageManifest):
        return from_manifest(
            image_reference=image_reference,
            manifest=manifest,
            oci_client=oci_client,
            base_platform=base_platform,
        )

    # anything else (e.g. a manifest-list) is not a single image
    raise ValueError(f'{image_reference=} did not yield OciImageManifest: {type(manifest)=}')
def single_platform_manifest(
    image_reference: typing.Union[om.OciImageReference, str],
    oci_client: oc.Client,
    platform: om.OciPlatform=None,
):
    '''
    returns a single-platform OCI Image Manifest for the given image_reference.
    lookup and validation depend on presence of platform argument.

    if given image-ref points to a single-arch manifest, the returned result will be identical
    to invoking `oci_client.manifest`. If platform argument is passed, and the discovered
    platform does not match, a `ValueError` will be raised.

    if given image-ref points to a multi-arch manifest, content-negotiation depends on presence of
    platform-argument. If absent, no preference will be stated (i.e. accept-header will not be set).
    Some Oci-Image-registries will return a single-arch manifest (thus saving a roundtrip).
    If platform is passed, preference for multi-arch will be stated via accept-header; the specified
    platform will be looked-up and returned. If not found, `ValueError` will be raised.
    '''
    image_reference = om.OciImageReference.to_image_ref(image_reference)

    # only state a preference for multiarch manifests if we must pick a
    # platform-specific entry from one.
    if platform:
        accept = om.MimeTypes.prefer_multiarch
    else:
        accept = None

    manifest = oci_client.manifest(
        image_reference=image_reference,
        accept=accept,
    )

    if isinstance(manifest, om.OciImageManifest):
        # single-arch manifest: return as-is, optionally validating that it
        # actually matches the requested platform.
        if not platform:
            return manifest
        actual_platform = from_manifest(
            image_reference=image_reference,
            manifest=manifest,
            oci_client=oci_client,
        )
        if not actual_platform == platform:
            raise ValueError(f'{image_reference=} does not match {platform=}: {actual_platform=}')
        return manifest
    elif isinstance(manifest, om.OciImageManifestList):
        pass # fall through to platform-lookup below
    else:
        raise NotImplementedError(manifest)

    # NOTE(review): if a registry returns a manifest-list although `platform`
    # was not passed, no entry can compare equal to None and the ValueError
    # below fires — confirm this is the intended behaviour for that edge case.
    for manifest in manifest.manifests:
        manifest: om.OciImageManifestListEntry
        if manifest.platform == platform:
            break
    else:
        # for/else: no list entry matched the requested platform
        raise ValueError(f'{image_reference=} does not contain {platform=}')

    # fetch the platform-specific manifest via its digest
    manifest_ref = f'{image_reference.ref_without_tag}@{manifest.digest}'
    return oci_client.manifest(image_reference=manifest_ref)
def iter_platforms(
    image_reference: typing.Union[str, om.OciImageReference],
    oci_client: oc.Client=None,
) -> typing.Generator[tuple[om.OciImageReference, om.OciPlatform], None, None]:
    '''
    yields (image_reference, platform) pairs for the given image.

    For a single-arch image, exactly one pair (the given reference and its
    platform) is yielded. For a multiarch image, one pair per sub-manifest is
    yielded, each with a digest-based reference to the platform-specific image.
    '''
    image_reference = om.OciImageReference.to_image_ref(image_reference)

    manifest = oci_client.manifest(
        image_reference=image_reference,
        accept=om.MimeTypes.prefer_multiarch,
    )

    if isinstance(manifest, om.OciImageManifest):
        platform = from_single_image(
            image_reference=image_reference,
            oci_client=oci_client,
        )
        yield (image_reference, platform)
        return
    elif isinstance(manifest, om.OciImageManifestList):
        manifest: om.OciImageManifestList
    else:
        raise NotImplementedError(type(manifest))

    prefix = image_reference.ref_without_tag

    for sub_manifest in manifest.manifests:
        # platform attributes from the manifest-list entry (e.g. os/arch/variant)
        platform_dict = dataclasses.asdict(sub_manifest)

        # NOTE(review): `sub_manifest` is re-bound to the fetched manifest, but
        # the fetched value is never used afterwards (only the digest, read
        # before re-binding, matters) — possibly a leftover; confirm whether the
        # fetch is needed at all.
        sub_manifest = oci_client.manifest(
            image_reference=(sub_img_ref := f'{prefix}@{sub_manifest.digest}'),
        )

        platform = from_single_image(
            image_reference=sub_img_ref,
            oci_client=oci_client,
        )
        # merge platform-dicts - the one from cfg-blob is assumed to be more specific
        platform_dict |= dataclasses.asdict(platform)

        platform = dacite.from_dict(
            data_class=om.OciPlatform,
            data=platform_dict,
        )

        yield (sub_img_ref, platform)
/drf_stripe_subscription36-1.1.11.5-py3-none-any.whl/drf_stripe/migrations/0001_initial.py |
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial drf_stripe schema.

    Creates the Feature, Price, Product, ProductFeature, StripeUser,
    Subscription and SubscriptionItem models, their foreign keys and indexes.
    """

    # NOTE(review): auto-generated initial migration — once applied anywhere,
    # do not edit in place; create a follow-up migration for schema changes.

    initial = True

    dependencies = [
        # StripeUser.user points at the (swappable) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Feature',
            fields=[
                ('feature_id', models.CharField(max_length=64, primary_key=True, serialize=False)),
                ('description', models.CharField(max_length=256, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Price',
            fields=[
                ('price_id', models.CharField(max_length=256, primary_key=True, serialize=False)),
                ('nickname', models.CharField(max_length=256, null=True)),
                ('price', models.PositiveIntegerField()),
                ('freq', models.CharField(max_length=64, null=True)),
                ('active', models.BooleanField()),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('product_id', models.CharField(max_length=256, primary_key=True, serialize=False)),
                ('active', models.BooleanField()),
                ('description', models.CharField(max_length=1024, null=True)),
                ('name', models.CharField(max_length=256, null=True)),
            ],
        ),
        # join table linking Products to Features (FKs added below).
        migrations.CreateModel(
            name='ProductFeature',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='StripeUser',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True,
                                              related_name='stripe_user', serialize=False,
                                              to=settings.AUTH_USER_MODEL)),
                ('customer_id', models.CharField(max_length=128, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Subscription',
            fields=[
                ('subscription_id', models.CharField(max_length=256, primary_key=True, serialize=False)),
                ('period_start', models.DateTimeField(null=True)),
                ('period_end', models.DateTimeField(null=True)),
                ('cancel_at', models.DateTimeField(null=True)),
                ('cancel_at_period_end', models.BooleanField()),
                ('ended_at', models.DateTimeField(null=True)),
                ('status', models.CharField(max_length=64)),
                ('trial_end', models.DateTimeField(null=True)),
                ('trial_start', models.DateTimeField(null=True)),
                ('stripe_user',
                 models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions',
                                   to='drf_stripe.stripeuser')),
            ],
        ),
        migrations.CreateModel(
            name='SubscriptionItem',
            fields=[
                ('sub_item_id', models.CharField(max_length=256, primary_key=True, serialize=False)),
                ('quantity', models.PositiveIntegerField()),
                ('price', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+',
                                            to='drf_stripe.price')),
                ('subscription', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items',
                                                   to='drf_stripe.subscription')),
            ],
        ),
        migrations.AddIndex(
            model_name='stripeuser',
            index=models.Index(fields=['user', 'customer_id'], name='drf_stripe__user_id_6bbc0d_idx'),
        ),
        migrations.AddField(
            model_name='productfeature',
            name='feature',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='linked_products',
                                    to='drf_stripe.feature'),
        ),
        migrations.AddField(
            model_name='productfeature',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='linked_features',
                                    to='drf_stripe.product'),
        ),
        migrations.AddField(
            model_name='price',
            name='product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='prices',
                                    to='drf_stripe.product'),
        ),
        migrations.AddIndex(
            model_name='subscription',
            index=models.Index(fields=['stripe_user', 'status'], name='drf_stripe__stripe__7d71e7_idx'),
        ),
        migrations.AddIndex(
            model_name='price',
            index=models.Index(fields=['active', 'freq'], name='drf_stripe__active_3854b4_idx'),
        ),
    ]
/dschmidt-cdktf-provider-google_beta-0.1.0.tar.gz/dschmidt-cdktf-provider-google_beta-0.1.0/src/dschmidt_cdktf_provider_google_beta/google_cloudiot_registry_iam_member/__init__.py | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf
import constructs
# NOTE(review): this class follows the jsii/cdktf provider codegen pattern;
# hand edits here are typically overwritten when the bindings are regenerated.
class GoogleCloudiotRegistryIamMember(
    cdktf.TerraformResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@dschmidt/provider-google-beta.googleCloudiotRegistryIamMember.GoogleCloudiotRegistryIamMember",
):
    '''Represents a {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member google_cloudiot_registry_iam_member}.'''
    def __init__(
        self,
        scope: constructs.Construct,
        id_: builtins.str,
        *,
        member: builtins.str,
        name: builtins.str,
        role: builtins.str,
        condition: typing.Optional[typing.Union["GoogleCloudiotRegistryIamMemberCondition", typing.Dict[str, typing.Any]]] = None,
        id: typing.Optional[builtins.str] = None,
        project: typing.Optional[builtins.str] = None,
        region: typing.Optional[builtins.str] = None,
        connection: typing.Optional[typing.Union[typing.Union[cdktf.SSHProvisionerConnection, typing.Dict[str, typing.Any]], typing.Union[cdktf.WinrmProvisionerConnection, typing.Dict[str, typing.Any]]]] = None,
        count: typing.Optional[jsii.Number] = None,
        depends_on: typing.Optional[typing.Sequence[cdktf.ITerraformDependable]] = None,
        for_each: typing.Optional[cdktf.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[cdktf.TerraformResourceLifecycle, typing.Dict[str, typing.Any]]] = None,
        provider: typing.Optional[cdktf.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[cdktf.FileProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.LocalExecProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.RemoteExecProvisioner, typing.Dict[str, typing.Any]]]]] = None,
    ) -> None:
        '''Create a new {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member google_cloudiot_registry_iam_member} Resource.
        :param scope: The scope in which to define this construct.
        :param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
        :param member: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#member GoogleCloudiotRegistryIamMember#member}.
        :param name: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#name GoogleCloudiotRegistryIamMember#name}.
        :param role: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#role GoogleCloudiotRegistryIamMember#role}.
        :param condition: condition block. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#condition GoogleCloudiotRegistryIamMember#condition}
        :param id: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#id GoogleCloudiotRegistryIamMember#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param project: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#project GoogleCloudiotRegistryIamMember#project}.
        :param region: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#region GoogleCloudiotRegistryIamMember#region}.
        :param connection:
        :param count:
        :param depends_on:
        :param for_each:
        :param lifecycle:
        :param provider:
        :param provisioners:
        '''
        # Runtime type-check of the two positional arguments (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(GoogleCloudiotRegistryIamMember.__init__)
            check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
            check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
        # Bundle every keyword argument into the typed config struct, then let
        # the jsii kernel construct the underlying JavaScript object.
        config = GoogleCloudiotRegistryIamMemberConfig(
            member=member,
            name=name,
            role=role,
            condition=condition,
            id=id,
            project=project,
            region=region,
            connection=connection,
            count=count,
            depends_on=depends_on,
            for_each=for_each,
            lifecycle=lifecycle,
            provider=provider,
            provisioners=provisioners,
        )
        jsii.create(self.__class__, self, [scope, id_, config])
    # Builds a Condition struct from the keyword arguments and forwards it to
    # the jsii-side putCondition, i.e. sets/overwrites the nested block.
    @jsii.member(jsii_name="putCondition")
    def put_condition(
        self,
        *,
        expression: builtins.str,
        title: builtins.str,
        description: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param expression: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#expression GoogleCloudiotRegistryIamMember#expression}.
        :param title: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#title GoogleCloudiotRegistryIamMember#title}.
        :param description: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#description GoogleCloudiotRegistryIamMember#description}.
        '''
        value = GoogleCloudiotRegistryIamMemberCondition(
            expression=expression, title=title, description=description
        )
        return typing.cast(None, jsii.invoke(self, "putCondition", [value]))
    # The reset* methods delegate to the jsii-side reset handlers for the
    # optional arguments (condition, id, project, region).
    @jsii.member(jsii_name="resetCondition")
    def reset_condition(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetCondition", []))
    @jsii.member(jsii_name="resetId")
    def reset_id(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetId", []))
    @jsii.member(jsii_name="resetProject")
    def reset_project(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetProject", []))
    @jsii.member(jsii_name="resetRegion")
    def reset_region(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetRegion", []))
    # Internal hook: asks the jsii object for the attribute map used when
    # synthesizing this resource into Terraform configuration.
    @jsii.member(jsii_name="synthesizeAttributes")
    def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
    # Terraform resource type name exposed as a class-level constant.
    @jsii.python.classproperty
    @jsii.member(jsii_name="tfResourceType")
    def TF_RESOURCE_TYPE(cls) -> builtins.str:
        return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
    # --- read-only accessors (computed attributes and raw "*Input" values) ---
    @builtins.property
    @jsii.member(jsii_name="condition")
    def condition(self) -> "GoogleCloudiotRegistryIamMemberConditionOutputReference":
        return typing.cast("GoogleCloudiotRegistryIamMemberConditionOutputReference", jsii.get(self, "condition"))
    @builtins.property
    @jsii.member(jsii_name="etag")
    def etag(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "etag"))
    @builtins.property
    @jsii.member(jsii_name="conditionInput")
    def condition_input(
        self,
    ) -> typing.Optional["GoogleCloudiotRegistryIamMemberCondition"]:
        return typing.cast(typing.Optional["GoogleCloudiotRegistryIamMemberCondition"], jsii.get(self, "conditionInput"))
    @builtins.property
    @jsii.member(jsii_name="idInput")
    def id_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
    @builtins.property
    @jsii.member(jsii_name="memberInput")
    def member_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "memberInput"))
    @builtins.property
    @jsii.member(jsii_name="nameInput")
    def name_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))
    @builtins.property
    @jsii.member(jsii_name="projectInput")
    def project_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "projectInput"))
    @builtins.property
    @jsii.member(jsii_name="regionInput")
    def region_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "regionInput"))
    @builtins.property
    @jsii.member(jsii_name="roleInput")
    def role_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "roleInput"))
    # --- settable scalar attributes; each setter type-checks under __debug__
    # and forwards to the jsii object ---
    @builtins.property
    @jsii.member(jsii_name="id")
    def id(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "id"))
    @id.setter
    def id(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMember, "id").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "id", value)
    @builtins.property
    @jsii.member(jsii_name="member")
    def member(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "member"))
    @member.setter
    def member(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMember, "member").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "member", value)
    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "name"))
    @name.setter
    def name(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMember, "name").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "name", value)
    @builtins.property
    @jsii.member(jsii_name="project")
    def project(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "project"))
    @project.setter
    def project(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMember, "project").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "project", value)
    @builtins.property
    @jsii.member(jsii_name="region")
    def region(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "region"))
    @region.setter
    def region(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMember, "region").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "region", value)
    @builtins.property
    @jsii.member(jsii_name="role")
    def role(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "role"))
    @role.setter
    def role(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMember, "role").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "role", value)
# jsii struct: immutable value object for the IAM condition block.
# name_mapping maps the Python attribute names to the jsii/JS property names.
@jsii.data_type(
    jsii_type="@dschmidt/provider-google-beta.googleCloudiotRegistryIamMember.GoogleCloudiotRegistryIamMemberCondition",
    jsii_struct_bases=[],
    name_mapping={
        "expression": "expression",
        "title": "title",
        "description": "description",
    },
)
class GoogleCloudiotRegistryIamMemberCondition:
    """Value struct for the ``condition`` block (expression, title, optional description)."""
    def __init__(
        self,
        *,
        expression: builtins.str,
        title: builtins.str,
        description: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param expression: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#expression GoogleCloudiotRegistryIamMember#expression}.
        :param title: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#title GoogleCloudiotRegistryIamMember#title}.
        :param description: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#description GoogleCloudiotRegistryIamMember#description}.
        '''
        # Optional runtime type validation (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(GoogleCloudiotRegistryIamMemberCondition.__init__)
            check_type(argname="argument expression", value=expression, expected_type=type_hints["expression"])
            check_type(argname="argument title", value=title, expected_type=type_hints["title"])
            check_type(argname="argument description", value=description, expected_type=type_hints["description"])
        # All values live in a single dict; optional keys are stored only
        # when provided, so the property getters can distinguish "unset".
        self._values: typing.Dict[str, typing.Any] = {
            "expression": expression,
            "title": title,
        }
        if description is not None:
            self._values["description"] = description
    @builtins.property
    def expression(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#expression GoogleCloudiotRegistryIamMember#expression}.'''
        result = self._values.get("expression")
        assert result is not None, "Required property 'expression' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def title(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#title GoogleCloudiotRegistryIamMember#title}.'''
        result = self._values.get("title")
        assert result is not None, "Required property 'title' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def description(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#description GoogleCloudiotRegistryIamMember#description}.'''
        result = self._values.get("description")
        return typing.cast(typing.Optional[builtins.str], result)
    # Structs compare by class and by their stored value dict.
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values
    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)
    def __repr__(self) -> str:
        return "GoogleCloudiotRegistryIamMemberCondition(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
# Mutable accessor wrapper over the resource's "condition" attribute;
# reads and writes are proxied through the jsii kernel.
class GoogleCloudiotRegistryIamMemberConditionOutputReference(
    cdktf.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@dschmidt/provider-google-beta.googleCloudiotRegistryIamMember.GoogleCloudiotRegistryIamMemberConditionOutputReference",
):
    def __init__(
        self,
        terraform_resource: cdktf.IInterpolatingParent,
        terraform_attribute: builtins.str,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        '''
        # Validate constructor arguments, then create the jsii-side object.
        if __debug__:
            type_hints = typing.get_type_hints(GoogleCloudiotRegistryIamMemberConditionOutputReference.__init__)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
    # Clears the optional "description" field via the jsii-side handler.
    @jsii.member(jsii_name="resetDescription")
    def reset_description(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetDescription", []))
    # --- raw "*Input" accessors (read-only) ---
    @builtins.property
    @jsii.member(jsii_name="descriptionInput")
    def description_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "descriptionInput"))
    @builtins.property
    @jsii.member(jsii_name="expressionInput")
    def expression_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "expressionInput"))
    @builtins.property
    @jsii.member(jsii_name="titleInput")
    def title_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "titleInput"))
    # --- settable attributes; setters type-check under __debug__ ---
    @builtins.property
    @jsii.member(jsii_name="description")
    def description(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "description"))
    @description.setter
    def description(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMemberConditionOutputReference, "description").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "description", value)
    @builtins.property
    @jsii.member(jsii_name="expression")
    def expression(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "expression"))
    @expression.setter
    def expression(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMemberConditionOutputReference, "expression").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "expression", value)
    @builtins.property
    @jsii.member(jsii_name="title")
    def title(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "title"))
    @title.setter
    def title(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMemberConditionOutputReference, "title").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "title", value)
    # Whole-struct access: None when the condition block is unset.
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[GoogleCloudiotRegistryIamMemberCondition]:
        return typing.cast(typing.Optional[GoogleCloudiotRegistryIamMemberCondition], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[GoogleCloudiotRegistryIamMemberCondition],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(GoogleCloudiotRegistryIamMemberConditionOutputReference, "internal_value").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
# jsii struct holding the full configuration of the resource: the cdktf
# meta-arguments (connection/count/...) plus the resource-specific fields.
@jsii.data_type(
    jsii_type="@dschmidt/provider-google-beta.googleCloudiotRegistryIamMember.GoogleCloudiotRegistryIamMemberConfig",
    jsii_struct_bases=[cdktf.TerraformMetaArguments],
    name_mapping={
        "connection": "connection",
        "count": "count",
        "depends_on": "dependsOn",
        "for_each": "forEach",
        "lifecycle": "lifecycle",
        "provider": "provider",
        "provisioners": "provisioners",
        "member": "member",
        "name": "name",
        "role": "role",
        "condition": "condition",
        "id": "id",
        "project": "project",
        "region": "region",
    },
)
class GoogleCloudiotRegistryIamMemberConfig(cdktf.TerraformMetaArguments):
    """Configuration struct for :class:`GoogleCloudiotRegistryIamMember`."""
    def __init__(
        self,
        *,
        connection: typing.Optional[typing.Union[typing.Union[cdktf.SSHProvisionerConnection, typing.Dict[str, typing.Any]], typing.Union[cdktf.WinrmProvisionerConnection, typing.Dict[str, typing.Any]]]] = None,
        count: typing.Optional[jsii.Number] = None,
        depends_on: typing.Optional[typing.Sequence[cdktf.ITerraformDependable]] = None,
        for_each: typing.Optional[cdktf.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[cdktf.TerraformResourceLifecycle, typing.Dict[str, typing.Any]]] = None,
        provider: typing.Optional[cdktf.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[cdktf.FileProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.LocalExecProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.RemoteExecProvisioner, typing.Dict[str, typing.Any]]]]] = None,
        member: builtins.str,
        name: builtins.str,
        role: builtins.str,
        condition: typing.Optional[typing.Union[GoogleCloudiotRegistryIamMemberCondition, typing.Dict[str, typing.Any]]] = None,
        id: typing.Optional[builtins.str] = None,
        project: typing.Optional[builtins.str] = None,
        region: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param connection:
        :param count:
        :param depends_on:
        :param for_each:
        :param lifecycle:
        :param provider:
        :param provisioners:
        :param member: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#member GoogleCloudiotRegistryIamMember#member}.
        :param name: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#name GoogleCloudiotRegistryIamMember#name}.
        :param role: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#role GoogleCloudiotRegistryIamMember#role}.
        :param condition: condition block. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#condition GoogleCloudiotRegistryIamMember#condition}
        :param id: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#id GoogleCloudiotRegistryIamMember#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param project: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#project GoogleCloudiotRegistryIamMember#project}.
        :param region: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#region GoogleCloudiotRegistryIamMember#region}.
        '''
        # Plain dicts are coerced into the corresponding struct types.
        if isinstance(lifecycle, dict):
            lifecycle = cdktf.TerraformResourceLifecycle(**lifecycle)
        if isinstance(condition, dict):
            condition = GoogleCloudiotRegistryIamMemberCondition(**condition)
        # Optional runtime type validation (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(GoogleCloudiotRegistryIamMemberConfig.__init__)
            check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
            check_type(argname="argument count", value=count, expected_type=type_hints["count"])
            check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
            check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
            check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
            check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
            check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
            check_type(argname="argument member", value=member, expected_type=type_hints["member"])
            check_type(argname="argument name", value=name, expected_type=type_hints["name"])
            check_type(argname="argument role", value=role, expected_type=type_hints["role"])
            check_type(argname="argument condition", value=condition, expected_type=type_hints["condition"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
            check_type(argname="argument project", value=project, expected_type=type_hints["project"])
            check_type(argname="argument region", value=region, expected_type=type_hints["region"])
        # Required properties are always stored; optional ones only when given,
        # so the getters can distinguish "unset" from an explicit value.
        self._values: typing.Dict[str, typing.Any] = {
            "member": member,
            "name": name,
            "role": role,
        }
        if connection is not None:
            self._values["connection"] = connection
        if count is not None:
            self._values["count"] = count
        if depends_on is not None:
            self._values["depends_on"] = depends_on
        if for_each is not None:
            self._values["for_each"] = for_each
        if lifecycle is not None:
            self._values["lifecycle"] = lifecycle
        if provider is not None:
            self._values["provider"] = provider
        if provisioners is not None:
            self._values["provisioners"] = provisioners
        if condition is not None:
            self._values["condition"] = condition
        if id is not None:
            self._values["id"] = id
        if project is not None:
            self._values["project"] = project
        if region is not None:
            self._values["region"] = region
    # --- cdktf meta-argument accessors (inherited contract) ---
    @builtins.property
    def connection(
        self,
    ) -> typing.Optional[typing.Union[cdktf.SSHProvisionerConnection, cdktf.WinrmProvisionerConnection]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("connection")
        return typing.cast(typing.Optional[typing.Union[cdktf.SSHProvisionerConnection, cdktf.WinrmProvisionerConnection]], result)
    @builtins.property
    def count(self) -> typing.Optional[jsii.Number]:
        '''
        :stability: experimental
        '''
        result = self._values.get("count")
        return typing.cast(typing.Optional[jsii.Number], result)
    @builtins.property
    def depends_on(self) -> typing.Optional[typing.List[cdktf.ITerraformDependable]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("depends_on")
        return typing.cast(typing.Optional[typing.List[cdktf.ITerraformDependable]], result)
    @builtins.property
    def for_each(self) -> typing.Optional[cdktf.ITerraformIterator]:
        '''
        :stability: experimental
        '''
        result = self._values.get("for_each")
        return typing.cast(typing.Optional[cdktf.ITerraformIterator], result)
    @builtins.property
    def lifecycle(self) -> typing.Optional[cdktf.TerraformResourceLifecycle]:
        '''
        :stability: experimental
        '''
        result = self._values.get("lifecycle")
        return typing.cast(typing.Optional[cdktf.TerraformResourceLifecycle], result)
    @builtins.property
    def provider(self) -> typing.Optional[cdktf.TerraformProvider]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provider")
        return typing.cast(typing.Optional[cdktf.TerraformProvider], result)
    @builtins.property
    def provisioners(
        self,
    ) -> typing.Optional[typing.List[typing.Union[cdktf.FileProvisioner, cdktf.LocalExecProvisioner, cdktf.RemoteExecProvisioner]]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provisioners")
        return typing.cast(typing.Optional[typing.List[typing.Union[cdktf.FileProvisioner, cdktf.LocalExecProvisioner, cdktf.RemoteExecProvisioner]]], result)
    # --- resource-specific properties; required ones assert presence ---
    @builtins.property
    def member(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#member GoogleCloudiotRegistryIamMember#member}.'''
        result = self._values.get("member")
        assert result is not None, "Required property 'member' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def name(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#name GoogleCloudiotRegistryIamMember#name}.'''
        result = self._values.get("name")
        assert result is not None, "Required property 'name' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def role(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#role GoogleCloudiotRegistryIamMember#role}.'''
        result = self._values.get("role")
        assert result is not None, "Required property 'role' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def condition(self) -> typing.Optional[GoogleCloudiotRegistryIamMemberCondition]:
        '''condition block.
        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#condition GoogleCloudiotRegistryIamMember#condition}
        '''
        result = self._values.get("condition")
        return typing.cast(typing.Optional[GoogleCloudiotRegistryIamMemberCondition], result)
    @builtins.property
    def id(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#id GoogleCloudiotRegistryIamMember#id}.
        Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
        If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        '''
        result = self._values.get("id")
        return typing.cast(typing.Optional[builtins.str], result)
    @builtins.property
    def project(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#project GoogleCloudiotRegistryIamMember#project}.'''
        result = self._values.get("project")
        return typing.cast(typing.Optional[builtins.str], result)
    @builtins.property
    def region(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google-beta/r/google_cloudiot_registry_iam_member#region GoogleCloudiotRegistryIamMember#region}.'''
        result = self._values.get("region")
        return typing.cast(typing.Optional[builtins.str], result)
    # Structs compare by class and stored value dict.
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values
    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)
    def __repr__(self) -> str:
        return "GoogleCloudiotRegistryIamMemberConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
# Public re-export surface of this generated module.
__all__ = [
    "GoogleCloudiotRegistryIamMember",
    "GoogleCloudiotRegistryIamMemberCondition",
    "GoogleCloudiotRegistryIamMemberConditionOutputReference",
    "GoogleCloudiotRegistryIamMemberConfig",
]
# NOTE(review): jsii codegen convention — presumably finalizes type
# publication for the classes above; confirm against the `publication` package.
publication.publish()
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/request/AlipayBusinessOrderCreateRequest.py | import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayBusinessOrderCreateModel import AlipayBusinessOrderCreateModel
def _passthrough_property(storage_name):
    """Build a plain read/write property backed by the given private attribute."""
    def _reader(self):
        return getattr(self, storage_name)

    def _writer(self, value):
        setattr(self, storage_name, value)

    return property(_reader, _writer)


class AlipayBusinessOrderCreateRequest(object):
    """Request object for the ``alipay.business.order.create`` OpenAPI call.

    Collects the business payload plus protocol-level fields and flattens
    them into the gateway parameter dict via :meth:`get_params`.
    """

    def __init__(self, biz_model=None):
        # Business payload objects.
        self._biz_model = biz_model
        self._biz_content = None
        # Protocol-level fields.
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    # Simple pass-through accessors (no validation or conversion).
    biz_model = _passthrough_property('_biz_model')
    version = _passthrough_property('_version')
    terminal_type = _passthrough_property('_terminal_type')
    terminal_info = _passthrough_property('_terminal_info')
    prod_code = _passthrough_property('_prod_code')
    notify_url = _passthrough_property('_notify_url')
    return_url = _passthrough_property('_return_url')
    need_encrypt = _passthrough_property('_need_encrypt')

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Accept either a ready model instance or raw data to be converted.
        if isinstance(value, AlipayBusinessOrderCreateModel):
            self._biz_content = value
        else:
            self._biz_content = AlipayBusinessOrderCreateModel.from_alipay_dict(value)

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Anything that is not a dict is silently ignored (keeps prior value).
        if isinstance(value, dict):
            self._udf_params = value

    def add_other_text_param(self, key, value):
        """Attach one extra text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def get_params(self):
        """Assemble the flat parameter dict that is sent to the gateway."""
        params = dict()
        params[P_METHOD] = 'alipay.business.order.create'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            # Serialize model objects; pass through already-serialized content.
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        # Optional scalar fields are copied only when truthy.
        for field_name in ('terminal_type', 'terminal_info', 'prod_code', 'notify_url', 'return_url'):
            field_value = getattr(self, field_name)
            if field_value:
                params[field_name] = field_value
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This request carries no file attachments; always an empty dict."""
        return dict()
/oarepo_model_builder-4.0.38-py3-none-any.whl/oarepo_model_builder/datatypes/components/model/permissions.py | import marshmallow as ma
from oarepo_model_builder.datatypes import DataTypeComponent, ModelDataType
from oarepo_model_builder.utils.python_name import parent_module
from oarepo_model_builder.validation.utils import ImportSchema
from .defaults import DefaultsModelComponent
from .service import ServiceModelComponent
from .utils import set_default
class ModelPermissionsSchema(ma.Schema):
    """Marshmallow schema validating the ``permissions`` section of a model file."""
    generate = ma.fields.Bool(
        metadata={"doc": "Set to true to generate the permissions class"}
    )
    presets = ma.fields.List(
        ma.fields.String(),
        metadata={
            "doc": "A list of presets that will be merged with the permissions class."
        },
    )
    class_ = ma.fields.String(
        attribute="class",
        data_key="class",
        metadata={"doc": "Qualified name of the generated class"},
    )
    base_classes = ma.fields.List(
        ma.fields.String(),
        attribute="base-classes",
        data_key="base-classes",
        metadata={"doc": "Base classes for the generated permission class"},
    )
    extra_code = ma.fields.String(
        attribute="extra-code",
        data_key="extra-code",
        metadata={"doc": "Extra code to be copied below the permission class"},
    )
    module = ma.fields.String(
        metadata={"doc": "Module where the permissions will be placed"}
    )
    imports = ma.fields.List(
        ma.fields.Nested(ImportSchema), metadata={"doc": "List of python imports"}
    )
    # when true, processing of this section is skipped
    skip = ma.fields.Boolean()
    class Meta:
        # reject unknown keys in the permissions section
        unknown = ma.RAISE
class PermissionsModelComponent(DataTypeComponent):
    """Model component filling in defaults for the generated permission policy class."""
    eligible_datatypes = [ModelDataType]
    depends_on = [DefaultsModelComponent, ServiceModelComponent]
    class ModelSchema(ma.Schema):
        permissions = ma.fields.Nested(
            ModelPermissionsSchema(),
            required=False,
            metadata={"doc": "Permissions settings"},
        )
    def before_model_prepare(self, datatype, *, context, **kwargs):
        """Populate the ``permissions`` section with defaults before generation."""
        prefix = datatype.definition["module"]["prefix"]
        record_services_module = parent_module(datatype.definition["service"]["module"])
        permissions = set_default(datatype, "permissions", {})
        # default policy: generate a permissive ("everyone") policy class
        permissions.setdefault("generate", True)
        permissions.setdefault("presets", ["everyone"])
        permissions.setdefault("extra-code", "")
        # place the class in <service package>.permissions as <Prefix>PermissionPolicy
        permissions_module = permissions.setdefault(
            "module",
            f"{record_services_module}.permissions",
        )
        permissions.setdefault(
            "class",
            f"{permissions_module}.{prefix}PermissionPolicy",
        )
        permissions.setdefault(
            "base-classes",
            ["RecordPermissionPolicy"],
        )
        permissions.setdefault(
            "imports",
            [{"import": "invenio_records_permissions.RecordPermissionPolicy"}],
        )
/cubicweb-web-1.2.1.tar.gz/cubicweb-web-1.2.1/cubicweb_web/data/jquery.tablesorter.js | *
* @description Create a sortable table with multi-column sorting capabilitys
*
* @example $('table').tablesorter();
* @desc Create a simple tablesorter interface.
*
* @example $('table').tablesorter({ sortList:[[0,0],[1,0]] });
* @desc Create a tablesorter interface and sort on the first and second column headers.
*
* @example $('table').tablesorter({ headers: { 0: { sorter: false}, 1: {sorter: false} } });
*
* @desc Create a tablesorter interface and disable sorting on the first and second column headers.
*
*
* @example $('table').tablesorter({ headers: { 0: {sorter:"integer"}, 1: {sorter:"currency"} } });
*
* @desc Create a tablesorter interface and set a column parser for the first
* and second column.
*
*
* @param Object
* settings An object literal containing key/value pairs to provide
* optional settings.
*
*
* @option String cssHeader (optional) A string of the class name to be appended
* to sortable tr elements in the thead of the table. Default value:
* "header"
*
* @option String cssAsc (optional) A string of the class name to be appended to
* sortable tr elements in the thead on a ascending sort. Default value:
* "headerSortUp"
*
* @option String cssDesc (optional) A string of the class name to be appended
* to sortable tr elements in the thead on a descending sort. Default
* value: "headerSortDown"
*
* @option String sortInitialOrder (optional) A string of the initial sorting
*        order; can be asc or desc. Default value: "asc"
*
* @option String sortMultisortKey (optional) A string of the multi-column sort
* key. Default value: "shiftKey"
*
* @option String textExtraction (optional) A string of the text-extraction
* method to use. For complex html structures inside td cell set this
* option to "complex", on large tables the complex option can be slow.
* Default value: "simple"
*
* @option Object headers (optional) An array containing the forces sorting
* rules. This option let's you specify a default sorting rule. Default
* value: null
*
* @option Array sortList (optional) An array containing the forces sorting
* rules. This option let's you specify a default sorting rule. Default
* value: null
*
* @option Array sortForce (optional) An array containing forced sorting rules.
* This option let's you specify a default sorting rule, which is
* prepended to user-selected rules. Default value: null
*
* @option Boolean sortLocaleCompare (optional) Boolean flag indicating whether
*        to use the String.localeCompare method or not. Default set to true.
*
*
* @option Array sortAppend (optional) An array containing forced sorting rules.
* This option let's you specify a default sorting rule, which is
* appended to user-selected rules. Default value: null
*
* @option Boolean widthFixed (optional) Boolean flag indicating if tablesorter
* should apply fixed widths to the table columns. This is useful when
* using the pager companion plugin. This options requires the dimension
* jquery plugin. Default value: false
*
* @option Boolean cancelSelection (optional) Boolean flag indicating if
* tablesorter should cancel selection of the table headers text.
* Default value: true
*
* @option Boolean debug (optional) Boolean flag indicating if tablesorter
*        should display debugging information useful for development.
*
* @type jQuery
*
* @name tablesorter
*
* @cat Plugins/Tablesorter
*
* @author Christian Bach/[email protected]
*/
(function ($) {
$.extend({
tablesorter: new
function () {
var parsers = [],
    widgets = [];
// user-overridable defaults; merged with the caller's settings object
this.defaults = {
    cssHeader: "header",
    cssAsc: "headerSortUp",
    cssDesc: "headerSortDown",
    cssChildRow: "expand-child",
    sortInitialOrder: "asc",
    sortMultiSortKey: "shiftKey",
    sortForce: null,
    sortAppend: null,
    sortLocaleCompare: true,
    textExtraction: "simple",
    parsers: {}, widgets: [],
    widgetZebra: {
        css: ["even", "odd"]
    }, headers: {}, widthFixed: false,
    cancelSelection: true,
    sortList: [],
    headerList: [],
    dateFormat: "us",
    decimal: '/\.|\,/g',
    onRenderHeader: null,
    selectorHeaders: 'thead th',
    debug: false
};
/* debugging utils */
// Log the elapsed milliseconds since Date d, prefixed with label s.
function benchmark(s, d) {
    log(s + "," + (new Date().getTime() - d.getTime()) + "ms");
}
this.benchmark = benchmark;
// Console logger; falls back to alert() where no console.debug exists.
function log(s) {
    if (typeof console != "undefined" && typeof console.debug != "undefined") {
        console.log(s);
    } else {
        alert(s);
    }
}
/* parsers utils */
// Pick one parser per column: explicit $.metadata or headers config wins,
// otherwise auto-detect from the first non-empty cell in the column.
// Returns the parser list (undefined for an empty table).
function buildParserCache(table, $headers) {
    if (table.config.debug) {
        var parsersDebug = "";
    }
    if (table.tBodies.length == 0) return; // In the case of empty tables
    var rows = table.tBodies[0].rows;
    if (rows[0]) {
        var list = [],
            cells = rows[0].cells,
            l = cells.length;
        for (var i = 0; i < l; i++) {
            var p = false;
            if ($.metadata && ($($headers[i]).metadata() && $($headers[i]).metadata().sorter)) {
                p = getParserById($($headers[i]).metadata().sorter);
            } else if ((table.config.headers[i] && table.config.headers[i].sorter)) {
                p = getParserById(table.config.headers[i].sorter);
            }
            if (!p) {
                // no explicit parser configured: sniff the column content
                p = detectParserForColumn(table, rows, -1, i);
            }
            if (table.config.debug) {
                parsersDebug += "column:" + i + " parser:" + p.id + "\n";
            }
            list.push(p);
        }
    }
    if (table.config.debug) {
        log(parsersDebug);
    }
    return list;
};
// Walk down the column until a non-empty cell is found, then return the
// first registered parser (skipping the generic text parser at index 0)
// whose is() accepts that value; fall back to plain text.
function detectParserForColumn(table, rows, rowIndex, cellIndex) {
    var l = parsers.length,
        node = false,
        nodeValue = false,
        keepLooking = true;
    while (nodeValue == '' && keepLooking) {
        rowIndex++;
        if (rows[rowIndex]) {
            node = getNodeFromRowAndCellIndex(rows, rowIndex, cellIndex);
            nodeValue = trimAndGetNodeText(table.config, node);
            if (table.config.debug) {
                log('Checking if value was empty on row:' + rowIndex);
            }
        } else {
            // ran out of rows: give up and let the generic parser handle it
            keepLooking = false;
        }
    }
    for (var i = 1; i < l; i++) {
        if (parsers[i].is(nodeValue, table, node)) {
            return parsers[i];
        }
    }
    // 0 is always the generic parser (text)
    return parsers[0];
}
// Fetch the cell node at (rowIndex, cellIndex) from a rows collection.
function getNodeFromRowAndCellIndex(rows, rowIndex, cellIndex) {
    var row = rows[rowIndex];
    return row.cells[cellIndex];
}
// Trimmed text content of a cell node, honouring the textExtraction config.
function trimAndGetNodeText(config, node) {
    return $.trim(getElementText(config, node));
}
// Look up a registered parser by its id (case-insensitive); false if absent.
function getParserById(name) {
    var wanted = name.toLowerCase();
    for (var idx = 0; idx < parsers.length; idx++) {
        var candidate = parsers[idx];
        if (candidate.id.toLowerCase() == wanted) {
            return candidate;
        }
    }
    return false;
}
/* utils */
// Extract and normalize every tbody row into a sortable matrix.
// cache.row[i] is the jQuery row (child rows merged into their parent);
// cache.normalized[i] holds the parsed cell values plus, as its last
// element, the row's original position (used later as a stable tie-breaker).
function buildCache(table) {
    if (table.config.debug) {
        var cacheTime = new Date();
    }
    var totalRows = (table.tBodies[0] && table.tBodies[0].rows.length) || 0,
        totalCells = (table.tBodies[0].rows[0] && table.tBodies[0].rows[0].cells.length) || 0,
        parsers = table.config.parsers,
        cache = {
            row: [],
            normalized: []
        };
    for (var i = 0; i < totalRows; ++i) {
        /** Add the table data to main data array */
        var c = $(table.tBodies[0].rows[i]),
            cols = [];
        // if this is a child row, add it to the last row's children and
        // continue to the next row
        if (c.hasClass(table.config.cssChildRow)) {
            cache.row[cache.row.length - 1] = cache.row[cache.row.length - 1].add(c);
            // go to the next for loop
            continue;
        }
        cache.row.push(c);
        for (var j = 0; j < totalCells; ++j) {
            cols.push(parsers[j].format(getElementText(table.config, c[0].cells[j]), table, c[0].cells[j]));
        }
        cols.push(cache.normalized.length); // add position for rowCache
        cache.normalized.push(cols);
        cols = null;
    };
    if (table.config.debug) {
        benchmark("Building cache for " + totalRows + " rows:", cacheTime);
    }
    return cache;
};
// Extract a cell's text using the configured strategy: "simple" reads
// textContent/innerHTML directly, otherwise a user callback or $(node).text().
function getElementText(config, node) {
    var text = "";
    if (!node) return "";
    // NOTE(review): feature detection caches node.textContent itself; when
    // the first probed cell's text is "" the flag stays falsy and the code
    // keeps using the innerHTML fallback — confirm this is intentional.
    if (!config.supportsTextContent) config.supportsTextContent = node.textContent || false;
    if (config.textExtraction == "simple") {
        if (config.supportsTextContent) {
            text = node.textContent;
        } else {
            if (node.childNodes[0] && node.childNodes[0].hasChildNodes()) {
                text = node.childNodes[0].innerHTML;
            } else {
                text = node.innerHTML;
            }
        }
    } else {
        if (typeof(config.textExtraction) == "function") {
            text = config.textExtraction(node);
        } else {
            text = $(node).text();
        }
    }
    return text;
}
// Re-insert the rows into the tbody in cache-sorted order (or hand them to
// a custom config.appender), re-apply widgets, then fire "sortEnd".
function appendToTable(table, cache) {
    if (table.config.debug) {
        var appendTime = new Date()
    }
    var c = cache,
        r = c.row,
        n = c.normalized,
        totalRows = n.length,
        checkCell = (n[0].length - 1),
        tableBody = $(table.tBodies[0]),
        rows = [];
    for (var i = 0; i < totalRows; i++) {
        // the last normalized column stores the row's original index
        var pos = n[i][checkCell];
        rows.push(r[pos]);
        if (!table.config.appender) {
            var l = r[pos].length;
            for (var j = 0; j < l; j++) {
                tableBody[0].appendChild(r[pos][j]);
            }
        }
    }
    if (table.config.appender) {
        table.config.appender(table, rows);
    }
    rows = null;
    if (table.config.debug) {
        benchmark("Rebuilt table:", appendTime);
    }
    // apply table widgets
    applyWidget(table);
    // trigger sortend
    setTimeout(function () {
        $(table).trigger("sortEnd");
    }, 0);
};
// Decorate the header cells: resolve each cell's logical column (rowspan/
// colspan aware), seed its sort order and click counter, honour the
// sorter:false and lockedOrder options, and add the configured header CSS
// class. Returns the jQuery set of header cells.
function buildHeaders(table) {
    if (table.config.debug) {
        var time = new Date();
    }
    var header_index = computeTableHeaderCellIndexes(table);
    // BUG FIX: $tableHeaders was assigned without `var`, leaking a global
    // (and throwing in strict mode); it is only used inside this function.
    // The unused `meta` local was also removed.
    var $tableHeaders = $(table.config.selectorHeaders, table).each(function (index) {
        this.column = header_index[this.parentNode.rowIndex + "-" + this.cellIndex];
        this.order = formatSortingOrder(table.config.sortInitialOrder);
        this.count = this.order;
        if (checkHeaderMetadata(this) || checkHeaderOptions(table, index)) this.sortDisabled = true;
        if (checkHeaderOptionsSortingLocked(table, index)) this.order = this.lockedOrder = checkHeaderOptionsSortingLocked(table, index);
        if (!this.sortDisabled) {
            var $th = $(this).addClass(table.config.cssHeader);
            if (table.config.onRenderHeader) table.config.onRenderHeader.apply($th);
        }
        // add cell to headerList
        table.config.headerList[index] = this;
    });
    if (table.config.debug) {
        benchmark("Built headers:", time);
        log($tableHeaders);
    }
    return $tableHeaders;
};
// from:
// http://www.javascripttoolbox.com/lib/table/examples.php
// http://www.javascripttoolbox.com/temp/table_cellindex.html
// Map every thead cell ("rowIndex-cellIndex") to the logical column where
// it starts, expanding rowSpan/colSpan via an occupancy matrix.
function computeTableHeaderCellIndexes(t) {
    var matrix = [];
    var lookup = {};
    var thead = t.getElementsByTagName('thead')[0];
    var trs = thead.getElementsByTagName('tr');
    for (var i = 0; i < trs.length; i++) {
        var cells = trs[i].cells;
        for (var j = 0; j < cells.length; j++) {
            var c = cells[j];
            var rowIndex = c.parentNode.rowIndex;
            var cellId = rowIndex + "-" + c.cellIndex;
            var rowSpan = c.rowSpan || 1;
            var colSpan = c.colSpan || 1
            var firstAvailCol;
            if (typeof(matrix[rowIndex]) == "undefined") {
                matrix[rowIndex] = [];
            }
            // Find first available column in the first row
            for (var k = 0; k < matrix[rowIndex].length + 1; k++) {
                if (typeof(matrix[rowIndex][k]) == "undefined") {
                    firstAvailCol = k;
                    break;
                }
            }
            lookup[cellId] = firstAvailCol;
            // mark every matrix slot covered by this cell's spans as taken
            for (var k = rowIndex; k < rowIndex + rowSpan; k++) {
                if (typeof(matrix[k]) == "undefined") {
                    matrix[k] = [];
                }
                var matrixrow = matrix[k];
                for (var l = firstAvailCol; l < firstAvailCol + colSpan; l++) {
                    matrixrow[l] = "x";
                }
            }
        }
    }
    return lookup;
}
// Collect the sortable header cells of thead row `row`, recursing into
// column-spanning cells. NOTE(review): this helper appears unused — the
// header/column mapping is done by computeTableHeaderCellIndexes.
function checkCellColSpan(table, rows, row) {
    var arr = [],
        r = table.tHead.rows,
        c = r[row].cells;
    for (var i = 0; i < c.length; i++) {
        var cell = c[i];
        if (cell.colSpan > 1) {
            // BUG FIX: the original recursed with the undefined identifier
            // `headerArr`, which threw a ReferenceError; pass `rows` through.
            arr = arr.concat(checkCellColSpan(table, rows, row++));
        } else {
            if (table.tHead.length == 1 || (cell.rowSpan > 1 || !r[row + 1])) {
                arr.push(cell);
            }
        }
    }
    return arr;
};
// True when the header cell's $.metadata marks it unsortable (sorter: false).
function checkHeaderMetadata(cell) {
    if (($.metadata) && ($(cell).metadata().sorter === false)) {
        return true;
    };
    return false;
}
// True when the per-column config explicitly disables sorting (sorter: false).
function checkHeaderOptions(table, i) {
    var header = table.config.headers[i];
    return !!(header && header.sorter === false);
}
// Return the column's forced sort order (lockedOrder) when configured, else false.
function checkHeaderOptionsSortingLocked(table, i) {
    var header = table.config.headers[i];
    if (header && header.lockedOrder) {
        return header.lockedOrder;
    }
    return false;
}
// Run format() of every widget listed in config.widgets against the table.
function applyWidget(table) {
    var c = table.config.widgets;
    var l = c.length;
    for (var i = 0; i < l; i++) {
        getWidgetById(c[i]).format(table);
    }
}
// Find a registered widget by id (case-insensitive); undefined when missing.
function getWidgetById(name) {
    var wanted = name.toLowerCase();
    for (var idx = 0; idx < widgets.length; idx++) {
        if (widgets[idx].id.toLowerCase() == wanted) {
            return widgets[idx];
        }
    }
};
// Normalize a sort-order value to 0 (asc) or 1 (desc); accepts the strings
// "asc"/"desc" or the numbers 0/1.
function formatSortingOrder(v) {
    // BUG FIX: typeof yields the lowercase string "number"; the original
    // compared against "Number", so a numeric value always fell into the
    // string branch and threw on v.toLowerCase().
    if (typeof v != "number") {
        return (v.toLowerCase() == "desc") ? 1 : 0;
    } else {
        return (v == 1) ? 1 : 0;
    }
}
// True when v appears as the column index (first element) of any
// [column, order] pair in the sort list a.
function isValueInArray(v, a) {
    var idx = a.length;
    while (idx--) {
        if (a[idx][0] == v) {
            return true;
        }
    }
    return false;
}
// Clear both sort-direction classes from all headers, then re-apply the
// direction class for each column in the active sort list.
function setHeadersCss(table, $headers, list, css) {
    // remove all header information
    $headers.removeClass(css[0]).removeClass(css[1]);
    var h = [];
    $headers.each(function (offset) {
        if (!this.sortDisabled) {
            h[this.column] = $(this);
        }
    });
    var l = list.length;
    for (var i = 0; i < l; i++) {
        // list[i] is [columnIndex, direction]; css is [cssDesc, cssAsc]
        // indexed by the direction value
        h[list[i][0]].addClass(css[list[i][1]]);
    }
}
// When widthFixed is on, freeze the current column widths via a <colgroup>
// so the layout stays stable while rows are swapped (e.g. by the pager).
// NOTE(review): the $headers parameter is unused and the only caller in
// this file passes a single argument.
function fixColumnWidth(table, $headers) {
    var c = table.config;
    if (c.widthFixed) {
        var colgroup = $('<colgroup>');
        $("tr:first td", table.tBodies[0]).each(function () {
            colgroup.append($('<col>').css('width', $(this).width()));
        });
        $(table).prepend(colgroup);
    };
}
// Sync each header cell's click counter with an externally supplied sort
// list (used by the "sorton" event handler).
function updateHeaderSortCount(table, sortList) {
    var headerList = table.config.headerList;
    for (var idx = 0; idx < sortList.length; idx++) {
        var pair = sortList[idx];
        var header = headerList[pair[0]];
        // the next click should toggle away from the requested order
        header.count = pair[1] + 1;
    }
}
/* sorting methods */
// Build (as source text) and eval a comparator that chains one comparison
// per [column, direction] pair, using the original row position stored in
// the last normalized column as the final tie-breaker, then sort
// cache.normalized in place with it.
function multisort(table, sortList, cache) {
    if (table.config.debug) {
        var sortTime = new Date();
    }
    var dynamicExp = "var sortWrapper = function(a,b) {",
        l = sortList.length;
    for (var i = 0; i < l; i++) {
        var c = sortList[i][0];
        var order = sortList[i][1];
        // pick the text or numeric comparison snippet for this column
        var s = (table.config.parsers[c].type == "text") ? ((order == 0) ? makeSortFunction("text", "asc", c) : makeSortFunction("text", "desc", c)) : ((order == 0) ? makeSortFunction("numeric", "asc", c) : makeSortFunction("numeric", "desc", c));
        var e = "e" + i;
        dynamicExp += "var " + e + " = " + s;
        dynamicExp += "if(" + e + ") { return " + e + "; } ";
        dynamicExp += "else { ";
    }
    // if value is the same keep original order
    var orgOrderCol = cache.normalized[0].length - 1;
    dynamicExp += "return a[" + orgOrderCol + "]-b[" + orgOrderCol + "];";
    for (var i = 0; i < l; i++) {
        dynamicExp += "}; ";
    }
    dynamicExp += "return 0; ";
    dynamicExp += "}; ";
    if (table.config.debug) {
        benchmark("Evaling expression:" + dynamicExp, new Date());
    }
    // defines sortWrapper in this scope
    eval(dynamicExp);
    cache.normalized.sort(sortWrapper);
    if (table.config.debug) {
        benchmark("Sorting on " + sortList.toString() + " and dir " + order + " time:", sortTime);
    }
    return cache;
};
// Return the source text of a single-column comparison used by the eval'd
// comparator in multisort(). Per the generated expressions, a null value
// compares as +Infinity on the `a` side and -Infinity on the `b` side.
function makeSortFunction(type, direction, index) {
    var a = "a[" + index + "]",
        b = "b[" + index + "]";
    if (type == 'text' && direction == 'asc') {
        return "(" + a + " == " + b + " ? 0 : (" + a + " === null ? Number.POSITIVE_INFINITY : (" + b + " === null ? Number.NEGATIVE_INFINITY : (" + a + " < " + b + ") ? -1 : 1 )));";
    } else if (type == 'text' && direction == 'desc') {
        return "(" + a + " == " + b + " ? 0 : (" + a + " === null ? Number.POSITIVE_INFINITY : (" + b + " === null ? Number.NEGATIVE_INFINITY : (" + b + " < " + a + ") ? -1 : 1 )));";
    } else if (type == 'numeric' && direction == 'asc') {
        return "(" + a + " === null && " + b + " === null) ? 0 :(" + a + " === null ? Number.POSITIVE_INFINITY : (" + b + " === null ? Number.NEGATIVE_INFINITY : " + a + " - " + b + "));";
    } else if (type == 'numeric' && direction == 'desc') {
        return "(" + a + " === null && " + b + " === null) ? 0 :(" + a + " === null ? Number.POSITIVE_INFINITY : (" + b + " === null ? Number.NEGATIVE_INFINITY : " + b + " - " + a + "));";
    }
};
// Legacy comparison-snippet factories. NOTE(review): nothing in this file
// calls them any more — multisort() uses makeSortFunction — they appear to
// be kept for backwards compatibility only.
function makeSortText(i) {
    return "((a[" + i + "] < b[" + i + "]) ? -1 : ((a[" + i + "] > b[" + i + "]) ? 1 : 0));";
};
function makeSortTextDesc(i) {
    return "((b[" + i + "] < a[" + i + "]) ? -1 : ((b[" + i + "] > a[" + i + "]) ? 1 : 0));";
};
function makeSortNumeric(i) {
    return "a[" + i + "]-b[" + i + "];";
};
function makeSortNumericDesc(i) {
    return "b[" + i + "]-a[" + i + "];";
};
// Direct comparator helpers. NOTE(review): sortText/sortTextDesc reference
// an identifier `table` that is not in scope here — calling them would
// throw a ReferenceError when sortLocaleCompare is consulted; they look
// like dead code superseded by makeSortFunction. Confirm before removing.
function sortText(a, b) {
    if (table.config.sortLocaleCompare) return a.localeCompare(b);
    return ((a < b) ? -1 : ((a > b) ? 1 : 0));
};
function sortTextDesc(a, b) {
    if (table.config.sortLocaleCompare) return b.localeCompare(a);
    return ((b < a) ? -1 : ((b > a) ? 1 : 0));
};
function sortNumeric(a, b) {
    return a - b;
};
function sortNumericDesc(a, b) {
    return b - a;
};
// Return the parser type ("text"/"numeric") cached for column i.
function getCachedSortType(parsers, i) {
    return parsers[i].type;
}; /* public methods */
// Plugin entry point: for every matched table, builds the header/parse/row
// caches, wires the header click handler (with shift-click multi-sort) and
// the custom events "update", "updateCell", "sorton", "appendCache",
// "applyWidgetId" and "applyWidgets".
this.construct = function (settings) {
    return this.each(function () {
        // if no thead or tbody quit.
        if (!this.tHead || !this.tBodies) return;
        // declare
        var $this, $document, $headers, cache, config, shiftDown = 0,
            sortOrder;
        // new blank config object
        this.config = {};
        // merge and extend.
        config = $.extend(this.config, $.tablesorter.defaults, settings);
        // store common expression for speed
        $this = $(this);
        // save the settings so they can be read back via $.data
        $.data(this, "tablesorter", config);
        // build headers
        $headers = buildHeaders(this);
        // try to auto detect column type, and store in tables config
        this.config.parsers = buildParserCache(this, $headers);
        // build the cache for the tbody cells
        cache = buildCache(this);
        // get the css class names, could be done else where.
        var sortCSS = [config.cssDesc, config.cssAsc];
        // fixate columns if the user supplies the fixedWidth option
        fixColumnWidth(this);
        // apply event handling to headers
        // this is too big, perhaps break it out?
        $headers.click(
            function (e) {
                var totalRows = ($this[0].tBodies[0] && $this[0].tBodies[0].rows.length) || 0;
                if (!this.sortDisabled && totalRows > 0) {
                    // Only call sortStart if sorting is enabled.
                    $this.trigger("sortStart");
                    // store exp, for speed
                    var $cell = $(this);
                    // get current column index
                    var i = this.column;
                    // get current column sort order
                    this.order = this.count++ % 2;
                    // always sort on the locked order.
                    if(this.lockedOrder) this.order = this.lockedOrder;
                    // user only wants to sort on one column
                    if (!e[config.sortMultiSortKey]) {
                        // flush the sort list
                        config.sortList = [];
                        if (config.sortForce != null) {
                            // prepend the forced sort rules
                            var a = config.sortForce;
                            for (var j = 0; j < a.length; j++) {
                                if (a[j][0] != i) {
                                    config.sortList.push(a[j]);
                                }
                            }
                        }
                        // add column to sort list
                        config.sortList.push([i, this.order]);
                        // multi column sorting
                    } else {
                        // the user has clicked on an already sorted column.
                        if (isValueInArray(i, config.sortList)) {
                            // reverse the sorting direction for that column.
                            for (var j = 0; j < config.sortList.length; j++) {
                                var s = config.sortList[j],
                                    o = config.headerList[s[0]];
                                if (s[0] == i) {
                                    o.count = s[1];
                                    o.count++;
                                    s[1] = o.count % 2;
                                }
                            }
                        } else {
                            // add column to sort list array
                            config.sortList.push([i, this.order]);
                        }
                    };
                    setTimeout(function () {
                        // set css for headers
                        setHeadersCss($this[0], $headers, config.sortList, sortCSS);
                        appendToTable(
                            $this[0], multisort(
                                $this[0], config.sortList, cache)
                        );
                    }, 1);
                    // stop normal event by returning false
                    return false;
                }
                // cancel selection
            }).mousedown(function () {
                if (config.cancelSelection) {
                    this.onselectstart = function () {
                        return false
                    };
                    return false;
                }
            });
        // apply easy methods that trigger bound events
        $this.bind("update", function () {
            var me = this;
            setTimeout(function () {
                // rebuild parsers.
                me.config.parsers = buildParserCache(
                    me, $headers);
                // rebuild the cache map
                cache = buildCache(me);
            }, 1);
        }).bind("updateCell", function (e, cell) {
            var config = this.config;
            // get position from the dom.
            var pos = [(cell.parentNode.rowIndex - 1), cell.cellIndex];
            // update cache
            cache.normalized[pos[0]][pos[1]] = config.parsers[pos[1]].format(
                getElementText(config, cell), cell);
        }).bind("sorton", function (e, list) {
            $(this).trigger("sortStart");
            config.sortList = list;
            // update and store the sortlist
            var sortList = config.sortList;
            // update header count index
            updateHeaderSortCount(this, sortList);
            // set css for headers
            setHeadersCss(this, $headers, sortList, sortCSS);
            // sort the table and append it to the dom
            appendToTable(this, multisort(this, sortList, cache));
        }).bind("appendCache", function () {
            appendToTable(this, cache);
        }).bind("applyWidgetId", function (e, id) {
            getWidgetById(id).format(this);
        }).bind("applyWidgets", function () {
            // apply widgets
            applyWidget(this);
        });
        // a sort list may also come from the table's $.metadata
        if ($.metadata && ($(this).metadata() && $(this).metadata().sortlist)) {
            config.sortList = $(this).metadata().sortlist;
        }
        // if user has supplied a sort list to constructor.
        if (config.sortList.length > 0) {
            $this.trigger("sorton", [config.sortList]);
        }
        // apply widgets
        applyWidget(this);
    });
};
// Register a new cell parser unless one with the same id (case-insensitive)
// is already registered.
this.addParser = function (parser) {
    var newId = parser.id.toLowerCase();
    var isUnique = true;
    for (var idx = 0; idx < parsers.length; idx++) {
        if (parsers[idx].id.toLowerCase() == newId) {
            isUnique = false;
        }
    }
    if (isUnique) {
        parsers.push(parser);
    }
};
// Register a table widget (an object with an id and a format(table) hook).
this.addWidget = function (widget) {
    widgets.push(widget);
};
// Parse a cell string as a float; non-numeric input sorts as 0.
this.formatFloat = function (s) {
    var i = parseFloat(s);
    return (isNaN(i)) ? 0 : i;
};
// Parse a cell string as a decimal integer; non-numeric input sorts as 0.
this.formatInt = function (s) {
    // BUG FIX: always pass an explicit radix — without it, legacy engines
    // parsed leading-zero strings ("08") as octal and "0x.." as hex.
    var i = parseInt(s, 10);
    return (isNaN(i)) ? 0 : i;
};
// True when s, after stripping "," "." and "'" separators, is an optionally
// signed string of digits (the empty string also matches).
this.isDigit = function (s, config) {
    // replace all unwanted chars and match.
    return /^[-+]?\d*$/.test($.trim(s.replace(/[,.']/g, '')));
};
// Empty the first tbody. NOTE(review): $.browser was removed in jQuery 1.9,
// so on modern jQuery accessing $.browser.msie throws unless the migrate
// plugin restores it — confirm which jQuery version ships with this file.
this.clearTableBody = function (table) {
    if ($.browser.msie) {
        // IE cannot assign tbody.innerHTML; remove children one by one.
        function empty() {
            while (this.firstChild)
            this.removeChild(this.firstChild);
        }
        empty.apply(table.tBodies[0]);
    } else {
        table.tBodies[0].innerHTML = "";
    }
};
}
});
// extend plugin scope: expose $(...).tablesorter(settings) as the jQuery
// plugin entry point
$.fn.extend({
    tablesorter: $.tablesorter.construct
});
// make shortcut
var ts = $.tablesorter;
// add default parsers
// generic text parser: registered first so it is the fallback (is() always
// matches); detection skips index 0 and only returns it as a last resort
ts.addParser({
    id: "text",
    is: function (s) {
        return true;
    }, format: function (s) {
        return $.trim(s); // CW PATCH: lowercasing decision taken in the server
    }, type: "text"
});
// CW PATCH: parser for boolean cell text ("true"/"false")
ts.addParser({
    id: 'boolean',
    is: function (s) {
        return (s == "true" || s == "false");
    },
    format: function (s) {
        // BUG FIX: the original tested `if (s)`, which is true for ANY
        // non-empty string, so both "true" and "false" mapped to "0".
        // Compare against the actual text ("true" sorts before "false").
        if (s == "true") { return "0"; } else { return "1"; }
    },
    type: 'boolean'});
// numeric parser for plain (possibly signed / separator-grouped) digits
ts.addParser({
    id: "digit",
    is: function (s, table) {
        var c = table.config;
        return $.tablesorter.isDigit(s, c);
    }, format: function (s) {
        return $.tablesorter.formatFloat(s);
    }, type: "numeric"
});
// currency parser: strips the currency symbols before numeric parsing
ts.addParser({
    id: "currency",
    is: function (s) {
        return /^[£$€?.]/.test(s);
    }, format: function (s) {
        return $.tablesorter.formatFloat(s.replace(new RegExp(/[£$€]/g), ""));
    }, type: "numeric"
});
// IPv4 parser: requires 2-3 digits per octet; sorts by zero-padding each
// octet to three digits and concatenating them into a single number
ts.addParser({
    id: "ipAddress",
    is: function (s) {
        return /^\d{2,3}[\.]\d{2,3}[\.]\d{2,3}[\.]\d{2,3}$/.test(s);
    }, format: function (s) {
        var a = s.split("."),
            r = "",
            l = a.length;
        for (var i = 0; i < l; i++) {
            var item = a[i];
            if (item.length == 2) {
                r += "0" + item;
            } else {
                r += item;
            }
        }
        return $.tablesorter.formatFloat(r);
    }, type: "numeric"
});
// URL parser: sorts URLs by the part after the scheme
ts.addParser({
    id: "url",
    is: function (s) {
        // BUG FIX: the original pattern /^(https?|ftp|file):\/\/$/ was
        // anchored with "$" right after the scheme, so it only matched the
        // bare scheme string and never detected a real URL.
        return /^(https?|ftp|file):\/\//.test(s);
    }, format: function (s) {
        return jQuery.trim(s.replace(new RegExp(/(https?|ftp|file):\/\//), ''));
    }, type: "text"
});
// ISO date parser (yyyy-mm-dd or yyyy/mm/dd) → epoch milliseconds
ts.addParser({
    id: "isoDate",
    is: function (s) {
        return /^\d{4}[\/-]\d{1,2}[\/-]\d{1,2}$/.test(s);
    }, format: function (s) {
        return $.tablesorter.formatFloat((s != "") ? new Date(s.replace(
            new RegExp(/-/g), "/")).getTime() : "0");
    }, type: "numeric"
});
// percent parser: strips the trailing % and sorts numerically
ts.addParser({
    id: "percent",
    is: function (s) {
        return /\%$/.test($.trim(s));
    }, format: function (s) {
        return $.tablesorter.formatFloat(s.replace(new RegExp(/%/g), ""));
    }, type: "numeric"
});
// long US date parser (e.g. "January 1, 2010 12:30 PM") → epoch ms
ts.addParser({
    id: "usLongDate",
    is: function (s) {
        return s.match(new RegExp(/^[A-Za-z]{3,10}\.? [0-9]{1,2}, ([0-9]{4}|'?[0-9]{2}) (([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(AM|PM)))$/));
    }, format: function (s) {
        return $.tablesorter.formatFloat(new Date(s).getTime());
    }, type: "numeric"
});
// short date parser; config.dateFormat ("us", "uk", "dd/mm/yy", "dd-mm-yy")
// decides how the fields are rearranged before Date parsing
ts.addParser({
    id: "shortDate",
    is: function (s) {
        return /\d{1,2}[\/\-]\d{1,2}[\/\-]\d{2,4}/.test(s);
    }, format: function (s, table) {
        var c = table.config;
        s = s.replace(/\-/g, "/");
        if (c.dateFormat == "us") {
            // reformat the string in ISO format
            s = s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/, "$3/$1/$2");
        } else if (c.dateFormat == "uk") {
            // reformat the string in ISO format
            s = s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{4})/, "$3/$2/$1");
        } else if (c.dateFormat == "dd/mm/yy" || c.dateFormat == "dd-mm-yy") {
            s = s.replace(/(\d{1,2})[\/\-](\d{1,2})[\/\-](\d{2})/, "$1/$2/$3");
        }
        return $.tablesorter.formatFloat(new Date(s).getTime());
    }, type: "numeric"
});
// time-of-day parser (hh:mm with optional am/pm), mapped onto 2000/01/01
ts.addParser({
    id: "time",
    is: function (s) {
        return /^(([0-2]?[0-9]:[0-5][0-9])|([0-1]?[0-9]:[0-5][0-9]\s(am|pm)))$/.test(s);
    }, format: function (s) {
        return $.tablesorter.formatFloat(new Date("2000/01/01 " + s).getTime());
    }, type: "numeric"
});
// metadata parser: never auto-detected (is() returns false); must be set
// explicitly and reads the sort value from the cell's $.metadata (key
// configurable via config.parserMetadataName, default "sortValue")
ts.addParser({
    id: "metadata",
    is: function (s) {
        return false;
    }, format: function (s, table, cell) {
        var c = table.config,
            p = (!c.parserMetadataName) ? 'sortValue' : c.parserMetadataName;
        return $(cell).metadata()[p];
    }, type: "numeric"
});
// add default widgets
// zebra widget: re-stripe visible rows with the even/odd classes after
// every sort; child rows inherit their parent row's stripe
ts.addWidget({
    id: "zebra",
    format: function (table) {
        if (table.config.debug) {
            var time = new Date();
        }
        var $tr, row = -1,
            odd;
        // loop through the visible rows
        $("tr:visible", table.tBodies[0]).each(function (i) {
            $tr = $(this);
            // style children rows the same way the parent row was styled
            if (!$tr.hasClass(table.config.cssChildRow)) row++;
            odd = (row % 2 == 0);
            $tr.removeClass(
                table.config.widgetZebra.css[odd ? 0 : 1]).addClass(
                table.config.widgetZebra.css[odd ? 1 : 0])
        });
        if (table.config.debug) {
            $.tablesorter.benchmark("Applying Zebra widget", time);
        }
    }
});
})(jQuery); | PypiClean |
/cdktf_cdktf_provider_launchdarkly-2.0.1-py3-none-any.whl/cdktf_cdktf_provider_launchdarkly/audit_log_subscription/__init__.py | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class AuditLogSubscription(
    _cdktf_9a9027ec.TerraformResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-launchdarkly.auditLogSubscription.AuditLogSubscription",
):
    '''Represents a {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription launchdarkly_audit_log_subscription}.'''
    # NOTE: jsii-generated binding. Every method and property below delegates
    # to the jsii kernel (jsii.create / jsii.invoke / jsii.get / jsii.set);
    # no resource logic lives in this Python layer.
    def __init__(
        self,
        scope: _constructs_77d1e7e8.Construct,
        id_: builtins.str,
        *,
        config: typing.Mapping[builtins.str, builtins.str],
        integration_key: builtins.str,
        name: builtins.str,
        on: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
        statements: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["AuditLogSubscriptionStatements", typing.Dict[builtins.str, typing.Any]]]],
        id: typing.Optional[builtins.str] = None,
        tags: typing.Optional[typing.Sequence[builtins.str]] = None,
        connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
        count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
        depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
        for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
        provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
    ) -> None:
        '''Create a new {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription launchdarkly_audit_log_subscription} Resource.
        :param scope: The scope in which to define this construct.
        :param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
        :param config: The set of configuration fields corresponding to the value defined for ``integration_key``. Refer to the ``formVariables`` field in the corresponding ``integrations/<integration_key>/manifest.json`` file in `this repo <https://github.com/launchdarkly/integration-framework/tree/master/integrations>`_ for a full list of fields for the integration you wish to configure. **IMPORTANT**: Please note that Terraform will only accept these in snake case, regardless of the case shown in the manifest. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#config AuditLogSubscription#config}
        :param integration_key: The integration key. Supported integration keys are ``cloudtrail``, ``datadog``, ``dynatrace``, ``elastic``, ``grafana``, ``honeycomb``, ``logdna``, ``msteams``, ``new-relic-apm``, ``signalfx``, ``slack``, and ``splunk``. A change in this field will force the destruction of the existing resource and the creation of a new one. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#integration_key AuditLogSubscription#integration_key}
        :param name: A human-friendly name for your audit log subscription viewable from within the LaunchDarkly Integrations page. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#name AuditLogSubscription#name}
        :param on: Whether or not you want your subscription enabled, i.e. to actively send events. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#on AuditLogSubscription#on}
        :param statements: statements block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#statements AuditLogSubscription#statements}
        :param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#id AuditLogSubscription#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param tags: Tags associated with your resource. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#tags AuditLogSubscription#tags}
        :param connection:
        :param count:
        :param depends_on:
        :param for_each:
        :param lifecycle:
        :param provider:
        :param provisioners:
        '''
        if __debug__:
            # Runtime argument validation driven by the generated
            # typechecking stub (defined at module bottom).
            type_hints = typing.get_type_hints(_typecheckingstub__411f1d0cc53fb5844b8519f71316b59e6c298594e900f01df5aa9adb42d72a9a)
            check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
            check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
        # Bundle all keyword arguments into the generated config struct and
        # hand the triple (scope, id, config) to the jsii kernel.
        config_ = AuditLogSubscriptionConfig(
            config=config,
            integration_key=integration_key,
            name=name,
            on=on,
            statements=statements,
            id=id,
            tags=tags,
            connection=connection,
            count=count,
            depends_on=depends_on,
            for_each=for_each,
            lifecycle=lifecycle,
            provider=provider,
            provisioners=provisioners,
        )
        jsii.create(self.__class__, self, [scope, id_, config_])
    # Forwards a full replacement statements list to the jsii "putStatements" method.
    @jsii.member(jsii_name="putStatements")
    def put_statements(
        self,
        value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["AuditLogSubscriptionStatements", typing.Dict[builtins.str, typing.Any]]]],
    ) -> None:
        '''
        :param value: -
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__b09593dc48451cac7e1de804b7017af1f32c84353b22b5c4c0e47132d112d4dc)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        return typing.cast(None, jsii.invoke(self, "putStatements", [value]))
    # reset_* helpers unset an optional attribute via the corresponding
    # jsii "reset…" method.
    @jsii.member(jsii_name="resetId")
    def reset_id(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetId", []))
    @jsii.member(jsii_name="resetTags")
    def reset_tags(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetTags", []))
    @jsii.member(jsii_name="synthesizeAttributes")
    def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
    # Static Terraform resource type name, read from the jsii class.
    @jsii.python.classproperty
    @jsii.member(jsii_name="tfResourceType")
    def TF_RESOURCE_TYPE(cls) -> builtins.str:
        return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
    @builtins.property
    @jsii.member(jsii_name="statements")
    def statements(self) -> "AuditLogSubscriptionStatementsList":
        return typing.cast("AuditLogSubscriptionStatementsList", jsii.get(self, "statements"))
    # The "*Input" properties return the raw configured value (None when
    # unset), per cdktf convention, as opposed to the resolved values
    # returned by the plain properties further below.
    @builtins.property
    @jsii.member(jsii_name="configInput")
    def config_input(
        self,
    ) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
        return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], jsii.get(self, "configInput"))
    @builtins.property
    @jsii.member(jsii_name="idInput")
    def id_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
    @builtins.property
    @jsii.member(jsii_name="integrationKeyInput")
    def integration_key_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "integrationKeyInput"))
    @builtins.property
    @jsii.member(jsii_name="nameInput")
    def name_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))
    @builtins.property
    @jsii.member(jsii_name="onInput")
    def on_input(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
        return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "onInput"))
    @builtins.property
    @jsii.member(jsii_name="statementsInput")
    def statements_input(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["AuditLogSubscriptionStatements"]]]:
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["AuditLogSubscriptionStatements"]]], jsii.get(self, "statementsInput"))
    @builtins.property
    @jsii.member(jsii_name="tagsInput")
    def tags_input(self) -> typing.Optional[typing.List[builtins.str]]:
        return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "tagsInput"))
    # Read/write resource attributes; setters validate the value in debug
    # builds, then push it to the jsii kernel.
    @builtins.property
    @jsii.member(jsii_name="config")
    def config(self) -> typing.Mapping[builtins.str, builtins.str]:
        return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "config"))
    @config.setter
    def config(self, value: typing.Mapping[builtins.str, builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__bd93333e753af3e942674e8d35eb2bd562b2c17b91987e17175e5afe70e988de)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "config", value)
    @builtins.property
    @jsii.member(jsii_name="id")
    def id(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "id"))
    @id.setter
    def id(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__d2d84a7846d6946c2c7e9910b45f406f96501fb19190e96f24154d42eafb0d44)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "id", value)
    @builtins.property
    @jsii.member(jsii_name="integrationKey")
    def integration_key(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "integrationKey"))
    @integration_key.setter
    def integration_key(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__56df65820b16507c6d5b07dc336b9299278931508a997f928c79bb7b2c8eb391)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "integrationKey", value)
    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "name"))
    @name.setter
    def name(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__2d44d580c2023e22496c87a2ff72b8b489a25e7b4bb129e87f94d47b15ccf22d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "name", value)
    @builtins.property
    @jsii.member(jsii_name="on")
    def on(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
        return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "on"))
    @on.setter
    def on(
        self,
        value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__8677ea16381901f65d55b725f90b48f7b45956a0f95d13506f39ec151ce8ce39)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "on", value)
    @builtins.property
    @jsii.member(jsii_name="tags")
    def tags(self) -> typing.List[builtins.str]:
        return typing.cast(typing.List[builtins.str], jsii.get(self, "tags"))
    @tags.setter
    def tags(self, value: typing.List[builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__05ba6d47af38d7ff21f51dabc547973ed76fd65bf7e786ae53a1ca9f9637663f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "tags", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-launchdarkly.auditLogSubscription.AuditLogSubscriptionConfig",
    jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
    name_mapping={
        "connection": "connection",
        "count": "count",
        "depends_on": "dependsOn",
        "for_each": "forEach",
        "lifecycle": "lifecycle",
        "provider": "provider",
        "provisioners": "provisioners",
        "config": "config",
        "integration_key": "integrationKey",
        "name": "name",
        "on": "on",
        "statements": "statements",
        "id": "id",
        "tags": "tags",
    },
)
class AuditLogSubscriptionConfig(_cdktf_9a9027ec.TerraformMetaArguments):
    '''jsii-generated configuration struct for :class:`AuditLogSubscription`.
    Stores all constructor arguments in an internal ``_values`` mapping;
    the properties below read back from that mapping.
    '''
    def __init__(
        self,
        *,
        connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
        count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
        depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
        for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
        provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
        config: typing.Mapping[builtins.str, builtins.str],
        integration_key: builtins.str,
        name: builtins.str,
        on: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
        statements: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["AuditLogSubscriptionStatements", typing.Dict[builtins.str, typing.Any]]]],
        id: typing.Optional[builtins.str] = None,
        tags: typing.Optional[typing.Sequence[builtins.str]] = None,
    ) -> None:
        '''
        :param connection:
        :param count:
        :param depends_on:
        :param for_each:
        :param lifecycle:
        :param provider:
        :param provisioners:
        :param config: The set of configuration fields corresponding to the value defined for ``integration_key``. Refer to the ``formVariables`` field in the corresponding ``integrations/<integration_key>/manifest.json`` file in `this repo <https://github.com/launchdarkly/integration-framework/tree/master/integrations>`_ for a full list of fields for the integration you wish to configure. **IMPORTANT**: Please note that Terraform will only accept these in snake case, regardless of the case shown in the manifest. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#config AuditLogSubscription#config}
        :param integration_key: The integration key. Supported integration keys are ``cloudtrail``, ``datadog``, ``dynatrace``, ``elastic``, ``grafana``, ``honeycomb``, ``logdna``, ``msteams``, ``new-relic-apm``, ``signalfx``, ``slack``, and ``splunk``. A change in this field will force the destruction of the existing resource and the creation of a new one. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#integration_key AuditLogSubscription#integration_key}
        :param name: A human-friendly name for your audit log subscription viewable from within the LaunchDarkly Integrations page. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#name AuditLogSubscription#name}
        :param on: Whether or not you want your subscription enabled, i.e. to actively send events. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#on AuditLogSubscription#on}
        :param statements: statements block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#statements AuditLogSubscription#statements}
        :param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#id AuditLogSubscription#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param tags: Tags associated with your resource. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#tags AuditLogSubscription#tags}
        '''
        # Accept a plain dict for lifecycle and coerce it to the struct type.
        if isinstance(lifecycle, dict):
            lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
        if __debug__:
            # Runtime argument validation via the generated typechecking stub.
            type_hints = typing.get_type_hints(_typecheckingstub__ef1e41a2cdac9756b56ce790ba595e5e76b13fcfd9f0a7f2d71efe562ae45ed6)
            check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
            check_type(argname="argument count", value=count, expected_type=type_hints["count"])
            check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
            check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
            check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
            check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
            check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
            check_type(argname="argument config", value=config, expected_type=type_hints["config"])
            check_type(argname="argument integration_key", value=integration_key, expected_type=type_hints["integration_key"])
            check_type(argname="argument name", value=name, expected_type=type_hints["name"])
            check_type(argname="argument on", value=on, expected_type=type_hints["on"])
            check_type(argname="argument statements", value=statements, expected_type=type_hints["statements"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
            check_type(argname="argument tags", value=tags, expected_type=type_hints["tags"])
        # Required properties are always stored.
        self._values: typing.Dict[builtins.str, typing.Any] = {
            "config": config,
            "integration_key": integration_key,
            "name": name,
            "on": on,
            "statements": statements,
        }
        # Optional properties are stored only when explicitly provided.
        if connection is not None:
            self._values["connection"] = connection
        if count is not None:
            self._values["count"] = count
        if depends_on is not None:
            self._values["depends_on"] = depends_on
        if for_each is not None:
            self._values["for_each"] = for_each
        if lifecycle is not None:
            self._values["lifecycle"] = lifecycle
        if provider is not None:
            self._values["provider"] = provider
        if provisioners is not None:
            self._values["provisioners"] = provisioners
        if id is not None:
            self._values["id"] = id
        if tags is not None:
            self._values["tags"] = tags
    # --- cdktf meta-arguments (pass-throughs from TerraformMetaArguments) ---
    @builtins.property
    def connection(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("connection")
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)
    @builtins.property
    def count(
        self,
    ) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("count")
        return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)
    @builtins.property
    def depends_on(
        self,
    ) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("depends_on")
        return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)
    @builtins.property
    def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
        '''
        :stability: experimental
        '''
        result = self._values.get("for_each")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)
    @builtins.property
    def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
        '''
        :stability: experimental
        '''
        result = self._values.get("lifecycle")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)
    @builtins.property
    def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provider")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)
    @builtins.property
    def provisioners(
        self,
    ) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provisioners")
        return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)
    # --- resource-specific properties; required ones assert presence ---
    @builtins.property
    def config(self) -> typing.Mapping[builtins.str, builtins.str]:
        '''The set of configuration fields corresponding to the value defined for ``integration_key``.
        Refer to the ``formVariables`` field in the corresponding ``integrations/<integration_key>/manifest.json`` file in `this repo <https://github.com/launchdarkly/integration-framework/tree/master/integrations>`_ for a full list of fields for the integration you wish to configure. **IMPORTANT**: Please note that Terraform will only accept these in snake case, regardless of the case shown in the manifest.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#config AuditLogSubscription#config}
        '''
        result = self._values.get("config")
        assert result is not None, "Required property 'config' is missing"
        return typing.cast(typing.Mapping[builtins.str, builtins.str], result)
    @builtins.property
    def integration_key(self) -> builtins.str:
        '''The integration key.
        Supported integration keys are ``cloudtrail``, ``datadog``, ``dynatrace``, ``elastic``, ``grafana``, ``honeycomb``, ``logdna``, ``msteams``, ``new-relic-apm``, ``signalfx``, ``slack``, and ``splunk``. A change in this field will force the destruction of the existing resource and the creation of a new one.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#integration_key AuditLogSubscription#integration_key}
        '''
        result = self._values.get("integration_key")
        assert result is not None, "Required property 'integration_key' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def name(self) -> builtins.str:
        '''A human-friendly name for your audit log subscription viewable from within the LaunchDarkly Integrations page.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#name AuditLogSubscription#name}
        '''
        result = self._values.get("name")
        assert result is not None, "Required property 'name' is missing"
        return typing.cast(builtins.str, result)
    @builtins.property
    def on(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
        '''Whether or not you want your subscription enabled, i.e. to actively send events.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#on AuditLogSubscription#on}
        '''
        result = self._values.get("on")
        assert result is not None, "Required property 'on' is missing"
        return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], result)
    @builtins.property
    def statements(
        self,
    ) -> typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["AuditLogSubscriptionStatements"]]:
        '''statements block.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#statements AuditLogSubscription#statements}
        '''
        result = self._values.get("statements")
        assert result is not None, "Required property 'statements' is missing"
        return typing.cast(typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["AuditLogSubscriptionStatements"]], result)
    @builtins.property
    def id(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#id AuditLogSubscription#id}.
        Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
        If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        '''
        result = self._values.get("id")
        return typing.cast(typing.Optional[builtins.str], result)
    @builtins.property
    def tags(self) -> typing.Optional[typing.List[builtins.str]]:
        '''Tags associated with your resource.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#tags AuditLogSubscription#tags}
        '''
        result = self._values.get("tags")
        return typing.cast(typing.Optional[typing.List[builtins.str]], result)
    # Value-based equality: two structs compare equal iff their stored values match.
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values
    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)
    def __repr__(self) -> str:
        return "AuditLogSubscriptionConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
@jsii.data_type(
    jsii_type="@cdktf/provider-launchdarkly.auditLogSubscription.AuditLogSubscriptionStatements",
    jsii_struct_bases=[],
    name_mapping={
        "effect": "effect",
        "actions": "actions",
        "not_actions": "notActions",
        "not_resources": "notResources",
        "resources": "resources",
    },
)
class AuditLogSubscriptionStatements:
    '''A single policy statement of the ``statements`` block.
    ``effect`` is required; ``actions``/``not_actions`` and
    ``resources``/``not_resources`` are the optional specifier lists.
    '''
    def __init__(
        self,
        *,
        effect: builtins.str,
        actions: typing.Optional[typing.Sequence[builtins.str]] = None,
        not_actions: typing.Optional[typing.Sequence[builtins.str]] = None,
        not_resources: typing.Optional[typing.Sequence[builtins.str]] = None,
        resources: typing.Optional[typing.Sequence[builtins.str]] = None,
    ) -> None:
        '''
        :param effect: Either ``allow`` or ``deny``. This argument defines whether the statement allows or denies access to the named resources and actions. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#effect AuditLogSubscription#effect}
        :param actions: The list of action specifiers defining the actions to which the statement applies. Either ``actions`` or ``not_actions`` must be specified. For a list of available actions read `Actions reference <https://docs.launchdarkly.com/home/account-security/custom-roles/actions#actions-reference>`_. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#actions AuditLogSubscription#actions}
        :param not_actions: The list of action specifiers defining the actions to which the statement does not apply. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#not_actions AuditLogSubscription#not_actions}
        :param not_resources: The list of resource specifiers defining the resources to which the statement does not apply. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#not_resources AuditLogSubscription#not_resources}
        :param resources: The list of resource specifiers defining the resources to which the statement applies. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#resources AuditLogSubscription#resources}
        '''
        if __debug__:
            # Validate argument types against the generated stub signature.
            type_hints = typing.get_type_hints(_typecheckingstub__7eafab4833c3c1fd2c0d9c87f5e7f4b213057774377a08fc698b07c04d741b68)
            check_type(argname="argument effect", value=effect, expected_type=type_hints["effect"])
            check_type(argname="argument actions", value=actions, expected_type=type_hints["actions"])
            check_type(argname="argument not_actions", value=not_actions, expected_type=type_hints["not_actions"])
            check_type(argname="argument not_resources", value=not_resources, expected_type=type_hints["not_resources"])
            check_type(argname="argument resources", value=resources, expected_type=type_hints["resources"])
        # Store the required field, then only those optional fields that
        # were explicitly supplied.
        values: typing.Dict[builtins.str, typing.Any] = {"effect": effect}
        maybe = {
            "actions": actions,
            "not_actions": not_actions,
            "not_resources": not_resources,
            "resources": resources,
        }
        values.update({key: val for key, val in maybe.items() if val is not None})
        self._values: typing.Dict[builtins.str, typing.Any] = values
    @builtins.property
    def effect(self) -> builtins.str:
        '''Either ``allow`` or ``deny``; whether the statement allows or denies access to the named resources and actions.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#effect AuditLogSubscription#effect}
        '''
        stored = self._values.get("effect")
        assert stored is not None, "Required property 'effect' is missing"
        return typing.cast(builtins.str, stored)
    @builtins.property
    def actions(self) -> typing.Optional[typing.List[builtins.str]]:
        '''Action specifiers the statement applies to (either ``actions`` or ``not_actions`` must be set).
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#actions AuditLogSubscription#actions}
        '''
        return typing.cast(
            typing.Optional[typing.List[builtins.str]],
            self._values.get("actions"),
        )
    @builtins.property
    def not_actions(self) -> typing.Optional[typing.List[builtins.str]]:
        '''Action specifiers the statement does NOT apply to.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#not_actions AuditLogSubscription#not_actions}
        '''
        return typing.cast(
            typing.Optional[typing.List[builtins.str]],
            self._values.get("not_actions"),
        )
    @builtins.property
    def not_resources(self) -> typing.Optional[typing.List[builtins.str]]:
        '''Resource specifiers the statement does NOT apply to.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#not_resources AuditLogSubscription#not_resources}
        '''
        return typing.cast(
            typing.Optional[typing.List[builtins.str]],
            self._values.get("not_resources"),
        )
    @builtins.property
    def resources(self) -> typing.Optional[typing.List[builtins.str]]:
        '''Resource specifiers the statement applies to.
        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/launchdarkly/launchdarkly/2.15.1/docs/resources/audit_log_subscription#resources AuditLogSubscription#resources}
        '''
        return typing.cast(
            typing.Optional[typing.List[builtins.str]],
            self._values.get("resources"),
        )
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Value equality over the stored field mapping.
        return isinstance(rhs, self.__class__) and rhs._values == self._values
    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)
    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return "AuditLogSubscriptionStatements(%s)" % rendered
class AuditLogSubscriptionStatementsList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-launchdarkly.auditLogSubscription.AuditLogSubscriptionStatementsList",
):
    """Generated jsii binding for the list of ``statements`` blocks on an ``AuditLogSubscription``.

    Every accessor below delegates to the jsii kernel (``jsii.get`` / ``jsii.set`` /
    ``jsii.invoke``); no list state is kept on the Python side.
    """

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation only happens in debug mode (skipped under
            # ``python -O``); expected types come from the matching module-level stub.
            type_hints = typing.get_type_hints(_typecheckingstub__4fa849d166c930541bfe6a77a52b6078ca14b7a989c83d9351692dc4b6a8bf32)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "AuditLogSubscriptionStatementsOutputReference":
        '''Return an output reference wrapping the statement at *index*.

        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__cc3cb3e53d2faa60cfe4bdef04110898eb5a0bcbe7727de00eabbddf2d3adf94)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("AuditLogSubscriptionStatementsOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        # Setter mirrors the getter; validation is debug-only.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__27e69a9705c47d54b626c2a39ea258848bafa6aa5cb5fe13201faeee175f77ae)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__187ad5bc76ccfd39bdbac50bbf771cf63991c31f846a503ef587176059078646)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__fc64dc8178e4fe7ebaa1f51ac34965316ec68749d68976f804983f6b9ab36a7b)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[AuditLogSubscriptionStatements]]]:
        # Raw underlying value: either a plain list of statement structs or a
        # not-yet-resolved Terraform token (IResolvable).
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[AuditLogSubscriptionStatements]]], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[AuditLogSubscriptionStatements]]],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__639b60dc88564b507650def1a760f485e0de3656efd2bbe59f501b8004c1d892)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
class AuditLogSubscriptionStatementsOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-launchdarkly.auditLogSubscription.AuditLogSubscriptionStatementsOutputReference",
):
    """Generated jsii binding exposing a single ``statements`` block of an ``AuditLogSubscription``.

    Getters/setters delegate to the jsii kernel; the ``*_input`` properties surface the
    raw configured values, while the ``reset_*`` methods clear optional attributes.
    """

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Debug-only runtime validation; expected types are resolved from the
            # matching module-level type-checking stub.
            type_hints = typing.get_type_hints(_typecheckingstub__f18177997cb8ff7e5f5ef7bf39e4c4651c9132bbfc9a55cf2219f6f731147e64)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @jsii.member(jsii_name="resetActions")
    def reset_actions(self) -> None:
        # Clears the optional ``actions`` attribute back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetActions", []))

    @jsii.member(jsii_name="resetNotActions")
    def reset_not_actions(self) -> None:
        # Clears the optional ``not_actions`` attribute back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetNotActions", []))

    @jsii.member(jsii_name="resetNotResources")
    def reset_not_resources(self) -> None:
        # Clears the optional ``not_resources`` attribute back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetNotResources", []))

    @jsii.member(jsii_name="resetResources")
    def reset_resources(self) -> None:
        # Clears the optional ``resources`` attribute back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetResources", []))

    @builtins.property
    @jsii.member(jsii_name="actionsInput")
    def actions_input(self) -> typing.Optional[typing.List[builtins.str]]:
        # Configured (pre-synthesis) value for ``actions``; ``None`` if unset.
        return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "actionsInput"))

    @builtins.property
    @jsii.member(jsii_name="effectInput")
    def effect_input(self) -> typing.Optional[builtins.str]:
        # Configured (pre-synthesis) value for ``effect``; ``None`` if unset.
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "effectInput"))

    @builtins.property
    @jsii.member(jsii_name="notActionsInput")
    def not_actions_input(self) -> typing.Optional[typing.List[builtins.str]]:
        # Configured (pre-synthesis) value for ``not_actions``; ``None`` if unset.
        return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "notActionsInput"))

    @builtins.property
    @jsii.member(jsii_name="notResourcesInput")
    def not_resources_input(self) -> typing.Optional[typing.List[builtins.str]]:
        # Configured (pre-synthesis) value for ``not_resources``; ``None`` if unset.
        return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "notResourcesInput"))

    @builtins.property
    @jsii.member(jsii_name="resourcesInput")
    def resources_input(self) -> typing.Optional[typing.List[builtins.str]]:
        # Configured (pre-synthesis) value for ``resources``; ``None`` if unset.
        return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "resourcesInput"))

    @builtins.property
    @jsii.member(jsii_name="actions")
    def actions(self) -> typing.List[builtins.str]:
        # Effective ``actions`` value as seen by Terraform.
        return typing.cast(typing.List[builtins.str], jsii.get(self, "actions"))

    @actions.setter
    def actions(self, value: typing.List[builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__fb9ec3d2a4be67c958c24158c461f584ea6cba17fc0d6d72117a4be6d2b2966d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "actions", value)

    @builtins.property
    @jsii.member(jsii_name="effect")
    def effect(self) -> builtins.str:
        # Effective ``effect`` value as seen by Terraform.
        return typing.cast(builtins.str, jsii.get(self, "effect"))

    @effect.setter
    def effect(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__15c33141045f73a7f7d042d4fe02cefc4e2dd959f41010ef36fa6f07dfac2d72)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "effect", value)

    @builtins.property
    @jsii.member(jsii_name="notActions")
    def not_actions(self) -> typing.List[builtins.str]:
        # Effective ``not_actions`` value as seen by Terraform.
        return typing.cast(typing.List[builtins.str], jsii.get(self, "notActions"))

    @not_actions.setter
    def not_actions(self, value: typing.List[builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c6876594a7f9d43e7e929c7da7343d409a02684ba1316402d0af84acb40c8a4a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "notActions", value)

    @builtins.property
    @jsii.member(jsii_name="notResources")
    def not_resources(self) -> typing.List[builtins.str]:
        # Effective ``not_resources`` value as seen by Terraform.
        return typing.cast(typing.List[builtins.str], jsii.get(self, "notResources"))

    @not_resources.setter
    def not_resources(self, value: typing.List[builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ebf1856cf7e4f58c6236309886e41442ba4c836ebd689d63896f83739e0dfc2a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "notResources", value)

    @builtins.property
    @jsii.member(jsii_name="resources")
    def resources(self) -> typing.List[builtins.str]:
        # Effective ``resources`` value as seen by Terraform.
        return typing.cast(typing.List[builtins.str], jsii.get(self, "resources"))

    @resources.setter
    def resources(self, value: typing.List[builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c05b0dd5e7298e8daef10ffcc83822e8e6d41fb9529fc4129d6678d7e54d6f39)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "resources", value)

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, AuditLogSubscriptionStatements]]:
        # Raw underlying value: a statement struct or an unresolved Terraform token.
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, AuditLogSubscriptionStatements]], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, AuditLogSubscriptionStatements]],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7c091ff6d6dc6aed880d87fba97a924c35dae4f63a1409525b771e6a5354043f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
# Public names re-exported by this generated module.
__all__ = [
    "AuditLogSubscription",
    "AuditLogSubscriptionConfig",
    "AuditLogSubscriptionStatements",
    "AuditLogSubscriptionStatementsList",
    "AuditLogSubscriptionStatementsOutputReference",
]

# jsii publication helper: finalizes the exported names declared above.
publication.publish()
# The ``_typecheckingstub__*`` functions below are never called at runtime; they exist
# only so ``typing.get_type_hints`` can resolve the expected argument types for the
# debug-mode ``check_type`` assertions in the classes above.
def _typecheckingstub__411f1d0cc53fb5844b8519f71316b59e6c298594e900f01df5aa9adb42d72a9a(
    scope: _constructs_77d1e7e8.Construct,
    id_: builtins.str,
    *,
    config: typing.Mapping[builtins.str, builtins.str],
    integration_key: builtins.str,
    name: builtins.str,
    on: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
    statements: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[AuditLogSubscriptionStatements, typing.Dict[builtins.str, typing.Any]]]],
    id: typing.Optional[builtins.str] = None,
    tags: typing.Optional[typing.Sequence[builtins.str]] = None,
    connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
    count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
    depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
    for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
    lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
    provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
    provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__b09593dc48451cac7e1de804b7017af1f32c84353b22b5c4c0e47132d112d4dc(
    value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[AuditLogSubscriptionStatements, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
    """Type checking stubs"""
    pass
# Type-checking stubs for the scalar property setters of AuditLogSubscription
# (config mapping, integration key, name, id, "on" flag, tags). Never called;
# consumed only via ``typing.get_type_hints``.
def _typecheckingstub__bd93333e753af3e942674e8d35eb2bd562b2c17b91987e17175e5afe70e988de(
    value: typing.Mapping[builtins.str, builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__d2d84a7846d6946c2c7e9910b45f406f96501fb19190e96f24154d42eafb0d44(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__56df65820b16507c6d5b07dc336b9299278931508a997f928c79bb7b2c8eb391(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__2d44d580c2023e22496c87a2ff72b8b489a25e7b4bb129e87f94d47b15ccf22d(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__8677ea16381901f65d55b725f90b48f7b45956a0f95d13506f39ec151ce8ce39(
    value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__05ba6d47af38d7ff21f51dabc547973ed76fd65bf7e786ae53a1ca9f9637663f(
    value: typing.List[builtins.str],
) -> None:
    """Type checking stubs"""
    pass
# Type-checking stubs mirroring the keyword signatures of AuditLogSubscriptionConfig
# and AuditLogSubscriptionStatements. Never called; consumed via ``typing.get_type_hints``.
def _typecheckingstub__ef1e41a2cdac9756b56ce790ba595e5e76b13fcfd9f0a7f2d71efe562ae45ed6(
    *,
    connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
    count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
    depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
    for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
    lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
    provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
    provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
    config: typing.Mapping[builtins.str, builtins.str],
    integration_key: builtins.str,
    name: builtins.str,
    on: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
    statements: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[AuditLogSubscriptionStatements, typing.Dict[builtins.str, typing.Any]]]],
    id: typing.Optional[builtins.str] = None,
    tags: typing.Optional[typing.Sequence[builtins.str]] = None,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__7eafab4833c3c1fd2c0d9c87f5e7f4b213057774377a08fc698b07c04d741b68(
    *,
    effect: builtins.str,
    actions: typing.Optional[typing.Sequence[builtins.str]] = None,
    not_actions: typing.Optional[typing.Sequence[builtins.str]] = None,
    not_resources: typing.Optional[typing.Sequence[builtins.str]] = None,
    resources: typing.Optional[typing.Sequence[builtins.str]] = None,
) -> None:
    """Type checking stubs"""
    pass
# Type-checking stubs backing AuditLogSubscriptionStatementsList (constructor, get(),
# and private property setters). Never called; consumed via ``typing.get_type_hints``.
def _typecheckingstub__4fa849d166c930541bfe6a77a52b6078ca14b7a989c83d9351692dc4b6a8bf32(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__cc3cb3e53d2faa60cfe4bdef04110898eb5a0bcbe7727de00eabbddf2d3adf94(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__27e69a9705c47d54b626c2a39ea258848bafa6aa5cb5fe13201faeee175f77ae(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__187ad5bc76ccfd39bdbac50bbf771cf63991c31f846a503ef587176059078646(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__fc64dc8178e4fe7ebaa1f51ac34965316ec68749d68976f804983f6b9ab36a7b(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__639b60dc88564b507650def1a760f485e0de3656efd2bbe59f501b8004c1d892(
    value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[AuditLogSubscriptionStatements]]],
) -> None:
    """Type checking stubs"""
    pass
# Type-checking stubs backing AuditLogSubscriptionStatementsOutputReference
# (constructor and attribute setters). Never called; consumed via
# ``typing.get_type_hints``.
def _typecheckingstub__f18177997cb8ff7e5f5ef7bf39e4c4651c9132bbfc9a55cf2219f6f731147e64(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__fb9ec3d2a4be67c958c24158c461f584ea6cba17fc0d6d72117a4be6d2b2966d(
    value: typing.List[builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__15c33141045f73a7f7d042d4fe02cefc4e2dd959f41010ef36fa6f07dfac2d72(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__c6876594a7f9d43e7e929c7da7343d409a02684ba1316402d0af84acb40c8a4a(
    value: typing.List[builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__ebf1856cf7e4f58c6236309886e41442ba4c836ebd689d63896f83739e0dfc2a(
    value: typing.List[builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__c05b0dd5e7298e8daef10ffcc83822e8e6d41fb9529fc4129d6678d7e54d6f39(
    value: typing.List[builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__7c091ff6d6dc6aed880d87fba97a924c35dae4f63a1409525b771e6a5354043f(
    value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, AuditLogSubscriptionStatements]],
) -> None:
    """Type checking stubs"""
    pass
/swift-2.32.0.tar.gz/swift-2.32.0/doc/source/metrics/container_auditor.rst | ``container-auditor`` Metrics
=============================
============================== ====================================================
Metric Name Description
------------------------------ ----------------------------------------------------
``container-auditor.errors`` Incremented when an Exception is caught in an audit
pass (only once per pass, max).
``container-auditor.passes`` Count of individual containers passing an audit.
``container-auditor.failures`` Count of individual containers failing an audit.
``container-auditor.timing`` Timing data for each container audit.
============================== ====================================================
| PypiClean |
/nats-py-2.3.1.tar.gz/nats-py-2.3.1/nats/aio/client.py |
from __future__ import annotations
import asyncio
import base64
import ipaddress
import json
import logging
import ssl
import time
import string
from dataclasses import dataclass
from email.parser import BytesParser
from random import shuffle
from secrets import token_hex
from typing import (
Any,
Awaitable,
Callable,
Tuple,
Union,
List,
Optional,
Dict,
)
from urllib.parse import ParseResult, urlparse
try:
from fast_mail_parser import parse_email
except ImportError:
parse_email = None
import nats.js
from nats import errors
from nats.nuid import NUID
from nats.protocol import command as prot_command
from nats.protocol.parser import (
AUTHORIZATION_VIOLATION,
PERMISSIONS_ERR,
PONG,
STALE_CONNECTION,
Parser,
)
from .errors import ErrInvalidUserCredentials, ErrStaleConnection
from .msg import Msg
from .subscription import (
DEFAULT_SUB_PENDING_BYTES_LIMIT,
DEFAULT_SUB_PENDING_MSGS_LIMIT,
Subscription,
)
from .transport import TcpTransport, Transport, WebSocketTransport
__version__ = '2.3.1'
__lang__ = 'python3'
_logger = logging.getLogger(__name__)
PROTOCOL = 1
INFO_OP = b'INFO'
CONNECT_OP = b'CONNECT'
PING_OP = b'PING'
PONG_OP = b'PONG'
OK_OP = b'+OK'
ERR_OP = b'-ERR'
_CRLF_ = b'\r\n'
_CRLF_LEN_ = len(_CRLF_)
_SPC_ = b' '
_SPC_BYTE_ = 32
EMPTY = ""
PING_PROTO = PING_OP + _CRLF_
PONG_PROTO = PONG_OP + _CRLF_
DEFAULT_INBOX_PREFIX = b'_INBOX'
DEFAULT_PENDING_SIZE = 2 * 1024 * 1024
DEFAULT_BUFFER_SIZE = 32768
DEFAULT_RECONNECT_TIME_WAIT = 2 # in seconds
DEFAULT_MAX_RECONNECT_ATTEMPTS = 60
DEFAULT_PING_INTERVAL = 120 # in seconds
DEFAULT_MAX_OUTSTANDING_PINGS = 2
DEFAULT_MAX_PAYLOAD_SIZE = 1048576
DEFAULT_MAX_FLUSHER_QUEUE_SIZE = 1024
DEFAULT_FLUSH_TIMEOUT = 10 # in seconds
DEFAULT_CONNECT_TIMEOUT = 2 # in seconds
DEFAULT_DRAIN_TIMEOUT = 30 # in seconds
MAX_CONTROL_LINE_SIZE = 1024
NATS_HDR_LINE = bytearray(b'NATS/1.0')
NATS_HDR_LINE_SIZE = len(NATS_HDR_LINE)
NO_RESPONDERS_STATUS = "503"
CTRL_STATUS = "100"
STATUS_MSG_LEN = 3 # e.g. 20x, 40x, 50x
Callback = Callable[[], Awaitable[None]]
ErrorCallback = Callable[[Exception], Awaitable[None]]
JWTCallback = Callable[[], Union[bytearray, bytes]]
Credentials = Union[str, Tuple[str, str]]
SignatureCallback = Callable[[str], bytes]
@dataclass
class Srv:
"""
Srv is a helper data structure to hold state of a server.
"""
uri: ParseResult
reconnects: int = 0
last_attempt: Optional[float] = None
did_connect: bool = False
discovered: bool = False
tls_name: Optional[str] = None
server_version: Optional[str] = None
class ServerVersion:
def __init__(self, server_version: str) -> None:
self._server_version = server_version
self._major_version: Optional[int] = None
self._minor_version: Optional[int] = None
self._patch_version: Optional[int] = None
self._dev_version: Optional[str] = None
# TODO(@orsinium): use cached_property
def parse_version(self) -> None:
v = (self._server_version).split('-')
if len(v) > 1:
self._dev_version = v[1]
tokens = v[0].split('.')
n = len(tokens)
if n > 1:
self._major_version = int(tokens[0])
if n > 2:
self._minor_version = int(tokens[1])
if n > 3:
self._patch_version = int(tokens[2])
@property
def major(self) -> int:
if not self._major_version:
self.parse_version()
return self._major_version or 0
@property
def minor(self) -> int:
if not self._minor_version:
self.parse_version()
return self._minor_version or 0
@property
def patch(self) -> int:
if not self._patch_version:
self.parse_version()
return self._patch_version or 0
@property
def dev(self) -> str:
if not self._dev_version:
self.parse_version()
return self._dev_version or ''
def __repr__(self) -> str:
return f"<nats server v{self._server_version}>"
async def _default_error_callback(ex: Exception) -> None:
"""
Provides a default way to handle async errors if the user
does not provide one.
"""
_logger.error('nats: encountered error', exc_info=ex)
class Client:
"""
Asyncio based client for NATS.
"""
msg_class: type[Msg] = Msg
# FIXME: Use an enum instead.
DISCONNECTED = 0
CONNECTED = 1
CLOSED = 2
RECONNECTING = 3
CONNECTING = 4
DRAINING_SUBS = 5
DRAINING_PUBS = 6
def __repr__(self) -> str:
return f"<nats client v{__version__}>"
def __init__(self) -> None:
self._current_server: Optional[Srv] = None
self._server_info: Dict[str, Any] = {}
self._server_pool: List[Srv] = []
self._reading_task: Optional[asyncio.Task] = None
self._ping_interval_task: Optional[asyncio.Task] = None
self._pings_outstanding: int = 0
self._pongs_received: int = 0
self._pongs: List[asyncio.Future] = []
self._transport: Optional[Transport] = None
self._err: Optional[Exception] = None
# callbacks
self._error_cb: ErrorCallback = _default_error_callback
self._disconnected_cb: Optional[Callback] = None
self._closed_cb: Optional[Callback] = None
self._discovered_server_cb: Optional[Callback] = None
self._reconnected_cb: Optional[Callback] = None
self._reconnection_task: Optional[asyncio.Task[None]] = None
self._reconnection_task_future: Optional[asyncio.Future] = None
self._max_payload: int = DEFAULT_MAX_PAYLOAD_SIZE
# client id that the NATS server knows about.
self._client_id: Optional[int] = None
self._sid: int = 0
self._subs: Dict[int, Subscription] = {}
self._status: int = Client.DISCONNECTED
self._ps: Parser = Parser(self)
# pending queue of commands that will be flushed to the server.
self._pending: List[bytes] = []
# current size of pending data in total.
self._pending_data_size: int = 0
# max pending size is the maximum size of the data that can be buffered.
self._max_pending_size: int = 0
self._flush_queue: Optional[asyncio.Queue[asyncio.Future[Any]]] = None
self._flusher_task: Optional[asyncio.Task] = None
self._flush_timeout: Optional[float] = 0
self._hdr_parser: BytesParser = BytesParser()
# New style request/response
self._resp_map: Dict[str, asyncio.Future] = {}
self._resp_sub_prefix: Optional[bytearray] = None
self._nuid = NUID()
self._inbox_prefix = bytearray(DEFAULT_INBOX_PREFIX)
self._auth_configured: bool = False
# NKEYS support
#
# user_jwt_cb is used to fetch and return the account
# signed JWT for this user.
self._user_jwt_cb: Optional[JWTCallback] = None
# signature_cb is used to sign a nonce from the server while
# authenticating with nkeys. The user should sign the nonce and
# return the base64 encoded signature.
self._signature_cb: Optional[SignatureCallback] = None
# user credentials file can be a tuple or single file.
self._user_credentials: Optional[Credentials] = None
# file that contains the nkeys seed and its public key as a string.
self._nkeys_seed: Optional[str] = None
self._public_nkey: Optional[str] = None
self.options: Dict[str, Any] = {}
self.stats = {
'in_msgs': 0,
'out_msgs': 0,
'in_bytes': 0,
'out_bytes': 0,
'reconnects': 0,
'errors_received': 0,
}
async def connect(
self,
servers: Union[str, List[str]] = ["nats://localhost:4222"],
error_cb: Optional[ErrorCallback] = None,
disconnected_cb: Optional[Callback] = None,
closed_cb: Optional[Callback] = None,
discovered_server_cb: Optional[Callback] = None,
reconnected_cb: Optional[Callback] = None,
name: Optional[str] = None,
pedantic: bool = False,
verbose: bool = False,
allow_reconnect: bool = True,
connect_timeout: int = DEFAULT_CONNECT_TIMEOUT,
reconnect_time_wait: int = DEFAULT_RECONNECT_TIME_WAIT,
max_reconnect_attempts: int = DEFAULT_MAX_RECONNECT_ATTEMPTS,
ping_interval: int = DEFAULT_PING_INTERVAL,
max_outstanding_pings: int = DEFAULT_MAX_OUTSTANDING_PINGS,
dont_randomize: bool = False,
flusher_queue_size: int = DEFAULT_MAX_FLUSHER_QUEUE_SIZE,
no_echo: bool = False,
tls: Optional[ssl.SSLContext] = None,
tls_hostname: Optional[str] = None,
user: Optional[str] = None,
password: Optional[str] = None,
token: Optional[str] = None,
drain_timeout: int = DEFAULT_DRAIN_TIMEOUT,
signature_cb: Optional[SignatureCallback] = None,
user_jwt_cb: Optional[JWTCallback] = None,
user_credentials: Optional[Credentials] = None,
nkeys_seed: Optional[str] = None,
inbox_prefix: Union[str, bytes] = DEFAULT_INBOX_PREFIX,
pending_size: int = DEFAULT_PENDING_SIZE,
flush_timeout: Optional[float] = None,
) -> None:
"""
Establishes a connection to NATS.
:param servers: NATS Connection
:param name: Label the connection with name (shown in NATS monitoring)
:param error_cb: Callback to report errors.
:param disconnected_cb: Callback to report disconnection from NATS.
:param closed_cb: Callback to report when client stops reconnection to NATS.
:param discovered_server_cb: Callback to report when a new server joins the cluster.
:param pending_size: Max size of the pending buffer for publishing commands.
:param flush_timeout: Max duration to wait for a forced flush to occur.
Connecting setting all callbacks::
import asyncio
import nats
async def main():
async def disconnected_cb():
print('Got disconnected!')
async def reconnected_cb():
print(f'Got reconnected to {nc.connected_url.netloc}')
async def error_cb(e):
print(f'There was an error: {e}')
async def closed_cb():
print('Connection is closed')
# Connect to NATS with logging callbacks.
nc = await nats.connect('demo.nats.io',
error_cb=error_cb,
reconnected_cb=reconnected_cb,
disconnected_cb=disconnected_cb,
closed_cb=closed_cb,
)
async def handler(msg):
print(f'Received a message on {msg.subject} {msg.reply}: {msg.data}')
await msg.respond(b'OK')
sub = await nc.subscribe('help.please', cb=handler)
resp = await nc.request('help.please', b'help')
print('Response:', resp)
await nc.close()
if __name__ == '__main__':
asyncio.run(main())
Using a context manager::
import asyncio
import nats
async def main():
is_done = asyncio.Future()
async def closed_cb():
print('Connection to NATS is closed.')
is_done.set_result(True)
async with (await nats.connect('nats://demo.nats.io:4222', closed_cb=closed_cb)) as nc:
print(f'Connected to NATS at {nc.connected_url.netloc}...')
async def subscribe_handler(msg):
subject = msg.subject
reply = msg.reply
data = msg.data.decode()
print('Received a message on '{subject} {reply}': {data}'.format(
subject=subject, reply=reply, data=data))
await nc.subscribe('discover', cb=subscribe_handler)
await nc.flush()
for i in range(0, 10):
await nc.publish('discover', b'hello world')
await asyncio.sleep(0.1)
await asyncio.wait_for(is_done, 60.0)
if __name__ == '__main__':
asyncio.run(main())
"""
for cb in [error_cb, disconnected_cb, closed_cb, reconnected_cb,
discovered_server_cb]:
if cb and not asyncio.iscoroutinefunction(cb):
raise errors.InvalidCallbackTypeError
self._setup_server_pool(servers)
self._error_cb = error_cb or _default_error_callback
self._closed_cb = closed_cb
self._discovered_server_cb = discovered_server_cb
self._reconnected_cb = reconnected_cb
self._disconnected_cb = disconnected_cb
# Custom inbox prefix
if isinstance(inbox_prefix, str):
inbox_prefix = inbox_prefix.encode()
assert isinstance(inbox_prefix, bytes)
self._inbox_prefix = bytearray(inbox_prefix)
# NKEYS support
self._signature_cb = signature_cb
self._user_jwt_cb = user_jwt_cb
self._user_credentials = user_credentials
self._nkeys_seed = nkeys_seed
# Customizable options
self.options["verbose"] = verbose
self.options["pedantic"] = pedantic
self.options["name"] = name
self.options["allow_reconnect"] = allow_reconnect
self.options["dont_randomize"] = dont_randomize
self.options["reconnect_time_wait"] = reconnect_time_wait
self.options["max_reconnect_attempts"] = max_reconnect_attempts
self.options["ping_interval"] = ping_interval
self.options["max_outstanding_pings"] = max_outstanding_pings
self.options["no_echo"] = no_echo
self.options["user"] = user
self.options["password"] = password
self.options["token"] = token
self.options["connect_timeout"] = connect_timeout
self.options["drain_timeout"] = drain_timeout
if tls:
self.options['tls'] = tls
if tls_hostname:
self.options['tls_hostname'] = tls_hostname
if user or password or token:
self._auth_configured = True
if self._user_credentials is not None or self._nkeys_seed is not None:
self._auth_configured = True
self._setup_nkeys_connect()
# Queue used to trigger flushes to the socket.
self._flush_queue = asyncio.Queue(maxsize=flusher_queue_size)
# Max size of buffer used for flushing commands to the server.
self._max_pending_size = pending_size
# Max duration for a force flush (happens when a buffer is full).
self._flush_timeout = flush_timeout
if self.options["dont_randomize"] is False:
shuffle(self._server_pool)
while True:
try:
await self._select_next_server()
await self._process_connect_init()
assert self._current_server, "the current server must be set by _select_next_server"
self._current_server.reconnects = 0
break
except errors.NoServersError as e:
if self.options["max_reconnect_attempts"] < 0:
# Never stop reconnecting
continue
self._err = e
raise e
except (OSError, errors.Error, asyncio.TimeoutError) as e:
self._err = e
await self._error_cb(e)
# Bail on first attempt if reconnecting is disallowed.
if not self.options["allow_reconnect"]:
raise e
await self._close(Client.DISCONNECTED, False)
if self._current_server is not None:
self._current_server.last_attempt = time.monotonic()
self._current_server.reconnects += 1
def _setup_nkeys_connect(self) -> None:
if self._user_credentials is not None:
self._setup_nkeys_jwt_connect()
else:
self._setup_nkeys_seed_connect()
    def _setup_nkeys_jwt_connect(self) -> None:
        """
        Prepare the JWT and signature callbacks used during CONNECT when
        user credentials were provided, either as a (jwt_file, seed_file)
        tuple or as a single chained .creds file.

        Seed material is read into pre-sized bytearrays and wiped after
        signing as a best-effort attempt to keep keys out of memory.
        """
        assert self._user_credentials, "_user_credentials required"
        import os
        import nkeys

        creds = self._user_credentials
        if isinstance(creds, tuple):
            # Two separate files: (user_jwt_file, nkey_seed_file).
            assert len(creds) == 2

            def user_cb() -> bytearray:
                # Read the entire JWT file into a pre-sized bytearray.
                contents = None
                with open(creds[0], 'rb') as f:
                    contents = bytearray(os.fstat(f.fileno()).st_size)
                    f.readinto(contents)  # type: ignore[attr-defined]
                return contents

            self._user_jwt_cb = user_cb

            def sig_cb(nonce: str) -> bytes:
                # Sign the server-provided nonce with the NKEY seed.
                seed = None
                with open(creds[1], 'rb') as f:
                    seed = bytearray(os.fstat(f.fileno()).st_size)
                    f.readinto(seed)  # type: ignore[attr-defined]
                kp = nkeys.from_seed(seed)
                raw_signed = kp.sign(nonce.encode())
                sig = base64.b64encode(raw_signed)

                # Best effort attempt to clear from memory.
                kp.wipe()
                del kp
                del seed
                return sig

            self._signature_cb = sig_cb
        else:
            # Single chained .creds file containing both JWT and seed.
            # Define the functions to be able to sign things using nkeys.
            def user_cb() -> bytearray:
                assert isinstance(creds, str)
                user_jwt = None
                with open(creds, 'rb') as f:
                    while True:
                        line = bytearray(f.readline())
                        if b'BEGIN NATS USER JWT' in line:
                            user_jwt = bytearray(f.readline())
                            break
                # Remove trailing line break but reusing same memory view.
                return user_jwt[:len(user_jwt) - 1]

            self._user_jwt_cb = user_cb

            def sig_cb(nonce: str) -> bytes:
                assert isinstance(creds, str)
                user_seed = None
                with open(creds, 'rb', buffering=0) as f:
                    for line in f:
                        # Detect line where the NKEY would start and end,
                        # then seek and read into a fixed bytearray that
                        # can be wiped.
                        if b'BEGIN USER NKEY SEED' in line:
                            nkey_start_pos = f.tell()
                            try:
                                next(f)
                            except StopIteration:
                                # NOTE(review): ErrInvalidUserCredentials is
                                # expected to come from the module's imports
                                # (legacy nats.aio.errors name) — verify it
                                # is in scope at file level.
                                raise ErrInvalidUserCredentials
                            nkey_end_pos = f.tell()
                            nkey_size = nkey_end_pos - nkey_start_pos - 1
                            f.seek(nkey_start_pos)

                            # Only gather enough bytes for the user seed
                            # into the pre allocated bytearray.
                            user_seed = bytearray(nkey_size)
                            f.readinto(user_seed)  # type: ignore[attr-defined]
                kp = nkeys.from_seed(user_seed)
                raw_signed = kp.sign(nonce.encode())
                sig = base64.b64encode(raw_signed)

                # Delete all state related to the keys.
                kp.wipe()
                del user_seed
                del kp
                return sig

            self._signature_cb = sig_cb
    def _setup_nkeys_seed_connect(self) -> None:
        """
        Prepare the public NKEY and the signature callback used during
        CONNECT when only an nkeys seed file was provided (no JWT).
        """
        assert self._nkeys_seed, "Client.connect must be called first"
        import os
        import nkeys

        seed = None
        creds = self._nkeys_seed
        with open(creds, 'rb') as f:
            seed = bytearray(os.fstat(f.fileno()).st_size)
            f.readinto(seed)  # type: ignore[attr-defined]
        kp = nkeys.from_seed(seed)
        # Keep only the public key; the seed itself is wiped right away.
        self._public_nkey = kp.public_key.decode()
        kp.wipe()
        del kp
        del seed

        def sig_cb(nonce: str) -> bytes:
            # Re-read the seed on every signature request so it does not
            # stay resident in memory between signings.
            seed = None
            with open(creds, 'rb') as f:
                seed = bytearray(os.fstat(f.fileno()).st_size)
                f.readinto(seed)  # type: ignore[attr-defined]
            kp = nkeys.from_seed(seed)
            raw_signed = kp.sign(nonce.encode())
            sig = base64.b64encode(raw_signed)

            # Best effort attempt to clear from memory.
            kp.wipe()
            del kp
            del seed
            return sig

        self._signature_cb = sig_cb
    async def close(self) -> None:
        """
        Closes the socket to which we are connected and
        sets the client to be in the CLOSED state.
        No further reconnections occur once reaching this point.
        """
        await self._close(Client.CLOSED)
    async def _close(self, status: int, do_cbs: bool = True) -> None:
        """
        Tear down the connection: stop background tasks, flush any buffered
        data, close the transport, cancel subscriptions, and optionally
        invoke the user's disconnected/closed callbacks.

        :param status: status to record if the client is already closed.
        :param do_cbs: when True, fire the disconnected/closed callbacks.
        """
        if self.is_closed:
            self._status = status
            return
        self._status = Client.CLOSED

        # Kick the flusher once again so that Task breaks and avoid pending futures.
        await self._flush_pending()

        if self._reading_task is not None and not self._reading_task.cancelled():
            self._reading_task.cancel()
        if self._ping_interval_task is not None and not self._ping_interval_task.cancelled():
            self._ping_interval_task.cancel()
        if self._flusher_task is not None and not self._flusher_task.cancelled():
            self._flusher_task.cancel()
        if self._reconnection_task is not None and not self._reconnection_task.done():
            self._reconnection_task.cancel()

            # Wait for the reconnection task to be done which should be soon.
            try:
                if self._reconnection_task_future is not None and not self._reconnection_task_future.cancelled():
                    await asyncio.wait_for(
                        self._reconnection_task_future,
                        self.options["reconnect_time_wait"],
                    )
            except (asyncio.CancelledError, asyncio.TimeoutError):
                pass

        # Relinquish control to allow background tasks to wrap up.
        await asyncio.sleep(0)

        assert self._transport, "Client.connect must be called first"
        if self._current_server is not None:
            # In case there is any pending data at this point, flush before disconnecting.
            if self._pending_data_size > 0:
                self._transport.writelines(self._pending[:])
                self._pending = []
                self._pending_data_size = 0
                await self._transport.drain()

        # Cleanup subscriptions since not reconnecting so no need
        # to replay the subscriptions anymore.
        for sub in self._subs.values():
            # Async subs use join when draining already so just cancel here.
            if sub._wait_for_msgs_task and not sub._wait_for_msgs_task.done():
                sub._wait_for_msgs_task.cancel()
            if sub._message_iterator:
                sub._message_iterator._cancel()

            # Sync subs may have some inflight next_msg calls that could be blocking
            # so cancel them here to unblock them.
            if sub._pending_next_msgs_calls:
                for fut in sub._pending_next_msgs_calls.values():
                    fut.cancel("nats: connection is closed")
                sub._pending_next_msgs_calls.clear()
        self._subs.clear()

        if self._transport is not None:
            self._transport.close()
            try:
                await self._transport.wait_closed()
            except Exception as e:
                await self._error_cb(e)

        if do_cbs:
            if self._disconnected_cb is not None:
                await self._disconnected_cb()
            if self._closed_cb is not None:
                await self._closed_cb()

        # Set the client_id and subscription prefix back to None
        self._client_id = None
        self._resp_sub_prefix = None
    async def drain(self) -> None:
        """
        drain will put a connection into a drain state. All subscriptions will
        immediately be put into a drain state. Upon completion, the publishers
        will be drained and can not publish any additional messages. Upon draining
        of the publishers, the connection will be closed. Use the `closed_cb`
        option to know when the connection has moved from draining to closed.

        :raises errors.ConnectionClosedError: if already closed.
        :raises errors.ConnectionReconnectingError: if (re)connecting.
        """
        if self.is_draining:
            return
        if self.is_closed:
            raise errors.ConnectionClosedError
        if self.is_connecting or self.is_reconnecting:
            raise errors.ConnectionReconnectingError

        # Kick off a drain task per subscription.
        drain_tasks = []
        for sub in self._subs.values():
            coro = sub._drain()
            task = asyncio.get_running_loop().create_task(coro)
            drain_tasks.append(task)

        drain_is_done = asyncio.gather(*drain_tasks)

        # Start draining the subscriptions.
        # Relinquish CPU to allow drain tasks to start in the background,
        # before setting state to draining.
        await asyncio.sleep(0)
        self._status = Client.DRAINING_SUBS

        try:
            await asyncio.wait_for(
                drain_is_done, self.options["drain_timeout"]
            )
        except asyncio.TimeoutError:
            # Retrieve the exception so it is not reported as unretrieved,
            # then cancel whatever is still pending.
            drain_is_done.exception()
            drain_is_done.cancel()
            await self._error_cb(errors.DrainTimeoutError())
        except asyncio.CancelledError:
            pass
        finally:
            # Regardless of the outcome, drain publishers and close.
            self._status = Client.DRAINING_PUBS
            await self.flush()
            await self._close(Client.CLOSED)
    async def publish(
        self,
        subject: str,
        payload: bytes = b'',
        reply: str = '',
        headers: Optional[Dict[str, str]] = None
    ) -> None:
        """
        Publishes a NATS message.

        :param subject: Subject to which the message will be published.
        :param payload: Message data.
        :param reply: Inbox to which a responder can respond.
        :param headers: Optional message header.

        :raises errors.ConnectionClosedError: if the client is closed.
        :raises errors.ConnectionDrainingError: while draining publishers.
        :raises errors.OutboundBufferLimitError: if disconnected and the
            reconnect buffer is disabled or full.
        :raises errors.MaxPayloadError: if payload exceeds the server limit.

        ::

            import asyncio
            import nats

            async def main():
                nc = await nats.connect('demo.nats.io')

                # Publish as message with an inbox.
                inbox = nc.new_inbox()
                sub = await nc.subscribe('hello')

                # Simple publishing
                await nc.publish('hello', b'Hello World!')

                # Publish with a reply
                await nc.publish('hello', b'Hello World!', reply=inbox)

                # Publish with headers
                await nc.publish('hello', b'With Headers', headers={'Foo':'Bar'})

                while True:
                    try:
                        msg = await sub.next_msg()
                    except:
                        break
                    print('----------------------')
                    print('Subject:', msg.subject)
                    print('Reply  :', msg.reply)
                    print('Data   :', msg.data)
                    print('Headers:', msg.header)

            if __name__ == '__main__':
                asyncio.run(main())

        """
        if self.is_closed:
            raise errors.ConnectionClosedError
        if self.is_draining_pubs:
            raise errors.ConnectionDrainingError

        payload_size = len(payload)
        if not self.is_connected:
            # While disconnected, writes go to the reconnect buffer.
            if self._max_pending_size <= 0 or payload_size + self._pending_data_size > self._max_pending_size:
                # Cannot publish during a reconnection when the buffering is disabled,
                # or if pending buffer is already full.
                raise errors.OutboundBufferLimitError

        if payload_size > self._max_payload:
            raise errors.MaxPayloadError
        await self._send_publish(
            subject, reply, payload, payload_size, headers
        )
    async def _send_publish(
        self,
        subject: str,
        reply: str,
        payload: bytes,
        payload_size: int,
        headers: Optional[Dict[str, Any]],
    ) -> None:
        """
        Sends PUB command to the NATS server.

        When *headers* is given an HPUB command is sent instead, with the
        headers serialized in the wire form
        ``NATS/1.0\\r\\nkey: value\\r\\n...\\r\\n\\r\\n``.
        """
        if subject == "":
            # Avoid sending messages with an empty subject.
            raise errors.BadSubjectError

        pub_cmd = None
        if headers is None:
            pub_cmd = prot_command.pub_cmd(subject, reply, payload)
        else:
            hdr = bytearray()
            hdr.extend(NATS_HDR_LINE)
            hdr.extend(_CRLF_)
            for k, v in headers.items():
                key = k.strip()
                if not key:
                    # Skip empty keys
                    continue
                hdr.extend(key.encode())
                hdr.extend(b': ')
                value = v.strip()
                hdr.extend(value.encode())
                hdr.extend(_CRLF_)
            hdr.extend(_CRLF_)
            pub_cmd = prot_command.hpub_cmd(subject, reply, hdr, payload)

        self.stats['out_msgs'] += 1
        self.stats['out_bytes'] += payload_size
        await self._send_command(pub_cmd)
        # Only kick the flusher when it is not already busy.
        if self._flush_queue is not None and self._flush_queue.empty():
            await self._flush_pending()
    async def subscribe(
        self,
        subject: str,
        queue: str = "",
        cb: Optional[Callable[[Msg], Awaitable[None]]] = None,
        future: Optional[asyncio.Future] = None,
        max_msgs: int = 0,
        pending_msgs_limit: int = DEFAULT_SUB_PENDING_MSGS_LIMIT,
        pending_bytes_limit: int = DEFAULT_SUB_PENDING_BYTES_LIMIT,
    ) -> Subscription:
        """
        subscribe registers interest in a given subject.

        If a callback is provided, messages will be processed asynchronously.

        If a callback isn't provided, messages can be retrieved via an
        asynchronous iterator on the returned subscription object.

        :param subject: Subject of interest (must not contain spaces).
        :param queue: Optional queue group for load-balanced delivery.
        :param cb: Optional async callback invoked per message.
        :param future: Optional future resolved with the first message.
        :param max_msgs: Auto-unsubscribe after this many messages (0 = no limit).
        :param pending_msgs_limit: Max buffered messages per subscription.
        :param pending_bytes_limit: Max buffered bytes per subscription.
        """
        if not subject or (' ' in subject):
            raise errors.BadSubjectError
        if queue and (' ' in queue):
            raise errors.BadSubjectError
        if self.is_closed:
            raise errors.ConnectionClosedError
        if self.is_draining:
            raise errors.ConnectionDrainingError

        # Subscription ids are assigned sequentially per connection.
        self._sid += 1
        sid = self._sid

        sub = Subscription(
            self,
            sid,
            subject,
            queue=queue,
            cb=cb,
            future=future,
            max_msgs=max_msgs,
            pending_msgs_limit=pending_msgs_limit,
            pending_bytes_limit=pending_bytes_limit,
        )
        sub._start(self._error_cb)
        self._subs[sid] = sub
        await self._send_subscribe(sub)
        return sub
def _remove_sub(self, sid: int, max_msgs: int = 0) -> None:
self._subs.pop(sid, None)
async def _send_subscribe(self, sub: Subscription) -> None:
sub_cmd = None
if sub._queue is None:
sub_cmd = prot_command.sub_cmd(sub._subject, EMPTY, sub._id)
else:
sub_cmd = prot_command.sub_cmd(sub._subject, sub._queue, sub._id)
await self._send_command(sub_cmd)
await self._flush_pending()
    async def _init_request_sub(self) -> None:
        """
        Create the single wildcard subscription
        (``<inbox_prefix>.<nuid>.*``) that multiplexes all new-style
        request/response traffic, and reset the response map.
        """
        self._resp_map = {}

        self._resp_sub_prefix = self._inbox_prefix[:]
        self._resp_sub_prefix.extend(b'.')
        self._resp_sub_prefix.extend(self._nuid.next())
        self._resp_sub_prefix.extend(b'.')
        resp_mux_subject = self._resp_sub_prefix[:]
        resp_mux_subject.extend(b'*')
        await self.subscribe(
            resp_mux_subject.decode(), cb=self._request_sub_callback
        )
    async def _request_sub_callback(self, msg: Msg) -> None:
        """
        Deliver a response arriving on the request mux subscription to the
        future registered for its token.
        """
        # Strip "<inbox_prefix>.<nuid>." to recover the per-request token.
        # NOTE(review): assumes the NUID segment is exactly 22 bytes and the
        # '+ 2' accounts for the two dots — verify against _init_request_sub.
        token = msg.subject[len(self._inbox_prefix) + 22 + 2:]
        try:
            fut = self._resp_map.get(token)
            if not fut:
                return
            fut.set_result(msg)
            self._resp_map.pop(token, None)
        except (asyncio.CancelledError, asyncio.InvalidStateError):
            # Request may have timed out already so remove the entry
            self._resp_map.pop(token, None)
    async def request(
        self,
        subject: str,
        payload: bytes = b'',
        timeout: float = 0.5,
        old_style: bool = False,
        headers: Optional[Dict[str, Any]] = None,
    ) -> Msg:
        """
        Implements the request/response pattern via pub/sub
        using a single wildcard subscription that handles
        the responses.

        :param subject: Subject of the request.
        :param payload: Request data.
        :param timeout: Seconds to wait for the reply.
        :param old_style: Use a dedicated ephemeral inbox subscription per
            request instead of the shared response mux.
        :param headers: Optional headers (new-style requests only).
        :raises errors.NoRespondersError: when the server reports that no
            subscribers are listening on the subject.
        """
        if old_style:
            # FIXME: Support headers in old style requests.
            return await self._request_old_style(
                subject, payload, timeout=timeout
            )
        else:
            msg = await self._request_new_style(
                subject, payload, timeout=timeout, headers=headers
            )
            if msg.headers and msg.headers.get(nats.js.api.Header.STATUS) == NO_RESPONDERS_STATUS:
                raise errors.NoRespondersError
            return msg
    async def _request_new_style(
        self,
        subject: str,
        payload: bytes,
        timeout: float = 1,
        headers: Optional[Dict[str, Any]] = None,
    ) -> Msg:
        """
        Publish the request with a reply inbox on the shared response mux
        and wait for the matching reply, keyed by a unique token.
        """
        if self.is_draining_pubs:
            raise errors.ConnectionDrainingError

        # Lazily set up the mux subscription on first request.
        if not self._resp_sub_prefix:
            await self._init_request_sub()
        assert self._resp_sub_prefix

        # Use a new NUID + couple of unique token bytes to identify the request,
        # then use the future to get the response.
        token = self._nuid.next()
        token.extend(token_hex(2).encode())
        inbox = self._resp_sub_prefix[:]
        inbox.extend(token)
        future: asyncio.Future = asyncio.Future()
        self._resp_map[token.decode()] = future

        await self.publish(
            subject, payload, reply=inbox.decode(), headers=headers
        )

        # Wait for the response or give up on timeout.
        try:
            msg = await asyncio.wait_for(future, timeout)
            return msg
        except asyncio.TimeoutError:
            try:
                # Double check that the token is there already.
                self._resp_map.pop(token.decode())
            except KeyError:
                await self._error_cb(
                    errors.Error(f"nats: missing response token '{token.decode()}'")
                )
            future.cancel()
            raise errors.TimeoutError
def new_inbox(self) -> str:
"""
new_inbox returns a unique inbox that can be used
for NATS requests or subscriptions::
# Create unique subscription to receive direct messages.
inbox = nc.new_inbox()
sub = await nc.subscribe(inbox)
nc.publish('broadcast', b'', reply=inbox)
msg = sub.next_msg()
"""
next_inbox = self._inbox_prefix[:]
next_inbox.extend(b'.')
next_inbox.extend(self._nuid.next())
return next_inbox.decode()
    async def _request_old_style(
        self, subject: str, payload: bytes, timeout: float = 1
    ) -> Msg:
        """
        Implements the request/response pattern via pub/sub
        using an ephemeral subscription which will be published
        with a limited interest of 1 reply returning the response
        or raising a Timeout error.
        """
        inbox = self.new_inbox()
        future: asyncio.Future[Msg] = asyncio.Future()
        sub = await self.subscribe(inbox, future=future, max_msgs=1)
        # Auto-unsubscribe after a single reply.
        await sub.unsubscribe(limit=1)
        await self.publish(subject, payload, reply=inbox)

        try:
            msg = await asyncio.wait_for(future, timeout)
            if msg.headers:
                if msg.headers.get(nats.js.api.Header.STATUS) == NO_RESPONDERS_STATUS:
                    raise errors.NoRespondersError
            return msg
        except asyncio.TimeoutError:
            await sub.unsubscribe()
            future.cancel()
            raise errors.TimeoutError
async def _send_unsubscribe(self, sid: int, limit: int = 0) -> None:
unsub_cmd = prot_command.unsub_cmd(sid, limit)
await self._send_command(unsub_cmd)
await self._flush_pending()
    async def flush(self, timeout: int = DEFAULT_FLUSH_TIMEOUT) -> None:
        """
        Sends a ping to the server expecting a pong back ensuring
        what we have written so far has made it to the server and
        also enabling measuring of roundtrip time.
        In case a pong is not returned within the allowed timeout,
        then it will raise nats.errors.TimeoutError

        :param timeout: Seconds to wait for the pong (must be positive).
        :raises errors.BadTimeoutError: if timeout is not positive.
        :raises errors.ConnectionClosedError: if the client is closed.
        :raises errors.FlushTimeoutError: if no pong arrives in time.
        """
        if timeout <= 0:
            raise errors.BadTimeoutError

        if self.is_closed:
            raise errors.ConnectionClosedError

        future: asyncio.Future = asyncio.Future()
        try:
            await self._send_ping(future)
            await asyncio.wait_for(future, timeout)
        except asyncio.TimeoutError:
            future.cancel()
            raise errors.FlushTimeoutError
@property
def connected_url(self) -> Optional[ParseResult]:
if self._current_server and self.is_connected:
return self._current_server.uri
return None
@property
def servers(self) -> List[ParseResult]:
servers = []
for srv in self._server_pool:
servers.append(srv.uri)
return servers
@property
def discovered_servers(self) -> List[ParseResult]:
servers = []
for srv in self._server_pool:
if srv.discovered:
servers.append(srv.uri)
return servers
    @property
    def max_payload(self) -> int:
        """
        Returns the max payload which we received from the servers INFO
        """
        return self._max_payload
    @property
    def client_id(self) -> Optional[int]:
        """
        Returns the client id which we received from the servers INFO
        (None while disconnected).
        """
        return self._client_id
    @property
    def last_error(self) -> Optional[Exception]:
        """
        Returns the last error which may have occurred.
        """
        return self._err
    @property
    def pending_data_size(self) -> int:
        """Number of bytes currently buffered waiting to be flushed."""
        return self._pending_data_size
    @property
    def is_closed(self) -> bool:
        """True once the connection is closed for good (no reconnects)."""
        return self._status == Client.CLOSED
    @property
    def is_reconnecting(self) -> bool:
        """True while the client is attempting to reconnect."""
        return self._status == Client.RECONNECTING
    @property
    def is_connected(self) -> bool:
        # A draining connection still counts as connected since traffic
        # keeps flowing until the drain completes.
        return (self._status == Client.CONNECTED) or self.is_draining
    @property
    def is_connecting(self) -> bool:
        """True during the initial connection attempt."""
        return self._status == Client.CONNECTING
    @property
    def is_draining(self) -> bool:
        """True while draining either subscriptions or publishers."""
        return (
            self._status == Client.DRAINING_SUBS
            or self._status == Client.DRAINING_PUBS
        )
    @property
    def is_draining_pubs(self) -> bool:
        """True while draining publishers (publishing is disallowed)."""
        return self._status == Client.DRAINING_PUBS
@property
def connected_server_version(self) -> ServerVersion:
"""
Returns the ServerVersion of the server to which the client
is currently connected.
"""
if self._current_server and self._current_server.server_version:
return ServerVersion(self._current_server.server_version)
return ServerVersion("0.0.0-unknown")
@property
def ssl_context(self) -> ssl.SSLContext:
ssl_context: Optional[ssl.SSLContext] = None
if "tls" in self.options:
ssl_context = self.options.get('tls')
else:
ssl_context = ssl.create_default_context()
if ssl_context is None:
raise errors.Error('nats: no ssl context provided')
return ssl_context
    async def _send_command(self, cmd: bytes, priority: bool = False) -> None:
        """
        Queue a raw protocol command into the pending buffer.

        :param cmd: serialized protocol command bytes.
        :param priority: when True, place the command at the front of the
            pending queue instead of appending it.
        """
        if priority:
            self._pending.insert(0, cmd)
        else:
            self._pending.append(cmd)
        self._pending_data_size += len(cmd)
        if self._max_pending_size > 0 and self._pending_data_size > self._max_pending_size:
            # Only flush force timeout on publish
            await self._flush_pending(force_flush=True)
    async def _flush_pending(
        self,
        force_flush: bool = False,
    ) -> Any:
        """
        Signal the flusher task that there is pending data to write.

        :param force_flush: when True, wait (up to the flush timeout) for
            the flusher to actually write the data.
        """
        assert self._flush_queue, "Client.connect must be called first"
        try:
            future: asyncio.Future = asyncio.Future()
            if not self.is_connected:
                # Not connected: resolve immediately; buffered data is
                # replayed by the reconnect logic.
                future.set_result(None)
                return future

            # kick the flusher!
            await self._flush_queue.put(future)

            if force_flush:
                try:
                    await asyncio.wait_for(future, self._flush_timeout)
                except asyncio.TimeoutError:
                    # Report to the async callback that there was a timeout.
                    await self._error_cb(errors.FlushTimeoutError())
        except asyncio.CancelledError:
            pass
    def _setup_server_pool(self, connect_url: Union[str, List[str]]) -> None:
        """
        Populate the server pool from a connect URL (or list of URLs),
        expanding bare ``host:port``/``host`` forms into nats:// URIs.

        :raises errors.Error: on malformed URLs, an invalid hostname, or a
            mix of websocket and non-websocket schemes in the list form.
        """
        if isinstance(connect_url, str):
            try:
                if "nats://" in connect_url or "tls://" in connect_url:
                    # Closer to how the Go client handles this.
                    # e.g. nats://localhost:4222
                    uri = urlparse(connect_url)
                elif "ws://" in connect_url or "wss://" in connect_url:
                    uri = urlparse(connect_url)
                elif ":" in connect_url:
                    # Expand the scheme for the user
                    # e.g. localhost:4222
                    uri = urlparse(f"nats://{connect_url}")
                else:
                    # Just use the endpoint with the default NATS port.
                    # e.g. demo.nats.io
                    uri = urlparse(f"nats://{connect_url}:4222")

                # In case only endpoint with scheme was set.
                # e.g. nats://demo.nats.io or localhost:
                # the ws and wss do not need a default port as the transport will assume 80 and 443, respectively
                if uri.port is None and uri.scheme not in ("ws", "wss"):
                    uri = urlparse(f"nats://{uri.hostname}:4222")
            except ValueError:
                raise errors.Error("nats: invalid connect url option")

            if uri.hostname is None or uri.hostname == "none":
                raise errors.Error("nats: invalid hostname in connect url")
            self._server_pool.append(Srv(uri))
        elif isinstance(connect_url, list):
            try:
                for server in connect_url:
                    uri = urlparse(server)
                    self._server_pool.append(Srv(uri))
            except ValueError:
                raise errors.Error("nats: invalid connect url option")
            # make sure protocols aren't mixed
            if not (all(server.uri.scheme in ("nats", "tls")
                        for server in self._server_pool)
                    or all(server.uri.scheme in ("ws", "wss")
                           for server in self._server_pool)):
                raise errors.Error(
                    "nats: mixing of websocket and non websocket URLs is not allowed"
                )
        else:
            raise errors.Error("nats: invalid connect url option")
    async def _select_next_server(self) -> None:
        """
        Looks up in the server pool for an available server
        and attempts to connect.

        :raises errors.NoServersError: when the pool is exhausted.
        """
        while True:
            if len(self._server_pool) == 0:
                self._current_server = None
                raise errors.NoServersError

            now = time.monotonic()
            s = self._server_pool.pop(0)
            if self.options["max_reconnect_attempts"] > 0:
                if s.reconnects > self.options["max_reconnect_attempts"]:
                    # Discard server since already tried to reconnect too many times
                    continue

            # Not yet exceeded max_reconnect_attempts so can still use
            # this server in the future.
            self._server_pool.append(s)
            if s.last_attempt is not None and now < s.last_attempt + self.options["reconnect_time_wait"]:
                # Backoff connecting to server if we attempted recently.
                await asyncio.sleep(self.options["reconnect_time_wait"])
            try:
                s.last_attempt = time.monotonic()
                if not self._transport:
                    if s.uri.scheme in ("ws", "wss"):
                        self._transport = WebSocketTransport()
                    else:
                        # use TcpTransport as a fallback
                        self._transport = TcpTransport()
                if s.uri.scheme == "wss":
                    # wss is expected to connect directly with tls
                    await self._transport.connect_tls(
                        s.uri,
                        ssl_context=self.ssl_context,
                        buffer_size=DEFAULT_BUFFER_SIZE,
                        connect_timeout=self.options['connect_timeout']
                    )
                else:
                    await self._transport.connect(
                        s.uri,
                        buffer_size=DEFAULT_BUFFER_SIZE,
                        connect_timeout=self.options['connect_timeout']
                    )
                self._current_server = s
                break
            except Exception as e:
                # Connection failed: record the attempt and move on to the
                # next server in the pool.
                s.last_attempt = time.monotonic()
                s.reconnects += 1

                self._err = e
                await self._error_cb(e)
                continue
    async def _process_err(self, err_msg: str) -> None:
        """
        Processes the raw error message sent by the server
        and close connection with current server.
        """
        if STALE_CONNECTION in err_msg:
            # Stale connections are handled via the reconnect path instead.
            await self._process_op_err(errors.StaleConnectionError())
            return

        if AUTHORIZATION_VIOLATION in err_msg:
            self._err = errors.AuthorizationError()
        else:
            prot_err = err_msg.strip("'")
            m = f"nats: {prot_err}"
            err = errors.Error(m)
            self._err = err

            if PERMISSIONS_ERR in m:
                # Permissions violations do not close the connection.
                await self._error_cb(err)
                return

        do_cbs = False
        if not self.is_connecting:
            do_cbs = True

        # FIXME: Some errors such as 'Invalid Subscription'
        # do not cause the server to close the connection.
        # For now we handle similar as other clients and close.
        asyncio.create_task(self._close(Client.CLOSED, do_cbs))
    async def _process_op_err(self, e: Exception) -> None:
        """
        Process errors which occurred while reading or parsing
        the protocol. If allow_reconnect is enabled it will
        try to switch the server to which it is currently connected
        otherwise it will disconnect.
        """
        if self.is_connecting or self.is_closed or self.is_reconnecting:
            return

        if self.options["allow_reconnect"] and self.is_connected:
            self._status = Client.RECONNECTING
            # Reset the protocol parser state before replaying the stream.
            self._ps.reset()

            if self._reconnection_task is not None and not self._reconnection_task.cancelled():
                # Cancel the previous task in case it may still be running.
                self._reconnection_task.cancel()

            self._reconnection_task = asyncio.get_running_loop().create_task(
                self._attempt_reconnect()
            )
        else:
            self._process_disconnect()
            self._err = e
            await self._close(Client.CLOSED, True)
    async def _attempt_reconnect(self) -> None:
        """
        Background task that tears down the old transport, loops over the
        server pool trying to re-establish the connection, and replays all
        active subscriptions once CONNECT succeeds.
        """
        assert self._current_server, "Client.connect must be called first"
        # Stop the tasks bound to the previous (now broken) transport.
        if self._reading_task is not None and not self._reading_task.cancelled():
            self._reading_task.cancel()
        if self._ping_interval_task is not None and not self._ping_interval_task.cancelled():
            self._ping_interval_task.cancel()
        if self._flusher_task is not None and not self._flusher_task.cancelled():
            self._flusher_task.cancel()

        if self._transport is not None:
            self._transport.close()
            try:
                await self._transport.wait_closed()
            except Exception as e:
                await self._error_cb(e)

        self._err = None
        if self._disconnected_cb is not None:
            await self._disconnected_cb()

        if self.is_closed:
            return

        if "dont_randomize" not in self.options or not self.options["dont_randomize"]:
            shuffle(self._server_pool)

        # Create a future that the client can use to control waiting
        # on the reconnection attempts.
        self._reconnection_task_future = asyncio.Future()
        while True:
            try:
                # Try to establish a TCP connection to a server in
                # the cluster then send CONNECT command to it.
                await self._select_next_server()
                assert self._transport, "_select_next_server must've set _transport"
                await self._process_connect_init()

                # Consider a reconnect to be done once CONNECT was
                # processed by the server successfully.
                self.stats["reconnects"] += 1

                # Reset reconnect attempts for this server
                # since have successfully connected.
                self._current_server.did_connect = True
                self._current_server.reconnects = 0

                # Replay all the subscriptions in case there were some.
                subs_to_remove = []
                for sid, sub in self._subs.items():
                    max_msgs = 0
                    if sub._max_msgs > 0:
                        # If we already hit the message limit, remove the subscription and don't
                        # resubscribe.
                        if sub._received >= sub._max_msgs:
                            subs_to_remove.append(sid)
                            continue
                        # auto unsubscribe the number of messages we have left
                        max_msgs = sub._max_msgs - sub._received

                    sub_cmd = prot_command.sub_cmd(
                        sub._subject, sub._queue, sid
                    )
                    self._transport.write(sub_cmd)

                    if max_msgs > 0:
                        unsub_cmd = prot_command.unsub_cmd(sid, max_msgs)
                        self._transport.write(unsub_cmd)

                for sid in subs_to_remove:
                    self._subs.pop(sid)

                await self._transport.drain()

                # Flush pending data before continuing in connected status.
                # FIXME: Could use future here and wait for an error result
                # to bail earlier in case there are errors in the connection.
                # await self._flush_pending(force_flush=True)
                await self._flush_pending()
                self._status = Client.CONNECTED
                await self.flush()
                if self._reconnected_cb is not None:
                    await self._reconnected_cb()
                self._reconnection_task_future = None
                break
            except errors.NoServersError as e:
                self._err = e
                await self.close()
                break
            except (OSError, errors.Error, asyncio.TimeoutError) as e:
                self._err = e
                await self._error_cb(e)
                self._status = Client.RECONNECTING
                self._current_server.last_attempt = time.monotonic()
                self._current_server.reconnects += 1
            except asyncio.CancelledError:
                break

        # Unblock anyone waiting on the reconnect to settle (see _close).
        if self._reconnection_task_future is not None and not self._reconnection_task_future.cancelled():
            self._reconnection_task_future.set_result(True)
    def _connect_command(self) -> bytes:
        '''
        Generates a JSON string with the params to be used
        when sending CONNECT to the server.

          ->> CONNECT {"lang": "python3"}

        '''
        options = {
            "verbose": self.options["verbose"],
            "pedantic": self.options["pedantic"],
            "lang": __lang__,
            "version": __version__,
            "protocol": PROTOCOL
        }
        if "headers" in self._server_info:
            options["headers"] = self._server_info["headers"]
            # NOTE(review): no_responders reuses the server's "headers"
            # capability flag (both are booleans) — verify this coupling is
            # intentional; no-responders requires header support.
            options["no_responders"] = self._server_info["headers"]

        if self._auth_configured:
            # Auth precedence: nonce signature (NKEYS/JWT), then explicit
            # user/password, then token, then credentials from the URI.
            if "nonce" in self._server_info and self._signature_cb is not None:
                sig = self._signature_cb(self._server_info["nonce"])
                options["sig"] = sig.decode()

                if self._user_jwt_cb is not None:
                    jwt = self._user_jwt_cb()
                    options["jwt"] = jwt.decode()
                elif self._public_nkey is not None:
                    options["nkey"] = self._public_nkey
            # In case there is no password, then consider handle
            # sending a token instead.
            elif self.options["user"] is not None and self.options["password"] is not None:
                options["user"] = self.options["user"]
                options["pass"] = self.options["password"]
            elif self.options["token"] is not None:
                options["auth_token"] = self.options["token"]
            elif self._current_server and self._current_server.uri.username is not None:
                if self._current_server.uri.password is None:
                    # A lone username in the URI is treated as a token.
                    options["auth_token"] = self._current_server.uri.username
                else:
                    options["user"] = self._current_server.uri.username
                    options["pass"] = self._current_server.uri.password

        if self.options["name"] is not None:
            options["name"] = self.options["name"]
        if self.options["no_echo"] is not None:
            options["echo"] = not self.options["no_echo"]

        connect_opts = json.dumps(options, sort_keys=True)
        return b''.join([CONNECT_OP + _SPC_ + connect_opts.encode() + _CRLF_])
    async def _process_ping(self) -> None:
        """
        Process PING sent by server.

        Replies with a PONG and kicks the flusher so it goes out promptly.
        """
        await self._send_command(PONG)
        await self._flush_pending()
async def _process_pong(self) -> None:
"""
Process PONG sent by server.
"""
if len(self._pongs) > 0:
future = self._pongs.pop(0)
future.set_result(True)
self._pongs_received += 1
self._pings_outstanding = 0
def _is_control_message(self, data, header: Dict[str,
str]) -> Optional[str]:
if len(data) > 0:
return None
status = header.get(nats.js.api.Header.STATUS)
if status == CTRL_STATUS:
return header.get(nats.js.api.Header.DESCRIPTION)
return None
    async def _process_headers(self, headers) -> Optional[Dict[str, str]]:
        """
        Parse the raw header block of an HMSG into a dict.

        Handles plain key/value headers, inline status lines, and messages
        combining both; returns None when nothing could be parsed.
        """
        if not headers:
            return None

        hdr: Optional[Dict[str, str]] = None
        raw_headers = headers[NATS_HDR_LINE_SIZE:]

        # If the first character is an empty space, then this is
        # an inline status message sent by the server.
        #
        # NATS/1.0 404\r\n\r\n
        # NATS/1.0 503\r\n\r\n
        # NATS/1.0 404 No Messages\r\n\r\n
        #
        # Note: it is possible to receive a message with both inline status
        # and a set of headers.
        #
        # NATS/1.0 100\r\nIdle Heartbeat\r\nNats-Last-Consumer: 1016\r\nNats-Last-Stream: 1024\r\n\r\n
        #
        if raw_headers[0] == _SPC_BYTE_:
            # Special handling for status messages.
            line = headers[len(NATS_HDR_LINE) + 1:]
            status = line[:STATUS_MSG_LEN]
            desc = line[STATUS_MSG_LEN + 1:len(line) - _CRLF_LEN_ - _CRLF_LEN_]
            stripped_status = status.strip().decode()

            # Process as status only when it is a valid integer.
            hdr = {}
            if stripped_status.isdigit():
                hdr[nats.js.api.Header.STATUS.value] = stripped_status

            # Move the raw_headers to end of line
            i = raw_headers.find(_CRLF_)
            raw_headers = raw_headers[i + _CRLF_LEN_:]

            if len(desc) > 0:
                # Heartbeat messages can have both headers and inline status,
                # check that there are no pending headers to be parsed.
                i = desc.find(_CRLF_)
                if i > 0:
                    hdr[nats.js.api.Header.DESCRIPTION] = desc[:i].decode()
                    parsed_hdr = self._hdr_parser.parsebytes(
                        desc[i + _CRLF_LEN_:]
                    )
                    for k, v in parsed_hdr.items():
                        hdr[k] = v
                else:
                    # Just inline status...
                    hdr[nats.js.api.Header.DESCRIPTION] = desc.decode()

        if not len(raw_headers) > _CRLF_LEN_:
            return hdr

        #
        # Example header without status:
        #
        # NATS/1.0\r\nfoo: bar\r\nhello: world
        #
        raw_headers = headers[NATS_HDR_LINE_SIZE + _CRLF_LEN_:]
        try:
            if parse_email:
                parsed_hdr = parse_email(raw_headers).headers
            else:
                parsed_hdr = {
                    k.strip(): v.strip()
                    for k, v in self._hdr_parser.parsebytes(raw_headers).items()
                }
            if hdr:
                hdr.update(parsed_hdr)
            else:
                hdr = parsed_hdr
            # NOTE(review): parse_email can yield keys containing
            # whitespace, which the protocol disallows — drop them.
            if parse_email:
                to_delete = []
                for k in hdr.keys():
                    if any(c in k for c in string.whitespace):
                        to_delete.append(k)
                for k in to_delete:
                    del hdr[k]
        except Exception as e:
            await self._error_cb(e)
            return hdr

        return hdr or None
    async def _process_msg(
        self,
        sid: int,
        subject: bytes,
        reply: bytes,
        data: bytes,
        headers: bytes,
    ) -> None:
        """
        Process MSG sent by server.

        Updates inbound stats, looks up the subscription by ``sid`` and
        delivers the message to it (or resolves an old-style request
        future).  When the subscription carries a JetStream context
        (``sub._jsi``) it additionally handles control messages: idle
        heartbeats, flow control requests, and ordered-consumer sequence
        tracking.
        """
        payload_size = len(data)
        self.stats['in_msgs'] += 1
        self.stats['in_bytes'] += payload_size
        sub = self._subs.get(sid)
        if not sub:
            # Skip in case no subscription present.
            return
        sub._received += 1
        if sub._max_msgs > 0 and sub._received >= sub._max_msgs:
            # Enough messages so can throwaway subscription now, the
            # pending messages will still be in the subscription
            # internal queue and the task will finish once the last
            # message is processed.
            self._subs.pop(sid, None)
        hdr = await self._process_headers(headers)
        msg = self._build_message(sid, subject, reply, data, hdr)
        if not msg:
            return
        # Process flow control messages in case of using a JetStream context.
        ctrl_msg = None
        # fcReply holds a subject to publish an empty flow-control response
        # to; it can be set either from a stalled-consumer heartbeat header
        # or from a "Flow" control message below.
        fcReply = None
        if sub._jsi:
            #########################################
            #                                       #
            # JetStream Control Messages Processing #
            #                                       #
            #########################################
            jsi = sub._jsi
            if hdr:
                ctrl_msg = self._is_control_message(data, hdr)
                # Check if the heartbeat has a "Consumer Stalled" header, if
                # so, the value is the FC reply to send a nil message to.
                # We will send it at the end of this function.
                if ctrl_msg and ctrl_msg.startswith("Idle"):
                    fcReply = hdr.get(nats.js.api.Header.CONSUMER_STALLED)
            # OrderedConsumer: checkOrderedMsgs
            if not ctrl_msg and jsi._ordered and msg.reply:
                did_reset = None
                tokens = Msg.Metadata._get_metadata_fields(msg.reply)
                # FIXME: Support JS Domains.
                # tokens[5]/tokens[6] are read as the stream and delivery
                # sequence numbers from the reply-subject metadata.
                sseq = int(tokens[5])
                dseq = int(tokens[6])
                if dseq != jsi._dseq:
                    # Pick up from where we last left.
                    did_reset = await jsi.reset_ordered_consumer(jsi._sseq + 1)
                else:
                    # Update our tracking
                    jsi._dseq = dseq + 1
                    jsi._sseq = sseq
                if did_reset:
                    return
        # Skip processing if this is a control message.
        if not ctrl_msg:
            # Check if it is an old style request.
            if sub._future:
                if sub._future.cancelled():
                    # Already gave up, nothing to do.
                    return
                sub._future.set_result(msg)
                return
            # Let subscription wait_for_msgs coroutine process the messages,
            # but in case sending to the subscription task would block,
            # then consider it to be an slow consumer and drop the message.
            try:
                sub._pending_size += payload_size
                # allow setting pending_bytes_limit to 0 to disable
                if sub._pending_bytes_limit > 0 and sub._pending_size >= sub._pending_bytes_limit:
                    # Subtract the bytes since the message will be thrown away
                    # so it would not be pending data.
                    sub._pending_size -= payload_size
                    await self._error_cb(
                        errors.SlowConsumerError(
                            subject=msg.subject,
                            reply=msg.reply,
                            sid=sid,
                            sub=sub
                        )
                    )
                    return
                sub._pending_queue.put_nowait(msg)
            except asyncio.QueueFull:
                # Undo the accounting done above (msg.data is the same
                # bytes object as data, so this equals payload_size).
                sub._pending_size -= len(msg.data)
                await self._error_cb(
                    errors.SlowConsumerError(
                        subject=msg.subject, reply=msg.reply, sid=sid, sub=sub
                    )
                )
            # Store the ACK metadata from the message to
            # compare later on with the received heartbeat.
            if sub._jsi:
                sub._jsi.track_sequences(msg.reply)
        elif ctrl_msg.startswith("Flow") and msg.reply and sub._jsi:
            # This is a flow control message.
            # We will schedule the send of the FC reply once we have delivered the
            # DATA message that was received before this flow control message, which
            # has sequence `jsi.fciseq`. However, it is possible that this message
            # has already been delivered, in that case, we need to send the FC reply now.
            if sub.delivered >= sub._jsi._fciseq:
                fcReply = msg.reply
            else:
                # Schedule a reply after the previous message is delivered.
                sub._jsi.schedule_flow_control_response(msg.reply)
        # Handle flow control response.
        if fcReply:
            await self.publish(fcReply)
        if ctrl_msg and not msg.reply and ctrl_msg.startswith("Idle"):
            if sub._jsi:
                await sub._jsi.check_for_sequence_mismatch(msg)
def _build_message(
self,
sid: int,
subject: bytes,
reply: bytes,
data: bytes,
headers: Optional[Dict[str, str]],
):
return self.msg_class(
subject=subject.decode(),
reply=reply.decode(),
data=data,
headers=headers,
_client=self,
_sid=sid,
)
    def _process_disconnect(self) -> None:
        """
        Process disconnection from the server and set client status
        to DISCONNECTED.
        """
        # Only the status flag changes here; no sockets or tasks are
        # touched by this method.
        self._status = Client.DISCONNECTED
    def _process_info(
        self, info: Dict[str, Any], initial_connection: bool = False
    ) -> None:
        """
        Process INFO lines sent by the server to reconfigure client
        with latest updates from cluster to enable server discovery.

        Any new hosts advertised via ``connect_urls`` are appended to the
        server pool (deduplicated by netloc, optionally shuffled).  The
        discovered-server callback fires only for non-initial connections.
        """
        assert self._current_server, "Client.connect must be called first"
        if 'connect_urls' in info:
            if info['connect_urls']:
                connect_urls = []
                for connect_url in info['connect_urls']:
                    # Advertised URLs carry no scheme, so inherit it from
                    # the server we are currently connected to.
                    scheme = ''
                    if self._current_server.uri.scheme == 'tls':
                        scheme = 'tls'
                    else:
                        scheme = 'nats'
                    uri = urlparse(f"{scheme}://{connect_url}")
                    srv = Srv(uri)
                    srv.discovered = True
                    # Check whether we should reuse the original hostname.
                    if 'tls_required' in self._server_info and self._server_info['tls_required'] \
                            and self._host_is_ip(uri.hostname):
                        srv.tls_name = self._current_server.uri.hostname
                    # Filter for any similar server in the server pool already.
                    should_add = True
                    for s in self._server_pool:
                        if uri.netloc == s.uri.netloc:
                            should_add = False
                    if should_add:
                        connect_urls.append(srv)
                if self.options["dont_randomize"] is not True:
                    shuffle(connect_urls)
                for srv in connect_urls:
                    self._server_pool.append(srv)
                if not initial_connection and connect_urls and self._discovered_server_cb:
                    self._discovered_server_cb()
def _host_is_ip(self, connect_url: Optional[str]) -> bool:
if connect_url is None:
return False
try:
ipaddress.ip_address(connect_url)
return True
except Exception:
return False
    async def _process_connect_init(self) -> None:
        """
        Process INFO received from the server and CONNECT to the server
        with authentication. It is also responsible of setting up the
        reading and ping interval tasks from the client.

        Raises :class:`errors.Error` on a malformed INFO line, a JSON
        parse failure, or an -ERR response during the handshake.
        """
        assert self._transport, "must be called only from Client.connect"
        assert self._current_server, "must be called only from Client.connect"
        self._status = Client.CONNECTING
        # First line from the server must be the INFO protocol message.
        connection_completed = self._transport.readline()
        info_line = await asyncio.wait_for(
            connection_completed, self.options["connect_timeout"]
        )
        if INFO_OP not in info_line:
            # FIXME: Handle PING/PONG arriving first as well.
            raise errors.Error(
                "nats: empty response from server when expecting INFO message"
            )
        _, info = info_line.split(INFO_OP + _SPC_, 1)
        try:
            srv_info = json.loads(info.decode())
            self._server_info = srv_info
        except Exception:
            raise errors.Error("nats: info message, json parse error")
        # In case 'auth_required' is part of INFO, then need to send credentials.
        if srv_info.get("auth_required", False):
            self._auth_configured = True
        self._process_info(srv_info, initial_connection=True)
        if 'version' in self._server_info:
            self._current_server.server_version = self._server_info['version']
        if 'max_payload' in self._server_info:
            self._max_payload = self._server_info["max_payload"]
        if 'client_id' in self._server_info:
            self._client_id = self._server_info["client_id"]
        if 'tls_required' in self._server_info and self._server_info[
                'tls_required']:
            # Check whether to reuse the original hostname for an implicit route.
            hostname = None
            if "tls_hostname" in self.options:
                hostname = self.options["tls_hostname"]
            elif self._current_server.tls_name is not None:
                hostname = self._current_server.tls_name
            else:
                hostname = self._current_server.uri.hostname
            await self._transport.drain()  # just in case something is left
            # connect to transport via tls
            await self._transport.connect_tls(
                hostname,
                self.ssl_context,
                DEFAULT_BUFFER_SIZE,
                self.options['connect_timeout'],
            )
        # Refresh state of parser upon reconnect.
        if self.is_reconnecting:
            self._ps.reset()
        assert self._transport
        connect_cmd = self._connect_command()
        self._transport.write(connect_cmd)
        await self._transport.drain()
        if self.options["verbose"]:
            # In verbose mode the server acknowledges CONNECT with +OK
            # (or -ERR on failure) before the PING/PONG exchange.
            future = self._transport.readline()
            next_op = await asyncio.wait_for(
                future, self.options["connect_timeout"]
            )
            if OK_OP in next_op:
                # Do nothing
                pass
            elif ERR_OP in next_op:
                err_line = next_op.decode()
                _, err_msg = err_line.split(" ", 1)
                # FIXME: Maybe handling could be more special here,
                # checking for errors.AuthorizationError for example.
                # await self._process_err(err_msg)
                raise errors.Error("nats: " + err_msg.rstrip('\r\n'))
        self._transport.write(PING_PROTO)
        await self._transport.drain()
        future = self._transport.readline()
        next_op = await asyncio.wait_for(
            future, self.options["connect_timeout"]
        )
        if PONG_PROTO in next_op:
            self._status = Client.CONNECTED
        elif ERR_OP in next_op:
            err_line = next_op.decode()
            _, err_msg = err_line.split(" ", 1)
            # FIXME: Maybe handling could be more special here,
            # checking for ErrAuthorization for example.
            # await self._process_err(err_msg)
            raise errors.Error("nats: " + err_msg.rstrip('\r\n'))
        # NOTE(review): this second PONG_PROTO check duplicates the one
        # just above (same `next_op`) and is redundant; kept as-is to
        # preserve behavior byte-for-byte.
        if PONG_PROTO in next_op:
            self._status = Client.CONNECTED
        # Kick off the background tasks: reader, ping interval, flusher.
        self._reading_task = asyncio.get_running_loop().create_task(
            self._read_loop()
        )
        self._pongs = []
        self._pings_outstanding = 0
        self._ping_interval_task = asyncio.get_running_loop().create_task(
            self._ping_interval()
        )
        # Task for kicking the flusher queue
        self._flusher_task = asyncio.get_running_loop().create_task(
            self._flusher()
        )
async def _send_ping(
self, future: Optional[asyncio.Future] = None
) -> None:
assert self._transport, "Client.connect must be called first"
if future is None:
future = asyncio.Future()
self._pongs.append(future)
self._transport.write(PING_PROTO)
self._pending_data_size += len(PING_PROTO)
await self._flush_pending()
    async def _flusher(self) -> None:
        """
        Coroutine which continuously tries to consume pending commands
        and then flushes them to the socket.

        Each item pulled from the flush queue is a future; it is always
        resolved in the ``finally`` block so callers awaiting a flush are
        unblocked even when the write fails or the loop is breaking out.
        """
        assert self._transport, "Client.connect must be called first"
        assert self._flush_queue, "Client.connect must be called first"
        while True:
            if not self.is_connected or self.is_connecting:
                break
            future: asyncio.Future = await self._flush_queue.get()
            try:
                if self._pending_data_size > 0:
                    # Copy the pending list before handing it to the
                    # transport, then reset the accumulation buffers.
                    self._transport.writelines(self._pending[:])
                    self._pending = []
                    self._pending_data_size = 0
                    await self._transport.drain()
            except OSError as e:
                await self._error_cb(e)
                await self._process_op_err(e)
                break
            except (asyncio.CancelledError, RuntimeError, AttributeError):
                # RuntimeError in case the event loop is closed
                break
            finally:
                future.set_result(None)
    async def _ping_interval(self) -> None:
        """Background task: send PINGs every ``ping_interval`` seconds.

        If more than ``max_outstanding_pings`` PINGs go unanswered the
        connection is considered stale and the op-error handler is
        invoked, terminating this task.
        """
        while True:
            await asyncio.sleep(self.options["ping_interval"])
            if not self.is_connected:
                # Skip this tick while disconnected; keep the task alive.
                continue
            try:
                self._pings_outstanding += 1
                if self._pings_outstanding > self.options[
                        "max_outstanding_pings"]:
                    await self._process_op_err(ErrStaleConnection())
                    return
                await self._send_ping()
            except (asyncio.CancelledError, RuntimeError, AttributeError):
                break
            # except asyncio.InvalidStateError:
            #     pass
    async def _read_loop(self) -> None:
        """
        Coroutine which gathers bytes sent by the server
        and feeds them to the protocol parser.
        In case of error while reading, it will stop running
        and its task has to be rescheduled.
        """
        while True:
            try:
                # Stop reading once the client is closed or while a
                # reconnect is in progress (a new read loop is started then).
                should_bail = self.is_closed or self.is_reconnecting
                if should_bail or self._transport is None:
                    break
                if self.is_connected and self._transport.at_eof():
                    # Server closed the connection unexpectedly.
                    err = errors.UnexpectedEOF()
                    await self._error_cb(err)
                    await self._process_op_err(err)
                    break
                b = await self._transport.read(DEFAULT_BUFFER_SIZE)
                await self._ps.parse(b)
            except errors.ProtocolError:
                await self._process_op_err(errors.ProtocolError())
                break
            except OSError as e:
                await self._process_op_err(e)
                break
            except asyncio.CancelledError:
                break
            except Exception as ex:
                # Unexpected failure: log it and stop the loop rather than
                # letting the exception escape the task.
                _logger.error('nats: encountered error', exc_info=ex)
                break
            # except asyncio.InvalidStateError:
            #     pass
async def __aenter__(self) -> "Client":
"""For when NATS client is used in a context manager"""
return self
    async def __aexit__(self, *exc_info) -> None:
        """Close connection to NATS when used in a context manager.

        Always closes, regardless of whether an exception occurred
        (``do_cbs=True`` — presumably runs the close callbacks; see
        ``_close``).
        """
        await self._close(Client.CLOSED, do_cbs=True)
    def jetstream(self, **opts) -> nats.js.JetStreamContext:
        """
        jetstream returns a context that can be used to produce and consume
        messages from NATS JetStream.

        :param prefix: Default JetStream API Prefix.
        :param domain: Optional domain used by the JetStream API.
        :param timeout: Timeout for all JS API actions.

        ::

            import asyncio
            import nats

            async def main():
                nc = await nats.connect()
                js = nc.jetstream()

                await js.add_stream(name='hello', subjects=['hello'])
                ack = await js.publish('hello', b'Hello JS!')
                print(f'Ack: stream={ack.stream}, sequence={ack.seq}')
                # Ack: stream=hello, sequence=1
                await nc.close()

            if __name__ == '__main__':
                asyncio.run(main())
        """
        # The context is constructed around this client instance, which it
        # uses for all transport.
        return nats.js.JetStreamContext(self, **opts)
def jsm(self, **opts) -> nats.js.JetStreamManager:
"""JetStream context for managing JetStream via JS API"""
return nats.js.JetStreamManager(self, **opts) | PypiClean |
/bimdata_api_client-9.18.1-py3-none-any.whl/bimdata_api_client/model/user_project.py | import re # noqa: F401
import sys # noqa: F401
from bimdata_api_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from bimdata_api_client.exceptions import ApiAttributeError
class UserProject(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Enum constraint for `role` (see _from_openapi_data docstring:
    # 100=admin, 50=user, 25=guest).
    allowed_values = {
        ('role',): {
            '100': 100,
            '50': 50,
            '25': 25,
        },
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'id': (int,),  # noqa: E501
            'user_id': (int, none_type,),  # noqa: E501
            'invitation_id': (int, none_type,),  # noqa: E501
            'email': (str,),  # noqa: E501
            'firstname': (str, none_type,),  # noqa: E501
            'lastname': (str, none_type,),  # noqa: E501
            'profile_picture': (str, none_type,),  # noqa: E501
            'sub': (str, none_type,),  # noqa: E501
            'role': (int,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'id': 'id',  # noqa: E501
        'user_id': 'user_id',  # noqa: E501
        'invitation_id': 'invitation_id',  # noqa: E501
        'email': 'email',  # noqa: E501
        'firstname': 'firstname',  # noqa: E501
        'lastname': 'lastname',  # noqa: E501
        'profile_picture': 'profile_picture',  # noqa: E501
        'sub': 'sub',  # noqa: E501
        'role': 'role',  # noqa: E501
    }

    # Every declared attribute is read-only: __init__ raises
    # ApiAttributeError if any of them is passed; use _from_openapi_data
    # to build instances from server responses.
    read_only_vars = {
        'id',  # noqa: E501
        'user_id',  # noqa: E501
        'invitation_id',  # noqa: E501
        'email',  # noqa: E501
        'firstname',  # noqa: E501
        'lastname',  # noqa: E501
        'profile_picture',  # noqa: E501
        'sub',  # noqa: E501
        'role',  # noqa: E501
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, user_id, invitation_id, email, firstname, lastname, profile_picture, sub, role, *args, **kwargs):  # noqa: E501
        """UserProject - a model defined in OpenAPI

        Args:
            id (int):
            user_id (int, none_type):
            invitation_id (int, none_type):
            email (str):
            firstname (str, none_type):
            lastname (str, none_type):
            profile_picture (str, none_type):
            sub (str, none_type):
            role (int): * `100` - admin * `50` - user * `25` - guest

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.id = id
        self.user_id = user_id
        self.invitation_id = invitation_id
        self.email = email
        self.firstname = firstname
        self.lastname = lastname
        self.profile_picture = profile_picture
        self.sub = sub
        self.role = role
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """UserProject - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Fix: stripped the dataset-concatenation residue
            # ("| PypiClean") that was fused onto the raise statement
            # below and would have caused a NameError instead of the
            # intended ApiAttributeError.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
/pulumi_google_native-0.31.2a1689827148.tar.gz/pulumi_google_native-0.31.2a1689827148/pulumi_google_native/beyondcorp/v1alpha/net_connection_iam_binding.py |
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import iam as _iam
__all__ = ['NetConnectionIamBindingArgs', 'NetConnectionIamBinding']
@pulumi.input_type
class NetConnectionIamBindingArgs:
    # Input-argument bag for the NetConnectionIamBinding resource below;
    # state is stored via pulumi.set/get rather than plain attributes.
    def __init__(__self__, *,
                 members: pulumi.Input[Sequence[pulumi.Input[str]]],
                 name: pulumi.Input[str],
                 role: pulumi.Input[str],
                 condition: Optional[pulumi.Input['_iam.v1.ConditionArgs']] = None):
        """
        The set of arguments for constructing a NetConnectionIamBinding resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] members: Identities that will be granted the privilege in role. Each entry can have one of the following values:
               * user:{emailid}: An email address that represents a specific Google account. For example, [email protected] or [email protected].
               * serviceAccount:{emailid}: An email address that represents a service account. For example, [email protected].
               * group:{emailid}: An email address that represents a Google group. For example, [email protected].
               * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        :param pulumi.Input[str] name: The name of the resource to manage IAM policies for.
        :param pulumi.Input[str] role: The role that should be applied. Only one `IamBinding` can be used per role.
        :param pulumi.Input['_iam.v1.ConditionArgs'] condition: An IAM Condition for a given binding.
        """
        pulumi.set(__self__, "members", members)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "role", role)
        # `condition` is the only optional argument.
        if condition is not None:
            pulumi.set(__self__, "condition", condition)

    @property
    @pulumi.getter
    def members(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        Identities that will be granted the privilege in role. Each entry can have one of the following values:
        * user:{emailid}: An email address that represents a specific Google account. For example, [email protected] or [email protected].
        * serviceAccount:{emailid}: An email address that represents a service account. For example, [email protected].
        * group:{emailid}: An email address that represents a Google group. For example, [email protected].
        * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        """
        return pulumi.get(self, "members")

    @members.setter
    def members(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "members", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the resource to manage IAM policies for.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def role(self) -> pulumi.Input[str]:
        """
        The role that should be applied. Only one `IamBinding` can be used per role.
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: pulumi.Input[str]):
        pulumi.set(self, "role", value)

    @property
    @pulumi.getter
    def condition(self) -> Optional[pulumi.Input['_iam.v1.ConditionArgs']]:
        """
        An IAM Condition for a given binding.
        """
        return pulumi.get(self, "condition")

    @condition.setter
    def condition(self, value: Optional[pulumi.Input['_iam.v1.ConditionArgs']]):
        pulumi.set(self, "condition", value)
class NetConnectionIamBinding(pulumi.CustomResource):
    # Auto-generated Pulumi resource wrapping the BeyondCorp v1alpha
    # NetConnection IAM binding API.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 condition: Optional[pulumi.Input[pulumi.InputType['_iam.v1.ConditionArgs']]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['_iam.v1.ConditionArgs']] condition: An IAM Condition for a given binding.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] members: Identities that will be granted the privilege in role. Each entry can have one of the following values:
               * user:{emailid}: An email address that represents a specific Google account. For example, [email protected] or [email protected].
               * serviceAccount:{emailid}: An email address that represents a service account. For example, [email protected].
               * group:{emailid}: An email address that represents a Google group. For example, [email protected].
               * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        :param pulumi.Input[str] name: The name of the resource to manage IAM policies for.
        :param pulumi.Input[str] role: The role that should be applied. Only one `IamBinding` can be used per role.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: NetConnectionIamBindingArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.

        :param str resource_name: The name of the resource.
        :param NetConnectionIamBindingArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either an Args object
        # or keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(NetConnectionIamBindingArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    # Shared implementation behind both __init__ overloads.
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 condition: Optional[pulumi.Input[pulumi.InputType['_iam.v1.ConditionArgs']]] = None,
                 members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 role: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = NetConnectionIamBindingArgs.__new__(NetConnectionIamBindingArgs)

            __props__.__dict__["condition"] = condition
            # members/name/role are required unless looking up by URN.
            if members is None and not opts.urn:
                raise TypeError("Missing required property 'members'")
            __props__.__dict__["members"] = members
            if name is None and not opts.urn:
                raise TypeError("Missing required property 'name'")
            __props__.__dict__["name"] = name
            if role is None and not opts.urn:
                raise TypeError("Missing required property 'role'")
            __props__.__dict__["role"] = role
            # Output-only properties start unset.
            __props__.__dict__["etag"] = None
            __props__.__dict__["project"] = None
        super(NetConnectionIamBinding, __self__).__init__(
            'google-native:beyondcorp/v1alpha:NetConnectionIamBinding',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'NetConnectionIamBinding':
        """
        Get an existing NetConnectionIamBinding resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # All properties start as None here; presumably the Pulumi engine
        # hydrates them from the saved state identified by `id`.
        __props__ = NetConnectionIamBindingArgs.__new__(NetConnectionIamBindingArgs)

        __props__.__dict__["condition"] = None
        __props__.__dict__["etag"] = None
        __props__.__dict__["members"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["project"] = None
        __props__.__dict__["role"] = None
        return NetConnectionIamBinding(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def condition(self) -> pulumi.Output[Optional['_iam.v1.outputs.Condition']]:
        """
        An IAM Condition for a given binding. See https://cloud.google.com/iam/docs/conditions-overview for additional details.
        """
        return pulumi.get(self, "condition")

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        The etag of the resource's IAM policy.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter
    def members(self) -> pulumi.Output[Sequence[str]]:
        """
        Identities that will be granted the privilege in role. Each entry can have one of the following values:
        * user:{emailid}: An email address that represents a specific Google account. For example, [email protected] or [email protected].
        * serviceAccount:{emailid}: An email address that represents a service account. For example, [email protected].
        * group:{emailid}: An email address that represents a Google group. For example, [email protected].
        * domain:{domain}: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
        """
        return pulumi.get(self, "members")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource to manage IAM policies for.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The project in which the resource belongs. If it is not provided, a default will be supplied.
        """
        return pulumi.get(self, "project")

    @property
    @pulumi.getter
    def role(self) -> pulumi.Output[str]:
        """
        The role that should be applied. Only one `IamBinding` can be used per role.
        """
        return pulumi.get(self, "role")
/jupyros-0.7.0a0.tar.gz/jupyros-0.7.0a0/js/node_modules/three/examples/js/ShaderTerrain.js | THREE.ShaderTerrain = {
/* -------------------------------------------------------------------------
// Dynamic terrain shader
// - Blinn-Phong
// - height + normal + diffuse1 + diffuse2 + specular + detail maps
// - point, directional and hemisphere lights (use with "lights: true" material option)
// - shadow maps receiving
------------------------------------------------------------------------- */
'terrain' : {
uniforms: THREE.UniformsUtils.merge( [
THREE.UniformsLib[ "fog" ],
THREE.UniformsLib[ "lights" ],
{
"enableDiffuse1": { value: 0 },
"enableDiffuse2": { value: 0 },
"enableSpecular": { value: 0 },
"enableReflection": { value: 0 },
"tDiffuse1": { value: null },
"tDiffuse2": { value: null },
"tDetail": { value: null },
"tNormal": { value: null },
"tSpecular": { value: null },
"tDisplacement": { value: null },
"uNormalScale": { value: 1.0 },
"uDisplacementBias": { value: 0.0 },
"uDisplacementScale": { value: 1.0 },
"diffuse": { value: new THREE.Color( 0xeeeeee ) },
"specular": { value: new THREE.Color( 0x111111 ) },
"shininess": { value: 30 },
"opacity": { value: 1 },
"uRepeatBase": { value: new THREE.Vector2( 1, 1 ) },
"uRepeatOverlay": { value: new THREE.Vector2( 1, 1 ) },
"uOffset": { value: new THREE.Vector2( 0, 0 ) }
}
] ),
fragmentShader: [
"uniform vec3 diffuse;",
"uniform vec3 specular;",
"uniform float shininess;",
"uniform float opacity;",
"uniform bool enableDiffuse1;",
"uniform bool enableDiffuse2;",
"uniform bool enableSpecular;",
"uniform sampler2D tDiffuse1;",
"uniform sampler2D tDiffuse2;",
"uniform sampler2D tDetail;",
"uniform sampler2D tNormal;",
"uniform sampler2D tSpecular;",
"uniform sampler2D tDisplacement;",
"uniform float uNormalScale;",
"uniform vec2 uRepeatOverlay;",
"uniform vec2 uRepeatBase;",
"uniform vec2 uOffset;",
"varying vec3 vTangent;",
"varying vec3 vBinormal;",
"varying vec3 vNormal;",
"varying vec2 vUv;",
"varying vec3 vViewPosition;",
THREE.ShaderChunk[ "common" ],
THREE.ShaderChunk[ "bsdfs" ],
THREE.ShaderChunk[ "lights_pars" ],
THREE.ShaderChunk[ "shadowmap_pars_fragment" ],
THREE.ShaderChunk[ "fog_pars_fragment" ],
"float calcLightAttenuation( float lightDistance, float cutoffDistance, float decayExponent ) {",
"if ( decayExponent > 0.0 ) {",
"return pow( saturate( - lightDistance / cutoffDistance + 1.0 ), decayExponent );",
"}",
"return 1.0;",
"}",
"void main() {",
"vec3 outgoingLight = vec3( 0.0 );", // outgoing light does not have an alpha, the surface does
"vec4 diffuseColor = vec4( diffuse, opacity );",
"vec3 specularTex = vec3( 1.0 );",
"vec2 uvOverlay = uRepeatOverlay * vUv + uOffset;",
"vec2 uvBase = uRepeatBase * vUv;",
"vec3 normalTex = texture2D( tDetail, uvOverlay ).xyz * 2.0 - 1.0;",
"normalTex.xy *= uNormalScale;",
"normalTex = normalize( normalTex );",
"if( enableDiffuse1 && enableDiffuse2 ) {",
"vec4 colDiffuse1 = texture2D( tDiffuse1, uvOverlay );",
"vec4 colDiffuse2 = texture2D( tDiffuse2, uvOverlay );",
"colDiffuse1 = GammaToLinear( colDiffuse1, float( GAMMA_FACTOR ) );",
"colDiffuse2 = GammaToLinear( colDiffuse2, float( GAMMA_FACTOR ) );",
"diffuseColor *= mix ( colDiffuse1, colDiffuse2, 1.0 - texture2D( tDisplacement, uvBase ) );",
" } else if( enableDiffuse1 ) {",
"diffuseColor *= texture2D( tDiffuse1, uvOverlay );",
"} else if( enableDiffuse2 ) {",
"diffuseColor *= texture2D( tDiffuse2, uvOverlay );",
"}",
"if( enableSpecular )",
"specularTex = texture2D( tSpecular, uvOverlay ).xyz;",
"mat3 tsb = mat3( vTangent, vBinormal, vNormal );",
"vec3 finalNormal = tsb * normalTex;",
"vec3 normal = normalize( finalNormal );",
"vec3 viewPosition = normalize( vViewPosition );",
"vec3 totalDiffuseLight = vec3( 0.0 );",
"vec3 totalSpecularLight = vec3( 0.0 );",
// point lights
"#if NUM_POINT_LIGHTS > 0",
"for ( int i = 0; i < NUM_POINT_LIGHTS; i ++ ) {",
"vec3 lVector = pointLights[ i ].position + vViewPosition.xyz;",
"float attenuation = calcLightAttenuation( length( lVector ), pointLights[ i ].distance, pointLights[ i ].decay );",
"lVector = normalize( lVector );",
"vec3 pointHalfVector = normalize( lVector + viewPosition );",
"float pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );",
"float pointDiffuseWeight = max( dot( normal, lVector ), 0.0 );",
"float pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );",
"totalDiffuseLight += attenuation * pointLights[ i ].color * pointDiffuseWeight;",
"totalSpecularLight += attenuation * pointLights[ i ].color * specular * pointSpecularWeight * pointDiffuseWeight;",
"}",
"#endif",
// directional lights
"#if NUM_DIR_LIGHTS > 0",
"vec3 dirDiffuse = vec3( 0.0 );",
"vec3 dirSpecular = vec3( 0.0 );",
"for( int i = 0; i < NUM_DIR_LIGHTS; i++ ) {",
"vec3 dirVector = directionalLights[ i ].direction;",
"vec3 dirHalfVector = normalize( dirVector + viewPosition );",
"float dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );",
"float dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );",
"float dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );",
"totalDiffuseLight += directionalLights[ i ].color * dirDiffuseWeight;",
"totalSpecularLight += directionalLights[ i ].color * specular * dirSpecularWeight * dirDiffuseWeight;",
"}",
"#endif",
// hemisphere lights
"#if NUM_HEMI_LIGHTS > 0",
"vec3 hemiDiffuse = vec3( 0.0 );",
"vec3 hemiSpecular = vec3( 0.0 );",
"for( int i = 0; i < NUM_HEMI_LIGHTS; i ++ ) {",
"vec3 lVector = hemisphereLightDirection[ i ];",
// diffuse
"float dotProduct = dot( normal, lVector );",
"float hemiDiffuseWeight = 0.5 * dotProduct + 0.5;",
"totalDiffuseLight += mix( hemisphereLights[ i ].groundColor, hemisphereLights[ i ].skyColor, hemiDiffuseWeight );",
// specular (sky light)
"float hemiSpecularWeight = 0.0;",
"vec3 hemiHalfVectorSky = normalize( lVector + viewPosition );",
"float hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;",
"hemiSpecularWeight += specularTex.r * max( pow( hemiDotNormalHalfSky, shininess ), 0.0 );",
// specular (ground light)
"vec3 lVectorGround = -lVector;",
"vec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );",
"float hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;",
"hemiSpecularWeight += specularTex.r * max( pow( hemiDotNormalHalfGround, shininess ), 0.0 );",
"totalSpecularLight += specular * mix( hemisphereLights[ i ].groundColor, hemisphereLights[ i ].skyColor, hemiDiffuseWeight ) * hemiSpecularWeight * hemiDiffuseWeight;",
"}",
"#endif",
"outgoingLight += diffuseColor.xyz * ( totalDiffuseLight + ambientLightColor + totalSpecularLight );",
"gl_FragColor = vec4( outgoingLight, diffuseColor.a );", // TODO, this should be pre-multiplied to allow for bright highlights on very transparent objects
THREE.ShaderChunk[ "fog_fragment" ],
"}"
].join( "\n" ),
vertexShader: [
"attribute vec4 tangent;",
"uniform vec2 uRepeatBase;",
"uniform sampler2D tNormal;",
"#ifdef VERTEX_TEXTURES",
"uniform sampler2D tDisplacement;",
"uniform float uDisplacementScale;",
"uniform float uDisplacementBias;",
"#endif",
"varying vec3 vTangent;",
"varying vec3 vBinormal;",
"varying vec3 vNormal;",
"varying vec2 vUv;",
"varying vec3 vViewPosition;",
THREE.ShaderChunk[ "shadowmap_pars_vertex" ],
THREE.ShaderChunk[ "fog_pars_vertex" ],
"void main() {",
"vNormal = normalize( normalMatrix * normal );",
// tangent and binormal vectors
"vTangent = normalize( normalMatrix * tangent.xyz );",
"vBinormal = cross( vNormal, vTangent ) * tangent.w;",
"vBinormal = normalize( vBinormal );",
// texture coordinates
"vUv = uv;",
"vec2 uvBase = uv * uRepeatBase;",
// displacement mapping
"#ifdef VERTEX_TEXTURES",
"vec3 dv = texture2D( tDisplacement, uvBase ).xyz;",
"float df = uDisplacementScale * dv.x + uDisplacementBias;",
"vec3 displacedPosition = normal * df + position;",
"vec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );",
"vec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );",
"#else",
"vec4 worldPosition = modelMatrix * vec4( position, 1.0 );",
"vec4 mvPosition = modelViewMatrix * vec4( position, 1.0 );",
"#endif",
"gl_Position = projectionMatrix * mvPosition;",
"vViewPosition = -mvPosition.xyz;",
"vec3 normalTex = texture2D( tNormal, uvBase ).xyz * 2.0 - 1.0;",
"vNormal = normalMatrix * normalTex;",
THREE.ShaderChunk[ "shadowmap_vertex" ],
THREE.ShaderChunk[ "fog_vertex" ],
"}"
].join( "\n" )
}
}; | PypiClean |
/dbpedia_ent-0.1.9-py3-none-any.whl/dbpedia_ent/dto/ent/n1/v/trie_vm.py |
d_trie_vm = {'_': ['vm2000', 'vm/386', 'vm-t', 'vm86'],
'a': ['vmayakovsky',
'vmat-203',
'vma-312',
'vma-542',
'vma-112',
'vma-513',
'vma-233',
'vma-141',
'vma-212',
'vma-142',
'vma-543',
'vma-241',
'vma-331',
'vma-311',
'vma-333',
'vma-124',
'vma-211',
'vma-223',
'vma-322',
'vma-131',
'vma-214',
'vma-231',
'vma-217',
'vma-134',
'vma-234',
'vmaqt-1',
'vma-143',
'vma-144',
'vma-323',
'vma-133',
'vma-351',
'vma-251',
'vmaq-2',
'vmat-2',
'vmaq-1',
'vma231',
'vmaq-3',
'vmaq-4',
'vmat-1',
'vmat2',
'vmat1',
'vmap0',
'vmads',
'vmap1',
'vmail',
'vmac',
'vmaj',
'vmat',
'vmas',
'vmap',
'vman',
'vmax',
'vma'],
'b': ['vmbf-333',
'vmbf-231',
'vmb-463',
'vmb-413',
'vmb-433',
'vmb-1',
'vmb-2',
'vmb2'],
'c': ['vmcj-2',
'vmc-2',
'vmcid',
'vmchs',
'vmc-1',
'vmcc',
'vmca',
'vmcv',
'vmc'],
'd': ['vmd2', 'vmdk', 'vmds', 'vmd'],
'e': ['vmessage',
'vme2900',
'vmebus',
'vmedia',
'vmeme',
'vme/k',
'vme/b',
'vme'],
'f': ['vmfat-501',
'vmfat-502',
'vmfat-101',
'vmfa-542',
'vmfa-112',
'vmfa-513',
'vmft-401',
'vmfa-225',
'vmfa-121',
'vmfa-115',
'vmfa-323',
'vmfa-211',
'vmfa-122',
'vmfa-124',
'vmfa-251',
'vmfa-333',
'vmfa-334',
'vmfa-321',
'vmfa-134',
'vmfa-314',
'vmfa-235',
'vmfa-312',
'vmfa-531',
'vmfa-451',
'vmfa-212',
'vmfa-242',
'vmfa-232',
'vmfa-351',
'vmfa-142',
'vmf-218',
'vmf-222',
'vmf-122',
'vmf-132',
'vmf-134',
'vmf-413',
'vmf-231',
'vmf-113',
'vmf-462',
'vmf-334',
'vmf-142',
'vmf-251',
'vmf-111',
'vmf-217',
'vmf-232',
'vmf-323',
'vmf-223',
'vmf-472',
'vmf-234',
'vmf-461',
'vmf-221',
'vmf-213',
'vmf-541',
'vmf-312',
'vmf-452',
'vmf-451',
'vmf-235',
'vmf-314',
'vmf-141',
'vmf-214',
'vmf-512',
'vmf-911',
'vmf-115',
'vmf-441',
'vmf-543',
'vmf-322',
'vmf-155',
'vmf-236',
'vmf-511',
'vmf-311',
'vmf-313',
'vmf-333',
'vmf-123',
'vmf-321',
'vmf-211',
'vmf-112',
'vmf-212',
'vmf-422',
'vmf-351',
'vmf-513',
'vmf-215',
'vmf-124',
'vmf-216',
'vmf311',
'vmf221',
'vmfp-3',
'vmf-1',
'vmfs2',
'vmfs3',
'vmf-2',
'vmfc',
'vmfs',
'vmf'],
'g': ['vmgrt-253',
'vmgr-452',
'vmgr-352',
'vmgr-152',
'vmgr-234',
'vmgr-252',
'vmg'],
'h': ['vmh'],
'i': ['vmigo', 'vmix', 'vmi'],
'j': ['vmj-352', 'vmj-252', 'vmj-253', 'vmj'],
'k': ['vmkernel', 'vmk'],
'l': ['vmlinux', 'vmlinuz', 'vmly&r', 'vml'],
'm': ['vmmt-204',
'vmm-261',
'vmm-161',
'vmm-162',
'vmm-164',
'vmm-262',
'vmm-263',
'vmm-764',
'vmm-561',
'vmm-268',
'vmm-364',
'vmm-363',
'vmm-165',
'vmm-264',
'vmm-266',
'vmm-265',
'vmm-365',
'vmm-362',
'vmm-774',
'vmm-163',
'vmm-166',
'vmm263',
'vmmark',
'vmmc',
'vmma',
'vmm'],
'n': ['vmno', 'vmn'],
'o': ['vmotion',
'vmo-151',
'vmobile',
'vmo-251',
'vmo-155',
'vmo-351',
'vmo-3',
'vmoda',
'vmo-4',
'vmo-1',
'vmoro',
'vmo-6',
'vmo-7',
'vmoto',
'vmo-5',
'vmo-2',
'vmoc',
'vmod',
'vmos',
'vmo'],
'p': ['vmp-254', 'vmps', 'vmpc', 'vmp'],
'r': ['vmro-dpmne',
'vmr-234',
'vmr-252',
'vmr-152',
'vmr-153',
'vmr-253',
'vmr-352',
'vmr-1',
'vmr9',
'vmro',
'vmrc',
'vmr'],
's': ['vmsclustering',
'vmscluster',
'vmsb-245',
'vmsb-343',
'vmsb-234',
'vmsb-331',
'vmsb-341',
'vmsb-342',
'vmsb-235',
'vmsb-151',
'vmsb-141',
'vmsb-144',
'vmsb-333',
'vmsb-134',
'vmsb-464',
'vmsb-236',
'vmsb-131',
'vmsb-243',
'vmsb-142',
'vmsb-132',
'vmsb-244',
'vmsb-231',
'vmsb-232',
'vmsb-233',
'vmsb-334',
'vmstat',
'vmsk/2',
'vms-1',
'vms-2',
'vmsa',
'vmsk',
'vmsl',
'vms'],
't': ['vmtb-151',
'vmtb-232',
'vmtb-464',
'vmtb-463',
'vmtb-234',
'vmtb-233',
'vmtb-134',
'vmtb-144',
'vmtb-231',
'vmtb-141',
'vmtb-132',
'vmtb-341',
'vmt-203',
'vmt-1',
'vmt'],
'u': ['vmusa', 'vmu-3', 'vmu-1', 'vmu-2', 'vmu-4', 'vmu'],
'w': ['vmware-thinstall', 'vmw.lic', 'vmworld', 'vmware', 'vmwi'],
'x': ['vmx128', 'vmx-1', 'vmx']} | PypiClean |
/PyCIM-15.15.0.tar.gz/PyCIM-15.15.0/CIM14/ENTSOE/Dynamics/IEC61970/Dynamics/DynamicsMetaBlockConnectable.py |
from CIM14.ENTSOE.Dynamics.IEC61970.Core.CoreIdentifiedObject import CoreIdentifiedObject
class DynamicsMetaBlockConnectable(CoreIdentifiedObject):
def __init__(self, MetaBlockParameterReference=None, StandardControlBlock_MetaBlockInputReference=None, MetaBlockStateReference=None, StandardControlBlock_MetaBlockStateReference=None, MetaBlockInputReference=None, StandardControlBlock_MetaBlockParameterReference=None, StandardControlBlock_MetaBlockOutputReference=None, MetaBlockOutputReference=None, *args, **kw_args):
"""Initialises a new 'DynamicsMetaBlockConnectable' instance.
@param MetaBlockParameterReference:
@param StandardControlBlock_MetaBlockInputReference:
@param MetaBlockStateReference:
@param StandardControlBlock_MetaBlockStateReference:
@param MetaBlockInputReference:
@param StandardControlBlock_MetaBlockParameterReference:
@param StandardControlBlock_MetaBlockOutputReference:
@param MetaBlockOutputReference:
"""
self._MetaBlockParameterReference = []
self.MetaBlockParameterReference = [] if MetaBlockParameterReference is None else MetaBlockParameterReference
self._StandardControlBlock_MetaBlockInputReference = []
self.StandardControlBlock_MetaBlockInputReference = [] if StandardControlBlock_MetaBlockInputReference is None else StandardControlBlock_MetaBlockInputReference
self._MetaBlockStateReference = []
self.MetaBlockStateReference = [] if MetaBlockStateReference is None else MetaBlockStateReference
self._StandardControlBlock_MetaBlockStateReference = []
self.StandardControlBlock_MetaBlockStateReference = [] if StandardControlBlock_MetaBlockStateReference is None else StandardControlBlock_MetaBlockStateReference
self._MetaBlockInputReference = []
self.MetaBlockInputReference = [] if MetaBlockInputReference is None else MetaBlockInputReference
self._StandardControlBlock_MetaBlockParameterReference = []
self.StandardControlBlock_MetaBlockParameterReference = [] if StandardControlBlock_MetaBlockParameterReference is None else StandardControlBlock_MetaBlockParameterReference
self._StandardControlBlock_MetaBlockOutputReference = []
self.StandardControlBlock_MetaBlockOutputReference = [] if StandardControlBlock_MetaBlockOutputReference is None else StandardControlBlock_MetaBlockOutputReference
self._MetaBlockOutputReference = []
self.MetaBlockOutputReference = [] if MetaBlockOutputReference is None else MetaBlockOutputReference
super(DynamicsMetaBlockConnectable, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["MetaBlockParameterReference", "StandardControlBlock_MetaBlockInputReference", "MetaBlockStateReference", "StandardControlBlock_MetaBlockStateReference", "MetaBlockInputReference", "StandardControlBlock_MetaBlockParameterReference", "StandardControlBlock_MetaBlockOutputReference", "MetaBlockOutputReference"]
_many_refs = ["MetaBlockParameterReference", "StandardControlBlock_MetaBlockInputReference", "MetaBlockStateReference", "StandardControlBlock_MetaBlockStateReference", "MetaBlockInputReference", "StandardControlBlock_MetaBlockParameterReference", "StandardControlBlock_MetaBlockOutputReference", "MetaBlockOutputReference"]
def getMetaBlockParameterReference(self):
"""
"""
return self._MetaBlockParameterReference
def setMetaBlockParameterReference(self, value):
for x in self._MetaBlockParameterReference:
x.MetaBlockConnectable = None
for y in value:
y._MetaBlockConnectable = self
self._MetaBlockParameterReference = value
MetaBlockParameterReference = property(getMetaBlockParameterReference, setMetaBlockParameterReference)
def addMetaBlockParameterReference(self, *MetaBlockParameterReference):
for obj in MetaBlockParameterReference:
obj.MetaBlockConnectable = self
def removeMetaBlockParameterReference(self, *MetaBlockParameterReference):
for obj in MetaBlockParameterReference:
obj.MetaBlockConnectable = None
def getStandardControlBlock_MetaBlockInputReference(self):
"""
"""
return self._StandardControlBlock_MetaBlockInputReference
def setStandardControlBlock_MetaBlockInputReference(self, value):
for x in self._StandardControlBlock_MetaBlockInputReference:
x.StandardControlBlock_MetaBlockConnectable = None
for y in value:
y._StandardControlBlock_MetaBlockConnectable = self
self._StandardControlBlock_MetaBlockInputReference = value
StandardControlBlock_MetaBlockInputReference = property(getStandardControlBlock_MetaBlockInputReference, setStandardControlBlock_MetaBlockInputReference)
def addStandardControlBlock_MetaBlockInputReference(self, *StandardControlBlock_MetaBlockInputReference):
for obj in StandardControlBlock_MetaBlockInputReference:
obj.StandardControlBlock_MetaBlockConnectable = self
def removeStandardControlBlock_MetaBlockInputReference(self, *StandardControlBlock_MetaBlockInputReference):
for obj in StandardControlBlock_MetaBlockInputReference:
obj.StandardControlBlock_MetaBlockConnectable = None
def getMetaBlockStateReference(self):
"""
"""
return self._MetaBlockStateReference
def setMetaBlockStateReference(self, value):
for x in self._MetaBlockStateReference:
x.MetaBlockConnectable = None
for y in value:
y._MetaBlockConnectable = self
self._MetaBlockStateReference = value
MetaBlockStateReference = property(getMetaBlockStateReference, setMetaBlockStateReference)
def addMetaBlockStateReference(self, *MetaBlockStateReference):
for obj in MetaBlockStateReference:
obj.MetaBlockConnectable = self
def removeMetaBlockStateReference(self, *MetaBlockStateReference):
for obj in MetaBlockStateReference:
obj.MetaBlockConnectable = None
def getStandardControlBlock_MetaBlockStateReference(self):
"""
"""
return self._StandardControlBlock_MetaBlockStateReference
def setStandardControlBlock_MetaBlockStateReference(self, value):
for x in self._StandardControlBlock_MetaBlockStateReference:
x.StandardControlBlock_MetaBlockConnectable = None
for y in value:
y._StandardControlBlock_MetaBlockConnectable = self
self._StandardControlBlock_MetaBlockStateReference = value
StandardControlBlock_MetaBlockStateReference = property(getStandardControlBlock_MetaBlockStateReference, setStandardControlBlock_MetaBlockStateReference)
def addStandardControlBlock_MetaBlockStateReference(self, *StandardControlBlock_MetaBlockStateReference):
for obj in StandardControlBlock_MetaBlockStateReference:
obj.StandardControlBlock_MetaBlockConnectable = self
def removeStandardControlBlock_MetaBlockStateReference(self, *StandardControlBlock_MetaBlockStateReference):
for obj in StandardControlBlock_MetaBlockStateReference:
obj.StandardControlBlock_MetaBlockConnectable = None
def getMetaBlockInputReference(self):
"""
"""
return self._MetaBlockInputReference
def setMetaBlockInputReference(self, value):
for x in self._MetaBlockInputReference:
x.MetaBlockConnectable = None
for y in value:
y._MetaBlockConnectable = self
self._MetaBlockInputReference = value
MetaBlockInputReference = property(getMetaBlockInputReference, setMetaBlockInputReference)
def addMetaBlockInputReference(self, *MetaBlockInputReference):
for obj in MetaBlockInputReference:
obj.MetaBlockConnectable = self
def removeMetaBlockInputReference(self, *MetaBlockInputReference):
for obj in MetaBlockInputReference:
obj.MetaBlockConnectable = None
def getStandardControlBlock_MetaBlockParameterReference(self):
"""
"""
return self._StandardControlBlock_MetaBlockParameterReference
def setStandardControlBlock_MetaBlockParameterReference(self, value):
for x in self._StandardControlBlock_MetaBlockParameterReference:
x.StandardControlBlock_MetaBlockConnectable = None
for y in value:
y._StandardControlBlock_MetaBlockConnectable = self
self._StandardControlBlock_MetaBlockParameterReference = value
StandardControlBlock_MetaBlockParameterReference = property(getStandardControlBlock_MetaBlockParameterReference, setStandardControlBlock_MetaBlockParameterReference)
def addStandardControlBlock_MetaBlockParameterReference(self, *StandardControlBlock_MetaBlockParameterReference):
for obj in StandardControlBlock_MetaBlockParameterReference:
obj.StandardControlBlock_MetaBlockConnectable = self
def removeStandardControlBlock_MetaBlockParameterReference(self, *StandardControlBlock_MetaBlockParameterReference):
for obj in StandardControlBlock_MetaBlockParameterReference:
obj.StandardControlBlock_MetaBlockConnectable = None
def getStandardControlBlock_MetaBlockOutputReference(self):
"""
"""
return self._StandardControlBlock_MetaBlockOutputReference
def setStandardControlBlock_MetaBlockOutputReference(self, value):
for x in self._StandardControlBlock_MetaBlockOutputReference:
x.StandardControlBlock_MetaBlockConnectable = None
for y in value:
y._StandardControlBlock_MetaBlockConnectable = self
self._StandardControlBlock_MetaBlockOutputReference = value
StandardControlBlock_MetaBlockOutputReference = property(getStandardControlBlock_MetaBlockOutputReference, setStandardControlBlock_MetaBlockOutputReference)
def addStandardControlBlock_MetaBlockOutputReference(self, *StandardControlBlock_MetaBlockOutputReference):
for obj in StandardControlBlock_MetaBlockOutputReference:
obj.StandardControlBlock_MetaBlockConnectable = self
def removeStandardControlBlock_MetaBlockOutputReference(self, *StandardControlBlock_MetaBlockOutputReference):
for obj in StandardControlBlock_MetaBlockOutputReference:
obj.StandardControlBlock_MetaBlockConnectable = None
def getMetaBlockOutputReference(self):
"""
"""
return self._MetaBlockOutputReference
def setMetaBlockOutputReference(self, value):
for x in self._MetaBlockOutputReference:
x.MetaBlockConnectable = None
for y in value:
y._MetaBlockConnectable = self
self._MetaBlockOutputReference = value
MetaBlockOutputReference = property(getMetaBlockOutputReference, setMetaBlockOutputReference)
def addMetaBlockOutputReference(self, *MetaBlockOutputReference):
for obj in MetaBlockOutputReference:
obj.MetaBlockConnectable = self
def removeMetaBlockOutputReference(self, *MetaBlockOutputReference):
for obj in MetaBlockOutputReference:
obj.MetaBlockConnectable = None | PypiClean |
/nniv04-0.4.1-py3-none-any.whl/nniv04-0.4.1.data/data/nni/node_modules/moment/src/locale/fr.js |
import moment from '../moment';
export default moment.defineLocale('fr', {
months : 'janvier_février_mars_avril_mai_juin_juillet_août_septembre_octobre_novembre_décembre'.split('_'),
monthsShort : 'janv._févr._mars_avr._mai_juin_juil._août_sept._oct._nov._déc.'.split('_'),
monthsParseExact : true,
weekdays : 'dimanche_lundi_mardi_mercredi_jeudi_vendredi_samedi'.split('_'),
weekdaysShort : 'dim._lun._mar._mer._jeu._ven._sam.'.split('_'),
weekdaysMin : 'di_lu_ma_me_je_ve_sa'.split('_'),
weekdaysParseExact : true,
longDateFormat : {
LT : 'HH:mm',
LTS : 'HH:mm:ss',
L : 'DD/MM/YYYY',
LL : 'D MMMM YYYY',
LLL : 'D MMMM YYYY HH:mm',
LLLL : 'dddd D MMMM YYYY HH:mm'
},
calendar : {
sameDay : '[Aujourd’hui à] LT',
nextDay : '[Demain à] LT',
nextWeek : 'dddd [à] LT',
lastDay : '[Hier à] LT',
lastWeek : 'dddd [dernier à] LT',
sameElse : 'L'
},
relativeTime : {
future : 'dans %s',
past : 'il y a %s',
s : 'quelques secondes',
ss : '%d secondes',
m : 'une minute',
mm : '%d minutes',
h : 'une heure',
hh : '%d heures',
d : 'un jour',
dd : '%d jours',
M : 'un mois',
MM : '%d mois',
y : 'un an',
yy : '%d ans'
},
dayOfMonthOrdinalParse: /\d{1,2}(er|)/,
ordinal : function (number, period) {
switch (period) {
// TODO: Return 'e' when day of month > 1. Move this case inside
// block for masculine words below.
// See https://github.com/moment/moment/issues/3375
case 'D':
return number + (number === 1 ? 'er' : '');
// Words with masculine grammatical gender: mois, trimestre, jour
default:
case 'M':
case 'Q':
case 'DDD':
case 'd':
return number + (number === 1 ? 'er' : 'e');
// Words with feminine grammatical gender: semaine
case 'w':
case 'W':
return number + (number === 1 ? 're' : 'e');
}
},
week : {
dow : 1, // Monday is the first day of the week.
doy : 4 // The week that contains Jan 4th is the first week of the year.
}
}); | PypiClean |
/ansys_optislang_core-0.3.0-py3-none-any.whl/ansys/optislang/core/nodes.py | from __future__ import annotations
import copy
from enum import Enum
from typing import TYPE_CHECKING, Dict, Iterable, List, Tuple, Union
from ansys.optislang.core.project_parametric import (
ConstraintCriterion,
CriteriaManager,
Design,
LimitStateCriterion,
ObjectiveCriterion,
ParameterManager,
ResponseManager,
VariableCriterion,
)
from ansys.optislang.core.utils import enum_from_str
if TYPE_CHECKING:
from ansys.optislang.core.osl_server import OslServer
from ansys.optislang.core.project_parametric import Criterion
class DesignFlow(Enum):
    """Available design-flow options for a node.

    Members
    -------
    NONE
        Designs are neither received nor sent.
    RECEIVE
        Designs are received only.
    SEND
        Designs are sent only.
    RECEIVE_SEND
        Designs are both received and sent.
    """

    NONE = 0
    RECEIVE = 1
    SEND = 2
    RECEIVE_SEND = 3

    @staticmethod
    def from_str(string: str) -> DesignFlow:
        """Build a ``DesignFlow`` member from its textual name.

        Spaces in ``string`` are replaced with underscores before the lookup,
        e.g. ``"RECEIVE SEND"`` maps to ``RECEIVE_SEND``.

        Parameters
        ----------
        string: str
            Name of the design-flow option.

        Returns
        -------
        DesignFlow
            Matching member of the ``DesignFlow`` enumeration.

        Raises
        ------
        TypeError
            Raised when an invalid type of ``string`` is given.
        ValueError
            Raised when an invalid value of ``string`` is given.
        """
        return enum_from_str(string=string, enum_class=DesignFlow, replace=(" ", "_"))
class Node:
    """Provides for creating and operating on nodes."""

    def __init__(
        self,
        uid: str,
        osl_server: OslServer,
    ) -> None:
        """Create a ``Node`` instance.

        Parameters
        ----------
        uid: str
            Unique ID of the node.
        osl_server: OslServer
            Object providing access to the optiSLang server.
        """
        self._osl_server = osl_server
        self.__uid = uid

    def __str__(self):
        """Return formatted string."""
        return f"Node type: {self.get_type()} Name: {self.get_name()} Uid: {self.uid}"

    @property
    def uid(self) -> str:
        """Unique ID of the node.

        Returns
        -------
        str
            Unique ID of the node.
        """
        return self.__uid

    def _get_info(self) -> dict:
        """Get the raw server output with the node info.

        Returns
        -------
        dict
            Dictionary with the node info.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        return self._osl_server.get_actor_info(self.uid)

    def get_name(self) -> str:
        """Get the name of the node.

        Returns
        -------
        str
            Name of the node.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        actor_info = self._osl_server.get_actor_info(uid=self.__uid)
        return actor_info["name"]

    def get_parent(self) -> Node:
        """Get the instance of the parent node.

        Returns
        -------
        Node
            Instance of the parent node.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        project_tree = self._osl_server.get_full_project_tree_with_properties()
        root_system_uid = project_tree["projects"][0]["system"]["uid"]
        parent_tree = project_tree["projects"][0]["system"]
        parent_uid = Node._find_parent_node_uid(
            tree=parent_tree,
            parent_uid=root_system_uid,
            node_uid=self.uid,
        )
        # The parent of any node is always a system; distinguish only whether
        # it is the project's root system or a nested one.
        properties_dicts_list = [
            {
                "uid": parent_uid,
                "kind": "root_system" if parent_uid == root_system_uid else "system",
            }
        ]
        return self._create_nodes_from_properties_dicts(
            properties_dicts_list=properties_dicts_list
        )[0]

    def get_parent_name(self) -> str:
        """Get the name of the parent node.

        Returns
        -------
        str
            Name of the parent node.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        parent_uid = self._get_parent_uid()
        actor_info = self._osl_server.get_actor_info(uid=parent_uid)
        return actor_info["name"]

    def get_properties(self) -> dict:
        """Get the raw server output with the node properties.

        Returns
        -------
        dict
            Dictionary with the node properties.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        return self._osl_server.get_actor_properties(self.uid)

    def get_status(self) -> str:
        """Get the status of the node.

        Returns
        -------
        str
            Status of the node.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        actor_info = self._osl_server.get_actor_info(uid=self.__uid)
        return actor_info["status"]

    def get_type(self) -> str:
        """Get the type of the node.

        Returns
        -------
        str
            Type of the node.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        actor_info = self._osl_server.get_actor_info(uid=self.__uid)
        return actor_info["type"]

    def _get_parent_uid(self) -> str:
        """Get the unique ID of the parent node.

        Returns
        -------
        str
            Unique ID of the parent node.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        project_tree = self._osl_server.get_full_project_tree_with_properties()
        root_system_uid = project_tree["projects"][0]["system"]["uid"]
        parent_tree = project_tree["projects"][0]["system"]
        return Node._find_parent_node_uid(
            tree=parent_tree,
            parent_uid=root_system_uid,
            node_uid=self.uid,
        )

    def _create_nodes_from_properties_dicts(
        self, properties_dicts_list: List[dict]
    ) -> Tuple[Node, ...]:
        """Create nodes from a dictionary of properties.

        Parameters
        ----------
        properties_dicts_list : List[dict]
            Dictionary of node properties.

        Returns
        -------
        Tuple[Node, ...]
            Tuple of nodes.

        Raises
        ------
        TypeError
            Raised when an unknown type of component is found.
        """
        nodes_list = []
        for node in properties_dicts_list:
            kind = node["kind"]
            uid = node["uid"]
            if kind == "actor":
                nodes_list.append(Node(uid=uid, osl_server=self._osl_server))
            elif kind == "system":
                if self._is_parametric_system(uid=uid):
                    nodes_list.append(ParametricSystem(uid=uid, osl_server=self._osl_server))
                else:
                    nodes_list.append(System(uid=uid, osl_server=self._osl_server))
            elif kind == "root_system":
                nodes_list.append(RootSystem(uid=node["uid"], osl_server=self._osl_server))
            else:
                # Previously the TypeError was constructed but never raised,
                # so unknown kinds were silently dropped from the result.
                raise TypeError(
                    f'Unknown kind of component: "{kind}", '
                    '"actor", "system" or "root_system" were expected.'
                )

        return tuple(nodes_list)

    def _is_parametric_system(self, uid: str) -> bool:
        """Check if the system is parametric.

        Parameters
        ----------
        uid : str
            Unique ID of the system.

        Returns
        -------
        bool
            ``True`` when the system is parametric, ``False`` otherwise.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        props = self._osl_server.get_actor_properties(uid=uid)
        return "ParameterManager" in props["properties"]

    @staticmethod
    def _find_parent_node_uid(tree: dict, parent_uid: str, node_uid: str) -> str:
        """Get the unique ID of the parent node.

        Parameters
        ----------
        tree: dict
            Dictionary with children nodes.
        parent_uid: str
            Unique ID of the system to loop through.
        node_uid: str
            Unique ID of the node for which to search for the parent.

        Returns
        -------
        str
            Unique ID of the parent node.

        Raises
        ------
        RuntimeError
            Raised when the node is not located anywhere below ``tree``.
        """
        located_uid = Node._locate_parent_uid(
            tree=tree, parent_uid=parent_uid, node_uid=node_uid
        )
        if located_uid is None:
            raise RuntimeError(f'Node "{node_uid}" was not located in structure tree.')
        return located_uid

    @staticmethod
    def _locate_parent_uid(tree: dict, parent_uid: str, node_uid: str) -> Union[str, None]:
        """Recursively search ``tree`` for ``node_uid`` and return its parent's unique ID.

        Parameters
        ----------
        tree: dict
            Dictionary with children nodes.
        parent_uid: str
            Unique ID of the system that directly owns the nodes in ``tree``.
        node_uid: str
            Unique ID of the node for which to search for the parent.

        Returns
        -------
        Union[str, None]
            Unique ID of the parent node, or ``None`` when the node is not
            found anywhere below ``tree``.
        """
        # Previously the recursive call's result was discarded (so nested
        # nodes were never found) and the recursion passed ``tree``'s uid as
        # the parent instead of the nested system's own uid.
        for node in tree["nodes"]:
            if node["uid"] == node_uid:
                return parent_uid
            if node["kind"] == "system":
                # Children of a nested system are owned by that system, not
                # by the current ``tree``.
                located_uid = Node._locate_parent_uid(
                    tree=node, parent_uid=node["uid"], node_uid=node_uid
                )
                if located_uid is not None:
                    return located_uid
        return None
class System(Node):
    """Provides for creating and operating on a system."""

    def __init__(
        self,
        uid: str,
        osl_server: OslServer,
    ) -> None:
        """Create a ``System`` instance.

        Parameters
        ----------
        uid: str
            Unique ID.
        osl_server: OslServer
            Object providing access to the optiSLang server.
        """
        super().__init__(
            uid=uid,
            osl_server=osl_server,
        )

    def find_node_by_uid(self, uid: str, search_depth: int = 1) -> Union[Node, None]:
        """Find a node in the system with a specified unique ID.

        This method searches only in the descendant nodes for the current system.

        Parameters
        ----------
        uid : str
            Unique ID of the node.
        search_depth: int, optional
            Depth of the node subtree to search. The default is ``1``, which corresponds
            to direct children nodes of the current system.

        Returns
        -------
        Union[Node, None]
            ``Node`` with the specified unique ID. If this ID isn't located in any
            descendant node, ``None`` is returned.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        TypeError
            Raised when an unknown type of component is found.
        RuntimeError
            Raised when the current system wasn't located in the project tree.
        """
        properties_dicts_list = System._find_node_with_uid(
            uid=uid,
            tree=self._get_system_tree(),
            properties_dicts_list=[],
            current_depth=1,
            max_search_depth=search_depth,
        )
        if len(properties_dicts_list) == 0:
            self._osl_server._logger.error(f"Node `{uid}` not found in the current system.")
            return None
        return self._create_nodes_from_properties_dicts(
            properties_dicts_list=properties_dicts_list
        )[0]

    def find_nodes_by_name(self, name: str, search_depth: int = 1) -> Tuple[Node, ...]:
        """Find nodes in the system with a specified name.

        This method searches only in the descendant nodes for the current system.

        Parameters
        ----------
        name : str
            Name of the node.
        search_depth: int, optional
            Depth of the node subtree to search. The default is ``1``, which corresponds
            to direct children nodes of the current system.

        Returns
        -------
        Tuple[Node, ...]
            Tuple of nodes with the specified name.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        TypeError
            Raised when an unknown type of component is found.
        RuntimeError
            Raised when the current system wasn't located in the project tree.
        """
        properties_dicts_list = System._find_nodes_with_name(
            name=name,
            tree=self._get_system_tree(),
            properties_dicts_list=[],
            current_depth=1,
            max_search_depth=search_depth,
        )
        if len(properties_dicts_list) == 0:
            self._osl_server._logger.error(f"Node `{name}` not found in the current system.")
            return tuple()
        return self._create_nodes_from_properties_dicts(properties_dicts_list=properties_dicts_list)

    def get_nodes(self) -> Tuple[Node, ...]:
        """Get the direct children nodes.

        Returns
        -------
        Tuple[Node, ...]
            Current system nodes.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        return self._create_nodes_from_properties_dicts(
            properties_dicts_list=self._get_nodes_dicts()
        )

    def _get_nodes_dicts(self) -> List[dict]:
        """Get data for children nodes.

        Returns
        -------
        List[dict]
            List of dictionaries with data for children nodes.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        RuntimeError
            Raised when the system wasn't located in the project tree.
        """
        return [
            {
                "type": node["type"],
                "name": node["name"],
                "uid": node["uid"],
                "kind": node["kind"],
            }
            for node in self._get_system_tree()["nodes"]
        ]

    def _get_system_tree(self) -> dict:
        """Get the project-tree dictionary rooted at the current system.

        Returns
        -------
        dict
            Subtree of the full project tree whose root is this system.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        RuntimeError
            Raised when the system wasn't located in the project tree.
        """
        project_tree = self._osl_server.get_full_project_tree_with_properties()
        root_system_tree = project_tree["projects"][0]["system"]
        if self.uid == root_system_tree["uid"]:
            return root_system_tree
        system_tree = System._find_subtree(tree=root_system_tree, uid=self.uid)
        if not system_tree:
            raise RuntimeError(f"System `{self.uid}` wasn't found.")
        return system_tree

    @staticmethod
    def _find_nodes_with_name(
        name: str,
        tree: dict,
        properties_dicts_list: List[dict],
        current_depth: int,
        max_search_depth: int,
    ) -> List[dict]:
        """Find nodes with the specified name.

        Matches are appended to ``properties_dicts_list`` in place while the
        tree is traversed depth-first up to ``max_search_depth``.

        Parameters
        ----------
        name : str
            Node name.
        tree : dict
            Tree to search for nodes with the specified name.
        properties_dicts_list : List[dict]
            Accumulator list of property dictionaries; mutated in place.
        current_depth: int
            Current depth of the search.
        max_search_depth: int
            Maximum depth of the search.

        Returns
        -------
        List[dict]
            List of dictionaries with the necessary information for creation of nodes.
        """
        for node in tree["nodes"]:
            if node["name"] == name:
                properties_dicts_list.append(
                    {
                        "type": node["type"],
                        "name": node["name"],
                        "uid": node["uid"],
                        "parent_uid": tree["uid"],
                        "parent_name": tree["name"],
                        "kind": node["kind"],
                    }
                )
            if node["kind"] == "system" and current_depth < max_search_depth:
                # Recursion mutates the shared accumulator; the return value
                # is the same list object.
                System._find_nodes_with_name(
                    name=name,
                    tree=node,
                    properties_dicts_list=properties_dicts_list,
                    current_depth=current_depth + 1,
                    max_search_depth=max_search_depth,
                )
        return properties_dicts_list

    @staticmethod
    def _find_node_with_uid(
        uid: str,
        tree: dict,
        properties_dicts_list: List[dict],
        current_depth: int,
        max_search_depth: int,
    ) -> List[dict]:
        """Find a node with a specified unique ID.

        Matches are appended to ``properties_dicts_list`` in place while the
        tree is traversed depth-first up to ``max_search_depth``.

        Parameters
        ----------
        uid : str
            Unique ID of the node.
        tree : dict
            Tree to search for nodes with the specified unique ID.
        properties_dicts_list : List[dict]
            Accumulator list of property dictionaries; mutated in place.
        current_depth: int
            Current depth of the search.
        max_search_depth: int
            Maximum depth of the search.

        Returns
        -------
        List[dict]
            List of dictionaries with the necessary information for creation of a node.
        """
        for node in tree["nodes"]:
            if node["uid"] == uid:
                properties_dicts_list.append(
                    {
                        "type": node["type"],
                        "name": node["name"],
                        "uid": node["uid"],
                        "parent_uid": tree["uid"],
                        "parent_name": tree["name"],
                        "kind": node["kind"],
                    }
                )
            if node["kind"] == "system" and current_depth < max_search_depth:
                # Recursion mutates the shared accumulator; the return value
                # is the same list object.
                System._find_node_with_uid(
                    uid=uid,
                    tree=node,
                    properties_dicts_list=properties_dicts_list,
                    current_depth=current_depth + 1,
                    max_search_depth=max_search_depth,
                )
        return properties_dicts_list

    @staticmethod
    def _find_subtree(tree: dict, uid: str) -> Union[dict, None]:
        """Find the subtree with a root node matching a specified unique ID.

        Parameters
        ----------
        tree: dict
            Dictionary with the parent structure.
        uid: str
            Unique ID of the subtree root node.

        Returns
        -------
        Union[dict, None]
            Dictionary representing the subtree found, or ``None`` if no node
            with the given unique ID exists in ``tree``.
        """
        for node in tree["nodes"]:
            if node["uid"] == uid:
                return node
            if node["kind"] == "system":
                # Propagate a hit from the nested search instead of
                # discarding it; ``None`` means "keep looking in siblings".
                subtree = System._find_subtree(tree=node, uid=uid)
                if subtree is not None:
                    return subtree
        return None
class ParametricSystem(System):
    """Provides methods to obtain data from a parametric system."""

    def __init__(
        self,
        uid: str,
        osl_server: OslServer,
    ) -> None:
        """Create a parametric system.

        Parameters
        ----------
        uid: str
            Unique ID.
        osl_server: OslServer
            Object providing access to the optiSLang server.
        """
        super().__init__(
            uid=uid,
            osl_server=osl_server,
        )
        # Managers exposing the system's criteria, parameters, and responses;
        # published through the read-only properties below.
        self.__criteria_manager = CriteriaManager(uid, osl_server)
        self.__parameter_manager = ParameterManager(uid, osl_server)
        self.__response_manager = ResponseManager(uid, osl_server)

    @property
    def criteria_manager(self) -> CriteriaManager:
        """Get the criteria manager associated with this system.

        Returns
        -------
        CriteriaManager
            Instance of the ``CriteriaManager`` class.
        """
        return self.__criteria_manager

    @property
    def parameter_manager(self) -> ParameterManager:
        """Get the parameter manager associated with this system.

        Returns
        -------
        ParameterManager
            Instance of the ``ParameterManager`` class.
        """
        return self.__parameter_manager

    @property
    def response_manager(self) -> ResponseManager:
        """Get the response manager associated with this system.

        Returns
        -------
        ResponseManager
            Instance of the ``ResponseManager`` class.
        """
        return self.__response_manager
class RootSystem(ParametricSystem):
    """Provides for creating and operating on a project system."""

    def __init__(
        self,
        uid: str,
        osl_server: OslServer,
    ) -> None:
        """Create a ``RootSystem`` system.

        Parameters
        ----------
        uid: str
            Unique ID.
        osl_server: OslServer
            Object providing access to the optiSLang server.
        """
        super().__init__(
            uid=uid,
            osl_server=osl_server,
        )

    def evaluate_design(self, design: Design, update_design: bool = True) -> Design:
        """Evaluate a design.

        Parameters
        ----------
        design: Design
            Instance of a ``Design`` class with defined parameters.
        update_design: bool, optional
            Determines whether given design should be updated and returned or new instance
            should be created. When ``True`` given design is updated and returned, otherwise
            new ``Design`` is created. Defaults to ``True``.

        Returns
        -------
        Design
            Evaluated design.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        evaluate_dict = {parameter.name: parameter.value for parameter in design.parameters}
        output_dict = self._osl_server.evaluate_design(evaluate_dict=evaluate_dict)
        result_design = output_dict[0]["result_design"]

        # Either update the caller's design in place or work on a deep copy;
        # the server results are received into the returned design either way.
        return_design = design if update_design else copy.deepcopy(design)
        return_design._receive_results(output_dict[0])

        design_parameters = return_design.parameters_names
        output_parameters = result_design["parameter_names"]
        missing_parameters = __class__.__get_sorted_difference_of_sets(
            output_parameters, design_parameters
        )
        undefined_parameters = __class__.__get_sorted_difference_of_sets(
            design_parameters, output_parameters
        )
        unused = __class__.__compare_input_w_processed_values(evaluate_dict, output_dict)

        if undefined_parameters:
            self._osl_server._logger.debug(f"Parameters ``{undefined_parameters}`` weren't used.")
        if missing_parameters:
            self._osl_server._logger.warning(
                f"Parameters ``{missing_parameters}`` were missing, "
                "reference values were used for evaluation and list of parameters will be updated."
            )
        if unused:
            self._osl_server._logger.warning(
                "Values of parameters were changed:"
                f"{[par[0] + ': ' + str(par[1]) + ' -> ' + str(par[2]) for par in unused]}"
            )

        # Add missing parameters to the caller's design with the values the
        # server actually used; ``design`` (not the copy) is intentionally
        # updated even when ``update_design`` is ``False``.
        for parameter in missing_parameters:
            position = result_design["parameter_names"].index(parameter)
            design.set_parameter_by_name(
                parameter,
                result_design["parameter_values"][position],
                False,
            )
        return return_design

    def get_reference_design(self) -> Design:
        """Get the design with reference values of the parameters.

        Returns
        -------
        Design
            Instance of the ``Design`` class with defined parameters and reference values.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        parameters = self.parameter_manager.get_parameters()
        responses = self.response_manager.get_responses()
        criteria = self.criteria_manager.get_criteria()
        sorted_criteria = __class__.__categorize_criteria(criteria=criteria)
        return Design(
            parameters=parameters,
            constraints=sorted_criteria.get("constraints", []),
            limit_states=sorted_criteria.get("limit_states", []),
            objectives=sorted_criteria.get("objectives", []),
            variables=sorted_criteria.get("variables", []),
            responses=responses,
        )

    def get_missing_parameters_names(self, design: Design) -> Tuple[str, ...]:
        """Get the names of the parameters that are missing in a design.

        This method compares design parameters with the root system's parameters.

        Parameters
        ----------
        design: Design
            Instance of the ``Design`` class with defined parameters.

        Returns
        -------
        Tuple[str, ...]
            Names of the parameters that are missing in the instance of ``Design`` class.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        return __class__.__get_sorted_difference_of_sets(
            first=self.parameter_manager.get_parameters_names(),
            second=design.parameters_names,
        )

    def get_undefined_parameters_names(self, design: Design) -> Tuple[str, ...]:
        """Get the names of the parameters that are not defined in the root system.

        This method compares design parameters with the root system's parameters.

        Parameters
        ----------
        design: Design
            Instance of the ``Design`` class with defined parameters.

        Returns
        -------
        Tuple[str, ...]
            Names of the parameters that are not defined in the root system.

        Raises
        ------
        OslCommunicationError
            Raised when an error occurs while communicating with the server.
        OslCommandError
            Raised when a command or query fails.
        TimeoutError
            Raised when the timeout float value expires.
        """
        return __class__.__get_sorted_difference_of_sets(
            first=design.parameters_names,
            second=self.parameter_manager.get_parameters_names(),
        )

    @staticmethod
    def __categorize_criteria(criteria: Tuple[Criterion]) -> Dict[str, List[Criterion]]:
        """Get criteria sorted by their kinds.

        Parameters
        ----------
        criteria : Tuple[Criterion]
            Tuple of unsorted criteria.

        Returns
        -------
        Dict[str, List[Criterion]]
            Dictionary of criteria sorted by their kinds.

        Raises
        ------
        TypeError
            Raised when an invalid type of criterion is passed.
        """
        constraints = []
        limit_states = []
        objectives = []
        variables = []
        for criterion in criteria:
            if isinstance(criterion, ConstraintCriterion):
                constraints.append(criterion)
            elif isinstance(criterion, LimitStateCriterion):
                limit_states.append(criterion)
            elif isinstance(criterion, ObjectiveCriterion):
                objectives.append(criterion)
            elif isinstance(criterion, VariableCriterion):
                variables.append(criterion)
            else:
                raise TypeError(f"Invalid type of criterion: `{type(criterion)}`.")
        return {
            "constraints": constraints,
            "limit_states": limit_states,
            "objectives": objectives,
            "variables": variables,
        }

    @staticmethod
    def __get_sorted_difference_of_sets(
        first: Iterable[str], second: Iterable[str]
    ) -> Tuple[str, ...]:
        """Get the sorted asymmetric difference of two string sets.

        This method executes the difference of two string sets: ``first - second``.

        Parameters
        ----------
        first: Iterable[str]
            Iterable of strings.
        second: Iterable[str]
            Iterable of strings.

        Returns
        -------
        Tuple[str, ...]
            Tuple with the sorted difference.
        """
        return tuple(sorted(set(first) - set(second)))

    @staticmethod
    def __compare_input_w_processed_values(
        inputs: dict, processed: dict
    ) -> Tuple[Tuple[str, Union[float, str, bool], Union[float, str, bool]], ...]:
        """Compare input values of parameters before and after processing by the server.

        Parameters
        ----------
        inputs: Dict[str, Union[float, str, bool]]
            Dictionary with parameter's names and values.
        processed: dict
            Server output.

        Returns
        -------
        Tuple[Tuple[str, Union[float, str, bool], Union[float, str, bool]], ...]
            Tuple of parameters with different values before and after processing by server.

            Tuple[0]: name
            Tuple[1]: input value
            Tuple[2]: processed value
        """
        differences = []
        result_design = processed[0]["result_design"]
        for index, parameter_name in enumerate(result_design["parameter_names"]):
            input_value = inputs.get(parameter_name)
            output_value = result_design["parameter_values"][index]
            # Compare against ``None`` (not truthiness) so falsy inputs such
            # as 0, 0.0, False, or "" are still reported when they changed.
            if input_value is not None and input_value != output_value:
                differences.append((parameter_name, input_value, output_value))
        return tuple(differences)
/dschmidt-cdktf-provider-google-0.0.1.tar.gz/dschmidt-cdktf-provider-google-0.0.1/src/dschmidt_cdktf_provider_google/cloud_identity_group/__init__.py | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf
import constructs
class CloudIdentityGroup(
cdktf.TerraformResource,
metaclass=jsii.JSIIMeta,
jsii_type="@dschmidt/provider-google.cloudIdentityGroup.CloudIdentityGroup",
):
'''Represents a {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group google_cloud_identity_group}.'''
def __init__(
self,
scope: constructs.Construct,
id_: builtins.str,
*,
group_key: typing.Union["CloudIdentityGroupGroupKey", typing.Dict[str, typing.Any]],
labels: typing.Mapping[builtins.str, builtins.str],
parent: builtins.str,
description: typing.Optional[builtins.str] = None,
display_name: typing.Optional[builtins.str] = None,
id: typing.Optional[builtins.str] = None,
initial_group_config: typing.Optional[builtins.str] = None,
timeouts: typing.Optional[typing.Union["CloudIdentityGroupTimeouts", typing.Dict[str, typing.Any]]] = None,
connection: typing.Optional[typing.Union[typing.Union[cdktf.SSHProvisionerConnection, typing.Dict[str, typing.Any]], typing.Union[cdktf.WinrmProvisionerConnection, typing.Dict[str, typing.Any]]]] = None,
count: typing.Optional[jsii.Number] = None,
depends_on: typing.Optional[typing.Sequence[cdktf.ITerraformDependable]] = None,
for_each: typing.Optional[cdktf.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[cdktf.TerraformResourceLifecycle, typing.Dict[str, typing.Any]]] = None,
provider: typing.Optional[cdktf.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[cdktf.FileProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.LocalExecProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.RemoteExecProvisioner, typing.Dict[str, typing.Any]]]]] = None,
) -> None:
'''Create a new {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group google_cloud_identity_group} Resource.
:param scope: The scope in which to define this construct.
:param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
:param group_key: group_key block. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#group_key CloudIdentityGroup#group_key}
:param labels: The labels that apply to the Group. Must not contain more than one entry. Must contain the entry 'cloudidentity.googleapis.com/groups.discussion_forum': '' if the Group is a Google Group or 'system/groups/external': '' if the Group is an external-identity-mapped group. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#labels CloudIdentityGroup#labels}
:param parent: The resource name of the entity under which this Group resides in the Cloud Identity resource hierarchy. Must be of the form identitysources/{identity_source_id} for external-identity-mapped groups or customers/{customer_id} for Google Groups. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#parent CloudIdentityGroup#parent}
:param description: An extended description to help users determine the purpose of a Group. Must not be longer than 4,096 characters. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#description CloudIdentityGroup#description}
:param display_name: The display name of the Group. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#display_name CloudIdentityGroup#display_name}
:param id: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#id CloudIdentityGroup#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param initial_group_config: The initial configuration options for creating a Group. See the `API reference <https://cloud.google.com/identity/docs/reference/rest/v1beta1/groups/create#initialgroupconfig>`_ for possible values. Default value: "EMPTY" Possible values: ["INITIAL_GROUP_CONFIG_UNSPECIFIED", "WITH_INITIAL_OWNER", "EMPTY"] Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#initial_group_config CloudIdentityGroup#initial_group_config}
:param timeouts: timeouts block. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#timeouts CloudIdentityGroup#timeouts}
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
'''
if __debug__:
type_hints = typing.get_type_hints(CloudIdentityGroup.__init__)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
config = CloudIdentityGroupConfig(
group_key=group_key,
labels=labels,
parent=parent,
description=description,
display_name=display_name,
id=id,
initial_group_config=initial_group_config,
timeouts=timeouts,
connection=connection,
count=count,
depends_on=depends_on,
for_each=for_each,
lifecycle=lifecycle,
provider=provider,
provisioners=provisioners,
)
jsii.create(self.__class__, self, [scope, id_, config])
@jsii.member(jsii_name="putGroupKey")
def put_group_key(
self,
*,
id: builtins.str,
namespace: typing.Optional[builtins.str] = None,
) -> None:
'''
:param id: The ID of the entity. For Google-managed entities, the id must be the email address of an existing group or user. For external-identity-mapped entities, the id must be a string conforming to the Identity Source's requirements. Must be unique within a namespace. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#id CloudIdentityGroup#id} Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param namespace: The namespace in which the entity exists. If not specified, the EntityKey represents a Google-managed entity such as a Google user or a Google Group. If specified, the EntityKey represents an external-identity-mapped group. The namespace must correspond to an identity source created in Admin Console and must be in the form of 'identitysources/{identity_source_id}'. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#namespace CloudIdentityGroup#namespace}
'''
value = CloudIdentityGroupGroupKey(id=id, namespace=namespace)
return typing.cast(None, jsii.invoke(self, "putGroupKey", [value]))
@jsii.member(jsii_name="putTimeouts")
def put_timeouts(
self,
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
'''
:param create: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#create CloudIdentityGroup#create}.
:param delete: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#delete CloudIdentityGroup#delete}.
:param update: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#update CloudIdentityGroup#update}.
'''
value = CloudIdentityGroupTimeouts(create=create, delete=delete, update=update)
return typing.cast(None, jsii.invoke(self, "putTimeouts", [value]))
@jsii.member(jsii_name="resetDescription")
def reset_description(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDescription", []))
@jsii.member(jsii_name="resetDisplayName")
def reset_display_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDisplayName", []))
@jsii.member(jsii_name="resetId")
def reset_id(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetId", []))
@jsii.member(jsii_name="resetInitialGroupConfig")
def reset_initial_group_config(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetInitialGroupConfig", []))
@jsii.member(jsii_name="resetTimeouts")
def reset_timeouts(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetTimeouts", []))
@jsii.member(jsii_name="synthesizeAttributes")
def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
@jsii.python.classproperty
@jsii.member(jsii_name="tfResourceType")
def TF_RESOURCE_TYPE(cls) -> builtins.str:
return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
@builtins.property
@jsii.member(jsii_name="createTime")
def create_time(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "createTime"))
@builtins.property
@jsii.member(jsii_name="groupKey")
def group_key(self) -> "CloudIdentityGroupGroupKeyOutputReference":
return typing.cast("CloudIdentityGroupGroupKeyOutputReference", jsii.get(self, "groupKey"))
@builtins.property
@jsii.member(jsii_name="name")
def name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "name"))
@builtins.property
@jsii.member(jsii_name="timeouts")
def timeouts(self) -> "CloudIdentityGroupTimeoutsOutputReference":
return typing.cast("CloudIdentityGroupTimeoutsOutputReference", jsii.get(self, "timeouts"))
@builtins.property
@jsii.member(jsii_name="updateTime")
def update_time(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "updateTime"))
@builtins.property
@jsii.member(jsii_name="descriptionInput")
def description_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "descriptionInput"))
@builtins.property
@jsii.member(jsii_name="displayNameInput")
def display_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "displayNameInput"))
@builtins.property
@jsii.member(jsii_name="groupKeyInput")
def group_key_input(self) -> typing.Optional["CloudIdentityGroupGroupKey"]:
return typing.cast(typing.Optional["CloudIdentityGroupGroupKey"], jsii.get(self, "groupKeyInput"))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="initialGroupConfigInput")
def initial_group_config_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "initialGroupConfigInput"))
@builtins.property
@jsii.member(jsii_name="labelsInput")
def labels_input(
self,
) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], jsii.get(self, "labelsInput"))
@builtins.property
@jsii.member(jsii_name="parentInput")
def parent_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "parentInput"))
@builtins.property
@jsii.member(jsii_name="timeoutsInput")
def timeouts_input(
self,
) -> typing.Optional[typing.Union["CloudIdentityGroupTimeouts", cdktf.IResolvable]]:
return typing.cast(typing.Optional[typing.Union["CloudIdentityGroupTimeouts", cdktf.IResolvable]], jsii.get(self, "timeoutsInput"))
@builtins.property
@jsii.member(jsii_name="description")
def description(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "description"))
@description.setter
def description(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CloudIdentityGroup, "description").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "description", value)
@builtins.property
@jsii.member(jsii_name="displayName")
def display_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "displayName"))
@display_name.setter
def display_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CloudIdentityGroup, "display_name").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "displayName", value)
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CloudIdentityGroup, "id").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="initialGroupConfig")
def initial_group_config(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "initialGroupConfig"))
@initial_group_config.setter
def initial_group_config(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CloudIdentityGroup, "initial_group_config").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "initialGroupConfig", value)
@builtins.property
@jsii.member(jsii_name="labels")
def labels(self) -> typing.Mapping[builtins.str, builtins.str]:
return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "labels"))
@labels.setter
def labels(self, value: typing.Mapping[builtins.str, builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CloudIdentityGroup, "labels").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "labels", value)
@builtins.property
@jsii.member(jsii_name="parent")
def parent(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "parent"))
@parent.setter
def parent(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CloudIdentityGroup, "parent").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "parent", value)
@jsii.data_type(
    jsii_type="@dschmidt/provider-google.cloudIdentityGroup.CloudIdentityGroupConfig",
    jsii_struct_bases=[cdktf.TerraformMetaArguments],
    # Maps the Python snake_case constructor keywords to their jsii/JS names.
    name_mapping={
        "connection": "connection",
        "count": "count",
        "depends_on": "dependsOn",
        "for_each": "forEach",
        "lifecycle": "lifecycle",
        "provider": "provider",
        "provisioners": "provisioners",
        "group_key": "groupKey",
        "labels": "labels",
        "parent": "parent",
        "description": "description",
        "display_name": "displayName",
        "id": "id",
        "initial_group_config": "initialGroupConfig",
        "timeouts": "timeouts",
    },
)
class CloudIdentityGroupConfig(cdktf.TerraformMetaArguments):
    '''Configuration struct for the ``google_cloud_identity_group`` resource.

    Generated jsii struct: all values are captured once in ``__init__`` into
    ``self._values`` and exposed through the read-only properties below.
    ``group_key``, ``labels`` and ``parent`` are required; everything else is
    optional and stored only when explicitly provided.
    '''

    def __init__(
        self,
        *,
        connection: typing.Optional[typing.Union[typing.Union[cdktf.SSHProvisionerConnection, typing.Dict[str, typing.Any]], typing.Union[cdktf.WinrmProvisionerConnection, typing.Dict[str, typing.Any]]]] = None,
        count: typing.Optional[jsii.Number] = None,
        depends_on: typing.Optional[typing.Sequence[cdktf.ITerraformDependable]] = None,
        for_each: typing.Optional[cdktf.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[cdktf.TerraformResourceLifecycle, typing.Dict[str, typing.Any]]] = None,
        provider: typing.Optional[cdktf.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[cdktf.FileProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.LocalExecProvisioner, typing.Dict[str, typing.Any]], typing.Union[cdktf.RemoteExecProvisioner, typing.Dict[str, typing.Any]]]]] = None,
        group_key: typing.Union["CloudIdentityGroupGroupKey", typing.Dict[str, typing.Any]],
        labels: typing.Mapping[builtins.str, builtins.str],
        parent: builtins.str,
        description: typing.Optional[builtins.str] = None,
        display_name: typing.Optional[builtins.str] = None,
        id: typing.Optional[builtins.str] = None,
        initial_group_config: typing.Optional[builtins.str] = None,
        timeouts: typing.Optional[typing.Union["CloudIdentityGroupTimeouts", typing.Dict[str, typing.Any]]] = None,
    ) -> None:
        '''Capture the resource configuration into an immutable value struct.

        :param connection: 
        :param count: 
        :param depends_on: 
        :param for_each: 
        :param lifecycle: 
        :param provider: 
        :param provisioners: 
        :param group_key: group_key block. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#group_key CloudIdentityGroup#group_key}
        :param labels: The labels that apply to the Group. Must not contain more than one entry. Must contain the entry 'cloudidentity.googleapis.com/groups.discussion_forum': '' if the Group is a Google Group or 'system/groups/external': '' if the Group is an external-identity-mapped group. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#labels CloudIdentityGroup#labels}
        :param parent: The resource name of the entity under which this Group resides in the Cloud Identity resource hierarchy. Must be of the form identitysources/{identity_source_id} for external-identity-mapped groups or customers/{customer_id} for Google Groups. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#parent CloudIdentityGroup#parent}
        :param description: An extended description to help users determine the purpose of a Group. Must not be longer than 4,096 characters. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#description CloudIdentityGroup#description}
        :param display_name: The display name of the Group. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#display_name CloudIdentityGroup#display_name}
        :param id: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#id CloudIdentityGroup#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param initial_group_config: The initial configuration options for creating a Group. See the `API reference <https://cloud.google.com/identity/docs/reference/rest/v1beta1/groups/create#initialgroupconfig>`_ for possible values. Default value: "EMPTY" Possible values: ["INITIAL_GROUP_CONFIG_UNSPECIFIED", "WITH_INITIAL_OWNER", "EMPTY"] Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#initial_group_config CloudIdentityGroup#initial_group_config}
        :param timeouts: timeouts block. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#timeouts CloudIdentityGroup#timeouts}
        '''
        # Normalize plain dicts into their typed struct equivalents so the
        # property accessors always return struct instances.
        if isinstance(lifecycle, dict):
            lifecycle = cdktf.TerraformResourceLifecycle(**lifecycle)
        if isinstance(group_key, dict):
            group_key = CloudIdentityGroupGroupKey(**group_key)
        if isinstance(timeouts, dict):
            timeouts = CloudIdentityGroupTimeouts(**timeouts)
        # Runtime type validation against the annotations above; elided under -O.
        if __debug__:
            type_hints = typing.get_type_hints(CloudIdentityGroupConfig.__init__)
            check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
            check_type(argname="argument count", value=count, expected_type=type_hints["count"])
            check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
            check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
            check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
            check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
            check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
            check_type(argname="argument group_key", value=group_key, expected_type=type_hints["group_key"])
            check_type(argname="argument labels", value=labels, expected_type=type_hints["labels"])
            check_type(argname="argument parent", value=parent, expected_type=type_hints["parent"])
            check_type(argname="argument description", value=description, expected_type=type_hints["description"])
            check_type(argname="argument display_name", value=display_name, expected_type=type_hints["display_name"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
            check_type(argname="argument initial_group_config", value=initial_group_config, expected_type=type_hints["initial_group_config"])
            check_type(argname="argument timeouts", value=timeouts, expected_type=type_hints["timeouts"])
        # Required properties are stored unconditionally.
        self._values: typing.Dict[str, typing.Any] = {
            "group_key": group_key,
            "labels": labels,
            "parent": parent,
        }
        # Optional properties are stored only when supplied, so absence can be
        # distinguished from an explicit value.
        if connection is not None:
            self._values["connection"] = connection
        if count is not None:
            self._values["count"] = count
        if depends_on is not None:
            self._values["depends_on"] = depends_on
        if for_each is not None:
            self._values["for_each"] = for_each
        if lifecycle is not None:
            self._values["lifecycle"] = lifecycle
        if provider is not None:
            self._values["provider"] = provider
        if provisioners is not None:
            self._values["provisioners"] = provisioners
        if description is not None:
            self._values["description"] = description
        if display_name is not None:
            self._values["display_name"] = display_name
        if id is not None:
            self._values["id"] = id
        if initial_group_config is not None:
            self._values["initial_group_config"] = initial_group_config
        if timeouts is not None:
            self._values["timeouts"] = timeouts

    @builtins.property
    def connection(
        self,
    ) -> typing.Optional[typing.Union[cdktf.SSHProvisionerConnection, cdktf.WinrmProvisionerConnection]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("connection")
        return typing.cast(typing.Optional[typing.Union[cdktf.SSHProvisionerConnection, cdktf.WinrmProvisionerConnection]], result)

    @builtins.property
    def count(self) -> typing.Optional[jsii.Number]:
        '''
        :stability: experimental
        '''
        result = self._values.get("count")
        return typing.cast(typing.Optional[jsii.Number], result)

    @builtins.property
    def depends_on(self) -> typing.Optional[typing.List[cdktf.ITerraformDependable]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("depends_on")
        return typing.cast(typing.Optional[typing.List[cdktf.ITerraformDependable]], result)

    @builtins.property
    def for_each(self) -> typing.Optional[cdktf.ITerraformIterator]:
        '''
        :stability: experimental
        '''
        result = self._values.get("for_each")
        return typing.cast(typing.Optional[cdktf.ITerraformIterator], result)

    @builtins.property
    def lifecycle(self) -> typing.Optional[cdktf.TerraformResourceLifecycle]:
        '''
        :stability: experimental
        '''
        result = self._values.get("lifecycle")
        return typing.cast(typing.Optional[cdktf.TerraformResourceLifecycle], result)

    @builtins.property
    def provider(self) -> typing.Optional[cdktf.TerraformProvider]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provider")
        return typing.cast(typing.Optional[cdktf.TerraformProvider], result)

    @builtins.property
    def provisioners(
        self,
    ) -> typing.Optional[typing.List[typing.Union[cdktf.FileProvisioner, cdktf.LocalExecProvisioner, cdktf.RemoteExecProvisioner]]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provisioners")
        return typing.cast(typing.Optional[typing.List[typing.Union[cdktf.FileProvisioner, cdktf.LocalExecProvisioner, cdktf.RemoteExecProvisioner]]], result)

    @builtins.property
    def group_key(self) -> "CloudIdentityGroupGroupKey":
        '''group_key block.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#group_key CloudIdentityGroup#group_key}
        '''
        result = self._values.get("group_key")
        # Required property: guaranteed present by __init__.
        assert result is not None, "Required property 'group_key' is missing"
        return typing.cast("CloudIdentityGroupGroupKey", result)

    @builtins.property
    def labels(self) -> typing.Mapping[builtins.str, builtins.str]:
        '''The labels that apply to the Group.

        Must not contain more than one entry. Must contain the entry
        'cloudidentity.googleapis.com/groups.discussion_forum': '' if the Group is a Google Group or
        'system/groups/external': '' if the Group is an external-identity-mapped group.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#labels CloudIdentityGroup#labels}
        '''
        result = self._values.get("labels")
        assert result is not None, "Required property 'labels' is missing"
        return typing.cast(typing.Mapping[builtins.str, builtins.str], result)

    @builtins.property
    def parent(self) -> builtins.str:
        '''The resource name of the entity under which this Group resides in the Cloud Identity resource hierarchy.

        Must be of the form identitysources/{identity_source_id} for external-identity-mapped
        groups or customers/{customer_id} for Google Groups.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#parent CloudIdentityGroup#parent}
        '''
        result = self._values.get("parent")
        assert result is not None, "Required property 'parent' is missing"
        return typing.cast(builtins.str, result)

    @builtins.property
    def description(self) -> typing.Optional[builtins.str]:
        '''An extended description to help users determine the purpose of a Group. Must not be longer than 4,096 characters.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#description CloudIdentityGroup#description}
        '''
        result = self._values.get("description")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def display_name(self) -> typing.Optional[builtins.str]:
        '''The display name of the Group.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#display_name CloudIdentityGroup#display_name}
        '''
        result = self._values.get("display_name")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def id(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#id CloudIdentityGroup#id}.

        Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
        If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        '''
        result = self._values.get("id")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def initial_group_config(self) -> typing.Optional[builtins.str]:
        '''The initial configuration options for creating a Group.

        See the
        `API reference <https://cloud.google.com/identity/docs/reference/rest/v1beta1/groups/create#initialgroupconfig>`_
        for possible values. Default value: "EMPTY" Possible values: ["INITIAL_GROUP_CONFIG_UNSPECIFIED", "WITH_INITIAL_OWNER", "EMPTY"]

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#initial_group_config CloudIdentityGroup#initial_group_config}
        '''
        result = self._values.get("initial_group_config")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def timeouts(self) -> typing.Optional["CloudIdentityGroupTimeouts"]:
        '''timeouts block.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#timeouts CloudIdentityGroup#timeouts}
        '''
        result = self._values.get("timeouts")
        return typing.cast(typing.Optional["CloudIdentityGroupTimeouts"], result)

    # Value semantics: structs compare equal iff their captured values match.
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "CloudIdentityGroupConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
@jsii.data_type(
    jsii_type="@dschmidt/provider-google.cloudIdentityGroup.CloudIdentityGroupGroupKey",
    jsii_struct_bases=[],
    name_mapping={"id": "id", "namespace": "namespace"},
)
class CloudIdentityGroupGroupKey:
    '''EntityKey struct for the group: a required ``id`` plus an optional ``namespace``.'''

    def __init__(
        self,
        *,
        id: builtins.str,
        namespace: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param id: The ID of the entity. For Google-managed entities, the id must be the email address of an existing group or user. For external-identity-mapped entities, the id must be a string conforming to the Identity Source's requirements. Must be unique within a namespace. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#id CloudIdentityGroup#id} Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param namespace: The namespace in which the entity exists. If not specified, the EntityKey represents a Google-managed entity such as a Google user or a Google Group. If specified, the EntityKey represents an external-identity-mapped group. The namespace must correspond to an identity source created in Admin Console and must be in the form of 'identitysources/{identity_source_id}'. Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#namespace CloudIdentityGroup#namespace}
        '''
        # Runtime type validation against the annotations above; elided under -O.
        if __debug__:
            type_hints = typing.get_type_hints(CloudIdentityGroupGroupKey.__init__)
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
            check_type(argname="argument namespace", value=namespace, expected_type=type_hints["namespace"])
        # "id" is required; "namespace" is stored only when provided.
        self._values: typing.Dict[str, typing.Any] = {
            "id": id,
        }
        if namespace is not None:
            self._values["namespace"] = namespace

    @builtins.property
    def id(self) -> builtins.str:
        '''The ID of the entity.

        For Google-managed entities, the id must be the email address of an existing
        group or user.
        For external-identity-mapped entities, the id must be a string conforming
        to the Identity Source's requirements.
        Must be unique within a namespace.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#id CloudIdentityGroup#id}

        Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
        If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        '''
        result = self._values.get("id")
        # Required property: guaranteed present by __init__.
        assert result is not None, "Required property 'id' is missing"
        return typing.cast(builtins.str, result)

    @builtins.property
    def namespace(self) -> typing.Optional[builtins.str]:
        '''The namespace in which the entity exists.

        If not specified, the EntityKey represents a Google-managed entity
        such as a Google user or a Google Group.
        If specified, the EntityKey represents an external-identity-mapped group.
        The namespace must correspond to an identity source created in Admin Console
        and must be in the form of 'identitysources/{identity_source_id}'.

        Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#namespace CloudIdentityGroup#namespace}
        '''
        result = self._values.get("namespace")
        return typing.cast(typing.Optional[builtins.str], result)

    # Value semantics: structs compare equal iff their captured values match.
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "CloudIdentityGroupGroupKey(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class CloudIdentityGroupGroupKeyOutputReference(
    cdktf.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@dschmidt/provider-google.cloudIdentityGroup.CloudIdentityGroupGroupKeyOutputReference",
):
    '''Mutable reference to the ``group_key`` block of a resource.

    Every accessor delegates to the jsii kernel (``jsii.get``/``jsii.set``/
    ``jsii.invoke``); no state is held on the Python side.
    '''

    def __init__(
        self,
        terraform_resource: cdktf.IInterpolatingParent,
        terraform_attribute: builtins.str,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        '''
        # Runtime type validation; elided under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(CloudIdentityGroupGroupKeyOutputReference.__init__)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])

    @jsii.member(jsii_name="resetNamespace")
    def reset_namespace(self) -> None:
        # Clears the optional "namespace" attribute back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetNamespace", []))

    @builtins.property
    @jsii.member(jsii_name="idInput")
    def id_input(self) -> typing.Optional[builtins.str]:
        # Raw configured input value (None when not explicitly set).
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))

    @builtins.property
    @jsii.member(jsii_name="namespaceInput")
    def namespace_input(self) -> typing.Optional[builtins.str]:
        # Raw configured input value (None when not explicitly set).
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "namespaceInput"))

    @builtins.property
    @jsii.member(jsii_name="id")
    def id(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "id"))

    @id.setter
    def id(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupGroupKeyOutputReference, "id").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "id", value)

    @builtins.property
    @jsii.member(jsii_name="namespace")
    def namespace(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "namespace"))

    @namespace.setter
    def namespace(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupGroupKeyOutputReference, "namespace").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "namespace", value)

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(self) -> typing.Optional[CloudIdentityGroupGroupKey]:
        # The whole block as a struct, or None when the block is absent.
        return typing.cast(typing.Optional[CloudIdentityGroupGroupKey], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[CloudIdentityGroupGroupKey],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupGroupKeyOutputReference, "internal_value").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@dschmidt/provider-google.cloudIdentityGroup.CloudIdentityGroupTimeouts",
    jsii_struct_bases=[],
    name_mapping={"create": "create", "delete": "delete", "update": "update"},
)
class CloudIdentityGroupTimeouts:
    '''Optional operation timeouts (``create``/``delete``/``update``) for the resource.'''

    def __init__(
        self,
        *,
        create: typing.Optional[builtins.str] = None,
        delete: typing.Optional[builtins.str] = None,
        update: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param create: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#create CloudIdentityGroup#create}.
        :param delete: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#delete CloudIdentityGroup#delete}.
        :param update: Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#update CloudIdentityGroup#update}.
        '''
        # Runtime type validation; elided under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(CloudIdentityGroupTimeouts.__init__)
            check_type(argname="argument create", value=create, expected_type=type_hints["create"])
            check_type(argname="argument delete", value=delete, expected_type=type_hints["delete"])
            check_type(argname="argument update", value=update, expected_type=type_hints["update"])
        # All fields are optional; only supplied values are stored.
        self._values: typing.Dict[str, typing.Any] = {}
        if create is not None:
            self._values["create"] = create
        if delete is not None:
            self._values["delete"] = delete
        if update is not None:
            self._values["update"] = update

    @builtins.property
    def create(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#create CloudIdentityGroup#create}.'''
        result = self._values.get("create")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def delete(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#delete CloudIdentityGroup#delete}.'''
        result = self._values.get("delete")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def update(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://www.terraform.io/docs/providers/google/r/cloud_identity_group#update CloudIdentityGroup#update}.'''
        result = self._values.get("update")
        return typing.cast(typing.Optional[builtins.str], result)

    # Value semantics: structs compare equal iff their captured values match.
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "CloudIdentityGroupTimeouts(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class CloudIdentityGroupTimeoutsOutputReference(
    cdktf.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@dschmidt/provider-google.cloudIdentityGroup.CloudIdentityGroupTimeoutsOutputReference",
):
    '''Mutable reference to the ``timeouts`` block of a resource.

    Every accessor delegates to the jsii kernel (``jsii.get``/``jsii.set``/
    ``jsii.invoke``); no state is held on the Python side.
    '''

    def __init__(
        self,
        terraform_resource: cdktf.IInterpolatingParent,
        terraform_attribute: builtins.str,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        '''
        # Runtime type validation; elided under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(CloudIdentityGroupTimeoutsOutputReference.__init__)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])

    @jsii.member(jsii_name="resetCreate")
    def reset_create(self) -> None:
        # Clears the optional "create" timeout back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetCreate", []))

    @jsii.member(jsii_name="resetDelete")
    def reset_delete(self) -> None:
        # Clears the optional "delete" timeout back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetDelete", []))

    @jsii.member(jsii_name="resetUpdate")
    def reset_update(self) -> None:
        # Clears the optional "update" timeout back to its unset state.
        return typing.cast(None, jsii.invoke(self, "resetUpdate", []))

    @builtins.property
    @jsii.member(jsii_name="createInput")
    def create_input(self) -> typing.Optional[builtins.str]:
        # Raw configured input value (None when not explicitly set).
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "createInput"))

    @builtins.property
    @jsii.member(jsii_name="deleteInput")
    def delete_input(self) -> typing.Optional[builtins.str]:
        # Raw configured input value (None when not explicitly set).
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "deleteInput"))

    @builtins.property
    @jsii.member(jsii_name="updateInput")
    def update_input(self) -> typing.Optional[builtins.str]:
        # Raw configured input value (None when not explicitly set).
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "updateInput"))

    @builtins.property
    @jsii.member(jsii_name="create")
    def create(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "create"))

    @create.setter
    def create(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupTimeoutsOutputReference, "create").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "create", value)

    @builtins.property
    @jsii.member(jsii_name="delete")
    def delete(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "delete"))

    @delete.setter
    def delete(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupTimeoutsOutputReference, "delete").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "delete", value)

    @builtins.property
    @jsii.member(jsii_name="update")
    def update(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "update"))

    @update.setter
    def update(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupTimeoutsOutputReference, "update").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "update", value)

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[typing.Union[CloudIdentityGroupTimeouts, cdktf.IResolvable]]:
        # The whole block as a struct (or an unresolved token), None when absent.
        return typing.cast(typing.Optional[typing.Union[CloudIdentityGroupTimeouts, cdktf.IResolvable]], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[typing.Union[CloudIdentityGroupTimeouts, cdktf.IResolvable]],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(getattr(CloudIdentityGroupTimeoutsOutputReference, "internal_value").fset)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
# Names re-exported as the public API of this generated module.
__all__ = [
    "CloudIdentityGroup",
    "CloudIdentityGroupConfig",
    "CloudIdentityGroupGroupKey",
    "CloudIdentityGroupGroupKeyOutputReference",
    "CloudIdentityGroupTimeouts",
    "CloudIdentityGroupTimeoutsOutputReference",
]

# Register deprecation/stability metadata with jsii's publication helper.
# (The original line carried a stray "| PypiClean" extraction artifact, which
# made the expression a NameError at import time.)
publication.publish()
# ---- PyCIM-15.15.0 :: CIM14/IEC61968/PaymentMetering/Receipt.py ----
from CIM14.IEC61970.Core.IdentifiedObject import IdentifiedObject
class Receipt(IdentifiedObject):
    """Record of total receipted payment from customer.

    Maintains bidirectional one-to-one associations to CashierShift and
    VendorShift, and one-to-many associations to Tenders and Transactions.
    All assignments go through properties so both sides stay consistent.
    """

    def __init__(self, isBankable=False, CashierShift=None, Tenders=None, VendorShift=None, Transactions=None, line=None, *args, **kw_args):
        """Initialises a new 'Receipt' instance.

        @param isBankable: True if this receipted payment is manually bankable, otherwise it is an electronic funds transfer.
        @param CashierShift: Cashier shift during which this receipt was recorded.
        @param Tenders: All payments received in the form of tenders recorded by this receipt.
        @param VendorShift: Vendor shift during which this receipt was recorded.
        @param Transactions: All transactions recorded for this receipted payment.
        @param line: Receipted amount with rounding, date and note.
        """
        #: True if this receipted payment is manually bankable, otherwise it is an electronic funds transfer.
        self.isBankable = isBankable

        # Initialise the private slots first, then assign through the public
        # properties so the inverse side of each association is wired up.
        self._CashierShift = None
        self.CashierShift = CashierShift

        self._Tenders = []
        self.Tenders = [] if Tenders is None else Tenders

        self._VendorShift = None
        self.VendorShift = VendorShift

        self._Transactions = []
        self.Transactions = [] if Transactions is None else Transactions

        self.line = line

        super(Receipt, self).__init__(*args, **kw_args)

    # Metadata used by the generic PyCIM (de)serialisation machinery.
    _attrs = ["isBankable"]
    _attr_types = {"isBankable": bool}
    _defaults = {"isBankable": False}
    _enums = {}
    _refs = ["CashierShift", "Tenders", "VendorShift", "Transactions", "line"]
    _many_refs = ["Tenders", "Transactions"]

    def getCashierShift(self):
        """Cashier shift during which this receipt was recorded.
        """
        return self._CashierShift

    def setCashierShift(self, value):
        # Detach from the previous shift's Receipts list before re-attaching.
        if self._CashierShift is not None:
            filtered = [x for x in self.CashierShift.Receipts if x != self]
            self._CashierShift._Receipts = filtered

        self._CashierShift = value
        if self._CashierShift is not None:
            if self not in self._CashierShift._Receipts:
                self._CashierShift._Receipts.append(self)

    CashierShift = property(getCashierShift, setCashierShift)

    def getTenders(self):
        """All payments received in the form of tenders recorded by this receipt.
        """
        return self._Tenders

    def setTenders(self, value):
        # Unlink current tenders (via their Receipt property, which removes
        # them from this receipt), then adopt the new list.
        for x in self._Tenders:
            x.Receipt = None
        for y in value:
            y._Receipt = self
        self._Tenders = value

    Tenders = property(getTenders, setTenders)

    def addTenders(self, *Tenders):
        for obj in Tenders:
            obj.Receipt = self

    def removeTenders(self, *Tenders):
        for obj in Tenders:
            obj.Receipt = None

    def getVendorShift(self):
        """Vendor shift during which this receipt was recorded.
        """
        return self._VendorShift

    def setVendorShift(self, value):
        # Detach from the previous shift's Receipts list before re-attaching.
        if self._VendorShift is not None:
            filtered = [x for x in self.VendorShift.Receipts if x != self]
            self._VendorShift._Receipts = filtered

        self._VendorShift = value
        if self._VendorShift is not None:
            if self not in self._VendorShift._Receipts:
                self._VendorShift._Receipts.append(self)

    VendorShift = property(getVendorShift, setVendorShift)

    def getTransactions(self):
        """All transactions recorded for this receipted payment.
        """
        return self._Transactions

    def setTransactions(self, value):
        # Unlink current transactions, then adopt the new list.
        for x in self._Transactions:
            x.Receipt = None
        for y in value:
            y._Receipt = self
        self._Transactions = value

    Transactions = property(getTransactions, setTransactions)

    def addTransactions(self, *Transactions):
        for obj in Transactions:
            obj.Receipt = self

    def removeTransactions(self, *Transactions):
        for obj in Transactions:
            obj.Receipt = None

    # Receipted amount with rounding, date and note.
    # NOTE: the original line read ``line = None | PypiClean`` — a fused
    # extraction artifact that raised NameError at class creation; the
    # intended class-level default is simply None.
    line = None
# ---- humblesetuptools-3.4.5 :: setuptools/command/install_scripts.py ----
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
    """Do normal script install, plus any egg_info wrapper scripts."""

    def initialize_options(self):
        _install_scripts.initialize_options(self)
        # When true, entry-point wrapper scripts are not generated (used when
        # the scripts will live inside a zipped .egg instead).
        self.no_ep = False

    def run(self):
        # Imported lazily to avoid a circular import with easy_install.
        from setuptools.command.easy_install import get_script_args
        from setuptools.command.easy_install import sys_executable

        self.run_command("egg_info")
        if self.distribution.scripts:
            _install_scripts.run(self)  # run first to set up self.outfiles
        else:
            self.outfiles = []
        if self.no_ep:
            # don't install entry point scripts into .egg file!
            return

        ei_cmd = self.get_finalized_command("egg_info")
        dist = Distribution(
            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
            ei_cmd.egg_name, ei_cmd.egg_version,
        )
        bs_cmd = self.get_finalized_command('build_scripts')
        executable = getattr(bs_cmd, 'executable', sys_executable)
        is_wininst = getattr(
            self.get_finalized_command("bdist_wininst"), '_is_running', False
        )
        for args in get_script_args(dist, executable, is_wininst):
            self.write_script(*args)

    def write_script(self, script_name, contents, mode="t", *ignored):
        """Write an executable file to the scripts directory."""
        from setuptools.command.easy_install import chmod, current_umask

        log.info("Installing %s script to %s", script_name, self.install_dir)
        target = os.path.join(self.install_dir, script_name)
        self.outfiles.append(target)

        mask = current_umask()
        if not self.dry_run:
            ensure_directory(target)
            # Use a context manager so the handle is closed even if the write
            # raises (the original leaked the file object on error).
            with open(target, "w" + mode) as f:
                f.write(contents)
            chmod(target, 0o777 - mask)  # same value as the original 0x1FF-mask
/genpipes-suite-0.0a0.tar.gz/genpipes-suite-0.0a0/genpipes/pipelines/rnaseq_denovo_assembly/rnaseq_denovo_assembly.py |
################################################################################
# Copyright (C) 2014, 2022 GenAP, McGill University and Genome Quebec Innovation Centre
#
# This file is part of MUGQIC Pipelines.
#
# MUGQIC Pipelines is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MUGQIC Pipelines is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with MUGQIC Pipelines. If not, see <http://www.gnu.org/licenses/>.
################################################################################
# Python Standard Modules
import argparse
import logging
import os
import sys
# Append mugqic_pipelines directory to Python library path
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))))
# MUGQIC Modules
from ... import utils
log = logging.getLogger(__name__)
def main(argv=None):
    """Parse command-line options and launch the RNA-seq de novo assembly pipeline."""
    if argv is None:
        argv = sys.argv[1:]

    # Re-exec inside a container first when the deployment requires it.
    utils.container_wrapper_argparse(__file__, argv)

    # Build the argument parser, seeding the epilog with the pipeline's
    # step help text, then let the pipeline class register its options.
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        conflict_handler='resolve',
        epilog=RnaSeqDeNovoAssembly.process_help(argv),
    )
    parser = RnaSeqDeNovoAssembly.argparser(parser)
    parsed_args = parser.parse_args(argv)

    # Configure logging before the pipeline object is constructed.
    utils.set_logger(parsed_args.log, sanity_check=parsed_args.sanity_check)

    pipeline = RnaSeqDeNovoAssembly(
        parsed_args.config,
        genpipes_file=parsed_args.genpipes_file,
        steps=parsed_args.steps,
        readsets_file=parsed_args.readsets_file,
        clean=parsed_args.clean,
        report=parsed_args.report,
        force=parsed_args.force,
        job_scheduler=parsed_args.job_scheduler,
        output_dir=parsed_args.output_dir,
        design_file=parsed_args.design_file,
        no_json=parsed_args.no_json,
        container=parsed_args.container,
        protocol=parsed_args.protocol,
    )
    pipeline.submit_jobs()
if __name__ == '__main__':
    # Standard script entry point: delegate to main() with sys.argv.
    main()
/IndustryGuesser-0.1.3.tar.gz/IndustryGuesser-0.1.3/industryguesser/encoder.py | import os
import re
import pickle
import logging
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from industryguesser import PARENT_DIR
logger = logging.getLogger(__name__)
class KerasBatchGenerator(object):
    """Infinite generator yielding successive (data, label) batches.

    A cursor into the inputs is kept on the instance so consecutive
    yields walk the dataset batch by batch, wrapping around after a
    full pass. Trailing items that do not fill a whole batch are
    never yielded.
    """

    def __init__(self, data, label, batch_size):
        self.data = data
        self.label = label
        self.batch_size = batch_size
        self.current_idx = 0
        # Integer division: incomplete final batch is dropped.
        self.num_of_batches = len(data) // batch_size

    def generate(self):
        """Yield (data_batch, label_batch) tuples forever."""
        while True:
            if self.current_idx >= self.num_of_batches:
                # Wrap around once a full epoch has been served.
                self.current_idx = 0
            for _ in range(self.num_of_batches):
                start = self.current_idx * self.batch_size
                stop = start + self.batch_size
                batch = (self.data[start:stop], self.label[start:stop])
                self.current_idx += 1
                yield batch
class IndustryEncoder(object):
    """ Encode industry labels to and from one-hot categorical vectors. """

    # Pickle file holding the fitted sklearn LabelEncoder.
    _ind_encoder_file_name = 'industry_encoder.pkl'
    _encoder_path = os.path.join(PARENT_DIR, 'industryguesser/models', _ind_encoder_file_name)

    def __init__(self):
        # Lazily fitted/loaded sklearn LabelEncoder instance.
        self._ind_encoder = None
        self._fit = False
        self._load = False

    def fit(self, industries):
        """ Fit a fresh industry label encoder and persist it to disk. """
        self._ind_encoder = LabelEncoder()
        self._ind_encoder.fit(list(set(industries)))
        with open(self._encoder_path, 'wb') as f:
            pickle.dump(self._ind_encoder, f, protocol=pickle.HIGHEST_PROTOCOL)
        self._fit = True

    def load(self):
        """ Load the pre-fit industry label encoder from disk. """
        with open(self._encoder_path, 'rb') as f:
            self._ind_encoder = pickle.load(f)
        self._load = True

    def encode(self, industries):
        """ Convert industry labels to one-hot encoded vectors. """
        if not self._ind_encoder:
            self.load()
        encoded_industries = self._ind_encoder.transform(industries)
        encoded_industries = to_categorical(encoded_industries)
        return encoded_industries

    def decode(self, y_pred):
        """ Convert encoded integer predictions back to industry labels. """
        if not self._ind_encoder:
            self.load()
        industries = self._ind_encoder.inverse_transform(y_pred)
        return industries

    @property
    def class_size(self):
        # Number of distinct industry classes known to the encoder.
        if not self._ind_encoder:
            self.load()
        return len(self._ind_encoder.classes_)

    @property
    def classes(self):
        # Ordered array of industry class labels.
        if not self._ind_encoder:
            self.load()
        return self._ind_encoder.classes_
class CompanyEncoder(object):
    """ Encode a list of company names into a char-to-int 2-D numpy array. """

    # Pickle file holding the fitted Keras Tokenizer.
    _com_encoder_file_name = 'company_encoder.pkl'
    _encoder_path = os.path.join(PARENT_DIR, 'industryguesser/models', _com_encoder_file_name)

    def __init__(self, lower=True, pad_size=18, padding='post'):
        # Whether to lowercase names before tokenizing.
        self._lower = lower
        # Lazily fitted/loaded Keras Tokenizer instance.
        self._com_encoder = None
        self._fit = False
        self._load = False
        # Fixed sequence length and padding side used by encode().
        self._pad_size = pad_size
        self._padding = padding
        # Tokens removed during cleaning: legal suffixes, geography, single
        # letters and similar low-signal words (includes some mojibake tokens
        # kept as-is for compatibility with previously fitted encoders).
        self._stopwords = \
            {'of', 'inc.', 'ltd', 'group', 'the', 'and', 'inc', 'ltd.', 'llc', 'company', 'limited', 'corporation',
             'pvt', 'pvt.', 'for', 'co.', 'at', 'formerly', 'us', 'usa', 'india', 'canada', 'llp', 'a', 'corp', 'co',
             'in', 'u.s.', 'city', 'world', 'china', 'corporate', 'p', 't', 'by', 's.a.', 'sa', 'uk', 'plc', 'm', 'i',
             'e', 'asia', 'europe', 'romania', 'washington', 'enterprise', 'enterprises', 'an', 'la', 'deloitte', 'al',
             'on', 'boston', 'â', 'london', 'subsidiary', 'regional', 'as', 'corps', 'r', 'western', 'africa',
             'singapore', 'pakistan', 'd', 'mexico', 'to', 'groupe', 'georgia', 'o', 'firm', 'mumbai', 'ohio',
             'bangalore', 'delhi', 'illinois', 'llc.', 'ã', 'dubai', 'indian', 'holding', 'j', 'c', 'british',
             'colorado', 'virginia', 'paris', 'pvt.ltd.', 'pte', 'pune', 'indiana', 'l', 'japan', 'canadian',
             'shanghai', 'b', 'european', 'minnesota', 'ca', 'deutsche', 'atlanta', 'houston', 'toronto', 'k',
             'miami', 's.', 'saudi', 'a.', 'pllc', 'g', 'f', 'h', 'n', 'q', 's', 'u', 'v', 'w', 'x', 'y', 'z',
             'missouri', 'jersey', 'england', 'chennai', 'tennessee', 'philadelphia', 'pennsylvania', 'indonesia',
             'asian', 'ireland', 'iowa', '.', 'alabama', 'cambridge', 'israel', 'netherlands', 'detroit', 'seattle',
             'philippines', 'connecticut', 'argentina', '1', 'malaysia', 'venezuela', 'scotland', '-'}
def text_clean(self, company):
""" Clean the input name string. """
try:
if self._lower:
company = company.lower()
company = re.sub('[^\\w \\-"\'.]+', ' ', company)
company = company.strip().split()
company = [token for token in company if token not in self._stopwords]
company = ' '.join(company)
return company
except (TypeError, AttributeError) as e:
logger.exception(f'text_clean [{company}]: {e}')
def fit(self, companies, num_words=150000):
""" Fit the new encoder if not loaded. """
clean_companies = [self.text_clean(company) for company in companies]
self._com_encoder = Tokenizer(num_words=num_words)
self._com_encoder.fit_on_texts(clean_companies)
with open(self._encoder_path, 'wb') as f:
pickle.dump(self._com_encoder, f, protocol=pickle.HIGHEST_PROTOCOL)
self._fit = True
    def load(self):
        """ Load the fitted encoder. """
        # Unpickle the Tokenizer that a previous fit() stored on disk.
        with open(self._encoder_path, 'rb') as f:
            self._com_encoder = pickle.load(f)
        self._load = True
def encode(self, companies):
""" Encode all input names. """
if not self._com_encoder:
self.load()
companies = [self.text_clean(company) for company in companies]
encoded_companies = self._com_encoder.texts_to_sequences(companies)
encoded_companies = pad_sequences(encoded_companies, maxlen=self._pad_size, padding=self._padding)
return encoded_companies
@property
def vocab_size(self):
return len(self._com_encoder.word_index) | PypiClean |
/fuzzy_search-2.0.1a0.tar.gz/fuzzy_search-2.0.1a0/fuzzy_search/search/phrase_searcher.py | import time
from typing import Dict, List, Union
from fuzzy_search.phrase.phrase_model import PhraseModel
from fuzzy_search.match.phrase_match import PhraseMatch
from fuzzy_search.match.phrase_match import Candidate
from fuzzy_search.match.phrase_match import adjust_match_offsets
from fuzzy_search.match.phrase_match import candidates_to_matches
from fuzzy_search.match.phrase_match import filter_matches_by_overlap
from fuzzy_search.match.skip_match import get_skipmatch_candidates
from fuzzy_search.match.exact_match import index_known_word_offsets
from fuzzy_search.match.exact_match import search_exact_phrases
from fuzzy_search.search.searcher import FuzzySearcher
from fuzzy_search.search.token_searcher import FuzzyTokenSearcher
from fuzzy_search.tokenization.string import score_levenshtein_similarity_ratio
from fuzzy_search.tokenization.token import Tokenizer
def get_text_dict(text: Union[str, dict], ignorecase: bool = False) -> dict:
    """Check that text is in a dictionary with an id property, so that passing a long text
    goes by reference instead of copying the long text string.

    :param text: a text string or text dictionary
    :type text: Union[str, dict]
    :param ignorecase: boolean flag for whether to ignore case
        (NOTE(review): currently accepted but unused here — lowercasing is
        always precomputed into 'text_lower'; confirm against callers.)
    :type ignorecase: bool
    :return: a text dictionary with 'id' and 'text_lower' properties
    :rtype: dict
    """
    if isinstance(text, str):
        text = {"text": text, "id": None, 'text_lower': text.lower()}
    if "id" not in text:
        text["id"] = None
    if "text_lower" not in text:
        # String inputs always received this key while dict inputs did not,
        # which could KeyError downstream when ignorecase matching reads it;
        # make both input forms consistent.
        text["text_lower"] = text["text"].lower()
    return text
class FuzzyPhraseSearcher(FuzzySearcher):

    def __init__(self, phrase_list: List[any] = None,
                 phrase_model: Union[Dict[str, any], List[Dict[str, any]], PhraseModel] = None,
                 config: Union[None, Dict[str, Union[str, int, float]]] = None,
                 tokenizer: Tokenizer = None,
                 token_searcher: FuzzyTokenSearcher = None):
        """This class represents the basic fuzzy searcher. You can pass a list of phrases or a phrase model and
        configuration dictionary that overrides the default configuration values. The default config dictionary
        is available via `fuzzy_search.default_config`.

        To set e.g. the character ngram_size to 3 and the skip_size to 1 use the following dictionary:

        config = {
            'ngram_size': 3,
            'skip_size': 1
        }

        :param phrase_list: a list of phrases (a list of strings or more complex dictionaries with phrases and variants)
        :type phrase_list: list
        :param phrase_model: a phrase model
        :type phrase_model: PhraseModel
        :param config: a configuration dictionary to override default configuration properties.
        Only the properties in the config dictionaries of updated.
        :type config: dict
        :param tokenizer: a tokenizer instance
        :type tokenizer: Tokenizer
        :param token_searcher: a fuzzy token searcher instance (using the same phrase model and config)
        :type token_searcher: FuzzyTokenSearcher
        """
        super().__init__(phrase_list=phrase_list, phrase_model=phrase_model,
                         config=config, tokenizer=tokenizer)
        # When no token searcher is supplied, build one that shares this
        # searcher's phrase model, config and tokenizer.
        if token_searcher is None:
            token_searcher = FuzzyTokenSearcher(phrase_model=self.phrase_model,
                                                config=self.config, tokenizer=self.tokenizer)
        self.token_searcher = token_searcher
def find_candidates(self, text: dict, use_word_boundaries: bool,
include_variants: Union[None, bool] = None,
known_word_offset: Dict[int, Dict[str, any]] = None,
debug: int = 0) -> List[Candidate]:
"""Find candidate fuzzy matches for a given text.
:param text: the text object to match with phrases
:type text: dict
:param use_word_boundaries: use word boundaries in determining match boundaries
:type use_word_boundaries: bool
:param include_variants: boolean flag for whether to include phrase variants for finding matches
:type include_variants: bool
:param known_word_offset: a dictionary of known words and their text offsets based on exact matches
:type known_word_offset: Dict[int, Dict[str, any]]
:param debug: level to show debug information
:type debug: int
:return: a list of candidate matches
:rtype: List[Candidate]
"""
skip_matches = self.find_skipgram_matches(text, include_variants=include_variants,
known_word_offset=known_word_offset)
candidates = get_skipmatch_candidates(text, skip_matches, self.skipgram_threshold, self.phrase_model,
max_length_variance=self.max_length_variance,
ignorecase=self.ignorecase, debug=debug)
if debug > 0:
print('find_candidates - candidates:', candidates)
filtered = []
use_word_boundaries = use_word_boundaries if use_word_boundaries is not None else self.use_word_boundaries
if debug > 0:
print('find_candidates - start filtereing candidates')
for candidate in candidates:
if debug > 1:
print()
print('find_candidates - candidate:', candidate)
if debug > 0:
print('find_candidates - use_word_boundaries:', use_word_boundaries)
if use_word_boundaries:
if debug > 0:
print('find_candidates - adjusting match offsets')
adjusted_match = adjust_match_offsets(candidate.phrase.phrase_string, candidate.match_string,
text, candidate.match_start_offset, candidate.match_end_offset,
self.punctuation, debug=debug)
if debug > 0:
print('done adjusting match')
print("find_candidates - adjusted_match:", adjusted_match)
if not adjusted_match:
continue
candidate.match_start_offset = adjusted_match["match_start_offset"]
candidate.match_end_offset = adjusted_match["match_end_offset"]
candidate.match_string = adjusted_match["match_string"]
if debug > 0:
print("find_candidates - new match string:", candidate.match_string)
if debug:
print('find_candidates - appending candidate:', candidate)
filtered.append(candidate)
if debug > 0:
print('find_candidates - returning candidates:', filtered)
return filtered
def filter_matches_by_distractors(self, matches: List[PhraseMatch]) -> List[PhraseMatch]:
filtered: List[PhraseMatch] = []
for match in matches:
if match.phrase.phrase_string in self.phrase_model.has_distractors:
for distractor in self.phrase_model.has_distractors[match.phrase.phrase_string]:
score = score_levenshtein_similarity_ratio(match.string, distractor)
if score > match.levenshtein_similarity:
break
else:
filtered.append(match)
else:
filtered.append(match)
return filtered
def filter_matches_by_threshold(self, matches: List[PhraseMatch]) -> List[PhraseMatch]:
filtered: List[PhraseMatch] = []
for match in matches:
# print('match.character_overlap:', match.character_overlap)
# print('match.ngram_overlap:', match.ngram_overlap)
# print('match.levenshtein_similarity:', match.levenshtein_similarity)
if match.character_overlap < self.char_match_threshold:
continue
if match.ngram_overlap < self.ngram_threshold:
continue
if match.levenshtein_similarity < self.levenshtein_threshold:
continue
filtered.append(match)
return filtered
def find_matches(self, text: Union[str, Dict[str, str]],
use_word_boundaries: Union[None, bool] = None,
allow_overlapping_matches: Union[None, bool] = None,
include_variants: Union[None, bool] = None,
filter_distractors: Union[None, bool] = None,
skip_exact_matching: bool = None,
debug: int = 0) -> List[PhraseMatch]:
"""Find all fuzzy matching phrases for a given text. By default, a first pass of exact matching is conducted
to find exact occurrences of phrases. This is to speed up the fuzzy matching pass
:param text: the text (string or dictionary with 'text' property) to find fuzzy matching phrases in.
:type text: Union[str, Dict[str, str]]
:param use_word_boundaries: use word boundaries in determining match boundaries
:type use_word_boundaries: Union[None, bool]
:param allow_overlapping_matches: boolean flag for whether to allow matches to overlap in their text ranges
:type allow_overlapping_matches: Union[None, bool]
:param include_variants: boolean flag for whether to include phrase variants for finding matches
:type include_variants: Union[None, bool]
:param filter_distractors: boolean flag for whether to remove phrase matches that better match distractors
:type filter_distractors: Union[None, bool]
:param skip_exact_matching: boolean flag whether to skip the exact matching step
:type skip_exact_matching: Union[None, bool]
:return: a list of phrases matches
:param debug: level to show debug information
:type debug: int
:rtype: PhraseMatch
"""
if debug > 0:
print('find_matches - getting text dict')
time_step = step_timer()
if self.phrase_model is None:
raise ValueError("No phrase model indexed")
text = get_text_dict(text, ignorecase=self.ignorecase)
if use_word_boundaries is None:
use_word_boundaries = self.use_word_boundaries
if skip_exact_matching is None:
skip_exact_matching = self.skip_exact_matching
if not skip_exact_matching:
if debug > 0:
time_step()
print("find_matches - running exact matching")
exact_matches = self.find_exact_matches(text, use_word_boundaries=use_word_boundaries,
include_variants=include_variants)
known_word_offset = index_known_word_offsets(exact_matches)
else:
if debug > 0:
time_step()
print("find_matches - skipping exact matching")
exact_matches = []
known_word_offset = {}
if debug > 0:
time_step()
print('find_matches - number of exact matches:', len(exact_matches))
candidates = self.find_candidates(text, use_word_boundaries=use_word_boundaries,
include_variants=include_variants,
known_word_offset=known_word_offset, debug=debug)
if debug > 0:
print('find_matches - received from find_candidates:', candidates)
if debug > 0:
time_step()
print('find_matches - candidates:', candidates)
matches = candidates_to_matches(candidates, text, self.phrase_model, ignorecase=self.ignorecase)
if debug > 0:
time_step()
print('find_macthes - matches:', matches)
filtered_matches = self.filter_matches_by_threshold(matches)
if filter_distractors is None:
filter_distractors = self.filter_distractors
if filter_distractors:
filtered_matches = self.filter_matches_by_distractors(filtered_matches)
if allow_overlapping_matches is None:
allow_overlapping_matches = self.allow_overlapping_matches
filtered_matches = filtered_matches + exact_matches
if not allow_overlapping_matches:
filtered_matches = filter_matches_by_overlap(filtered_matches)
# print(exact_matches)
if debug > 0:
time_step()
print('find_matches - filtered_matches:', filtered_matches)
return sorted(filtered_matches, key=lambda x: (x.text_id, x.offset, x.offset + len(x.string)))
def find_exact_matches(self, text: Union[str, Dict[str, str]],
use_word_boundaries: Union[None, bool] = None,
include_variants: Union[None, bool] = None,
debug: int = 0) -> List[PhraseMatch]:
"""Find all fuzzy matching phrases for a given text.
:param text: the text (string or dictionary with 'text' property) to find fuzzy matching phrases in.
:type text: Union[str, Dict[str, str]]
:param use_word_boundaries: use word boundaries in determining match boundaries
:type use_word_boundaries: Union[None, bool]
:param include_variants: boolean flag for whether to include phrase variants for finding matches
:type include_variants: Union[None, bool]
:param debug: level to show debug information
:type debug: int
:return: a list of phrases matches
:rtype: PhraseMatch
"""
exact_matches: List[PhraseMatch] = []
text = get_text_dict(text, ignorecase=self.ignorecase)
if use_word_boundaries is None:
use_word_boundaries = self.use_word_boundaries
if include_variants is None:
include_variants = self.include_variants
if debug > 0:
print('find_exact_matches - use_word_boundaries:', use_word_boundaries)
print('find_exact_matches - include_variants:', include_variants)
for exact_match in search_exact_phrases(self.phrase_model, text, use_word_boundaries=use_word_boundaries,
include_variants=include_variants, debug=debug):
exact_matches.append(exact_match)
return exact_matches
def step_timer():
    """Create a stopwatch closure for timing pipeline steps.

    :return: a callable that, on each invocation, prints and returns the
        number of seconds elapsed since the previous invocation, together
        with the running total since creation.
    """
    # perf_counter is monotonic, so step durations cannot go negative when
    # the system clock is adjusted (time.time offered no such guarantee).
    first_step = time.perf_counter()
    prev_step = first_step

    def time_step():
        nonlocal prev_step
        curr_step = time.perf_counter()
        took = curr_step - prev_step
        prev_step = curr_step
        print(f'\tstep took {took: >.2f} seconds, total: {curr_step - first_step: >.2f}')
        return took

    return time_step
/lightning-app-2.1.0rc0.tar.gz/lightning-app-2.1.0rc0/src/lightning_app/utilities/app_commands.py |
import logging
import os
import subprocess
from dataclasses import dataclass, field
from typing import List
from lightning_app.utilities.exceptions import MisconfigurationException
logger = logging.getLogger(__name__)

# These are common lines at the top of python files which conflict with our
# command syntax but which should not be executed. This is non-exhaustive,
# and it may be better to just ignore shebang lines if we see problems here.
# A stripped line matching one of these exactly is skipped by
# _extract_commands_from_file.
APP_COMMAND_LINES_TO_IGNORE = {
    "#!/usr/bin/python",
    "#!/usr/local/bin/python",
    "#!/usr/bin/env python",
    "#!/usr/bin/env python3",
}
@dataclass
class CommandLines:
    # Path of the file the commands were extracted from.
    file: str
    # Shell commands found in the file's leading comment block.
    commands: List[str] = field(default_factory=list)
    # 1-based line number of each command, parallel to ``commands``.
    line_numbers: List[int] = field(default_factory=list)
def _extract_commands_from_file(file_name: str) -> CommandLines:
    """Extract all lines at the top of the file which contain commands to execute.

    Parsing stops at the first non-comment line. The returned struct pairs each
    command with the 1-based line number it appeared on.
    """
    cl = CommandLines(
        file=file_name,
    )
    with open(file_name) as f:
        file_lines = f.readlines()

    for line_number, line in enumerate(file_lines):
        line = line.strip()
        if line in APP_COMMAND_LINES_TO_IGNORE:
            continue

        # Stop parsing at the first non-comment line at the top of the file.
        # The original used `continue` here, contradicting this comment and
        # run_app_commands' docstring: commands appearing after real code
        # would still have been executed.
        if not line.startswith("#"):
            break

        # remove comment marker and any leading / trailing whitespaces
        line = line.lstrip("#").strip()
        if len(line) == 0:
            # do not stop parsing on empty comment lines
            continue

        # only run commands starting with a bang (!) & strip the bang from the executed command.
        if line[0] != "!":
            continue

        line = line[1:].strip()
        cl.commands.append(line)
        # add 1 to the line number because enumerate returns indexes starting
        # at 0, while text editors list lines beginning at index 1.
        cl.line_numbers.append(line_number + 1)

    return cl
def _execute_app_commands(cl: CommandLines) -> None:
    """Run each extracted app command in a subprocess shell.

    The subprocess inherits the environment of the current process.
    """
    for command, line_number in zip(cl.commands, cl.line_numbers):
        logger.info(f"Running app setup command: {command}")
        completed = subprocess.run(
            command,
            shell=True,
            env=os.environ,
        )
        # Equivalent to completed.check_returncode(): any non-zero exit
        # status aborts with a MisconfigurationException.
        if completed.returncode != 0:
            err_txt = (
                f"There was a problem on line {line_number} of {cl.file} while executing the command: "
                f"{command}. More information on the problem is shown in the output above this "
                f"message. After editing this line to fix the problem you can run the app again."
            )
            logger.error(err_txt)
            raise MisconfigurationException(err_txt) from None
def run_app_commands(file: str) -> None:
    """Extract commands from the file's leading comment block and execute them.

    A command is a comment line whose first non-whitespace character after the
    ``#`` marker is a bang (``!``). Parsing stops after the first
    non-comment line, and the commands run in a subprocess shell that
    preserves the current environment. For example::

        # some file            <- not a command
        # !echo "hello world"  <- a command
        # ! pip install foo    <- a command
        # foo! bar             <- not a command
        import lightning_app   <- not a command; parsing ends here

    Here ``echo "hello world"`` and ``pip install foo`` would be executed in
    the current running environment.
    """
    cl = _extract_commands_from_file(file_name=file)
    if not cl.commands:
        logger.debug("No in app commands to install.")
        return
    _execute_app_commands(cl=cl)
/planetary_test_data-0.4.0.tar.gz/planetary_test_data-0.4.0/docs/index.rst | .. planetary_test_data documentation master file, created by
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to Planetary Test Data's documentation!
===============================================
Contents:
.. toctree::
:maxdepth: 2
readme
installation
usage
planetary_test_data
contributing
authors
history
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
| PypiClean |
/dbpedia_ent-0.1.9-py3-none-any.whl/dbpedia_ent/dto/syn/n3/s/rev_se.py |
d_rev_se = {'se-r_spec_v': 'nissan_sentra',
'se_210_caravelle': 'sud_aviation_caravelle',
'se_da_guarda': 'cathedral_of_guarda',
'se_de_lisboa': 'lisbon_cathedral',
'se_gwang_ri': 'ri_se_gwang',
'se_lim_oh': 'oh_se-lim',
'se_pon_river': 'sepon_river',
'se_postal_area': 'london_postal_district',
'se_stiamo_insieme': 'riccardo_cocciante',
'se_yol_kim': 'kim_se-yol',
'sea-blue_histiocyte_syndrome': 'sea-blue_histiocytosis',
'sea-land_service_inc.': 'sea-land_service',
'sea-tac_international_airport': 'seattle-tacoma_international_airport',
'sea_4000_project': 'hobart_class_destroyer',
'sea_air_rescue': 'air-sea_rescue',
'sea_and_cake': 'the_sea_and_cake',
'sea_and_sand': 'quadrophenia',
'sea_around_us': 'the_sea_around_us',
'sea_beach_line': 'bmt_sea_beach_line',
'sea_beach_railroad': 'bmt_sea_beach_line',
'sea_beach_railway': 'bmt_sea_beach_line',
'sea_bird_island': 'yerba_buena_island',
'sea_breeze_cocktail': 'cocktail',
'sea_breeze_expressway': 'new_york_state_route_590',
'sea_cadet_corps': 'sea_cadets',
'sea_cat_missiles': 'sea_cat_missile',
'sea_clown_triopha': 'triopha_catalinae',
'sea_containers_ltd': 'sea_containers',
'sea_effect_snow': 'lake-effect_snow',
'sea_floor_spread': 'seafloor_spreading',
'sea_floor_spreading': 'seafloor_spreading',
'sea_grant_college': 'sea_grant_colleges',
'sea_harrier_fa.2': 'bae_sea_harrier',
'sea_harrier_fa2': 'bae_sea_harrier',
'sea_ice_runway': 'ice_runway',
'sea_island_bridge': 'sea_island_connector',
'sea_island_cotton': 'gossypium_barbadense',
'sea_land_service': 'sea-land_service',
'sea_level_change': 'current_sea_level_rise',
'sea_level_curve': 'sea-level_curve',
'sea_level_increase': 'current_sea_level_rise',
'sea_level_pressure': 'atmospheric_pressure',
'sea_level_rise': 'current_sea_level_rise',
'sea_life_centre': 'sea_life_centres',
'sea_of_afar': 'afar_depression',
'sea_of_aral': 'aral_sea',
'sea_of_arava': 'dead_sea',
'sea_of_archipelago': 'archipelago_sea',
'sea_of_azof': 'sea_of_azov',
'sea_of_azoff': 'sea_of_azov',
'sea_of_celebes': 'celebes_sea',
'sea_of_charge': 'fermi_gas',
'sea_of_chinnereth': 'sea_of_galilee',
'sea_of_chinneroth': 'sea_of_galilee',
'sea_of_clouds': 'mare_nubium',
'sea_of_corea': 'korea',
'sea_of_cortes': 'gulf_of_california',
'sea_of_cortez': 'gulf_of_california',
'sea_of_cubes': 'sea_of_cubicles',
'sea_of_dirac': 'dirac_sea',
'sea_of_fecundity': 'mare_fecunditatis',
'sea_of_gallilee': 'sea_of_galilee',
'sea_of_genezareth': 'sea_of_galilee',
'sea_of_gennesaret': 'sea_of_galilee',
'sea_of_glass': 'book_of_revelation',
'sea_of_grass': 'the_sea_of_grass',
'sea_of_hands': 'national_football_league_lore',
'sea_of_helle': 'hellespont',
'sea_of_ice': 'mer_de_glace',
'sea_of_instability': 'island_of_stability',
'sea_of_kinnereth': 'sea_of_galilee',
'sea_of_korea': 'sea_of_japan',
'sea_of_koro': 'koro_sea',
'sea_of_lot': 'dead_sea',
'sea_of_madness': 'somewhere_in_time',
'sea_of_marmora': 'sea_of_marmara',
'sea_of_milk': 'ocean_of_milk',
'sea_of_monsters': 'the_sea_of_monsters',
'sea_of_moyle': 'straits_of_moyle',
'sea_of_nectar': 'mare_nectaris',
'sea_of_norway': 'norwegian_sea',
'sea_of_ohotsk': 'sea_of_okhotsk',
'sea_of_oman': 'gulf_of_oman',
'sea_of_propontis': 'sea_of_marmara',
'sea_of_rains': 'mare_imbrium',
'sea_of_reeds': 'reed_sea',
'sea_of_rocks': 'felsenmeer',
'sea_of_salt': 'lake_milh',
'sea_of_serenity': 'mare_serenitatis',
'sea_of_sicily': 'strait_of_sicily',
'sea_of_sisters': 'sisters_of_the_sea',
'sea_of_states': 'dirac_sea',
'sea_of_swords': 'paths_of_darkness',
'sea_of_tiberias': 'sea_of_galilee',
'sea_of_tranquility': 'mare_tranquillitatis',
'sea_of_tranquillity': 'mare_tranquillitatis',
'sea_of_trees': 'aokigahara',
'sea_of_trolls': 'the_sea_of_trolls',
'sea_of_vapors': 'mare_vaporum',
'sea_of_wind': 'the_twelve_kingdoms:_sea_of_wind',
'sea_of_zoar': 'dead_sea',
'sea_otter_rock': 'sea_otter_rocks',
'sea_pines_heritage': 'verizon_heritage',
'sea_pines_plantation': 'sea_pines_resort',
'sea_poison_tree': 'barringtonia_asiatica',
'sea_river_mediterranean': 'exxon_valdez',
'sea_side_resort': 'beach',
'sea_surface_temperatures': 'sea_surface_temperature',
'sea_tac_mall': 'the_commons_at_federal_way',
'sea_trade_route': 'early_history_of_kedah',
'sea_urchin_hakea': 'hakea_petiolaris',
'sea_urchin_injuries': 'sea_urchin_injury',
'sea_view_railroad': 'seaview_railroad',
'sea_water_greenhouse': 'seawater_greenhouse',
'sea_wind_line': 'seawind_line',
'sea_world_inc.': 'seaworld',
'sea_world_orlando': 'seaworld_orlando',
'sea_world_texas': 'seaworld_san_antonio',
'sea_write_award': 's.e.a._write_award',
'seabed_and_foreshore': 'new_zealand_foreshore_and_seabed_controversy',
'seaboard_air-line_railroad': 'seaboard_air_line_railroad',
'seaboard_air_line': 'seaboard_air_line_railroad',
'seaboard_coast_line': 'seaboard_coast_line_railroad',
'seaboard_coastline_industries': 'seaboard_coast_line_industries',
'seaborn_a._roddenbery': 'seaborn_roddenbery',
'seaborn_anderson_roddenbery': 'seaborn_roddenbery',
'seaburn_railway_station': 'seaburn_metro_station',
'seach_engine_optimization': 'search_engine_optimization',
'seach_oriented_architecture': 'search_oriented_architecture',
'seacliff_mental_asylum': 'seacliff_lunatic_asylum',
'seacliff_mental_hospital': 'seacliff_lunatic_asylum',
'seactain_na_gaeilge': 'seachtain_na_gaeilge',
'seadog_brewing_company': 'sea_dog_brewing_company',
'seafair_torchlight_parade': 'torchlight_parade',
'seafarers_international_union': 'seafarers_international_union_of_north_america',
'seafloor_spreading_hypothesis': 'seafloor_spreading',
'seafloor_spreading_ridge': 'mid-ocean_ridge',
'seafloor_spreading_theory': 'seafloor_spreading',
'seafood_bird_nest': 'seafood_birdsnest',
'seafood_birds_nest': 'seafood_birdsnest',
'seaford-oyster_bay_expressway': 'new_york_state_route_135',
'seaford_golf_course': 'seaford_golf_club',
'seaford_town_fc': 'seaford_town_f.c.',
'seagate_police_department': 'sea_gate_police_department',
'seagoing_buoy_tender': 'uscg_seagoing_buoy_tender',
'seagram_7_crown': 'pernod_ricard',
'seagram_company_ltd': 'seagram',
'seagram_company_ltd.': 'seagram',
'seaham_red_star': 'seaham_red_star_f.c.',
'seahorse_key_lighthouse': 'cedar_key_light',
'seahpo_peak_falls': 'mount_shuksan_waterfalls',
'seal_bay_airport': 'seal_bay_seaplane_base',
'seal_chart_murder': 'caroline_mary_luard',
'seal_nunataks_volcano': 'seal_nunataks',
'seal_of_approval': 'certification_mark',
'seal_of_bucharest': 'coat_of_arms_of_bucharest',
'seal_of_confession': 'seal_of_the_confessional',
'seal_of_jefferson': 'state_of_jefferson',
'seal_of_junqueiropolis': 'coat_of_arms',
'seal_of_miskolc': 'coat_of_arms_of_miskolc',
'seal_of_nehahra': 'the_seal_of_nehahra',
'seal_of_ohio': 'great_seal_of_ohio',
'seal_of_oregon': 'oregon_state_seal',
'seal_of_palau': 'coat_of_arms_of_palau',
'seal_of_peru': 'coat_of_arms_of_peru',
'seal_of_prophet': 'seal_of_the_prophets',
'seal_of_rassilon': 'rassilon',
'seal_of_sarajevo': 'coat_of_arms_of_sarajevo',
'seal_of_sofia': 'coat_of_arms_of_sofia',
'seal_of_suleyman': 'seal_of_solomon',
'seal_of_tibet': 'emblem_of_tibet',
'seal_of_tripoli': 'coat_of_arms_of_tripoli',
'seal_of_vermont': 'great_seal_of_vermont',
'seal_team_6': 'united_states_naval_special_warfare_development_group',
'seal_team_six': 'united_states_naval_special_warfare_development_group',
'seal_the_deal': 'the_sword_of_god',
'sealaska_regional_corporation': 'sealaska_corporation',
'sealed-bid_second-price_auction': 'vickrey_auction',
'sealed_air_corp.': 'sealed_air',
'sealed_air_corporation': 'sealed_air',
'sealed_bid_auction': 'auction_theory',
'sealed_first-price_auction': 'first-price_sealed-bid_auction',
'sealed_insulating_glass': 'insulated_glazing',
'sealed_lead-acid_batteries': 'vrla_battery',
'sealed_lead-acid_battery': 'vrla_battery',
'sealed_lead-acid_cell': 'vrla_battery',
'sealed_lead-acid_cells': 'vrla_battery',
'sealed_second-price_auction': 'vickrey_auction',
'sealed_source_radiotherapy': 'brachytherapy',
'sealed_with_diss': 'the_clique_series',
'sealing_of_records': 'record_sealing',
'sealink_british_ferries': 'sealink',
'sealink_stena_line': 'sealink',
'sealth_high_school': 'chief_sealth_high_school',
'sealy_mattress_company': 'sealy_corporation',
'seaman_first_class': 'seaman',
'seaman_jack_kramer': 'jack_kramer',
'seamless_desktop_mode': 'virtualbox',
'seamless_garment_network': 'consistent_life_ethic',
'seamless_rate_adaptation': 'itu_g.992.3/4',
'seamour_shavin_house': 'seamour_and_gerte_shavin_house',
'seamour_shavin_residence': 'seamour_and_gerte_shavin_house',
'seamus_casey_actor': 'seamus_casey',
'seamus_davey_fitzpatrick': 'seamus_davey-fitzpatrick',
'seamus_justin_heaney': 'seamus_heaney',
'seamus_p._mccaffery': 'seamus_mccaffery',
'seamus_p._mccaffrey': 'seamus_mccaffery',
'seamus_p_mccaffery': 'seamus_mccaffery',
'seamus_p_mccaffrey': 'seamus_mccaffery',
'sean_alexander_music': 'sean_alexander',
'sean_anthony_frye': 'sean_frye',
'sean_anthony_pittman': 'sean_a._pittman',
'sean_b_carroll': 'sean_b._carroll',
'sean_bell_case': 'sean_bell_shooting_incident',
'sean_bell_shooting': 'sean_bell_shooting_incident',
'sean_boru_author': 'sean_boru',
'sean_charles_watkins': 'sean_watkins',
'sean_d._altman': 'sean_altman',
'sean_d_tucker': 'sean_d._tucker',
'sean_f._cox': 'sean_cox',
'sean_f._logan': 'sean_logan',
'sean_francis_cox': 'sean_cox',
'sean_francis_quinn': 'sean_f._quinn',
'sean_gregory_may': 'sean_may',
'sean_hannity_show': 'the_sean_hannity_show',
'sean_john_combs': 'sean_combs',
'sean_justin_penn': 'sean_penn',
'sean_k_reynolds': 'sean_k._reynolds',
'sean_kelly_team': 'an_post-m.donnelly-grant_thornton-sean_kelly_team',
'sean_kingston_discography': 'sean_kingston',
'sean_leslie_flynn': 'sean_flynn',
'sean_m_carroll': 'sean_m._carroll',
'sean_man_now': 'ytmnd',
'sean_michael_taylor': 'sean_taylor',
'sean_michael_waltman': 'sean_waltman',
'sean_o_maolchalann': 'john_philip_holland',
'sean_o_tuathhallain': 'john_toland',
'sean_ono_lennon': 'sean_lennon',
'sean_patrick_flannery': 'sean_patrick_flanery',
'sean_patrick_reiley': 'seanbaby',
'sean_paul_henriques': 'sean_paul',
'sean_paul_lockhart': 'brent_corrigan',
'sean_paul_lockheart': 'brent_corrigan',
'sean_peter_becker': 'sean_becker',
'sean_preston_federline': 'britney_spears',
'sean_preston_spears': 'britney_spears',
'sean_puffy_combs': 'sean_combs',
'sean_puffy_coombs': 'sean_combs',
'sean_r._parnell': 'sean_parnell',
'sean_s._munson': 'sean_munson',
'sean_s_munson': 'sean_munson',
'sean_st._ledger': 'sean_st_ledger',
'sean_thomas_casey': 'sean_casey',
'sean_watkins_discography': 'sean_watkins',
'sean_william_scott': 'seann_william_scott',
'sean_williams_scott': 'seann_william_scott',
'seaparate_car_bill': 'separate_car_act',
'seaplane_defence_flight': 'no._213_squadron_raf',
'seaplane_type_1000': 'ad_seaplane_type_1000',
'seapoint_dart_station': 'seapoint',
'seapoint_railway_station': 'seapoint',
'seapoint_train_station': 'seapoint',
'seaport_railway_station': 'seapoint',
'seaports_of_japan': 'port',
'seaqaqa_f._c.': 'seaqaqa_f.c.',
'seaqaqa_f_c': 'seaqaqa_f.c.',
'sear_river_mediterranean': 'exxon_valdez',
'search_based_application': 'search-based_application',
'search_based_applications': 'search-based_application',
'search_by_singing': 'query_by_humming',
'search_centric_architecture': 'search_oriented_architecture',
'search_engine_engineering': 'search_engine_optimization',
'search_engine_gaming': 'search_engine_optimization',
'search_engine_optimisation': 'search_engine_optimization',
'search_engine_optimiser': 'search_engine_optimization',
'search_engine_optimization/': 'search_engine_optimization',
'search_engine_optimizers': 'search_engine_optimization',
'search_engine_placement': 'search_engine_optimization',
'search_engine_position': 'search_engine_optimization',
'search_engine_prominence': 'search_engine_optimization',
'search_engine_queries': 'web_search_query',
'search_engine_query': 'web_search_query',
'search_engine_ranking': 'search_engine_optimization',
'search_engine_robot': 'web_crawler',
'search_engine_robots': 'web_crawler',
'search_engine_spam': 'spamdexing',
'search_engine_spammer': 'spamdexing',
'search_engine_spamming': 'spamdexing',
'search_engine_spider': 'web_crawler',
'search_engine_spiders': 'web_crawler',
'search_for_dignity': 'annie_douglas_richards',
'search_for_happiness': 'search_for_tomorrow',
'search_for_spock': 'star_trek_iii:_the_search_for_spock',
'search_guard_successor': 'search_guard_successor_foundation',
'search_marketing_strategies': 'internet_marketing',
'search_optimization_marketing': 'search_engine_optimization',
'search_server_express': 'microsoft_search_server',
'searches_and_seizures': 'search_and_seizure',
'searching_for_dragons': 'enchanted_forest_chronicles',
'searcy_state_hospital': 'mount_vernon_arsenal-searcy_hospital_complex',
'seargent_s._prentiss': 'seargent_smith_prentiss',
'searle_scholar_award': 'searle_scholars_program',
'searle_scholars_award': 'searle_scholars_program',
'searles_dry_lake': 'searles_lake',
'sears_and_roebuck': 'sears',
'sears_auto_center': 'sears',
'sears_c._walker': 'sears_cook_walker',
'sears_canada-casual_male': 'casual_male_retail_group',
'sears_canada_inc.': 'sears_canada',
'sears_catalog_homes': 'sears_catalog_home',
'sears_christmas_catalog': 'sears_wishbook',
'sears_christmas_catalogue': 'sears_wishbook',
'sears_christmas_wishbook': 'sears_wishbook',
'sears_diehard_200': 'copart_200',
'sears_diehard_250': 'northerntool.com_250',
'sears_holding_company': 'sears_holdings_corporation',
'sears_holdings_corp.': 'sears_holdings_corporation',
'sears_holdings_plc': 'sears_plc',
'sears_modern_homes': 'sears_catalog_home',
'sears_point_cutoff': 'california_state_route_37',
'sears_point_raceway': 'infineon_raceway',
'sears_subject_headings': 'minnie_earl_sears',
'sears_tower_1905': 'sears_merchandise_building_tower',
'sears_tower_1906': 'sears_merchandise_building_tower',
'sears_toy_catalog': 'sears_wishbook',
'sears_toy_catalogue': 'sears_wishbook',
'sears_video_arcade': 'atari_2600',
'sears_wish_book': 'sears_wishbook',
'seas_of_cheese': 'sailing_the_seas_of_cheese',
'seas_of_titan': 'lakes_of_titan',
'seashore_bent_grass': 'agrostis_pallens',
'seashore_state_park': 'first_landing_state_park',
'seaside_fm_105.3': 'seaside_fm',
'season_2:_girlicious': 'pussycat_dolls_present:_girlicious',
'season_for_fasting': 'seasons_for_fasting',
'season_for_miracles': 'a_season_for_miracles',
'season_in_review': 'season_review',
'season_of_advent': 'advent',
'season_of_heat': 'the_warped_ones',
'season_of_inundation': 'season_of_the_inundation',
'season_of_mists': 'the_sandman:_season_of_mists',
'season_of_sakura': 'season_of_the_sakura',
'season_ticket_holder': 'season_ticket',
'seasonal_effective_disorder': 'seasonal_affective_disorder',
'seasonal_food_calendar': 'seasonal_food',
'seasonal_heat_storage': 'seasonal_thermal_store',
'seasonal_heat_store': 'seasonal_thermal_store',
'seasonal_spread_traders': 'seasonal_spread_trading',
'seasonal_thermal_storage': 'seasonal_thermal_store',
'seasoned_trade_lines': 'seasoned_trade_line',
'seasons_of_giving': 'winnie_the_pooh:_seasons_of_giving',
'seasons_of_mist': 'season_of_mist',
'seastallion_from_glendalough': 'havhingsten_fra_glendalough',
'seat_allocation_error': 'seat_allocation_error_and_degree_of_negation',
'seat_altea_freetrack': 'seat_altea_xl/freetrack',
'seat_altea_xl': 'seat_altea_xl/freetrack',
'seat_belt_controversy': 'seat_belt_legislation',
'seat_belt_law': 'seat_belt_legislation',
'seat_belt_laws': 'seat_belt_legislation',
'seat_of_albany': 'electoral_district_of_albany',
'seat_of_ivanhoe': 'electoral_district_of_ivanhoe',
'seat_of_learning': 'seat_of_wisdom',
'seat_of_leederville': 'electoral_district_of_leederville',
'seat_of_nannine': 'electoral_district_of_nannine',
'seat_of_roebourne': 'electoral_district_of_roebourne',
'seatac_international_airport': 'seattle-tacoma_international_airport',
'seated_calf_raise': 'calf_raises',
'seated_calf_raises': 'calf_raises',
'seated_liberty_dime': 'united_states_seated_liberty_coinage',
'seated_liberty_quarter': 'united_states_seated_liberty_coinage',
'seatle_public_library': 'seattle_public_library',
'seatown_of_largo': 'lower_largo',
'seattle-bainbridge_island_ferry': 'washington_state_ferries',
'seattle-first_national_bank': 'seafirst_bank',
'seattle-tacoma_metropolitan_area': 'puget_sound',
'seattle-tacoma_shipbuilding_co.': 'seattle-tacoma_shipbuilding_corporation',
'seattle-tacoma_shipbuilding_company': 'seattle-tacoma_shipbuilding_corporation',
'seattle-tacoma_shipbuilding_corp.': 'seattle-tacoma_shipbuilding_corporation',
'seattle_academy_gym': 'seattle_academy_of_arts_and_sciences',
'seattle_academy_vanderbilt': 'seattle_academy_of_arts_and_sciences',
'seattle_area_council': 'scouting_in_washington',
'seattle_bus_tunnel': 'downtown_seattle_transit_tunnel',
'seattle_business_journal': 'american_city_business_journals',
'seattle_center_arena': 'mercer_arena',
'seattle_center_campus': 'seattle_center',
'seattle_center_coliseum': 'keyarena',
'seattle_commuter_rail': 'sounder_commuter_rail',
'seattle_convention_center': 'washington_state_convention_and_trade_center',
'seattle_fault_zone': 'seattle_fault',
'seattle_film_critics': 'seattle_film_critics_awards',
'seattle_hiberian_saints': 'hibernian_saints',
'seattle_hibernian_saints': 'hibernian_saints',
'seattle_international_raceway': 'pacific_raceways',
'seattle_international_raceways': 'pacific_raceways',
'seattle_light_rail': 'link_light_rail',
'seattle_mariners/team_records': 'seattle_mariners_team_records',
'seattle_mariners_roster': 'seattle_mariners',
'seattle_metal_festival': 'seattle_metal_fest',
'seattle_metro_area': 'seattle_metropolitan_area',
'seattle_metropolitan_magazine': 'seattle_metropolitan',
'seattle_mls_2010': 'seattle_sounders_fc',
'seattle_mls_team': 'seattle_sounders_fc',
'seattle_municipal_building': 'seattle_city_hall',
'seattle_neighborhoods_lists': 'neighborhoods_in_seattle',
'seattle_opera_house': 'mccaw_hall',
'seattle_pacific_college': 'seattle_pacific_university',
'seattle_pipe_band': 'city_of_seattle_pipe_band',
'seattle_port_commission': 'port_of_seattle',
'seattle_post_intelligencer': 'seattle_post-intelligencer',
'seattle_public_libraries': 'seattle_public_library',
'seattle_public_school': 'seattle_public_schools',
'seattle_radio_stations': 'media_in_seattle',
'seattle_regional_rail': 'sounder_commuter_rail',
'seattle_repertory_theater': 'seattle_repertory_theatre',
'seattle_school_district': 'seattle_public_schools',
'seattle_schools_system': 'education_in_seattle',
'seattle_sounders_f.c.': 'seattle_sounders_fc',
'seattle_sounders_saints': 'seattle_sounders_women',
'seattle_space_needle': 'space_needle',
'seattle_suburban_rail': 'sounder_commuter_rail',
'seattle_super_sonics': 'seattle_supersonics',
'seattle_symphony_orchestra': 'seattle_symphony',
'seattle_tacoma_airport': 'seattle-tacoma_international_airport',
'seattle_tea_party': 'tea_party_protests',
'seattle_to_portland': 'seattle_to_portland_bicycle_classic',
'seattle_town_hall': 'town_hall_seattle',
'seattle_transit_tunnel': 'downtown_seattle_transit_tunnel',
'seattle_underground_tour': 'seattle_underground',
'seattle_university_athletics': 'seattle_redhawks',
'seattle_waterfront_streetcar': 'waterfront_streetcar',
'seattle_world_exposition': 'century_21_exposition',
'seattle_youth_symphony': 'seattle_youth_symphony_orchestras',
'seaview_country_club': 'seaview_marriott_resort',
'seaview_elementary_school': 'school_district_68_nanaimo-ladysmith',
'seavington_st._mary': 'seavington_st_mary',
'seaward_bush_branch': 'tokanui_branch',
'seaward_defence_boats': 'seaward_class_defense_boats',
'seaward_kaikoura_range': 'kaikoura_ranges',
'seawater_air_conditioning': 'deep_water_source_cooling',
'seaway_international_bridge': 'three_nations_crossing',
'seawell_international_airport': 'grantley_adams_international_airport',
'seaworld_of_florida': 'seaworld_orlando',
'seb_eesti_uhispank': 'seb_pank',
'seb_vilniaus_bankas': 'seb_bankas',
'seba_united_fc': 'seba_united_f.c.',
'sebaceous_enlargement_symptoms': 'sebaceous_hyperplasia',
'sebaceous_gland_carcinoma': 'sebaceous_carcinoma',
'sebaceous_gland_diseases': 'sebaceous_gland',
'sebadoh_vs._helmet': 'sebadoh_vs_helmet',
'sebald_j._rutgers': 's._j._rutgers',
'sebangau_national_park': 'sabangau_river',
'sebasian_harrison_white': 's._harrison_white',
'sebastian_de_ferranti': 'sebastian_ziani_de_ferranti',
'sebastian_del_piombo': 'sebastiano_del_piombo',
'sebastian_fabian_klonowic': 'sebastian_klonowic',
'sebastian_fabian_peralta': 'rancho_rinconada_de_los_gatos',
'sebastian_ferner_johansen': 'princess_astrid_of_norway',
'sebastian_g._messmer': 'sebastian_gebhard_messmer',
'sebastian_h._white': 's._harrison_white',
'sebastian_harrison_white': 's._harrison_white',
'sebastian_koch_hansen': 'sebastian_koch-hansen',
'sebastian_newbold_coe': 'sebastian_coe',
'sebastian_of_rostock': 'sebastian_von_rostock',
'sebastian_of_spain': 'infante_sebastian_of_portugal_and_spain',
'sebastian_s._kresge': 's._s._kresge',
'sebastian_spering_kresge': 's._s._kresge',
'sebastian_star_bear': 'sebastian_star_bear:_first_mission',
'sebastiano_cardinal_baggio': 'sebastiano_baggio',
'sebastiano_cardinal_martinelli': 'sebastiano_martinelli',
'sebastiano_dal_piombo': 'sebastiano_del_piombo',
'sebastiano_nicolo_buonaparte': 'sebastiano_nicola_buonaparte',
'sebastiao_carvalho_leme': 'leme_panoramic_camera',
'sebastiao_of_portugal': 'sebastian_of_portugal',
'sebastien-roch_nicolas_chamfort': 'nicolas_chamfort',
'sebastien_de_vauban': 'vauban',
'sebastino_cardinal_baggio': 'sebastiano_baggio',
'sebat_bet_gurage': 'sebat_bet_gurage_language',
'sebel_town_house': 'sebel_townhouse_hotel',
'sebelas_maret_university': 'eleventh_march_university',
'seberang_perai_selatan': 'south_seberang_perai',
'seberang_perai_tengah': 'central_seberang_perai',
'seberang_perai_utara': 'north_seberang_perai',
'sec_230_immunity': 'section_230_of_the_communications_decency_act',
'sec_filing_codes': 'sec_filing',
'sec_form_4': 'form_4',
'sec_form_s-1': 'form_s-1',
'sec_taylor_stadium': 'principal_park',
'sec_v._sloan': 'sam_sloan',
'secant-secant_power_theorem': 'power_of_a_point',
'secaucus_transfer_station': 'secaucus_junction',
'secchi_disk_depth': 'secchi_disk',
'secchi_disk_transparency': 'secchi_disk',
'sech_square_distribution': 'logistic_distribution',
'sech_squared_distribution': 'logistic_distribution',
'seckel_type_dwarfism': 'virchow-seckel_syndrome',
'second-degree_av_block': 'second-degree_atrioventricular_block',
'second-degree_heart_block': 'second-degree_atrioventricular_block',
'second-generation_programming_languages': 'second-generation_programming_language',
'second-harmonic_imaging_microscopy': 'second_harmonic_imaging_microscopy',
'second-order_idiot_plot': 'idiot_plot',
'second-order_inclusion_probability': 'inclusion_probability',
'second-order_maximal_evenness': 'maximal_evenness',
'second-order_phase_transition': 'phase_transition',
'second-order_predicate_calculus': 'second-order_logic',
'second-price_sealed-bid_auction': 'vickrey_auction',
'second-round_chinese_character': 'second_round_of_simplified_chinese_characters',
'second_acid_test': 'acid2',
'second_affirmative_constructive': 'structure_of_policy_debate',
'second_affirmative_rebuttal': 'structure_of_policy_debate',
'second_afghan_war': 'second_anglo-afghan_war',
'second_agricultural_revolution': 'green_revolution',
'second_album_syndrome': 'sophomore_slump',
'second_anglo-boer_war': 'second_boer_war',
'second_anglo-chinese_war': 'second_opium_war',
'second_anthon_transcript': 'reformed_egyptian',
'second_arab-israeli_war': 'suez_crisis',
'second_assistant_camera': 'clapper_loader',
'second_assyriaii_empire': 'assyria',
'second_assyrian_empire': 'assyria',
'second_athenian_confederacy': 'second_athenian_empire',
'second_athenian_league': 'second_athenian_empire',
'second_austrian_republic': 'austria',
'second_austro-prussian_war': 'austro-prussian_war',
'second_ave_partners': 'second_avenue_partners',
'second_ave_subway': 'second_avenue_subway',
'second_avenue_el': 'irt_second_avenue_line',
'second_avenue_elevated': 'irt_second_avenue_line',
'second_balkenende_cabinet': 'netherlands_cabinet_balkenende-2',
'second_ballot_voting': 'runoff_voting',
'second_baltic_pipeline': 'baltic_pipeline_system-ii',
'second_baron_kershaw': 'baron_kershaw',
'second_battle_kharkiv': 'second_battle_of_kharkov',
'second_branchial_arch': 'hyoid_arch',
'second_british_invasion': 'british_invasion',
'second_brocard_point': 'brocard_points',
'second_buddhist_countil': 'second_buddhist_council',
'second_bulgarian_kingdom': 'second_bulgarian_empire',
'second_bulgarian_legion': 'bulgarian_legion',
'second_bulgarian_state': 'second_bulgarian_empire',
'second_bull_run': 'second_battle_of_bull_run',
'second_burmese_war': 'second_anglo-burmese_war',
'second_cabinet_balkenende': 'netherlands_cabinet_balkenende-2',
'second_cabinet_kok': 'netherlands_cabinet_kok-2',
'second_cambridge_catalog': 'second_cambridge_catalogue_of_radio_sources',
'second_cambridge_catalogue': 'second_cambridge_catalogue_of_radio_sources',
'second_carnatic_war': 'carnatic_wars',
'second_celtiberian_war': 'numantine_war',
'second_chance_program': 'criminon',
'second_chance_tuesday': 'first_tuesday',
'second_chechen_war/temp': 'second_chechen_war',
'second_china-japan_war': 'second_sino-japanese_war',
'second_china_war': 'second_opium_war',
'second_chinese-japanese_war': 'second_sino-japanese_war',
'second_city_chicago': 'the_second_city',
'second_city_derby': 'birmingham_derby',
'second_city_detroit': 'the_second_city_detroit',
'second_city_tv': 'second_city_television',
'second_class_citizen': 'second-class_citizen',
'second_class_creditor': 'preferential_creditor',
'second_class_cruiser': 'cruiser',
'second_class_male': 'second_class_male/time_to_go',
'second_cod_war': 'cod_wars',
'second_company_grip': 'best_boy',
'second_confiscation_act': 'emancipation_proclamation',
'second_conscription_crisis': 'conscription_crisis_of_1944',
'second_consonant_shift': 'high_german_consonant_shift',
'second_cortina_war': 'cortina_troubles',
'second_cosmic_velocity': 'escape_velocity',
'second_countability_axiom': 'second-countable_space',
'second_countable_space': 'second-countable_space',
'second_countable_topology': 'second-countable_space',
'second_countablility_axiom': 'second-countable_space',
'second_court-cowan_ministry': 'court-cowan_ministry',
'second_cousin_once-removed': 'cousin',
'second_cousin_problem': 'cousin_problems',
'second_cranial_nerve': 'optic_nerve',
'second_cuneiform_bone': 'intermediate_cuneiform_bone',
'second_cylon_war': 'destruction_of_the_twelve_colonies',
'second_death_star': 'death_star',
'second_degree_murder': 'murder',
'second_degree_polynomial': 'quadratic_equation',
'second_deviatoric_invariant': 'invariants_of_tensors',
'second_dowding_ministry': 'dowding_ministry',
'second_ecumenical_council': 'first_council_of_constantinople',
'second_eelam_war': 'eelam_war_ii',
'second_eleven_championship': 'second_xi_championship',
'second_empire_architecture': 'second_empire',
'second_empire_style': 'second_empire',
'second_enemy_offensive': 'second_anti-partisan_offensive',
'second_esopus_war': 'esopus_wars',
'second_extended_filesystem': 'ext2',
'second_finnish_crusade': 'second_swedish_crusade',
'second_fire_group': 'paris_fire_brigade',
'second_five-year_pla': 'five-year_plans_of_china',
'second_five-year_plan': 'five-year_plans_of_china',
'second_fox_war': 'fox_wars',
'second_freedom_war': 'second_boer_war',
'second_french_republic': 'french_second_republic',
'second_fundamental_tensor': 'second_fundamental_form',
'second_gap_phase': 'g2_phase',
'second_generation_biofuel': 'second_generation_biofuels',
'second_generation_computer': 'history_of_computing_hardware',
'second_german_empire': 'german_empire',
'second_german_reich': 'german_empire',
'second_germanna_colony': 'germanna',
'second_great_depression': 'the_second_great_depression',
'second_great_war': 'world_war_ii',
'second_growth_forest': 'secondary_forest',
'second_gulf_war': 'iraq_war',
'second_hand_book': 'used_book',
'second_hand_goods': 'used_good',
'second_hand_lions': 'secondhand_lions',
'second_hand_news': 'rumours',
'second_hand_smoke': 'passive_smoking',
'second_hand_smoking': 'passive_smoking',
'second_hand_store': 'charity_shop',
'second_harmonic_generation': 'second-harmonic_generation',
'second_harmonics_generation': 'second-harmonic_generation',
'second_helvetic_confession': 'helvetic_confessions',
'second_illyrian_war': 'illyrian_wars',
'second_impact_syndrome': 'second-impact_syndrome',
'second_in_command': 'second-in-command',
'second_independence_war': 'second_italian_war_of_independence',
'second_indo-chinese_war': 'vietnam_war',
'second_indochina_war': 'vietnam_war',
'second_indochinese_war': 'vietnam_war',
'second_injury_syndrome': 'second-impact_syndrome',
'second_intercostal_nerve': 'intercostal_nerves',
'second_intermediary_period': 'second_intermediate_period_of_egypt',
'second_intermediate_period': 'second_intermediate_period_of_egypt',
'second_ionization_energy': 'ionization_energy',
'second_iraq_war': 'iraq_war',
'second_italian-abyssian_war': 'second_italo-abyssinian_war',
'second_italian-abyssinian_war': 'second_italo-abyssinian_war',
'second_italo-ethiopian_war': 'second_italo-abyssinian_war',
'second_jacobite_rebellion': 'jacobite_rising',
'second_jacobite_rising': 'jacobite_rising',
'second_jewish-roman_war': 'kitos_war',
'second_jewish_commonwealth': 'second_commonwealth',
'second_jewish_revolt': 'bar_kokhba_revolt',
'second_jewish_temple': 'second_temple',
'second_jungle_book': 'the_second_jungle_book',
'second_kashmir_war': 'indo-pakistani_war_of_1965',
'second_korean_conflict': 'second_korean_war',
'second_lagrange_point': 'lagrangian_point',
'second_language_prosody': 'second_language_phonology',
'second_lateran_council': 'second_council_of_the_lateran',
'second_law_efficiency': 'exergy_efficiency',
'second_law_thermodynamics': 'second_law_of_thermodynamics',
'second_level_canal': 'holyoke_canal_system',
'second_level_domain': 'second-level_domain',
'second_lien_financing': 'second_lien_loan',
'second_life_criticism': 'criticism_of_second_life',
'second_life_culture': 'culture_of_second_life',
'second_london_conference': 'second_london_naval_treaty',
'second_mahratta_war': 'second_anglo-maratha_war',
'second_malayan_emergency': 'communist_insurgency_war',
'second_manassas_campaign': 'northern_virginia_campaign',
'second_mandibular_molar': 'mandibular_second_molar',
'second_mandibular_premolar': 'mandibular_second_premolar',
'second_mandingo_war': 'mandingo_wars',
'second_maratha_war': 'second_anglo-maratha_war',
'second_marian_dogma': 'perpetual_virginity_of_mary',
'second_maxillary_bicuspid': 'maxillary_second_premolar',
'second_maxillary_molar': 'maxillary_second_molar',
'second_maxillary_premolar': 'maxillary_second_premolar',
'second_messenger_cascade': 'second_messenger_system',
'second_messenger_systems': 'second_messenger_system',
'second_minamata_disease': 'niigata_minamata_disease',
'second_moroccan_crisis': 'agadir_crisis',
'second_mover_advantage': 'first-mover_advantage',
'second_mysore_war': 'second_anglo-mysore_war',
'second_national_bank': 'second_bank_of_the_united_states',
'second_naval_district': 'united_states_naval_districts',
'second_negative_constructive': 'structure_of_policy_debate',
'second_negative_rebuttal': 'structure_of_policy_debate',
'second_northern_war': 'northern_wars',
'second_numantine_war': 'numantine_war',
'second_operation_lathe': 'turret_lathe',
'second_order_approximation': 'orders_of_approximation',
'second_order_arithmetic': 'second-order_arithmetic',
'second_order_condition': 'second_derivative_test',
'second_order_conditioning': 'second-order_conditioning',
'second_order_cybernetics': 'second-order_cybernetics',
'second_order_desire': 'higher-order_volition',
'second_order_equation': 'quadratic_equation',
'second_order_fluid': 'second-order_fluid',
'second_order_logic': 'second-order_logic',
'second_order_polynomial': 'quadratic_polynomial',
'second_order_predicate': 'second-order_predicate',
'second_order_reaction': 'order_of_reaction',
'second_oxford_company': 'authorized_king_james_version',
'second_palestinian_uprising': 'second_intifada',
'second_party_developer': 'video_game_development_parties',
'second_party_sysetm': 'second_party_system',
'second_party_title': 'video_game_development_parties',
'second_permanent_secretary': 'permanent_secretary',
'second_persian_empire': 'sassanid_empire',
'second_persian_period': 'history_of_persian_egypt',
'second_person_narrative': 'second-person_narrative',
'second_person_plural': 'grammatical_person',
'second_person_singular': 'grammatical_person',
'second_polish_war': 'french_invasion_of_russia',
'second_preimage_attack': 'preimage_attack',
'second_presidential_designate': 'presidential_designate',
'second_price_auction': 'vickrey_auction',
'second_qana_massacre': 'qana_airstrike',
'second_qana_shelling': 'qana_airstrike',
'second_quartering_act': 'quartering_act',
'second_queenston-lewiston_bridge': 'lewiston-queenston_bridge',
'second_readers-writers_problem': 'readers-writers_problem',
'second_red_scare': 'mccarthyism',
'second_reform_act': 'reform_act_1867',
'second_reform_bill': 'reform_act_1867',
'second_riel_rebellion': 'north-west_rebellion',
'second_roman-illyrian_war': 'illyrian_wars',
'second_roman_empire': 'history_of_italy_as_a_monarchy_and_in_the_world_wars',
'second_round_k.o.': 'can-i-bus',
'second_runner_up': 'runner-up',
'second_russian_division': 'russian_second_division',
'second_rzhev-sychevka_offensive': 'operation_mars',
'second_samnite_war': 'samnite_wars',
'second_samogitian_uprising': 'samogitian_uprisings',
'second_santer-poos_cabinet': 'santer-poos_ministry_ii',
'second_schleswig-holstein_war': 'second_schleswig_war',
'second_shanghai_incident': 'battle_of_shanghai',
'second_shaw_alphabet': 'quikscript',
'second_sikh_war': 'second_anglo-sikh_war',
'second_silesian_uprising': 'silesian_uprisings',
'second_sino-japan_war': 'second_sino-japanese_war',
'second_somaliland_expedition': 'somaliland_campaign',
'second_sound_shift': 'high_german_consonant_shift',
'second_spanish_war': 'numantine_war',
'second_special_panel': 'special_panels_of_the_dili_district_court',
'second_stage_manufacturers': 'second_stage_manufacturer',
'second_stage_theater': 'second_stage_theatre',
'second_state_pension': 'state_second_pension',
'second_statistical_account': 'statistical_accounts_of_scotland',
'second_strike_capability': 'second_strike',
'second_sudanese_war': 'second_sudanese_civil_war',
'second_syrian_war': 'syrian_wars',
'second_system_effect': 'second-system_effect',
'second_system_syndrome': 'second-system_effect',
'second_temple_period': 'second_temple',
'second_temporal_dimension': 'multiple_time_dimensions',
'second_tier_sourcing': 'second-tier_sourcing',
'second_turkish-hephthalite_war': 'second_perso-turkic_war',
'second_union_flag': 'flag_of_the_united_kingdom',
'second_union_jack': 'flag_of_the_united_kingdom',
'second_unit_director': 'second_unit',
'second_vatican_counsel': 'second_vatican_council',
'second_vatican_mythographer': 'vatican_mythographer',
'second_vienna_arbitration': 'second_vienna_award',
'second_vienna_school': 'second_viennese_school',
'second_vienna_treaty': 'second_vienna_award',
'second_virginia_convention': 'virginia_conventions',
'second_vivekananda_setu': 'nivedita_setu',
'second_watchung_mountain': 'watchung_mountains',
'second_wave_feminism': 'second-wave_feminism',
'second_welfare_theorem': 'fundamental_theorems_of_welfare_economics',
'second_world_country': 'second_world',
'second_world_war': 'world_war_ii',
'second_world_wars': 'world_war_ii',
'second_wounded_knee': 'wounded_knee_incident',
'second_zhou_dynasty': 'wu_zetian',
'secondarily_articulated_consonant': 'secondary_articulation',
'secondarily_articulated_consonants': 'secondary_articulation',
'secondary_audio_program': 'second_audio_program',
'secondary_audio_programming': 'second_audio_program',
'secondary_bear_market': 'market_trend',
'secondary_brain_damage': 'primary_and_secondary_brain_injury',
'secondary_brain_injury': 'primary_and_secondary_brain_injury',
'secondary_bull_market': 'market_trend',
'secondary_carbon_atom': 'carbon-carbon_bond',
'secondary_cardinal_vowel': 'cardinal_vowel',
'secondary_cartilaginous_joint': 'symphysis',
'secondary_cartilaginous_joints': 'symphysis',
'secondary_crater_chain': 'secondary_crater',
'secondary_cutaneous_vasculitis': 'cutaneous_small-vessel_vasculitis',
'secondary_data_analysis': 'secondary_data',
'secondary_ecological_succession': 'secondary_succession',
'secondary_electron_emission': 'secondary_emission',
'secondary_equity_offering': 'secondary_market_offering',
'secondary_forest_products': 'non-timber_forest_products',
'secondary_hopf_surface': 'hopf_surface',
'secondary_intracranial_hypertension': 'idiopathic_intracranial_hypertension',
'secondary_line_constants': 'propagation_constant',
'secondary_liver_cancer': 'hepatocellular_carcinoma',
'secondary_loan_participation': 'loan_sale',
'secondary_logic_board': 'motherboard',
'secondary_malignant_neoplasm': 'metastasis',
'secondary_market_trend': 'market_trend',
'secondary_market_trends': 'market_trend',
'secondary_messenger_system': 'second_messenger_system',
'secondary_modern_schools': 'secondary_modern_school',
'secondary_motor_cortex': 'motor_cortex',
'secondary_private_equity': 'private_equity_secondary_market',
'secondary_public_offering': 'secondary_market_offering',
'secondary_pulmonary_hypertension': 'pulmonary_hypertension',
'secondary_restraint_system': 'airbag',
'secondary_school_newbridge': 'patrician_secondary_school',
'secondary_scientific_literature': 'secondary_source',
'secondary_sensory_fasciculus': 'spinothalamic_tract',
'secondary_sex_characteristics': 'secondary_sex_characteristic',
'secondary_sexual_characteristic': 'secondary_sex_characteristic',
'secondary_sexual_characteristics': 'secondary_sex_characteristic',
'secondary_sexual_dysfunction': 'sexual_dysfunction',
'secondary_somatosensory_cortex': 'brodmann_area_5',
'secondary_sort_key': 'sorting',
'secondary_structure_motif': 'structural_motif',
'secondary_tympanic_membrane': 'eardrum',
'secqueville_en_bessin': 'secqueville-en-bessin',
'secr_class_p': 'secr_p_class',
'secr_d1_class': 'secr_d_class',
'secr_k_class': 'secr_k_and_sr_k1_classes',
'secr_l_class': 'secr_l_and_sr_l1_classes',
'secr_o1_class': 'secr_o_class',
'secr_s_class': 'secr_c_class',
'secrecy_of_communication': 'secrecy_of_correspondence',
'secrecy_of_communications': 'secrecy_of_correspondence',
'secrecy_of_letter': 'secrecy_of_correspondence',
'secrecy_of_letters': 'secrecy_of_correspondence',
'secret_agent_007': 'james_bond',
'secret_agent_corrigan': 'secret_agent_x-9',
'secret_agent_h21': 'mata_hari',
'secret_auxiliary_unit': 'auxiliary_units',
'secret_cheifs_3': 'secret_chiefs_3',
'secret_cheifs_three': 'secret_chiefs_3',
'secret_congress_radio': 'congress_radio',
'secret_cow_level': 'diablo_ii',
'secret_dakota_ring': 'andy_ross',
'secret_detention_facilities': 'black_site',
'secret_files_2': 'secret_files_2:_puritas_cordis',
'secret_files_tunguska': 'secret_files:_tunguska',
'secret_garden_festival': 'secret_garden_party',
'secret_labour_loans': 'cash_for_honours',
'secret_life_of': 'secret_life',
'secret_mulroney_tapes': 'the_secret_mulroney_tapes',
'secret_nazi_forest': 'forest_swastika',
'secret_nazi_item': 'ytmnd',
'secret_nuclear_bunker': 'kelvedon_hatch_secret_nuclear_bunker',
'secret_of_chimneys': 'the_secret_of_chimneys',
'secret_of_delft': 'the_secret_of_delft',
'secret_of_mamo': 'mystery_of_mamo',
'secret_of_nimh': 'the_secret_of_nimh',
'secret_of_nyhm': 'the_secret_of_nimh',
'secret_of_nymh': 'the_secret_of_nimh',
'secret_of_secrets': 'secretum_secretorum',
'secret_restricted_data': 'classified_information_in_the_united_states',
'secret_service_agent': 'espionage',
'secret_service_bureau': 'secret_intelligence_service',
'secret_service_codenames': 'secret_service_codename',
'secret_service_nicknames': 'secret_service_codename',
'secret_service_operator': 'operator_no._5',
'secret_seven_series': 'the_secret_seven',
'secret_slumber_party': 'cookie_jar_tv',
'secret_squadron_fiveranger': 'himitsu_sentai_goranger',
'secret_squirrel_show': 'secret_squirrel',
'secret_state_police': 'gestapo',
'secret_sunday_lover': 'ali_love',
'secret_teacher_society': 'secret_teaching_organization',
'secret_vatican_archives': 'vatican_secret_archives',
'secret_wars_2': 'secret_wars_ii',
'secretaria_de_hacienda': 'secretariat_of_finance_and_public_credit',
'secretaria_de_marina': 'mexican_navy',
'secretariat_for_non-christians': 'pontifical_council_for_interreligious_dialogue',
'secretariat_of_state': 'secretary_of_state',
'secretariate_of_briefs': 'secretariate_of_briefs_to_princes_and_of_latin_letters',
'secretaries_for_scotland': 'secretary_for_scotland',
'secretaries_of_culture': 'culture_minister',
'secretaries_of_state': 'secretary_of_state',
'secretary-general_of_nato': 'secretary_general_of_nato',
'secretary-general_of_opec': 'opec',
'secretary_class_cutter': 'hamilton_class_cutter',
'secretary_for_finance': 'cabinet_secretary_for_finance_and_sustainable_growth',
'secretary_for_tourism': 'secretary_of_tourism',
'secretary_for_war': 'secretary_at_war',
'secretary_general_un': 'secretary-general_of_the_united_nations',
'secretary_james_baker': 'james_baker',
'secretary_of_agriculture': 'united_states_secretary_of_agriculture',
'secretary_of_commerce': 'united_states_secretary_of_commerce',
'secretary_of_culture': 'culture_minister',
'secretary_of_defence': 'united_states_secretary_of_defense',
'secretary_of_defense': 'united_states_secretary_of_defense',
'secretary_of_dreams': 'the_secretary_of_dreams',
'secretary_of_economy': 'secretariat_of_economy',
'secretary_of_education': 'united_states_secretary_of_education',
'secretary_of_energy': 'united_states_secretary_of_energy',
'secretary_of_finance': 'secretariat_of_finance_and_public_credit',
'secretary_of_health': 'health_minister',
'secretary_of_hud': 'united_states_secretary_of_housing_and_urban_development',
'secretary_of_interior': 'united_states_secretary_of_the_interior',
'secretary_of_justice': 'attorney_general',
'secretary_of_labor': 'united_states_secretary_of_labor',
'secretary_of_navy': 'united_states_secretary_of_the_navy',
'secretary_of_scotland': 'secretary_for_scotland',
'secretary_of_transportation': 'united_states_secretary_of_transportation',
'secretary_of_treasury': 'united_states_secretary_of_the_treasury',
'secretary_of_war': 'united_states_secretary_of_war',
'secreted_phosphoprotein_1': 'osteopontin',
'secretly_canadian_records': 'secretly_canadian',
'secretos_del_corazon': 'secrets_of_the_heart',
'secrets_of_droon': 'the_secrets_of_droon',
'secrets_of_enoch': 'second_book_of_enoch',
'secrets_of_faydwer': 'everquest:_secrets_of_faydwer',
'secrets_of_vesuvius': 'the_secrets_of_vesuvius',
'sect_and_sects': 'sect',
'section_106_agreement': 'town_and_country_planning_act_1990',
'section_125_plan': 'cafeteria_plan',
'section_179_deduction': 'section_179_depreciation_deduction',
'section_1983_litigation': 'civil_rights_act_of_1871',
'section_226_plan': 'income_and_corporation_taxes_act_1970',
'section_230_immunity': 'section_230_of_the_communications_decency_act',
'section_508_compliance': 'section_508_amendment_to_the_rehabilitation_act_of_1973',
'section_515_loans': 'section_515_rural_rental_housing',
'section_620_plan': 'retirement_annuity_plan',
'section_automatic_weapon': 'squad_automatic_weapon',
'sector_mass_spectrometer': 'sector_instrument',
'sector_of_rwanda': 'sectors_of_rwanda',
'sector_skills_council': 'sector_skills_councils',
'sector_wide_approach': 'sector-wide_approach',
'sects_of_buddhism': 'schools_of_buddhism',
'sects_of_rastafari': 'mansions_of_rastafari',
'sects_of_shinto': 'shinto_sects_and_schools',
'secular_bear_market': 'market_trend',
'secular_bull_market': 'market_trend',
'secular_humanist_declaration': 'a_secular_humanist_declaration',
'secular_market_trend': 'market_trend',
'secular_market_trends': 'market_trend',
'secular_progressive_movement': 'social_progressivism',
'secularism_in_canada': 'religion_in_canada',
'secularization_of_christmas': 'christmas_controversy',
'secunda_and_rufina': 'rufina_and_secunda',
'secundino_zuazo_ugalde': 'secundino_zuazo',
'secundus_of_trent': 'secundus_of_non',
'secure_access_key': 'secure_attention_key',
'secure_attention_sequence': 'secure_attention_key',
'secure_coding_practice': 'defensive_programming',
'secure_computing_corp': 'secure_computing',
'secure_computing_corp.': 'secure_computing',
'secure_computing_corporation': 'secure_computing',
'secure_copy_protocol': 'secure_copy',
'secure_digital_card': 'secure_digital',
'secure_direct_client-to-client': 'direct_client-to-client',
'secure_electronic_signature': 'electronic_signature',
'secure_electronic_transfer': 'secure_electronic_transaction',
'secure_fence_act': 'secure_fence_act_of_2006',
'secure_file_deletion': 'data_remanence',
'secure_function_evaluation': 'secure_two-party_computation',
'secure_global_desktop': 'sun_secure_global_desktop',
'secure_hash_algorithm': 'sha_hash_functions',
'secure_hash_algorithms': 'sha_hash_functions',
'secure_hash_function': 'cryptographic_hash_function',
'secure_hash_functions': 'sha_hash_functions',
'secure_id_token': 'securid',
'secure_multiparty_computation': 'secure_multi-party_computation',
'secure_neighbor_discovery': 'secure_neighbor_discovery_protocol',
'secure_operating_systems': 'secure_operating_system',
'secure_personal_internet': 'secure_peered_internet',
'secure_private_internet': 'secure_peered_internet',
'secure_self_storage': 'self_storage',
'secure_shell_filesystem': 'sshfs',
'secure_shell_host': 'secure_shell',
'secure_socket_layer': 'transport_layer_security',
'secure_sockets_layer': 'transport_layer_security',
'secure_usb_drive': 'usb_flash_drive_security',
'securing_a_climb': 'traditional_climbing',
'securing_a_computer': 'computer_security',
'securities_exchange_act': 'securities_exchange_act_of_1934',
'securities_exchange_commission': 'u.s._securities_and_exchange_commission',
'securities_exchange_company': 'charles_ponzi',
'security_account_manager': 'security_accounts_manager',
'security_advisory_opinions': 'security_advisory_opinion',
'security_assistance_organization': 'united_states_security_assistance_organizations',
'security_by_design': 'secure_by_design',
'security_by_obfuscation': 'security_through_obscurity',
'security_by_obscurity': 'security_through_obscurity',
'security_contracting_firm': 'mercenary',
'security_council_resolution': 'united_nations_security_council_resolution',
'security_council_un': 'united_nations_security_council',
'security_council_veto': 'united_nations_security_council_veto_power',
'security_development_lifecycle': 'trustworthy_computing_security_development_lifecycle',
'security_door_chain': 'door_chain',
'security_enhanced_linux': 'security-enhanced_linux',
'security_housing_unit': 'solitary_confinement',
'security_management_practices': 'security_management',
'security_market_line': 'capital_asset_pricing_model',
'security_of_aacs': 'security_of_advanced_access_content_system',
'security_of_information': 'security_of_information_act',
'security_of_supply': 'energy_security',
'security_operation_center': 'security_operations_center',
'security_police_board': 'kaitsepolitsei',
'security_prison_21': 'tuol_sleng_genocide_museum',
'security_requirements_analysis': 'requirements_analysis',
'security_requirements_engineering': 'requirements_analysis',
'security_service_provider': 'security_support_provider',
'security_threat_group': 'prison_gang',
'security_through_obfuscation': 'security_through_obscurity',
'security_through_obsolescence': 'security_through_obscurity',
'security_through_obsurity': 'security_through_obscurity',
'sed_programming_language': 'sed',
'sed_tv_display': 'surface-conduction_electron-emitter_display',
'sedale_threatt_jnr.': 'sedale_threatt_jnr',
'sedale_threatt_jr.': 'sedale_threatt_jnr',
'sedalia_army_airfield': 'whiteman_air_force_base',
'sedalia_glider_base': 'whiteman_air_force_base',
'sedan_delivery_vehicle': 'sedan_delivery',
'sedan_delivery_vehicles': 'sedan_delivery',
'sedanca_de_ville': 'town_car',
'sedentary_life_style': 'sedentary_lifestyle',
'seder_olam_r.': 'seder_olam_rabbah',
'seder_olam_rabba': 'seder_olam_rabbah',
'seder_olam_zuta': 'seder_olam_zutta',
'seder_on_sunday': 'sam_seder',
'seder_r._amram': 'amram_gaon',
'sedgley_park_rufc': 'sedgley_park_r.u.f.c.',
'sedgley_park_school': 'cotton_college',
'sedgwick_county_arena': 'intrust_bank_arena',
'sedgwick_elementary_school': 'cupertino_union_school_district',
'sedgwick_w._green': 's._william_green',
'sedgwick_william_green': 's._william_green',
'sedition_in_australia': 'australian_sedition_law',
'sedof_mikha_airbase': 'sdot_micha_airbase',
'sedonoude_janvier_abouta': 'sedonoude_abouta',
'sedot_mikha_airbase': 'sdot_micha_airbase',
'sedotta_e_abbandonata': 'seduced_and_abandoned',
'seducing_dr._lewis': 'seducing_doctor_lewis',
'seducing_mr._robin': 'seducing_mr._perfect',
'seducing_mr_perfect': 'seducing_mr._perfect',
'sedulius_the_gael': 'sedulius_scottus',
'sedus_stoll_ag': 'sedus',
'see/hear/speak_no_evil': 'three_wise_monkeys',
'see_air_rescue': 'air-sea_rescue',
'see_article_text': 'ichthyornis',
'see_celebrations_festival': 'see_change_festival',
'see_for_miles': 'see_for_miles_records',
'see_line_woman': 'sea_lion_woman',
'see_my_friend': 'see_my_friends',
'see_my_friend-dummy-swap': 'see_my_friends',
'see_my_vest': 'two_dozen_and_one_greyhounds',
'see_my_way': 'a_quick_one',
'see_of_acerenza': 'roman_catholic_archdiocese_of_acerenza',
'see_of_acerra': 'roman_catholic_diocese_of_acerra',
'see_of_acireale': 'roman_catholic_diocese_of_acireale',
'see_of_acqui': 'roman_catholic_diocese_of_acqui',
'see_of_adria': 'roman_catholic_diocese_of_adria-rovigo',
'see_of_agde': 'ancient_diocese_of_agde',
'see_of_agen': 'roman_catholic_diocese_of_agen',
'see_of_agrigento': 'roman_catholic_archdiocese_of_agrigento',
'see_of_agrigentum': 'roman_catholic_archdiocese_of_agrigento',
'see_of_aire': 'roman_catholic_diocese_of_aire',
'see_of_alais': 'ancient_diocese_of_alais',
'see_of_alatri': 'diocese_of_alatri',
'see_of_albano': 'roman_catholic_suburbicarian_diocese_of_albano',
'see_of_albenga': 'roman_catholic_diocese_of_albenga-imperia',
'see_of_albi': 'roman_catholic_archdiocese_of_albi',
'see_of_ales': 'roman_catholic_diocese_of_ales-terralba',
'see_of_alessandria': 'roman_catholic_diocese_of_alessandria_della_paglia',
'see_of_alet': 'ancient_diocese_of_alet',
'see_of_aleth': 'ancient_diocese_of_saint-malo',
'see_of_alexandria': 'church_of_alexandria',
'see_of_alghero': 'roman_catholic_diocese_of_alghero-bosa',
'see_of_algiers': 'patriarchate_of_carthage',
'see_of_alife': 'roman_catholic_diocese_of_alife-caiazzo',
'see_of_amelia': 'diocese_of_amelia',
'see_of_amiens': 'roman_catholic_diocese_of_amiens',
'see_of_anagni': 'roman_catholic_diocese_of_anagni-alatri',
'see_of_andria': 'roman_catholic_diocese_of_andria',
'see_of_angers': 'roman_catholic_diocese_of_angers',
'see_of_anglona': 'roman_catholic_diocese_of_tursi-lagonegro',
'see_of_angra': 'roman_catholic_diocese_of_angra',
'see_of_annecy': 'roman_catholic_diocese_of_annecy',
'see_of_antibes': 'ancient_diocese_of_grasse',
'see_of_antwerp': 'roman_catholic_diocese_of_antwerp',
'see_of_aosta': 'roman_catholic_diocese_of_aosta',
'see_of_apt': 'ancient_diocese_of_apt',
'see_of_aquileia': 'patriarchate_of_aquileia',
'see_of_aquino': 'roman_catholic_diocese_of_sora-aquino-pontecorvo',
'see_of_ardagh': 'bishop_of_ardagh',
'see_of_arezzo': 'roman_catholic_diocese_of_arezzo-cortona-sansepolcro',
'see_of_ariano': 'roman_catholic_diocese_of_ariano_irpino-lacedonia',
'see_of_arisitum': 'ancient_diocese_of_alais',
'see_of_arles': 'ancient_diocese_of_arles',
'see_of_armagh': 'roman_catholic_archdiocese_of_armagh',
'see_of_arras': 'roman_catholic_diocese_of_arras',
'see_of_assisi': 'roman_catholic_diocese_of_assisi-nocera_umbra-gualdo_tadino',
'see_of_asti': 'roman_catholic_diocese_of_asti',
'see_of_auch': 'roman_catholic_archdiocese_of_auch',
'see_of_augsburg': 'prince-bishopric_of_augsburg',
'see_of_autun': 'roman_catholic_diocese_of_autun',
'see_of_auxerre': 'ancient_diocese_of_auxerre',
'see_of_aveiro': 'roman_catholic_diocese_of_aveiro',
'see_of_avellino': 'roman_catholic_diocese_of_avellino',
'see_of_aversa': 'roman_catholic_diocese_of_aversa',
'see_of_avezzano': 'roman_catholic_diocese_of_avezzano',
'see_of_avignon': 'roman_catholic_archdiocese_of_avignon',
'see_of_avranches': 'roman_catholic_diocese_of_coutances',
'see_of_baltimore': 'roman_catholic_archdiocese_of_baltimore',
'see_of_bari': 'roman_catholic_archdiocese_of_bari-bitonto',
'see_of_bayeux': 'roman_catholic_diocese_of_bayeux',
'see_of_bayonne': 'roman_catholic_diocese_of_bayonne',
'see_of_bazas': 'ancient_diocese_of_bazas',
'see_of_beauvais': 'roman_catholic_diocese_of_beauvais',
'see_of_beja': 'roman_catholic_diocese_of_beja',
'see_of_belley': 'roman_catholic_diocese_of_belley-ars',
'see_of_belluno': 'roman_catholic_diocese_of_belluno-feltre',
'see_of_benevento': 'roman_catholic_archdiocese_of_benevento',
'see_of_bergamo': 'roman_catholic_diocese_of_bergamo',
'see_of_bertinoro': 'diocese_of_bertinoro',
'see_of_biella': 'roman_catholic_diocese_of_biella',
'see_of_bilbao': 'roman_catholic_diocese_of_bilbao',
'see_of_bisarchio': 'roman_catholic_diocese_of_ozieri',
'see_of_bitonto': 'diocese_of_bitonto',
'see_of_blois': 'roman_catholic_diocese_of_blois',
'see_of_bobbio': 'diocese_of_bobbio',
'see_of_boiano': 'roman_catholic_archdiocese_of_campobasso-boiano',
'see_of_bologna': 'roman_catholic_archdiocese_of_bologna',
'see_of_bolzano': 'roman_catholic_diocese_of_bolzano-brixen',
'see_of_bordeaux': 'roman_catholic_archdiocese_of_bordeaux',
'see_of_bosa': 'diocese_of_bosa',
'see_of_boulogne': 'ancient_diocese_of_boulogne',
'see_of_bourges': 'roman_catholic_archdiocese_of_bourges',
'see_of_bova': 'diocese_of_bova',
'see_of_bovino': 'diocese_of_bovino',
'see_of_brechin': 'diocese_of_brechin',
'see_of_brescia': 'roman_catholic_diocese_of_brescia',
'see_of_brindisi': 'roman_catholic_archdiocese_of_brindisi-ostuni',
'see_of_bristol': 'diocese_of_bristol',
'see_of_bruges': 'roman_catholic_diocese_of_brugge',
'see_of_brugge': 'roman_catholic_diocese_of_brugge',
'see_of_burgos': 'roman_catholic_archdiocese_of_burgos',
'see_of_cagli': 'diocese_of_cagli_e_pergola',
'see_of_cahors': 'roman_catholic_diocese_of_cahors',
'see_of_caltagirone': 'roman_catholic_diocese_of_caltagirone',
'see_of_caltanissetta': 'roman_catholic_diocese_of_caltanissetta',
'see_of_calvi': 'roman_catholic_diocese_of_teano-calvi',
'see_of_cambrai': 'roman_catholic_archdiocese_of_cambrai',
'see_of_cambrai-arras': 'roman_catholic_archdiocese_of_cambrai',
'see_of_camerino': 'roman_catholic_archdiocese_of_camerino-san_severino_marche',
'see_of_cammin': 'bishopric_of_cammin',
'see_of_canterbury': 'archbishop_of_canterbury',
'see_of_capaccio': 'roman_catholic_diocese_of_vallo_della_lucania',
'see_of_capua': 'roman_catholic_archdiocese_of_capua',
'see_of_carcassonne': 'roman_catholic_diocese_of_carcassonne-narbonne',
'see_of_cariati': 'diocese_of_cariati',
'see_of_carlisle': 'diocese_of_carlisle',
'see_of_carpentras': 'ancient_diocese_of_carpentras',
'see_of_carpi': 'roman_catholic_diocese_of_carpi',
'see_of_casale': 'roman_catholic_diocese_of_casale_monferrato',
'see_of_caserta': 'roman_catholic_diocese_of_caserta',
'see_of_castellaneta': 'roman_catholic_diocese_of_castellaneta',
'see_of_castres': 'ancient_diocese_of_castres',
'see_of_catania': 'roman_catholic_archdiocese_of_catania',
'see_of_catanzaro': 'roman_catholic_archdiocese_of_catanzaro-squillace',
'see_of_cava': 'diocese_of_cava_and_sarno',
'see_of_cavaillon': 'ancient_diocese_of_cavaillon',
'see_of_ceneda': 'roman_catholic_diocese_of_vittorio_veneto',
'see_of_cesena': 'roman_catholic_diocese_of_cesena-sarsina',
'see_of_ceuta': 'roman_catholic_diocese_of_ceuta',
'see_of_chartres': 'roman_catholic_diocese_of_chartres',
'see_of_chiavari': 'roman_catholic_diocese_of_chiavari',
'see_of_chichester': 'diocese_of_chichester',
'see_of_chieti': 'roman_catholic_archdiocese_of_chieti-vasto',
'see_of_chioggia': 'roman_catholic_diocese_of_chioggia',
'see_of_cilicia': 'holy_see_of_cilicia',
'see_of_cingoli': 'diocese_of_cingoli',
'see_of_civitavecchia': 'roman_catholic_diocese_of_civitavecchia-tarquinia',
'see_of_clermont': 'roman_catholic_archdiocese_of_clermont',
'see_of_clermont-ferrand': 'roman_catholic_archdiocese_of_clermont',
'see_of_cloyne': 'diocese_of_cloyne',
'see_of_comminges': 'ancient_diocese_of_comminges',
'see_of_como': 'roman_catholic_diocese_of_como',
'see_of_concordia': 'roman_catholic_diocese_of_concordia-pordenone',
'see_of_condom': 'bishop_of_condom',
'see_of_conversano': 'roman_catholic_diocese_of_conversano-monopoli',
'see_of_corneto': 'roman_catholic_diocese_of_civitavecchia-tarquinia',
'see_of_cornouaille': 'roman_catholic_diocese_of_quimper',
'see_of_cornouailles': 'roman_catholic_diocese_of_quimper',
'see_of_cortona': 'diocese_of_cortona',
'see_of_cosenza': 'roman_catholic_archdiocese_of_cosenza-bisignano',
'see_of_couserans': 'ancient_diocese_of_couserans',
'see_of_coutances': 'roman_catholic_diocese_of_coutances',
'see_of_crema': 'roman_catholic_diocese_of_crema',
'see_of_cremona': 'roman_catholic_diocese_of_cremona',
'see_of_crotone': 'roman_catholic_archdiocese_of_crotone-santa_severina',
'see_of_cuneo': 'roman_catholic_diocese_of_cuneo',
'see_of_dax': 'ancient_diocese_of_dax',
'see_of_derry': 'bishop_of_derry',
'see_of_die': 'ancient_diocese_of_die',
'see_of_digne': 'roman_catholic_diocese_of_digne',
'see_of_dijon': 'roman_catholic_archdiocese_of_dijon',
'see_of_dol': 'ancient_diocese_of_dol',
'see_of_dresden': 'roman_catholic_diocese_of_dresden-meissen',
'see_of_dresden-meissen': 'roman_catholic_diocese_of_dresden-meissen',
'see_of_dublin': 'archbishop_of_dublin',
'see_of_dunblane': 'diocese_of_dunblane',
'see_of_durham': 'bishop_of_durham',
'see_of_eauze': 'ancient_diocese_of_eauze',
'see_of_edinburgh': 'diocese_of_edinburgh',
'see_of_elne': 'roman_catholic_diocese_of_perpignan-elne',
'see_of_ely': 'diocese_of_ely',
'see_of_embrun': 'ancient_diocese_of_embrun',
'see_of_evora': 'roman_catholic_archdiocese_of_evora',
'see_of_exeter': 'diocese_of_exeter',
'see_of_fabriano': 'roman_catholic_diocese_of_fabriano-matelica',
'see_of_faenza': 'diocese_of_faenza',
'see_of_fano': 'roman_catholic_diocese_of_fano-fossombrone-cagli-pergola',
'see_of_faro': 'roman_catholic_diocese_of_faro',
'see_of_feltre': 'diocese_of_feltre',
'see_of_ferentino': 'diocese_of_ferentino',
'see_of_fermo': 'roman_catholic_archdiocese_of_fermo',
'see_of_ferns': 'roman_catholic_diocese_of_ferns',
'see_of_ferrara': 'roman_catholic_archdiocese_of_ferrara-comacchio',
'see_of_fidenza': 'roman_catholic_diocese_of_fidenza',
'see_of_fiesole': 'roman_catholic_diocese_of_fiesole',
'see_of_foggia': 'archdiocese_of_foggia',
'see_of_foligno': 'roman_catholic_diocese_of_foligno',
'see_of_fossano': 'roman_catholic_diocese_of_fossano',
'see_of_fossombrone': 'diocese_of_fossombrone',
'see_of_frascati': 'roman_catholic_suburbicarian_diocese_of_frascati',
'see_of_gaeta': 'roman_catholic_archdiocese_of_gaeta',
'see_of_galloway': 'roman_catholic_diocese_of_galloway',
'see_of_galtelli': 'roman_catholic_diocese_of_nuoro',
'see_of_gap': 'roman_catholic_diocese_of_gap',
'see_of_genoa': 'roman_catholic_archdiocese_of_genoa',
'see_of_gerace': 'roman_catholic_diocese_of_locri-gerace',
'see_of_girgenti': 'roman_catholic_archdiocese_of_agrigento',
'see_of_gloucester': 'bishop_of_gloucester',
'see_of_grasse': 'ancient_diocese_of_grasse',
'see_of_graz-seckau': 'roman_catholic_diocese_of_graz-seckau',
'see_of_grenoble': 'roman_catholic_diocese_of_grenoble-vienne',
'see_of_grosseto': 'roman_catholic_diocese_of_grosseto',
'see_of_guastalla': 'diocese_of_guastalla',
'see_of_guatemala': 'roman_catholic_archdiocese_of_guatemala',
'see_of_gubbio': 'roman_catholic_diocese_of_gubbio',
'see_of_halberstadt': 'bishopric_of_halberstadt',
'see_of_hasselt': 'roman_catholic_diocese_of_hasselt',
'see_of_havelberg': 'bishopric_of_havelberg',
'see_of_hereford': 'diocese_of_hereford',
'see_of_hildesheim': 'bishopric_of_hildesheim',
'see_of_iesi': 'roman_catholic_diocese_of_jesi',
'see_of_ischia': 'roman_catholic_diocese_of_ischia',
'see_of_isernia': 'roman_catholic_diocese_of_isernia-venafro',
'see_of_ivrea': 'roman_catholic_diocese_of_ivrea',
'see_of_jerusalem': 'greek_orthodox_church_of_jerusalem',
'see_of_jesi': 'roman_catholic_diocese_of_jesi',
'see_of_julin': 'bishopric_of_cammin',
'see_of_kammin': 'bishopric_of_cammin',
'see_of_kildare': 'bishop_of_kildare',
'see_of_killaloe': 'diocese_of_killaloe',
'see_of_lanciano': 'roman_catholic_archdiocese_of_lanciano-ortona',
'see_of_langres': 'roman_catholic_diocese_of_langres',
'see_of_lanusei': 'roman_catholic_diocese_of_lanusei',
'see_of_laon': 'ancient_diocese_of_laon',
'see_of_larino': 'diocese_of_larino',
'see_of_latina': 'roman_catholic_diocese_of_latina-terracina-sezze-priverno',
'see_of_laval': 'roman_catholic_diocese_of_laval',
'see_of_lavaur': 'ancient_diocese_of_lavaur',
'see_of_lebus': 'bishopric_of_lebus',
'see_of_lecce': 'roman_catholic_archdiocese_of_lecce',
'see_of_lectoure': 'ancient_diocese_of_lectoure',
'see_of_leon': 'roman_catholic_diocese_of_quimper',
'see_of_lescar': 'ancient_diocese_of_lescar',
'see_of_lille': 'roman_catholic_archdiocese_of_lille',
'see_of_limerick': 'diocese_of_limerick',
'see_of_limoges': 'roman_catholic_diocese_of_limoges',
'see_of_lincoln': 'diocese_of_lincoln',
'see_of_lisieux': 'roman_catholic_diocese_of_lisieux',
'see_of_livorno': 'roman_catholic_diocese_of_livorno',
'see_of_lodi': 'roman_catholic_diocese_of_lodi',
'see_of_lombez': 'ancient_diocese_of_lombez',
'see_of_london': 'diocese_of_london',
'see_of_lourdes': 'roman_catholic_diocese_of_tarbes-et-lourdes',
'see_of_lucca': 'roman_catholic_archdiocese_of_lucca',
'see_of_lungro': 'eparchy_of_lungro_degli_albanesi',
'see_of_luni': 'roman_catholic_diocese_of_la_spezia-sarzana-brugnato',
'see_of_lyon': 'roman_catholic_archdiocese_of_lyon',
'see_of_lyon-vienne': 'roman_catholic_archdiocese_of_lyon',
'see_of_lyons': 'roman_catholic_archdiocese_of_lyon',
'see_of_magdeburg': 'archbishopric_of_magdeburg',
'see_of_maguelone': 'roman_catholic_archdiocese_of_montpellier',
'see_of_maguelonne': 'roman_catholic_archdiocese_of_montpellier',
'see_of_maillezais': 'ancient_diocese_of_maillezais',
'see_of_manfredonia': 'roman_catholic_archdiocese_of_manfredonia-vieste-s._giovanni_rotondo',
'see_of_manila': 'roman_catholic_archdiocese_of_manila',
'see_of_mans': 'roman_catholic_diocese_of_le_mans',
'see_of_mantova': 'roman_catholic_diocese_of_mantua',
'see_of_mantua': 'roman_catholic_diocese_of_mantua',
'see_of_marseille': 'roman_catholic_archdiocese_of_marseille',
'see_of_marseilles': 'roman_catholic_archdiocese_of_marseille',
'see_of_matelica': 'roman_catholic_diocese_of_fabriano-matelica',
'see_of_matera': 'roman_catholic_archdiocese_of_matera-irsina',
'see_of_maurienne': 'diocese_of_saint-jean-de-maurienne',
'see_of_mazara': 'roman_catholic_diocese_of_mazara_del_vallo',
'see_of_mazzara': 'roman_catholic_diocese_of_mazara_del_vallo',
'see_of_meaux': 'roman_catholic_diocese_of_meaux',
'see_of_meissen': 'roman_catholic_diocese_of_dresden-meissen',
'see_of_melfi': 'roman_catholic_diocese_of_melfi-rapolla-venosa',
'see_of_mende': 'roman_catholic_diocese_of_mende',
'see_of_merseburg': 'bishopric_of_merseburg',
'see_of_metz': 'roman_catholic_diocese_of_metz',
'see_of_milan': 'roman_catholic_archdiocese_of_milan',
'see_of_mileto': 'roman_catholic_diocese_of_mileto-nicotera-tropea',
'see_of_minden': 'bishopric_of_minden',
'see_of_mirepoix': 'ancient_diocese_of_mirepoix',
'see_of_modena': 'roman_catholic_archdiocese_of_modena-nonantola',
'see_of_modena-nonantola': 'roman_catholic_archdiocese_of_modena-nonantola',
'see_of_molfetta': 'roman_catholic_diocese_of_molfetta-ruvo-giovinazzo-terlizzi',
'see_of_monopoli': 'diocese_of_monopoli',
'see_of_monreale': 'roman_catholic_archdiocese_of_monreale',
'see_of_montauban': 'roman_catholic_diocese_of_montauban',
'see_of_montefeltro': 'roman_catholic_diocese_of_san_marino-montefeltro',
'see_of_montefiascone': 'diocese_of_montefiascone',
'see_of_montepulciano': 'roman_catholic_diocese_of_montepulciano-chiusi-pienza',
'see_of_montpellier': 'roman_catholic_archdiocese_of_montpellier',
'see_of_moulins': 'roman_catholic_diocese_of_moulins',
'see_of_muro': 'diocese_of_muro_lucano',
'see_of_muro-lucano': 'diocese_of_muro_lucano',
'see_of_nancy': 'roman_catholic_diocese_of_nancy',
'see_of_nanterre': 'roman_catholic_diocese_of_nanterre',
'see_of_nantes': 'roman_catholic_diocese_of_nantes',
'see_of_naples': 'roman_catholic_archdiocese_of_naples',
'see_of_narbonne': 'ancient_diocese_of_narbonne',
'see_of_narni': 'diocese_of_narni',
'see_of_nepi': 'diocese_of_nepi-sutri',
'see_of_nevers': 'roman_catholic_diocese_of_nevers',
'see_of_newcastle': 'diocese_of_newcastle',
'see_of_nicastro': 'roman_catholic_diocese_of_lamezia_terme',
'see_of_nice': 'roman_catholic_diocese_of_nice',
'see_of_nola': 'roman_catholic_diocese_of_nola',
'see_of_norwich': 'anglican_diocese_of_norwich',
'see_of_noto': 'roman_catholic_diocese_of_noto',
'see_of_novara': 'roman_catholic_diocese_of_novara',
'see_of_noyon': 'ancient_diocese_of_noyon',
'see_of_nuoro': 'roman_catholic_diocese_of_nuoro',
'see_of_ogliastra': 'roman_catholic_diocese_of_lanusei',
'see_of_oloron': 'ancient_diocese_of_oloron',
'see_of_oradea': 'roman_catholic_diocese_of_oradea_mare',
'see_of_orange': 'ancient_diocese_of_orange',
'see_of_oria': 'roman_catholic_diocese_of_oria',
'see_of_oristano': 'roman_catholic_archdiocese_of_oristano',
'see_of_ortona': 'roman_catholic_archdiocese_of_lanciano-ortona',
'see_of_orvieto': 'roman_catholic_diocese_of_orvieto-todi',
'see_of_osimo': 'diocese_of_osimo',
'see_of_osma': 'roman_catholic_diocese_of_osma-soria',
'see_of_ossory': 'diocese_of_ossory',
'see_of_ostia': 'bishop_of_ostia',
'see_of_otranto': 'roman_catholic_archdiocese_of_otranto',
'see_of_oviedo': 'roman_catholic_archdiocese_of_oviedo',
'see_of_ozieri': 'roman_catholic_diocese_of_ozieri',
'see_of_paderborn': 'prince-bishopric_of_paderborn',
'see_of_palencia': 'roman_catholic_diocese_of_palencia',
'see_of_palestrina': 'roman_catholic_suburbicarian_diocese_of_palestrina',
'see_of_pamiers': 'roman_catholic_diocese_of_pamiers',
'see_of_paris': 'roman_catholic_archdiocese_of_paris',
'see_of_parma': 'roman_catholic_diocese_of_parma',
'see_of_passau': 'roman_catholic_diocese_of_passau',
'see_of_patti': 'roman_catholic_diocese_of_patti',
'see_of_pavia': 'roman_catholic_diocese_of_pavia',
'see_of_penne': 'roman_catholic_archdiocese_of_pescara-penne',
'see_of_perpignan': 'roman_catholic_diocese_of_perpignan-elne',
'see_of_pesaro': 'roman_catholic_archdiocese_of_pesaro',
'see_of_pescia': 'roman_catholic_diocese_of_pescia',
'see_of_peter': 'holy_see',
'see_of_piacenza': 'roman_catholic_diocese_of_piacenza-bobbio',
'see_of_pistoia': 'roman_catholic_diocese_of_pistoia',
'see_of_poitiers': 'roman_catholic_archdiocese_of_poitiers',
'see_of_policastro': 'diocese_of_policastro',
'see_of_pontoise': 'roman_catholic_diocese_of_pontoise',
'see_of_portalegre': 'roman_catholic_diocese_of_portalegre-castelo_branco',
'see_of_potenza': 'roman_catholic_archdiocese_of_potenza-muro_lucano-marsico_nuovo',
'see_of_pozzuoli': 'roman_catholic_diocese_of_pozzuoli',
'see_of_prato': 'roman_catholic_diocese_of_prato',
'see_of_puy': 'roman_catholic_diocese_of_le_puy-en-velay',
'see_of_puy-en-velay': 'roman_catholic_diocese_of_le_puy-en-velay',
'see_of_quimper': 'roman_catholic_diocese_of_quimper',
'see_of_quimper-et-leon': 'roman_catholic_diocese_of_quimper',
'see_of_rapolla': 'diocese_of_rapolla',
'see_of_ratisbon': 'roman_catholic_diocese_of_regensburg',
'see_of_ratzeburg': 'bishopric_of_ratzeburg',
'see_of_ravenna': 'bishop_of_ravenna',
'see_of_regensburg': 'roman_catholic_diocese_of_regensburg',
'see_of_reggio': 'roman_catholic_diocese_of_reggio_emilia-guastalla',
'see_of_reims': 'roman_catholic_archdiocese_of_reims',
'see_of_rennes': 'roman_catholic_archdiocese_of_rennes',
'see_of_rieti': 'roman_catholic_diocese_of_rieti',
'see_of_rieux': 'ancient_diocese_of_rieux',
'see_of_riez': 'ancient_diocese_of_riez',
'see_of_rimini': 'roman_catholic_diocese_of_rimini',
'see_of_ripatransone': 'roman_catholic_diocese_of_san_benedetto_del_tronto-ripatransone-montalto',
'see_of_rochester': 'diocese_of_rochester',
'see_of_rodez': 'roman_catholic_diocese_of_rodez',
'see_of_roermond': 'roman_catholic_diocese_of_roermond',
'see_of_rome': 'holy_see',
'see_of_rossano': 'roman_catholic_archdiocese_of_rossano-cariati',
'see_of_rouen': 'roman_catholic_archdiocese_of_rouen',
'see_of_ruvo': 'diocese_of_ruvo',
'see_of_sabina': 'roman_catholic_suburbicarian_diocese_of_sabina-poggio_mirteto',
'see_of_saint-bertrand-de-comminges': 'ancient_diocese_of_comminges',
'see_of_saint-brieuc': 'roman_catholic_diocese_of_saint-brieuc',
'see_of_saint-claude': 'roman_catholic_diocese_of_saint-claude',
'see_of_saint-flour': 'roman_catholic_diocese_of_saint-flour',
'see_of_saint-lizier': 'ancient_diocese_of_couserans',
'see_of_saint-malo': 'ancient_diocese_of_saint-malo',
'see_of_saint-omer': 'ancient_diocese_of_saint-omer',
'see_of_saint-papoul': 'ancient_diocese_of_saint-papoul',
'see_of_saint-pol-de-leon': 'roman_catholic_diocese_of_quimper',
'see_of_saintes': 'ancient_diocese_of_saintes',
'see_of_salerno': 'roman_catholic_archdiocese_of_salerno-campagna-acerno',
'see_of_salisbury': 'diocese_of_salisbury',
'see_of_sansepolcro': 'diocese_of_sansepolcro',
'see_of_santander': 'roman_catholic_diocese_of_santander',
'see_of_sardes': 'see_of_sardis',
'see_of_sardi': 'see_of_sardis',
'see_of_sarlat': 'ancient_diocese_of_sarlat',
'see_of_sarno': 'roman_catholic_diocese_of_nocera_inferiore-sarno',
'see_of_sarsina': 'diocese_of_sarsina',
'see_of_sassari': 'roman_catholic_archdiocese_of_sassari',
'see_of_savona': 'roman_catholic_diocese_of_savona-noli',
'see_of_savona-noli': 'roman_catholic_diocese_of_savona-noli',
'see_of_seckau': 'roman_catholic_diocese_of_graz-seckau',
'see_of_senez': 'ancient_diocese_of_senez',
'see_of_senigallia': 'roman_catholic_diocese_of_senigallia',
'see_of_senlis': 'ancient_diocese_of_senlis',
'see_of_sens': 'roman_catholic_archdiocese_of_sens',
'see_of_sherborne': 'bishop_of_salisbury',
'see_of_sinigaglia': 'roman_catholic_diocese_of_senigallia',
'see_of_siponto': 'roman_catholic_archdiocese_of_manfredonia-vieste-s._giovanni_rotondo',
'see_of_sipontum': 'roman_catholic_archdiocese_of_manfredonia-vieste-s._giovanni_rotondo',
'see_of_siracusa': 'roman_catholic_archdiocese_of_siracusa',
'see_of_sisteron': 'ancient_diocese_of_sisteron',
'see_of_sofia': 'roman_catholic_diocese_of_sofia_and_plovdiv',
'see_of_soissons': 'roman_catholic_diocese_of_soissons',
'see_of_sora': 'roman_catholic_diocese_of_sora-aquino-pontecorvo',
'see_of_sorrento': 'roman_catholic_archdiocese_of_sorrento-castellammare_di_stabia',
'see_of_southwell': 'diocese_of_southwell_and_nottingham',
'see_of_sovana': 'roman_catholic_diocese_of_pitigliano-sovana-orbetello',
'see_of_sovana-pitigliano': 'roman_catholic_diocese_of_pitigliano-sovana-orbetello',
'see_of_spoleto': 'roman_catholic_archdiocese_of_spoleto-norcia',
'see_of_spoleto-norcia': 'roman_catholic_archdiocese_of_spoleto-norcia',
'see_of_squillace': 'diocese_of_squillace',
'see_of_st-lizier': 'ancient_diocese_of_couserans',
'see_of_st-malo': 'ancient_diocese_of_saint-malo',
'see_of_st-omer': 'ancient_diocese_of_saint-omer',
'see_of_st-papoul': 'ancient_diocese_of_saint-papoul',
'see_of_strasbourg': 'archbishopric_of_strasbourg',
'see_of_strasburg': 'archbishopric_of_strasbourg',
'see_of_strassburg': 'archbishopric_of_strasbourg',
'see_of_sulmona': 'roman_catholic_diocese_of_sulmona-valva',
'see_of_susa': 'roman_catholic_diocese_of_susa',
'see_of_sutri': 'diocese_of_nepi-sutri',
'see_of_syracuse': 'roman_catholic_archdiocese_of_siracusa',
'see_of_tabasco': 'roman_catholic_diocese_of_tabasco',
'see_of_taranto': 'roman_catholic_archdiocese_of_taranto',
'see_of_tarbes': 'roman_catholic_diocese_of_tarbes-et-lourdes',
'see_of_tarentaise': 'ancient_diocese_of_tarentaise',
'see_of_teano': 'roman_catholic_diocese_of_teano-calvi',
'see_of_teramo': 'roman_catholic_diocese_of_teramo-atri',
'see_of_termoli': 'roman_catholic_diocese_of_termoli-larino',
'see_of_terni': 'roman_catholic_diocese_of_terni-narni-amelia',
'see_of_terracina': 'roman_catholic_diocese_of_latina-terracina-sezze-priverno',
'see_of_terrassa': 'roman_catholic_diocese_of_terrassa',
'see_of_tivoli': 'roman_catholic_diocese_of_tivoli',
'see_of_todi': 'diocese_of_todi',
'see_of_torres': 'roman_catholic_archdiocese_of_sassari',
'see_of_tortona': 'roman_catholic_diocese_of_tortona',
'see_of_toul': 'ancient_diocese_of_toul',
'see_of_toulon': 'ancient_diocese_of_toulon',
'see_of_toulouse': 'roman_catholic_archdiocese_of_toulouse',
'see_of_tournai': 'roman_catholic_diocese_of_tournai',
'see_of_tours': 'roman_catholic_archdiocese_of_tours',
'see_of_trani': 'roman_catholic_archdiocese_of_trani-barletta-bisceglie',
'see_of_trapani': 'roman_catholic_diocese_of_trapani',
'see_of_treves': 'archbishopric_of_trier',
'see_of_treviso': 'roman_catholic_diocese_of_treviso',
'see_of_trier': 'archbishopric_of_trier',
'see_of_trieste': 'roman_catholic_diocese_of_trieste',
'see_of_trivento': 'roman_catholic_diocese_of_trivento',
'see_of_troyes': 'roman_catholic_diocese_of_troyes',
'see_of_truro': 'diocese_of_truro',
'see_of_tulle': 'roman_catholic_diocese_of_tulle',
'see_of_ugento': 'roman_catholic_diocese_of_ugento-santa_maria_di_leuca',
'see_of_utrecht': 'bishopric_of_utrecht',
'see_of_vabres': 'ancient_diocese_of_vabres',
'see_of_vaison': 'ancient_diocese_of_vaison',
'see_of_valence': 'roman_catholic_diocese_of_valence',
'see_of_valva': 'roman_catholic_diocese_of_sulmona-valva',
'see_of_vannes': 'roman_catholic_diocese_of_vannes',
'see_of_vence': 'ancient_diocese_of_vence',
'see_of_venosa': 'diocese_of_venosa',
'see_of_ventimiglia': 'roman_catholic_diocese_of_ventimiglia-san_remo',
'see_of_vercelli': 'roman_catholic_archdiocese_of_vercelli',
'see_of_verdun': 'bishopric_of_verdun',
'see_of_vermandois': 'ancient_diocese_of_noyon',
'see_of_veroli': 'roman_catholic_diocese_of_frosinone-veroli-ferentino',
'see_of_verona': 'roman_catholic_diocese_of_verona',
'see_of_versailles': 'roman_catholic_diocese_of_versailles',
'see_of_vicenza': 'roman_catholic_diocese_of_vicenza',
'see_of_vienne': 'ancient_diocese_of_vienne',
'see_of_vigevano': 'roman_catholic_diocese_of_vigevano',
'see_of_viseu': 'roman_catholic_diocese_of_viseu',
'see_of_viterbo': 'roman_catholic_diocese_of_viterbo',
'see_of_viviers': 'roman_catholic_diocese_of_viviers',
'see_of_vizeu': 'roman_catholic_diocese_of_viseu',
'see_of_volterra': 'roman_catholic_diocese_of_volterra',
'see_of_waterford': 'diocese_of_waterford_and_lismore',
'see_of_wells': 'diocese_of_bath_and_wells',
'see_of_whithorn': 'bishop_of_galloway',
'see_of_winchester': 'diocese_of_winchester',
'see_of_wolin': 'bishopric_of_cammin',
'see_of_wollin': 'bishopric_of_cammin',
'see_of_worcester': 'anglican_diocese_of_worcester',
'see_of_worms': 'bishopric_of_worms',
'see_of_york': 'archbishop_of_york',
'see_of_ypres': 'diocese_of_ypres',
'see_of_zamora': 'roman_catholic_diocese_of_zamora',
'see_rock_city': 'rock_city',
'see_sharp_press': 'chaz_bufe',
'see_the_future': 'precognition',
'see_through_clothing': 'see-through_clothing',
'see_through_head': 'tyrannosaurus_hives',
'see_thru_clothing': 'see-through_clothing',
'see_thru_records': 'see_thru_broadcasting',
'see_ya_discography': 'seeya_discography',
'seeb_international_airport': 'muscat_international_airport',
'seed-induced_contact_dermatitis': 'contact_dermatitis',
'seed_al_ghamdi': 'saeed_al-ghamdi',
'seed_growing_secretly': 'parable_of_the_growing_seed',
'seed_of_abraham': 'abraham',
'seed_propogation_mat': 'seed_propagation_mat',
'seed_test_types': 'seed_testing',
'seed_to_harvest': 'patternist_series',
'seedless_vascular_plant': 'pteridophyte',
'seeds_of_deception': 'the_babymaker:_the_dr._cecil_jacobson_story',
'seeds_of_doom': 'the_seeds_of_doom',
'seeds_of_love': 'the_seeds_of_love',
'seeger_high_school': 'seeger_memorial_junior-senior_high_school',
'seeing_eye_dog': 'guide_dog',
'seeing_eye_dogs': 'guide_dog',
'seeing_in_double': 'diplopia',
'seeing_the_future': 'precognition',
'seek_and_destroy': 'search_and_destroy',
'seek_u_out': 'cqout',
'seek_you_out': 'cqout',
'seeley_g_mudd': 'seeley_g._mudd',
'seemingly_unrelated_regressions': 'seemingly_unrelated_regression',
'seems_2_be': 'boy_in_da_corner',
'seen_it_all': 'see_you_on_the_other_side',
'seen_the_light': 'life_on_other_planets',
'seena_thaana_001': 'seena_thaana_007',
'seena_thanna_007': 'seena_thaana_007',
'seer_green_station': 'seer_green_and_jordans_railway_station',
'seer_of_lublin': 'yaakov_yitzchak_of_lublin',
'seerah_al_halbeeya': 'sirat_al-halbiya',
'seerath_ul_halbeeya': 'sirat_al-halbiya',
'seeress_of_kell': 'the_malloreon',
'seerkazhi_g._sivachidambaram': 'sirkazhi_g._sivachidambaram',
'seers_and_roebuck': 'sears',
'seet_khiam_keong': 'k._k._seet',
'sefer_mafteah_shlomoh': 'key_of_solomon',
'sefer_mitzvot_gadol': 'moses_ben_jacob_of_coucy',
'sefer_raziel_ha-malakh': 'sefer_raziel_hamalakh',
'sefer_raziel_hamalach': 'sefer_raziel_hamalakh',
'sefton_metropolitan_borough': 'metropolitan_borough_of_sefton',
'sega_cd_32x': 'sega_32x',
'sega_driving_studio': 'sega_racing_studio',
'sega_enterprises_ltd.': 'sega',
'sega_force_mega': 'sega_force',
'sega_genesis/sega_megadrive': 'mega_drive',
'sega_genesis_3': 'variations_of_the_sega_mega_drive',
'sega_genesis_32x': 'sega_32x',
'sega_genesis_ii': 'variations_of_the_sega_mega_drive',
'sega_golf_club': 'miyasato_miyoshi_kyoudai_naizou:_sega_golf_club',
'sega_gt_online': 'sega_gt_2002',
'sega_mark_iii': 'sega_master_system',
'sega_master_force': 'sega_force',
'sega_mega-cd/sega_cd': 'mega-cd',
'sega_mega-cd_32x': 'mega_drive',
'sega_mega_32x': 'sega_32x',
'sega_mega_cd': 'mega-cd',
'sega_mega_drive': 'mega_drive',
'sega_mega_drive/genesis': 'mega_drive',
'sega_mega_ld': 'sega_multi-mega',
'sega_mega_modem': 'sega_meganet',
'sega_megadrive/sega_genesis': 'mega_drive',
'sega_megadrive_32x': 'sega_32x',
'sega_megadrive_collection': 'sega_genesis_collection',
'sega_meganet_modem': 'sega_meganet',
'sega_multi-mega/sega_cdx': 'sega_multi-mega',
'sega_of_america': 'sega',
'sega_of_europe': 'sega',
'sega_of_japan': 'sega',
'sega_pico_games': 'sega_pico',
'sega_rally_2005': 'sega_rally_championship',
'sega_sammy_cup': 'nagashima_shigeo_invitational_sega_sammy_cup',
'sega_sammy_group': 'sega_sammy_holdings',
'sega_sonic_arcade': 'segasonic_the_hedgehog',
'sega_sound_team': 's.s.t._band',
'sega_sports_bowl': 'maaco_bowl_las_vegas',
'sega_sports_tennis': 'virtua_tennis_2',
'sega_studio_usa': 'sonic_team',
'sega_studios_usa': 'sonic_team',
'sega_super_32x': 'sega_32x',
'sega_super_stars': 'sega_superstars',
'sega_superstar_tennis': 'sega_superstars_tennis',
'sega_tera_drive': 'sega_teradrive',
'sega_touring_car': 'sega_touring_car_championship',
'segamat_kuantan_highway': 'tun_razak_highway',
'segedunum_roman_fort': 'segedunum',
'segegated_cycling_facility': 'segregated_cycle_facilities',
'segismundo_casado_lopez': 'segismundo_casado',
'segmental_hyalinizing_vasculitis': 'livedoid_vasculitis',
'segmentation_and_positioning': 'segmenting_and_positioning',
'segmented_address_space': 'memory_segmentation',
'segmented_sailfin_blenny': 'salarias_segmentatus',
'segments_of_lantermann': 'schmidt-lanterman_incisures',
'segna_de_bonaventura': 'segna_di_bonaventura',
'segna_di_buonaventura': 'segna_di_bonaventura',
'segontium_roman_fort': 'segontium',
'segraves_v._california': 'segraves_v._state_of_california',
'segregated_bicycle_facilities': 'segregated_cycle_facilities',
'segregated_cycle_facility': 'segregated_cycle_facilities',
'segregated_cyling_facilities': 'segregated_cycle_facilities',
'segregation_in_australia': 'white_australia_policy',
'segregation_in_brazil': 'ethnic_groups_in_brazil',
'segregation_in_israel': 'israel_and_the_apartheid_analogy',
'segregation_in_japan': 'ethnic_issues_in_japan',
'segregation_of_duties': 'separation_of_duties',
'segue:_sunken_dreams': 'so_divided',
'segunda_division_argentina': 'primera_b_nacional_argentina',
'segundo_alejandro_castillo': 'segundo_castillo',
'segundo_david_peralta': 'mate_cocido',
'seguoyah_nuclear_station': 'sequoyah_nuclear_generating_station',
'segway_human_transporter': 'segway_pt',
'segway_personal_transporter': 'segway_pt',
'sehir_al_gharam': 'dina_hayek',
'sehme_high_school': 'sehome_high_school',
'seibu_chichibu_station': 'seibu-chichibu_station',
'seibu_invoice_dome': 'seibu_dome',
'seibu_kaihatsu_inc': 'seibu_kaihatsu',
'seibu_kaihatsu_inc.': 'seibu_kaihatsu',
'seibu_lions_stadium': 'seibu_dome',
'seibu_railway_company': 'seibu_railway',
'seibu_seibuen_line': 'seibu_seibu-en_line',
'seibu_shinjuku_station': 'seibu-shinjuku_station',
'seicho_no_ie': 'seicho-no-ie',
'seid_memic_vajta': 'vajta',
'seidath_konabe_tchomogo': 'seidath_tchomogo',
'seido_no_kirisuto': 'christ_in_bronze',
'seif_al-islam_qaddafi': 'saif_al-islam_muammar_al-gaddafi',
'seif_al_adel': 'saif_al-adel',
'seif_al_adil': 'saif_al-adel',
'seif_e-din_e-zoubi': 'seif_el-din_el-zubi',
'seif_el-din_el-zubi': 'seif_el-din_el-zoubi',
'seifert_fiber_spaces': 'seifert_fiber_space',
'seifert_fibre_space': 'seifert_fiber_space',
'seige_of_bastogne': 'siege_of_bastogne',
'seige_of_boston': 'siege_of_boston',
'seige_of_catubig': 'siege_of_catubig',
'seige_of_chencang': 'siege_of_chencang',
'seige_of_freetown': 'siege_of_freetown',
'seige_of_futamata': 'siege_of_futamata',
'seige_of_gibraltar': 'history_of_gibraltar',
'seige_of_harfleur': 'siege_of_harfleur',
'seige_of_khartoum': 'siege_of_khartoum',
'seige_of_koriyama': 'siege_of_koriyama',
'seige_of_kusong': 'siege_of_kusong',
'seige_of_leningrad': 'siege_of_leningrad',
'seige_of_madras': 'siege_of_madras',
'seige_of_marune': 'siege_of_marune',
'seige_of_maubeuge': 'siege_of_maubeuge',
'seige_of_miki': 'siege_of_miki',
'seige_of_nagashima': 'sieges_of_nagashima',
'seige_of_noda': 'siege_of_noda',
'seige_of_odani': 'siege_of_odani_castle',
'seige_of_osaka': 'siege_of_osaka',
'seige_of_oviedo': 'siege_of_oviedo',
'seige_of_prague': 'siege_of_prague',
'seige_of_quebec': 'battle_of_the_plains_of_abraham',
'seige_of_seringapatam': 'battle_of_seringapatam',
'seige_of_takamatsu': 'siege_of_takamatsu',
'seige_of_terabe': 'siege_of_terabe',
'seige_of_ueda': 'siege_of_ueda',
'seige_of_vienna': 'siege_of_vienna',
'seige_of_yorktown': 'siege_of_yorktown',
'seiges_of_galway': 'sieges_of_galway',
'seiges_of_nagashima': 'sieges_of_nagashima',
'seiges_of_taketenjin': 'sieges_of_takatenjin',
'seigi_no_hero': 'lethal_enforcers_3',
'seigneur_de_boutieres': 'guigues_guiffrey',
'seigneur_de_vauban': 'vauban',
'seijin_no_hi': 'coming_of_age_day',
'seiju_sentai_gingaman': 'seijuu_sentai_gingaman',
'seijuu_sentai_gingman': 'seijuu_sentai_gingaman',
'seikai_no_monshou': 'crest_of_the_stars',
'seikai_no_senki': 'banner_of_the_stars',
'seiken_densetsu_1': 'final_fantasy_adventure',
'seiken_densetsu_2': 'secret_of_mana',
'seiken_densetsu_4': 'dawn_of_mana',
'seiken_densetsu_ds': 'children_of_mana',
'seiken_densetsu_ii': 'secret_of_mana',
'seiken_densetsu_iii': 'seiken_densetsu_3',
'seiken_gyaku_tsuki': 'gyaku_zuki',
'seiken_oi_tsuki': 'tsuki',
'seiken_shita_tsuki': 'shita_tsuki',
'seiko_and_salome': 'from_bubblegum_to_sky',
'seiko_epson_corp.': 'seiko_epson',
'seiko_epson_corporation': 'seiko_epson',
'seiko_holdings_corporation': 'seiko',
'seiko_kinetic_watches': 'automatic_quartz',
'seiko_sports_association': 'seiko_sa',
'seiko_spring_drive': 'spring_drive',
'seiko_tucson_open': 'chrysler_classic_of_tucson',
'seimu_of_japan': 'emperor_seimu',
'sein_und_zeit': 'being_and_time',
'seinai_priest_seminary': 'sejny_priest_seminary',
'seine_and_marne': 'seine-et-marne',
'seine_and_oise': 'seine-et-oise',
'seine_et_marne': 'seine-et-marne',
'seine_et_oise': 'seine-et-oise',
'seine_nord_canal': 'seine-nord_europe_canal',
'seine_nord_europe': 'seine-nord_europe_canal',
'seine_saint_denis': 'seine-saint-denis',
'seine_st._dennis': 'seine-saint-denis',
'seirei_densetsu_lickle': 'little_samson',
'seirei_no_moribito': 'moribito:_guardian_of_the_spirit',
'seisen_no_keifu': 'fire_emblem:_seisen_no_keifu',
'seishun_18_kippu': 'seishun_18_ticket',
'seishun_no_mon': 'the_gate_of_youth',
'seisill_ap_dyfnwal': 'seisyll_ap_dyfnwal',
'seismic_performance_analysis': 'seismic_analysis',
'seiu_local_1.on': 'seiu_local_1_canada',
'seiu_local_32bj': 'seiu_32bj',
'seiwa_of_japan': 'emperor_seiwa',
'seixal_f._c.': 'seixal_f.c.',
'seixal_f_c': 'seixal_f.c.',
'seixal_futebol_clube': 'seixal_f.c.',
'seizan_junior_college': 'kyoto_seizan_college',
'seize_the_day': 'carpe_diem',
'seize_the_moment': 'carpe_diem',
'seize_the_world': 'aalborg',
'seizieme_de_soupir': 'sixty-fourth_note',
'seizure_alert_dog': 'seizure_response_dog',
'sejanus:_his_fall': 'sejanus_his_fall',
'sejm_of_1632': 'election_sejm_of_1632',
'sejm_of_1793': 'grodno_sejm',
'sejm_of_grodno': 'grodno_sejm',
'sejm_of_poland': 'sejm_of_the_republic_of_poland',
'sejny_theological_seminary': 'sejny_priest_seminary',
'sejong_cultural_center': 'sejong_center',
'sejong_of_joseon': 'sejong_the_great',
'sejong_of_korea': 'sejong_the_great',
'sek_kong_airfield': 'shek_kong_airfield',
'sekai_dai-hyakka_jiten': 'heibonsha_world_encyclopedia',
'sekai_daihyakka_jiten': 'heibonsha_world_encyclopedia',
'sekaiju_no_meikyu': 'etrian_odyssey',
'sekaiju_no_meikyuu': 'etrian_odyssey',
'sekolah_menegah_sipitang': 'smk_pengiran_omar',
'sekolah_menengah_hillcrest': 'sekolah_menengah_kebangsaan_hillcrest',
'sekolah_menengah_kebangsaan': 'education_in_malaysia',
'sekolah_tinggi_klang': 'klang_high_school',
'sekondi_eleven_wise': 'sekondi_wise_fighters',
'sekondi_hasaacas_fc': 'sekondi_hasaacas_f.c.',
'sekou_damate_conneh': 'sekou_conneh',
'sektzia_nes_tziona': 'sektzia_nes_tziona_f.c.',
'sekyere_west_district': 'mampong_municipal_district',
'selah_brewster_strong': 'selah_b._strong',
'selah_creek_bridge': 'fred_g._redmon_bridge',
'selanarctos_thibetanus_formosanus': 'formosan_black_bear',
'selanger_court_district': 'medelpad',
'selangor_civil_war': 'klang_war',
'selangor_football_team': 'selangor_fa',
'selangor_mbpj_fc': 'mppj_selangor_fc',
'selangor_mppj_fc': 'mppj_selangor_fc',
'selangor_pkns_fc': 'pkns_fc',
'selangor_public_bank': 'public_bank_fc',
'selayer_reticulated_python': 'python_reticulatus',
'selbach_of_dalriada': 'selbach_mac_ferchair',
'selberg_class_s': 'selberg_class',
'selbstaendige_evangelisch-lutherische_kirche': 'independent_evangelical-lutheran_church',
'selbstandige_evangelisch-lutherische_kirche': 'independent_evangelical-lutheran_church',
'selby_town_fc': 'selby_town_f.c.',
'selden_palmer_spencer': 'selden_p._spencer',
'seldom_seen_kid': 'the_seldom_seen_kid',
'seleccao_das_quinas': 'portugal_national_football_team',
'select_agent_program': 'select_agent',
'select_comfort_corp.': 'select_comfort',
'select_on_demand': 'video_on_demand',
'select_vestries_bill': 'select_vestries_acts',
'select_yacht_group': 'select_yachts',
'selectin_p_ligand': 'p-selectin_glycoprotein_ligand-1',
'selection_based_search': 'selection-based_search',
'selection_in_schools': 'selective_school',
'selective_door_opening': 'selective_door_operation',
'selective_entry_schools': 'selective_school',
'selective_estrogen-receptor_modulator': 'selective_estrogen_receptor_modulator',
'selective_fire_shotgun': 'automatic_shotgun',
'selective_growth_medium': 'growth_medium',
'selective_iga_deficiency': 'selective_immunoglobulin_a_deficiency',
'selective_service_acts': 'selective_service_act',
'selective_service_initiative': 'universal_national_service_act',
'selective_sound_sensitivity': 'misophonia',
'selectively_permeable_membrane': 'semipermeable_membrane',
'selectmen_of_boston': 'boston_board_of_selectmen',
'selecus_i_nikator': 'seleucus_i_nicator',
'selena_cuffee_glenn': 'selena_cuffee-glenn',
'selena_gomez_songs': 'selena_gomez',
'selena_marie_gomez': 'selena_gomez',
'selena_quintanilla_perez': 'selena',
'selenium_dioxide_oxidation': 'organoselenium_chemistry',
'selenocysteine_insertion_sequence': 'secis_element',
'seleucia_above_zeugma': 'seleucia_at_the_zeugma',
'seleucia_ad_calycadnum': 'silifke',
'seleucia_ad_eulaeum': 'susa',
'seleucia_ad_pyramum': 'mopsuestia',
'seleucus_i_nikator': 'seleucus_i_nicator',
'seleucus_iii_soter': 'seleucus_iii_ceraunus',
'seleukos_i_nicator': 'seleucus_i_nicator',
'selex_aptamer_technique': 'aptamer',
'self-_discrepancy_theory': 'self-discrepancy_theory',
'self-anchored_suspension_bridges': 'self-anchored_suspension_bridge',
'self-balancing_binary_tree': 'self-balancing_binary_search_tree',
'self-defense_social_movement': 'patriotic_self-defense',
'self-diagnosed_asperger_syndrome': 'asperger_syndrome',
'self-expanding_metallic_stent': 'self-expandable_metallic_stent',
'self-facilitating_media_node': 'nathan_barley',
'self-immolation_of_muthukumar': 'self-immolation_of_k._muthukumar',
'self-ionisation_of_water': 'self-ionization_of_water',
'self-levelling_hydropneumatic_suspension': 'hydropneumatic_suspension',
'self-loading_or_semi-automatic': 'semi-automatic_firearm',
'self-medicational_drug_use': 'self-medication',
'self-organizing_feature_map': 'self-organizing_map',
'self-portrait_in_december': 'jlg/jlg_-_self-portrait_in_december',
'self-propelled_air_defence': 'self-propelled_anti-aircraft_weapon',
'self-propelled_anti-aircraft_gun': 'self-propelled_anti-aircraft_weapon',
'self-propelled_anti-aircraft_vehicle': 'self-propelled_anti-aircraft_weapon',
'self-propelled_anti-tank_gun': 'tank_destroyer',
'self-propelled_antiaircraft_gun': 'self-propelled_anti-aircraft_weapon',
'self-propelled_antitank_gun': 'tank_destroyer',
'self-sacrifice_in_judaism': 'self-sacrifice_in_jewish_law',
'self-sealing_fuel_tanks': 'self-sealing_fuel_tank',
'self-service_business_intelligence': 'business_intelligence',
'self-service_checkout_lane': 'self_checkout',
'self-supporting_suspension_bridge': 'self-anchored_suspension_bridge',
'self-supporting_suspension_bridges': 'self-anchored_suspension_bridge',
'self-synchronizing_stream_cipher': 'stream_cipher',
'self-titled_debut_album': 'album',
'self-transforming_machine_elves': 'machine_elf',
'self_addressed_envelope': 'self-addressed_stamped_envelope',
'self_adhesive_tape': 'pressure_sensitive_tape',
'self_adjoint_operator': 'self-adjoint_operator',
'self_aligned_gate': 'self-aligned_gate',
'self_aligning_nut': 'self-aligning_nut',
'self_assembled_monolayer': 'self-assembled_monolayer',
'self_avoiding_walk': 'self-avoiding_walk',
'self_certifying_filesystem': 'self-certifying_file_system',
'self_checkout_machine': 'self_checkout',
'self_checkout_machines': 'self_checkout',
'self_cleaning_glass': 'self-cleaning_glass',
'self_cleaning_oven': 'self-cleaning_oven',
'self_consuming_artifacts': 'self-consuming_artifacts',
'self_creation_cosmology': 'self-creation_cosmology',
'self_defence_force': 'military',
'self_defence_forces': 'military',
'self_defense_force': 'military',
'self_defense_forces': 'military',
'self_denying_ordinance': 'self-denying_ordinance',
'self_destruct_button': 'self-destruct',
'self_determination_theory': 'self-determination_theory',
'self_directed_ira': 'self-directed_ira',
'self_discrepancy_theory': 'self-discrepancy_theory',
'self_employment_tax': 'self-employment',
'self_extracting_archive': 'self-extracting_archive',
'self_forging_fragment': 'explosively_formed_penetrator',
'self_forging_fragments': 'explosively_formed_penetrator',
'self_fulfilling_prophecy': 'self-fulfilling_prophecy',
'self_hating_jew': 'self-hating_jew',
'self_hating_jews': 'self-hating_jew',
'self_healing_material': 'self-healing_material',
'self_help_book': 'self-help_book',
'self_help_books': 'self-help_book',
'self_immolation_records': 'self_immolation',
'self_improvement_book': 'self-help_book',
'self_improvement_books': 'self-help_book',
'self_induced_abortion': 'self-induced_abortion',
'self_inflicted_injury': 'self-injury',
'self_inflicted_violence': 'self-injury',
'self_inflicted_wound': 'self-inflicted_wound',
'self_injurious_behaviour': 'self-injury',
'self_loading_freight': 'passenger',
'self_loading_pistol': 'john_browning',
'self_made_instrument': 'experimental_musical_instrument',
'self_modifying_code': 'self-modifying_code',
'self_organising_map': 'self-organizing_map',
'self_organising_system': 'self-organization',
'self_organising_systems': 'self-organization',
'self_organizing_map': 'self-organizing_map',
'self_organizing_maps': 'self-organizing_map',
'self_organizing_system': 'self-organization',
'self_organizing_systems': 'self-organization',
'self_phase_modulation': 'self-phase_modulation',
'self_powered_equipment': 'self-powered_equipment',
'self_powered_hub': 'usb_hub',
'self_propelled_artillery': 'self-propelled_artillery',
'self_propelled_gun': 'self-propelled_gun',
'self_propelled_howitzer': 'self-propelled_artillery',
'self_raising_flour': 'flour',
'self_raising_flower': 'flour',
'self_realization_fellowship': 'self-realization_fellowship',
'self_referential_humor': 'self-referential_humor',
'self_refuting_argument': 'self-refuting_idea',
'self_refuting_idea': 'self-refuting_idea',
'self_refuting_ideas': 'self-refuting_idea',
'self_regulatory_organization': 'self-regulatory_organization',
'self_replicating_machines': 'self-replicating_machine',
'self_replicating_robot': 'self-replicating_machine',
'self_replicating_robots': 'self-replicating_machine',
'self_represented_litigant': 'pro_se_legal_representation_in_the_united_states',
'self_respect_marriages': 'self-respect_movement',
'self_respect_movement': 'self-respect_movement',
'self_righteous_individual': 'self-righteousness',
'self_rising_flour': 'flour',
'self_serve_ads': 'internet_marketing',
'self_service_cashier': 'self_checkout',
'self_service_checkout': 'self_checkout',
'self_service_moving': 'moving_company',
'self_serving_bias': 'self-serving_bias',
'self_signed_certificate': 'self-signed_certificate',
'self_stick_adhesive': 'pressure_sensitive_adhesive',
'self_strengthening_movement': 'self-strengthening_movement',
'self_support_length': 'specific_strength',
'selfish_biocosm_hypothesis': 'anthropic_principle',
'selfish_gene_theory': 'gene-centered_view_of_evolution',
'selim_ahmed_al-hoss': 'selim_al-hoss',
'selim_al_din': 'selim_al_deen',
'selim_ben_achour': 'selim_benachour',
'selim_bey_karam': 'salim_bey_karam',
'selim_g._akl': 'selim_akl',
'selim_g_akl': 'selim_akl',
'selim_h._peabody': 'selim_peabody',
'selim_the_cruel': 'selim_i',
'selim_the_grim': 'selim_i',
'selim_the_sot': 'selim_ii',
'selina_ren_jiaxuan': 'selina_jen',
'seljukid_persian_empire': 'great_seljuq_empire',
'selke_valley_railway': 'selketalbahn',
'selkirk_f_c': 'selkirk_f.c.',
'selkirk_rex_cat': 'selkirk_rex',
'sell_by_date': 'shelf_life',
'sell_in_may': 'halloween_indicator',
'sell_out_boy': 'fall_out_boy',
'sella_destroyer_class': 'sella_class_destroyer',
'sellapan_rama_nathan': 'sellapan_ramanathan',
'sellers_bros._incorporated': 'sellers_bros.',
'sellers_bros_incorporated': 'sellers_bros.',
'sellers_brother_incorporated': 'sellers_bros.',
'sellers_brothers_incorporated': 'sellers_bros.',
'selliah_parameshwaram_kurukkal': 'selliah_parameswaran_kurukkal',
'selling_free_software': 'commercial_use_of_copyleft_works',
'selling_the_moon': 'extraterrestrial_real_estate',
'selling_wolf_tickets': 'wolf_ticket',
'selling_woof_tickets': 'wolf_ticket',
'selling_your_soul': 'deal_with_the_devil',
'sellinger_business_school': 'sellinger_school_of_business_and_management',
'sellout_of_1876': 'compromise_of_1877',
'sellstedt_railway_station': 'sellstedt_station',
'selma_and_patty': 'patty_and_selma_bouvier',
'selma_franko_goldman': 'edwin_franko_goldman',
'selman_a._waksman': 'selman_waksman',
'selman_a_waksman': 'selman_waksman',
'selman_abraham_waksman': 'selman_waksman',
'selmer-maccaferri_style_guitar': 'selmer_guitar',
'selmon_crosstown_expressway': 'lee_roy_selmon_crosstown_expressway',
'selom_komi_klassou': 'komi_klassou',
'selous_national_park': 'selous_game_reserve',
'selsey_f._c.': 'selsey_f.c.',
'selsey_f_c': 'selsey_f.c.',
'seltzer_siphon_bottle': 'soda_syphon',
'selva_del_montello': 'volpago_del_montello',
'selwin_house_school': 'selwyn_house_school',
'selwyn_blaylock_medal': 'selwyn_g._blaylock_medal',
'selwyn_davies_whalley': 'selwyn_whalley',
'selwyn_howe_fremantle': 'selwyn_fremantle',
'selwyn_zadock_bowman': 'selwyn_z._bowman',
'semaihmoo_first_nation': 'semiahmoo_first_nation',
'semana_santa_marikina': 'procession',
'semanatoarea_metro_station': 'petrache_poenaru_metro_station',
'semantic_action_routine': 'compiler-compiler',
'semantic_data_modeling': 'semantic_data_model',
'semantic_differential_scale': 'semantic_differential',
'semantic_pragmatic_disorder': 'pragmatic_language_impairment',
'semantic_role_labelling': 'semantic_role_labeling',
'semantic_search_engine': 'semantic_search',
'semantic_social_networks': 'semantic_social_network',
'semantic_web_cake': 'semantic_web_stack',
'semantic_web_publishing': 'semantic_publishing',
'semantics_of_logic': 'formal_semantics',
'semaphore_flag_signalling': 'flag_semaphore',
'sembach_air_base': 'sembach_annex',
'sembach_middle_school': 'department_of_defense_dependents_schools',
'sembawang_hot_springs': 'sembawang_hot_spring',
'sembawang_naval_base': 'sembawang',
'sembawang_rangers_f.c.': 'sembawang_rangers_fc',
'sembawang_soccer_club': 'sembawang_rangers_fc',
'sembawang_sports_club': 'sembawang_rangers_fc',
'sembcorp_industries_limited': 'sembcorp_industries',
'sembcorp_logistics_limited': 'sembcorp_logistics',
'sembcorp_marine_limited': 'sembcorp_marine',
'semen_abramovich_furman': 'semyon_furman',
'semen_gondar_zone': 'semien_gondar_zone',
'semen_hulak_artemovsky': 'semen_hulak-artemovsky',
'semen_ivanovich_dejnev': 'semyon_dezhnyov',
'semen_jakovlevich_nadson': 'semen_nadson',
'semen_konstantinovich_timoshenko': 'semyon_timoshenko',
'semen_l_frank': 'semen_l._frank',
'semen_liudvigovich_frank': 'semen_l._frank',
'semen_padang_f.c.': 'ps_semen_padang',
'semen_padang_fc': 'ps_semen_padang',
'semen_wollo_zone': 'semien_wollo_zone',
'semenawi_keyih_bahri': 'northern_red_sea_region',
'semeon_of_russia': 'simeon_of_moscow',
'semgroup_energy_partners': 'semgroup',
'semi-active_laser_homing': 'laser_guidance',
'semi-active_laser_seeker': 'laser_guidance',
'semi-active_radar_guidance': 'semi-active_radar_homing',
'semi-automatic_ground_environment': 'semi_automatic_ground_environment',
'semi-automatic_rifle_wz.38m': 'kbsp_wz._1938m',
'semi-automatic_self-loading_pistol': 'semi-automatic_pistol',
'semi-coherent_computing_podcast': 'ashlee_vance',
'semi-conservative_dna_replication': 'dna_replication',
'semi-legendary_swedish_kings': 'semi-legendary_kings_of_sweden',
'semi-normed_vector_space': 'normed_vector_space',
'semi-regular_variable_star': 'semiregular_variable_star',
'semi-simple_lie_algebra': 'semisimple_lie_algebra',
'semi-simple_lie_group': 'semisimple_lie_algebra',
'semi-stable_abelian_variety': 'semistable_abelian_variety',
'semi-stable_vector_bundle': 'stable_vector_bundle',
'semi-submersible_heavy-lift_ship': 'semi-submersible',
'semi-trailing_arm_suspension': 'trailing_arm',
'semi_acoustic_guitar': 'semi-acoustic_guitar',
'semi_active_homing': 'semi-active_radar_homing',
'semi_active_laser': 'laser_guidance',
'semi_active_radar': 'semi-active_radar_homing',
'semi_auto_knife': 'assisted-opening_knife',
'semi_bantu_language': 'grassfields_languages',
'semi_bantu_languages': 'grassfields_languages',
'semi_charmed_life': 'semi-charmed_life',
'semi_classical_music': 'semi-classical_music',
'semi_coherent_computing': 'ashlee_vance',
'semi_conductor_detector': 'semiconductor_detector',
'semi_explicit_euler': 'semi-implicit_euler_method',
'semi_fixed_cost': 'semi_variable_cost',
'semi_implicit_euler': 'semi-implicit_euler_method',
'semi_infinite_programming': 'semi-infinite_programming',
'semi_markov_process': 'semi-markov_process',
'semi_normed_space': 'normed_vector_space',
'semi_permeable_membrane': 'semipermeable_membrane',
'semi_precious_stones': 'gemstone',
'semi_presidential_system': 'semi-presidential_system',
'semi_secret_trust': 'secret_trust',
'semi_slav_defence': 'semi-slav_defense',
'semi_slav_defense': 'semi-slav_defense',
'semi_solid_forming': 'semi-solid_metal_casting',
'semi_sparkling_wine': 'sparkling_wine',
'semi_supervised_learning': 'semi-supervised_learning',
'semiarians_and_semiarianism': 'semi-arianism',
'semicircle_potential_well': 'semicircular_potential_well',
'semiconducter_optical_amplifier': 'optical_amplifier',
'semiconductor_controlled_rectifier': 'silicon-controlled_rectifier',
'semiconductor_device_physics': 'semiconductor_device',
'semiconductor_diode_detector': 'semiconductor_detector',
'semiconductor_diode_laser': 'laser_diode',
'semiconductor_manufacturing_equipment': 'semiconductor_device_fabrication',
'semiconductor_on_insulator': 'silicon_on_insulator',
'semiconductor_optical_amplifier': 'optical_amplifier',
'semiconductor_water_fab': 'semiconductor_device_fabrication',
'semiconductor_water_fabrication': 'semiconductor_device_fabrication',
'semidefinite_bilinear_form': 'definite_bilinear_form',
'semien_national_park': 'semien_mountains_national_park',
'semigroup_of_transformations': 'transformation_semigroup',
'semiimplicit_euler_method': 'semi-implicit_euler_method',
'semilla_de_jicaro': 'horchata',
'semilocally_simply_connected': 'semi-locally_simply_connected',
'semilunar_valve_cusps': 'cusps_of_heart_valves',
'seminal_acid_phosphatase': 'prostatic_acid_phosphatase',
'seminar_schloss_bogenhofen': 'bogenhofen_seminary',
'seminari_theologi_malaysia': 'malaysia_theological_seminary',
'seminari_theoloji_malaysia': 'malaysia_theological_seminary',
'seminary_at_econe': 'the_international_seminary_of_saint_pius_x',
'seminary_of_phoenix': 'phoenix_seminary',
'seminole_community_college': 'seminole_state_college_of_florida',
'seminole_county_expressway': 'florida_state_road_417',
'seminole_golf_club': 'don_veller_seminole_golf',
'seminole_indian_war': 'seminole_wars',
'seminole_spring_snail': 'seminole_siltsnail',
'seminole_war_canoe': 'war_canoe_trophy',
'seminormed_vector_space': 'normed_vector_space',
'semion_grossu_cabinet': 'semion_grossu',
'semion_ivanovich_cheliuskin': 'semion_chelyuskin',
'semiotic_literary_interpretation': 'semiotic_literary_criticism',
'semiotics_and_literature': 'semiotic_literary_criticism',
'semiotics_in_literature': 'semiotic_literary_criticism',
'semiotics_of_literature': 'semiotic_literary_criticism',
'semipalatinsk_test_range': 'semipalatinsk_test_site',
'semiperfect_magic_tesseract': 'magic_tesseract',
'semiramis_hotel_massacre': 'semiramis_hotel_bombing',
'semiramis_of_babylon': 'semiramis',
'semirechensk_cossask_host': 'semirechye_cossacks',
'semiryechensk_cossack_host': 'semirechye_cossacks',
'semiryechensk_cossask_host': 'semirechye_cossacks',
'semiryechye_cossack_host': 'semirechye_cossacks',
'semisimple_lie_group': 'semisimple_lie_algebra',
'semispinalis_capitis_muscle': 'semispinalis_capitis',
'semispinalis_colli_muscle': 'semispinalis_muscle',
'semistable_reduction_theorem': 'semistable_abelian_variety',
'semistable_vector_bundle': 'stable_vector_bundle',
'semistandard_young_tableau': 'young_tableau',
'semistandard_young_tableaux': 'young_tableau',
'semitic_italian_language': 'sicilian_arabic',
'semitic_pagan_deities': 'canaanite_religion',
'semitic_sicilian_language': 'sicilian_arabic',
'semitic_south_italian': 'sicilian_arabic',
'semitic_southern_italian': 'sicilian_arabic',
'semitransparent_solar_cell': 'silicon_thin-film_cell',
'semjase_from_pleaides': 'billy_meier',
'semjase_on_earth': 'billy_meier',
'semmangudi_srinivasa_lyer': 'semmangudi_srinivasa_iyer',
'sempati_air_transport': 'sempati_air',
'semper_fi_highway': 'interstate_15_in_california',
'semper_opera_house': 'semperoper',
'sempiternus_rex_christus': 'sempiternus_rex',
'semporna_islands_park': 'tun_sakaran_marine_park',
'sempronius_hamilton_boyd': 'sempronius_h._boyd',
'semur_en_brionnais': 'semur-en-brionnais',
'semyan_aronovich_gershgorin': 'semyon_aranovich_gershgorin',
'semyin_a_kosberg': 'semyon_kosberg',
'semyon_abramovich_furman': 'semyon_furman',
'semyon_alekseyevich_lavochkin': 'semyon_lavochkin',
'semyon_ariyevich_kosberg': 'semyon_kosberg',
'semyon_aronovich_gershgorin': 'semyon_aranovich_gershgorin',
'semyon_efimovich_desnitsky': 'semyon_desnitsky',
'semyon_gulak_artemovsky': 'semen_hulak-artemovsky',
'semyon_isaakovich_volfkovich': 'semyon_volfkovich',
'semyon_ivanov_dezhnyov': 'semyon_dezhnyov',
'semyon_ivanovich_dezhnev': 'semyon_dezhnyov',
'semyon_konstantinovich_kurkotkin': 'semyon_kurkotkin',
'semyon_konstantinovich_timoshenko': 'semyon_timoshenko',
'semyon_kuzmich_grossu': 'semion_grossu',
'semyon_mikhailovich_budenny': 'semyon_budyonny',
'semyon_mikhaylovich_budenny': 'semyon_budyonny',
'semyon_romanovich_vorontsov': 'semyon_vorontsov',
'sen._antonio_trillanes': 'antonio_trillanes_iv',
'sen._barack_obama': 'barack_obama',
'sen._bill_frist': 'bill_frist',
'sen._charles_palantine': 'taxi_driver',
'sen._chris_dodd': 'christopher_dodd',
'sen._dianne_wilkerson': 'dianne_wilkerson',
'sen._donat_raymond': 'donat_raymond',
'sen._jesse_helms': 'jesse_helms',
'sen._joseph_mccarthy': 'joseph_mccarthy',
'sen._mike_gravel': 'mike_gravel',
'sen._robert_byrd': 'robert_byrd',
'sen_mike_gravel': 'mike_gravel',
'sen_to_chihiro': 'spirited_away',
'senado_de_tejas': 'texas_senate',
'senado_de_texas': 'texas_senate',
'senado_ng_pilipinas': 'senate_of_the_philippines',
'senai_south_interchange': 'senai_north_interchange',
'senat_of_poland': 'senate_of_the_republic_of_poland',
'senate_afterschool_caucus': 'afterschool_caucuses',
'senate_appropriations_committee': 'united_states_senate_committee_on_appropriations',
'senate_bill_1959': 'violent_radicalization_and_homegrown_terrorism_prevention_act_of_2007',
'senate_bill_2611': 'comprehensive_immigration_reform_act_of_2006',
'senate_bill_420': 'california_senate_bill_420',
'senate_budget_committee': 'united_states_senate_committee_on_the_budget',
'senate_democractic_caucus': 'democratic_caucus_of_the_united_states_senate',
'senate_democratic_caucus': 'democratic_caucus_of_the_united_states_senate',
'senate_democratic_leader': 'party_leaders_of_the_united_states_senate',
'senate_democratic_whip': 'assistant_party_leaders_of_the_united_states_senate',
'senate_ethics_committee': 'united_states_senate_select_committee_on_ethics',
'senate_finance_committee': 'united_states_senate_committee_on_finance',
'senate_house_passage': 'trinity_lane',
'senate_intelligence_committee': 'united_states_senate_select_committee_on_intelligence',
'senate_interior_committee': 'united_states_senate_committee_on_indian_affairs',
'senate_investigation_committee': 'senate_war_investigating_committee',
'senate_judiciary_committee': 'united_states_senate_committee_on_the_judiciary',
'senate_majority_leader': 'party_leaders_of_the_united_states_senate',
'senate_majority_whip': 'assistant_party_leaders_of_the_united_states_senate',
'senate_minority_leader': 'party_leaders_of_the_united_states_senate',
'senate_minority_whip': 'assistant_party_leaders_of_the_united_states_senate',
'senate_munitions_committee': 'nye_committee',
'senate_of_argentina': 'argentine_senate',
'senate_of_australia': 'australian_senate',
'senate_of_belgium': 'belgian_senate',
'senate_of_bolivia': 'national_congress_of_bolivia',
'senate_of_bosnia': 'house_of_peoples_of_bosnia_and_herzegovina',
'senate_of_georgia': 'georgia_state_senate',
'senate_of_india': 'rajya_sabha',
'senate_of_italy': 'italian_senate',
'senate_of_jamaica': 'parliament_of_jamaica',
'senate_of_kampuchea': 'senate_of_cambodia',
'senate_of_korea': 'senate_of_south_korea',
'senate_of_malaysia': 'dewan_negara',
'senate_of_morocco': 'assembly_of_councillors',
'senate_of_peru': 'congress_of_peru',
'senate_of_poland': 'senate_of_the_republic_of_poland',
'senate_of_rome': 'senate_of_the_roman_republic',
'senate_of_russia': 'governing_senate',
'senate_of_spain': 'spanish_senate',
'senate_of_tajikistan': 'supreme_assembly_of_tajikistan',
'senate_of_texas': 'texas_senate',
'senate_of_vaasa': 'senate_of_finland',
'senate_of_venice': 'republic_of_venice',
'senate_page_program': 'united_states_senate_page',
'senate_president_pro-tempore': 'president_pro_tempore_of_the_united_states_senate',
'senate_pro_tem': 'president_pro_tempore_of_the_united_states_senate',
'senate_rackets_committee': 'united_states_senate_select_committee_on_improper_activities_in_labor_and_management',
'senate_republican_conference': 'republican_conference_of_the_united_states_senate',
'senate_republican_leader': 'party_leaders_of_the_united_states_senate',
'senate_republican_whip': 'assistant_party_leaders_of_the_united_states_senate',
'senate_rules_committee': 'united_states_senate_committee_on_rules_and_administration',
'senate_watergate_committee': 'united_states_senate_watergate_committee',
'senate_whitewater_committee': 'united_states_senate_whitewater_committee',
'senate_whitewater_hearings': 'united_states_senate_whitewater_committee',
'senato_del_regno': 'italian_senate',
'senato_della_repubblica': 'italian_senate',
'senator_al_franken': 'al_franken',
'senator_barack_obama': 'barack_obama',
'senator_barbara_boxer': 'barbara_boxer',
'senator_barry_goldwater': 'barry_goldwater',
'senator_bernard_sanders': 'bernie_sanders',
'senator_bernie_sanders': 'bernie_sanders',
'senator_bob_dole': 'bob_dole',
'senator_brian_joyce': 'brian_a._joyce',
'senator_charles_curtis': 'charles_curtis',
'senator_charles_palantine': 'taxi_driver',
'senator_charles_sumner': 'charles_sumner',
'senator_deborah_cherry': 'deborah_cherry',
'senator_edward_brooke': 'edward_brooke',
'senator_eileen_stevens': 'even_stevens',
'senator_eugene_mccarthy': 'eugene_mccarthy',
'senator_fiona_nash': 'fiona_nash',
'senator_from_comcast': 'arlen_specter',
'senator_george_mitchell': 'george_j._mitchell',
'senator_harry_reid': 'harry_reid',
'senator_hillary_clinton': 'hillary_rodham_clinton',
'senator_j_mccain': 'john_mccain',
'senator_james_meeks': 'james_meeks',
'senator_jan_mclucas': 'jan_mclucas',
'senator_jay_rockefeller': 'jay_rockefeller',
'senator_jesse_helms': 'jesse_helms',
'senator_joe_biden': 'joe_biden',
'senator_joe_lieberman': 'joe_lieberman',
'senator_john_kerry': 'john_kerry',
'senator_john_mccain': 'john_mccain',
'senator_john_mcgee': 'john_mcgee',
'senator_joseph_mccarthy': 'joseph_mccarthy',
'senator_kirsten_gillibrand': 'kirsten_gillibrand',
'senator_landon_pearson': 'landon_pearson',
'senator_larry_craig': 'larry_craig',
'senator_lindsey_graham': 'lindsey_graham',
'senator_lyn_allison': 'lyn_allison',
'senator_mark_schauer': 'mark_schauer',
'senator_maxine_horner': 'maxine_horner',
'senator_mike_gravel': 'mike_gravel',
'senator_of_colombia': 'senate_of_colombia',
'senator_of_france': 'senate_of_france',
'senator_owen_brewster': 'owen_brewster',
'senator_pat_jehlen': 'patricia_d._jehlen',
'senator_pat_williams': 'john_patrick_williams',
'senator_patricia_jehlen': 'patricia_d._jehlen',
'senator_paul_wellstone': 'paul_wellstone',
'senator_robert_byrd': 'robert_byrd',
'senator_rodney_ellis': 'rodney_ellis',
'senator_rodrigo_silva': 'rodrigo_augusto_da_silva',
'senator_roy_ashburn': 'roy_ashburn',
'senator_sam_brownback': 'sam_brownback',
'senator_strom_thurmond': 'strom_thurmond',
'senator_ted_kennedy': 'ted_kennedy',
'senator_ted_stevens': 'ted_stevens',
'senator_trent_lott': 'trent_lott',
'senator_tupac_hunter': 'tupac_a._hunter',
'senator_wide_stance': 'larry_craig',
'senator_william_findlay': 'william_findlay',
'senatore_a_vita': 'senator_for_life',
'senatorial_roman_province': 'senatorial_province',
'senatorial_selection_act': 'alberta_senate_nominee_elections',
'senators_for_life': 'senator_for_life',
'senators_from_michigan': 'united_states_congressional_delegations_from_michigan',
'senators_third_jersey': 'ottawa_senators',
'senatus_consultum_macedonianum': 'senatusconsultum_macedonianum',
'senatus_populusque_romanus': 'spqr',
'senawang_industrial_park': 'senawang',
'senayan_sports_stadium': 'bung_karno_stadium',
'senboku_rapid_railway': 'semboku_rapid_railway',
'sencholai_orphanage_bombing': 'chencholai_bombing',
'senchus_fer_nalban': 'senchus_fer_n-alban',
'send_and_wait': 'stop-and-wait_arq',
'send_clinical_trial': 'standard_for_exchange_of_non-clinical_data',
'send_more_money': 'verbal_arithmetic',
'send_yourself_roses.': 'kathleen_turner',
'sendai_kuji_hongi': 'kujiki',
'sendai_seaside_railway': 'sendai_rinkai_railway',
'sendai_television_broadcasting': 'sendai_television',
'sender_address_verification': 'challenge-response_spam_filtering',
'sender_permitted_from': 'sender_policy_framework',
'sender_signing_policy': 'author_domain_signing_practices',
'sendika_kristianina_malagasy': 'christian_confederation_of_malagasy_trade_unions',
'sending_money_home': 'send_money_home',
'sending_to_coventry': 'send_to_coventry',
'seneca_college_bus': 'seneca_college',
'seneca_college_residences': 'seneca_college',
'seneca_falls_convection': 'seneca_falls_convention',
'seneca_the_rhetorician': 'seneca_the_elder',
'seneca_the_young': 'seneca_the_younger',
'seneca_valley_virus': 'seneca_valley_virus-001',
'seneca_yorkdale_campus': 'seneca_college',
'senegal_date_palm': 'phoenix_reclinata',
'senegal_football_team': 'senegal_national_football_team',
'senegal_golden_dartlet': 'common_bluetail',
'senegal_red_cross': 'senegalese_red_cross_society',
'senegal_wattled_plover': 'african_wattled_lapwing',
'senegalese_electricity_company': 'senelec',
'senegalese_progressive_union': 'socialist_party_of_senegal',
'senegalese_red_cross': 'senegalese_red_cross_society',
'seneschal_of_jerusalem': 'officers_of_the_kingdom_of_jerusalem',
'seneshal_of_anjou': 'seneschal_of_anjou',
'seng_song_poh': 'poh_seng_song',
'sengalipuram_anantarama_dikhshitar': 'sengalipuram_anantarama_dikshitar',
'senghori_concentration_camp': 'sunghori_concentration_camp',
'sengkang-punggol_football_club': 'sengkang_punggol_fc',
'sengkang_lrt_station': 'sengkang_mrt/lrt_station',
'sengkang_lrt_system': 'sengkang_lrt_line',
'sengkang_marine_f.c.': 'sengkang_punggol_fc',
'sengkang_marine_fc': 'sengkang_punggol_fc',
'sengkang_mrt_station': 'sengkang_mrt/lrt_station',
'sengkang_punggol_f.c.': 'sengkang_punggol_fc',
'senglea_athletic_fc': 'senglea_athletic_f.c.',
'senglea_athletics_f.c.': 'senglea_athletic_f.c.',
'senglea_athletics_fc': 'senglea_athletic_f.c.',
'sengoku_jietai_1549': 'sengoku_jieitai_1549',
'sengoku_lightweight_championship': 'world_victory_road',
'sengoku_majin_goshogun': 'goshogun',
'sengoku_musou_2': 'samurai_warriors_2',
'sengoku_musou_3': 'samurai_warriors_3',
'sengoku_musou_katana': 'samurai_warriors:_katana',
'sengoku_musou_wave': 'samurai_warriors:_katana',
'sengoku_no_mikaduki': 'crescent_moon_in_the_warring_states',
'sengoku_no_mikazuki': 'crescent_moon_in_the_warring_states',
'sengoku_no_ran': 'world_victory_road_presents:_sengoku_no_ran_2009',
'senhaja_de_srair': 'senhaja_de_srair_language',
'senhor_do_socorro': 'church_of_our_lord_of_socorro',
'senhora_da_hora': 'nossa_senhora_da_hora',
'senica_army_depot': 'seneca_army_depot',
'senicide_in_antiquity': 'senicide',
'senile_cardiac_amyloidosis': 'cardiac_amyloidosis',
'senile_macular_degeneration': 'macular_degeneration',
'senile_squalor_syndrome': 'diogenes_syndrome',
'senior_ado_positions': 'senior_australian_defence_organisation_positions',
'senior_berwick_prize': 'berwick_prize',
'senior_black_correspondent': 'larry_wilmore',
'senior_british_open': 'senior_british_open_championship',
'senior_cambridge_examination': 'senior_cambridge',
'senior_circuit_judge': 'chief_judge',
'senior_citizen_railcard': 'senior_railcard',
'senior_civil_servant': 'civil_service',
'senior_classical_league': 'national_senior_classical_league',
'senior_content_designer': 'content_designer',
'senior_costs_judge': 'judiciary_of_england_and_wales',
'senior_crown_program': 'sr-71_blackbird',
'senior_cut_day': 'senior_skip_day',
'senior_discount_card': 'discounts_and_allowances',
'senior_ditch_day': 'senior_skip_day',
'senior_engineering_group': 'senior_plc',
'senior_enlisted_adviser': 'senior_enlisted_advisor',
'senior_european_tour': 'european_seniors_tour',
'senior_flight_officer': 'flight_officer',
'senior_grand_prix': 'isu_grand_prix_of_figure_skating',
'senior_high_school': 'high_school',
'senior_high_schools': 'high_school',
'senior_law_lord': 'senior_lord_of_appeal_in_ordinary',
'senior_legislative_assistant': 'legislative_assistant',
'senior_legislative_assistants': 'legislative_assistant',
'senior_major_championship': 'senior_major_golf_championships',
'senior_major_championships': 'senior_major_golf_championships',
'senior_managing_director': 'managing_director',
'senior_mathematical_challenge': 'united_kingdom_mathematics_trust',
'senior_maths_challenge': 'united_kingdom_mathematics_trust',
'senior_military_colleges': 'senior_military_college',
'senior_missileman_badge': 'missile_badge',
'senior_patrol_leader': 'scout_leader',
'senior_pga_tour': 'champions_tour',
'senior_police_constable': 'constable',
'senior_police_officer': 'corporal',
'senior_prank_day': 'senior_prank',
'senior_software_engineer': 'software_engineering',
'senior_tennis_tour': 'outback_champions_series',
'senior_tour_championship': 'charles_schwab_cup_championship',
'senior_under_officer': 'under_officer',
'senior_university_international': 'rutherford_university',
'senior_vice_president': 'vice_president',
'senior_wrangler_sauce': 'brandy_butter',
'seniors_british_open': 'senior_british_open_championship',
'seniory_of_biscay': 'lord_of_biscay',
'senjou_no_valkyria': 'valkyria_chronicles',
'senkai-den_hoshin_engi': 'hoshin_engi',
'senkai-den_houshen_engi': 'hoshin_engi',
'senkai-den_houshin_engi': 'hoshin_engi',
'senkaiden_houshen_engi': 'hoshin_engi',
'senko_no_ronde': 'wartech:_senko_no_ronde',
'senkou_no_ronde': 'wartech:_senko_no_ronde',
'sennar_penduline_tit': 'sennar_penduline-tit',
'sennen_and_abdon': 'abdon_and_sennen',
'sennen_no_yuki': 'millennium_snow',
'sennett_mrt_station': 'potong_pasir_mrt_station',
'sennoy_municipal_district': 'sennoy_municipal_okrug',
'senrab_f._c.': 'senrab_f.c.',
'senrab_f_c': 'senrab_f.c.',
'sensate_focus_exercises': 'sensate_focusing',
'sensational_adelaide_500': 'adelaide_500',
'senschal_of_anjou': 'seneschal_of_anjou',
'sense_of_balance': 'equilibrioception',
'sense_of_humor': 'humour',
'sense_of_humour': 'humour',
'sense_of_sight': 'visual_perception',
'sense_of_smell': 'olfaction',
'sense_of_touch': 'somatosensory_system',
'sensei_and_ninomiya': 'minami-ke',
'sensei_anthony_stultz': 'anthony_stultz',
'sensei_no_ojikan': 'doki_doki_school_hours',
'sensient_technologies_corp': 'sensient_technologies_corporation',
'sensitive_compartmented_information': 'classified_information_in_the_united_states',
'sensitive_personal_information': 'personally_identifiable_information',
'sensitivity_to_light': 'photophobia',
'sensitivity_to_sunlight': 'photophobia',
'sensor_data_fusion': 'sensor_fusion',
'sensor_fuse_weapon': 'cbu-97_sensor_fuzed_weapon',
'sensor_fused_weapon': 'cbu-97_sensor_fuzed_weapon',
'sensor_fuze_weapon': 'cbu-97_sensor_fuzed_weapon',
'sensor_fuzed_weapon': 'cbu-97_sensor_fuzed_weapon',
'sensorineural_hearing_impairment': 'sensorineural_hearing_loss',
'sensory_art_therapies': 'art_therapy',
'sensory_autonomic_system': 'autonomic_nervous_system',
'sensory_deprivation_chamber': 'isolation_tank',
'sensory_deprivation_tank': 'isolation_tank',
'sensory_deprivation_tanks': 'isolation_tank',
'sensory_disorientation_technique': 'sensory_deprivation',
'sensory_evidental_mood': 'sensory_evidential_mood',
'sensory_evoked_potentials': 'evoked_potential',
'sensory_integration_disfunction': 'sensory_integration_dysfunction',
'sensory_integration_disorder': 'sensory_integration_dysfunction',
'sensory_integration_training': 'sensory_integration',
'sensory_integrative_dysfunction': 'sensory_integration_dysfunction',
'sensory_integrative_therapy': 'sensory_integration_therapy',
'sensory_motor_rhythm': 'sensorimotor_rhythm',
'sensory_motor_rhythym': 'sensorimotor_rhythm',
'sensory_nerve_cell': 'sensory_neuron',
'sensory_nervous_system': 'sensory_system',
'sensory_seeking_disorder': 'sensory_integration_dysfunction',
'sent_into_exile': 'exile',
'sent_to_coventry': 'send_to_coventry',
'sentai_go_ranger': 'himitsu_sentai_goranger',
'sentence_completion_test': 'sentence_completion_tests',
'sentence_final_particle': 'sentence-final_particle',
'sentence_plan_language': 'spl_notation',
'sentenced_italian_mps': 'convicted_italian_mps',
'sentenced_to_death': 'capital_punishment',
'sentient_world_simulation': 'synthetic_environment_for_analysis_and_simulations',
'sentimental_night_1': 'sentimental_graffiti',
'sentimental_night_2': 'sentimental_graffiti',
'sentinel_island_lighthouse': 'sentinel_island_light',
'sentinel_node_biopsy': 'sentinel_lymph_node',
'sentinel_node_procedure': 'sentinel_lymph_node',
'sentinel_rock_thrush': 'sentinel_rock-thrush',
'sentinel_wagon_works': 'sentinel_waggon_works',
'sento_mecha_xabungle': 'combat_mecha_xabungle',
'sento_no_musume': 'mari_yaguchi',
'sento_ruminasu_jogakuin': 'st._luminous_mission_high_school',
'sentosa_4d_magix': 'sentosa',
'sentosa_beach_station': 'beach_monorail_station',
'sentosa_express_lrt': 'sentosa_express',
'sentral_kl-ipoh_route': 'sentral_kl-ipoh-sentral_kl_route',
'sentric_music_publishing': 'sentric_music',
'sentry_equity_services': 'sentry_insurance',
'sentry_life_insurance': 'sentry_insurance',
'sentul-port_klang_line': 'sentul-port_klang_route',
'sentul_timur-ampang_line': 'ampang_line',
'senuthius_of_buasti': 'abadir_and_iraja',
'senzoku_gakuen_college': 'senzoku_gakuen_college_of_music',
'seo_for_google': 'google_search',
'seo_in_young': 'seo_in-young',
'seo_jae_pil': 'seo_jae-pil',
'seo_jeong_min': 'seo_jeong-min',
'seo_ji_hye': 'seo_ji-hye',
'seo_ji_yung': 'seo_ji_young',
'seo_moon_tak': 'seomoon_tak',
'seo_tae_ji': 'seo_taiji',
'seo_tai_ji': 'seo_taiji',
'seo_yong_song': 'buriki_one',
'seocho-gu_office_station': 'yangjae_station',
'seocho_samsung_town': 'samsung_town',
'seogwipo_worldcup_stadium': 'jeju_world_cup_stadium',
'seoirse_mac_tomais': 'george_derwent_thomson',
'seojeong_college_station': 'deokjeong_station',
'seol_kyeong_gu': 'sol_kyung-gu',
'seol_leong_tang': 'seolleongtang',
'seol_long_tang': 'seolleongtang',
'seol_nong_tang': 'seolleongtang',
'seol_reong_tang': 'seolleongtang',
'seon_hui_mun': 'mun_seon_hui',
'seon_mu_do': 'sunmudo',
'seonaid_nic_lachlainn': 'jessie_maclachlan',
'seong_gyun_gwan': 'seonggyungwan',
'seong_hyeon_ah': 'seong_hyeon-ah',
'seong_ok_oh': 'oh_seong-ok',
'seong_yeop_lee': 'lee_seung-yeop',
'seorabeol_art_college': 'chung-ang_university',
'seorabeol_art_university': 'chung-ang_university',
'seorabeol_arts_college': 'chung-ang_university',
'seorabeol_arts_university': 'chung-ang_university',
'seosamh_mac_magnais': 'joseph_macmanus',
'seosan_citizen_fc': 'yesan_football_club',
'seosan_omega_fc': 'yesan_football_club',
'seotaiji_and_boys': 'seo_taiji',
'seoul-incheon_metropolitan_area': 'seoul_national_capital_area',
'seoul-kimpo_international_airport': 'gimpo_international_airport',
'seoul_adventist_hospital': 'sahmyook_medical_center',
'seoul_art_center': 'seoul_arts_center',
'seoul_arts_centre': 'seoul_arts_center',
'seoul_capital_area': 'seoul_national_capital_area',
'seoul_capital_region': 'seoul_national_capital_area',
'seoul_castle_town': 'seoul',
'seoul_city_plaza': 'seoul_plaza',
'seoul_city_tourism': 'seoul_metropolitan_government',
'seoul_disney_resort': 'seoul',
'seoul_international_airport': 'incheon_international_airport',
'seoul_land_station': 'seoul_grand_park_station',
'seoul_martyrs_fc': 'seoul_fc_martyrs',
'seoul_metropolitan_area': 'seoul_national_capital_area',
'seoul_metropolitan_police': 'seoul_metropolitan_police_agency',
'seoul_music_award': 'seoul_music_awards',
'seoul_stock_exchange': 'korea_stock_exchange',
'seow_siew_jin': 'seow_sieu_jin',
'sepahan_f._c.': 'sepahan_f.c.',
'sepahan_f_c': 'sepahan_f.c.',
'sepahbod_ahmad_amir-ahmadi': 'ahmad_amir-ahmadi',
'sepan_civil_war': 'star_wars:_tie_fighter',
'separable_field_extension': 'separable_extension',
'separable_metric_space': 'separable_space',
'separable_quantum_state': 'separable_states',
'separable_quantum_states': 'separable_states',
'separable_random_variable': 'tightness_of_measures',
'separable_topological_space': 'separable_space',
'separably_closed_field': 'algebraic_closure',
'separate_amenities_act': 'reservation_of_separate_amenities_act',
'separate_car_bill': 'separate_car_act',
'separate_car_law': 'separate_car_act',
'separate_legal_person': 'legal_person',
'separate_legal_personality': 'legal_person',
'separate_peace_agreement': 'separate_peace',
'separate_primorye_army': 'separate_coastal_army',
'separate_school_education': 'separate_school',
'separate_school_funding': 'separate_school',
'separate_school_system': 'separate_school',
'separated_by_neighbourhoods': 'separated_sets',
'separating_hyperplane_theorem': 'separating_axis_theorem',
'separating_line_theorem': 'separating_axis_theorem',
'separating_plane_theorem': 'separating_axis_theorem',
'separation_anxiety_test': 'separation_anxiety_disorder',
'separation_by_variables': 'separation_of_variables',
'separation_in_medicine': 'comparison_of_md_and_do_in_the_united_states',
'separation_of_cases': 'proof_by_exhaustion',
'separation_of_chemicals': 'separation_process',
'separation_of_concern': 'separation_of_concerns',
'separation_of_mixture': 'separation_process',
'separation_of_mixtures': 'separation_process',
'separation_of_power': 'separation_of_powers',
'separation_of_variable': 'separation_of_variables',
'separations_of_duties': 'separation_of_duties',
'sepasi_shiraz_f.c.': 'moghavemat_sepasi_f.c.',
'sepentine_pipehead_dam': 'serpentine_pipehead_dam',
'seperate_but_equal': 'separate_but_equal',
'seperation_of_powers': 'separation_of_powers',
'seperation_of_variables': 'separation_of_variables',
'sepharadim_religious_party': 'shas',
'sephardi_hebrew_dialect': 'sephardi_hebrew',
'sephardi_hebrew_language': 'sephardi_hebrew',
'sephardi_jewish_cuisine': 'cuisine_of_the_sephardic_jews',
'sephardic_bikor_holim': 'sephardic_bikur_holim',
'sephardic_hebrew_dialect': 'sephardi_hebrew',
'sephardic_hebrew_language': 'sephardi_hebrew',
'sephardic_jewish_cuisine': 'cuisine_of_the_sephardic_jews',
'sephardic_pizmonim_project': 'pizmonim',
'sephardim_religious_party': 'shas',
'sepher_toldos_jeschu': 'toledot_yeshu',
'sepia_officinalis_jurujubai': 'caribbean_reef_squid',
'sepia_officinalis_mediterranea': 'common_cuttlefish',
'sepic_coast_exchange': 'sepik_coast_exchange',
'seppala_siberian_sleddogs': 'seppala_siberian_sleddog',
'septa_broad-ridge_spur': 'broad_street_line',
'septa_bus_operations': 'septa',
'septa_commuter_rail': 'septa_regional_rail',
'septa_route_100': 'norristown_high_speed_line',
'septa_route_101': 'septa_routes_101_and_102',
'septa_route_102': 'septa_routes_101_and_102',
'septa_suburban_rail': 'septa_regional_rail',
'septa_subway-elevated_lines': 'septa',
'septa_transit_police': 'southeastern_pennsylvania_transportation_authority_transit_police',
'septa_trolley_lines': 'septa',
'septal_heart_defect': 'congenital_heart_defect',
'september_11_2001': 'september_11_attacks',
'september_11_attack': 'september_11_attacks',
'september_11_demolitions': 'world_trade_center_controlled_demolition_conspiracy_theories',
'september_11_hijacker': 'hijackers_in_the_september_11_attacks',
'september_11_hijackers': 'hijackers_in_the_september_11_attacks',
'september_11_remembrance': 'patriot_day',
'september_11th_2001': 'september_11_attacks',
'september_11th_attack': 'september_11_attacks',
'september_11th_attacks': 'september_11_attacks',
'september_11th_station': 'once_railway_station',
'september_18_incident': 'mukden_incident',
'september_18th_incident': 'mukden_incident',
'september_2001_attacks': 'september_11_attacks',
'september_2007_deaths': 'deaths_in_2007',
'september_3_society': 'jiusan_society',
'septembri_9_stadion': 'chernomorets_stadium',
'septemvri_pri_cdv': 'pfc_cska_sofia',
'septemvri_pri_tsdv': 'pfc_cska_sofia',
'septennial_act_1716': 'septennial_act_1715',
'septima_p._clark': 'septima_poinsette_clark',
'septimal_diatonic_semitone': 'major_diatonic_semitone',
'septimal_major_second': 'septimal_whole_tone',
'septimal_minor_seventh': 'harmonic_seventh',
'septimus_severus_bridge': 'severan_bridge',
'septinu_bralu_diena': 'latvian_mythology',
'septinu_guletaju_diena': 'latvian_mythology',
'septum_mobile_nasi': 'nasal_septum',
'septum_of_brain': 'septum_pellucidum',
'septum_of_tongue': 'lingual_septum',
'sepulchre_of_songs': 'a_sepulchre_of_songs',
'sequel_to_dreamcast': 'dreamcast',
'sequence_and_series': 'sequence',
'sequence_retrieval_system': 'embnet',
'sequence_similarity_search': 'sequence_alignment',
'sequence_space_arithmetic': 'serial_number',
'sequenced_packet_exchange': 'ipx/spx',
'sequential_access_method': 'sequential_access',
'sequential_bayesian_filtering': 'recursive_bayesian_estimation',
'sequential_closure_operator': 'sequential_space',
'sequential_fuel_injection': 'fuel_injection',
'sequential_importance_sampling': 'particle_filter',
'sequential_linear_programming': 'successive_linear_programming',
'sequential_manual_gearbox': 'electrohydraulic_manual_transmission',
'sequential_move_puzzles': 'combination_puzzle',
'sequential_still_video': 'freeze_frame_television',
'sequential_valve_timing': 's-vt',
'sequestered_in_memphis': 'stay_positive',
'sequestration_of_lung': 'pulmonary_sequestration',
'sequoia-kings_canyon_freeway': 'california_state_route_180',
'sequoia_canyon_freeway': 'california_state_route_180',
'ser._sulpicius_rufus': 'servius_sulpicius_rufus',
'ser_and_estar': 'romance_copula',
'ser_b_class': 'secr_b1_class',
'ser_miang_ng': 'ng_ser_miang',
'ser_o_class': 'secr_o_class',
'ser_ou_parecer': 'ser_o_parecer',
'serafim_of_sarov': 'seraphim_of_sarov',
'serafino_cardinal_vannutelli': 'serafino_vannutelli',
'serafino_de_montegranaro': 'seraphin_of_montegranaro',
'seraiki_nationalist_movement': 'saraiki_nationalist_movement',
'serampur_union_institution': 'serampore_union_institution',
'serangoon_gardens_estate': 'serangoon_gardens',
'seraphim_of_athens': 'archbishop_seraphim_of_athens',
'serapion_of_thmuis': 'thmuis',
'serapion_of_thumis': 'sacramentary_of_serapion_of_thmuis',
'serato_scratch_live': 'scratch_live',
'serb-hungarian_baranja-baja_republic': 'serbian-hungarian_baranya-baja_republic',
'serb-hungarian_baranja_republic': 'serbian-hungarian_baranya-baja_republic',
'serb-hungarian_baranya-baja_republic': 'serbian-hungarian_baranya-baja_republic',
'serb-hungarian_baranya_republic': 'serbian-hungarian_baranya-baja_republic',
'serb_christmas_traditions': 'serbian_christmas_traditions',
'serb_democratic_party': 'serbian_democratic_party',
'serb_epic_poetry': 'serbian_epic_poetry',
'serb_epic_songs': 'serbian_epic_poetry',
'serb_orthodox_christian': 'serbian_orthodox_church',
'serb_orthodox_church': 'serbian_orthodox_church',
'serb_orthodox_churches': 'serbian_orthodox_church',
'serb_patriarch_pavle': 'patriarch_pavle_of_serbia',
'serb_progressive_party': 'serbian_progressive_party',
'serb_radical_party': 'serbian_radical_party',
'serbia_and_montenegro/communications': 'telecommunications_in_serbia',
'serbia_and_montenegro/economy': 'economy_of_serbia_and_montenegro',
'serbia_and_montenegro/geography': 'geography_of_serbia_and_montenegro',
'serbia_and_montenegro/government': 'politics_of_serbia_and_montenegro',
'serbia_and_montenegro/history': 'serbia_and_montenegro',
'serbia_and_montenegro/military': 'military_of_yugoslavia',
'serbia_and_montenegro/people': 'demographics_of_serbia_and_montenegro',
'serbia_and_montenegro/transportation': 'serbia_and_montenegro',
'serbia_and_monternagro': 'serbia_and_montenegro',
'serbia_football_team': 'serbia_national_football_team',
'serbia_in_wwi': 'history_of_modern_serbia',
'serbia_nad_montenegro': 'serbia_and_montenegro',
'serbia_to_tokyo': 'srbija_do_tokija',
'serbian-hungarian_baranja-baja_republic': 'serbian-hungarian_baranya-baja_republic',
'serbian-hungarian_baranja_republic': 'serbian-hungarian_baranya-baja_republic',
'serbian-hungarian_baranya_republic': 'serbian-hungarian_baranya-baja_republic',
'serbian-south_african_relations': 'foreign_relations_of_serbia',
'serbian_air_force': 'serbian_air_force_and_air_defense',
'serbian_area_codes': 'telephone_numbers_in_serbia',
'serbian_assault_corps': 'serbian_state_guard',
'serbian_chetnik_movement': 'chetniks',
'serbian_chetniks_movement': 'chetniks',
'serbian_contemporary_art': 'serbian_art',
'serbian_cyrillic_language': 'serbian_cyrillic_alphabet',
'serbian_cyrillic_script': 'serbian_cyrillic_alphabet',
'serbian_election_results': 'elections_in_serbia_and_montenegro',
'serbian_epic_poem': 'serbian_epic_poetry',
'serbian_film_director': 'cinema_of_serbia',
'serbian_given_name': 'serbian_name',
'serbian_given_names': 'serbian_name',
'serbian_intelligence_agency': 'security_information_agency',
'serbian_language_modernisation': 'serbian_language',
'serbian_military_ranks': 'serbian_military_ranks_and_insignia',
'serbian_mountain_hound': 'montenegrin_mountain_hound',
'serbian_national_assembly': 'national_assembly_of_serbia',
'serbian_orthodox_christian': 'serbian_orthodox_church',
'serbian_orthodox_christianity': 'serbian_orthodox_church',
'serbian_ortodox_church': 'serbian_orthodox_church',
'serbian_presidential_election': 'elections_in_serbia',
'serbian_rock_music': 'serbian_rock',
'serbian_romany_language': 'romano-serbian_language',
'serbian_royal_academy': 'serbian_academy_of_sciences_and_arts',
'serbian_rugby_league': 'serbia_national_rugby_league_team',
'serbian_state_guards': 'serbian_state_guard',
'serbian_strength_movement': 'strength_of_serbia_movement',
'serbian_tri-color_hound': 'serbian_tricolour_hound',
'serbian_tri-colour_hound': 'serbian_tricolour_hound',
'serbian_tricolor_hound': 'serbian_tricolour_hound',
'serbian_volunteer_guard': 'serb_volunteer_guard',
'serbian_white_eagles': 'serbian_white_eagles_fc',
'serbo-montenegrin_football_league': 'serbian_superliga',
'serbo_croatian_language': 'serbo-croatian_language',
'serbs_from_montenegro': 'serbs_in_montenegro',
'serbs_in_america': 'serbian_american',
'serbs_in_canada': 'canadians_of_serbian_ancestry',
'serbs_in_croatia': 'serbs_of_croatia',
'serbs_in_macedonia': 'serbs_in_the_republic_of_macedonia',
'serbs_of_albania': 'serbs_in_albania',
'serbs_of_bosnia': 'serbs_of_bosnia_and_herzegovina',
'serbs_of_bosnia-herzegovina': 'serbs_of_bosnia_and_herzegovina',
'serbs_of_dubrovnik': 'serbs_in_dubrovnik',
'serbs_of_hungary': 'serbs_in_hungary',
'serbs_of_kosovo': 'serbs_in_kosovo',
'serbs_of_macedonia': 'serbs_in_the_republic_of_macedonia',
'serbs_of_montenegro': 'serbs_in_montenegro',
'serbs_of_mostar': 'serbs_in_mostar',
'serbs_of_romania': 'serbs_in_romania',
'serbs_of_sarajevo': 'serbs_in_sarajevo',
'serbs_of_timisoara': 'serbs_in_romania',
'serbs_of_vojvodina': 'serbs_in_vojvodina',
'serca_foodservices_incorporated': 'sysco',
'serco_group_plc': 'serco_group',
'serdang_komuter_station': 'serdang_railway_station',
'serdang_power_station': 'putrajaya_power_station',
'serec_eth_zurich': 'serec',
'seregno_f.b.c._1913': 'u.s.d._1913_seregno_calcio',
'seregno_fbc_1913': 'u.s.d._1913_seregno_calcio',
'serei_no_moribito': 'moribito:_guardian_of_the_spirit',
'seremban-bukit_nanas_highway': 'seremban-bukit_nenas_highway',
'seremban_komuter_station': 'seremban_railway_station',
'seremban_municipal_council': 'majlis_perbandaran_seremban',
'seremban_state_mosque': 'negeri_sembilan_state_mosque',
'seren_y_saint': 'prophwyd_y_jubili',
'serena_jameka_williams': 'serena_williams',
'serena_st._germaine': 'james_bond_007:_everything_or_nothing',
'serenade_of_water': 'the_legend_of_zelda:_ocarina_of_time',
'serene_and_pearl': 'considering_lily',
'serengeti_volcanic_grasslands': 'serengeti',
'serenity:those_left_behind': 'serenity:_those_left_behind',
'serenity_virtual_station': 'svista',
'sereno_e._dwight': 'sereno_edwards_dwight',
'sereno_elisha_payne': 'sereno_e._payne',
'serenus_of_antinoeia': 'serenus_of_antinouplis',
'seretse_ian_khama': 'ian_khama',
'serf_emancipation_day': 'serfs_emancipation_day',
'serf_liberation_day': 'serfs_emancipation_day',
'serfaus_village_railway': 'dorfbahn_serfaus',
'serfdom_in_russia': 'russian_serfdom',
'serfdom_in_tibet': 'serfdom_in_tibet_controversy',
'serfs_liberation_day': 'serfs_emancipation_day',
'serge_alain_liri': 'alain_liri',
'serge_alain_maguy': 'serge_maguy',
'serge_alexandre_stavisky': 'alexandre_stavisky',
'serge_alexandrovitch_koussevitzky': 'sergei_koussevitzky',
'serge_and_bacchus': 'saints_sergius_and_bacchus',
'serge_arboleda_university': 'sergio_arboleda_university',
'serge_de_nimes': 'denim',
'serge_dikulu_bageta': 'dikilu_bageta',
'serge_ivan_chermayeff': 'serge_chermayeff',
'serge_jonas_ibaka': 'serge_ibaka',
'serge_modular_synthesizer': 'serge_synthesizer',
'serge_of_radonezh': 'sergius_of_radonezh',
'serge_teyssot_gay': 'serge_teyssot-gay',
'sergeant_1st_class': 'sergeant',
'sergeant_at_arms': 'serjeant-at-arms',
'sergeant_at_law': 'serjeant-at-law',
'sergeant_buck_frobisher': 'buck_frobisher',
'sergeant_charlie_hacker': 'allan_melvin',
'sergeant_darren_manzella': 'darren_manzella',
'sergeant_john_benton': 'sergeant_benton',
'sergeant_john_bradley': 'conflict:_desert_storm',
'sergeant_john_ordway': 'john_ordway',
'sergeant_lincoln_osiris': 'tropic_thunder',
'sergeant_major_academy': 'united_states_army_sergeants_major_academy',
'sergeant_major_clay': 'eric_clay',
'sergeant_major_coin': 'challenge_coin',
'sergeant_major_johndrow': 'philip_f._johndrow',
'sergeant_robbie_lewis': 'inspector_lewis',
'sergeant_stephen_trujillo': 'stephen_trujillo',
'sergeant_stewart_graeme': 'stewart_graeme_guthrie',
'sergeant_york_gun': 'm247_sergeant_york',
'sergeants_major_academy': 'united_states_army_sergeants_major_academy',
'sergei_a._kan': 'sergei_kan',
'sergei_a._markov': 'sergey_alexandrovich_markov',
'sergei_afanasyevich_toropov': 'sergei_toropov',
'sergei_aleksandrovich_esenin': 'sergei_yesenin',
'sergei_aleksandrovich_volkov': 'sergey_alexandrovich_volkov',
'sergei_aleksandrovich_yesenin': 'sergei_yesenin',
'sergei_alekseevich_chaplygin': 'sergey_chaplygin',
'sergei_alekseyevich_lebediv': 'sergei_alekseyevich_lebedev',
'sergei_alexandrovich_belov': 'sergei_belov',
'sergei_alexandrovich_bershadski': 'sergei_aleksandrovich_bershadski',
'sergei_alexandrovich_esenin': 'sergei_yesenin',
'sergei_andreyevich_muromtsev': 'sergey_muromtsev',
'sergei_arkadevich_vronsky': 'sergei_vronsky',
'sergei_b._ivanov': 'sergei_ivanov',
'sergei_borisovich_ivanov': 'sergei_ivanov',
'sergei_dmitreyevich_sazonov': 'sergey_sazonov',
'sergei_dmitrievich_stanishev': 'sergei_stanishev',
'sergei_dmitriyevich_sazonov': 'sergey_sazonov',
'sergei_eduardovich_bortkiewicz': 'sergei_bortkiewicz',
'sergei_escobar_roure': 'sergi_escobar',
'sergei_evgenyevich_naryshkin': 'sergey_naryshkin',
'sergei_f._oldenburg': 'sergey_oldenburg',
'sergei_fedorovich_bondarchuk': 'sergei_bondarchuk',
'sergei_fedorovich_oldenburg': 'sergey_oldenburg',
'sergei_feodorovich_akhromeev': 'sergey_akhromeyev',
'sergei_gavrilovich_navashin': 'sergei_navashin',
'sergei_geradievich_nechayev': 'sergey_nechayev',
'sergei_grigoriyevich_stroganov': 'stroganovs',
'sergei_i._vavilov': 'sergey_ivanovich_vavilov',
'sergei_ivanovich_beljawsky': 'sergey_belyavsky',
'sergei_ivanovich_belyavsky': 'sergey_belyavsky',
'sergei_ivanovich_kavtaradze': 'sergey_kavtaradze',
'sergei_ivanovich_rudenko': 'sergei_rudenko',
'sergei_ivanovich_taneiev': 'sergei_taneyev',
'sergei_ivanovich_taneyev': 'sergei_taneyev',
'sergei_ivanovich_vavilov': 'sergey_ivanovich_vavilov',
'sergei_k._krikalev': 'sergei_krikalyov',
'sergei_konstantinovich_krikalev': 'sergei_krikalyov',
'sergei_konstantinovich_krikalyov': 'sergei_krikalyov',
'sergei_lvovich_sobolev': 'sergei_sobolev',
'sergei_m._eisenstein': 'sergei_eisenstein',
'sergei_michailovich_trufanoff': 'sergei_trufanov',
'sergei_michailowitsch_eisenstein': 'sergei_eisenstein',
'sergei_mikhailovich_darkin': 'sergey_mikhaylovich_darkin',
'sergei_mikhailovich_eisenstein': 'sergei_eisenstein',
'sergei_mikhailovich_ignatiev': 'sergei_mikhailovich_ignatyev',
'sergei_mikhailovich_kravchinski': 'sergei_kravchinski',
'sergei_mikhailovich_liapunov': 'sergei_lyapunov',
'sergei_mikhailovich_makarov': 'sergei_makarov',
'sergei_mikhailovich_prokhudin-gorskii': 'sergey_prokudin-gorsky',
'sergei_mikhailovich_prokudin-gorskii': 'sergey_prokudin-gorsky',
'sergei_mikhailovich_soloviev': 'sergey_solovyov',
'sergei_mikhailovich_tretyakov': 'sergei_tretyakov',
'sergei_mikhas_mikhailov': 'sergei_mikhailov',
'sergei_mikhaylovich_makarov': 'sergei_makarov',
'sergei_mironovich_kirov': 'sergey_kirov',
'sergei_n._kournakoff': 'sergey_nikolaevich_kurnakov',
'sergei_nikiforovich_vassilenko': 'sergei_vasilenko',
'sergei_nikitovich_mergelyan': 'sergey_mergelyan',
'sergei_nikolaevich_abeltsev': 'sergey_abeltsev',
'sergei_nikolaevich_blazhko': 'sergey_blazhko',
'sergei_nikolaevich_lebedev': 'sergei_lebedev',
'sergei_nikolaevich_reformatskii': 'sergey_reformatsky',
'sergei_nikolajevitch_bulgakov': 'sergei_bulgakov',
'sergei_of_radonezh': 'sergius_of_radonezh',
'sergei_p._mavrodi': 'sergei_panteleevich_mavrodi',
'sergei_p._novikov': 'sergei_petrovich_novikov',
'sergei_pavlovich_diaghilev': 'sergei_diaghilev',
'sergei_pavlovich_korolev': 'sergey_korolyov',
'sergei_pavlovich_korolov': 'sergey_korolyov',
'sergei_pavlovich_korolyev': 'sergey_korolyov',
'sergei_pavlovich_korolyov': 'sergey_korolyov',
'sergei_pavlovich_nepobedimiy': 'sergey_nepobedimiy',
'sergei_petrovich_melgunov': 'sergei_melgunov',
'sergei_petrovych_novikov': 'serhiy_novikov',
'sergei_prokudin_gorskii': 'sergey_prokudin-gorsky',
'sergei_s._bryukhonenko': 'sergey_bryukhonenko',
'sergei_s._prokofiev': 'sergei_prokofiev',
'sergei_semenovitch_uvarov': 'sergey_uvarov',
'sergei_sergeevich_chetverikov': 'sergei_chetverikov',
'sergei_sergeevich_voskresenski': 'sergei_chetverikov',
'sergei_sergeievich_korsakoff': 'sergei_korsakoff',
'sergei_sergeyevich_kamenev': 'sergei_kamenev',
'sergei_sergeyevich_korsakov': 'sergei_korsakoff',
'sergei_sergeyevich_prokofiev': 'sergei_prokofiev',
'sergei_timofeyevich_aksakov': 'sergey_aksakov',
'sergei_v._lebedev': 'sergei_vasiljevich_lebedev',
'sergei_v._zalyotin': 'sergei_zalyotin',
'sergei_vadimovich_stepashin': 'sergei_stepashin',
'sergei_vasilevich_rachmaninov': 'sergei_rachmaninoff',
'sergei_vasilievich_rachmaninoff': 'sergei_rachmaninoff',
'sergei_vasilyevich_lebedev': 'sergei_vasiljevich_lebedev',
'sergei_vasilyevich_rachmaninoff': 'sergei_rachmaninoff',
'sergei_viktorovich_lavrov': 'sergey_lavrov',
'sergei_viktorovich_zalyotin': 'sergei_zalyotin',
'sergei_vitalyevich_bezrukov': 'sergey_bezrukov',
'sergei_vladimirovich_bodrov': 'sergei_bodrov',
'sergei_vladimirovich_brylin': 'sergei_brylin',
'sergei_vladimirovich_ilyushin': 'sergey_ilyushin',
'sergei_vladimirovich_mandreko': 'sergei_mandreko',
'sergei_vladimirovich_mikhalkov': 'sergey_mikhalkov',
'sergei_wassilievitch_rachmaninoff': 'sergei_rachmaninoff',
'sergei_y._treschev': 'sergei_treshchov',
'sergei_y._treshchev': 'sergei_treshchov',
'sergei_yevgenyevich_suponev': 'sergei_suponev',
'sergei_yulievich_witte': 'sergei_witte',
'sergei_yulyevich_witte': 'sergei_witte',
'sergej_alekseevich_chaplygin': 'sergey_chaplygin',
'sergej_b._iwanow': 'sergei_ivanov',
'sergej_von_glazenap': 'sergey_von_glazenap',
'sergey_adamovich_kovalev': 'sergei_kovalev',
'sergey_afanasyevich_toropov': 'sergei_toropov',
'sergey_aleksandrovich_volkov': 'sergey_alexandrovich_volkov',
'sergey_aleksandrovich_yesenin': 'sergei_yesenin',
'sergey_alekseyevich_khristianovich': 'sergey_khristianovich',
'sergey_dmitrievich_bushuyev': 'sergey_bushuyev',
'sergey_dmitrievich_merkurov': 'sergey_merkurov',
'sergey_dmitrievich_sazonov': 'sergey_sazonov',
'sergey_dmitriyevich_sazonov': 'sergey_sazonov',
'sergey_fyodorovich_platonov': 'sergey_platonov',
'sergey_g._kara-murza': 'sergey_kara-murza',
'sergey_g._nechayev': 'sergey_nechayev',
'sergey_gennadiyevich_nechayev': 'sergey_nechayev',
'sergey_georgyevich_gorshkov': 'sergey_gorshkov',
'sergey_i._koksharov': 'sergey_koksharov',
'sergey_ivanovich_kavtaradze': 'sergey_kavtaradze',
'sergey_ivanovich_muravyov-apostol': 'sergey_muravyov-apostol',
'sergey_ivanovich_taneyev': 'sergei_taneyev',
'sergey_juljevich_vitte': 'sergei_witte',
'sergey_leonidovich_katanandov': 'sergey_katanandov',
'sergey_mikhailovich_brin': 'sergey_brin',
'sergey_mikhailovich_darkin': 'sergey_mikhaylovich_darkin',
'sergey_mikhailovich_eisenstein': 'sergei_eisenstein',
'sergey_mikhaylovich_eisenstein': 'sergei_eisenstein',
'sergey_mikhaylovich_lyapunov': 'sergei_lyapunov',
'sergey_mikhaylovich_makarov': 'sergei_makarov',
'sergey_mikhaylovich_prokudin-gorsky': 'sergey_prokudin-gorsky',
'sergey_mikhaylovich_solovyev': 'sergey_solovyov',
'sergey_mikhaylovich_solovyov': 'sergey_solovyov',
'sergey_mironovich_kirov': 'sergey_kirov',
'sergey_nikanorovich_sirotkin': 'sergey_sirotkin',
'sergey_nikiforovich_vasilenko': 'sergei_vasilenko',
'sergey_nikolaevich_blazhko': 'sergey_blazhko',
'sergey_nikolayevich_bulgakov': 'sergei_bulgakov',
'sergey_nikolayevich_winogradsky': 'sergei_winogradsky',
'sergey_of_radonezh': 'sergius_of_radonezh',
'sergey_p._korolev': 'sergey_korolyov',
'sergey_panteleevich_mavrodi': 'sergei_panteleevich_mavrodi',
'sergey_parajanov_museum': 'sergei_parajanov_museum',
'sergey_pavlovich_diaghilev': 'sergei_diaghilev',
'sergey_pavlovich_dyagilev': 'sergei_diaghilev',
'sergey_pavlovich_korolev': 'sergey_korolyov',
'sergey_pavlovich_korolyov': 'sergey_korolyov',
'sergey_pavlovich_urusevsky': 'sergey_urusevsky',
'sergey_petrovich_novikov': 'sergei_petrovich_novikov',
'sergey_sergeyevich_korsakov': 'sergei_korsakoff',
'sergey_sergeyevich_prokofiev': 'sergei_prokofiev',
'sergey_sergeyevich_sidorsky': 'sergey_sidorsky',
'sergey_timofeyevich_aksakov': 'sergey_aksakov',
'sergey_v._lavrov': 'sergey_lavrov',
'sergey_vasilyevich_lebedev': 'sergei_vasilyevich_zubatov',
'sergey_vasilyevich_rachmaninoff': 'sergei_rachmaninoff',
'sergey_vasilyevich_rachmaninov': 'sergei_rachmaninoff',
'sergey_vasilyevich_zubatov': 'sergei_vasilyevich_zubatov',
'sergey_viktorovich_lavrov': 'sergey_lavrov',
'sergey_vitalyevich_bezrukov': 'sergey_bezrukov',
'sergey_vladimirovich_ilyushiin': 'sergey_ilyushin',
'sergey_vladimirovich_ilyushin': 'sergey_ilyushin',
'sergey_vladimirovich_mandreko': 'sergei_mandreko',
'sergey_vladimirovich_mikhalkov': 'sergey_mikhalkov',
'sergey_vladimirovich_obraztsov': 'sergey_obraztsov',
'sergey_vsevolodovich_yablonsky': 'sergey_yablonsky',
'sergey_yosifovich_paradzhanov': 'sergei_parajanov',
'sergey_yulyevich_witte': 'sergei_witte',
'serghei_ivanovich_rogaciov': 'serghei_rogaciov',
'sergi_bruguera_torner': 'sergi_bruguera',
'sergi_busquets_burgos': 'sergio_busquets',
'sergi_escobar_roure': 'sergi_escobar',
'sergio_aguayo_quezada': 'sergio_aguayo',
'sergio_alejandro_diaz': 'sergio_alejandro',
'sergio_andres_roitman': 'sergio_roitman',
'sergio_aragoneses_almeida': 'sergio_aragoneses',
'sergio_cardinal_guerri': 'sergio_guerri',
'sergio_cardinal_pignedoli': 'sergio_pignedoli',
'sergio_cardinal_sebastiani': 'sergio_sebastiani',
'sergio_casal_martinez': 'sergio_casal',
'sergio_daniel_batista': 'sergio_batista',
'sergio_escobar_roure': 'sergi_escobar',
'sergio_galvarino_apablaza': 'sergio_apablaza',
'sergio_javier_barila': 'sergio_barila',
'sergio_javier_goycochea': 'sergio_goycochea',
'sergio_javier_goycoechea': 'sergio_goycochea',
'sergio_jose_bastida': 'sergio_bastida',
'sergio_matabuena_delgado': 'sergio_matabuena',
'sergio_mota_mello': 'sergio_mota',
'sergio_pitol_demeneghi': 'sergio_pitol',
'sergio_ramos_garcia': 'sergio_ramos',
'sergio_sapo_livingstone': 'sergio_livingstone',
'sergio_van_dijk': 'serginho_van_dijk',
'sergio_vela_martinez': 'sergio_vela',
'sergios_and_bakchos': 'saints_sergius_and_bacchus',
'sergios_kai_bakchos': 'saints_sergius_and_bacchus',
'sergiu_florin_nicolaescu': 'sergiu_nicolaescu',
'sergiu_marian_radu': 'sergiu_radu',
'sergius_and_bacchus': 'saints_sergius_and_bacchus',
'sergius_and_herman': 'sergius_of_valaam',
'sergius_mironovitch_kirov': 'smk_tank',
'sergius_of_radonez': 'sergius_of_radonezh',
'serguei_alexandrovich_klimov': 'serguei_klimov',
'sergy_nikolaevich_glinka': 'sergey_glinka',
'seri_culture_collection': 'seri_microalgae_culture_collection',
'seri_setia_mahkota': 'order_of_the_crown_of_malaysia',
'serial_ata_2': 'serial_ata',
'serial_ata_ii': 'serial_ata',
'serial_binary_adders': 'serial_binary_adder',
'serial_binary_subtracter': 'serial_binary_adder',
'serial_communication_bus': 'serial_communication',
'serial_communication_interface': 'serial_communication',
'serial_console_server': 'console_server',
'serial_digital_video': 'rec._601',
'serial_endosymbiosis_theory': 'endosymbiotic_theory',
'serial_experiements_lain': 'serial_experiments_lain',
'serial_experiments:_lain': 'serial_experiments_lain',
'serial_hybrid_vehicle': 'hybrid_vehicle_drivetrain',
'serial_killer_art': 'murderabilia',
'serial_killer_memorabilia': 'murderabilia',
'serial_line_ip': 'serial_line_internet_protocol',
'serial_order_learning': 'sequence_learning',
'serial_peripheral_interface': 'serial_peripheral_interface_bus',
'serial_port_pinouts': 'rs-232',
'serial_position_curve': 'serial_position_effect',
'serial_sexual_predator': 'sexual_predator',
'serial_terminal_server': 'console_server',
'serial_to_ethernet': 'console_server',
'serie_a_1944': 'campionato_alta_italia_1944',
'serie_c_supercup': 'supercoppa_di_serie_c',
'serie_del_caribe': 'caribbean_series',
'series_60_browser': 'web_browser_for_s60',
'series_63_license': 'uniform_securities_agent_state_law_exam',
'series_65_license': 'uniform_investment_adviser_law_exam',
'series_66_license': 'uniform_combined_state_law_exam',
'series_700t_shinkansen': 'taiwan_high_speed_700t_train',
'series_7_license': 'general_securities_representative_exam',
'series_e1_shinkansen': 'e1_series_shinkansen',
'series_e2_shinkansen': 'e2_series_shinkansen',
'series_e3_shinkansen': 'e3_series_shinkansen',
'series_e4_shinkansen': 'e4_series_shinkansen',
'series_e_bonds': 'series_e_bond',
'series_ee_bonds': 'series_e_bond',
'series_i_80': 'land_rover_defender',
'series_n700_shinkansen': 'n700_series_shinkansen',
'series_of_books': 'book_series',
'series_of_seven': 'heptalogy',
'series_parallel_graph': 'series-parallel_graph',
'series_plug-in_hybrid': 'plug-in_hybrid',
'seriile_filmului_gay': 'gay_film_nights',
'serine-type_d-ala-d-ala_carboxypeptidase': 'dd-transpeptidase',
'serine/threonine-selective_protein_kinase': 'serine/threonine-specific_protein_kinase',
'serine/threonine-specific_protein_kinases': 'serine/threonine-specific_protein_kinase',
'serine/threonine_intracellular_kinase': 'serine/threonine-specific_protein_kinase',
'serine/threonine_kinase_11': 'stk11',
'serine/threonine_kinase_receptor': 'serine/threonine-specific_protein_kinase',
'serine/threonine_protein_kinase': 'serine/threonine-specific_protein_kinase',
'serine/threonine_receptor_kinase': 'protein_kinase',
'serine_palmitoyl_transferase': 'serine_c-palmitoyltransferase',
'serine_protease_inhibitor': 'serpin',
'serine_protease_inhibitors': 'serpin',
'serine_proteinase_inhibitors': 'serine_protease',
'serine_type_carboxypeptidase': 'serine_protease',
'serinus_canaria_domestica': 'domestic_canary',
'serinus_citrinella_corsicana': 'corsican_finch',
'serinus_citrinella_corsicanus': 'corsican_finch',
'seriola_lalandi_lalandi': 'yellowtail_amberjack',
'serious_crash_unit': 'scu:_serious_crash_unit',
'serious_crime_squad': 'west_midlands_serious_crime_squad',
'serious_digitalis_intoxication': 'digoxin',
'serious_sam_2': 'serious_sam_ii',
'serious_sam_advance': 'serious_sam',
'seriously_groovy_records': 'seriously_groovy',
'serjeant_at_arms': 'serjeant-at-arms',
'serjeant_at_law': 'serjeant-at-law',
'serjeants_at_arms': 'serjeant-at-arms',
'serjeants_at_law': 'serjeant-at-law',
'serkadji_prison_massacre': 'serkadji_prison',
'serkadji_prison_mutiny': 'serkadji_prison',
'serlo_de_berci': 'serlo_de_burci',
'serlo_de_burcy': 'serlo_de_burci',
'serlo_of_hauteville': 'serlo_i_of_hauteville',
'serlon_of_savigny': 'congregation_of_savigny',
'serment_de_strasbourg': 'oaths_of_strasbourg',
'serment_de_strazburg': 'oaths_of_strasbourg',
'serments_de_strasbourg': 'oaths_of_strasbourg',
'serments_de_strazburg': 'oaths_of_strasbourg',
'sermon_the_mount': 'sermon_on_the_mount',
'sernache_de_bonjardim': 'cernache_do_bonjardim',
'serotonin-catecholamine_reuptake_inhibitor': 'serotonin-norepinephrine-dopamine_reuptake_inhibitor',
'serotonin-dopamine_releasing_agent': 'releasing_agent',
'serotonin-noradrenaline-dopamine_reuptake_inhibitor': 'serotonin-norepinephrine-dopamine_reuptake_inhibitor',
'serotonin-norepinephrine-dopamine_releasing_agent': 'releasing_agent',
'serotonin-norepinephrine_releasing_agent': 'releasing_agent',
'serotonin-specific_reuptake_inhibitor': 'selective_serotonin_reuptake_inhibitor',
'serotonin-specific_reuptake_inhibitors': 'selective_serotonin_reuptake_inhibitor',
'serotonin/norepinephrine/dopamine_reuptake_inhibitor': 'serotonin-norepinephrine-dopamine_reuptake_inhibitor',
'serotonin_receptor_antagonist': 'serotonin_antagonist',
'serotonin_releasing_agent': 'releasing_agent',
'serotonin_reuptake_enhancer': 'selective_serotonin_reuptake_enhancer',
'serotonin_reuptake_inhibitors': 'selective_serotonin_reuptake_inhibitor',
'serotonin_syndrome_crisis': 'serotonin_syndrome',
'serotonin_uptake_inhibitor': 'serotonin_reuptake_inhibitor',
'serotonin_uptake_inhibitors': 'selective_serotonin_reuptake_inhibitor',
'serpant_seed_doctrine': 'serpent_seed',
'serpent_island_centipede': 'scolopendra_abnormis',
'serpent_mound_disturbance': 'serpent_mound_crater',
'serpent_poker_solitaire': 'poker_square',
'serpent_seed_doctrine': 'serpent_seed',
'serpentaire_de_madagascar': 'madagascar_serpent-eagle',
'serpentaire_des_andaman': 'andaman_serpent-eagle',
'serpents_of_light': 'followers_of_set',
'serphin_r._maltese': 'serphin_maltese',
'serpin_peptidase_inhibitor': 'serpin',
'serra_da_borborema': 'borborema_plateau',
'serra_da_estrella': 'serra_da_estrela',
'serra_da_gardunha': 'gardunha',
'serra_da_ibiapaba': 'serra_de_ibiapaba',
'serra_da_mantiqueira': 'mantiqueira_mountains',
'serra_de_bussaco': 'serra_do_bussaco',
'serra_de_chela': 'serra_de_chella',
'serra_de_estrela': 'serra_da_estrela',
'serra_de_sintra': 'sintra_mountains',
'serra_do_araripe': 'chapada_do_araripe',
'serra_dos_ancares': 'sierra_de_ancares',
'serramonte_del_rey': 'jefferson_union_high_school_district',
'serramonte_high_school': 'jefferson_union_high_school_district',
'serrano_chili_pepper': 'serrano_pepper',
'serranus_c._hastings': 'serranus_clinton_hastings',
'serrated_flange_nut': 'serrated_face_nut',
'serrated_hinge-back_tortoise': 'common_tortoise',
'serrated_hinge-backed_tortoise': 'common_tortoise',
'serrated_seborrheic_keratosis': 'stucco_keratosis',
'serratus_posterior_inferior': 'serratus_posterior_inferior_muscle',
'serratus_posterior_superior': 'serratus_posterior_superior_muscle',
'serravalle_di_norcia': 'norcia',
'serre_twist_sheaf': 'tautological_bundle',
'sertanense_f._c.': 'sertanense_f.c.',
'sertanense_f_c': 'sertanense_f.c.',
'sertanense_futebol_clube': 'sertanense_f.c.',
'serthar_buddhist_institute': 'jigme_phuntsok',
'sertoli-leydig_cell_tumor': 'sertoli-leydig_cell_tumour',
'sertoli_cell_tumor': 'sertoli_cell_tumour',
'sertoma_clubs_international': 'sertoma_international',
'serues_7_license': 'general_securities_representative_exam',
'serum-ascities_albumin_gradient': 'serum-ascites_albumin_gradient',
'serum_amyloid_p': 'serum_amyloid_p_component',
'serum_amyloid_p-component': 'serum_amyloid_p_component',
'serum_amyloid_protein': 'serum_amyloid_p_component',
'serum_osmolal_gap': 'serum_osmotic_gap',
'serum_trypsin_inhibitor': 'alpha_1-antitrypsin',
'servaes_de_konink': 'servaes_de_koninck',
'servandus_and_germanus': 'servandus_and_cermanus',
'servant_of_glaaki': 'glaaki',
'servant_of_sin': 'gospel_of_john',
'servants_and_agents': 'vicarious_liability',
'servants_of_mary': 'servite_order',
'serve_america_act': 'edward_m._kennedy_serve_america_act',
'serve_the_servants': 'in_utero',
'servelliance_detection_report': 'computer_assisted_passenger_prescreening_system',
'server_form_factor': 'rack_unit',
'server_gated_crypto': 'server_gated_cryptography',
'server_network_block': 'lantastic',
'server_operating_system': 'operating_system',
'server_side_code': 'server-side_scripting',
'server_side_coder': 'server-side_scripting',
'server_side_coding': 'server-side_scripting',
'server_side_include': 'server_side_includes',
'server_side_javascript': 'server-side_javascript',
'server_side_redirect': 'server-side_redirect',
'server_side_script': 'server-side_scripting',
'server_side_scripting': 'server-side_scripting',
'serverware_group_plc': 'serverware_group',
'servette_fc_geneve': 'servette_fc',
'servette_geneve_fc': 'servette_fc',
'service-oriented_business_application': 'service-oriented_architecture',
'service-oriented_modeling_framework': 'service-oriented_modeling',
'service-oriented_modelling_framework': 'service-oriented_modeling',
'service_area_interface': 'serving_area_interface',
'service_as_worship': 'rule_of_saint_benedict',
'service_by_fax': 'service_of_process',
'service_by_mail': 'service_of_process',
'service_by_publication': 'service_of_process',
'service_corp._international': 'service_corporation_international',
'service_correctionnel_canada': 'correctional_service_of_canada',
'service_creek-mitchell_highway': 'oregon_route_207',
'service_cross_award': 'service_award_cross',
'service_data_object': 'service_data_objects',
'service_discovery_protocol': 'service_discovery',
'service_dress_cap': 'peaked_cap',
'service_electric_cable': 'service_electric',
'service_endpoint_interface': 'endpoint_interface',
'service_engine_soon': 'malfunction_indicator_lamp',
'service_force_squadron': 'servron',
'service_forces_atlantic': 'comservlant',
'service_industries_limited': 'servis_industries',
'service_level_agreements': 'service_level_agreement',
'service_level_management': 'information_technology_infrastructure_library',
'service_level_objective': 'service_level_objectives',
'service_management_framework': 'ibm_service_management_framework',
'service_of_supply': 'services_of_supply',
'service_orientated_architecture': 'service-oriented_architecture',
'service_oriented_architecture': 'service-oriented_architecture',
'service_oriented_architectures': 'service-oriented_architecture',
'service_oriented_computing': 'service-oriented_architecture',
'service_oriented_programming': 'service-oriented_programming',
'service_oriented_provisining': 'service_oriented_provisioning',
'service_oriented_transformation': 'service-oriented_transformation',
'service_pack_1': 'service_pack',
'service_pack_2': 'service_pack',
'service_pack_3': 'service_pack',
'service_pack_4': 'service_pack',
'service_pack_6': 'service_pack',
'service_portfolio_management': 'it_portfolio_management',
'service_rover_award': 'scouts_of_china',
'serviced_apartment_complex': 'serviced_apartment',
'services_for_unix': 'microsoft_windows_services_for_unix',
'services_trade_statistics': 'trade_in_services_statistics',
'servicio_de_lavanderia': 'laundry_service',
'servicio_militar_nacional': 'conscription_in_mexico',
'serving_in_silence': 'serving_in_silence:_the_margarethe_cammermeyer_story',
'servius_sulpicius_galba': 'galba',
'servizio_informazioni_difesa': 'sismi',
'servo_erector_set': 'robot_kit',
'servomechanical_torsion_facilitator': 'wrench',
'servpro_of_rockwall/rowlett': 'servpro',
'ses_astra_techcom': 'ses_astra',
'ses_s._a.': 'ses_s.a.',
'ses_s_a': 'ses_s.a.',
'sesame_prawn_toast': 'shrimp_toast',
'sesame_street_canada': 'sesame_park',
'sesame_street_episodes': 'sesame_street',
'sesame_street_universe': 'sesame_street',
'sese_seko_mobutu': 'mobutu_sese_seko',
'seshadri_srinivasa_ayyangar': 's._srinivasa_iyengar',
'seshadri_srinivasa_iyengar': 's._srinivasa_iyengar',
'sesotho_sa_leboa': 'northern_sotho_language',
'sessile_drop_method': 'sessile_drop_technique',
'session_border_controllers': 'session_border_controller',
'session_data_pollution': 'session_poisoning',
'session_of_parliament': 'legislative_session',
'sessions_band_tour': 'bruce_springsteen_with_the_seeger_sessions_band_tour',
'sestroretsk_rail_station': 'sestroretsk_railway_station',
'set-theoretic_reflection_principles': 'reflection_principle',
'set50_and_set100': 'set50_index_and_set100_index',
'set_america_free': 'corporate_average_fuel_economy',
'set_aside_grants': 'set-aside',
'set_builder_notation': 'set-builder_notation',
'set_covering_problem': 'set_cover_problem',
'set_direct_product': 'direct_product',
'set_el_bait': 'sitt_al-bayt',
'set_el_beyt': 'sitt_al-bayt',
'set_if_off': 'set_it_off',
'set_of_integers': 'integer',
'set_of_points': 'railroad_switch',
'set_of_sets': 'family_of_sets',
'set_reconstruction_conjecture': 'reconstruction_conjecture',
'set_regional_airport': 'southeast_texas_regional_airport',
'set_top_box': 'set-top_box',
'set_top_boxes': 'set-top_box',
'set_user_id': 'setuid',
'seta_usa_inc.': 'seta_corporation',
'setauket_elementary_school': 'three_village_central_school_district',
'setauket_spy_ring': 'culper_ring',
'setenta_veces_siete': 'the_female:_seventy_times_seven',
'seth_a._robinson': 'robinson_technologies',
'seth_able_robinson': 'robinson_technologies',
'seth_adam_meyers': 'seth_meyers',
'seth_b._nicholson': 'seth_barnes_nicholson',
'seth_b._thornton': 'thornton_affair',
'seth_b._waxman': 'seth_p._waxman',
'seth_c._chandler': 'seth_carlo_chandler',
'seth_c._moffat': 'seth_c._moffatt',
'seth_chase_taft': 'seth_taft',
'seth_crittenden_moffatt': 'seth_c._moffatt',
'seth_david_schoen': 'seth_schoen',
'seth_de_witt': 'seth_sigsby',
'seth_e._meek': 'seth_eugene_meek',
'seth_f._henriett': 'henriett_seth_f.',
'seth_g._jones': 'seth_jones',
'seth_g._persons': 'gordon_persons',
'seth_gordon_persons': 'gordon_persons',
'seth_h._yocum': 'seth_hartman_yocum',
'seth_jane_roberts': 'jane_roberts',
'seth_kofi_obeng': 'seth_obeng',
'seth_l._phelps': 'seth_ledyard_phelps',
'seth_lakshman_chandra': 'lakshman_chandra_seth',
'seth_m._barton': 'seth_barton',
'seth_m._gates': 'seth_merrill_gates',
'seth_m_hall': 'seth_hall',
'seth_maxwell_barton': 'seth_barton',
'seth_michael_hall': 'seth_hall',
'seth_w._cobb': 'seth_wallace_cobb',
'seth_walchand_hirachand': 'walchand_hirachand',
'seth_woodbury_macfarlane': 'seth_macfarlane',
'sethi_advertising_model': 'sethi_model',
'sethu_lakshmi_bai': 'sethu_lakshmi_bayi',
'sethusamudram_canal_project': 'sethusamudram_shipping_canal_project',
'sethusamudram_shipping_canal': 'sethusamudram_shipping_canal_project',
'setia_alam_interchange': 'setia_alam_highway',
'setiaku_di_sini': 'setia_ku_di_sini',
'setif_international_airport': 'ain_arnat_airport',
'setl2_programming_language': 'setl2',
'setl_programming_language': 'setl',
'seto_great_bridge': 'great_seto_bridge',
'seto_inland_sea': 'inland_sea',
'seto_na_hanayome': 'seto_no_hanayome',
'seto_ohashi_bridge': 'great_seto_bridge',
'seto_uchi_jakucho': 'jakucho_setouchi',
'seto_uthumi_open': 'munsingwear_open_ksb_cup',
'seton_hall_college': 'seton_hall_university',
'seton_hall_fire': 'boland_hall_fire',
'seton_hall_law': 'seton_hall_university_school_of_law',
'seton_hall_prep': 'seton_hall_preparatory_school',
'seton_hall_report': 'seton_hall_reports',
'seton_hall_study': 'seton_hall_reports',
'seton_i_miller': 'seton_i._miller',
'seton_la_salle': 'seton-la_salle_catholic_high_school',
'seton_lake_band': 'seton_lake_first_nation',
'seton_lake_reservoir': 'seton_lake',
'seton_scholl_limited': 'ssl_international',
'sets_of_uniqueness': 'set_of_uniqueness',
'setske_de_haan': 'cissy_van_marxveldt',
'setsoto_national_stadium': 'setsoto_stadium',
'sette_volte_sette': 'seven_times_seven',
'settimana_ciclista_lombarda': 'settimana_ciclistica_lombarda',
'settimana_ciclistica_coppi-bartali': 'settimana_internazionale_di_coppi_e_bartali',
'settimana_ciclistica_internazionale': 'settimana_internazionale_di_coppi_e_bartali',
'settle_and_carlisle': 'settle-carlisle_line',
'settlement_act_1662': 'poor_relief_act_1662',
'settlement_on_mars': 'colonization_of_mars',
'settlements_in_israel': 'geography_of_israel',
'settlers_of_canaan': 'the_settlers_of_canaan',
'settlers_of_cataan': 'the_settlers_of_catan',
'settlers_of_catan': 'the_settlers_of_catan',
'setu_bull_coins': 'setu_coins',
'setu_lakshmi_bai': 'sethu_lakshmi_bayi',
'setu_lakshmi_bayi': 'sethu_lakshmi_bayi',
'setu_madhavrao_pagdi': 'setumadhavrao_pagdi',
'setur_antalya_marina': 'marinas_in_turkey',
'setur_ayvalik_marina': 'marinas_in_turkey',
'setur_finike_marina': 'marinas_in_turkey',
'setur_kusadasi_marina': 'marinas_in_turkey',
'seung_bae_lee': 'lee_seung-bae',
'seung_hu_cho': 'seung-hui_cho',
'seung_hui_cho': 'seung-hui_cho',
'seung_hye_jeong': 'jung_seung-hye',
'seung_hye_jung': 'jung_seung-hye',
'seung_min_ryu': 'ryu_seung-min',
'seung_mo_shon': 'shon_seung-mo',
'seung_sahn_lee': 'seung_sahn',
'seung_yeon_woo': 'woo_seung-yeon',
'seung_yeop_lee': 'lee_seung-yeop',
'seussical_the_musical': 'seussical',
'seva_novgorodsev_mbe': 'seva_novgorodsev',
'sevagram_/_sewagram': 'sevagram',
'sevan_ross_sensei': 'sevan_ross',
'sevart_de_bovill': 'sewal_de_bovil',
'sevastopol_marine_plant': 'sevastopol_shipyard',
'seven-foot_knoll_lighthouse': 'seven_foot_knoll_light',
'seven-layer_osi_model': 'osi_model',
'seven-spotted_lady_beetle': 'coccinella_septempunctata',
'seven_ancient_wonders': 'seven_wonders_of_the_ancient_world',
'seven_armed_octopus': 'seven-arm_octopus',
'seven_arts_pictures': 'seven_arts_productions',
'seven_bad_words': 'seven_dirty_words',
'seven_branched_sword': 'seven-branched_sword',
'seven_caged_tigers': 'tiny_music..._songs_from_the_vatican_gift_shop',
'seven_capital_sins': 'seven_deadly_sins',
'seven_capital_vices': 'seven_deadly_sins',
'seven_capital_virtues': 'seven_virtues',
'seven_card_stud': 'seven-card_stud',
'seven_cardinal_sins': 'seven_deadly_sins',
'seven_cardinal_virtues': 'seven_virtues',
'seven_churches_monastery': 'geghard',
'seven_clans_casino': 'seven_clans_casinos',
'seven_corners_inc.': 'seven_corners_inc',
'seven_day_campaign': 'seven_days_battles',
'seven_day_theory': 'the_don_killuminati:_the_7_day_theory',
'seven_days_battle': 'seven_days_battles',
'seven_days_campaign': 'seven_days_battles',
'seven_deadly_brothers': 'great_ten',
'seven_deadly_wonders': 'seven_ancient_wonders',
'seven_deadly_words': 'seven_dirty_words',
'seven_devils_swamp': 'seven_devils_lake',
'seven_dials_mystery': 'the_seven_dials_mystery',
'seven_dirty_word': 'seven_dirty_words',
'seven_domed_mosque': 'saat_masjid',
'seven_ecumenical_councils': 'first_seven_ecumenical_councils',
'seven_enemy_offensives': 'seven_anti-partisan_offensives',
'seven_five_three': 'shichi-go-san',
'seven_fortune_gods': 'seven_lucky_gods',
'seven_from_heaven': 'angels_revenge',
'seven_greater_antiphons': 'o_antiphon',
'seven_heavenly_objects': 'classical_planet',
'seven_heavenly_virtues': 'seven_virtues',
'seven_holy_brothers': 'felicitas_of_rome',
'seven_holy_founders': 'servite_order',
'seven_holy_virtues': 'seven_virtues',
'seven_investment_management': 'great_depression',
'seven_iron_men': 'seven_iron_brothers',
'seven_islands_republic': 'septinsular_republic',
'seven_just_titles': 'spanish_colonization_of_the_americas',
'seven_lady_godivas': 'the_seven_lady_godivas',
'seven_lakes_parkway': 'seven_lakes_drive',
'seven_lakes_valley': 'triglav_lakes_valley',
'seven_last_words': 'sayings_of_jesus_on_the_cross',
'seven_league_boots': 'seven-league_boots',
'seven_liberal_arts': 'liberal_arts',
'seven_little_fortunes': 'peking_opera_school',
'seven_m._barnum': 'william_henry_barnum',
'seven_martin_place': 'atn',
'seven_mary_3': 'seven_mary_three',
'seven_morning_news': 'seven_news',
'seven_mule_barnum': 'william_henry_barnum',
'seven_nasty_words': 'seven_dirty_words',
'seven_national_news': 'seven_network',
'seven_network_logos': 'seven_network',
'seven_nightly_news': 'seven_news',
'seven_noachide_commandments': 'seven_laws_of_noah',
'seven_noahide_laws': 'seven_laws_of_noah',
'seven_oaks_lake': 'seven_oaks_reservoir',
'seven_of_clubs': 'playing_card',
'seven_of_diamonds': 'playing_card',
'seven_of_hearts': 'playing_card',
'seven_of_spades': 'playing_card',
'seven_plus_seven': 'up_series',
'seven_point_circle': 'brocard_circle',
'seven_princes_dynasty': 'jedton_dynasty',
'seven_root_races': 'root_race',
'seven_seas_cup': 'nedbank_cup',
'seven_seas_entertainment/gomanga.com': 'seven_seas_entertainment',
'seven_second_delay': 'broadcast_delay',
'seven_segment_display': 'seven-segment_display',
'seven_segment_indicator': 'seven-segment_display',
'seven_sided_star': 'heptagram',
'seven_simple_machines': 'simple_machine',
'seven_star_mountain': 'qixing_mountain',
'seven_steps_verse': 'the_quatrain_of_seven_steps',
'seven_story_mountain': 'seven_storey_mountain',
'seven_string_guitar': 'seven-string_guitar',
'seven_string_guitars': 'seven-string_guitar',
'seven_television_network': 'seven_network',
'seven_transmembrane_protein': 'g_protein-coupled_receptor',
'seven_transmembrane_receptor': 'g_protein-coupled_receptor',
'seven_transmembrane_region': 'g_protein-coupled_receptor',
'seven_triglav_lakes': 'triglav_lakes_valley',
'seven_united_provinces': 'dutch_republic',
'seven_weeks_war': 'austro-prussian_war',
'seven_wise_men': 'seven_sages_of_greece',
'seven_year_itch': 'the_seven_year_itch',
'seven_years_itch': 'the_seven_year_itch',
'seven_yellow_gypsies': 'the_gypsy_laddie',
'sevenoaks_grammar_school': 'sevenoaks_school',
'sevenoaks_gravel_pits': 'sevenoaks_wildlife_reserve',
'sevenoaks_nature_reserve': 'sevenoaks_wildlife_reserve',
'sevenoaks_prep_school': 'sevenoaks_preparatory_school',
'sevenoaks_town_fc': 'sevenoaks_town_f.c.',
'sevenoaks_urban_district': 'sevenoaks',
'sevenoaks_wildfowl_reserve': 'sevenoaks_wildlife_reserve',
'sevens_world_series': 'irb_sevens_world_series',
'seventeen_article_constitution': 'seventeen-article_constitution',
'seventeen_point_agreement': 'seventeen_point_agreement_for_the_peaceful_liberation_of_tibet',
'seventeen_year_locust': 'magicicada',
'seventeenth_ecumenical_council': 'council_of_florence',
'seventeenth_naval_district': 'united_states_naval_districts',
'seventeenth_of_tamuz': 'seventeenth_of_tammuz',
'seventh-day_adventist_apocalyptic': 'seventh-day_adventist_eschatology',
'seventh-day_adventist_beliefs': 'seventh-day_adventist_theology',
'seventh-day_adventist_doctrine': 'seventh-day_adventist_theology',
'seventh-day_adventist_encyclopedia': 'seventh-day_adventist_commentary_reference_series',
'seventh-day_adventist_history': 'history_of_the_seventh-day_adventist_church',
'seventh-day_adventist_polity': 'government_of_the_seventh-day_adventist_church',
'seventh-day_adventist_school': 'seventh-day_adventist_education',
'seventh-day_adventist_schools': 'seventh-day_adventist_education',
'seventh-day_adventist_studies': 'adventist_studies',
'seventh-day_adventist_teaching': 'seventh-day_adventist_theology',
'seventh_anti-partisan_offensive': 'raid_on_drvar',
'seventh_art_productions': 'phil_grabsky',
'seventh_cervical_vertebra': 'vertebra_prominens',
'seventh_cranial_nerve': 'facial_nerve',
'seventh_cross_evolution': 'seventh_cross:_evolution',
'seventh_day_advantism': 'seventh-day_adventist_church',
'seventh_day_advantist': 'seventh-day_adventist_church',
'seventh_day_adventis': 'seventh-day_adventist_church',
'seventh_day_adventism': 'seventh-day_adventist_church',
'seventh_day_adventist': 'seventh-day_adventist_church',
'seventh_day_adventists': 'seventh-day_adventist_church',
'seventh_day_baptism': 'seventh_day_baptist',
'seventh_day_baptists': 'seventh_day_baptist',
'seventh_day_eventists': 'seventh-day_adventist_church',
'seventh_dragoon_guards': 'dragoon_guards',
'seventh_ecumenical_council': 'second_council_of_nicaea',
'seventh_edition_unix': 'version_7_unix',
'seventh_enemy_offensive': 'raid_on_drvar',
'seventh_five-year_plan': 'five-year_plans_of_china',
'seventh_general_council': 'second_council_of_nicaea',
'seventh_generation_amendment': 'common_property_amendment',
'seventh_generation_standard': 'seventh_generation',
'seventh_inning_stretch': 'seventh-inning_stretch',
'seventh_naval_district': 'united_states_naval_districts',
'seventh_of_february': 'february_7',
'seventh_panchen_lama': 'palden_tenpai_nyima',
'seventh_party_system': 'fifth_party_system',
'seventh_street_bridge': 'andy_warhol_bridge',
'seventh_us_army': 'seventh_united_states_army',
'seventh_zimbabwe_parliament': 'seventh_zimbabwean_parliament',
'seventy_first_classical': 'seventy-first_classical_middle_school',
'seventy_first_hs': 'seventy-first_high_school',
'seventy_weeks_prophecy': 'prophecy_of_seventy_weeks',
'sever_and_ecks': 'ballistic:_ecks_vs._sever',
'several_complex_variable': 'several_complex_variables',
'severan_marble_plan': 'forma_urbis_romae',
'severe_cyclonic_storm': 'tropical_cyclone',
'severe_mental_retardation': 'mental_retardation',
'severe_tropical_cyclone': 'tropical_cyclone',
'severe_tropical_storm': 'tropical_cyclone',
'severe_tyre_damage': 'severe_tire_damage',
'severe_weather_spotting': 'storm_spotter',
'severian_the_torturer': 'severian',
'severin_elektro_gmbh': 'severin_elektro',
'severino_cardinal_poletto': 'severino_poletto',
'severn_estuary_bridge': 'severn_bridge',
'severn_eyre_parker': 'severn_e._parker',
'severn_ojibwa_language': 'oji-cree_language',
'severn_rail_bridge': 'severn_railway_bridge',
'severn_tidal_power': 'severn_tidal_power_feasibility_study',
'severn_trent_plc': 'severn_trent',
'severn_trent_water': 'severn_trent',
'severn_tunnel_junction': 'severn_tunnel_junction_railway_station',
'severn_veiw_services': 'severn_view_services',
'severni_city_district': 'severny_district',
'severni_city_okrug': 'severny_district',
'severnii_city_district': 'severny_district',
'severnii_city_okrug': 'severny_district',
'severniy_city_district': 'severny_district',
'severniy_city_okrug': 'severny_district',
'severny_city_district': 'severny_district',
'severny_city_okrug': 'severny_district',
'severnyi_city_district': 'severny_district',
'severnyi_city_okrug': 'severny_district',
'severnyy_city_district': 'severny_district',
'severnyy_city_okrug': 'severny_district',
'severo_da_ravenna': 'severo_calzetta_da_ravenna',
'severodvinsk_class_submarine': 'graney_class_submarine',
'seversky_aircraft_company': 'republic_aviation',
'seversky_aircraft_corporation': 'republic_aviation',
'seversky_aviation_company': 'republic_aviation',
'seversky_aviation_corporation': 'republic_aviation',
'severus_de_gascogne': 'sereus',
'severus_sanctus_endelechus': 'severus_sanctus_endelechius',
'seveso_ii_directive': 'directive_96/82/ec',
'sevier_forest_reserve': 'sevier_national_forest',
'sevilla_futbol_club': 'sevilla_fc',
'sevin_devils_lake': 'seven_devils_lake',
'sewage_collection_systems': 'sewage_collection_and_disposal',
'sewage_treatment_plant': 'sewage_treatment',
'sewage_treatment_works': 'sewage_treatment',
'sewall_g._wright': 'sewall_wright',
'sewall_green_wright': 'sewall_wright',
'sewall_spaulding_farwell': 'sewall_s._farwell',
'sewall_wright_effect': 'genetic_drift',
'seward_b._collins': 'seward_collins',
'seward_bishop_collins': 'seward_collins',
'sewell_prescott_moody': 'sewell_moody',
'sewer_love_song': 'crying_nut',
'sewerage_pumping_station': 'pumping_station',
'sewing_and_tailoring': 'sewing',
'sex-determining_region_y': 'sry',
'sex-related_court_cases': 'sex-related_court_cases_in_the_united_states',
'sex_after_childbirth': 'sex_after_pregnancy',
'sex_among_public': 'public_sex',
'sex_and_alcohol': 'alcohol_and_sex',
'sex_and_crime': 'gender_and_crime',
'sex_and_law': 'sex_and_the_law',
'sex_and_magic': 'love_sex_magic',
'sex_and_religion': 'religion_and_sexuality',
'sex_as_exercise': 'sexual_intercourse',
'sex_before_16': 'adult_at_14_season',
'sex_before_marriage': 'fornication',
'sex_between_men': 'men_who_have_sex_with_men',
'sex_change_band': 'root_boy_slim',
'sex_change_op': 'sex_reassignment_surgery',
'sex_change_operation': 'sex_reassignment_surgery',
'sex_change_surgery': 'sex_reassignment_surgery',
'sex_cord-stromal_tumor': 'sex_cord-gonadal_stromal_tumour',
'sex_cord-stromal_tumour': 'sex_cord-gonadal_stromal_tumour',
'sex_determination_system': 'sex-determination_system',
'sex_determining_system': 'sex-determination_system',
'sex_development_disorder': 'disorders_of_sex_development',
'sex_differentiation_disorders': 'intersexuality',
'sex_discrimination_act': 'sex_discrimination_act_1975',
'sex_hormone_therapy': 'hormone_replacement_therapy',
'sex_in_public': 'public_sex',
'sex_in_sf': 'sex_and_sexuality_in_speculative_fiction',
'sex_limited_genes': 'sex-limited_genes',
'sex_machine_guns': 'sex_machineguns',
'sex_money_murder': 'sex_money_murda',
'sex_of_rearing': 'gender_of_rearing',
'sex_offences_act': 'sexual_offences_act',
'sex_offender_register': 'violent_and_sex_offender_register',
'sex_offender_registry': 'sex_offender_registration',
'sex_offenders_register': 'sex_offender_registration',
'sex_offenders_registry': 'sex_offender_registration',
'sex_on_fir': 'sex_on_fire',
'sex_positive_feminism': 'sex-positive_feminism',
'sex_positive_movement': 'sex-positive_movement',
'sex_radical_feminism': 'sex-positive_feminism',
'sex_reassignement_surgery': 'sex_reassignment_surgery',
'sex_selection_abortion': 'sex-selective_abortion',
'sex_selective_abortion': 'sex-selective_abortion',
'sex_sneezing_syndrome': 'sexually-induced_sneezing',
'sex_station_tv': 'sex_station',
'sex_with_animals': 'zoophilia',
'sex_with_dogs': 'zoophilia',
'sex_with_love': 'sexo_con_amor',
'sex_with_stranger': 'anonymous_sex',
'sex_with_sue': 'sunday_night_sex_show',
'sex_with_you': 'sex_wit_you',
'sex_without_consent': 'rape',
'sex_without_marriage': 'fornication',
'sex_worker_activists': ':category:sex_worker_activists',
'sex_workers_activists': ':category:sex_worker_activists',
'sexagesimal_number_system': 'sexagesimal',
'sexed_up_dossier': 'september_dossier',
'sexpartite_rib_vault': 'sexpartite_vault',
'sexred_of_essex': 'seaxred_of_essex',
'sextans_dwarf_galaxy': 'sextans_dwarf_spheroidal',
'sexto_antonio_berreyesa': 'rancho_las_putas',
'sextus_aurelius_victor': 'aurelius_victor',
'sextus_iulius_frontinus': 'sextus_julius_frontinus',
'sextus_iulius_severus': 'sextus_julius_severus',
'sextus_julius_frontius': 'sextus_julius_frontinus',
'sextus_julius_saturninus': 'julius_saturninus',
'sextus_tigidius_perennis': 'tigidius_perennis',
'sexual_arousal_disorders': 'sexual_arousal_disorder',
'sexual_assault_center': 'rape_crisis_center',
'sexual_assault_evidence': 'rape_kit',
'sexual_assault_kit': 'rape_kit',
'sexual_aversion_disorder': 'hypoactive_sexual_desire_disorder',
'sexual_chat_roleplay': 'cybersex',
'sexual_child_abuse': 'child_sexual_abuse',
'sexual_conversion_therapy': 'conversion_therapy',
'sexual_diversity_studies': 'queer_studies',
'sexual_harrassment_panda': 'sexual_harassment_panda',
'sexual_minority_cultures': 'sexuality_and_gender_identity-based_cultures',
'sexual_offenses_act': 'sexual_offences_act',
'sexual_orientation_stereotyping': 'lgbt_stereotypes',
'sexual_reassignment_surgery': 'sex_reassignment_surgery',
'sexual_reorientation_therapy': 'conversion_therapy',
'sexual_response_cycle': 'human_sexual_response_cycle',
'sexual_role_play': 'sexual_roleplay',
'sexual_size_dimorphism': 'sexual_dimorphism',
'sexual_slavery/version_2': 'sexual_slavery',
'sexuality_and_religion': 'religion_and_sexuality',
'sexuality_in_art': 'erotic_art',
'sexuality_in_buddhism': 'sexuality_and_buddhism',
'sexuality_in_islam': 'islamic_sexual_jurisprudence',
'sexuality_in_rome': 'sexuality_in_ancient_rome',
'sexuality_in_sf': 'sex_and_sexuality_in_speculative_fiction',
'sexualization_of_culture': 'sexual_objectification',
'sexually_antagonistic_co-evolution': 'sexual_conflict',
'sexually_antagonistic_coevolution': 'sexual_conflict',
'sexually_induced_sneezing': 'sexually-induced_sneezing',
'sexually_liberal_feminism': 'sex-positive_feminism',
'sexually_transmissible_disease': 'sexually_transmitted_disease',
'sexually_transmissible_diseases': 'sexually_transmitted_disease',
'sexually_transmissible_infection': 'sexually_transmitted_disease',
'sexually_transmissible_infections': 'sexually_transmitted_disease',
'sexually_transmitted_diseases': 'sexually_transmitted_disease',
'sexually_transmitted_infection': 'sexually_transmitted_disease',
'sexually_transmitted_infections': 'sexually_transmitted_disease',
'sexually_violent_predators': 'sexually_violent_predator',
'sexy_i_can': 'sexy_can_i',
'sexy_little_thug': 'in_da_club',
'sexy_m._f.': 'sexy_mf',
'sexy_m_f': 'sexy_mf',
'sexy_sat_tv': 'sexysat_tv',
'sexy_sons_hypothesis': 'sexy_son_hypothesis',
'seybourn_h._lynne': 'seybourn_harris_lynne',
'seychelles_brush_warbler': 'seychelles_warbler',
'seychelles_cave_swiftlet': 'seychelles_swiftlet',
'seychelles_chestnut-flanked_white-eye': 'seychelles_chestnut-sided_white-eye',
'seychelles_day_gecko': 'phelsuma_astriata',
'seychelles_grey_white-eye': 'seychelles_white-eye',
'seychelles_marketing_board': 'super_magic_brothers',
'seychelles_mud_turtle': 'seychelles_black_terrapin',
'seychelles_scops_owl': 'seychelles_scops-owl',
'seychellois_creole_language': 'seychellois_creole',
'seydlitz_class_battlecruiser': 'sms_seydlitz',
'seydou_diarra_elimane': 'seydou_diarra',
'seydou_elimane_diarra': 'seydou_diarra',
'seydou_nourou_doumbia': 'chris_seydou',
'seyed_abdullo_nuri': 'sayid_abdulloh_nuri',
'seyed_ahmad_khatami': 'ahmad_khatami',
'seyed_ahmed_hulusi': 'ahmed_hulusi',
'seyed_ali_khamenei': 'ali_khamenei',
'seyed_e_hasnain': 'seyed_e._hasnain',
'seyed_ehtesham_hasnain': 'seyed_e._hasnain',
'seyed_hassan_taghizadeh': 'hasan_taqizadeh',
'seyed_hossein_nasrallah': 'hassan_nasrallah',
'seyed_jalal_hosseini': 'jalal_hosseini',
'seyed_jalal_khoshkebejari': 'jalal_hosseini',
'seyed_mahmoodreza_miran': 'mahmoud_miran',
'seyed_mahmoud-reza_miran': 'mahmoud_miran',
'seyed_mahmoud_miran': 'mahmoud_miran',
'seyed_mahmoudreza_miran': 'mahmoud_miran',
'seyed_mehdi_salehi': 'mehdi_seyed_salehi',
'seyed_mohamad_salehi': 'mehdi_seyed_salehi',
'seyed_mohammad_khatami': 'mohammad_khatami',
'seyed_mohammad_salehi': 'mehdi_seyed_salehi',
'seyed_morad_mohammadi': 'morad_mohammadi',
'seyed_reza_chavoshi': 'afshin_chavoshi',
'seyfarth_shaw_llp': 'seyfarth_shaw',
'seyfert_1_galaxy': 'seyfert_galaxy',
'seyfert_2_galaxy': 'seyfert_galaxy',
'seyh_said_isyani': 'sheikh_said_piran',
'seyh_sait_rebellion': 'sheikh_said_rebellion',
'seyi_george_olofinjana': 'seyi_olofinjana',
'seyid_imadeddin_nesimi': 'imadaddin_nasimi',
'seym_of_poland': 'sejm_of_the_republic_of_poland',
'seymour_a._papert': 'seymour_papert',
'seymour_b._sarason': 'seymour_sarason',
'seymour_david_ruchamkin': 'seymour_d._ruchamkin',
'seymour_dilworth_young': 's._dilworth_young',
'seymour_duncan_sh-5': 'seymour_duncan',
'seymour_elementary_school': 'admiral_seymour_elementary_school',
'seymour_h._knox': 'seymour_knox',
'seymour_h_knox': 'seymour_knox',
'seymour_howe_person': 'seymour_h._person',
'seymour_i_rubinstein': 'seymour_i._rubinstein',
'seymour_j._singer': 'seymour_jonathan_singer',
'seymour_james_farmer': 'seymour_j._farmer',
'seymour_johnson_afb': 'seymour_johnson_air_force_base',
'seymour_johnson_field': 'seymour_johnson_air_force_base',
'seymour_knox_i': 'seymour_h._knox_i',
'seymour_knox_ii': 'seymour_h._knox_ii',
'seymour_knox_iii': 'seymour_h._knox_iii',
'seymour_m._hersh': 'seymour_hersh',
'seymour_r._cray': 'seymour_cray',
'seymour_roger_cray': 'seymour_cray',
'seymour_the_elephant': 'the_muppets',
'seymour_v._reit': 'seymour_reit',
'seymour_w._itzkoff': 'seymour_itzkoff',
'seyne_sur_mer': 'la_seyne-sur-mer',
'seyyed_ahmad_khansari': 'ahmad_khansari',
'seyyed_ali_khamenei': 'ali_khamenei',
'seyyed_ebrahim_nabavi': 'ebrahim_nabavi',
'seyyed_hassan_mir-kazemi': 'death_of_neda_agha-soltan',
'seyyed_hassan_modarres': 'hassan_modarres',
'seyyed_hossein_nasr': 'hossein_nasr',
'seyyed_husayn_borujerdi': 'seyyed_hossein_borujerdi',
'seyyed_jamaleddin_asadabadi': 'jamal-al-din_afghani',
'seyyed_mohamed_khatami': 'mohammad_khatami',
'seyyed_mohammad_khatami': 'mohammad_khatami',
'seyyed_mohammed_khatami': 'mohammad_khatami',
'seyyed_muhammad_khatami': 'mohammad_khatami',
'seyyeid_hossein_nasr': 'hossein_nasr',
'seyyid_ahmet_arvasi': 'ahmet_arvasi',
'seyyit_ahmet_arvasi': 'ahmet_arvasi',
'sez_euro-park_mielec': 'special_economic_zone_euro-park_mielec'} | PypiClean |
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/consolidated/_breakdown_definition.py | from typing import Union, TYPE_CHECKING
from .._enums import StatTypes
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse
if TYPE_CHECKING:
from ...._types import ExtendedParams, StrStrings
class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Describe a request for consolidated ownership breakdown statistics.

    Retrieves holdings data broken down by investor types, styles, regions,
    countries, rotations and turnovers.

    Parameters
    ----------
    universe : str or list of str
        Company (or companies) for which the content is returned.
    stat_type : int or StatTypes
        Statistics type to be returned. Available types:

        - Investor Type (1)
        - Investment Style (2)
        - Region (3)
        - Rotation (4)
        - Country (5)
        - Metro Area (6)
        - Investor Type Parent (7)
        - Invest Style Parent (8)
    use_field_names_in_headers : bool, optional
        When True, field names (instead of titles) are used as column headers.
    extended_params : ExtendedParams, optional
        Additional request parameters, if required.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.consolidated.breakdown.Definition("TRI.N", ownership.StatTypes.INVESTOR_TYPE)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Consolidated.BreakdownDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        stat_type: Union[int, StatTypes],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Validate the raw inputs first, then normalise the universe while
        # building the provider-layer request.
        validate_types(stat_type, [int, StatTypes], "stat_type")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_CONSOLIDATED_BREAKDOWN,
            universe=try_copy_to_list(universe),
            stat_type=stat_type,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_en-pk.js | 'use strict';
// AngularJS locale bundle for "en-pk" (English as used in Pakistan).
// Registers the $locale service value on the ngLocale module. The data is
// machine-generated from CLDR; manual edits are normally overwritten.
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};

// Number of digits after the decimal point in n (n is stringified first).
function getDecimals(n) {
  n = n + '';
  var i = n.indexOf('.');
  return (i == -1) ? 0 : n.length - i - 1;
}

// CLDR plural operands for n: v = count of visible fraction digits (capped
// at 3 unless opt_precision overrides it), f = those digits as an integer.
function getVF(n, opt_precision) {
  var v = opt_precision;

  if (undefined === v) {
    v = Math.min(getDecimals(n), 3);
  }

  var base = Math.pow(10, v);
  var f = ((n * base) | 0) % base;
  return {v: v, f: f};
}

$provide.value("$locale", {
  "DATETIME_FORMATS": {
    "AMPMS": [
      "am",
      "pm"
    ],
    "DAY": [
      "Sunday",
      "Monday",
      "Tuesday",
      "Wednesday",
      "Thursday",
      "Friday",
      "Saturday"
    ],
    "MONTH": [
      "January",
      "February",
      "March",
      "April",
      "May",
      "June",
      "July",
      "August",
      "September",
      "October",
      "November",
      "December"
    ],
    "SHORTDAY": [
      "Sun",
      "Mon",
      "Tue",
      "Wed",
      "Thu",
      "Fri",
      "Sat"
    ],
    "SHORTMONTH": [
      "Jan",
      "Feb",
      "Mar",
      "Apr",
      "May",
      "Jun",
      "Jul",
      "Aug",
      "Sep",
      "Oct",
      "Nov",
      "Dec"
    ],
    "fullDate": "EEEE d MMMM y",
    "longDate": "d MMMM y",
    "medium": "dd-MMM-y h:mm:ss a",
    "mediumDate": "dd-MMM-y",
    "mediumTime": "h:mm:ss a",
    "short": "dd/MM/y h:mm a",
    "shortDate": "dd/MM/y",
    "shortTime": "h:mm a"
  },
  "NUMBER_FORMATS": {
    "CURRENCY_SYM": "Rs",
    "DECIMAL_SEP": ".",
    "GROUP_SEP": ",",
    // PATTERNS[0] is the decimal pattern, PATTERNS[1] the currency pattern.
    "PATTERNS": [
      {
        "gSize": 2,
        "lgSize": 3,
        "maxFrac": 3,
        "minFrac": 0,
        "minInt": 1,
        "negPre": "-",
        "negSuf": "",
        "posPre": "",
        "posSuf": ""
      },
      {
        "gSize": 2,
        "lgSize": 3,
        "maxFrac": 2,
        "minFrac": 2,
        "minInt": 1,
        "negPre": "\u00a4\u00a0-",
        "negSuf": "",
        "posPre": "\u00a4\u00a0",
        "posSuf": ""
      }
    ]
  },
  "id": "en-pk",
  // English plural rule: "one" for exactly 1 with no visible fraction
  // digits, otherwise "other".
  "pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
/tablib-garetjax-0.9.12.tar.gz/tablib-garetjax-0.9.12/tablib/packages/openpyxl/reader/iter_worksheet.py |
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
""" Iterators-based worksheet reader
*Still very raw*
"""
from ....compat import BytesIO as StringIO
import warnings
import operator
from functools import partial
from itertools import groupby, ifilter
from ..worksheet import Worksheet
from ..cell import coordinate_from_string, get_column_letter, Cell
from ..reader.excel import get_sheet_ids
from ..reader.strings import read_string_table
from ..reader.style import read_style_table, NumberFormat
from ..shared.date_time import SharedDate
from ..reader.worksheet import read_dimension
from ..shared.ooxml import (MIN_COLUMN, MAX_COLUMN, PACKAGE_WORKSHEETS,
MAX_ROW, MIN_ROW, ARC_SHARED_STRINGS, ARC_APP, ARC_STYLE)
try:
from xml.etree.cElementTree import iterparse
except ImportError:
from xml.etree.ElementTree import iterparse
from zipfile import ZipFile
from .. import cell
import re
import tempfile
import zlib
import zipfile
import struct
# Shorthands used by the fast reader below.
TYPE_NULL = Cell.TYPE_NULL  # data type assigned to placeholder (missing) cells
MISSING_VALUE = None  # value stored in placeholder cells
RE_COORDINATE = re.compile('^([A-Z]+)([0-9]+)$')  # splits 'AB12' -> ('AB', '12')
SHARED_DATE = SharedDate()  # converter between Excel serial dates and datetime
# Precomputed column-letter -> 1-based index map for every column the format
# supports; deleted again below once column_index_from_string has captured it
# as a default argument.
_COL_CONVERSION_CACHE = dict((get_column_letter(i), i) for i in xrange(1, 18279))
def column_index_from_string(str_col, _col_conversion_cache=_COL_CONVERSION_CACHE):
    """Return the 1-based column index for a column letter such as 'A' or 'BC'.

    Raises KeyError for strings outside the precomputed range.
    """
    # we use a function argument to get indexed name lookup
    return _col_conversion_cache[str_col]
# The cache now lives on only as the function's default argument.
del _COL_CONVERSION_CACHE

# Field names shared by the namedtuple and the fallback class below.
RAW_ATTRIBUTES = ['row', 'column', 'coordinate', 'internal_value', 'data_type', 'style_id', 'number_format']
try:
    # Preferred implementation: namedtuple (Python 2.6+) gives compact,
    # immutable cell records with fast attribute access.
    from collections import namedtuple
    BaseRawCell = namedtuple('RawCell', RAW_ATTRIBUTES)
except ImportError:
    # warnings.warn("""Unable to import 'namedtuple' module, this may cause memory issues when using optimized reader. Please upgrade your Python installation to 2.6+""")

    class BaseRawCell(object):
        """Fallback for Python < 2.6: mutable stand-in mimicking the namedtuple API."""

        def __init__(self, *args):
            # Positional arguments map one-to-one onto RAW_ATTRIBUTES.
            assert len(args)==len(RAW_ATTRIBUTES)

            for attr, val in zip(RAW_ATTRIBUTES, args):
                setattr(self, attr, val)

        def _replace(self, **kwargs):
            # Unlike namedtuple._replace this mutates in place, but it
            # returns self so call sites behave the same.
            self.__dict__.update(kwargs)
            return self
class RawCell(BaseRawCell):
    """Slimmed-down replacement for :class:`openpyxl.cell.Cell`.

    Instances expose ``row``, ``column``, ``coordinate`` and
    ``internal_value``, plus ``data_type``, ``style_id`` and
    ``number_format`` for callers that need them, without the overhead of
    the full Cell class.
    """

    @property
    def is_date(self):
        """Whether this numeric cell's number format marks it as a date/time."""
        if self.data_type != Cell.TYPE_NUMERIC:
            return False
        if self.number_format is None:
            return False
        # Any of the date/time format letters qualifies the cell as a date.
        return any(flag in self.number_format for flag in 'dmyhs')
def iter_rows(workbook_name, sheet_name, xml_source, range_string = '', row_offset = 0, column_offset = 0):
    """Yield rows of RawCell tuples for one worksheet, parsed lazily.

    :param workbook_name: path of the .xlsx package on disk
    :param sheet_name: archive-internal name of the sheet (currently unused here)
    :param xml_source: seekable file-like object with the sheet's XML
    :param range_string: optional explicit window, e.g. 'A1:C4'
    :param row_offset: rows added to the window when range_string is given
    :param column_offset: columns added to the window when range_string is given
    :return: generator of tuples of RawCell
    """
    archive = get_archive_file(workbook_name)
    source = xml_source

    if range_string:
        min_col, min_row, max_col, max_row = get_range_boundaries(range_string, row_offset, column_offset)
    else:
        # No explicit range: fall back to the sheet's declared <dimension>.
        min_col, min_row, max_col, max_row = read_dimension(xml_source = source)

        min_col = column_index_from_string(min_col)
        max_col = column_index_from_string(max_col) + 1
        # NOTE(review): the +6 padding on max_row has no visible
        # justification here - presumably a safety margin for sheets whose
        # dimension element under-reports; confirm before changing.
        max_row += 6

    try:
        string_table = read_string_table(archive.read(ARC_SHARED_STRINGS))
    except KeyError:
        # Workbook has no shared-strings part (no string cells at all).
        string_table = {}

    style_table = read_style_table(archive.read(ARC_STYLE))

    source.seek(0)  # read_dimension above may have consumed the stream
    p = iterparse(source)

    return get_squared_range(p, min_col, min_row, max_col, max_row, string_table, style_table)
def get_rows(p, min_column = MIN_COLUMN, min_row = MIN_ROW, max_column = MAX_COLUMN, max_row = MAX_ROW):
    """Cluster the parsed cells of *p* into per-row groups keyed by row index."""
    cells = get_cells(p, min_row, min_column, max_row, max_column)
    return groupby(cells, operator.attrgetter('row'))
def get_cells(p, min_row, min_col, max_row, max_col, _re_coordinate=RE_COORDINATE):
    """Generate RawCell objects from an iterparse stream, in document order.

    Only cells inside the [min_row..max_row] x [min_col..max_col] window are
    yielded; everything else is discarded. Style/number-format resolution is
    left to the caller.
    """
    for _event, element in p:
        if element.tag == '{http://schemas.openxmlformats.org/spreadsheetml/2006/main}c':
            coord = element.get('r')
            column_str, row = _re_coordinate.match(coord).groups()

            row = int(row)
            column = column_index_from_string(column_str)

            if min_col <= column <= max_col and min_row <= row <= max_row:
                data_type = element.get('t', 'n')  # default cell type is numeric
                style_id = element.get('s')
                value = element.findtext('{http://schemas.openxmlformats.org/spreadsheetml/2006/main}v')
                yield RawCell(row, column_str, coord, value, data_type, style_id, None)

        if element.tag == '{http://schemas.openxmlformats.org/spreadsheetml/2006/main}v':
            # Do not clear <v> elements: their text must survive until the
            # enclosing <c> element is processed above.
            continue
        element.clear()  # free memory for everything already consumed
def get_range_boundaries(range_string, row = 0, column = 0):
    """Resolve a range such as 'A1:C4' (or a single cell 'A1') to numeric bounds.

    Returns (min_col, min_row, max_col, max_row). The *row*/*column* offsets
    are applied only to full ranges, matching the original behaviour.
    """
    if ':' not in range_string:
        # Single cell: a one-column window on that cell's row, no offsets.
        col_str, cell_row = coordinate_from_string(range_string)
        start_col = column_index_from_string(col_str)
        return (start_col, cell_row, start_col + 1, cell_row)

    start, end = range_string.split(':')
    start_col_str, start_row = coordinate_from_string(start)
    end_col_str, end_row = coordinate_from_string(end)

    start_col = column_index_from_string(start_col_str) + column
    end_col = column_index_from_string(end_col_str) + column

    return (start_col, start_row + row, end_col, end_row + row)
def get_archive_file(archive_name):
    """Open the workbook package at *archive_name* as a read-only ZipFile."""
    return ZipFile(archive_name, mode='r')
def get_xml_source(archive_file, sheet_name):
    """Read the raw XML of *sheet_name* from the worksheets folder of the package."""
    return archive_file.read(PACKAGE_WORKSHEETS + '/' + sheet_name)
def get_missing_cells(row, columns):
    """Build placeholder (null) RawCells for every column name in *columns*."""
    placeholders = {}
    for column in columns:
        coord = '%s%s' % (column, row)
        placeholders[column] = RawCell(row, column, coord, MISSING_VALUE, TYPE_NULL, None, None)
    return placeholders
def get_squared_range(p, min_col, min_row, max_col, max_row, string_table, style_table):
    """Yield one fixed-width tuple of RawCell per row over a rectangular window.

    Rows or columns absent from the XML are filled with placeholder (null)
    cells so every yielded tuple has the same width. String, bool, date and
    numeric values are coerced from their raw XML text here.
    """
    expected_columns = [get_column_letter(ci) for ci in xrange(min_col, max_col)]

    current_row = min_row
    for row, cells in get_rows(p, min_row = min_row, max_row = max_row, min_column = min_col, max_column = max_col):
        full_row = []
        if current_row < row:
            # The XML skipped one or more empty rows: emit placeholder rows.
            for gap_row in xrange(current_row, row):
                dummy_cells = get_missing_cells(gap_row, expected_columns)
                yield tuple([dummy_cells[column] for column in expected_columns])
            current_row = row

        temp_cells = list(cells)
        retrieved_columns = dict([(c.column, c) for c in temp_cells])
        missing_columns = list(set(expected_columns) - set(retrieved_columns.keys()))
        replacement_columns = get_missing_cells(row, missing_columns)

        for column in expected_columns:
            if column in retrieved_columns:
                cell = retrieved_columns[column]

                if cell.style_id is not None:
                    # Resolve the style index to a number format string so
                    # that RawCell.is_date can work below.
                    style = style_table[int(cell.style_id)]
                    cell = cell._replace(number_format = style.number_format.format_code) #pylint: disable-msg=W0212
                if cell.internal_value is not None:
                    if cell.data_type == Cell.TYPE_STRING:
                        # Shared string: the raw value is an index into the table.
                        cell = cell._replace(internal_value = string_table[int(cell.internal_value)]) #pylint: disable-msg=W0212
                    elif cell.data_type == Cell.TYPE_BOOL:
                        # NOTE(review): xlsx normally stores booleans as
                        # '0'/'1'; comparing against 'True' looks suspicious -
                        # verify against real files before relying on this.
                        cell = cell._replace(internal_value = cell.internal_value == 'True')
                    elif cell.is_date:
                        cell = cell._replace(internal_value = SHARED_DATE.from_julian(float(cell.internal_value)))
                    elif cell.data_type == Cell.TYPE_NUMERIC:
                        cell = cell._replace(internal_value = float(cell.internal_value))

                full_row.append(cell)
            else:
                full_row.append(replacement_columns[column])
        current_row = row + 1
        yield tuple(full_row)
#------------------------------------------------------------------------------
class IterableWorksheet(Worksheet):
    """Read-only worksheet backed by the streaming reader in this module.

    Cells are never materialised; the only supported access path is
    :meth:`iter_rows`, which re-parses the sheet XML on every call.
    """

    def __init__(self, parent_workbook, title, workbook_name,
                 sheet_codename, xml_source):
        # workbook_name: path of the .xlsx package on disk
        # sheet_codename: archive-internal name of this sheet's XML part
        # xml_source: seekable file-like object with this sheet's XML
        Worksheet.__init__(self, parent_workbook, title)
        self._workbook_name = workbook_name
        self._sheet_codename = sheet_codename
        self._xml_source = xml_source

    def iter_rows(self, range_string = '', row_offset = 0, column_offset = 0):
        """ Returns a squared range based on the `range_string` parameter,
        using generators.

        :param range_string: range of cells (e.g. 'A1:C4')
        :type range_string: string

        :param row_offset: rows added to the window when range_string is given
        :type row_offset: int

        :param column_offset: columns added to the window when range_string is given
        :type column_offset: int

        :rtype: generator
        """
        return iter_rows(workbook_name = self._workbook_name,
                         sheet_name = self._sheet_codename,
                         xml_source = self._xml_source,
                         range_string = range_string,
                         row_offset = row_offset,
                         column_offset = column_offset)

    def cell(self, *args, **kwargs):
        # Random access is unsupported in streaming mode.
        raise NotImplementedError("use 'iter_rows()' instead")

    def range(self, *args, **kwargs):
        raise NotImplementedError("use 'iter_rows()' instead")
def unpack_worksheet(archive, filename):
    """Extract *filename* from *archive* into a temporary file.

    The member is decompressed manually in 100 KB chunks so that huge
    worksheets never have to be held in memory in one piece.

    :param archive: an open :class:`zipfile.ZipFile`
    :param filename: archive-internal name of the worksheet part
    :return: a binary temporary file positioned at EOF with the
        uncompressed worksheet XML (callers must seek(0) before reading)
    :raises zipfile.BadZipfile: for members that are neither stored nor
        deflated
    """
    # Binary mode: we write raw decompressed bytes. Text mode ('r+') breaks
    # on Python 3 and gains nothing on Python 2.
    temp_file = tempfile.TemporaryFile(mode='r+b', prefix='openpyxl.', suffix='.unpack.temp')

    zinfo = archive.getinfo(filename)

    if zinfo.compress_type == zipfile.ZIP_STORED:
        decoder = None
    elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
        # Negative wbits: zip members are raw deflate streams without the
        # zlib header/trailer.
        decoder = zlib.decompressobj(-zlib.MAX_WBITS)
    else:
        # BadZipfile (not BadZipFile) also exists on Python 2, where the
        # CamelCase spelling is unavailable; Python 3 keeps it as an alias.
        raise zipfile.BadZipfile("Unrecognized compression method")

    archive.fp.seek(_get_file_offset(archive, zinfo))

    bytes_to_read = zinfo.compress_size

    while True:
        buff = archive.fp.read(min(bytes_to_read, 102400))
        if not buff:
            break
        bytes_to_read -= len(buff)
        if decoder:
            buff = decoder.decompress(buff)
        temp_file.write(buff)

    if decoder:
        # flush() drains any data still buffered inside the decompressor.
        # The previous decompress('Z') call fed a bogus byte instead and
        # could silently drop the tail of the stream.
        temp_file.write(decoder.flush())

    return temp_file

def _get_file_offset(archive, zinfo):
    """Return the absolute offset of *zinfo*'s file data inside *archive*."""
    try:
        return zinfo.file_offset
    except AttributeError:
        # From http://stackoverflow.com/questions/3781261/how-to-simulate-zipfile-open-in-python-2-5
        # Seek over the fixed size fields to the "file name length" field in
        # the local file header (26 bytes), then skip the variable-length
        # name and extra fields. info.extra reflects the central directory
        # and may differ from the local header, so read the lengths here.
        archive.fp.seek(zinfo.header_offset + 26)
        file_name_len, extra_len = struct.unpack("<HH", archive.fp.read(4))
        return zinfo.header_offset + 30 + file_name_len + extra_len
/vk.py-1.0.0a6-1.0.0.tar.gz/vk.py-1.0.0a6-1.0.0/vk/types/community.py | from .base import BaseModel
from .additional import City, Country, Place
from .attachments import MarketPriceCurrency
from enum import IntEnum
import typing
# https://vk.com/dev/objects/group
class CommunityBanInfo(BaseModel):
    """Ban details of a community (``ban_info`` in the VK group object).

    See https://vk.com/dev/objects/group
    """

    end_date: int = None  # presumably a unixtime when the ban ends — verify against VK docs
    comment: str = None  # ban reason text
class CommunityContacts(BaseModel):
    """One contact entry of a community (``contacts`` in the VK group object)."""

    user_id: int = None  # VK id of the contact person
    desc: str = None  # position / description of the contact
    phone: str = None
    email: str = None
class CommunityCounters(BaseModel):
    """Media counters of a community (``counters`` in the VK group object)."""

    photos: int = None
    albums: int = None
    audios: int = None
    videos: int = None
    topics: int = None
    docs: int = None
class CommunityCoverImage(BaseModel):
    """A single community cover image in one resolution."""

    url: str = None
    width: int = None  # pixels
    height: int = None  # pixels
class CommunityCover(BaseModel):
    """Community cover (``cover`` in the VK group object)."""

    enabled: int = None  # 1/0 flag per the VK API convention
    images: typing.List[CommunityCoverImage] = None  # same cover in several resolutions
class CommunityLink(BaseModel):
    """A link attached to a community page (``links`` in the VK group object)."""

    id: int = None
    url: str = None
    name: str = None
    desc: str = None
    photo_50: str = None  # URL of the 50px preview image
    photo_100: str = None  # URL of the 100px preview image
class CommunityMainSection(IntEnum):
    """Main (default) section of a community page (``main_section``)."""

    no_main_section = 0
    photos = 1
    topics = 2
    audios = 3
    videos = 4
    market = 5
class CommunityMarket(BaseModel):
    """Market (shop) information of a community (``market`` in the VK group object)."""

    enabled: int = None  # 1/0 flag per the VK API convention
    price_min: int = None
    price_max: int = None
    main_album_id: int = None  # id of the album shown as the market front page
    contact_id: int = None  # id of the user to contact for purchases
    currency: MarketPriceCurrency = None
    currency_text: str = None  # human-readable currency name
class CommunityMemberStatus(IntEnum):
    """Current user's membership status in a community (``member_status``)."""

    not_a_member = 0
    member = 1
    not_sure = 2  # "maybe attending" — relevant for events
    declined_an_invitation = 3
    sent_request = 4
    invited = 5
class Community(BaseModel):
    """A VK community (group, public page or event).

    Field names and semantics follow https://vk.com/dev/objects/group;
    the many ``is_*`` / ``can_*`` fields appear to be 1/0 integer flags,
    per the usual VK API convention.
    """

    # --- identity ---
    id: int = None
    name: str = None
    screen_name: str = None
    is_closed: int = None
    deactivated: str = None  # present only for deleted/banned communities
    is_admin: int = None
    admin_level: int = None
    is_member: int = None
    invited_by: int = None
    type: str = None  # e.g. "group", "page", "event" — verify against VK docs
    # --- photos ---
    has_photo: int = None
    photo_50: str = None
    photo_100: str = None
    photo_200: str = None
    # --- extended information ---
    activity: str = None
    age_limits: int = None
    ban_info: CommunityBanInfo = None
    can_create_topic: int = None
    can_message: int = None
    can_post: int = None
    can_see_all_posts: int = None
    can_upload_doc: int = None
    can_upload_video: int = None
    city: City = None
    contacts: CommunityContacts = None
    counters: CommunityCounters = None
    country: Country = None
    cover: CommunityCover = None
    description: str = None
    fixed_post: int = None  # id of the pinned post
    is_favorite: int = None
    is_hidden_from_feed: int = None
    is_messages_blocked: int = None
    links: typing.List[CommunityLink] = None
    main_album_id: int = None
    main_section: CommunityMainSection = None
    market: CommunityMarket = None
    member_status: CommunityMemberStatus = None
    members_count: int = None
    place: Place = None
    public_date_label: str = None
    site: str = None
    start_date: int = None  # relevant for events — presumably unixtime
    finish_date: int = None
    status: str = None
    trending: int = None
    verified: int = None
    wiki_page: str = None
/pyorbit-package-10.0.7.tar.gz/pyorbit-package-10.0.7/pyorbit/models/correlated_jitter.py | import numpy.polynomial.polynomial
from pyorbit.models.abstract_model import *
class LocalCorrelatedJitter(AbstractModel):
    """Dataset jitter modelled as a polynomial of an associated dataset.

    Each epoch of a *reference* dataset is cross-matched (within
    ``threshold``) with an epoch of an *associated* dataset; the jitter on
    the reference point is then a polynomial in the associated value,
    centred on the fixed parameter ``x_zero``.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.model_class = 'local_correlated_jitter'
        self.jitter_model = True

        self.list_pams_common = set()
        self.list_pams_dataset = {'x_zero'}

        self.default_bounds = {'x_zero': [-10**5, 10**5]}
        self.default_spaces = {'x_zero': 'Linear'}
        self.default_priors = {'x_zero': ['Uniform', []]}
        self.recenter_pams_dataset = set()

        self.order = 1          # polynomial degree; may be overridden in initialize_model
        self.x_vals = None      # associated value matched to each reference epoch
        self.x_mask = None      # True where a reference epoch has a cross-match
        self.threshold = 0.001  # max epoch difference for two points to be matched

    def initialize_model(self, mc, **kwargs):
        """Cross-match the reference and associated datasets and declare the
        polynomial coefficients as dataset parameters.

        A special kind of initialization is required for this module, since
        it has to take a second dataset and check the correspondence with
        the points.

        :param mc: model container holding ``dataset_dict``
        :param kwargs: must contain ``reference`` and ``associated`` dataset
            names; may contain ``threshold``, ``order`` and ``x_zero``
        """
        dataset_ref = mc.dataset_dict[kwargs['reference']]
        dataset_asc = mc.dataset_dict[kwargs['associated']]

        if 'threshold' in kwargs:
            self.threshold = kwargs['threshold']
        if 'order' in kwargs:
            self.order = kwargs['order']

        # HERE (original dev note): we must associate the data from the
        # associated dataset to the reference one; remove that part from
        # dataset.pyx, add a None option for the dataset, and fix
        # input_parser to accommodate the changes. Jitter must not be
        # included in the analysis — the zero point of the polynomial fit
        # is handled through the fixed 'x_zero' parameter below.

        # For every associated epoch, copy its value onto all reference
        # epochs closer than `threshold` and flag them as matched.
        self.x_vals = np.zeros(dataset_ref.n, dtype=np.double)
        self.x_mask = np.zeros(dataset_ref.n, dtype=bool)
        for i_date, v_date in enumerate(dataset_asc.x):
            match = np.where(np.abs(v_date-dataset_ref.x) < self.threshold)[0]
            self.x_vals[match] = dataset_asc.y[i_date]
            self.x_mask[match] = True

        print()
        print('Correlation model')
        print('Reference dataset: ', kwargs['reference'])
        print('Associated dataset: ', kwargs['associated'])
        print('Cross-match between datasets: {0:d} out of {1:d} '.format(int(np.sum(self.x_mask)), dataset_ref.n))

        # 'x_zero' is kept fixed: to the user-supplied value when given,
        # otherwise to the minimum of the cross-matched associated values.
        try:
            self.fix_list[dataset_asc.name_ref]['x_zero'] = np.asarray([kwargs['x_zero'], 0.0000], dtype=np.double)
        except (KeyError, ValueError):
            self.fix_list[dataset_asc.name_ref]['x_zero'] = np.asarray([np.amin(self.x_vals[self.x_mask]), 0.0000], dtype=np.double)

        # Declare one free parameter per polynomial degree: c1 ... c_order.
        for i_order in range(1, self.order+1):
            par = 'c'+repr(i_order)
            # BUGFIX: set.update(par) with a string adds its individual
            # *characters* ('c', '1', ...) to the set, not the parameter
            # name itself; add() registers the name as intended.
            self.list_pams_dataset.add(par)
            self.default_bounds.update({par: [0.0, 10**6]})
            self.default_spaces.update({par: 'Linear'})
            self.default_priors.update({par: ['Uniform', []]})

    def compute(self, parameter_values, dataset, x0_input=None):
        """Return the jitter term for each point of the reference dataset.

        :param parameter_values: dict with 'x_zero' and 'c1' ... 'c_order'
        :param dataset: unused here; kept for the common model interface
        :param x0_input: optional abscissa array to evaluate the polynomial
            on directly (no cross-match mask applied)
        """
        # Coefficients sorted from lowest to highest degree, as expected by
        # numpy.polynomial.polynomial.polyval (which is reversed with
        # respect to numpy.polyval). The zeroth-order term stays zero.
        coeff = np.zeros(self.order+1)
        for i_order in range(1, self.order+1):
            par = 'c'+repr(i_order)
            coeff[i_order] = parameter_values[par]

        x_zero = parameter_values['x_zero']

        if x0_input is None:
            # Jitter only where a cross-matched associated value exists.
            return np.where(self.x_mask, numpy.polynomial.polynomial.polyval(self.x_vals-x_zero, coeff), 0.0)
        else:
            return numpy.polynomial.polynomial.polyval(x0_input-x_zero, coeff)