id
stringlengths 1
8
| text
stringlengths 6
1.05M
| dataset_id
stringclasses 1
value |
---|---|---|
/v2/model/show_pool_response.py |
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class ShowPoolResponse(SdkResponse):
    """Response model for the huaweicloud "show pool" API call.

    Attributes:
        openapi_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    # Attribute names whose values are masked as "****" by to_dict().
    sensitive_list = []

    openapi_types = {
        'pool': 'PoolResp'
    }

    attribute_map = {
        'pool': 'pool'
    }

    def __init__(self, pool=None):
        """ShowPoolResponse - a model defined in huaweicloud sdk

        :param pool: pool payload returned by the service, optional.
        :type pool: PoolResp
        """
        super().__init__()
        self._pool = None
        self.discriminator = None
        # Only assign through the property when a value was supplied so the
        # attribute stays at its None default otherwise.
        if pool is not None:
            self.pool = pool

    @property
    def pool(self):
        """Gets the pool of this ShowPoolResponse.

        :return: The pool of this ShowPoolResponse.
        :rtype: PoolResp
        """
        return self._pool

    @pool.setter
    def pool(self, pool):
        """Sets the pool of this ShowPoolResponse.

        :param pool: The pool of this ShowPoolResponse.
        :type: PoolResp
        """
        self._pool = pool

    def to_dict(self):
        """Returns the model properties as a dict.

        Nested models are serialized recursively via their own ``to_dict``;
        attributes listed in ``sensitive_list`` are masked as ``"****"``.
        """
        result = {}
        # The file already relies on Python 3 (``super().__init__()``), so we
        # use native dict iteration and comprehensions instead of six/lambdas.
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            elif attr in self.sensitive_list:
                result[attr] = "****"
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ShowPoolResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
/accelbyte_py_sdk-0.48.0.tar.gz/accelbyte_py_sdk-0.48.0/accelbyte_py_sdk/api/lobby/models/models_admin_add_profanity_filter_into_list_request.py |
# template file: ags_py_codegen
# AccelByte Gaming Services Lobby Server (3.25.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
class ModelsAdminAddProfanityFilterIntoListRequest(Model):
    """Models admin add profanity filter into list request (models.AdminAddProfanityFilterIntoListRequest)

    Properties:
        filter_: (filter) REQUIRED str

        note: (note) REQUIRED str
    """

    # region fields

    filter_: str  # REQUIRED
    note: str  # REQUIRED

    # endregion fields

    # region with_x methods

    def with_filter(self, value: str) -> ModelsAdminAddProfanityFilterIntoListRequest:
        """Set the ``filter`` value and return self for chaining."""
        self.filter_ = value
        return self

    def with_note(self, value: str) -> ModelsAdminAddProfanityFilterIntoListRequest:
        """Set the ``note`` value and return self for chaining."""
        self.note = value
        return self

    # endregion with_x methods

    # region to methods

    def to_dict(self, include_empty: bool = False) -> dict:
        """Serialize to a plain dict keyed by the wire field names."""
        result: dict = {}
        if hasattr(self, "filter_"):
            result["filter"] = str(self.filter_)
        elif include_empty:
            result["filter"] = ""
        if hasattr(self, "note"):
            result["note"] = str(self.note)
        elif include_empty:
            result["note"] = ""
        return result

    # endregion to methods

    # region static methods

    @classmethod
    def create(
        cls, filter_: str, note: str, **kwargs
    ) -> ModelsAdminAddProfanityFilterIntoListRequest:
        """Build an instance from the required field values."""
        instance = cls()
        instance.filter_ = filter_
        instance.note = note
        return instance

    @classmethod
    def create_from_dict(
        cls, dict_: dict, include_empty: bool = False
    ) -> ModelsAdminAddProfanityFilterIntoListRequest:
        """Build an instance from a wire-format dict."""
        instance = cls()
        if not dict_:
            return instance
        if "filter" in dict_ and dict_["filter"] is not None:
            instance.filter_ = str(dict_["filter"])
        elif include_empty:
            instance.filter_ = ""
        if "note" in dict_ and dict_["note"] is not None:
            instance.note = str(dict_["note"])
        elif include_empty:
            instance.note = ""
        return instance

    @classmethod
    def create_many_from_dict(
        cls, dict_: dict, include_empty: bool = False
    ) -> Dict[str, ModelsAdminAddProfanityFilterIntoListRequest]:
        """Build a dict of instances from a dict of wire-format dicts."""
        # BUG FIX: iterate dict_.items() — iterating a dict directly yields
        # only keys, so the previous ``for k, v in dict_`` unpacking raised
        # at runtime for any non-empty input.
        return (
            {
                k: cls.create_from_dict(v, include_empty=include_empty)
                for k, v in dict_.items()
            }
            if dict_
            else {}
        )

    @classmethod
    def create_many_from_list(
        cls, list_: list, include_empty: bool = False
    ) -> List[ModelsAdminAddProfanityFilterIntoListRequest]:
        """Build a list of instances from a list of wire-format dicts."""
        return (
            [cls.create_from_dict(i, include_empty=include_empty) for i in list_]
            if list_
            else []
        )

    @classmethod
    def create_from_any(
        cls, any_: Any, include_empty: bool = False, many: bool = False
    ) -> Union[
        ModelsAdminAddProfanityFilterIntoListRequest,
        List[ModelsAdminAddProfanityFilterIntoListRequest],
        Dict[Any, ModelsAdminAddProfanityFilterIntoListRequest],
    ]:
        """Dispatch to the appropriate create_* helper based on input shape.

        ``any_`` is annotated ``typing.Any`` (the previous annotation used the
        ``any`` builtin, which is not a type).
        """
        if many:
            if isinstance(any_, dict):
                return cls.create_many_from_dict(any_, include_empty=include_empty)
            elif isinstance(any_, list):
                return cls.create_many_from_list(any_, include_empty=include_empty)
            else:
                raise ValueError("a dict or a list is required when many=True")
        else:
            return cls.create_from_dict(any_, include_empty=include_empty)

    @staticmethod
    def get_field_info() -> Dict[str, str]:
        """Map wire field names to Python attribute names."""
        return {
            "filter": "filter_",
            "note": "note",
        }

    @staticmethod
    def get_required_map() -> Dict[str, bool]:
        """Map wire field names to whether they are required."""
        return {
            "filter": True,
            "note": True,
        }

    # endregion static methods
/tableschema-ckan-datastore-1.1.1.tar.gz/tableschema-ckan-datastore-1.1.1/tableschema_ckan_datastore/storage.py | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import six
import json
import collections
import tableschema
from . import utils
from .mapper import Mapper
import logging
log = logging.getLogger(__name__)
# Module API
class Storage(tableschema.Storage):
    """Ckan Datastore storage

    Package implements
    [Tabular Storage](https://github.com/frictionlessdata/tableschema-py#storage)
    interface (see full documentation on the link):

    ![Storage](https://i.imgur.com/RQgrxqp.png)

    > Only additional API is documented

    # Arguments
        base_url (str):
            the base url (and scheme) for the CKAN instance (e.g. http://demo.ckan.org).
        dataset_id (str):
            id or name of the CKAN dataset we wish to use as the bucket source.
            If missing, all tables in the DataStore are used.
        api_key (str):
            either a CKAN user api key or, if in the format `env\\:CKAN_API_KEY_NAME`,
            an env var that defines an api key.
    """

    # Public

    def __init__(self, base_url, dataset_id=None, api_key=None):

        # Set attributes
        base_path = "/api/3/action"
        self.__base_url = base_url.rstrip('/')
        self.__base_endpoint = self.__base_url + base_path
        self.__dataset_id = dataset_id
        self.__api_key = api_key
        self.__descriptors = {}
        # Safety cap on datastore_search pagination in `buckets`.
        self.__max_pages = 100
        self.__bucket_cache = None

        # Create mapper
        self.__mapper = Mapper()

    def __repr__(self):

        # Template and format
        template = 'Storage <{base_url}>'
        text = template.format(base_url=self.__base_url)
        return text

    @property
    def buckets(self):
        """List DataStore resource names, paging through `_table_metadata`.

        Results are cached; the cache is invalidated by create/delete.
        """
        if self.__bucket_cache:
            return self.__bucket_cache

        params = {
            'resource_id': '_table_metadata'
        }
        if self.__dataset_id is not None:
            # Restrict to resources that belong to the configured dataset.
            filter_ids = self.__get_resource_ids_for_dataset(self.__dataset_id)
            params.update({'filters': json.dumps({'name': filter_ids})})

        datastore_search_url = \
            "{}/datastore_search".format(self.__base_endpoint)
        response = self._make_ckan_request(datastore_search_url, params=params)

        buckets = [r['name'] for r in response['result']['records']]
        count = 1
        # Follow the `_links.next` pagination until an empty page or the
        # page cap is reached.
        while response['result']['records']:
            count += 1
            next_url = self.__base_url + response['result']['_links']['next']
            response = self._make_ckan_request(next_url)
            records = response['result']['records']
            if records:
                buckets = buckets + [r['name']
                                     for r in response['result']['records']]
            if count == self.__max_pages:
                # BUG FIX: `Logger.warn` is a deprecated alias; use `warning`.
                log.warning("Max bucket count exceeded. {} buckets returned."
                            .format(len(buckets)))
                break

        self.__bucket_cache = buckets
        return buckets

    def create(self, bucket, descriptor, force=False):

        # Make lists
        buckets = bucket
        if isinstance(bucket, six.string_types):
            buckets = [bucket]
        descriptors = descriptor
        if isinstance(descriptor, dict):
            descriptors = [descriptor]

        # Check buckets for existence
        for bucket in reversed(self.buckets):
            if bucket in buckets:
                if not force:
                    message = 'Bucket "%s" already exists.' % bucket
                    raise tableschema.exceptions.StorageError(message)
                self.delete(bucket)

        # Iterate over buckets/descriptors
        for bucket, descriptor in zip(buckets, descriptors):

            # Define resources
            tableschema.validate(descriptor)
            self.__descriptors[bucket] = descriptor
            datastore_dict = \
                self.__mapper.descriptor_to_datastore_dict(descriptor, bucket)
            datastore_create_url = \
                "{}/datastore_create".format(self.__base_endpoint)
            self._make_ckan_request(datastore_create_url, method='POST',
                                    json=datastore_dict)

        # Invalidate cache
        self.__bucket_cache = None

    def delete(self, bucket=None, ignore=False):

        # Make lists
        buckets = bucket
        if isinstance(bucket, six.string_types):
            buckets = [bucket]
        elif bucket is None:
            buckets = reversed(self.buckets)

        for bucket in buckets:

            # Check existent
            if bucket not in self.buckets:
                if not ignore:
                    message = 'Bucket "%s" doesn\'t exist.' % bucket
                    raise tableschema.exceptions.StorageError(message)
                # BUG FIX: skip the missing bucket instead of returning.
                # The previous early ``return`` aborted the whole deletion,
                # leaving later buckets undeleted and the cache stale.
                continue

            # Remove from buckets
            if bucket in self.__descriptors:
                del self.__descriptors[bucket]
            datastore_delete_url = \
                "{}/datastore_delete".format(self.__base_endpoint)
            params = {
                'resource_id': bucket,
                'force': True
            }
            self._make_ckan_request(datastore_delete_url, method='POST',
                                    json=params)

        # Invalidate cache
        self.__bucket_cache = None

    def describe(self, bucket, descriptor=None):

        # Set descriptor
        if descriptor is not None:
            self.__descriptors[bucket] = descriptor

        # Get descriptor
        else:
            descriptor = self.__descriptors.get(bucket)
            if descriptor is None:
                # Fetch field metadata from the DataStore (limit=0 avoids
                # transferring any rows).
                datastore_search_url = \
                    "{}/datastore_search".format(self.__base_endpoint)
                params = {
                    'limit': 0,
                    'resource_id': bucket
                }
                response = self._make_ckan_request(datastore_search_url,
                                                   params=params)
                fields = response['result']['fields']
                descriptor = \
                    self.__mapper.datastore_fields_to_descriptor(fields)

        return descriptor

    def iter(self, bucket):
        """Yield rows from the bucket, following pagination lazily."""
        schema = tableschema.Schema(self.describe(bucket))
        datastore_search_url = \
            "{}/datastore_search".format(self.__base_endpoint)
        params = {
            'resource_id': bucket
        }
        response = self._make_ckan_request(datastore_search_url,
                                           params=params)
        while response['result']['records']:
            for row in response['result']['records']:
                row = self.__mapper.restore_row(row, schema=schema)
                yield row
            next_url = self.__base_url + response['result']['_links']['next']
            response = self._make_ckan_request(next_url)

    def read(self, bucket):
        rows = list(self.iter(bucket))
        return rows

    def write(self, bucket, rows, method="upsert", as_generator=False):
        if as_generator:
            return self.write_aux(bucket, rows, method=method)
        else:
            # Drain the generator without keeping rows in memory.
            collections.deque(self.write_aux(bucket, rows, method=method), maxlen=0)

    def write_aux(self, bucket, rows, method="upsert"):
        """Yield each row as it is converted, then upsert the whole batch."""
        schema = tableschema.Schema(self.describe(bucket))
        datastore_upsert_url = \
            "{}/datastore_upsert".format(self.__base_endpoint)
        records = []
        for r in rows:
            records.append(self.__mapper.convert_row(r, schema))
            yield r
        params = {
            'resource_id': bucket,
            'method': method,
            'force': True,
            'records': records
        }
        self._make_ckan_request(datastore_upsert_url, method='POST',
                                json=params)

    # Private

    def __get_resource_ids_for_dataset(self, dataset_id):
        '''Get a list of resource ids for the passed dataset id.
        '''
        package_show_url = "{}/package_show".format(self.__base_endpoint)
        response = self._make_ckan_request(package_show_url,
                                           params=dict(id=dataset_id))
        dataset = response['result']
        resources = dataset['resources']
        resource_ids = [r['id'] for r in resources]
        return resource_ids

    def _make_ckan_request(self, datastore_url, **kwargs):
        """Issue a CKAN API request; raise StorageError on a CKAN error."""
        response = utils.make_ckan_request(datastore_url,
                                           api_key=self.__api_key,
                                           **kwargs)

        ckan_error = utils.get_ckan_error(response)
        if ckan_error:
            msg = 'CKAN returned an error: ' + json.dumps(ckan_error)
            raise tableschema.exceptions.StorageError(msg)

        return response
/cdktf_cdktf_provider_aws-17.0.2-py3-none-any.whl/cdktf_cdktf_provider_aws/codepipeline_webhook/__init__.py | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class CodepipelineWebhook(
    _cdktf_9a9027ec.TerraformResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhook",
):
    '''Represents a {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook aws_codepipeline_webhook}.'''

    # NOTE(review): jsii-generated Terraform binding. The jsii_type/jsii_name
    # strings and the _typecheckingstub__* hashes are part of the generated
    # contract with the jsii runtime — do not edit by hand; regenerate instead.

    def __init__(
        self,
        scope: _constructs_77d1e7e8.Construct,
        id_: builtins.str,
        *,
        authentication: builtins.str,
        filter: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["CodepipelineWebhookFilter", typing.Dict[builtins.str, typing.Any]]]],
        name: builtins.str,
        target_action: builtins.str,
        target_pipeline: builtins.str,
        authentication_configuration: typing.Optional[typing.Union["CodepipelineWebhookAuthenticationConfiguration", typing.Dict[builtins.str, typing.Any]]] = None,
        id: typing.Optional[builtins.str] = None,
        tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
        tags_all: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
        connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
        count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
        depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
        for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
        provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
    ) -> None:
        '''Create a new {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook aws_codepipeline_webhook} Resource.

        :param scope: The scope in which to define this construct.
        :param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
        :param authentication: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#authentication CodepipelineWebhook#authentication}.
        :param filter: filter block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#filter CodepipelineWebhook#filter}
        :param name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#name CodepipelineWebhook#name}.
        :param target_action: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#target_action CodepipelineWebhook#target_action}.
        :param target_pipeline: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#target_pipeline CodepipelineWebhook#target_pipeline}.
        :param authentication_configuration: authentication_configuration block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#authentication_configuration CodepipelineWebhook#authentication_configuration}
        :param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#id CodepipelineWebhook#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param tags: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#tags CodepipelineWebhook#tags}.
        :param tags_all: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#tags_all CodepipelineWebhook#tags_all}.
        :param connection: 
        :param count: 
        :param depends_on: 
        :param for_each: 
        :param lifecycle: 
        :param provider: 
        :param provisioners: 
        '''
        # Generated runtime type checks; only executed under __debug__.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ad483c37efd98c49b08db9ebc42c7603adf8455075a1f59b93f38d25d7ab7ea2)
            check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
            check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
        # All keyword arguments are bundled into the generated config struct
        # before being handed to the jsii kernel.
        config = CodepipelineWebhookConfig(
            authentication=authentication,
            filter=filter,
            name=name,
            target_action=target_action,
            target_pipeline=target_pipeline,
            authentication_configuration=authentication_configuration,
            id=id,
            tags=tags,
            tags_all=tags_all,
            connection=connection,
            count=count,
            depends_on=depends_on,
            for_each=for_each,
            lifecycle=lifecycle,
            provider=provider,
            provisioners=provisioners,
        )
        jsii.create(self.__class__, self, [scope, id_, config])

    # --- put_x methods: set nested configuration blocks -------------------

    @jsii.member(jsii_name="putAuthenticationConfiguration")
    def put_authentication_configuration(
        self,
        *,
        allowed_ip_range: typing.Optional[builtins.str] = None,
        secret_token: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param allowed_ip_range: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#allowed_ip_range CodepipelineWebhook#allowed_ip_range}.
        :param secret_token: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#secret_token CodepipelineWebhook#secret_token}.
        '''
        value = CodepipelineWebhookAuthenticationConfiguration(
            allowed_ip_range=allowed_ip_range, secret_token=secret_token
        )
        return typing.cast(None, jsii.invoke(self, "putAuthenticationConfiguration", [value]))

    @jsii.member(jsii_name="putFilter")
    def put_filter(
        self,
        value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["CodepipelineWebhookFilter", typing.Dict[builtins.str, typing.Any]]]],
    ) -> None:
        '''
        :param value: -
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c8d4a454304ea7be858f759835c0556b4e1d6d42271bce366d2264adee1972ec)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        return typing.cast(None, jsii.invoke(self, "putFilter", [value]))

    # --- reset_x methods: clear optional attributes back to defaults ------

    @jsii.member(jsii_name="resetAuthenticationConfiguration")
    def reset_authentication_configuration(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetAuthenticationConfiguration", []))

    @jsii.member(jsii_name="resetId")
    def reset_id(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetId", []))

    @jsii.member(jsii_name="resetTags")
    def reset_tags(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetTags", []))

    @jsii.member(jsii_name="resetTagsAll")
    def reset_tags_all(self) -> None:
        return typing.cast(None, jsii.invoke(self, "resetTagsAll", []))

    @jsii.member(jsii_name="synthesizeAttributes")
    def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))

    @jsii.python.classproperty
    @jsii.member(jsii_name="tfResourceType")
    def TF_RESOURCE_TYPE(cls) -> builtins.str:
        return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))

    # --- computed / nested attributes --------------------------------------

    @builtins.property
    @jsii.member(jsii_name="arn")
    def arn(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "arn"))

    @builtins.property
    @jsii.member(jsii_name="authenticationConfiguration")
    def authentication_configuration(
        self,
    ) -> "CodepipelineWebhookAuthenticationConfigurationOutputReference":
        return typing.cast("CodepipelineWebhookAuthenticationConfigurationOutputReference", jsii.get(self, "authenticationConfiguration"))

    @builtins.property
    @jsii.member(jsii_name="filter")
    def filter(self) -> "CodepipelineWebhookFilterList":
        return typing.cast("CodepipelineWebhookFilterList", jsii.get(self, "filter"))

    @builtins.property
    @jsii.member(jsii_name="url")
    def url(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "url"))

    # --- x_input properties: raw configured values (pre-synthesis) ---------

    @builtins.property
    @jsii.member(jsii_name="authenticationConfigurationInput")
    def authentication_configuration_input(
        self,
    ) -> typing.Optional["CodepipelineWebhookAuthenticationConfiguration"]:
        return typing.cast(typing.Optional["CodepipelineWebhookAuthenticationConfiguration"], jsii.get(self, "authenticationConfigurationInput"))

    @builtins.property
    @jsii.member(jsii_name="authenticationInput")
    def authentication_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "authenticationInput"))

    @builtins.property
    @jsii.member(jsii_name="filterInput")
    def filter_input(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["CodepipelineWebhookFilter"]]]:
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["CodepipelineWebhookFilter"]]], jsii.get(self, "filterInput"))

    @builtins.property
    @jsii.member(jsii_name="idInput")
    def id_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))

    @builtins.property
    @jsii.member(jsii_name="nameInput")
    def name_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))

    @builtins.property
    @jsii.member(jsii_name="tagsAllInput")
    def tags_all_input(
        self,
    ) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
        return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], jsii.get(self, "tagsAllInput"))

    @builtins.property
    @jsii.member(jsii_name="tagsInput")
    def tags_input(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
        return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], jsii.get(self, "tagsInput"))

    @builtins.property
    @jsii.member(jsii_name="targetActionInput")
    def target_action_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "targetActionInput"))

    @builtins.property
    @jsii.member(jsii_name="targetPipelineInput")
    def target_pipeline_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "targetPipelineInput"))

    # --- read/write attribute properties ------------------------------------

    @builtins.property
    @jsii.member(jsii_name="authentication")
    def authentication(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "authentication"))

    @authentication.setter
    def authentication(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__61d2493b45c5af24528f46259ba51104105b503a7c200093b83345fe4dc6278a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "authentication", value)

    @builtins.property
    @jsii.member(jsii_name="id")
    def id(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "id"))

    @id.setter
    def id(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a97cc5633098a7f48aad0f0b0e0a394a63b98f78cbc95004ab381f0550308db4)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "id", value)

    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "name"))

    @name.setter
    def name(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__275a59535e4a389886e085b308fa8da0f13f864a0aa8c2cb7efe3135dff7a9b2)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "name", value)

    @builtins.property
    @jsii.member(jsii_name="tags")
    def tags(self) -> typing.Mapping[builtins.str, builtins.str]:
        return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "tags"))

    @tags.setter
    def tags(self, value: typing.Mapping[builtins.str, builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3202c81acdd0bbe4f91be2b635d27d02c17bc4b2c2c167fc0746df7cbd36652c)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "tags", value)

    @builtins.property
    @jsii.member(jsii_name="tagsAll")
    def tags_all(self) -> typing.Mapping[builtins.str, builtins.str]:
        return typing.cast(typing.Mapping[builtins.str, builtins.str], jsii.get(self, "tagsAll"))

    @tags_all.setter
    def tags_all(self, value: typing.Mapping[builtins.str, builtins.str]) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5722712bfb18c74a32c64a0be4b3ab2e6dad2bbce83a8b2595c57e9f38a982b9)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "tagsAll", value)

    @builtins.property
    @jsii.member(jsii_name="targetAction")
    def target_action(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "targetAction"))

    @target_action.setter
    def target_action(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__006ef2c29f1b725893533cc61909962208baa3cbf7c529403f8860d11eb8a5a9)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "targetAction", value)

    @builtins.property
    @jsii.member(jsii_name="targetPipeline")
    def target_pipeline(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "targetPipeline"))

    @target_pipeline.setter
    def target_pipeline(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ff1746165cf6b2f4ede90a34c221eec5ac422f95008d7a999a885a91187a528e)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "targetPipeline", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhookAuthenticationConfiguration",
    jsii_struct_bases=[],
    name_mapping={"allowed_ip_range": "allowedIpRange", "secret_token": "secretToken"},
)
class CodepipelineWebhookAuthenticationConfiguration:
    # NOTE(review): jsii-generated struct; the decorator metadata and the
    # name_mapping keys are part of the jsii runtime contract — do not rename.
    # Values are stored in the internal ``_values`` dict with only the
    # explicitly supplied keys present.
    def __init__(
        self,
        *,
        allowed_ip_range: typing.Optional[builtins.str] = None,
        secret_token: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param allowed_ip_range: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#allowed_ip_range CodepipelineWebhook#allowed_ip_range}.
        :param secret_token: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#secret_token CodepipelineWebhook#secret_token}.
        '''
        # Generated runtime type checks; only executed under __debug__.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__cc5d18ee166701823c7204f3da1f881916c1cb30954fe64057194ba5f193ab16)
            check_type(argname="argument allowed_ip_range", value=allowed_ip_range, expected_type=type_hints["allowed_ip_range"])
            check_type(argname="argument secret_token", value=secret_token, expected_type=type_hints["secret_token"])
        self._values: typing.Dict[builtins.str, typing.Any] = {}
        if allowed_ip_range is not None:
            self._values["allowed_ip_range"] = allowed_ip_range
        if secret_token is not None:
            self._values["secret_token"] = secret_token

    @builtins.property
    def allowed_ip_range(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#allowed_ip_range CodepipelineWebhook#allowed_ip_range}.'''
        result = self._values.get("allowed_ip_range")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def secret_token(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#secret_token CodepipelineWebhook#secret_token}.'''
        result = self._values.get("secret_token")
        return typing.cast(typing.Optional[builtins.str], result)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Structs compare by class and stored values.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "CodepipelineWebhookAuthenticationConfiguration(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class CodepipelineWebhookAuthenticationConfigurationOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhookAuthenticationConfigurationOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d7da107ab02ab2febf396d8b8cec0134bf2b63005194fddb4c13868d4d88c7ba)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetAllowedIpRange")
def reset_allowed_ip_range(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetAllowedIpRange", []))
@jsii.member(jsii_name="resetSecretToken")
def reset_secret_token(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSecretToken", []))
@builtins.property
@jsii.member(jsii_name="allowedIpRangeInput")
def allowed_ip_range_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "allowedIpRangeInput"))
@builtins.property
@jsii.member(jsii_name="secretTokenInput")
def secret_token_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "secretTokenInput"))
@builtins.property
@jsii.member(jsii_name="allowedIpRange")
def allowed_ip_range(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "allowedIpRange"))
@allowed_ip_range.setter
def allowed_ip_range(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8fc10f6794b482a407db9b30552c7c0868ce372a22d2b5666f289aa9a8d648df)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "allowedIpRange", value)
@builtins.property
@jsii.member(jsii_name="secretToken")
def secret_token(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "secretToken"))
@secret_token.setter
def secret_token(self, value: builtins.str) -> None:
    # Runtime type validation only happens in debug builds (skipped under -O).
    if __debug__:
        type_hints = typing.get_type_hints(_typecheckingstub__e266e2395ef87efb8d7c57d0364e324842bc37a6811c1a17683e495ebb0e2869)
        check_type(argname="argument value", value=value, expected_type=type_hints["value"])
    jsii.set(self, "secretToken", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
    self,
) -> typing.Optional[CodepipelineWebhookAuthenticationConfiguration]:
    '''The backing struct this reference wraps, or ``None`` if not set.'''
    return typing.cast(typing.Optional[CodepipelineWebhookAuthenticationConfiguration], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
    self,
    value: typing.Optional[CodepipelineWebhookAuthenticationConfiguration],
) -> None:
    # Runtime type validation only happens in debug builds (skipped under -O).
    if __debug__:
        type_hints = typing.get_type_hints(_typecheckingstub__a3b0aca236714f9012961ba24f917b0b744e24db36402f1838468bf677ce0947)
        check_type(argname="argument value", value=value, expected_type=type_hints["value"])
    jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhookConfig",
    jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
    name_mapping={
        "connection": "connection",
        "count": "count",
        "depends_on": "dependsOn",
        "for_each": "forEach",
        "lifecycle": "lifecycle",
        "provider": "provider",
        "provisioners": "provisioners",
        "authentication": "authentication",
        "filter": "filter",
        "name": "name",
        "target_action": "targetAction",
        "target_pipeline": "targetPipeline",
        "authentication_configuration": "authenticationConfiguration",
        "id": "id",
        "tags": "tags",
        "tags_all": "tagsAll",
    },
)
class CodepipelineWebhookConfig(_cdktf_9a9027ec.TerraformMetaArguments):
    '''Struct of all arguments accepted by the ``aws_codepipeline_webhook`` resource, combined with the standard Terraform meta-arguments inherited from :class:`TerraformMetaArguments`.'''

    def __init__(
        self,
        *,
        connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
        count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
        depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
        for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
        provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
        authentication: builtins.str,
        filter: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["CodepipelineWebhookFilter", typing.Dict[builtins.str, typing.Any]]]],
        name: builtins.str,
        target_action: builtins.str,
        target_pipeline: builtins.str,
        authentication_configuration: typing.Optional[typing.Union[CodepipelineWebhookAuthenticationConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
        id: typing.Optional[builtins.str] = None,
        tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
        tags_all: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
    ) -> None:
        '''
        :param connection:
        :param count:
        :param depends_on:
        :param for_each:
        :param lifecycle:
        :param provider:
        :param provisioners:
        :param authentication: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#authentication CodepipelineWebhook#authentication}.
        :param filter: filter block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#filter CodepipelineWebhook#filter}
        :param name: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#name CodepipelineWebhook#name}.
        :param target_action: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#target_action CodepipelineWebhook#target_action}.
        :param target_pipeline: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#target_pipeline CodepipelineWebhook#target_pipeline}.
        :param authentication_configuration: authentication_configuration block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#authentication_configuration CodepipelineWebhook#authentication_configuration}
        :param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#id CodepipelineWebhook#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param tags: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#tags CodepipelineWebhook#tags}.
        :param tags_all: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#tags_all CodepipelineWebhook#tags_all}.
        '''
        # Plain-dict inputs are normalized into their struct classes so the
        # rest of the code only ever sees one canonical type.
        if isinstance(lifecycle, dict):
            lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
        if isinstance(authentication_configuration, dict):
            authentication_configuration = CodepipelineWebhookAuthenticationConfiguration(**authentication_configuration)
        # Runtime type validation only in debug builds (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__20dd0632bf77c16ce8078cde3f6c9862c26b54a91600568721ee09eef001edd2)
            check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
            check_type(argname="argument count", value=count, expected_type=type_hints["count"])
            check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
            check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
            check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
            check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
            check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
            check_type(argname="argument authentication", value=authentication, expected_type=type_hints["authentication"])
            check_type(argname="argument filter", value=filter, expected_type=type_hints["filter"])
            check_type(argname="argument name", value=name, expected_type=type_hints["name"])
            check_type(argname="argument target_action", value=target_action, expected_type=type_hints["target_action"])
            check_type(argname="argument target_pipeline", value=target_pipeline, expected_type=type_hints["target_pipeline"])
            check_type(argname="argument authentication_configuration", value=authentication_configuration, expected_type=type_hints["authentication_configuration"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
            check_type(argname="argument tags", value=tags, expected_type=type_hints["tags"])
            check_type(argname="argument tags_all", value=tags_all, expected_type=type_hints["tags_all"])
        # Required arguments are stored unconditionally; optional ones only
        # when supplied, so absent keys mean "use the provider default".
        self._values: typing.Dict[builtins.str, typing.Any] = {
            "authentication": authentication,
            "filter": filter,
            "name": name,
            "target_action": target_action,
            "target_pipeline": target_pipeline,
        }
        if connection is not None:
            self._values["connection"] = connection
        if count is not None:
            self._values["count"] = count
        if depends_on is not None:
            self._values["depends_on"] = depends_on
        if for_each is not None:
            self._values["for_each"] = for_each
        if lifecycle is not None:
            self._values["lifecycle"] = lifecycle
        if provider is not None:
            self._values["provider"] = provider
        if provisioners is not None:
            self._values["provisioners"] = provisioners
        if authentication_configuration is not None:
            self._values["authentication_configuration"] = authentication_configuration
        if id is not None:
            self._values["id"] = id
        if tags is not None:
            self._values["tags"] = tags
        if tags_all is not None:
            self._values["tags_all"] = tags_all

    @builtins.property
    def connection(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("connection")
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)

    @builtins.property
    def count(
        self,
    ) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("count")
        return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)

    @builtins.property
    def depends_on(
        self,
    ) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("depends_on")
        return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)

    @builtins.property
    def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
        '''
        :stability: experimental
        '''
        result = self._values.get("for_each")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)

    @builtins.property
    def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
        '''
        :stability: experimental
        '''
        result = self._values.get("lifecycle")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)

    @builtins.property
    def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provider")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)

    @builtins.property
    def provisioners(
        self,
    ) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provisioners")
        return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)

    @builtins.property
    def authentication(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#authentication CodepipelineWebhook#authentication}.'''
        result = self._values.get("authentication")
        assert result is not None, "Required property 'authentication' is missing"
        return typing.cast(builtins.str, result)

    @builtins.property
    def filter(
        self,
    ) -> typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["CodepipelineWebhookFilter"]]:
        '''filter block.

        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#filter CodepipelineWebhook#filter}
        '''
        result = self._values.get("filter")
        assert result is not None, "Required property 'filter' is missing"
        return typing.cast(typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["CodepipelineWebhookFilter"]], result)

    @builtins.property
    def name(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#name CodepipelineWebhook#name}.'''
        result = self._values.get("name")
        assert result is not None, "Required property 'name' is missing"
        return typing.cast(builtins.str, result)

    @builtins.property
    def target_action(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#target_action CodepipelineWebhook#target_action}.'''
        result = self._values.get("target_action")
        assert result is not None, "Required property 'target_action' is missing"
        return typing.cast(builtins.str, result)

    @builtins.property
    def target_pipeline(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#target_pipeline CodepipelineWebhook#target_pipeline}.'''
        result = self._values.get("target_pipeline")
        assert result is not None, "Required property 'target_pipeline' is missing"
        return typing.cast(builtins.str, result)

    @builtins.property
    def authentication_configuration(
        self,
    ) -> typing.Optional[CodepipelineWebhookAuthenticationConfiguration]:
        '''authentication_configuration block.

        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#authentication_configuration CodepipelineWebhook#authentication_configuration}
        '''
        result = self._values.get("authentication_configuration")
        return typing.cast(typing.Optional[CodepipelineWebhookAuthenticationConfiguration], result)

    @builtins.property
    def id(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#id CodepipelineWebhook#id}.

        Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
        If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        '''
        result = self._values.get("id")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def tags(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#tags CodepipelineWebhook#tags}.'''
        result = self._values.get("tags")
        return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], result)

    @builtins.property
    def tags_all(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#tags_all CodepipelineWebhook#tags_all}.'''
        result = self._values.get("tags_all")
        return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], result)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Value semantics: two configs are equal iff all stored values match.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "CodepipelineWebhookConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
@jsii.data_type(
    jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhookFilter",
    jsii_struct_bases=[],
    name_mapping={"json_path": "jsonPath", "match_equals": "matchEquals"},
)
class CodepipelineWebhookFilter:
    '''Value struct for one ``filter`` block of an ``aws_codepipeline_webhook`` resource.'''

    def __init__(self, *, json_path: builtins.str, match_equals: builtins.str) -> None:
        '''
        :param json_path: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#json_path CodepipelineWebhook#json_path}.
        :param match_equals: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#match_equals CodepipelineWebhook#match_equals}.
        '''
        # Runtime type validation only in debug builds (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f8ce7ed2fe052a4616bc4a92ab918b939b0f5ebdefd9e98cc6a0b1358c7da887)
            check_type(argname="argument json_path", value=json_path, expected_type=type_hints["json_path"])
            check_type(argname="argument match_equals", value=match_equals, expected_type=type_hints["match_equals"])
        # Both properties are required, so the backing dict is always complete.
        self._values: typing.Dict[builtins.str, typing.Any] = dict(
            json_path=json_path,
            match_equals=match_equals,
        )

    @builtins.property
    def json_path(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#json_path CodepipelineWebhook#json_path}.'''
        stored = self._values.get("json_path")
        assert stored is not None, "Required property 'json_path' is missing"
        return typing.cast(builtins.str, stored)

    @builtins.property
    def match_equals(self) -> builtins.str:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/aws/5.15.0/docs/resources/codepipeline_webhook#match_equals CodepipelineWebhook#match_equals}.'''
        stored = self._values.get("match_equals")
        assert stored is not None, "Required property 'match_equals' is missing"
        return typing.cast(builtins.str, stored)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Structural equality over the stored values.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join("{}={}".format(k, repr(v)) for k, v in self._values.items())
        return "CodepipelineWebhookFilter(%s)" % rendered
class CodepipelineWebhookFilterList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhookFilterList",
):
    '''Indexed accessor over the ``filter`` blocks of a CodepipelineWebhook resource.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime type validation only in debug builds (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__6c755179d3e0beca76835e2ea51477557eb55a9315bcb0aca1bb8f3181f2fd10)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(self, index: jsii.Number) -> "CodepipelineWebhookFilterOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__d5bcd65308e646b7fb213380858307eb92d7ec4404bc8599eb8019e4616a3177)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("CodepipelineWebhookFilterOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__74ac7bf84cc5b4aa1097f0f7938c718da9c30e06f89321b7558e1d42700aff5d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__72e84aca712ef365ddf0f545f1a3e5b0dfd7a2c2ed3ca3fd6b59c7db956d482f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f257d4d028a0aaf94ef1142bb6133312243e5e5c12fe04830a710f47e775ff8c)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[CodepipelineWebhookFilter]]]:
        '''The backing value this list wraps, or ``None`` if not set.'''
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[CodepipelineWebhookFilter]]], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[CodepipelineWebhookFilter]]],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7694d8b955411ab03f55102f5396c40ddd11b6ab6910a19b5ba9925441ce951a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
class CodepipelineWebhookFilterOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-aws.codepipelineWebhook.CodepipelineWebhookFilterOutputReference",
):
    '''Accessor for a single ``filter`` block at a given index of the parent resource.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime type validation only in debug builds (skipped under -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a072c3db2eb5f1c54978da5cdb9c97cbe87d223052e0415b0ec2dd29d2027658)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="jsonPathInput")
    def json_path_input(self) -> typing.Optional[builtins.str]:
        '''The raw value configured for ``json_path``, or ``None`` if unset.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "jsonPathInput"))

    @builtins.property
    @jsii.member(jsii_name="matchEqualsInput")
    def match_equals_input(self) -> typing.Optional[builtins.str]:
        '''The raw value configured for ``match_equals``, or ``None`` if unset.'''
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "matchEqualsInput"))

    @builtins.property
    @jsii.member(jsii_name="jsonPath")
    def json_path(self) -> builtins.str:
        '''Read the ``json_path`` attribute through the jsii runtime.'''
        return typing.cast(builtins.str, jsii.get(self, "jsonPath"))

    @json_path.setter
    def json_path(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__8f5e9e99f2d30faf3dbcefdefbb06294f9d4cba9b6788ad5b66534394aec09e1)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "jsonPath", value)

    @builtins.property
    @jsii.member(jsii_name="matchEquals")
    def match_equals(self) -> builtins.str:
        '''Read the ``match_equals`` attribute through the jsii runtime.'''
        return typing.cast(builtins.str, jsii.get(self, "matchEquals"))

    @match_equals.setter
    def match_equals(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ce6738f6cb4fdf0303303333de866f780d38b7c04a79c8cd5b124d9710e32ded)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "matchEquals", value)

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, CodepipelineWebhookFilter]]:
        '''The backing struct this reference wraps, or ``None`` if not set.'''
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, CodepipelineWebhookFilter]], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, CodepipelineWebhookFilter]],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a8cd9c04c922900052245e5978c099bfabd89fa799d06dfad0fc16f37c13487e)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
# Public API of this module; publication.publish() finalizes the jsii
# type registration for the names listed here.
__all__ = [
    "CodepipelineWebhook",
    "CodepipelineWebhookAuthenticationConfiguration",
    "CodepipelineWebhookAuthenticationConfigurationOutputReference",
    "CodepipelineWebhookConfig",
    "CodepipelineWebhookFilter",
    "CodepipelineWebhookFilterList",
    "CodepipelineWebhookFilterOutputReference",
]

publication.publish()
# The functions below are signature-only stubs: typing.get_type_hints() is run
# against them at call time to obtain the expected argument types for
# check_type. They are never invoked, so their bodies are intentionally empty.
def _typecheckingstub__ad483c37efd98c49b08db9ebc42c7603adf8455075a1f59b93f38d25d7ab7ea2(
    scope: _constructs_77d1e7e8.Construct,
    id_: builtins.str,
    *,
    authentication: builtins.str,
    filter: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[CodepipelineWebhookFilter, typing.Dict[builtins.str, typing.Any]]]],
    name: builtins.str,
    target_action: builtins.str,
    target_pipeline: builtins.str,
    authentication_configuration: typing.Optional[typing.Union[CodepipelineWebhookAuthenticationConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    id: typing.Optional[builtins.str] = None,
    tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
    tags_all: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
    connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
    count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
    depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
    for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
    lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
    provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
    provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__c8d4a454304ea7be858f759835c0556b4e1d6d42271bce366d2264adee1972ec(
    value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[CodepipelineWebhookFilter, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__61d2493b45c5af24528f46259ba51104105b503a7c200093b83345fe4dc6278a(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__a97cc5633098a7f48aad0f0b0e0a394a63b98f78cbc95004ab381f0550308db4(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__275a59535e4a389886e085b308fa8da0f13f864a0aa8c2cb7efe3135dff7a9b2(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__3202c81acdd0bbe4f91be2b635d27d02c17bc4b2c2c167fc0746df7cbd36652c(
    value: typing.Mapping[builtins.str, builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__5722712bfb18c74a32c64a0be4b3ab2e6dad2bbce83a8b2595c57e9f38a982b9(
    value: typing.Mapping[builtins.str, builtins.str],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__006ef2c29f1b725893533cc61909962208baa3cbf7c529403f8860d11eb8a5a9(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__ff1746165cf6b2f4ede90a34c221eec5ac422f95008d7a999a885a91187a528e(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__cc5d18ee166701823c7204f3da1f881916c1cb30954fe64057194ba5f193ab16(
    *,
    allowed_ip_range: typing.Optional[builtins.str] = None,
    secret_token: typing.Optional[builtins.str] = None,
) -> None:
    """Type checking stubs"""
    pass
# Signature-only stubs consumed by typing.get_type_hints()/check_type; never called.
def _typecheckingstub__d7da107ab02ab2febf396d8b8cec0134bf2b63005194fddb4c13868d4d88c7ba(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__8fc10f6794b482a407db9b30552c7c0868ce372a22d2b5666f289aa9a8d648df(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__e266e2395ef87efb8d7c57d0364e324842bc37a6811c1a17683e495ebb0e2869(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__a3b0aca236714f9012961ba24f917b0b744e24db36402f1838468bf677ce0947(
    value: typing.Optional[CodepipelineWebhookAuthenticationConfiguration],
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__20dd0632bf77c16ce8078cde3f6c9862c26b54a91600568721ee09eef001edd2(
    *,
    connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
    count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
    depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
    for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
    lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
    provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
    provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
    authentication: builtins.str,
    filter: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[CodepipelineWebhookFilter, typing.Dict[builtins.str, typing.Any]]]],
    name: builtins.str,
    target_action: builtins.str,
    target_pipeline: builtins.str,
    authentication_configuration: typing.Optional[typing.Union[CodepipelineWebhookAuthenticationConfiguration, typing.Dict[builtins.str, typing.Any]]] = None,
    id: typing.Optional[builtins.str] = None,
    tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
    tags_all: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
) -> None:
    """Type checking stubs"""
    pass

def _typecheckingstub__f8ce7ed2fe052a4616bc4a92ab918b939b0f5ebdefd9e98cc6a0b1358c7da887(
    *,
    json_path: builtins.str,
    match_equals: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__6c755179d3e0beca76835e2ea51477557eb55a9315bcb0aca1bb8f3181f2fd10(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__d5bcd65308e646b7fb213380858307eb92d7ec4404bc8599eb8019e4616a3177(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__74ac7bf84cc5b4aa1097f0f7938c718da9c30e06f89321b7558e1d42700aff5d(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__72e84aca712ef365ddf0f545f1a3e5b0dfd7a2c2ed3ca3fd6b59c7db956d482f(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__f257d4d028a0aaf94ef1142bb6133312243e5e5c12fe04830a710f47e775ff8c(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__7694d8b955411ab03f55102f5396c40ddd11b6ab6910a19b5ba9925441ce951a(
    value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[CodepipelineWebhookFilter]]],
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__a072c3db2eb5f1c54978da5cdb9c97cbe87d223052e0415b0ec2dd29d2027658(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__8f5e9e99f2d30faf3dbcefdefbb06294f9d4cba9b6788ad5b66534394aec09e1(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__ce6738f6cb4fdf0303303333de866f780d38b7c04a79c8cd5b124d9710e32ded(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
# Auto-generated jsii type-checking stub: signature-only; the body is a no-op.
def _typecheckingstub__a8cd9c04c922900052245e5978c099bfabd89fa799d06dfad0fc16f37c13487e(
    value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, CodepipelineWebhookFilter]],
) -> None:
    """Type checking stubs"""
    pass
/apache-ariatosca-0.2.0.tar.gz/apache-ariatosca-0.2.0/README.rst | ARIA
====
|Build Status| |Appveyor Build Status| |License| |PyPI release| |Python Versions| |Wheel|
|Contributors| |Open Pull Requests| |Closed Pull Requests|
What is ARIA?
-------------
`ARIA <http://ariatosca.incubator.apache.org/>`__ is an open-source,
`TOSCA <https://www.oasis-open.org/committees/tosca/>`__-based, lightweight library and CLI for
orchestration and for consumption by projects building TOSCA-based solutions for resources and
services orchestration.
ARIA can be utilized by any organization that wants to implement TOSCA-based orchestration in its
solutions, whether a multi-cloud enterprise application, or an NFV or SDN solution for multiple
virtual infrastructure managers.
With ARIA, you can utilize TOSCA's cloud portability out-of-the-box, to develop, test and run your
applications, from template to deployment.
ARIA is an incubation project under the `Apache Software Foundation <https://www.apache.org/>`__.
Installation
------------
ARIA is `available on PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__.
ARIA requires Python 2.7. Python 3 is currently not supported.
To install ARIA directly from PyPI (using a ``wheel``), use::
pip install --upgrade pip setuptools
pip install apache-ariatosca
To install ARIA from source, download the source tarball from
`PyPI <https://pypi.python.org/pypi/apache-ariatosca>`__, extract and ``cd`` into the extract dir,
and run::
pip install --upgrade pip setuptools
pip install .
| The source package comes along with relevant examples, documentation, ``requirements.txt`` (for
| installing specifically the frozen dependencies' versions with which ARIA was tested) and more.
|
| ARIA has additional optional dependencies. These are required for running operations over SSH.
| Below are instructions on how to install these dependencies, including required system
| dependencies per OS.
|
| Note: These dependencies may have varying licenses which may not be compatible with Apache license
| 2.0.
|
**Ubuntu/Debian** (tested on Ubuntu 14.04, Ubuntu 16.04)::
apt-get install -y python-dev gcc libffi-dev libssl-dev
pip install apache-ariatosca[ssh]
**CentOS/Fedora** (tested on CentOS 6.6, CentOS 7)::
yum install -y python-devel gcc libffi-devel openssl-devel
pip install apache-ariatosca[ssh]
**Arch Linux**::
pacman -Syu --noconfirm python2 gcc libffi openssl
pip2 install apache-ariatosca[ssh]
**Windows** (tested on Windows 10)::
# no additional system requirements are needed
pip install apache-ariatosca[ssh]
**macOS**::
# TODO
To install ``pip``, either use your operating system's package management system, or run::
wget http://bootstrap.pypa.io/get-pip.py
python get-pip.py
Getting Started
---------------
This section will describe how to run a simple "Hello World" example.
First, provide ARIA with the ARIA "hello world" service-template and name it (e.g.
``my-service-template``)::
aria service-templates store examples/hello-world/hello-world.yaml my-service-template
Now create a service based on this service-template and name it (e.g. ``my-service``)::
aria services create my-service -t my-service-template
Finally, start an ``install`` workflow execution on ``my-service`` like so::
aria executions start install -s my-service
You should now have a simple web-server running on your local machine. You can try visiting
``http://localhost:9090`` to view your deployed application.
To uninstall and clean your environment, follow these steps::
aria executions start uninstall -s my-service
aria services delete my-service
aria service-templates delete my-service-template
Resources
---------
- `Main site <http://ariatosca.incubator.apache.org/>`__
- `API and CLI documentation <http://ariatosca.incubator.apache.org/docs/html/>`__
- `Wiki <https://cwiki.apache.org/confluence/display/AriaTosca>`__
- `Releases <https://dist.apache.org/repos/dist/dev/incubator/ariatosca//>`__
- `Issue tracker <https://issues.apache.org/jira/browse/ARIA>`__
- Dev mailing list: [email protected]
- User mailing list: [email protected]
Subscribe by sending a mail to ``<group>[email protected]`` (e.g.
``[email protected]``). See information on how to subscribe to mailing
lists `here <https://www.apache.org/foundation/mailinglists.html>`__.
For past correspondence, see the
`dev mailing list archive <https://lists.apache.org/[email protected]>`__.
License
-------
ARIA is licensed under the
`Apache License 2.0 <https://github.com/apache/incubator-ariatosca/blob/master/LICENSE>`__.
Contribution
------------
You are welcome and encouraged to participate and contribute to the ARIA project.
Please see our guide to
`Contributing to ARIA
<https://cwiki.apache.org/confluence/display/ARIATOSCA/Contributing+to+ARIA>`__.
Feel free to also provide feedback on the mailing lists (see `Resources <#user-content-resources>`__
section).
Code of Conduct
---------------
The ARIA TOSCA Project follows
`the Apache Code of Conduct <https://www.apache.org/foundation/policies/conduct.html>`__.
.. |Build Status| image:: https://img.shields.io/travis/apache/incubator-ariatosca/master.svg
:target: https://travis-ci.org/apache/incubator-ariatosca
.. |Appveyor Build Status| image:: https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/incubator-ariatosca/master.svg
:target: https://ci.appveyor.com/project/ApacheSoftwareFoundation/incubator-ariatosca/history
.. |License| image:: https://img.shields.io/github/license/apache/incubator-ariatosca.svg
:target: http://www.apache.org/licenses/LICENSE-2.0
.. |PyPI release| image:: https://img.shields.io/pypi/v/apache-ariatosca.svg
:target: https://pypi.python.org/pypi/apache-ariatosca
.. |Python Versions| image:: https://img.shields.io/pypi/pyversions/apache-ariatosca.svg
.. |Wheel| image:: https://img.shields.io/pypi/wheel/apache-ariatosca.svg
.. |Contributors| image:: https://img.shields.io/github/contributors/apache/incubator-ariatosca.svg
.. |Open Pull Requests| image:: https://img.shields.io/github/issues-pr/apache/incubator-ariatosca.svg
:target: https://github.com/apache/incubator-ariatosca/pulls
.. |Closed Pull Requests| image:: https://img.shields.io/github/issues-pr-closed-raw/apache/incubator-ariatosca.svg
:target: https://github.com/apache/incubator-ariatosca/pulls?q=is%3Apr+is%3Aclosed
| PypiClean |
/pyEMMA-2.5.12.tar.gz/pyEMMA-2.5.12/pyemma/msm/models/pcca.py | from deeptime.markov import PCCAModel, pcca
from pyemma._base.serialization.serialization import SerializableMixIn
class PCCA(PCCAModel, SerializableMixIn):
    """
    PCCA+ spectral clustering method with optimized memberships [1]_

    Clusters the first m eigenvectors of a transition matrix in order to cluster the states.
    This function does not assume that the transition matrix is fully connected. Disconnected sets
    will automatically define the first metastable states, with perfect membership assignments.

    Parameters
    ----------
    P : ndarray (n,n)
        Transition matrix.
    m : int
        Number of clusters to group to.

    References
    ----------
    [1] S. Roeblitz and M. Weber, Fuzzy spectral clustering by PCCA+:
        application to Markov state models and data classification.
        Adv Data Anal Classif 7, 147-179 (2013).
    [2] F. Noe, multiset PCCA and HMMs, in preparation.
    [3] F. Noe, H. Wu, J.-H. Prinz and N. Plattner:
        Projected and hidden Markov models for calculating kinetics and metastable states of complex molecules
        J. Chem. Phys. 139, 184114 (2013)
    """
    # NOTE: the docstring above must stay the *first* statement of the class;
    # in the previous revision it was placed after __serialize_version, which
    # turned it into a no-op string literal and left PCCA.__doc__ empty.
    __serialize_version = 1

    def __init__(self, P, m):
        # Delegate the actual PCCA+ computation to deeptime, then expose the
        # coarse-grained results through the legacy pyemma attribute names.
        dt_pcca = pcca(P, m)
        super().__init__(transition_matrix_coarse=dt_pcca.coarse_grained_transition_matrix,
                         pi_coarse=dt_pcca.coarse_grained_stationary_probability,
                         memberships=dt_pcca.memberships,
                         metastable_distributions=dt_pcca.metastable_distributions)
        # Keep the fine-grained transition matrix for the transition_matrix property.
        self.P = P

    @property
    def transition_matrix(self):
        """The original (fine-grained) transition matrix P."""
        return self.P

    @property
    def stationary_probability(self):
        """Coarse-grained stationary probability of the metastable sets."""
        return self.coarse_grained_stationary_probability

    @property
    def output_probabilities(self):
        """Alias for the metastable output distributions."""
        return self.metastable_distributions

    @property
    def metastable_sets(self):
        """Microstates grouped per metastable set (delegates to the deeptime model)."""
        return self.sets

    @property
    def metastable_assignment(self):
        """Crisp metastable-set assignment per microstate (delegates to the deeptime model)."""
        return self.assignments
/Diofant-0.14.0a2.tar.gz/Diofant-0.14.0a2/diofant/tensor/index_methods.py | import functools
from ..core import Function
from .indexed import Idx, Indexed
class IndexConformanceException(Exception):
    """Raised when terms of a sum carry inconsistent (non-matching) outer indexes."""
def _remove_repeated(inds):
    """Split *inds* into its unique and its repeated members.

    Returns the set of objects that occur exactly once and a tuple of the
    objects that occur more than once (each listed once, in first-seen order).

    >>> l1 = [1, 2, 3, 2]
    >>> _remove_repeated(l1)
    ({1, 3}, (2,))
    """
    occurrences = {}
    for idx in inds:
        occurrences[idx] = occurrences.get(idx, 0) + 1
    uniques = {idx for idx in inds if occurrences[idx] == 1}
    repeated = tuple(idx for idx, count in occurrences.items() if count > 1)
    return uniques, repeated
def _get_indices_Mul(expr, return_dummies=False):
    """Determine the outer indices of a Mul object.

    >>> i, j, k = map(Idx, ['i', 'j', 'k'])
    >>> x = IndexedBase('x')
    >>> y = IndexedBase('y')
    >>> _get_indices_Mul(x[i, k]*y[j, k])
    ({i, j}, {})
    >>> _get_indices_Mul(x[i, k]*y[j, k], return_dummies=True)
    ({i, j}, {}, (k,))
    """
    # Concatenate the outer indices of every factor; an index repeated across
    # factors is a contraction and therefore becomes a dummy index.
    gathered = []
    for factor in expr.args:
        outer, _ = get_indices(factor)
        gathered.extend(outer)
    outer_inds, dummies = _remove_repeated(gathered)
    symmetry = {}
    if return_dummies:
        return outer_inds, symmetry, dummies
    return outer_inds, symmetry
def _get_indices_Pow(expr):
    """Determine outer indices of a power or an exponential.

    A power is considered a universal function, so the outer indices of a Pow
    are simply the union of the indices found in the base and in the exponent.
    This means that in the special case

        x[i]**2 = x[i]*x[i]    (1)

    the expression is *not* interpreted as a contraction of x[i] with itself,
    but rather as the function ``lambda y: y**2`` applied to each element of x
    (a universal function in numpy terms).  Allowing an interpretation of (1)
    as a contraction would require contravariant and covariant Idx subclasses
    (FIXME: not yet implemented).

    Expressions in the base or exponent are subject to contraction as usual,
    but an index that is present in the exponent is never contracted with its
    own base.  Indices within the same exponent can still be contracted with
    each other.

    >>> A = IndexedBase('A')
    >>> x = IndexedBase('x')
    >>> i, j, k = map(Idx, ['i', 'j', 'k'])

    >>> _get_indices_Pow(exp(A[i, j]*x[j]))
    ({i}, {})

    >>> _get_indices_Pow(Pow(x[i], x[i]))
    ({i}, {})

    >>> _get_indices_Pow(Pow(A[i, j]*x[j], x[i]))
    ({i}, {})
    """
    base, exponent = expr.as_base_exp()
    base_inds, _ = get_indices(base)
    exp_inds, _ = get_indices(exponent)
    # FIXME: symmetries from power needs to check special cases, else nothing
    return base_inds | exp_inds, {}
def _get_indices_Add(expr):
    """Determine outer indices of an Add object.

    In a sum, each term must have the same set of outer indices. A valid
    expression could be

        x(i)*y(j) - x(j)*y(i)

    But we do not allow expressions like:

        x(i)*y(j) - z(j)*z(j)

    FIXME: Add support for Numpy broadcasting

    >>> i, j, k = map(Idx, ['i', 'j', 'k'])
    >>> x = IndexedBase('x')
    >>> y = IndexedBase('y')
    >>> _get_indices_Add(x[i] + x[k]*y[i, k])
    ({i}, {})
    """
    per_term = [get_indices(term)[0] for term in expr.args]
    # Scalars broadcast, so only the indexed terms have to agree.
    indexed_terms = [s for s in per_term if s]
    if not indexed_terms:
        return set(), {}
    reference = indexed_terms[0]
    if any(s != reference for s in indexed_terms[1:]):
        raise IndexConformanceException(f'Indices are not consistent: {expr}')
    # FIXME: search for symmetries
    return reference, {}
def get_indices(expr):
    """Determine the outer indices of expression ``expr``

    By *outer* we mean indices that are not summation indices. Returns a set
    and a dict. The set contains outer indices and the dict contains
    information about index symmetries.

    Examples
    ========

    >>> x, y, A = map(IndexedBase, ['x', 'y', 'A'])
    >>> i, j = symbols('i j', integer=True)

    The indices of the total expression is determined, Repeated indices imply a
    summation, for instance the trace of a matrix A:

    >>> get_indices(A[i, i])
    (set(), {})

    In the case of many terms, the terms are required to have identical
    outer indices. Else an IndexConformanceException is raised.

    >>> get_indices(x[i] + A[i, j]*y[j])
    ({i}, {})

    :Exceptions:

    An IndexConformanceException means that the terms are not compatible, e.g.

    >>> get_indices(x[i] + y[j])
    Traceback (most recent call last):
        ...
    IndexConformanceException: Indices are not consistent: x(i) + y(j)

    .. warning::
       The concept of *outer* indices applies recursively, starting on the deepest
       level. This implies that dummies inside parenthesis are assumed to be
       summed first, so that the following expression is handled gracefully:

       >>> get_indices((x[i] + A[i, j]*y[j])*x[j])
       ({i, j}, {})

       This is correct and may appear convenient, but you need to be careful
       with this as Diofant will happily .expand() the product, if requested. The
       resulting expression would mix the outer ``j`` with the dummies inside
       the parenthesis, which makes it a different expression. To be on the
       safe side, it is best to avoid such ambiguities by using unique indices
       for all contractions that should be held separate.
    """
    # Single dispatch chain on the expression type; recursion happens via the
    # specialized helpers for Mul/Add/Pow.  The order of the tests matters and
    # mirrors the original implementation exactly.
    if isinstance(expr, Indexed):
        # Repeated indices on one Indexed object are summed away.
        outer, _ = _remove_repeated(expr.indices)
        return outer, {}
    elif expr is None:
        return set(), {}
    elif expr.is_Atom:
        return set(), {}
    elif isinstance(expr, Idx):
        return {expr}, {}
    elif expr.is_Mul:
        return _get_indices_Mul(expr)
    elif expr.is_Add:
        return _get_indices_Add(expr)
    elif expr.is_Pow:
        return _get_indices_Pow(expr)
    elif isinstance(expr, Function):
        # ufunc-like behaviour: collect indices from all arguments, but do not
        # interpret indices repeated across arguments as a summation.
        collected = set()
        for arg in expr.args:
            arg_inds, sym = get_indices(arg)
            collected |= arg_inds
        return collected, sym
    else:
        raise NotImplementedError(f'No specialized handling of type {type(expr)}')
def get_contraction_structure(expr):
    """Determine dummy indices of ``expr`` and describe its structure

    By *dummy* we mean indices that are summation indices.

    The structure of the expression is determined and described as follows:

    1) A conforming summation of Indexed objects is described with a dict where
       the keys are summation indices and the corresponding values are sets
       containing all terms for which the summation applies. All Add objects
       in the Diofant expression tree are described like this.

    2) For all nodes in the Diofant expression tree that are *not* of type Add, the
       following applies:

       If a node discovers contractions in one of its arguments, the node
       itself will be stored as a key in the dict. For that key, the
       corresponding value is a list of dicts, each of which is the result of a
       recursive call to get_contraction_structure(). The list contains only
       dicts for the non-trivial deeper contractions, omitting dicts with None
       as the one and only key.

    .. Note:: The presence of expressions among the dictionary keys indicates
       multiple levels of index contractions. A nested dict displays nested
       contractions and may itself contain dicts from a deeper level. In
       practical calculations the summation in the deepest nested level must be
       calculated first so that the outer expression can access the resulting
       indexed object.

    Examples
    ========

    >>> x, y, A = map(IndexedBase, ['x', 'y', 'A'])
    >>> i, j = map(Idx, ['i', 'j'])

    >>> get_contraction_structure(x[i]*y[i] + A[j, j])
    {(i,): {x[i]*y[i]}, (j,): {A[j, j]}}

    >>> get_contraction_structure(x[i]*y[j])
    {None: {x[i]*y[j]}}

    A multiplication of contracted factors results in nested dicts representing
    the internal contractions.

    >>> d = get_contraction_structure(x[i, i]*y[j, j])
    >>> sorted(d, key=default_sort_key)
    [None, x[i, i]*y[j, j]]

    In this case, the product has no contractions:

    >>> d[None]
    {x[i, i]*y[j, j]}

    Factors are contracted "first":

    >>> sorted(d[x[i, i]*y[j, j]], key=default_sort_key)
    [{(i,): {x[i, i]}}, {(j,): {y[j, j]}}]

    A parenthesized Add object is also returned as a nested dictionary. The
    term containing the parenthesis is a Mul with a contraction among the
    arguments, so it will be found as a key in the result. It stores the
    dictionary resulting from a recursive call on the Add expression.

    >>> d = get_contraction_structure(x[i]*(y[i] + A[i, j]*x[j]))
    >>> sorted(d, key=default_sort_key)
    [(x[j]*A[i, j] + y[i])*x[i], (i,)]

    >>> d[(i,)]
    {(x[j]*A[i, j] + y[i])*x[i]}

    >>> d[x[i]*(A[i, j]*x[j] + y[i])]
    [{None: {y[i]}, (j,): {x[j]*A[i, j]}}]

    Powers with contractions in either base or exponent will also be found as
    keys in the dictionary, mapping to a list of results from recursive calls:

    >>> d = get_contraction_structure(A[j, j]**A[i, i])
    >>> d[None]
    {A[j, j]**A[i, i]}

    >>> nested_contractions = d[A[j, j]**A[i, i]]
    >>> nested_contractions[0]
    {(j,): {A[j, j]}}

    >>> nested_contractions[1]
    {(i,): {A[i, i]}}

    The description of the contraction structure may appear complicated when
    represented with a string in the above examples, but it is easy to iterate
    over:

    >>> for key in d:
    ...     if isinstance(key, Expr):
    ...         continue
    ...     for term in d[key]:
    ...         if term in d:
    ...             # treat deepest contraction first
    ...             pass
    ...     # treat outermost contactions here
    """
    # We call ourself recursively to inspect sub expressions.
    if isinstance(expr, Indexed):
        # A lone Indexed object: its repeated indices are its dummies.
        _, key = _remove_repeated(expr.indices)
        # `key or None`: an empty dummy tuple means "no contraction" -> None key.
        return {key or None: {expr}}
    elif expr.is_Atom:
        return {None: {expr}}
    elif expr.is_Mul:
        # Dummies of the product itself (indices repeated across factors).
        *_, key = _get_indices_Mul(expr, return_dummies=True)
        result = {key or None: {expr}}
        # recurse on every factor
        nested = []
        for fac in expr.args:
            facd = get_contraction_structure(fac)
            # Keep only non-trivial sub-structures (more than just a None key).
            if not (None in facd and len(facd) == 1):
                nested.append(facd)
        if nested:
            result[expr] = nested
        return result
    elif expr.is_Pow:
        # recurse in base and exp separately. If either has internal
        # contractions we must include ourselves as a key in the returned dict
        b, e = expr.as_base_exp()
        dbase = get_contraction_structure(b)
        dexp = get_contraction_structure(e)
        dicts = []
        for d in dbase, dexp:
            if not (None in d and len(d) == 1):
                dicts.append(d)
        result = {None: {expr}}
        if dicts:
            result[expr] = dicts
        return result
    elif expr.is_Add:
        # Note: we just collect all terms with identical summation indices, We
        # do nothing to identify equivalent terms here, as this would require
        # substitutions or pattern matching in expressions of unknown
        # complexity.
        result = {}
        for term in expr.args:
            # recurse on every term
            d = get_contraction_structure(term)
            for k, v in d.items():
                # Merge: terms sharing the same dummy-index key are unioned.
                if k in result:
                    result[k] |= v
                else:
                    result[k] = v
        return result
    elif isinstance(expr, Function):
        # Collect non-trivial contraction structures in each argument
        # We do not report repeated indices in separate arguments as a
        # contraction
        deeplist = []
        for arg in expr.args:
            deep = get_contraction_structure(arg)
            if not (None in deep and len(deep) == 1):
                deeplist.append(deep)
        d = {None: {expr}}
        if deeplist:
            d[expr] = deeplist
        return d
    else:
        raise NotImplementedError('No specialized handling of '
                                  f'type {type(expr)}')
/ssh-jump_hive-0.3.7.tar.gz/ssh-jump_hive-0.3.7/jumps/jump_gui.py | import sys
from PyQt5.QtGui import QPixmap,QPalette
from PyQt5 import QtCore,QtWidgets
from PyQt5.QtWidgets import QWidget,QInputDialog,QMainWindow,QDialog,QLabel,QLineEdit,QGridLayout, QToolTip,QPushButton, QApplication
from jumps.Jump_Tunnel import Jump_Tunnel
from PyQt5.QtCore import Qt
import click
import time
import logging
import threading
# Default connection settings shown in the GUI form.
# WARNING(review): real-looking credentials are hardcoded below and committed
# to source control; they should be moved to environment variables or a local
# config file before distribution.
jumphost = '117.48.195.186'    # SSH jump (bastion) host
jumpport = 2222                # SSH port on the jump host
jumpuser = 'dm'                # login user on the jump host
jumppwd = 'Vts^pztbvE339@Rw'   # hardcoded secret -- see warning above
tunnelhost = '172.16.16.32'    # target host reachable only through the jump host
tunnelappport = 10000          # service port on the target host
localhost = '127.0.0.1'        # local bind address for the forwarded port
localbindport = 4800           # local port forwarded to tunnelhost:tunnelappport
daemonsecond=2000              # seconds to keep the tunnel alive after connecting
logger = logging.getLogger('ssh-jump-hive-gui')  # NOTE(review): the GUI below logs via logging.* directly
class JumpTunnel(QWidget):
    """Main window of the ssh-jump-hive GUI.

    Collects jump-host and tunnel settings from a form and opens/closes an
    SSH tunnel (via :class:`Jump_Tunnel`) when the buttons are pressed.
    """

    def __init__(self):
        super().__init__()
        self.my_UI()

    def my_UI(self):
        """Build the form: labels, input fields pre-filled with defaults, and buttons."""
        jhLabel = QLabel("JumpHost:")
        jpLabel = QLabel("JumpPort:")
        juLable = QLabel("JumpUser:")
        jpwdLabel = QLabel("JumpPwd:")
        thLabel = QLabel("TunnelHost:")
        tpLabel = QLabel("TunnelPort:")
        lhLabel = QLabel("LocalHost:")
        lpLabel = QLabel("LocalPort:")
        gitLabel = QLabel("GithubRepo:")
        dtLabel = QLabel("DaemonSecond:")
        # NOTE(review): demo credentials are pre-filled in the UI; remove
        # before shipping (same values as the module-level defaults).
        self.jumpHost = QLineEdit("117.48.195.186")
        self.jumpPort = QLineEdit("2222")
        self.jumpUser = QLineEdit("dm")
        self.jumpPwd = QLineEdit("Vts^pztbvE339@Rw")
        self.tunnelHost = QLineEdit("172.16.16.32")
        self.tunnelPort = QLineEdit("10000")
        self.localHost = QLineEdit("127.0.0.1")
        self.localPort = QLineEdit("3560")
        self.daemonSecond = QLineEdit("21600")
        github = QLineEdit("https://github.com/mullerhai/sshjumphive")
        # Fixed button-label typos ("Trun" -> "Turn").
        self.btnConn = QPushButton("Turn ON", self)
        self.btnClose = QPushButton("Turn Off", self)
        self.grid = QGridLayout()
        self.grid.setSpacing(10)
        self.grid.addWidget(jhLabel, 2, 0)
        self.grid.addWidget(self.jumpHost, 2, 1)
        self.grid.addWidget(jpLabel, 2, 2)
        self.grid.addWidget(self.jumpPort, 2, 3)
        self.grid.addWidget(juLable, 3, 0)
        self.grid.addWidget(self.jumpUser, 3, 1)
        self.grid.addWidget(jpwdLabel, 3, 2)
        self.grid.addWidget(self.jumpPwd, 3, 3)
        self.grid.addWidget(thLabel, 5, 0)
        self.grid.addWidget(self.tunnelHost, 5, 1)
        self.grid.addWidget(tpLabel, 5, 2)
        self.grid.addWidget(self.tunnelPort, 5, 3)
        self.grid.addWidget(lhLabel, 7, 0)
        self.grid.addWidget(self.localHost, 7, 1)
        self.grid.addWidget(lpLabel, 7, 2)
        self.grid.addWidget(self.localPort, 7, 3)
        self.grid.addWidget(gitLabel, 8, 0)
        self.grid.addWidget(github, 8, 1)
        self.grid.addWidget(dtLabel, 8, 2)
        self.grid.addWidget(self.daemonSecond, 8, 3)
        self.grid.addWidget(self.btnConn, 9, 0)
        self.grid.addWidget(self.btnClose, 9, 3)
        # Logo image, scaled to a fixed 180x80 box.
        pixmap = QPixmap("../img/guilogo.jpg")
        pixmap = pixmap.scaledToHeight(80)
        pixmap = pixmap.scaledToWidth(180)
        lbl = QLabel(self)
        lbl.setFixedHeight(80)
        lbl.setFixedWidth(180)
        lbl.setPixmap(pixmap)
        self.grid.addWidget(lbl, 10, 1)
        # Secondary image, scaled to a fixed 90x90 box.
        pixfox = QPixmap("../img/tunnel.jpg")
        pixfox = pixfox.scaledToHeight(90)
        pixfox = pixfox.scaledToWidth(90)
        lblfox = QLabel(self)
        lblfox.setFixedHeight(90)
        lblfox.setFixedWidth(90)
        lblfox.setPixmap(pixfox)
        self.grid.addWidget(lblfox, 10, 2)
        self.btnConn.clicked.connect(self.buttonClicked)
        self.btnClose.clicked.connect(self.btnCloseSession)
        self.setLayout(self.grid)
        self.setWindowTitle('SSH-Jump-Hive')
        self.setGeometry(300, 300, 490, 450)
        self.show()

    def btnCloseSession(self):
        """Close the active SSH tunnel session and report the result in the grid."""
        try:
            # AttributeError is also raised here (and handled below) when no
            # tunnel has ever been opened.
            self.jump_tunnel.client.close()
            logging.info(msg="ssh_tunnel turn off successfully")
            sucTLabel = QLabel("turn off Success")
            self.grid.addWidget(sucTLabel, 9, 1)
        except Exception:
            failedTLabel = QLabel("turn off Failed")
            self.grid.addWidget(failedTLabel, 9, 2)
            logging.error(msg="ssh_tunnel turn off failed,please try again")

    def buttonClicked(self):
        """Read the form, open the SSH tunnel and keep it alive for daemonSecond seconds."""
        jumphost = self.jumpHost.text().strip()
        jumpuser = self.jumpUser.text().strip()
        jumppwd = self.jumpPwd.text().strip()
        tunnelhost = self.tunnelHost.text().strip()
        localhost = self.localHost.text().strip()
        # SECURITY FIX: the previous revision logged the password in cleartext.
        logging.info(msg=self.jumpHost.text() + "%%" + self.jumpUser.text())
        try:
            # Fall back to sensible defaults when a port field is left empty.
            # (The old `!= None` checks were always true, so int('') crashed.)
            jumpport = int(self.jumpPort.text().strip()) if self.jumpPort.text().strip() else 2222
            tunnelappport = int(self.tunnelPort.text().strip()) if self.tunnelPort.text().strip() else 10000
            localbindport = int(self.localPort.text().strip()) if self.localPort.text().strip() else 4320
            daemonsecond = int(self.daemonSecond.text().strip()) if self.daemonSecond.text().strip() else 21600
            self.jump_tunnel = Jump_Tunnel(jumphost, jumpport, jumpuser, jumppwd,
                                           tunnelhost, tunnelappport, localhost, localbindport)
            tunnel_conn = self.jump_tunnel.jump_con_tunnel()
            with tunnel_conn:
                logging.info(msg="启动成功")  # "startup succeeded"
                sucLabel = QLabel("Connect Success")
                self.grid.addWidget(sucLabel, 9, 2)
                text, ok = QInputDialog.getText(self, 'Success',
                                                'connect ssh tunnel successfully close dialog ok')
                # NOTE(review): sleeping here blocks the Qt event loop for the
                # whole daemon period; keeping the tunnel alive should move to
                # a worker thread.
                time.sleep(daemonsecond)
        except Exception:
            logging.info(msg="启动失败")  # "startup failed"
            failedLabel = QLabel("Connect Failed")
            # BUG FIX: the original added the (possibly unbound) success label
            # `sucLabel` here, raising NameError whenever the connection failed.
            self.grid.addWidget(failedLabel, 9, 2)
def main():
    """Launch the SSH-jump-tunnel GUI and block until the window is closed."""
    app = QApplication(sys.argv)
    window = JumpTunnel()  # keep a reference so the widget is not garbage-collected
    sys.exit(app.exec_())
if __name__ == '__main__':
    # Delegate to main() so the GUI entry point is defined in exactly one place
    # (the previous revision duplicated main()'s body here).
    main()
/numericube-twistranet-2.0.0.zip/numericube-twistranet-2.0.0/twistranet/themes/twistheme/static/js/tiny_mce/plugins/table/langs/te_dlg.js | tinyMCE.addI18n('te.table_dlg',{
// tinyMCE i18n strings for the table plugin dialogs, locale "te" (Telugu).
// NOTE(review): every value below is an untranslated English fallback.
general_tab:"General",
advanced_tab:"Advanced",
general_props:"General properties",
advanced_props:"Advanced properties",
rowtype:"Row in table part",
title:"Insert/Modify table",
width:"Width",
height:"Height",
cols:"Cols",
rows:"Rows",
cellspacing:"Cellspacing",
cellpadding:"Cellpadding",
border:"Border",
align:"Alignment",
align_default:"Default",
align_left:"Left",
align_right:"Right",
align_middle:"Center",
row_title:"Table row properties",
cell_title:"Table cell properties",
cell_type:"Cell type",
valign:"Vertical alignment",
align_top:"Top",
align_bottom:"Bottom",
bordercolor:"Border color",
bgcolor:"Background color",
merge_cells_title:"Merge table cells",
id:"Id",
style:"Style",
langdir:"Language direction",
langcode:"Language code",
mime:"Target MIME type",
ltr:"Left to right",
rtl:"Right to left",
bgimage:"Background image",
summary:"Summary",
td:"Data",
th:"Header",
cell_cell:"Update current cell",
cell_row:"Update all cells in row",
cell_all:"Update all cells in table",
row_row:"Update current row",
row_odd:"Update odd rows in table",
row_even:"Update even rows in table",
row_all:"Update all rows in table",
thead:"Table Head",
tbody:"Table Body",
tfoot:"Table Foot",
scope:"Scope",
rowgroup:"Row Group",
colgroup:"Col Group",
col_limit:"You've exceeded the maximum number of columns of {$cols}.",
row_limit:"You've exceeded the maximum number of rows of {$rows}.",
cell_limit:"You've exceeded the maximum number of cells of {$cells}.",
missing_scope:"Are you sure you want to continue without specifying a scope for this table header cell. Without it, it may be difficult for some users with disabilities to understand the content or data displayed of the table.",
caption:"Table caption",
frame:"Frame",
frame_none:"none",
frame_groups:"groups",
frame_rows:"rows",
frame_cols:"cols",
frame_all:"all",
rules:"Rules",
rules_void:"void",
rules_above:"above",
rules_below:"below",
rules_hsides:"hsides",
rules_lhs:"lhs",
rules_rhs:"rhs",
rules_vsides:"vsides",
rules_box:"box",
rules_border:"border"
});
/bounding_ai_tool-0.3.0-py3-none-any.whl/bounding_ai_tool/python/writer/BopWriterUtility.py | from bounding_ai_tool.python.types.MeshObjectUtility import MeshObject
import json
import os
import glob
import numpy as np
import shutil
from typing import List, Optional
import png
import cv2
import bpy
from mathutils import Matrix
from bounding_ai_tool.python.utility.BlenderUtility import get_all_blender_mesh_objects
from bounding_ai_tool.python.utility.Utility import Utility, resolve_path
from bounding_ai_tool.python.postprocessing.PostProcessingUtility import dist2depth
from bounding_ai_tool.python.writer.WriterUtility import WriterUtility
def write_bop(output_dir: str, target_objects: Optional[List[MeshObject]] = None, depths: Optional[List[np.ndarray]] = None,
              colors: Optional[List[np.ndarray]] = None, color_file_format: str = "PNG", dataset: str = "",
              append_to_existing_output: bool = True, depth_scale: float = 1.0, jpg_quality: int = 95, save_world2cam: bool = True,
              ignore_dist_thres: float = 100., m2mm: bool = True, frames_per_chunk: int = 1000):
    """Write the BOP data

    :param output_dir: Path to the output directory.
    :param target_objects: Objects for which to save ground truth poses in BOP format. Default: Save all objects or from specified dataset
    :param depths: List of depth images in m to save
    :param colors: List of color images to save
    :param color_file_format: File type to save color images. Available: "PNG", "JPEG"
    :param jpg_quality: If color_file_format is "JPEG", save with the given quality.
    :param dataset: Only save annotations for objects of the specified bop dataset. Saves all object poses if undefined.
    :param append_to_existing_output: If true, the new frames will be appended to the existing ones.
    :param depth_scale: Multiply the uint16 output depth image with this factor to get depth in mm. Used to trade-off between depth accuracy
                        and maximum depth value. Default corresponds to 65.54m maximum depth and 1mm accuracy.
    :param save_world2cam: If true, camera to world transformations "cam_R_w2c", "cam_t_w2c" are saved in scene_camera.json
    :param ignore_dist_thres: Distance between camera and object after which object is ignored. Mostly due to failed physics.
    :param m2mm: Original bop annotations and models are in mm. If true, we convert the gt annotations to mm here. This
                 is needed if BopLoader option mm2m is used.
    :param frames_per_chunk: Number of frames saved in each chunk (called scene in BOP)
    """
    # Avoid mutable default arguments.
    if depths is None:
        depths = []
    if colors is None:
        colors = []

    # Output paths.
    dataset_dir = os.path.join(output_dir, dataset)
    chunks_dir = os.path.join(dataset_dir, 'train_pbr')
    camera_path = os.path.join(dataset_dir, 'camera.json')

    # Create the output directory structure.
    if os.path.exists(dataset_dir) and not append_to_existing_output:
        raise Exception("The output folder already exists: {}.".format(dataset_dir))
    # BUG FIX: previously chunks_dir was only created together with a fresh
    # dataset_dir; when appending to an existing dataset_dir whose chunks
    # subfolder was missing, later writes failed.  exist_ok covers both cases.
    os.makedirs(chunks_dir, exist_ok=True)

    # Select target objects or objects from the specified dataset or all objects.
    if target_objects is not None:
        dataset_objects = [t_obj.blender_obj for t_obj in target_objects]
    elif dataset:
        dataset_objects = [obj for obj in get_all_blender_mesh_objects()
                           if "bop_dataset_name" in obj and not obj.hide_render
                           and obj["bop_dataset_name"] == dataset]
    else:
        dataset_objects = get_all_blender_mesh_objects()

    # Check if there is any object from the specified dataset.
    if not dataset_objects:
        raise Exception("The scene does not contain any object from the "
                        "specified dataset: {}. Either remove the dataset parameter "
                        "or assign custom property 'bop_dataset_name' to selected objects".format(dataset))

    # Save the data.
    BopWriterUtility._write_camera(camera_path, depth_scale=depth_scale)
    BopWriterUtility._write_frames(chunks_dir, dataset_objects=dataset_objects, depths=depths, colors=colors,
                                   color_file_format=color_file_format, frames_per_chunk=frames_per_chunk,
                                   m2mm=m2mm, ignore_dist_thres=ignore_dist_thres, save_world2cam=save_world2cam,
                                   depth_scale=depth_scale, jpg_quality=jpg_quality)
class BopWriterUtility:
    """ Saves the synthesized dataset in the BOP format. The dataset is split
    into chunks which are saved as individual "scenes". For more details
    about the BOP format, visit the BOP toolkit docs:
    https://github.com/thodan/bop_toolkit/blob/master/docs/bop_datasets_format.md
    """

    @staticmethod
    def _load_json(path, keys_to_int=False):
        """Loads content of a JSON file.
        From the BOP toolkit (https://github.com/thodan/bop_toolkit).

        :param path: Path to the JSON file.
        :param keys_to_int: Convert digit dict keys to integers. Default: False
        :return: Content of the loaded JSON file.
        """

        def convert_keys_to_int(x):
            # Only keys that look like (possibly negative) integers are converted.
            return {int(k) if k.lstrip('-').isdigit() else k: v for k, v in x.items()}

        with open(path, 'r') as f:
            if keys_to_int:
                content = json.load(f, object_hook=convert_keys_to_int)
            else:
                content = json.load(f)
        return content

    @staticmethod
    def _save_json(path, content):
        """ Saves the content to a JSON file in a human-friendly format.
        From the BOP toolkit (https://github.com/thodan/bop_toolkit).

        :param path: Path to the output JSON file.
        :param content: Dictionary/list to save.
        """
        with open(path, 'w') as f:
            if isinstance(content, dict):
                # One '"key": value' pair per line, keys sorted, for readable diffs.
                f.write('{\n')
                content_sorted = sorted(content.items(), key=lambda x: x[0])
                for elem_id, (k, v) in enumerate(content_sorted):
                    f.write('  \"{}\": {}'.format(k, json.dumps(v, sort_keys=True)))
                    if elem_id != len(content) - 1:
                        f.write(',')
                    f.write('\n')
                f.write('}')
            elif isinstance(content, list):
                # One list element per line.
                f.write('[\n')
                for elem_id, elem in enumerate(content):
                    f.write('  {}'.format(json.dumps(elem, sort_keys=True)))
                    if elem_id != len(content) - 1:
                        f.write(',')
                    f.write('\n')
                f.write(']')
            else:
                json.dump(content, f, sort_keys=True)

    @staticmethod
    def _save_depth(path, im):
        """Saves a depth image (16-bit) to a PNG file.
        From the BOP toolkit (https://github.com/thodan/bop_toolkit).

        :param path: Path to the output depth image file.
        :param im: ndarray with the depth image to save.
        """
        if not path.endswith(".png"):
            raise ValueError('Only PNG format is currently supported.')

        # Clamp to the uint16 maximum WITHOUT mutating the caller's array
        # (the previous implementation wrote the clamp back into `im` in place).
        im_uint16 = np.round(np.minimum(im, 65535)).astype(np.uint16)

        # PyPNG library can save 16-bit PNG and is faster than imageio.imwrite().
        w_depth = png.Writer(im.shape[1], im.shape[0], greyscale=True, bitdepth=16)
        with open(path, 'wb') as f:
            w_depth.write(f, np.reshape(im_uint16, (-1, im.shape[1])))

    @staticmethod
    def _write_camera(camera_path: str, depth_scale: float = 1.0):
        """ Writes camera.json into dataset_dir.

        :param camera_path: Path to camera.json
        :param depth_scale: Multiply the uint16 output depth image with this factor to get depth in mm.
        """
        cam_K = WriterUtility.get_cam_attribute(bpy.context.scene.camera, 'cam_K')
        camera = {'cx': cam_K[0][2],
                  'cy': cam_K[1][2],
                  'depth_scale': depth_scale,
                  'fx': cam_K[0][0],
                  'fy': cam_K[1][1],
                  'height': bpy.context.scene.render.resolution_y,
                  'width': bpy.context.scene.render.resolution_x}
        BopWriterUtility._save_json(camera_path, camera)

    @staticmethod
    def _get_frame_gt(dataset_objects: "List[bpy.types.Mesh]", unit_scaling: float, ignore_dist_thres: float,
                      destination_frame: "List[str]" = None):
        """ Returns GT pose annotations between active camera and objects.

        :param dataset_objects: Save annotations for these objects.
        :param unit_scaling: 1000. for outputting poses in mm
        :param ignore_dist_thres: Distance between camera and object after which object is ignored. Mostly due to failed physics.
        :param destination_frame: Transform poses from Blender internal coordinates to OpenCV coordinates.
            Defaults to ["X", "-Y", "-Z"] (None avoids a shared mutable default).
        :return: A list of GT camera-object pose annotations for scene_gt.json
        """
        if destination_frame is None:
            destination_frame = ["X", "-Y", "-Z"]

        H_c2w_opencv = Matrix(WriterUtility.get_cam_attribute(bpy.context.scene.camera, 'cam2world_matrix',
                                                              local_frame_change=destination_frame))
        frame_gt = []
        for obj in dataset_objects:
            H_m2w = Matrix(WriterUtility.get_common_attribute(obj, 'matrix_world'))

            cam_H_m2c = H_c2w_opencv.inverted() @ H_m2w
            cam_R_m2c = cam_H_m2c.to_quaternion().to_matrix()
            cam_t_m2c = cam_H_m2c.to_translation()

            # bpy objects expose their name via the `name` attribute; the previous
            # `obj.get_name()` would itself raise AttributeError when the assert fired.
            assert "category_id" in obj, "{} object has no custom property 'category_id'".format(obj.name)

            # Ignore examples that fell through the plane or are otherwise too far away.
            if np.linalg.norm(list(cam_t_m2c)) <= ignore_dist_thres:
                cam_t_m2c = list(cam_t_m2c * unit_scaling)
                frame_gt.append({
                    'cam_R_m2c': list(cam_R_m2c[0]) + list(cam_R_m2c[1]) + list(cam_R_m2c[2]),
                    'cam_t_m2c': cam_t_m2c,
                    'obj_id': obj["category_id"]
                })
            else:
                print('ignored obj, ', obj["category_id"], 'because either ')
                print('(1) it is further away than parameter "ignore_dist_thres: ",', ignore_dist_thres)
                print('(e.g. because it fell through a plane during physics sim)')
                print('or')
                print('(2) the object pose has not been given in meters')
        return frame_gt

    @staticmethod
    def _get_frame_camera(save_world2cam, depth_scale=1.0, unit_scaling=1000., destination_frame=None):
        """ Returns camera parameters for the active camera.

        :param save_world2cam: If true, camera to world transformations "cam_R_w2c", "cam_t_w2c" are saved in scene_camera.json
        :param depth_scale: Multiply the uint16 output depth image with this factor to get depth in mm.
        :param unit_scaling: 1000. for outputting poses in mm
        :param destination_frame: Transform poses from Blender internal coordinates to OpenCV coordinates.
            Defaults to ["X", "-Y", "-Z"] (None avoids a shared mutable default).
        :return: dict containing info for scene_camera.json
        """
        if destination_frame is None:
            destination_frame = ["X", "-Y", "-Z"]

        cam_K = WriterUtility.get_cam_attribute(bpy.context.scene.camera, 'cam_K')

        frame_camera_dict = {
            'cam_K': cam_K[0] + cam_K[1] + cam_K[2],
            'depth_scale': depth_scale
        }

        if save_world2cam:
            H_c2w_opencv = Matrix(WriterUtility.get_cam_attribute(bpy.context.scene.camera, 'cam2world_matrix',
                                                                  local_frame_change=destination_frame))

            H_w2c_opencv = H_c2w_opencv.inverted()
            R_w2c_opencv = H_w2c_opencv.to_quaternion().to_matrix()
            t_w2c_opencv = H_w2c_opencv.to_translation() * unit_scaling

            frame_camera_dict['cam_R_w2c'] = list(R_w2c_opencv[0]) + list(R_w2c_opencv[1]) + list(R_w2c_opencv[2])
            frame_camera_dict['cam_t_w2c'] = list(t_w2c_opencv)
        return frame_camera_dict

    @staticmethod
    def _write_frames(chunks_dir: str, dataset_objects: list, depths: "List[np.ndarray]" = None,
                      colors: "List[np.ndarray]" = None,
                      color_file_format: str = "PNG", depth_scale: float = 1.0, frames_per_chunk: int = 1000,
                      m2mm: bool = True,
                      ignore_dist_thres: float = 100., save_world2cam: bool = True, jpg_quality: int = 95):
        """Write each frame's ground truth into chunk directory in BOP format

        :param chunks_dir: Path to the output directory of the current chunk.
        :param dataset_objects: Save annotations for these objects.
        :param depths: List of depth images in m to save
        :param colors: List of color images to save
        :param color_file_format: File type to save color images. Available: "PNG", "JPEG"
        :param jpg_quality: If color_file_format is "JPEG", save with the given quality.
        :param depth_scale: Multiply the uint16 output depth image with this factor to get depth in mm. Used to
            trade-off between depth accuracy and maximum depth value. Default corresponds to 65.54m maximum depth
            and 1mm accuracy.
        :param ignore_dist_thres: Distance between camera and object after which object is ignored. Mostly due to failed physics.
        :param m2mm: Original bop annotations and models are in mm. If true, we convert the gt annotations to mm here. This
            is needed if BopLoader option mm2m is used.
        :param frames_per_chunk: Number of frames saved in each chunk (called scene in BOP)
        """
        # None instead of mutable list defaults (a shared default list would leak state between calls).
        if depths is None:
            depths = []
        if colors is None:
            colors = []

        # Format of the depth images.
        depth_ext = '.png'

        rgb_tpath = os.path.join(chunks_dir, '{chunk_id:06d}', 'rgb', '{im_id:06d}' + '{im_type}')
        depth_tpath = os.path.join(chunks_dir, '{chunk_id:06d}', 'depth', '{im_id:06d}' + depth_ext)
        chunk_camera_tpath = os.path.join(chunks_dir, '{chunk_id:06d}', 'scene_camera.json')
        chunk_gt_tpath = os.path.join(chunks_dir, '{chunk_id:06d}', 'scene_gt.json')

        # Paths to the already existing chunk folders (such folders may exist
        # when appending to an existing dataset).
        chunk_dirs = sorted(glob.glob(os.path.join(chunks_dir, '*')))
        chunk_dirs = [d for d in chunk_dirs if os.path.isdir(d)]

        # Get ID's of the last already existing chunk and frame.
        curr_chunk_id = 0
        curr_frame_id = 0
        if len(chunk_dirs):
            last_chunk_dir = sorted(chunk_dirs)[-1]
            last_chunk_gt_fpath = os.path.join(last_chunk_dir, 'scene_gt.json')
            chunk_gt = BopWriterUtility._load_json(last_chunk_gt_fpath, keys_to_int=True)

            # Last chunk and frame ID's.
            last_chunk_id = int(os.path.basename(last_chunk_dir))
            last_frame_id = int(sorted(chunk_gt.keys())[-1])

            # Current chunk and frame ID's.
            curr_chunk_id = last_chunk_id
            curr_frame_id = last_frame_id + 1
            if curr_frame_id % frames_per_chunk == 0:
                curr_chunk_id += 1
                curr_frame_id = 0

        # Initialize structures for the GT annotations and camera info.
        chunk_gt = {}
        chunk_camera = {}
        if curr_frame_id != 0:
            # Load GT and camera info of the chunk we are appending to.
            chunk_gt = BopWriterUtility._load_json(
                chunk_gt_tpath.format(chunk_id=curr_chunk_id), keys_to_int=True)
            chunk_camera = BopWriterUtility._load_json(
                chunk_camera_tpath.format(chunk_id=curr_chunk_id), keys_to_int=True)

        frame_start = bpy.context.scene.frame_start
        frame_end = bpy.context.scene.frame_end
        num_new_frames = frame_end - frame_start
        # Each list, if provided, must contain exactly one image per new frame.
        # The previous chained comparison `len(depths) != len(colors) != num_new_frames`
        # silently accepted two lists of equal but wrong length.
        if colors and len(colors) != num_new_frames:
            raise Exception("The amount of images stored in colors does not correspond to the amount"
                            "of images specified by frame_start to frame_end.")
        if depths and len(depths) != num_new_frames:
            raise Exception("The amount of images stored in depths does not correspond to the amount"
                            "of images specified by frame_start to frame_end.")

        # Output translation gt in m or mm (loop-invariant, hoisted out of the frame loop).
        unit_scaling = 1000. if m2mm else 1.

        # Go through all frames.
        for frame_id in range(frame_start, frame_end):
            # Activate frame.
            bpy.context.scene.frame_set(frame_id)

            # Reset data structures and prepare folders for a new chunk.
            if curr_frame_id == 0:
                chunk_gt = {}
                chunk_camera = {}
                os.makedirs(os.path.dirname(
                    rgb_tpath.format(chunk_id=curr_chunk_id, im_id=0, im_type='PNG')))
                os.makedirs(os.path.dirname(
                    depth_tpath.format(chunk_id=curr_chunk_id, im_id=0)))

            # Get GT annotations and camera info for the current frame.
            chunk_gt[curr_frame_id] = BopWriterUtility._get_frame_gt(dataset_objects, unit_scaling, ignore_dist_thres)
            chunk_camera[curr_frame_id] = BopWriterUtility._get_frame_camera(save_world2cam, depth_scale, unit_scaling)

            if colors:
                # Index relative to frame_start: frame_id is absolute and does not
                # necessarily start at 0.
                color_rgb = colors[frame_id - frame_start]
                color_bgr = color_rgb[..., ::-1].copy()
                if color_file_format == 'PNG':
                    rgb_fpath = rgb_tpath.format(chunk_id=curr_chunk_id, im_id=curr_frame_id, im_type='.png')
                    cv2.imwrite(rgb_fpath, color_bgr)
                elif color_file_format == 'JPEG':
                    rgb_fpath = rgb_tpath.format(chunk_id=curr_chunk_id, im_id=curr_frame_id, im_type='.jpg')
                    cv2.imwrite(rgb_fpath, color_bgr, [int(cv2.IMWRITE_JPEG_QUALITY), jpg_quality])
            else:
                rgb_output = Utility.find_registered_output_by_key("colors")
                if rgb_output is None:
                    raise Exception("RGB image has not been rendered.")
                color_ext = '.png' if rgb_output['path'].endswith('png') else '.jpg'
                # Copy the resulting RGB image.
                rgb_fpath = rgb_tpath.format(chunk_id=curr_chunk_id, im_id=curr_frame_id, im_type=color_ext)
                shutil.copyfile(rgb_output['path'] % frame_id, rgb_fpath)

            if depths:
                # Same relative indexing as for colors above.
                depth = depths[frame_id - frame_start]
            else:
                # Load the resulting dist image and convert it to a depth image.
                dist_output = Utility.find_registered_output_by_key("distance")
                if dist_output is None:
                    raise Exception("Distance image has not been rendered.")
                distance = WriterUtility.load_output_file(resolve_path(dist_output['path'] % frame_id), remove=False)
                depth = dist2depth(distance)

            # Scale the depth to retain a higher precision (the depth is saved
            # as a 16-bit PNG image with range 0-65535).
            depth_mm = 1000.0 * depth  # [m] -> [mm]
            depth_mm_scaled = depth_mm / float(depth_scale)

            # Save the scaled depth image.
            depth_fpath = depth_tpath.format(chunk_id=curr_chunk_id, im_id=curr_frame_id)
            BopWriterUtility._save_depth(depth_fpath, depth_mm_scaled)

            # Save the chunk info if we are at the end of a chunk or at the last new frame
            # (compare against frame_end - 1, not num_new_frames - 1, so that a
            # non-zero frame_start still flushes the final chunk).
            if ((curr_frame_id + 1) % frames_per_chunk == 0) or (frame_id == frame_end - 1):
                # Save GT annotations.
                BopWriterUtility._save_json(chunk_gt_tpath.format(chunk_id=curr_chunk_id), chunk_gt)
                # Save camera info.
                BopWriterUtility._save_json(chunk_camera_tpath.format(chunk_id=curr_chunk_id), chunk_camera)

                # Update ID's.
                curr_chunk_id += 1
                curr_frame_id = 0
            else:
                curr_frame_id += 1
import numpy
import cdms2
import os
import os.path
## import PropertiedClasses
from error import CDMSError
from grid import AbstractGrid, LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
from coord import TransientVirtualAxis
from axis import getAutoBounds, allclose
import bindex
MethodNotImplemented = "Method not yet implemented"
def _flatten(boundsar):
boundsshape = boundsar.shape
if len(boundsshape)>2:
newshape = (reduce((lambda x,y: x*y), boundsshape[:-1], 1), boundsshape[-1])
boundsar.shape = newshape
return boundsar
class AbstractHorizontalGrid(AbstractGrid):
    """Abstract base class for horizontal (latitude/longitude) grids.
    Holds a latitude axis, a longitude axis and an optional mask.
    NOTE: this file is written in the Python 2 dialect (raise E, msg)."""

    def __init__(self, latAxis, lonAxis, id=None, maskvar=None, tempmask=None, node=None):
        """Create a horizontal grid.

        latAxis, lonAxis: latitude/longitude coordinate objects.
        id: grid identifier string; "<None>" is used when omitted.
        maskvar: persistent mask variable; tempmask: temporary mask array.
        node: passed through to AbstractGrid.
        """
        AbstractGrid.__init__(self, node)
        if id is None:
            self.id = "<None>"
        else:
            self.id = id
        # Coordinate axes and masks are kept in 'private' attributes used by
        # the accessors below.
        self._lataxis_ = latAxis
        self._lonaxis_ = lonAxis
        self._maskVar_ = maskvar
        self._tempMask_ = tempmask
    # Generate default bounds
    def genBounds(self):
        # Abstract: subclasses must generate (latbounds, lonbounds).
        raise CDMSError, MethodNotImplemented
    # Get the n-th axis. naxis is 0 or 1.
    def getAxis(self, naxis):
        # Abstract: subclasses return the naxis-th index axis.
        raise CDMSError, MethodNotImplemented
    def getBounds(self):
        """Get the grid cell boundaries, as a tuple (latitudeBounds, longitudeBounds).
        Explicit bounds are preferred; missing ones are generated via genBounds()
        when auto-bounds mode (getAutoBounds()) is 1 or 2.
        """
        latbnds, lonbnds = (self._lataxis_.getExplicitBounds(), self._lonaxis_.getExplicitBounds())
        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1,2]:
            nlatbnds, nlonbnds = self.genBounds()
            # Only fill in the bounds that were actually missing.
            if latbnds is None:
                latbnds = nlatbnds
            if lonbnds is None:
                lonbnds = nlonbnds
        return (latbnds, lonbnds)
    def getLatitude(self):
        """Get the latitude coordinates."""
        return self._lataxis_
    def getLongitude(self):
        """Get the longitude coordinates."""
        return self._lonaxis_
    def getMask(self):
        """Get the mask array, if any, otherwise None is returned.
        The persistent mask variable takes precedence over the temporary mask."""
        if self._maskVar_ is not None:
            return self._maskVar_
        else:
            return self._tempMask_
    def getMesh(self):
        """Get the mesh array used by the meshfill plot."""
        # Abstract: implemented by concrete subclasses.
        raise CDMSError, MethodNotImplemented
    def getWeightsArray(self):
        """Return normalized area weights, as an array of the same
        shape as the grid.
        """
        # Abstract: implemented by concrete subclasses.
        raise CDMSError, MethodNotImplemented
    def listall (self, all=None):
        """Return a list of strings describing this grid (the 'all' flag is
        currently unused here)."""
        result=[]
        result.append('Grid has Python id %s.' % hex(id(self)))
        return result
    def setMask(self,mask,permanent=0):
        # NOTE(review): the 'permanent' flag is accepted but ignored — the
        # mask is always stored in the persistent slot. Confirm intent.
        self._maskVar_ = mask
    def subGridRegion(self, latRegion, lonRegion):
        # Abstract: subclasses return a subgrid for the given regions.
        raise CDMSError, MethodNotImplemented
    def hasCoordType(self, coordType):
        # True only for latitude/longitude coordinate types.
        return ((coordType==LatitudeType) or (coordType==LongitudeType))
    def checkConvex(self):
        """Check that each cell of the grid is convex in lon-lat space, with nodes defined counter-clockwise.
        Return a 1D numpy array of cells that fail the cross-product test.
        """
        from numpy import zeros, where, less, logical_or, compress
        latb, lonb = self.getBounds()
        saveshape = lonb.shape
        # Flatten bounds to (ncell, nnode) for vectorized edge tests.
        lonb = _flatten(lonb)
        latb = _flatten(latb)
        ncell, nnode = lonb.shape
        badmask = zeros((ncell,))
        # For each consecutive edge pair, a negative z cross-product marks a
        # clockwise turn, i.e. a non-convex (or mis-ordered) cell.
        for n0 in range(nnode):
            n1 = (n0+1)%nnode
            n2 = (n1+1)%nnode
            vec0lon = lonb[:,n1] - lonb[:,n0]
            vec0lat = latb[:,n1] - latb[:,n0]
            vec1lon = lonb[:,n2] - lonb[:,n1]
            vec1lat = latb[:,n2] - latb[:,n1]
            cross = vec0lon*vec1lat - vec0lat*vec1lon
            mask = where(less(cross, 0.0), 1, 0)
            badmask = logical_or(mask, badmask)
        badcells = compress(badmask, range(len(badmask)))
        # Restore the original bounds shapes (they were reshaped in place).
        lonb.shape = saveshape
        latb.shape = saveshape
        return badcells
    def fixCutCells(self, nonConvexCells, threshold=270.0):
        """For any mapping from a spherical to a planar surface, there is a linear cut.
        Grid cells that span the cut may appear to be nonconvex, which causes
        problems with meshfill graphics. This routine attempts to 'repair' the cut cell
        boundaries so that meshfill recognizes they are convex.
        nonConvexCells: 1D numpy array of indices of nonconvex cells, as returned from
          checkConvex.
        threshold: positive floating-point value in degrees.
          If the difference in longitude values of
          consecutive boundaries nodes exceeds the threshold, the cell is considered
          a cut cell.
        On return, the grid boundaries are modified.
        Return value is a 1D array of indices of cells that cannot be repaired.
        """
        from numpy import take, array
        latb, lonb = self.getBounds()
        saveshape = lonb.shape
        lonb = _flatten(lonb)
        latb = _flatten(latb)
        ncell, nnode = lonb.shape
        # Work on copies of just the suspect cells.
        lonb2 = take(lonb, nonConvexCells, axis=0)
        latb2 = take(latb, nonConvexCells, axis=0)
        newbadcells = []
        for k in range(len(nonConvexCells)):
            # NOTE(review): lonb2[k] is a numpy view, not a copy, so the
            # 'restore original values' below may be a no-op — confirm.
            savelons = lonb2[k]
            # Loop twice
            for node in range(2*nnode):
                # Shift nodes across the cut by +/-360 degrees when a jump
                # larger than 'threshold' is seen between consecutive nodes.
                n0 = node%nnode
                n1 = (n0+1)%nnode
                vec0lon = lonb2[k,n1]-lonb2[k,n0]
                if vec0lon>threshold:
                    lonb2[k,n1] -= 360.0
                elif vec0lon<-threshold:
                    lonb2[k,n1] += 360.0
            # If the cross-product test still fails, restore
            # the original values and add to the nonConvexCells list
            for n0 in range(nnode):
                n1 = (n0+1)%nnode
                n2 = (n1+1)%nnode
                vec0lon = lonb2[k,n1] - lonb2[k,n0]
                vec0lat = latb2[k,n1] - latb2[k,n0]
                vec1lon = lonb2[k,n2] - lonb2[k,n1]
                vec1lat = latb2[k,n2] - latb2[k,n1]
                cross = vec0lon*vec1lat - vec0lat*vec1lon
                if cross<0:
                    lonb2[k] = savelons
                    newbadcells.append(nonConvexCells[k])
                    break
        # Scatter the repaired cell bounds back to the original bounds
        # and reset the grid bounds.
        # Only the longitude bounds are written back; latitudes were not modified.
        for k in range(len(nonConvexCells)):
            lonb[nonConvexCells[k]] = lonb2[k]
        lonb.shape = saveshape
        self.getLongitude().setBounds(lonb)
        return array(newbadcells)
class AbstractCurveGrid(AbstractHorizontalGrid):
def __init__(self, latAxis, lonAxis, id=None, maskvar=None, tempmask=None, node=None):
"""Create a curvilinear grid.
"""
if latAxis.shape != lonAxis.shape:
raise CDMSError, 'Latitude and longitude axes must have the same shape.'
AbstractHorizontalGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
self._index_ = None
def clone(self, copyData=1):
newlat = self._lataxis_.clone(copyData)
newlon = self._lonaxis_.clone(copyData)
return TransientCurveGrid(newlat, newlon, id=self.id)
def __repr__(self):
return "<CurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
__str__ = __repr__
def getMesh(self, transpose=None):
"""Generate a mesh array for the meshfill graphics method.
If transpose is defined to a tuple, say (1,0), first transpose
latbounds and lonbounds according to the tuple, (1,0,2) in this case.
"""
if self._mesh_ is None:
LAT=0
LON=1
latbounds, lonbounds = self.getBounds()
## ## following work aronud a numpy.ma bug
## latbounds=latbounds.filled()
## lonbounds=lonbounds.filled()
if latbounds is None or lonbounds is None:
raise CDMSError, 'No boundary data is available for grid %s'%self.id
if (transpose is not None) and (transpose[1]==0):
latbounds = numpy.transpose(latbounds, (1,0,2))
lonbounds = numpy.transpose(lonbounds, (1,0,2))
print latbounds.shape
mesh = numpy.zeros((self.size(),2,latbounds.shape[-1]),latbounds.dtype.char)
mesh[:,LAT,:] = numpy.reshape(latbounds,(self.size(),latbounds.shape[-1]))
mesh[:,LON,:] = numpy.reshape(lonbounds,(self.size(),latbounds.shape[-1]))
self._mesh_ = mesh
return self._mesh_
def _getShape (self):
return self._lataxis_.shape
# Don't try to generate bounds for curvilinear grids
def genBounds(self):
return (None, None)
# Get the n-th index axis. naxis is 0 or 1.
def getAxis(self, naxis):
return self._lataxis_.getAxis(naxis)
def getMask(self):
"""Get the mask array, if any, otherwise None is returned."""
if self._maskVar_ is None:
return self._tempMask_
else:
return self._maskVar_[:]
def size(self):
return self._lataxis_.size()
def writeScrip(self, cufile, gridTitle=None):
"""Write a grid to a SCRIP file.
cufile is a Cdunif file, NOT a CDMS file.
gridtitle is a string identifying the grid.
"""
import copy
lat = numpy.ma.filled(self._lataxis_)
lon = numpy.ma.filled(self._lonaxis_)
blat, blon = self.getBounds()
mask = self.getMask()
ni, nj = self.shape
if mask is None:
mask = numpy.ones((ni, nj), numpy.int32)
else:
tmp = 1 - mask
mask[:] = tmp.astype(mask.dtype.char)
mask = mask.astype(numpy.int32)
ngrid = ni*nj
centerLat = copy.copy(lat)
centerLat.shape = (ngrid,)
centerLon = copy.copy(lon)
centerLon.shape = (ngrid,)
mask.shape = (ngrid,)
clat = numpy.ma.filled(copy.copy(blat))
clat.shape = (ngrid,4)
clon = numpy.ma.filled(copy.copy(blon))
clon.shape = (ngrid,4)
# Write the file
if gridTitle is None:
gridTitle = self.id
cufile.title = gridTitle
cufile.createDimension("grid_size", ngrid)
cufile.createDimension("grid_corners", 4)
cufile.createDimension("grid_rank", 2)
griddims = cufile.createVariable("grid_dims", 'i', ("grid_rank",))
gridcenterlat = cufile.createVariable("grid_center_lat", 'd', ("grid_size",))
gridcenterlat.units = "degrees"
gridcenterlon = cufile.createVariable("grid_center_lon", 'd', ("grid_size",))
gridcenterlon.units = "degrees"
gridimask = cufile.createVariable("grid_imask", 'i', ("grid_size",))
gridimask.units = "unitless"
gridcornerlat = cufile.createVariable("grid_corner_lat", 'd', ("grid_size","grid_corners"))
gridcornerlat.units = "degrees"
gridcornerlon = cufile.createVariable("grid_corner_lon", 'd', ("grid_size","grid_corners"))
gridcornerlon.units = "degrees"
griddims[:] = numpy.array([nj,ni], numpy.int32)
gridcenterlat[:] = centerLat
gridcenterlon[:] = centerLon
gridimask[:] = mask
gridcornerlat[:] = clat
gridcornerlon[:] = clon
def toGenericGrid(self, gridid=None):
import copy
from auxcoord import TransientAuxAxis1D
from coord import TransientVirtualAxis
from gengrid import TransientGenericGrid
lat = numpy.ma.filled(self._lataxis_)
latunits = self._lataxis_.units
lon = numpy.ma.filled(self._lonaxis_)
lonunits = self._lonaxis_.units
blat, blon = self.getBounds()
mask = self.getMask()
ni, nj = self.shape
ngrid = ni*nj
centerLat = copy.copy(lat)
centerLat.shape = (ngrid,)
centerLon = copy.copy(lon)
centerLon.shape = (ngrid,)
if mask is not None:
mask.shape = (ngrid,)
cornerLat = numpy.ma.filled(copy.copy(blat))
cornerLat.shape = (ngrid,4)
cornerLon = numpy.ma.filled(copy.copy(blon))
cornerLon.shape = (ngrid,4)
iaxis = TransientVirtualAxis("cell",ngrid)
lataxis = TransientAuxAxis1D(centerLat, axes=(iaxis,), bounds=cornerLat,
attributes={'units':latunits}, id="latitude")
lonaxis = TransientAuxAxis1D(centerLon, axes=(iaxis,), bounds=cornerLon,
attributes={'units':lonunits}, id="longitude")
grid = TransientGenericGrid(lataxis, lonaxis, id=gridid, tempmask=mask)
return grid
def toCurveGrid(self, gridid=None):
if gridid is None:
gridid = self.id
result = self.clone()
result.id = gridid
return result
def writeToFile(self, file):
latvar = self._lataxis_.writeToFile(file)
lonvar = self._lonaxis_.writeToFile(file)
if self._maskVar_ is not None:
maskid = "mask_"+self.id
file.write(self._maskVar_, id=maskid)
latvar.maskid = maskid
lonvar.maskid = maskid
return (latvar, lonvar)
def writeg( self, file ):
"""Write self as a Gridspec file representing a curvilinear grid.
The file, normally a CdmsFile, should already be open for writing
and will be closed."""
import time
from tvariable import TransientVariable
# Set attributes
if ( hasattr(file,'Conventions') ):
if ( file.Conventions.find('Gridspec')<0 ):
file.Conventions = file.Conventions + ' Gridspec-0.0'
else:
file.Conventions = 'Gridspec-0.0'
if ( hasattr(file,'gs_filetypes') ):
if ( file.gs_filetypes.find('Curvilinear_Tile')<0 ):
file.gs_filetypes = file.gs_filetypes + ' Curvilinear_Tile'
else:
file.gs_filetypes = 'Curvilinear_Tile'
t=time.time()
id=int((t-int(t))*1.0e9)
file.gs_id = id
file.gs_originalfilename = os.path.basename( file.id )
newhistory = '\n' + time.ctime() + ' CDAT/CDMS AbstractCurveGrid'
# ... The \n gives a newline in the CDMS Python and in the Cdunif C code
# which gets called to write to a file. Someplace in the NetCDF libraries,
# or possibly the ncdump utility, the newline is converted back to a "\n"
# string, so you don't see a newline when you view the file. If we want
# a real newline as the CF specification says, the libraries or ncdump
# will have to be changed.
# ... In the future we may want to add more history information.
file.history = getattr( self, 'history', '' ) + \
getattr( file, 'history', '' ) + newhistory
# former tile variable and attributes
if ( hasattr(self,'long_name') and self.long_name!=None ):
file.long_name = self.long_name
else:
file.long_name = 'gridspec_tile'
# gs_geometryType is no longer required of Gridspec files, but as yet
# there is no other proposal for describing the geometry (July 2010)
if ( hasattr(self,'gs_geometryType') and self.gs_geometryType!=None):
file.gs_geometryType = self.gs_geometryType
else:
file.gs_geometryType = 'spherical'
# gs_discretizationType is no longer required of Gridspec files, but it's
# harmless and may come in useful
if ( hasattr(self,'gs_discretizationType') and self.gs_discretizationType!=None ):
file.gs_discretizationType = self.gs_discretizationType
else:
file.gs_discretizationType = 'logically_rectangular'
file.gs_lonv = 'gs_x'
file.gs_latv = 'gs_y'
# Set up and write variables. When written, cdms writes not only the arrays
# but also their coordinates, e.g. gs_nip.
x=self._lonaxis_
if ( not hasattr(x,'units') ):
print "Warning, no units found for longitude"
x.units = 'degree_east'
if ( not hasattr(x,'standard_name') ):
print "Warning, no standard_name found for longitude axis"
x.standard_name = 'longitude'
if ( x.standard_name == 'geographic_longitude'):
# temporary for updating test files
x.standard_name = 'longitude'
x.id = file.gs_lonv
# _lonaxis_ is a TransientAxis2D, hence a TransientVariable
# But I don't know where the attribute _TransientVariable__domain comes from
y=self._lataxis_
if ( not hasattr(y,'units') ):
print "Warning, no units found for latitude"
y.units = 'degree_north'
if ( not hasattr(y,'standard_name') ):
print "Warning, no standard_name found for latitude axis"
y.standard_name = 'latitude'
if ( y.standard_name == 'geographic_latitude'):
# temporary for updating test files
y.standard_name = 'latitude'
y.id = file.gs_latv
if( not hasattr(x,'_TransientVariable__domain') ):
# There probably doesn't exist enough information to write a correct
# grid, but this will help.
x._TransientVariable__domain = [ (x,), (y,) ]
x._TransientVariable__domain[0][0].id='gs_njp'
x._TransientVariable__domain[1][0].id='gs_nip'
if ( not hasattr(y,'_TransientVariable__domain') ) :
# There probably doesn't exist enough information to write a correct
# grid, but this will help.
y._TransientVariable__domain = [ (x,), (y,) ]
y._TransientVariable__domain[0][0].id='gs_njp'
y._TransientVariable__domain[1][0].id='gs_nip'
file.write(x)
file.write(y)
file.close()
def write_gridspec( self, filename ):
"""writes this grid to a Gridspec-compliant file, or does nothing if there is
already a known file corresponding to this grid. The filename should be a
complete path."""
# This method was never completed because the libCF functionality I had planned to
# use never appeared (yet).
# The functionality (other than checking gsfile) is now done by the writeg
# method above.
if ( not hasattr( self, "gsfile" ) ):
self.gsfile=None
self.gspath=None
if ( self.gsfile!=None ):
return ( tcg.gsfile, tcg.gspath )
else:
raise RuntimeError, 'The libCF/Gridspec API does not provide for writing CurveGrids<<<'
def init_from_gridspec( self, filename ):
"""reads to grid from a Gridspec-compliant file. The filename should be a
complete path. The contents of the file may overwrite data in the existing
grid object."""
# - This is really a kind of init function. The __init__ function should
# determine what kind of initialization is being done (from a file, from
# another object, from arguments specifying the contents e.g. axes) and branch
# to call a method such as this one.
# - Another way to read a file is with the standard CDMS
# pattern file=cdms2.open(...); g=file('grid') or g=file('')
try:
f = cdms2.open( filename )
except IOError:
print "Cannot open grid file for reading: ", filename
return
init_from_gridspec_file( self, f )
f.close()
def init_from_gridspec_file( self, f ):
"""reads to grid from a Gridspec-compliant file, f. This f should be a
CdmsFile object, already open for reading. The contents of the file may
overwrite data in the existing grid object."""
# As for the above init_from_gridspec method, this is really a kind of
# init function and should be called from __init__ .
ax, ay, gs_attr = f.gridspec_file_contents()
# ... gridspec_file_contents is defined in dataset.py
self.__init__( ay, ax )
self.gsfile = gs_attr['filebase']
self.gspath = gs_attr['filepath']
self.long_name = gs_attr['long_name']
# gs_geometryType is no longer required of Gridspec files, but as yet
# there is no other proposal for describing the geometry (July 2010)
self.gs_geometryType = gs_attr['gs_geometryType']
# gs_discretizationType is no longer required of Gridspec files, but it's
# harmless and may come in useful
self.gs_discretizationType = gs_attr['gs_discretizationType']
return self
def subSlice(self, *specs, **keys):
"""Get a transient subgrid based on an argument list <specs> of slices."""
newlat = self._lataxis_.subSlice(*specs, **keys)
newlon = self._lonaxis_.subSlice(*specs, **keys)
if self._maskVar_ is None:
newmask = None
else:
newmask = self._maskVar_.subSlice(*specs, **keys)
result = TransientCurveGrid(newlat, newlon, maskvar=newmask)
return result
def getGridSlices(self, domainlist, newaxislist, slicelist):
"""Determine which slices in slicelist correspond to the lat/lon elements
of the grid.
domainlist is a list of axes of a variable.
newaxislist is a list of result axes after the slicelist is applied to domainlist.
slicelist is a list of slices.
All lists are of equal length.
Return value is (newslicelist, gridaxislist) where
newslicelist is the elements of slicelist that correspond to the grid, in the
preferred order of the grid.
gridaxislist is the elements of newaxislist that correspond to the grid, in the
preferred order of the grid.
"""
iaxis = self._lataxis_.getAxis(0)
jaxis = self._lataxis_.getAxis(1)
k = 0
i = j = -1
for d in domainlist:
if d is iaxis:
inewaxis = newaxislist[k]
islice = slicelist[k]
i = k
if d is jaxis:
jnewaxis = newaxislist[k]
jslice = slicelist[k]
j = k
k += 1
if i==-1 or j==-1:
raise RuntimeError, 'Grid lat/lon domains do not match variable domain'
return ((islice, jslice), (inewaxis, jnewaxis))
def getIndex(self):
"""Get the grid index"""
if self._index_ is None:
latlin = numpy.ravel(numpy.ma.filled(self._lataxis_))
lonlin = numpy.ravel(numpy.ma.filled(self._lonaxis_))
self._index_ = bindex.bindexHorizontalGrid(latlin, lonlin)
return self._index_
def intersect(self, spec):
"""Intersect with the region specification.
'spec' is a region specification of the form defined in the grid module.
Returns (mask, indexspecs) where
'mask' is the mask of the result grid AFTER self and region spec are interested.
'indexspecs' is a list of index specifications suitable for slicing a
variable with the given grid.
"""
ni, nj = self.shape
index = self.getIndex()
latspec = spec[CoordTypeToLoc[LatitudeType]]
lonspec = spec[CoordTypeToLoc[LongitudeType]]
latlin = numpy.ravel(numpy.ma.filled(self._lataxis_))
lonlin = numpy.ravel(numpy.ma.filled(self._lonaxis_))
points = bindex.intersectHorizontalGrid(latspec, lonspec, latlin, lonlin, index)
if len(points)==0:
raise CDMSError, 'No data in the specified region, longitude=%s, latitude=%s'%(`lonspec`, `latspec`)
fullmask = numpy.ones(ni*nj)
numpy.put(fullmask, points, 0)
fullmask = numpy.reshape(fullmask, (ni,nj))
iind = points/nj
jind = points - iind*nj
imin, imax, jmin, jmax = (min(iind), max(iind)+1, min(jind), max(jind)+1)
submask = fullmask[imin:imax, jmin:jmax]
yid = self.getAxis(0).id
xid = self.getAxis(1).id
indexspecs = {yid:slice(imin,imax), xid:slice(jmin,jmax)}
return submask, indexspecs
def getAxisList(self):
return (self._lataxis_.getAxis(0), self._lataxis_.getAxis(1))
def isClose(self, g):
"""Return 1 iff g is a grid of the same type and shape. A real element-by-element
comparison would be too expensive here."""
if g is None:
return 0
elif self.shape != g.shape:
return 0
elif not isinstance(g, AbstractCurveGrid):
return 0
else:
return 1
def checkAxes(self, axes):
"""Return 1 iff every element of self.getAxisList() is in the list 'axes'."""
for item in self.getAxisList():
if item not in axes:
result = 0
break
else:
result = 1
return result
    def reconcile(self, axes):
        """Return a grid that is consistent with the axes, or None.
        For curvilinear grids this means that the grid-related axes are
        contained in the 'axes' list.
        """
        result = self
        selfaxes = self.getAxisList()
        # Collect the positions (0 and/or 1) of grid axes not present in 'axes'.
        missing = []
        for i in range(2):
            if selfaxes[i] not in axes:
                missing.append(i)
                result = None
        # Some of the grid axes are not in the 'axes' list
        if result is None:
            result = self.clone()
            used = []  # axes already matched
            # For each missing position, try to find an unused axis in 'axes'
            # with the same length and (numerically) close values, and
            # substitute it into the cloned grid's lat/lon axes.
            for i in missing:
                for item in axes:
                    if (item not in used) and len(selfaxes[i])==len(item) and allclose(selfaxes[i], item):
                        result._lataxis_.setAxis(i,item)
                        result._lonaxis_.setAxis(i,item)
                        used.append(item)
                        break
                else:
                    # No match for this position: reconciliation fails.
                    result = None
                    break
        return result
def flatAxes(self):
"""Return (flatlat, flatlon) where flatlat is a 1D NumPy array
having the same length as the number of cells in the grid, similarly
for flatlon."""
if self._flataxes_ is None:
import MV2 as MV
alat = MV.filled(self.getLatitude())
alon = MV.filled(self.getLongitude())
alatflat = numpy.ravel(alat)
alonflat = numpy.ravel(alon)
self._flataxes_ = (alatflat, alonflat)
return self._flataxes_
shape = property(_getShape,None)
## PropertiedClasses.set_property (AbstractCurveGrid, 'shape',
## AbstractCurveGrid._getShape, nowrite=1,
## nodelete=1)
class DatasetCurveGrid(AbstractCurveGrid):

    def __init__(self, latAxis, lonAxis, id, parent=None, maskvar=None, tempmask=None, node=None):
        """Create a dataset curvilinear grid.

        'parent' is the dataset that contains this grid; the remaining
        arguments are passed unchanged to AbstractCurveGrid.
        """
        AbstractCurveGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
        self.parent = parent

    def __repr__(self):
        # repr() replaces the Python-2-only backquote syntax.
        return "<DatasetCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
class FileCurveGrid(AbstractCurveGrid):

    def __init__(self, latAxis, lonAxis, id, parent=None, maskvar=None, tempmask=None, node=None):
        """Create a file curvilinear grid.

        'parent' is the file object that contains this grid; the remaining
        arguments are passed unchanged to AbstractCurveGrid.
        """
        AbstractCurveGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
        self.parent = parent

    def __repr__(self):
        # repr() replaces the Python-2-only backquote syntax.
        return "<FileCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
class TransientCurveGrid(AbstractCurveGrid):

    # Class-level counter used to generate unique default grid ids.
    grid_count = 0

    def __init__(self, latAxis, lonAxis, id=None, maskvar=None, tempmask=None):
        """Create a transient (in-memory) curvilinear grid.

        If 'id' is None, a unique identifier of the form 'grid_<n>' is
        generated from the class-level counter.
        """
        if id is None:
            TransientCurveGrid.grid_count += 1
            id = 'grid_' + str(TransientCurveGrid.grid_count)
        AbstractCurveGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask)

    def __repr__(self):
        # repr() replaces the Python-2-only backquote syntax.
        return "<TransientCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))

    def toCurveGrid(self, gridid=None):
        """Return self if 'gridid' is None, otherwise a clone renamed to 'gridid'."""
        if gridid is None:
            result = self
        else:
            result = self.clone()
            result.id = gridid
        return result
def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
    """Read a 'native' SCRIP grid file, returning a transient curvilinear grid.

    fileobj is an open CDMS dataset or file object.
    dims is the grid shape.
    whichType is the type of file, either "grid" or "mapping".
    If whichType is "mapping", whichGrid is the choice of grid, either
    "source" or "destination".
    """
    from coord import TransientAxis2D

    # Select the variable names to read.  Files that contain an 'S' variable
    # use one remapping-file naming convention; otherwise the original SCRIP
    # naming convention applies.
    if 'S' in fileobj.variables:
        if whichType=="grid":
            gridCornerLatName = 'grid_corner_lat'
            gridCornerLonName = 'grid_corner_lon'
            gridMaskName = 'grid_imask'
            gridCenterLatName = 'grid_center_lat'
            gridCenterLonName = 'grid_center_lon'
            titleName = 'title'
        elif whichGrid=="destination":
            gridCornerLatName = 'yv_b'
            gridCornerLonName = 'xv_b'
            gridMaskName = 'mask_b'
            gridCenterLatName = 'yc_b'
            gridCenterLonName = 'xc_b'
            titleName = 'dest_grid'
        else:
            gridCornerLatName = 'yv_a'
            gridCornerLonName = 'xv_a'
            gridMaskName = 'mask_a'
            gridCenterLatName = 'yc_a'
            gridCenterLonName = 'xc_a'
            titleName = 'source_grid'
    else:
        if whichType=="grid":
            gridCornerLatName = 'grid_corner_lat'
            gridCornerLonName = 'grid_corner_lon'
            gridMaskName = 'grid_imask'
            gridCenterLatName = 'grid_center_lat'
            gridCenterLonName = 'grid_center_lon'
            titleName = 'title'
        elif whichGrid=="destination":
            gridCornerLatName = 'dst_grid_corner_lat'
            gridCornerLonName = 'dst_grid_corner_lon'
            gridMaskName = 'dst_grid_imask'
            gridCenterLatName = 'dst_grid_center_lat'
            gridCenterLonName = 'dst_grid_center_lon'
            titleName = 'dest_grid'
        else:
            gridCornerLatName = 'src_grid_corner_lat'
            gridCornerLonName = 'src_grid_corner_lon'
            gridMaskName = 'src_grid_imask'
            gridCenterLatName = 'src_grid_center_lat'
            gridCenterLonName = 'src_grid_center_lon'
            titleName = 'source_grid'

    vardict = fileobj.variables
    cornerLat = fileobj(gridCornerLatName)
    cornerLon = fileobj(gridCornerLonName)
    ncorners = cornerLat.shape[-1]
    ni = dims[1]
    nj = dims[0]
    gridshape = (ni, nj)
    boundsshape = (ni, nj, ncorners)

    # Convert corner coordinates from radians to degrees if necessary.
    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6]=='radian':
        cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
        cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
    else:
        cornerLat = cornerLat.reshape(boundsshape)
        cornerLon = cornerLon.reshape(boundsshape)

    iaxis = TransientVirtualAxis("i",ni)
    jaxis = TransientVirtualAxis("j",nj)

    if gridMaskName in vardict:
        # SCRIP convention: 0 for invalid data
        # numpy.ma convention: 1 for invalid data
        mask = 1 - fileobj(gridMaskName)
        mask = mask.reshape(gridshape)
    else:
        mask = None

    # Cell centers: read them if present (converting radians to degrees),
    # otherwise fall back to the first corner of each cell.
    if gridCenterLatName in vardict:
        centerLat = fileobj(gridCenterLatName).reshape(gridshape)
        gclat = fileobj[gridCenterLatName]
        if hasattr(gclat, "units") and gclat.units.lower()=='radians':
            centerLat *= (180.0/numpy.pi)
    else:
        centerLat = cornerLat[:,:,0]

    if gridCenterLonName in vardict:
        centerLon = fileobj(gridCenterLonName).reshape(gridshape)
        gclon = fileobj[gridCenterLonName]
        if hasattr(gclon, "units") and gclon.units.lower()=='radians':
            centerLon *= (180.0/numpy.pi)
    else:
        centerLon = cornerLon[:,:,0]

    # Derive the grid id from the file's title attribute when available.
    if hasattr(fileobj,titleName):
        gridid = getattr(fileobj, titleName)
        gridid = gridid.strip().replace(' ', '_')
    else:
        gridid="<None>"

    lataxis = TransientAxis2D(centerLat, axes=(iaxis, jaxis), bounds=cornerLat,
                              attributes={'units':'degrees_north'}, id="latitude")
    lonaxis = TransientAxis2D(centerLon, axes=(iaxis, jaxis), bounds=cornerLon,
                              attributes={'units':'degrees_east'}, id="longitude")
    grid = TransientCurveGrid(lataxis, lonaxis, id=gridid, tempmask=mask)
    return grid
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/cache/list_database_keys.py |
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
# Public API of this module.
__all__ = [
    'ListDatabaseKeysResult',
    'AwaitableListDatabaseKeysResult',
    'list_database_keys',
    'list_database_keys_output',
]
@pulumi.output_type
class ListDatabaseKeysResult:
    """
    The secret access keys used for authenticating connections to redis
    """
    def __init__(__self__, primary_key=None, secondary_key=None):
        # Validate eagerly so that provider/engine bugs surface as clear
        # TypeErrors here rather than failing later inside Pulumi.
        if primary_key and not isinstance(primary_key, str):
            raise TypeError("Expected argument 'primary_key' to be a str")
        pulumi.set(__self__, "primary_key", primary_key)
        if secondary_key and not isinstance(secondary_key, str):
            raise TypeError("Expected argument 'secondary_key' to be a str")
        pulumi.set(__self__, "secondary_key", secondary_key)

    @property
    @pulumi.getter(name="primaryKey")
    def primary_key(self) -> str:
        """
        The current primary key that clients can use to authenticate
        """
        return pulumi.get(self, "primary_key")

    @property
    @pulumi.getter(name="secondaryKey")
    def secondary_key(self) -> str:
        """
        The current secondary key that clients can use to authenticate
        """
        return pulumi.get(self, "secondary_key")
class AwaitableListDatabaseKeysResult(ListDatabaseKeysResult):
    """Awaitable wrapper so the already-resolved result can be used
    transparently from async Pulumi programs."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns this method into a generator, which
        # is what makes the object awaitable; it resolves immediately.
        if False:
            yield self
        return ListDatabaseKeysResult(
            primary_key=self.primary_key,
            secondary_key=self.secondary_key)
def list_database_keys(cluster_name: Optional[str] = None,
                       database_name: Optional[str] = None,
                       resource_group_name: Optional[str] = None,
                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListDatabaseKeysResult:
    """
    Retrieves the access keys for the RedisEnterprise database.
    Azure REST API version: 2023-03-01-preview.


    :param str cluster_name: The name of the RedisEnterprise cluster.
    :param str database_name: The name of the database.
    :param str resource_group_name: The name of the database's resource group. The name is case insensitive.
    """
    # Assemble the invoke arguments expected by the provider (camelCase keys).
    invoke_args = {
        'clusterName': cluster_name,
        'databaseName': database_name,
        'resourceGroupName': resource_group_name,
    }
    merged_opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    result = pulumi.runtime.invoke('azure-native:cache:listDatabaseKeys', invoke_args, opts=merged_opts, typ=ListDatabaseKeysResult).value
    return AwaitableListDatabaseKeysResult(
        primary_key=pulumi.get(result, 'primary_key'),
        secondary_key=pulumi.get(result, 'secondary_key'))
@_utilities.lift_output_func(list_database_keys)
def list_database_keys_output(cluster_name: Optional[pulumi.Input[str]] = None,
                              database_name: Optional[pulumi.Input[str]] = None,
                              resource_group_name: Optional[pulumi.Input[str]] = None,
                              opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListDatabaseKeysResult]:
    """
    Retrieves the access keys for the RedisEnterprise database.
    Azure REST API version: 2023-03-01-preview.


    :param str cluster_name: The name of the RedisEnterprise cluster.
    :param str database_name: The name of the database.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    """
    # Body intentionally empty: lift_output_func wraps list_database_keys so
    # that Output-typed arguments are resolved before the plain invoke runs.
    ...
/jupyterhub_url_sharing-0.1.0.tar.gz/jupyterhub_url_sharing-0.1.0/node_modules/@typescript-eslint/eslint-plugin/docs/rules/no-redeclare.md | # Disallow variable redeclaration (`no-redeclare`)
## Rule Details
This rule extends the base [`eslint/no-redeclare`](https://eslint.org/docs/rules/no-redeclare) rule.
It adds support for TypeScript function overloads, and declaration merging.
## How to use
```jsonc
{
// note you must disable the base rule as it can report incorrect errors
"no-redeclare": "off",
"@typescript-eslint/no-redeclare": ["error"]
}
```
## Options
See [`eslint/no-redeclare` options](https://eslint.org/docs/rules/no-redeclare#options).
This rule adds the following options:
```ts
interface Options extends BaseNoRedeclareOptions {
ignoreDeclarationMerge?: boolean;
}
const defaultOptions: Options = {
...baseNoRedeclareDefaultOptions,
ignoreDeclarationMerge: true,
};
```
### `ignoreDeclarationMerge`
When set to `true`, the rule will ignore declaration merges between the following sets:
- interface + interface
- namespace + namespace
- class + interface
- class + namespace
- class + interface + namespace
- function + namespace
- enum + namespace
Examples of **correct** code with `{ ignoreDeclarationMerge: true }`:
```ts
interface A {
prop1: 1;
}
interface A {
prop2: 2;
}
namespace Foo {
export const a = 1;
}
namespace Foo {
export const b = 2;
}
class Bar {}
namespace Bar {}
function Baz() {}
namespace Baz {}
```
**Note:** Even with this option set to true, this rule will report if you name a type and a variable the same name. **_This is intentional_**.
Declaring a type and a variable with the same name is usually an accident, and it can lead to hard-to-understand code.
If you have a rare case where you're intentionally naming a type the same name as a variable, use a disable comment. For example:
```ts
type something = string;
// eslint-disable-next-line @typescript-eslint/no-redeclare -- intentionally naming the variable the same as the type
const something = 2;
```
<sup>Taken with ❤️ [from ESLint core](https://github.com/eslint/eslint/blob/master/docs/rules/no-redeclare.md)</sup>
| PypiClean |
import logging
from spaceone.core.service import *
from spaceone.core import utils
from spaceone.identity.error import *
from spaceone.identity.manager import DomainManager
from spaceone.identity.manager.domain_secret_manager import DomainSecretManager
from spaceone.identity.model import Domain
_LOGGER = logging.getLogger(__name__)
@authentication_handler(exclude=['create', 'list', 'get_public_key'])
@authorization_handler(exclude=['create', 'list', 'get_public_key'])
@mutation_handler(exclude=['create', 'list', 'get_public_key'])
@event_handler
class DomainService(BaseService):
    """Service layer for identity domain CRUD and auth-plugin management.

    All handlers run with 'DOMAIN' authorization scope except 'create',
    'list' and 'get_public_key', which are excluded from authentication.
    """

    def __init__(self, metadata):
        super().__init__(metadata)
        # Manager encapsulating all domain persistence operations.
        self.domain_mgr: DomainManager = self.locator.get_manager('DomainManager')

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['name'])
    def create(self, params):
        """ Create domain

        Args:
            params (dict): {
                'name': 'str',
                'config': 'dict',
                'plugin_info': 'dict',
                'tags': 'dict'
            }

        Returns:
            domain_vo (object)
        """

        # Create Domain
        domain_vo: Domain = self.domain_mgr.create_domain(params)

        # Create domain secret
        domain_secret_mgr: DomainSecretManager = self._get_domain_secret_manager()
        domain_secret_mgr.create_domain_secret(domain_vo.domain_id)

        return domain_vo

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def update(self, params):
        """ Update domain

        Args:
            params (dict): {
                'domain_id': 'str',
                'config': 'dict',
                'tags': 'dict'
            }

        Returns:
            domain_vo (object)
        """
        return self.domain_mgr.update_domain(params)

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def change_auth_plugin(self, params):
        """ Change domain auth plugin

        Args:
            params (dict): {
                'domain_id': 'str',
                'plugin_info': 'dict',
                'release_auth_plugin': 'bool'
            }

        Returns:
            domain_vo (object)
        """
        domain_id = params['domain_id']
        plugin_info = params.get('plugin_info')
        release_auth_plugin = params.get('release_auth_plugin', False)

        if release_auth_plugin:
            # release auth plugin
            _LOGGER.debug(f'[change_auth_plugin] release auth plugin')
            return self.domain_mgr.release_auth_plugin(domain_id)
        else:
            # 'plugin_info' is only required when not releasing the plugin.
            if plugin_info is None:
                raise ERROR_REQUIRED_PARAMETER(key='plugin_info')
            _LOGGER.debug(f'[change_auth_plugin] update plugin_info: {plugin_info}')
            return self.domain_mgr.change_auth_plugin(domain_id, plugin_info)

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def update_plugin(self, params):
        """ Update plugin

        Args:
            params (dict): {
                'domain_id': 'str',
                'version': 'str',
                'options': 'dict',
                'upgrade_mode': 'str',
            }

        Returns:
            domain_vo (object)
        """
        domain_id = params['domain_id']
        version = params.get('version')
        options = params.get('options')
        upgrade_mode = params.get('upgrade_mode')

        return self.domain_mgr.update_domain_plugin(domain_id, version, options, upgrade_mode)

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def verify_plugin(self, params):
        """ Verify auth plugin

        Args:
            params (dict): {
                'domain_id': 'str',
            }

        Returns:
            domain_vo (object)
        """
        domain_id = params['domain_id']
        return self.domain_mgr.verify_auth_plugin(domain_id)

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def delete(self, params):
        """ Delete domain

        Args:
            params (dict): {
                'domain_id': 'str'
            }

        Returns:
            None
        """
        self.domain_mgr.delete_domain(params['domain_id'])

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def enable(self, params):
        """ Enable domain

        Args:
            params (dict): {
                'domain_id': 'str'
            }

        Returns:
            domain_vo (object)
        """
        return self.domain_mgr.enable_domain(params['domain_id'])

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def disable(self, params):
        """ Disable domain

        Args:
            params (dict): {
                'domain_id': 'str'
            }

        Returns:
            domain_vo (object)
        """
        return self.domain_mgr.disable_domain(params['domain_id'])

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['domain_id'])
    def get(self, params):
        """ Get domain

        Args:
            params (dict): {
                'domain_id': 'str',
                'only': 'list'
            }

        Returns:
            domain_vo (object)
        """
        return self.domain_mgr.get_domain(params['domain_id'], params.get('only'))

    # NOTE(review): this decorator uses the key 'auth.scope' while every other
    # handler uses 'authorization.scope' — looks like a typo; confirm against
    # the transaction handler before changing.
    @transaction(append_meta={'auth.scope': 'SYSTEM'})
    @check_required(['domain_id'])
    def get_public_key(self, params):
        """ Get domain's public key for authentication

        Args:
            params (dict): {
                'domain_id': 'str'
            }

        Returns:
            result (dict): {
                'pub_jwk': 'str',
                'domain_id': 'str'
            }
        """
        domain_id = params['domain_id']
        domain_secret_mgr: DomainSecretManager = self._get_domain_secret_manager()
        pub_jwk = domain_secret_mgr.get_domain_public_key(domain_id=domain_id)

        return {
            'pub_jwk': pub_jwk,
            'domain_id': domain_id
        }

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @append_query_filter(['domain_id', 'name'])
    @append_keyword_filter(['domain_id', 'name'])
    def list(self, params):
        """ List domains

        Args:
            params (dict): {
                'domain_id': 'str',
                'name': 'str',
                'query': 'dict (spaceone.api.core.v1.Query)'
            }

        Returns:
            results (list): 'list of domain_vo'
            total_count (int)
        """
        query = params.get('query', {})
        return self.domain_mgr.list_domains(query)

    @transaction(append_meta={'authorization.scope': 'DOMAIN'})
    @check_required(['query'])
    @append_keyword_filter(['domain_id', 'name'])
    def stat(self, params):
        """
        Args:
            params (dict): {
                'query': 'dict (spaceone.api.core.v1.StatisticsQuery)'
            }

        Returns:
            values (list): 'list of statistics data'
            total_count (int)
        """
        query = params.get('query', {})
        return self.domain_mgr.stat_domains(query)

    def _get_domain_secret_manager(self):
        # Resolved lazily per call rather than cached in __init__.
        return self.locator.get_manager('DomainSecretManager')
import logging
from urllib.parse import ParseResult, urlparse
from admin_extra_buttons.utils import HttpResponseRedirectToReferrer
from django import forms
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import PermissionDenied, ValidationError
from django.http import Http404
from django.shortcuts import render
from django.utils.html import urlize
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
logger = logging.getLogger(__name__)
def get_sentry_host():
    """Return the scheme://host[:port] portion of the configured SENTRY_DSN."""
    parsed = urlparse(settings.SENTRY_DSN)
    base = f"{parsed.scheme}://{parsed.hostname}"
    if parsed.port:
        base = f"{base}:{parsed.port}"
    return base
def get_sentry_dashboard():
    """Return the Sentry project dashboard URL, or 'N/A' if no project is set."""
    project = getattr(settings, "SENTRY_PROJECT", None)
    if project:
        return f"{get_sentry_host()}/{project}"
    return 'N/A'
def get_event_url(event_id):
    """Return a Sentry search URL for *event_id*, or None if it cannot be built."""
    try:
        host = get_sentry_host()
        return f"{host}/{settings.SENTRY_PROJECT}/?query={event_id}"
    except Exception as exc:
        # Best effort: log and fall through to an implicit None.
        logger.exception(exc)
def make_sentry_link(event_id):
    """Return an HTML anchor linking *event_id* to Sentry, or the bare id
    when no project is configured or the URL cannot be built."""
    if getattr(settings, "SENTRY_PROJECT", ""):
        try:
            return f'<a href="{get_event_url(event_id)}">{event_id}</a>'
        except Exception as exc:
            logger.exception(exc)
    return event_id
class SentryForm(forms.Form):
    # Each action fires a different kind of test event; the numeric entries
    # simulate the corresponding Django error handlers.
    ACTIONS = [
        ("capture_event", "capture_event()"),
        ("capture_exception", "capture_exception"),
        ("capture_message", "capture_message"),
        ("logging_integration", "logging_integration"),
        ("400", "Error 400"),
        ("403", "Error 403"),
        ("404", "Error 404"),
        ("500", "Error 500"),
    ]
    action = forms.ChoiceField(choices=ACTIONS, widget=forms.RadioSelect)
def panel_sentry(self, request, extra_context=None):
    """Admin console panel: show the Sentry configuration and let the user
    fire test events (messages, exceptions, simulated HTTP errors) to verify
    the integration end to end.  Bound to the admin site, hence 'self'."""
    try:
        import sentry_sdk
    except ImportError as exc:
        # sentry_sdk is an optional dependency; bail out with a hint.
        messages.add_message(request, messages.ERROR, f"{exc.__class__.__name__}: {exc}. Please remove `panel_sentry`.")
        return HttpResponseRedirectToReferrer(request)
    context = self.each_context(request)
    context["title"] = "Sentry"
    # Static configuration summary rendered at the top of the panel.
    context["info"] = {
        "SENTRY_DSN": settings.SENTRY_DSN,
        "SENTRY_SERVER_URL": mark_safe(urlize(get_sentry_host())),
        "SENTRY_DASHBOARD": mark_safe(urlize(get_sentry_dashboard())),
        "SENTRY_PROJECT": getattr(settings, "SENTRY_PROJECT", "N/A") or "N/A",
        "SENTRY_ENVIRONMENT": getattr(settings, "SENTRY_ENVIRONMENT", "N/A") or "N/A",
    }
    if request.method == "POST":
        form = SentryForm(request.POST)
        if form.is_valid():
            last_event_id = None
            opt = form.cleaned_data["action"]
            if opt == "capture_event":
                last_event_id = sentry_sdk.capture_event({"capture_event() Test": 1})
            elif opt == "capture_exception":
                last_event_id = sentry_sdk.capture_exception(Exception("capture_exception() Test"))
            elif opt == "capture_message":
                last_event_id = sentry_sdk.capture_message("capture_message() Test")
            elif opt == "logging_integration":
                # Exercise the logging integration: the event id is then
                # retrieved from the SDK rather than returned by the call.
                try:
                    raise Exception("Logging Integration/last_event_id() Test")
                except Exception as e:
                    logger.exception(e)
                    last_event_id = sentry_sdk.last_event_id()
            elif opt in ["400", "403", "404", "500"]:
                from django.conf.urls import handler400, handler403, handler404, handler500
                # Map each HTTP status to the exception that triggers it and
                # the corresponding Django error handler.
                mapping = {
                    "400": (ValidationError, handler400),
                    "403": (PermissionDenied, handler403),
                    "404": (Http404, handler404),
                    "500": (Exception, handler500),
                }
                error, handler = mapping[opt]
                try:
                    raise error(f"Error {opt} Test")
                except Exception as e:
                    logger.exception(e)
                    last_event_id = sentry_sdk.last_event_id()
                    handler(request, e)
            messages.add_message(request, messages.SUCCESS,
                                 mark_safe(f"Sentry ID: {make_sentry_link(last_event_id)}"))
    else:
        form = SentryForm()
    context["form"] = form
    return render(request, "smart_admin/panels/sentry.html", context)


panel_sentry.verbose_name = _("Sentry")  # type: ignore[attr-defined]
panel_sentry.url_name = "sentry"  # type: ignore[attr-defined]
"use strict";
const PureExpressionDependency = require("../dependencies/PureExpressionDependency");
const InnerGraph = require("./InnerGraph");
/** @typedef {import("estree").ClassDeclaration} ClassDeclarationNode */
/** @typedef {import("estree").ClassExpression} ClassExpressionNode */
/** @typedef {import("estree").Node} Node */
/** @typedef {import("estree").VariableDeclarator} VariableDeclaratorNode */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Dependency")} Dependency */
/** @typedef {import("../dependencies/HarmonyImportSpecifierDependency")} HarmonyImportSpecifierDependency */
/** @typedef {import("../javascript/JavascriptParser")} JavascriptParser */
/** @typedef {import("./InnerGraph").InnerGraph} InnerGraph */
/** @typedef {import("./InnerGraph").TopLevelSymbol} TopLevelSymbol */
const { topLevelSymbolTag } = InnerGraph;
/**
 * InnerGraphPlugin wires the InnerGraph analysis into the JavaScript parser:
 * during prewalking it tags top-level symbols (functions, classes, const
 * declarations), during walking it records which symbols reference which,
 * and it attaches PureExpressionDependency instances so that pure
 * initializers of unused exports can be skipped in the generated code.
 */
class InnerGraphPlugin {
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"InnerGraphPlugin",
			(compilation, { normalModuleFactory }) => {
				const logger = compilation.getLogger("webpack.InnerGraphPlugin");

				compilation.dependencyTemplates.set(
					PureExpressionDependency,
					new PureExpressionDependency.Template()
				);

				/**
				 * @param {JavascriptParser} parser the parser
				 * @param {Object} parserOptions options
				 * @returns {void}
				 */
				const handler = (parser, parserOptions) => {
					// Attach a PureExpressionDependency for a superclass
					// expression once the usage state of the enclosing
					// symbol is known (only when it is partially unused).
					const onUsageSuper = sup => {
						InnerGraph.onUsage(parser.state, usedByExports => {
							switch (usedByExports) {
								case undefined:
								case true:
									return;
								default: {
									const dep = new PureExpressionDependency(sup.range);
									dep.loc = sup.loc;
									dep.usedByExports = usedByExports;
									parser.state.module.addDependency(dep);
									break;
								}
							}
						});
					};

					parser.hooks.program.tap("InnerGraphPlugin", () => {
						InnerGraph.enable(parser.state);
					});

					parser.hooks.finish.tap("InnerGraphPlugin", () => {
						if (!InnerGraph.isEnabled(parser.state)) return;

						logger.time("infer dependency usage");
						InnerGraph.inferDependencyUsage(parser.state);
						logger.timeAggregate("infer dependency usage");
					});

					// During prewalking the following datastructures are filled with
					// nodes that have a TopLevelSymbol assigned and
					// variables are tagged with the assigned TopLevelSymbol

					// We differ 3 types of nodes:
					// 1. full statements (export default, function declaration)
					// 2. classes (class declaration, class expression)
					// 3. variable declarators (const x = ...)

					/** @type {WeakMap<Node, TopLevelSymbol>} */
					const statementWithTopLevelSymbol = new WeakMap();
					/** @type {WeakMap<Node, Node>} */
					const statementPurePart = new WeakMap();

					/** @type {WeakMap<ClassExpressionNode | ClassDeclarationNode, TopLevelSymbol>} */
					const classWithTopLevelSymbol = new WeakMap();

					/** @type {WeakMap<VariableDeclaratorNode, TopLevelSymbol>} */
					const declWithTopLevelSymbol = new WeakMap();
					/** @type {WeakSet<VariableDeclaratorNode>} */
					const pureDeclarators = new WeakSet();

					// The following hooks are used during prewalking:

					parser.hooks.preStatement.tap("InnerGraphPlugin", statement => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						if (parser.scope.topLevelScope === true) {
							if (statement.type === "FunctionDeclaration") {
								const name = statement.id ? statement.id.name : "*default*";
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								statementWithTopLevelSymbol.set(statement, fn);
								return true;
							}
						}
					});

					parser.hooks.blockPreStatement.tap("InnerGraphPlugin", statement => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						if (parser.scope.topLevelScope === true) {
							if (statement.type === "ClassDeclaration") {
								const name = statement.id ? statement.id.name : "*default*";
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								classWithTopLevelSymbol.set(statement, fn);
								return true;
							}
							if (statement.type === "ExportDefaultDeclaration") {
								const name = "*default*";
								const fn = InnerGraph.tagTopLevelSymbol(parser, name);
								const decl = statement.declaration;
								if (
									decl.type === "ClassExpression" ||
									decl.type === "ClassDeclaration"
								) {
									classWithTopLevelSymbol.set(decl, fn);
								} else if (parser.isPure(decl, statement.range[0])) {
									statementWithTopLevelSymbol.set(statement, fn);
									if (
										!decl.type.endsWith("FunctionExpression") &&
										!decl.type.endsWith("Declaration") &&
										decl.type !== "Literal"
									) {
										statementPurePart.set(statement, decl);
									}
								}
							}
						}
					});

					parser.hooks.preDeclarator.tap(
						"InnerGraphPlugin",
						(decl, statement) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (
								parser.scope.topLevelScope === true &&
								decl.init &&
								decl.id.type === "Identifier"
							) {
								const name = decl.id.name;
								if (decl.init.type === "ClassExpression") {
									const fn = InnerGraph.tagTopLevelSymbol(parser, name);
									classWithTopLevelSymbol.set(decl.init, fn);
								} else if (parser.isPure(decl.init, decl.id.range[1])) {
									const fn = InnerGraph.tagTopLevelSymbol(parser, name);
									declWithTopLevelSymbol.set(decl, fn);
									if (
										!decl.init.type.endsWith("FunctionExpression") &&
										decl.init.type !== "Literal"
									) {
										pureDeclarators.add(decl);
									}
									return true;
								}
							}
						}
					);

					// During real walking we set the TopLevelSymbol state to the assigned
					// TopLevelSymbol by using the fill datastructures.

					// In addition to tracking TopLevelSymbols, we sometimes need to
					// add a PureExpressionDependency. This is needed to skip execution
					// of pure expressions, even when they are not dropped due to
					// minimizing. Otherwise symbols used there might not exist anymore
					// as they are removed as unused by this optimization

					// When we find a reference to a TopLevelSymbol, we register a
					// TopLevelSymbol dependency from TopLevelSymbol in state to the
					// referenced TopLevelSymbol. This way we get a graph of all
					// TopLevelSymbols.

					// The following hooks are called during walking:

					parser.hooks.statement.tap("InnerGraphPlugin", statement => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						if (parser.scope.topLevelScope === true) {
							InnerGraph.setTopLevelSymbol(parser.state, undefined);

							const fn = statementWithTopLevelSymbol.get(statement);
							if (fn) {
								InnerGraph.setTopLevelSymbol(parser.state, fn);
								const purePart = statementPurePart.get(statement);
								if (purePart) {
									InnerGraph.onUsage(parser.state, usedByExports => {
										switch (usedByExports) {
											case undefined:
											case true:
												return;
											default: {
												const dep = new PureExpressionDependency(
													purePart.range
												);
												dep.loc = statement.loc;
												dep.usedByExports = usedByExports;
												parser.state.module.addDependency(dep);
												break;
											}
										}
									});
								}
							}
						}
					});

					parser.hooks.classExtendsExpression.tap(
						"InnerGraphPlugin",
						(expr, statement) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (parser.scope.topLevelScope === true) {
								const fn = classWithTopLevelSymbol.get(statement);
								if (
									fn &&
									parser.isPure(
										expr,
										statement.id ? statement.id.range[1] : statement.range[0]
									)
								) {
									InnerGraph.setTopLevelSymbol(parser.state, fn);
									onUsageSuper(expr);
								}
							}
						}
					);

					parser.hooks.classBodyElement.tap(
						"InnerGraphPlugin",
						(element, classDefinition) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (parser.scope.topLevelScope === true) {
								const fn = classWithTopLevelSymbol.get(classDefinition);
								if (fn) {
									InnerGraph.setTopLevelSymbol(parser.state, undefined);
								}
							}
						}
					);

					parser.hooks.classBodyValue.tap(
						"InnerGraphPlugin",
						(expression, element, classDefinition) => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							if (parser.scope.topLevelScope === true) {
								const fn = classWithTopLevelSymbol.get(classDefinition);
								if (fn) {
									if (
										!element.static ||
										parser.isPure(
											expression,
											element.key ? element.key.range[1] : element.range[0]
										)
									) {
										InnerGraph.setTopLevelSymbol(parser.state, fn);
										if (element.type !== "MethodDefinition" && element.static) {
											InnerGraph.onUsage(parser.state, usedByExports => {
												switch (usedByExports) {
													case undefined:
													case true:
														return;
													default: {
														const dep = new PureExpressionDependency(
															expression.range
														);
														dep.loc = expression.loc;
														dep.usedByExports = usedByExports;
														parser.state.module.addDependency(dep);
														break;
													}
												}
											});
										}
									} else {
										InnerGraph.setTopLevelSymbol(parser.state, undefined);
									}
								}
							}
						}
					);

					parser.hooks.declarator.tap("InnerGraphPlugin", (decl, statement) => {
						if (!InnerGraph.isEnabled(parser.state)) return;
						const fn = declWithTopLevelSymbol.get(decl);

						if (fn) {
							InnerGraph.setTopLevelSymbol(parser.state, fn);
							if (pureDeclarators.has(decl)) {
								if (decl.init.type === "ClassExpression") {
									if (decl.init.superClass) {
										onUsageSuper(decl.init.superClass);
									}
								} else {
									InnerGraph.onUsage(parser.state, usedByExports => {
										switch (usedByExports) {
											case undefined:
											case true:
												return;
											default: {
												const dep = new PureExpressionDependency(
													decl.init.range
												);
												dep.loc = decl.loc;
												dep.usedByExports = usedByExports;
												parser.state.module.addDependency(dep);
												break;
											}
										}
									});
								}
							}
							parser.walkExpression(decl.init);
							InnerGraph.setTopLevelSymbol(parser.state, undefined);
							return true;
						}
					});

					parser.hooks.expression
						.for(topLevelSymbolTag)
						.tap("InnerGraphPlugin", () => {
							const topLevelSymbol = /** @type {TopLevelSymbol} */ (
								parser.currentTagData
							);
							const currentTopLevelSymbol = InnerGraph.getTopLevelSymbol(
								parser.state
							);
							InnerGraph.addUsage(
								parser.state,
								topLevelSymbol,
								currentTopLevelSymbol || true
							);
						});
					parser.hooks.assign
						.for(topLevelSymbolTag)
						.tap("InnerGraphPlugin", expr => {
							if (!InnerGraph.isEnabled(parser.state)) return;
							// Plain re-assignment to a tagged symbol: claim it so no
							// other plugin treats it as a free-variable write.
							if (expr.operator === "=") return true;
						});
				};
				normalModuleFactory.hooks.parser
					.for("javascript/auto")
					.tap("InnerGraphPlugin", handler);
				normalModuleFactory.hooks.parser
					.for("javascript/esm")
					.tap("InnerGraphPlugin", handler);

				compilation.hooks.finishModules.tap("InnerGraphPlugin", () => {
					logger.timeAggregateEnd("infer dependency usage");
				});
			}
		);
	}
}

module.exports = InnerGraphPlugin;
/lctime-0.0.17.tar.gz/lctime-0.0.17/README.md | # LibreCell - Lib
Characterization kit for CMOS cells.
This Python package comes with some stand-alone command-line tools:
* Most notably `lctime` for *recognition* and *characterization* of combinational and sequential cells.
* `sp2bool`: Recognition ('reverse engineering') of transistor networks. This is intended for analysis and debugging.
* `libertyviz`: Visualization of NDLM tables.
## Getting started
```
# Clone this repository.
git clone https://codeberg.org/librecell/lctime
cd lctime
# Install
./install_develop.sh
# Run tests
cd tests
./run_tests.sh
```
### Characterize a cell
A ready-to-run example can be found in the `examples` folder.
The script `run_example.sh` should characterize the `INVX1` inverter.
The following example determines the input capacitances and timing delays of a combinational cell.
It is assumed that `FreePDK45` is installed in the users home directory.
Required inputs are:
* --liberty: A template liberty file which defines how the cells should be characterized.
* --include: SPICE files or models to be included.
* --spice: A SPICE file which contains the transistor level circuit of the cell (best including extracted parasitic capacitances).
* --cell: Name of the cell to be characterized.
* --output: Output liberty file which will contain the characterization data.
Characterize a single cell:
```sh
lctime --liberty ~/FreePDK45/osu_soc/lib/files/gscl45nm.lib \
--include ~/FreePDK45/osu_soc/lib/files/gpdk45nm.m \
--output-loads "0.05, 0.1, 0.2, 0.4, 0.8, 1.6" \
--slew-times "0.1, 0.2, 0.4, 0.8, 1.6, 3.2" \
--spice ~/FreePDK45/osu_soc/lib/source/netlists/AND2X1.pex.netlist \
--cell AND2X1 \
--output /tmp/and2x1.lib
```
Characterize multiple cells in the same run:
```sh
lctime --liberty ~/FreePDK45/osu_soc/lib/files/gscl45nm.lib \
--include ~/FreePDK45/osu_soc/lib/files/gpdk45nm.m \
--output-loads "0.05, 0.1, 0.2, 0.4, 0.8, 1.6" \
--slew-times "0.1, 0.2, 0.4, 0.8, 1.6, 3.2" \
--spice ~/FreePDK45/osu_soc/lib/source/netlists/*.pex.netlist \
--cell INVX1 AND2X1 XOR2X1 \
--output /tmp/invx1_and2x1_xor2x1.lib
```
### Cell recognition
Cell types can be recognized automatically such that only a minimal
liberty file needs to be supplied.
```sh
cd examples
lctime --liberty template.lib \
--analize-cell-function \
--include gpdk45nm.m \
--spice INVX1.pex.netlist \
--cell INVX1 \
--output-loads "0.05, 0.1, 0.2, 0.4, 0.8, 1.6" \
--slew-times "0.1, 0.2, 0.4, 0.8, 1.6, 3.2" \
--output invx1.lib
```
### Sequential cells
Characterization of sequential cells involves finding hold, setup, removal and recovery constraints.
For an example see `examples/run_example_flip-flop.sh`.
### Visualization
Visualize the result:
```sh
libertyviz -l /tmp/and2x1.lib --cell AND2X1 --pin Y --related-pin A --table cell_rise
```
### Characterize a cell with differential inputs
Differential inputs can be specified in the liberty template with the `complementary_pin` attribute.
Only the non-inverted pin should appear in the liberty file.
Differential pairs can also be recognized based on their naming. For example if pairs are named with suffixes `_p` for
the non-inverted pin and `_n` for the inverted pin:
```sh
lctime --diff %_p,%_n ...
```
### Merging liberty files
`lctime` will output a liberty file containing only one cell. The `libertymerge` command allows merging this kind of
output file back into the liberty template.
The following example will take `base_liberty.lib` as a template and update its `cell` entries with the data found in
the liberty files in the `characterization` directory.
```sh
libertymerge -b base_liberty.lib \
-o output_liberty.lib \
-u characterization/*.lib
```
This approach allows running the characterization of multiple cells independently and in parallel (e.g. using `make`).
### Recognize a cell
`lctime` can recognize the boolean function of cells based on the transistor network. In addition to combinational functions,
memory loops can also be found and abstracted into latches or flip-flops.
The `sp2bool` command can be used to analyze cells and dump information about their behaviour. This can be useful for debugging and verification.
Example:
```sh
# Analyze a combinational cell.
sp2bool --spice ~/FreePDK45/osu_soc/lib/files/cells.sp --cell NAND2X1
# Analyze a flip-flop with asynchronous set and reset signals.
sp2bool --spice ~/FreePDK45/osu_soc/lib/files/cells.sp --cell DFFSR
```
For cells with *differential* inputs the `--diff` argument must be used to specify differential pairs.
| PypiClean |
/stringdiff-1.0.3-py3-none-any.whl/stringdiff.py | class EditDistance:
def __init__(self, reference, hypothesis, unit="word"):
"""
Convert the strings into lists
If the strings are phrases with more than one word, split by space;
if the strings have no space, split into a list of chars;
"""
if unit == "word":
self.reference = reference.split(' ')
self.hypothesis= hypothesis.split(' ')
elif unit == "char":
self.reference = list(reference)
self.hypothesis = list(hypothesis)
else:
print("ERROR: invalid name of unit")
self.ref_len = len(self.reference)
    def __get_matrix__(self):
        """Build the edit-distance dynamic-programming matrix.

        Rows are indexed by hypothesis tokens, columns by reference tokens,
        each preceded by a '*' placeholder for the empty prefix. Returns a
        len(hypothesis)+1 by len(reference)+1 matrix of cumulative costs.
        """
        #get a list of token, preceded by a placeholder matching the beginning of a phrase;
        string1 = ['*'] + self.reference
        string2 = ['*'] + self.hypothesis
        # set up a len(string2) x len(string1) matrix of zeros
        matrix = [[0] * len(string1) for x in range(len(string2))]
        #set up the first column and row (cost of pure insertions/deletions)
        for i in range(len(string1)):
            matrix[0][i] = i
        for j in range(len(string2)):
            matrix[j][0] = j
        #loop through the array, filling each cell from its three neighbors
        for i in range(1, len(string2)):
            for j in range(1, len(string1)):
                UP = matrix[i-1][j]
                LEFT = matrix[i][j-1]
                DIA = matrix[i-1][j-1]
                previous_number = min(#find the smallest number among top, left, corner
                    UP,
                    LEFT,
                    DIA
                )
                char2 = string2[i]
                char1 = string1[j]
                # NOTE(review): this is a non-standard variant of the Levenshtein
                # recurrence — even on a token match, a cost is added when UP or
                # LEFT is strictly smaller than DIA. The traceback in
                # get_movement() relies on this exact tie-break behavior.
                if char2 != char1 or UP < DIA or LEFT < DIA:
                    matrix[i][j] = previous_number + 1
                else:
                    matrix[i][j] = previous_number
        return matrix
def error_count(self):
'''
return number of errors
'''
return self.__get_matrix__()[-1][-1]
    def get_movement(self):
        """Trace back through the DP matrix from the bottom-right corner.

        Returns a list of [direction, delta] pairs in left-to-right order,
        where direction is 'diagonal' (match/substitution), 'right'
        (deletion from the reference) or 'down' (insertion into the
        hypothesis), and delta is the cost change of that step (-1 means the
        step consumed an edit).
        """
        matrix = self.__get_matrix__()
        #traceback initial position
        string1 = ['*'] + self.reference
        string2 = ['*'] + self.hypothesis
        x = len(string2) - 1
        y = len(string1) - 1
        movement = []
        while x > 0 and y > 0:
            up = int(matrix[x-1][y])
            diagonal = int(matrix[x-1][y-1])
            left = int(matrix[x][y-1])
            current = int(matrix[x][y])
            if up < diagonal and up < left: #up is the smallest number, meaning it moved down
                x -= 1
                movement = [['down', up - current]] + movement #up-current: if the number changed
            elif diagonal < up and diagonal < left:
                x -= 1
                y -= 1
                movement = [['diagonal', diagonal - current]] + movement
            elif left < diagonal and left < up:
                y -= 1
                movement = [['right', left - current]] + movement
            elif up == diagonal and up < left: #up and diagonal cell the same
                if string1[y] == string2[x]: #if the chars are the same, it must be from diagonal, cause no movement happened
                    x -= 1
                    y -= 1
                    movement = [['diagonal', diagonal - current]] + movement
                else:
                    x -= 1
                    movement = [['down', up - current]] + movement
            elif diagonal == left and diagonal < up:
                if string1[y] == string2[x]:
                    x -= 1
                    y -= 1
                    movement = [['diagonal', diagonal - current]] + movement
                else:
                    y -= 1
                    movement = [['right', left - current]] + movement
            else: #it should be impossible that the left and top cells are both the smallest
                x -= 1
                y -= 1
                movement = [['diagonal', diagonal - current]] + movement
        #in case the loop has stopped but we have not reached the top left corner,
        #pad the remaining steps as pure insertions ('down') or deletions ('right')
        if y == 0 and x != 0:
            for i in range(x):
                movement = [['down', -1]] + movement
        elif x == 0 and y != 0:
            for j in range(y):
                movement = [['right', -1]] + movement
        return movement
    def generate_change(self, alignment=False):
        """Turn the traceback movements into a list of edit operations.

        Each tracker entry is one of:
          [token, 'deleted', ref_index]
          [token, 'inserted', hyp_index]
          [ref_token, 'substituted', hyp_token, ref_index, hyp_index]

        When ``alignment`` is True, returns the two token lists padded with
        '*' gap markers instead of the tracker.
        """
        changes = self.get_movement()
        tracker = []
        string1 = self.reference
        string2 = self.hypothesis
        # counters used to map positions in the alignment back to the
        # original (unpadded) indices of each string
        delete_num = 0
        insert_num = 0
        index_list_1 = []
        index_list_2 = []
        for i in range(len(changes)):
            move = changes[i]
            if move[0] == 'right':
                # a 'right' step consumes a reference token with no hypothesis
                # counterpart -> deletion; pad the hypothesis with a gap
                tracker.append([string1[i], 'deleted', i - insert_num])
                delete_num += 1
                #index_list_1.append(i - insert_num)
                string2 = string2[:i] + ['*'] + string2[i:]
               # string1 = string1[:i] + ['*'] + string1[i+1:]
            elif move[0] == 'down':
                # a 'down' step consumes a hypothesis token with no reference
                # counterpart -> insertion; pad the reference with a gap
                tracker.append([string2[i], 'inserted', i - delete_num])
                insert_num += 1
                #index_list_2.append(i - delete_num)
                string1 = string1[:i] + ['*'] + string1[i:]
            elif move[0] == 'diagonal' and move[1] == -1:
                # a diagonal step that changed the cost -> substitution
                tracker.append([string1[i], 'substituted', string2[i],i - insert_num,i - delete_num])
        if alignment:
            return string1, string2
        return tracker
def get_deleted_and_substituted(self):
errored_words = []
tracker = self.generate_change()
for change in tracker:
if change[1] in ['deleted', 'substituted']:
errored_words.append(change[0])
return errored_words | PypiClean |
/hello2-2.0.15.tar.gz/hello2-2.0.15/hello/fiftyone/novabot/det_annotate.py | import fiftyone as fo
from fiftyone import ViewField as F
import hello
import hello.fiftyone.annotate as hoa
import hello.fiftyone.coco as hoco
import hello.fiftyone.core as hoc
import hello.fiftyone.dataset as hod
# Log the helper-library version so this run is reproducible.
print(hello.__version__)
# %%
# Create a fresh persistent detection dataset for this annotation batch,
# deleting any stale dataset with the same name first.
dataset_name = "novabot_front_det_20230314_zhengshu_batch01_object9_ver004a"
dataset_type = "detection"
version = "object9"
classes = ["person", "animal", "shoes", "wheel", "other obstacle", "obstacle", "leaf debris", "faeces", "rock", "charging station"]
mask_targets = {}
hod.delete_datasets([dataset_name], non_persistent=False)
dataset = hod.create_dataset(dataset_name, dataset_type, version, classes, mask_targets)
# %%
# Load the training images and their COCO-format detection labels into the
# "ground_truth" field, then report label/image counts.
label_classes = dataset.default_classes
from_dir = "/workspace/users/hejian/todo/novabot_front_det_20230314_zhengshu_batch01_object9_ver004/train"
hod.add_images_dir(dataset, f"{from_dir}/data", "train")
from_dir = "/workspace/users/hejian/todo/novabot_front_det_20230314_zhengshu_batch01_object9_ver004/train"
hod.add_detection_labels(dataset, "ground_truth", f"{from_dir}/labels.json", label_classes, mode="coco")
ret = hoc.count_values(dataset, "ground_truth.detections.label")
print("count-images:", dataset.count("filepath"))
# %%
# Drop all "leaf debris" labels (keep the samples themselves).
dataset = dataset.filter_labels("ground_truth", ~F("label").is_in(["leaf debris"]), only_matches=False)
print("count-labels:", dataset.count("ground_truth.detections"))
# %%
# Remap labels to the canonical class list; each inner list's first entry is
# the target name and the rest are aliases merged into it.
new_classes = [
    ["person"],
    ["animal"],
    ["shoes"],
    ["wheel"],
    ["other obstacle", "obstacle"],
    ["leaf debris"],
    ["faeces"],
    ["rock"],
    ["charging station"],
    ["background"],
]
dataset = hoc.remap_detections_dataset(dataset, new_classes, "ground_truth", background="background", least_one=True)
print(f"{dataset.default_classes=}\n{dataset.default_mask_targets=}")
print("count-labels:", dataset.count("ground_truth.detections"))
print("count-images:", dataset.count("filepath"))
# %%
# Remove near-duplicate boxes (IoU >= 0.99, regardless of class).
hod.delete_duplicate_labels(dataset, "ground_truth", iou_thresh=0.99, method="simple", iscrowd=None, classwise=False)
print("count-labels:", dataset.count("ground_truth.detections"))
# %% [markdown]
# ---
# start
# %%
# Load a previous iteration's labels into "ground_truth_iter" for comparison.
# NOTE(review): from_dir is an empty placeholder — fill in before running.
label_classes = dataset.default_classes
from_dir = ""
hod.add_detection_labels(dataset, "ground_truth_iter", f"{from_dir}/labels.json", label_classes, mode="coco")
ret = hoc.count_values(dataset, "ground_truth_iter.detections.label")
print("count-images:", dataset.count("filepath"))
# %%
# Alternative to the cell above: snapshot the current labels as the baseline.
dataset.clone_sample_field("ground_truth", "ground_truth_iter")
# %%
# Keep only boxes whose absolute pixel area is at least 512.
bbox_area = (
    F("$metadata.width") * F("bounding_box")[2] *
    F("$metadata.height") * F("bounding_box")[3]
)
view = dataset.filter_labels(
    "ground_truth", (512 <= bbox_area), only_matches=False
)
print("count-labels:", view.count("ground_truth.detections"))
# %%
# Materialize the filtered view as a new dataset.
dataset = view.clone()
dataset.name
# %%
# Send the whole dataset to CVAT for manual review/annotation.
view = dataset
hoa.to_cvat(
    "novabot_front_det_20230314_iter", view,
    label_field="ground_truth",
    label_type="detections",
    url="http://192.168.0.119:8080",
    username="hejian", password="LFIcvat123",
    task_size=1000, segment_size=200, task_assignee="hejian", job_assignees=["hejian"])
# %%
# Pull the reviewed annotations back from CVAT.
_dataset_name = "2023.05.31.16.21.28"
anno_keys = ["novabot_front_det_20230314_iter"]
dataset = hoa.from_cvat(
    _dataset_name, anno_keys,
    cleanup=False,
    url="http://192.168.0.119:8080",
    username="hejian", password="LFIcvat123")
ret = hoc.count_values(dataset, "ground_truth.detections.label")
print("count-images:", dataset.count("filepath"))
# %%
# Compare the new labels against the previous iteration to surface diffs.
results = dataset.evaluate_detections(
    "ground_truth",
    gt_field="ground_truth_iter",
    eval_key="eval",
)
# %%
# Inspect samples where the two label sets disagree (any FP or FN); move them
# from "train" to "issue" and review non-TP labels in the app (tag "ng" there).
dataset.untag_samples("ng")
view = dataset.match((F("eval_fp") > 0) | (F("eval_fn") > 0))
print("length:", view.count("filepath"))
view.untag_samples("train")
view.tag_samples("issue")
view = view.filter_labels(
    "ground_truth_iter", F("eval") != "tp", only_matches=False
).filter_labels(
    "ground_truth", F("eval") != "tp", only_matches=False
).match(
    (F("ground_truth_iter.detections").length() > 0) | (F("ground_truth.detections").length() > 0)
)
print("count-labels-old:", view.count("ground_truth_iter.detections"))
print("count-labels-new:", view.count("ground_truth.detections"))
session = fo.launch_app(view, port=20002, address="192.168.0.119", auto=False) # tag sample for `ng`
# %%
# Samples manually tagged "ng" stay in "issue"; everything else returns to "train".
view.untag_samples("issue")
view.tag_samples("train")
view.match_tags("ng").untag_samples("train")
view.match_tags("ng").tag_samples("issue")
# %%
# Send the remaining "issue" samples back to CVAT for a second fix-up round.
view = dataset.match_tags("issue")
hoa.to_cvat(
    "novabot_front_det_20230314_iter2", view,
    label_field="ground_truth",
    label_type="detections",
    url="http://192.168.0.119:8080",
    username="hejian", password="LFIcvat123",
    task_size=1000, segment_size=200, task_assignee="hejian", job_assignees=["hejian"])
# %%
# Pull the second-round annotations back from CVAT.
# NOTE(review): _dataset_name / anno_keys are empty placeholders — fill in.
_dataset_name = ""
anno_keys = [""]
dataset = hoa.from_cvat(
    _dataset_name, anno_keys,
    cleanup=False,
    url="http://192.168.0.119:8080",
    username="hejian", password="LFIcvat123")
ret = hoc.count_values(dataset, "ground_truth.detections.label")
print("count-images:", dataset.count("filepath"))
# %%
# Review remaining "issue" samples in the app; tag resolved ones "ok".
dataset.untag_samples("ok")
view = dataset.match_tags("issue")
session = fo.launch_app(view, port=20003, address="192.168.0.119", auto=False) # tag sample for `ok`
# %%
# Promote resolved samples back to "train".
dataset.match_tags("ok").untag_samples("issue")
dataset.match_tags("ok").tag_samples("train")
ret = hoc.count_values(dataset, "tags")
# %%
# Final sanity pass over "train"; tag any remaining bad samples "ng".
dataset.untag_samples("ng")
view = dataset.match_tags("train")
session = fo.launch_app(view, port=20004, address="192.168.0.119", auto=False) # tag sample for `ng`
# %%
dataset.match_tags("ng").untag_samples("train")
dataset.match_tags("ng").tag_samples("issue")
ret = hoc.count_values(dataset, "tags")
# %% [markdown]
# end
#
# ---
# %%
# Export everything (train/val/issue splits) in COCO format.
hoco.coco_export(f"exports/{dataset_name}", dataset, label_field="ground_truth", splits=["train", "val", "issue"], to_segmentations=False)
# %%
# Alternative export: first carve a 90/10 train/val split out of "train".
hoc.random_split(dataset.match_tags("train"), splits={"val": 0.1, "train": 0.9}, seed=51)
hoco.coco_export(f"exports/{dataset_name}", dataset, label_field="ground_truth", splits=["train", "val", "issue"], to_segmentations=False)
/img_detector_boxes_red-1.0.4.tar.gz/img_detector_boxes_red-1.0.4/img_detector_boxes_red/model.py | import torch
import torch.nn as nn
import torchvision
from torchvision.models.detection.faster_rcnn import FastRCNNPredictor
# Class id to name mapping for the 91-entry COCO detection label space.
# 'N/A' placeholders keep list indices aligned with the original COCO
# category ids (some ids are unused by the dataset).
COCO_INSTANCE_CATEGORY_NAMES = [
    '__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus',
    'train', 'truck', 'boat', 'traffic light', 'fire hydrant', 'N/A', 'stop sign',
    'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
    'elephant', 'bear', 'zebra', 'giraffe', 'N/A', 'backpack', 'umbrella', 'N/A', 'N/A',
    'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball',
    'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', 'tennis racket',
    'bottle', 'N/A', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl',
    'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza',
    'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed', 'N/A', 'dining table',
    'N/A', 'N/A', 'toilet', 'N/A', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
    'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'N/A', 'book',
    'clock', 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush'
]
# Class definition for the model
class ObjectDetectionModel(object):
    '''
    The blackbox object detection model (Faster RCNN for those who want to know).
    Given an image as numpy array (3, H, W), it detects objects (generates their category names,
    bounding boxes, and confidence scores).
    '''

    # __init__ function
    def __init__(self):
        # Load a COCO-pretrained Faster R-CNN and switch it to inference mode.
        self.model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)
        self.model.eval()

    # function for calling the faster-rcnn model
    def __call__(self, input):
        '''
        Arguments:
            input (numpy array): A (3, H, W) array of numbers in [0, 1] representing the image.

        Returns:
            pred_boxes (list): list of bounding boxes, [[(x1, y1), (x2, y2)], ...] where (x1, y1) is the
                               top-left corner and (x2, y2) is the bottom-right corner of each box.
                               (The docstring previously claimed flat [x1 y1 x2 y2] lists, which did
                               not match the actual return format.)
            pred_class (list): list of predicted class names
            pred_score (list): list of the probability (confidence) of prediction of each of the bounding boxes
        '''
        # Convert to a float32 tensor and add a batch dimension: (3, H, W) -> (1, 3, H, W).
        input_tensor = torch.from_numpy(input).float().unsqueeze(0)
        # Inference only: disable autograd so no gradient graph is built
        # (saves memory; the outputs are unchanged).
        with torch.no_grad():
            predictions = self.model(input_tensor)
        pred_class = [COCO_INSTANCE_CATEGORY_NAMES[i] for i in list(predictions[0]['labels'].numpy())]  # class names
        pred_boxes = [[(i[0], i[1]), (i[2], i[3])] for i in list(predictions[0]['boxes'].detach().numpy())]  # Bounding boxes
        pred_score = list(predictions[0]['scores'].detach().numpy())  # confidence per box
        return pred_boxes, pred_class, pred_score
/nsj_queue_lib-0.5.6-py3-none-any.whl/nsj_queue_lib/worker_webhook.py | from nsj_gcf_utils.http_util import HttpUtil
from nsj_gcf_utils.json_util import json_loads
from nsj_queue_lib.worker_base import WorkerBase
from nsj_queue_lib.settings import DEFAULT_WEBHOOK_TIMEOUT, logger
class WorkerWebhook(WorkerBase):
    """Queue worker that delivers webhook HTTP calls described by task payloads."""

    def execute(self, payload: str, tarefa: dict[str, any], _) -> str:
        """Framework entry point: log the task and delegate to make_request."""
        logger.info(f"Disparando Webhook de ID: {tarefa['id']}")
        logger.debug(f"Dados do Webhook: {tarefa}")
        return self.make_request(payload)

    def make_request(self, payload: str) -> str:
        """Perform the HTTP request described by the JSON task payload.

        The payload carries a "subscription" (url, headers, http_method) and a
        "publication" body (sent only for POST/PUT). Returns a status message
        truncated to 500 characters.
        """
        # Decode the task payload
        data = json_loads(payload)
        subscription = data["subscription"]

        # Perform the request
        logger.info(f"Chamando o endpoint: {subscription['url']}...")
        url = subscription["url"]
        headers = subscription["headers"]
        method = subscription["http_method"].upper()

        # Map each supported verb to its HttpUtil caller and whether it sends a body.
        dispatch = {
            "GET": (HttpUtil.get_retry, False),
            "POST": (HttpUtil.post_retry, True),
            "PUT": (HttpUtil.put_retry, True),
            "DELETE": (HttpUtil.delete_retry, False),
        }
        if method not in dispatch:
            raise Exception(f"Método HTTP não suportado: {method}")

        caller, sends_body = dispatch[method]
        if sends_body:
            response = caller(
                url,
                data["publication"],
                headers,
                tries=1,
                timeout=DEFAULT_WEBHOOK_TIMEOUT,
            )
        else:
            response = caller(
                url,
                headers,
                tries=1,
                timeout=DEFAULT_WEBHOOK_TIMEOUT,
            )

        mensagem = f"Webhook disparado com sucesso na URL: {url}. Método HTTP: {method}. Status Resposta: {response.status_code}. Mensagem: {response.text}."
        logger.debug(mensagem)

        return mensagem[:500]
# Script entry point: start the webhook worker loop when run directly.
if __name__ == "__main__":
    WorkerWebhook().run()
/HAllA-0.8.20-py3-none-any.whl/halla/main.py | from .config_loader import config, update_config
from .hierarchy import HierarchicalTree
from .logger import HAllALogger
from .utils.data import preprocess, eval_type, is_all_cont
from .utils.similarity import get_similarity_function
from .utils.stats import get_pvalue_table, pvalues2qvalues, test_pvalue_run_time
from .utils.tree import compare_and_find_dense_block, trim_block
from .utils.report import generate_hallagram, generate_clustermap, \
report_all_associations, report_significant_clusters, \
generate_lattice_plot
from .utils.filesystem import reset_dir
import pandas as pd
import numpy as np
import scipy.spatial.distance as spd
from os.path import join, exists
from os import getcwd
import time
import sys
########
# AllA
########
class AllA(object):
    """All-against-All (AllA) association testing between two datasets.

    Loads and preprocesses paired datasets X and Y, computes a pairwise
    similarity table with permutation-based p-values and FDR-corrected
    q-values, and reports each significant (x, y) pair as a singleton block.
    Serves as the base class for HAllA, which adds hierarchical clustering.
    """

    def __init__(self, max_freq_thresh=config.preprocess['max_freq_thresh'],
                 transform_data_funcs=config.preprocess['transform_funcs'],
                 discretize_bypass_if_possible=config.preprocess['discretize_bypass_if_possible'],
                 discretize_func=config.preprocess['discretize_func'], discretize_num_bins=config.preprocess['discretize_num_bins'],
                 pdist_metric=config.association['pdist_metric'],
                 permute_func=config.permute['func'], permute_iters=config.permute['iters'], permute_speedup=config.permute['speedup'],
                 fdr_alpha=config.stats['fdr_alpha'], fdr_method=config.stats['fdr_method'],
                 out_dir=config.output['dir'], verbose=config.output['verbose'], no_progress=False, dont_copy=False, force_permutations=False,
                 num_threads=4, dont_skip=False, large_diagnostic_subset=105, splitting_diagnostic_mode=False, gini_uncertainty_level = .02, seed=0):
        # update AllA config setting (module-level config shared by helpers)
        update_config('output', dir=out_dir, verbose=verbose)
        update_config('preprocess', max_freq_thresh=max_freq_thresh,
                      transform_funcs=transform_data_funcs,
                      discretize_bypass_if_possible=discretize_bypass_if_possible,
                      discretize_func=discretize_func, discretize_num_bins=discretize_num_bins)
        update_config('association', pdist_metric=pdist_metric)
        update_config('permute', func=permute_func, iters=permute_iters, speedup=permute_speedup)
        update_config('stats', fdr_alpha=fdr_alpha, fdr_method=fdr_method)
        self._reset_attributes()
        self.no_progress = no_progress
        self.dont_copy = dont_copy
        self.force_permutations = force_permutations
        self.num_threads = num_threads
        self.dont_skip = dont_skip
        self.large_diagnostic_subset = large_diagnostic_subset
        self.verbose = verbose
        self.seed = seed
        self.splitting_diagnostic_mode = splitting_diagnostic_mode
        self.gini_uncertainty_level = gini_uncertainty_level
        # refuse to write into the current working directory
        if (out_dir == ".") or (out_dir == "./") or (out_dir == getcwd()):
            raise ValueError("Please specify an output directory other than the current directory.")
        # HAllA subclass sets self.name before calling this __init__
        if not hasattr(self, 'name'):
            self.name = 'AllA'
        self.logger = HAllALogger(name=self.name, config=config)

    '''Private functions
    '''
    def _reset_attributes(self):
        """Reset all result/state attributes to their initial (empty) values."""
        self.X, self.Y = None, None
        self.X_types, self.Y_types = None, None
        self.similarity_table = None
        self.pvalue_table, self.qvalue_table = None, None
        self.fdr_reject_table = None
        self.significant_blocks = None
        self.significant_blocks_qvalues = None
        self.has_loaded = False
        self.has_run = False
        self.verbose = False

    def _compute_pairwise_similarities(self):
        """Compute the X-vs-Y similarity, p-value, and q-value tables.

        Fills self.similarity_table, self.pvalue_table, self.qvalue_table,
        and the boolean self.fdr_reject_table.
        """
        dist_metric = config.association['pdist_metric']
        self.logger.log_step_start('Step 1: Computing pairwise similarities, p-values, and q-values', sub=True)
        start_time = time.time()
        X, Y = self.X.to_numpy(), self.Y.to_numpy()

        # obtain similarity matrix (rows: X features, cols: Y features)
        self.logger.log_message('Generating the similarity table...')
        self.similarity_table = spd.cdist(X, Y, metric=get_similarity_function(dist_metric))

        # obtain p-values (warn first if the permutation test will be slow)
        self.logger.log_message('Generating the p-value table...')
        confp = config.permute
        extrapolated_time, timing_message = test_pvalue_run_time(X, Y, pdist_metric=dist_metric,
                                        permute_func=confp['func'], permute_iters=confp['iters'],
                                        permute_speedup=confp['speedup'],
                                        alpha=config.stats['fdr_alpha'],
                                        force_perms = self.force_permutations,
                                        num_threads=self.num_threads,
                                        seed=self.seed)
        if extrapolated_time > 10 and self.verbose:
            self.logger.log_message(timing_message)
        self.pvalue_table = get_pvalue_table(X, Y, pdist_metric=dist_metric,
                                             permute_func=confp['func'], permute_iters=confp['iters'],
                                             permute_speedup=confp['speedup'],
                                             alpha=config.stats['fdr_alpha'],
                                             no_progress=self.no_progress,
                                             force_permutations=self.force_permutations,
                                             num_threads=self.num_threads,
                                             seed=self.seed)

        # obtain q-values (FDR correction over the flattened p-value table)
        self.logger.log_message('Generating the q-value table...')
        self.fdr_reject_table, self.qvalue_table = pvalues2qvalues(self.pvalue_table.flatten(), config.stats['fdr_method'], config.stats['fdr_alpha'])
        self.qvalue_table = self.qvalue_table.reshape(self.pvalue_table.shape)
        self.fdr_reject_table = self.fdr_reject_table.reshape(self.pvalue_table.shape)
        end_time = time.time()
        self.logger.log_result('Number of significant associations', self.fdr_reject_table.sum())
        self.logger.log_step_end('Computing pairwise similarities, p-values, q-values', end_time - start_time, sub=True)

    def _find_dense_associated_blocks(self):
        '''Find significant cells based on FDR reject table

        In AllA each significant (x, y) cell becomes its own singleton block;
        blocks are sorted by ascending q-value.
        '''
        def compare_qvalue(x):
            # sort key: the q-value of the block's single (x, y) cell
            return(self.qvalue_table[x[0][0], x[1][0]])

        self.logger.log_step_start('Step 2: Finding densely associated blocks', sub=True)
        start_time = time.time()
        n, m = self.X.shape[0], self.Y.shape[0]
        self.significant_blocks = [[[x], [y]] for x in range(n) for y in range(m) if self.fdr_reject_table[x][y]]
        # sort by the p-values in ascending order
        self.significant_blocks.sort(key=compare_qvalue)
        self.significant_blocks_qvalues = [self.qvalue_table[x[0][0]][x[1][0]] for x in self.significant_blocks]
        end_time = time.time()
        self.logger.log_result('Number of significant clusters', len(self.significant_blocks))
        self.logger.log_step_end('Finding densely associated blocks', end_time - start_time, sub=True)

    def _generate_reports(self):
        '''Generate reports and store in config.output['dir'] directory:
        1) all_associations.txt: stores the associations between each feature in X and Y along with its
            p-values and q-values in a table
        2) sig_clusters.txt    : stores only the significant clusters

        Unless dont_copy is set, also writes the preprocessed and original
        X/Y tables as TSV files for reproducibility.
        '''
        self.logger.log_step_start('Generating reports')

        # create directory (cleared if it already exists)
        dir_name = config.output['dir']
        reset_dir(dir_name, verbose=config.output['verbose'])

        # generate performance.txt
        self.logger.write_performance_log(dir_name, config)

        # generate all_associations.txt
        report_all_associations(dir_name,
                                self.X.index.to_numpy(),
                                self.Y.index.to_numpy(),
                                self.similarity_table,
                                self.pvalue_table,
                                self.qvalue_table)

        # generate sig_clusters.txt
        report_significant_clusters(dir_name,
                                    self.significant_blocks,
                                    self.significant_blocks_qvalues,
                                    self.X.index.to_numpy(),
                                    self.Y.index.to_numpy())

        # print datasets (original and discretized)
        if not self.dont_copy:
            self.X.to_csv(join(dir_name, 'X.tsv'), sep='\t', na_rep = "NA")
            self.Y.to_csv(join(dir_name, 'Y.tsv'), sep='\t', na_rep = "NA")
            self.X_ori.to_csv(join(dir_name, 'X_original.tsv'), sep='\t', na_rep = "NA")
            self.Y_ori.to_csv(join(dir_name, 'Y_original.tsv'), sep='\t', na_rep = "NA")

    '''Public functions
    '''
    def load(self, X_file, Y_file=None):
        """Load and preprocess the X (and optionally Y) data tables.

        If Y_file is omitted, Y is a deep copy of X (self-association mode).
        Rows are features, columns are samples; samples are intersected by
        column name. May switch the association metric and discretization
        settings in the shared config depending on the detected data types.
        """
        def _read_and_drop_duplicated_indices(filepath):
            # drop duplicates and keep the first row
            df = pd.read_table(filepath, index_col=0, na_values="").dropna(how='all')
            df = df[~df.index.duplicated(keep='first')]
            return(df)

        self.logger.log_step_start('Loading and preprocessing data')
        confp = config.preprocess

        start_time = time.time()
        X, self.X_types = eval_type(_read_and_drop_duplicated_indices(X_file))
        Y, self.Y_types = eval_type(_read_and_drop_duplicated_indices(Y_file)) if Y_file \
            else (X.copy(deep=True), np.copy(self.X_types))

        # if not all types are continuous but pdist_metric is only for continuous types,
        # fall back to mutual information
        self.metric_changed = False
        if not (is_all_cont(self.X_types) and is_all_cont(self.Y_types)) and not (config.association['pdist_metric'].lower() in ['mi', 'nmi','xicor']):
            self.metric_changed = True
            self.logger.log_result('Discrete variables detected. ASSOCIATION METRIC CHANGED TO MI!', "")
            update_config('association', pdist_metric = 'mi')
            # raise ValueError('pdist_metric should be mi, nmi, or xicor if not all features are continuous...')
        # if pdist_metric is nmi but no discretization method is specified, assign to equal frequency (quantile)
        if config.association['pdist_metric'].lower() in ['nmi', 'mi'] and confp['discretize_func'] is None:
            self.logger.log_message('Discretization function is None; assigning to equal frequency (quantile) given metric in (NMI, MI)...')
            update_config('preprocess', discretize_func='quantile')
        if config.association['pdist_metric'].lower() == 'xicor' and not (is_all_cont(self.X_types) and is_all_cont(self.Y_types)) and confp['discretize_func'] is None:
            self.logger.log_message('Discretization function is None but pdist_metric = XICOR and data contains categorical variables; assigning discretization function to equal frequency (quantile)...')
            update_config('preprocess', discretize_func='quantile')

        # if all features are continuous and distance metric != nmi, discretization can be bypassed
        # NOTE(review): is_all_cont(self.X_types) is checked twice here — the
        # second occurrence was probably intended to be self.Y_types; confirm.
        if is_all_cont(self.X_types) and is_all_cont(self.X_types) and confp['discretize_func'] is not None and \
            (not config.association['pdist_metric'].lower() in ['nmi', 'mi']) and confp['discretize_bypass_if_possible']:
            self.logger.log_message('All features are continuous and bypassing discretization is enabled; bypassing discretization...')
            update_config('preprocess', discretize_func=None)

        if config.association['pdist_metric'].lower() == 'nmi' and not self.force_permutations:
            self.logger.log_result('Approximating NMI p-values with chi-squared test. Use --force_permutations to disable this behavior.', "")
        if config.association['pdist_metric'].lower() == 'mi' and not self.force_permutations:
            self.logger.log_result('Approximating MI p-values with chi-squared test. Use --force_permutations to disable this behavior.', "")

        # filter tables by intersect columns (shared samples)
        intersect_cols = [col for col in X.columns if col in Y.columns]
        if (len(intersect_cols) < 5): self.logger.log_message("There don't seem to be many overlapping samples between the two datasets. Are you sure your datasets have features as rows and samples as columns?")
        X, Y = X[intersect_cols], Y[intersect_cols]

        # clean and preprocess data
        func_args = {
            'transform_funcs'    : confp['transform_funcs'],
            'max_freq_thresh'    : confp['max_freq_thresh'],
            'discretize_func'    : confp['discretize_func'],
            'discretize_num_bins': confp['discretize_num_bins']
        }
        self.X, self.X_ori, self.X_types = preprocess(X, self.X_types, **func_args)
        self.Y, self.Y_ori, self.Y_types = preprocess(Y, self.Y_types, **func_args)

        self.has_loaded = True
        end_time = time.time()
        self.logger.log_message('Preprocessing step completed:')
        self.logger.log_result('X shape (# features, # size)', self.X.shape)
        self.logger.log_result('Y shape (# features, # size)', self.Y.shape)
        self.logger.log_step_end('Loading and preprocessing data', end_time - start_time)

    def run(self):
        '''Run AllA:
        1) compute pairwise similarity matrix and p-values
        2) find significantly-associated cells

        Raises RuntimeError if load() has not been called first.
        '''
        if self.has_loaded == False:
            raise RuntimeError('load function has not been called!')
        self.logger.log_step_start('Performing %s' % self.name)

        # step 1: computing pairwise similarity matrix
        self._compute_pairwise_similarities()

        # step 2: find significantly-associated cells
        self._find_dense_associated_blocks()

        # generate reports
        self._generate_reports()

    def generate_hallagram(self, block_num=50, x_dataset_label='', y_dataset_label='',
                           cmap=None, cbar_label='', figsize=None, text_scale=10,
                           output_file='hallagram.pdf', mask=False, signif_dots=True, **kwargs):
        '''Generate a hallagram showing the top [block_num] significant blocks

        If block_num is None, all significant blocks are shown. The colormap
        defaults to 'YlGnBu' for non-negative metrics (mi/nmi/dcor/xicor)
        and diverging 'RdBu_r' otherwise. The figure is written to
        output_file inside the configured output directory.
        '''
        if cmap is None:
            cmap = 'YlGnBu' if config.association['pdist_metric'] in ['mi','nmi', 'dcor', 'xicor'] else 'RdBu_r'
        file_name = join(config.output['dir'], output_file)
        if block_num is None:
            block_num = len(self.significant_blocks)
        else:
            block_num = min(block_num, len(self.significant_blocks))
        generate_hallagram(self.significant_blocks,
                           self.X.index.to_numpy(),
                           self.Y.index.to_numpy(),
                           [idx for idx in range(self.X.shape[0])],
                           [idx for idx in range(self.Y.shape[0])],
                           self.similarity_table,
                           self.fdr_reject_table,
                           x_dataset_label=x_dataset_label,
                           y_dataset_label=y_dataset_label,
                           figsize=figsize,
                           text_scale=text_scale,
                           output_file=file_name,
                           cmap=cmap, cbar_label=cbar_label,
                           mask=mask, signif_dots = signif_dots, **kwargs)
########
# HAllA
########
class HAllA(AllA):
    """Hierarchical All-against-All (HAllA) association testing.

    Extends AllA by hierarchically clustering each dataset so that
    densely-associated blocks between subtrees - rather than only
    individual feature pairs - can be found and reported.
    """
    def __init__(self, max_freq_thresh=config.preprocess['max_freq_thresh'],
                 transform_data_funcs=config.preprocess['transform_funcs'],
                 discretize_bypass_if_possible=config.preprocess['discretize_bypass_if_possible'],
                 discretize_func=config.preprocess['discretize_func'], discretize_num_bins=config.preprocess['discretize_num_bins'],
                 pdist_metric=config.association['pdist_metric'], linkage_method=config.hierarchy['linkage_method'],
                 sim2dist_set_abs=config.hierarchy['sim2dist_set_abs'], sim2dist_func=config.hierarchy['sim2dist_func'],
                 permute_func=config.permute['func'], permute_iters=config.permute['iters'], permute_speedup=config.permute['speedup'],
                 fdr_alpha=config.stats['fdr_alpha'], fdr_method=config.stats['fdr_method'],
                 fnr_thresh=config.stats['fnr_thresh'], rank_cluster=config.stats['rank_cluster'],
                 out_dir=config.output['dir'], verbose=config.output['verbose'], no_progress=False,
                 force_permutations=False, num_threads=4, dont_skip=False, large_diagnostic_subset=105, splitting_diagnostic_mode=False, gini_uncertainty_level=.02,
                 dont_copy=False, seed=0):
        # TODO: add restrictions on the input - ensure the methods specified are available
        self.name = 'HAllA'
        # vars() captures every argument (including self) so the whole set can
        # be forwarded to AllA.__init__ as keywords
        alla_vars = vars()
        # drop the hierarchy/clustering-only arguments AllA does not accept
        for key in ['linkage_method', 'fnr_thresh', 'rank_cluster', 'sim2dist_set_abs', 'sim2dist_func']:
            del alla_vars[key]
        # call AllA init function
        AllA.__init__(**alla_vars)
        # update HAllA config settings
        update_config('stats', fnr_thresh=fnr_thresh, rank_cluster=rank_cluster)
        update_config('hierarchy', linkage_method=linkage_method,
                      sim2dist_set_abs=sim2dist_set_abs,
                      sim2dist_func=sim2dist_func)
        self.logger = HAllALogger(self.name, config=config)

    # -- private helpers ---------------------------------------------------
    def _reset_attributes(self):
        """Clear all loaded data and computed results so the instance can be reused."""
        self.X, self.Y = None, None
        self.X_types, self.Y_types = None, None
        self.X_hierarchy, self.Y_hierarchy = None, None
        self.similarity_table = None
        self.pvalue_table, self.qvalue_table = None, None
        self.fdr_reject_table = None
        self.significant_blocks = None
        self.significant_blocks_qvalues = None
        self.has_loaded = False
        self.has_run = False

    def _run_clustering(self):
        """Step 2: hierarchically cluster both datasets with the configured linkage."""
        self.logger.log_step_start('Step 2: Performing hierarchical clustering', sub=True)
        start_time = time.time()
        func_args = {
            'pdist_metric': config.association['pdist_metric'],
            'linkage_method': config.hierarchy['linkage_method'],
            'sim2dist_set_abs': config.hierarchy['sim2dist_set_abs'],
            'sim2dist_func': config.hierarchy['sim2dist_func']
        }
        self.X_hierarchy = HierarchicalTree(self.X, **func_args)
        self.Y_hierarchy = HierarchicalTree(self.Y, **func_args)
        end_time = time.time()
        self.logger.log_step_end('Performing hierarchical clustering', end_time - start_time, sub=True)

    def _find_dense_associated_blocks(self):
        """Step 3: find densely associated blocks between the two hierarchies
        and sort them by the configured rank_cluster criterion."""
        def sort_by_best_qvalue(x):
            qvalue_table = self.qvalue_table[x[0],:][:,x[1]]
            return(qvalue_table.min())
        def sort_by_avg_qvalue(x):
            qvalue_table = self.qvalue_table[x[0],:][:,x[1]]
            return(qvalue_table.mean())
        self.logger.log_step_start('Step 3: Finding densely associated blocks', sub=True)
        start_time = time.time()
        self.significant_blocks = compare_and_find_dense_block(self.X_hierarchy.tree, self.Y_hierarchy.tree,
                                      self.fdr_reject_table, fnr_thresh=config.stats['fnr_thresh'],
                                      splitting_diagnostic_mode=self.splitting_diagnostic_mode,
                                      gini_uncertainty_level=self.gini_uncertainty_level)
        # sort significant blocks by the rank_cluster method
        sort_func = sort_by_best_qvalue if config.stats['rank_cluster'] == 'best' else sort_by_avg_qvalue
        self.significant_blocks.sort(key=sort_func)
        self.significant_blocks_qvalues = [sort_func(x) for x in self.significant_blocks]
        end_time = time.time()
        self.logger.log_result('Number of significant clusters', len(self.significant_blocks))
        self.logger.log_step_end('Finding densely associated blocks', end_time - start_time, sub=True)

    def _generate_reports(self):
        '''Generate reports and store in config.output['dir'] directory
        '''
        AllA._generate_reports(self)
        # store the linkages in .npy files
        dir_name = config.output['dir']
        np.save(join(dir_name, 'X_linkage.npy'), self.X_hierarchy.linkage, allow_pickle=False)
        np.save(join(dir_name, 'Y_linkage.npy'), self.Y_hierarchy.linkage, allow_pickle=False)

    @staticmethod
    def _append_warning(warn_file, warn_string):
        """Append *warn_string* to *warn_file* and echo it to stderr.

        Append mode creates the file when it does not exist, so no explicit
        exists() check is needed, and the context manager guarantees the
        handle is closed even on error (the previous open/write/close
        pattern leaked the descriptor on exceptions).
        """
        with open(warn_file, 'a') as fh:
            fh.write(warn_string + '\n')
        print(warn_string, file=sys.stderr)

    # -- public API --------------------------------------------------------
    def run(self):
        '''Run all 3 steps:
        1) compute pairwise similarity matrix
        2) cluster hierarchically
        3) find densely-associated blocks iteratively

        Raises RuntimeError if load() has not been called first.
        '''
        if self.has_loaded == False:
            raise RuntimeError('load function has not been called!')
        self.logger.log_step_start('Performing %s' % self.name)
        # step 1: computing pairwise similarity matrix
        self._compute_pairwise_similarities()
        # step 2: hierarchical clustering
        self._run_clustering()
        # step 3: iteratively finding densely-associated blocks
        self._find_dense_associated_blocks()
        # generate reports
        self._generate_reports()

    def generate_hallagram(self, block_num=50, x_dataset_label='', y_dataset_label='',
                           cmap=None, cbar_label='', figsize=None, text_scale=10,
                           output_file='hallagram.pdf', mask=False, signif_dots=True, **kwargs):
        '''Generate a hallagram showing the top [block_num] significant blocks
        (block_num=None shows all of them).
        '''
        # default colormap: sequential for non-negative metrics, diverging otherwise
        if cmap is None:
            cmap = 'YlGnBu' if config.association['pdist_metric'] in ['mi', 'nmi', 'dcor', 'xicor'] else 'RdBu_r'
        file_name = join(config.output['dir'], output_file)
        if block_num is None:
            block_num = len(self.significant_blocks)
        else:
            block_num = min(block_num, len(self.significant_blocks))
        generate_hallagram(self.significant_blocks,
                           self.X.index.to_numpy(),
                           self.Y.index.to_numpy(),
                           self.X_hierarchy.tree.pre_order(),
                           self.Y_hierarchy.tree.pre_order(),
                           self.similarity_table,
                           fdr_reject_table=self.fdr_reject_table,
                           x_dataset_label=x_dataset_label,
                           y_dataset_label=y_dataset_label,
                           figsize=figsize,
                           text_scale=text_scale,
                           output_file=file_name,
                           cmap=cmap, cbar_label=cbar_label,
                           mask=mask, signif_dots=signif_dots, block_num=block_num, **kwargs)

    def generate_clustermap(self, x_dataset_label='', y_dataset_label='',
                            cmap=None, cbar_label='', figsize=None, text_scale=10,
                            output_file='clustermap.pdf', mask=False, signif_dots=True, **kwargs):
        '''Generate a clustermap (hallagram + dendrogram)
        '''
        # if the dimension is too large, generate a hallagram instead
        if max(self.similarity_table.shape) > 500:
            print('The dimension is too large - please generate a hallagram instead.', file=sys.stderr)
            return
        if cmap is None:
            cmap = 'YlGnBu' if config.association['pdist_metric'] in ['mi','nmi', 'dcor', 'xicor'] else 'RdBu_r'
        file_name = join(config.output['dir'], output_file)
        generate_clustermap(self.significant_blocks,
                            self.X.index.to_numpy(),
                            self.Y.index.to_numpy(),
                            self.X_hierarchy.linkage,
                            self.Y_hierarchy.linkage,
                            self.similarity_table,
                            fdr_reject_table=self.fdr_reject_table,
                            x_dataset_label=x_dataset_label,
                            y_dataset_label=y_dataset_label,
                            figsize=figsize,
                            text_scale=text_scale,
                            cmap=cmap, cbar_label=cbar_label,
                            output_file=file_name,
                            mask=mask,
                            signif_dots=signif_dots,
                            **kwargs)

    def generate_diagnostic_plot(self, block_num=50, plot_dir='diagnostic', axis_stretch=1e-5, plot_size=4):
        '''Generate a lattice plot for each significant association;
        save all plots in the plot_dir folder under config.output['dir']
        '''
        # create the diagnostic directory under config.output['dir']
        reset_dir(join(config.output['dir'], plot_dir))
        if block_num is None:
            block_num = len(self.significant_blocks)
        else:
            block_num = min(block_num, len(self.significant_blocks))
        for i, block in enumerate(self.significant_blocks[:block_num]):
            title = 'Association %d' % (i+1)
            out_file = join(config.output['dir'], plot_dir, 'association_%d.pdf' % (i+1))
            warn_file = join(config.output['dir'], plot_dir, 'warnings.txt')
            x_data = self.X.to_numpy()[block[0],:]
            y_data = self.Y.to_numpy()[block[1],:]
            x_ori_data = self.X_ori.to_numpy()[block[0],:]
            y_ori_data = self.Y_ori.to_numpy()[block[1],:]
            x_features = self.X.index.to_numpy()[block[0]]
            y_features = self.Y.index.to_numpy()[block[1]]
            x_types = np.array(self.X_types)[block[0]]
            y_types = np.array(self.Y_types)[block[1]]
            n_features = x_data.shape[0] + y_data.shape[0]
            if n_features > 15 and n_features <= 45:
                # medium block: warn and plot only a subset of feature pairs
                warn_string = "Over 15 features included in association %d. Only a subset of features will be shown in the diagnostic plot. Increase --large_diagnostic_subset beyond 105 to show more." % (i+1)
                self._append_warning(warn_file, warn_string)
                generate_lattice_plot(x_data, y_data, x_ori_data, y_ori_data,
                                      x_features, y_features, x_types, y_types, title,
                                      out_file, axis_stretch=axis_stretch, plot_size=plot_size, n_pairs_to_show=self.large_diagnostic_subset)
                continue
            if n_features > 45 and not self.dont_skip:
                # very large block: skip entirely unless explicitly overridden
                warn_string = "Skipping association %d because there are too many included features. Add --dont_skip_large_blocks to disable this behavior." % (i+1)
                self._append_warning(warn_file, warn_string)
                continue
            generate_lattice_plot(x_data, y_data, x_ori_data, y_ori_data,
                                  x_features, y_features, x_types, y_types, title,
                                  out_file, axis_stretch=axis_stretch, plot_size=plot_size, n_pairs_to_show=n_features**2)
# insights/specs/datasources/yum_updates.py
import json
import logging
import time
from insights import datasource, HostContext, SkipComponent
from insights.components.rhel_version import IsRhel7, IsRhel8, IsRhel9
from insights.core.spec_factory import DatasourceProvider
from distutils.version import LooseVersion as version
try:
    from functools import cmp_to_key

    def sorted_cmp(it, cmp):
        """Sort *it* using an old-style comparison function *cmp*."""
        return sorted(it, key=cmp_to_key(cmp))
except ImportError:
    # Python 2.6 has no functools.cmp_to_key, but its builtin sorted()
    # accepts a cmp function as the second positional argument, so the
    # builtin itself is a drop-in replacement for sorted_cmp(it, cmp).
    sorted_cmp = sorted
class DnfManager:
    """ Performs package resolution on dnf based systems """
    def __init__(self, build_pkgcache=False):
        self.base = dnf.base.Base()
        # work from the on-disk repo cache unless asked to (re)build it
        self.base.conf.cacheonly = not build_pkgcache
        # releasever and basearchs are correctly set after calling load()
        self.releasever = dnf.rpm.detect_releasever("/")
        self.basearch = dnf.rpm.basearch(hawkey.detect_arch())
        self.packages = []
        self.repos = []
    def __enter__(self):
        # context-manager support; nothing to acquire
        return self
    def __exit__(self, *args):
        # nothing to release
        pass
    @staticmethod
    def pkg_cmp(a, b):
        # old-style cmp: order by name, then epoch:version-release
        # (via rpm.labelCompare), then repository name
        if a.name != b.name:
            return -1 if a.name < b.name else 1
        vercmp = rpm.labelCompare((str(a.e), a.v, a.r), (str(b.e), b.v, b.r))
        if vercmp != 0:
            return vercmp
        if a.reponame != b.reponame:
            return -1 if a.reponame < b.reponame else 1
        return 0
    def sorted_pkgs(self, pkgs):
        # if package is installed more than once (e.g. kernel)
        # don't report other installed (i.e. with @System repo) as updates
        return sorted_cmp([pkg for pkg in pkgs if pkg.reponame != "@System"], self.pkg_cmp)
    def load(self):
        """Read dnf configuration and fill the package sack (from cache when possible)."""
        # silence dnf's warning output while loading
        logging.disable(logging.WARNING)
        cli = dnf.cli.Cli(self.base)
        cli._read_conf_file()
        subst = self.base.conf.substitutions
        # prefer the releasever/basearch substitutions resolved from the config
        if subst.get("releasever"):
            self.releasever = subst["releasever"]
        if subst.get("basearch"):
            self.basearch = subst["basearch"]
        self.base.read_all_repos()
        # start from an empty query in case filling the sack fails below
        self.packages = hawkey.Query(hawkey.Sack())
        try:
            if version(dnf.VERSION) >= version("4.7.0") and self.base.conf.cacheonly:
                # dnf >= 4.7.0 can fill the sack purely from cached repo data
                self.base.fill_sack_from_repos_in_cache(load_system_repo=True)
                self.packages = self.base.sack.query()
            elif not self.base.conf.cacheonly:
                self.base.fill_sack()
                self.packages = self.base.sack.query()
        except dnf.exceptions.RepoError:
            # RepoError is raised when cache is empty
            pass
        self.repos = self.base.repos
        logging.disable(logging.NOTSET)
    def installed_packages(self):
        # every package currently installed on the system
        return self.packages.installed().run()
    def updates(self, pkg):
        """Return (nevra, [available packages newer than *pkg* with the same name/arch])."""
        name = pkg.name
        evr = "{0}:{1}-{2}".format(pkg.epoch, pkg.version, pkg.release)
        arch = pkg.arch
        nevra = "{0}-{1}.{2}".format(name, evr, arch)
        updates_list = []
        for upd in self.packages.filter(name=name, arch=arch, evr__gt=evr):
            updates_list.append(upd)
        return nevra, updates_list
    @staticmethod
    def pkg_nevra(pkg):
        # canonical name-epoch:version-release.arch string
        return "{0}-{1}:{2}-{3}.{4}".format(pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch)
    @staticmethod
    def pkg_repo(pkg):
        return pkg.reponame
    @staticmethod
    def advisory(pkg):
        # first advisory id attached to this exact package version, if any
        errata = pkg.get_advisories(hawkey.EQ)
        return errata[0].id if len(errata) > 0 else None
    def last_update(self):
        """Return the newest metadata timestamp across enabled repos (0 if none)."""
        last_ts = 0
        for repo in self.base.repos.iter_enabled():
            repo_ts = repo._repo.getTimestamp()
            if repo_ts > last_ts:
                last_ts = repo_ts
        return last_ts
class YumManager(DnfManager):
    """ Performs package resolution on yum based systems """
    def __init__(self, build_pkgcache=False):
        self.base = yum.YumBase()
        # cache=1 means operate from the existing cache only
        self.base.doGenericSetup(cache=0 if build_pkgcache else 1)
        self.releasever = self.base.conf.yumvar['releasever']
        self.basearch = self.base.conf.yumvar['basearch']
        self.packages = []
        self.repos = []
        # name.arch -> [available packages]; built by _build_updict()
        self.updict = {}
    @staticmethod
    def pkg_cmp(a, b):
        # old-style cmp: order by version first, then repository id
        vercmp = a.verCMP(b)
        if vercmp != 0:
            return vercmp
        if a.repoid != b.repoid:
            return -1 if a.repoid < b.repoid else 1
        return 0
    def sorted_pkgs(self, pkgs):
        return sorted_cmp(pkgs, self.pkg_cmp)
    def load(self):
        """Set up yum repos and the package sack, then index available packages."""
        try:
            self.base.doRepoSetup()
            self.base.doSackSetup()
        except yum.Errors.RepoError:
            # RepoError is raised when cache is empty
            pass
        except AttributeError:
            # backwards compatibility, because yum is removing these setup functions in future
            # and moving the setups to be getters (https://github.com/rpm-software-management/yum/blob/master/yum/__init__.py#L1099)
            pass
        try:
            self.packages = self.base.pkgSack.returnPackages()
        except yum.Errors.RepoError:
            # RepoError is raised when cache is empty
            pass
        self.repos = self.base.repos.repos
        self._build_updict()
    def _build_updict(self):
        # group available packages by name.arch for O(1) update lookups
        self.updict = {}
        for pkg in self.packages:
            self.updict.setdefault(pkg.na, []).append(pkg)
    def installed_packages(self):
        return self.base.rpmdb.returnPackages()
    def updates(self, pkg):
        """Return (nevra, [available packages with the same name.arch and a greater version])."""
        nevra = pkg.nevra
        updates_list = []
        for upg in self.updict.get(pkg.na, []):
            if upg.verGT(pkg):
                updates_list.append(upg)
        return nevra, updates_list
    @staticmethod
    def pkg_repo(pkg):
        return pkg.repoid
    def advisory(self, pkg):
        # look up the update notice (erratum) for this exact NVR, if any
        adv = self.base.upinfo.get_notice(pkg.nvr)
        if adv:
            return adv.get_metadata()['update_id']
        return None
    @staticmethod
    def last_update():
        # yum path does not expose a repo metadata timestamp; 0 means "unknown"
        return 0
# Select which manager to use based on the available system libraries:
# prefer the dnf bindings, fall back to yum, and leave UpdatesManager as
# None when neither is importable (the datasource then raises SkipComponent).
try:
    import dnf
    import dnf.cli
    import hawkey
    import rpm
    UpdatesManager = DnfManager
except ImportError:
    try:
        import yum
        UpdatesManager = YumManager
    except ImportError:
        UpdatesManager = None
@datasource(HostContext, [IsRhel7, IsRhel8, IsRhel9], timeout=0)
def yum_updates(broker):
    """
    Collect the updates available on the system using the locally
    installed dnf/yum python bindings, including advisory (erratum)
    information where applicable.

    Sample data returned::

        {
          "releasever": "8",
          "basearch": "x86_64",
          "update_list": {
            "NetworkManager-1:1.22.8-4.el8.x86_64": {
              "available_updates": [
                {
                  "package": "NetworkManager-1:1.22.8-5.el8_2.x86_64",
                  "repository": "rhel-8-for-x86_64-baseos-rpms",
                  "basearch": "x86_64",
                  "releasever": "8",
                  "erratum": "RHSA-2020:3011"
                }
              ]
            }
          },
          "build_pkgcache": false,
          "metadata_time": "2021-01-01T09:39:45Z"
        }

    Returns:
        DatasourceProvider: JSON document describing the available updates

    Raises:
        SkipComponent: Raised when neither dnf nor yum is found
    """
    if UpdatesManager is None:
        raise SkipComponent()
    build_pkgcache = getattr(broker.get('client_config', object), 'build_packagecache', False)
    with UpdatesManager(build_pkgcache=build_pkgcache) as mgr:
        mgr.load()
        report = {
            "releasever": mgr.releasever,
            "basearch": mgr.basearch,
            "update_list": {},
            "build_pkgcache": build_pkgcache,
        }
        for installed in mgr.installed_packages():
            nevra, candidates = mgr.updates(installed)
            if not candidates:
                continue
            entries = []
            for cand in mgr.sorted_pkgs(candidates):
                entry = {
                    "package": mgr.pkg_nevra(cand),
                    "repository": mgr.pkg_repo(cand),
                    "basearch": report["basearch"],
                    "releasever": report["releasever"],
                }
                erratum = mgr.advisory(cand)
                if erratum:
                    entry["erratum"] = erratum
                entries.append(entry)
            report["update_list"][nevra] = {"available_updates": entries}
        meta_ts = mgr.last_update()
        if meta_ts:
            report["metadata_time"] = time.strftime("%FT%TZ", time.gmtime(meta_ts))
    return DatasourceProvider(content=json.dumps(report), relative_path='insights_commands/yum_updates_list')
# arc1pyqt/ProgPanels/STDP.py
# (c) Radu Berdan
# ArC Instruments Ltd.
# This code is licensed under GNU v3 license (see LICENSE.txt for details)
####################################
from PyQt5 import QtGui, QtCore, QtWidgets
import sys
import os
import re
import numpy as np
import pyqtgraph as pg
import time
from arc1pyqt import Graphics
from arc1pyqt import state
HW = state.hardware
APP = state.app
CB = state.crossbar
from arc1pyqt.Globals import fonts
from arc1pyqt.modutils import BaseThreadWrapper, BaseProgPanel, \
makeDeviceList, ModTag
# Tag prefixed to every measurement this module emits to the session log.
tag="stdp"
class ThreadWrapper(BaseThreadWrapper):
    """Worker that runs the STDP routine on the instrument.

    For every selected device it streams the pre/post spike difference
    waveform for each requested spike-timing offset (dt) over the serial
    link and forwards the instrument's readings to the GUI via the
    base-class signals (sendData/displayData/highlight/updateTree).
    """
    def __init__(self, deviceList, values, timeSteps):
        super().__init__()
        self.deviceList=deviceList
        # values is packed by STDP.programDevs() as:
        # [gain, warp, max_spike_time, pre_time, pre_voltage, post_time, post_voltage]
        self.gain=values[0]
        self.warp=values[1]
        self.max_spike_time=values[2]
        self.pre_time=values[3]
        self.pre_voltage=values[4]
        self.post_time=values[5]
        self.post_voltage=values[6]
        self.timeSteps=timeSteps
    @BaseThreadWrapper.runner
    def run(self):
        """Stream the STDP job to the instrument.

        Wire protocol (ordering matters): device count first; then per
        device, per dt, the point count followed by (time, voltage)
        pairs; three floats are read back before/after each waveform and
        around the whole device run.
        """
        global tag
        HW.ArC.write_b(str(int(len(self.deviceList)))+"\n")
        for device in self.deviceList:
            w=device[0]
            b=device[1]
            self.highlight.emit(w,b)
            HW.ArC.queue_select(w, b)
            # store a first read
            valuesNew=HW.ArC.read_floats(3)
            tag_=tag+"_s"
            self.sendData.emit(w,b,valuesNew[0],valuesNew[1],valuesNew[2],tag_)
            self.displayData.emit()
            HW.ArC.write_b(str(int(len(self.timeSteps)))+"\n")
            for dt in self.timeSteps:
                #dt/=self.warp # bug fix
                # dt is divided by warp here because make_time_series
                # multiplies all times (including the offset) by warp again
                total_time, total_voltage=self.make_time_series(dt/self.warp, self.gain, self.warp, self.max_spike_time, self.pre_time, \
                    self.pre_voltage, self.post_time, self.post_voltage)
                HW.ArC.write_b(str(int(len(total_time)))+"\n")
                for i in range(len(total_time)):
                    HW.ArC.write_b(str(float(total_time[i]))+"\n")
                    HW.ArC.write_b(str(float(total_voltage[i]))+"\n")
                    # brief pause so the instrument's input buffer keeps up
                    time.sleep(0.001)
                valuesNew=HW.ArC.read_floats(3)
                tag_=tag+" dt="+str("%.6f" % dt)+" before"
                self.sendData.emit(w,b,valuesNew[0],valuesNew[1],valuesNew[2],tag_)
                self.displayData.emit()
                valuesNew=HW.ArC.read_floats(3)
                tag_=tag+" dt="+str("%.6f" % dt)+" after"
                # report the waveform's dominant amplitude: the extreme with
                # the largest magnitude, keeping its sign
                if max(total_voltage)>=abs(min(total_voltage)):
                    max_ampl=max(total_voltage)
                else:
                    max_ampl=min(total_voltage)
                self.sendData.emit(w,b,valuesNew[0],max_ampl,max(total_time),tag_)
                self.displayData.emit()
            valuesNew=HW.ArC.read_floats(3)
            tag_=tag+"_e"
            self.sendData.emit(w,b,valuesNew[0],valuesNew[1],valuesNew[2],tag_)
            self.displayData.emit()
            self.updateTree.emit(w,b)
    def make_time_series(self, dt, gain, warp, self_max_spike_time, self_pre_time, \
            self_pre_voltage, self_post_time, self_post_voltage):
        """Build the differential (pre - post) waveform for offset *dt*.

        Shifts one spike relative to the other by dt (dt>0 delays the pre
        spike, dt<0 delays the post spike), scales voltages by *gain* and
        times by *warp*, then merges the two event lists in time order,
        linearly interpolating the other waveform at every event.
        Returns (total_time, total_voltage).

        NOTE(review): this duplicates the merge logic in
        STDP.updateSpikes almost line-for-line; consider sharing one
        implementation.
        """
        if dt>0:
            # delay the pre spike by dt; pad the post spike's tail with 0 V
            pre_time=[x+dt for x in self_pre_time]
            pre_time.insert(0,0)
            pre_voltage=[0]+self_pre_voltage
            post_time=self_post_time+[self_max_spike_time+dt]
            post_voltage=self_post_voltage+[0]
        elif dt<0:
            # delay the post spike by |dt|; pad the pre spike's tail with 0 V
            post_time=[x+abs(dt) for x in self_post_time]
            post_time.insert(0,0)
            post_voltage=[0]+self_post_voltage
            pre_time=self_pre_time+[self_max_spike_time+abs(dt)]
            pre_voltage=self_pre_voltage+[0]
        else:
            pre_time=self_pre_time
            pre_voltage=self_pre_voltage
            post_time=self_post_time
            post_voltage=self_post_voltage
        total_time=[0]
        total_voltage=[0]
        index_pre=1
        index_post=1
        # apply the user-selected voltage gain and time warp
        pre_voltage=[x*gain for x in pre_voltage]
        post_voltage=[x*gain for x in post_voltage]
        pre_time=[x*warp for x in pre_time]
        post_time=[x*warp for x in post_time]
        # merge the two event lists in time order; at each event of one
        # waveform, linearly interpolate the other and record pre - post
        while index_pre<len(pre_time) and index_post<len(post_time):
            if pre_time[index_pre]<post_time[index_post]:
                total_time.append(pre_time[index_pre])
                v1=post_voltage[index_post]
                v0=post_voltage[index_post-1]
                t1=post_time[index_post]
                t0=post_time[index_post-1]
                tx=pre_time[index_pre]
                vpost=v1-(v1-v0)*(t1-tx)/(t1-t0)
                total_voltage.append(pre_voltage[index_pre]-vpost)
                index_pre+=1
            elif pre_time[index_pre]>post_time[index_post]:
                total_time.append(post_time[index_post])
                v1=pre_voltage[index_pre]
                v0=pre_voltage[index_pre-1]
                t1=pre_time[index_pre]
                t0=pre_time[index_pre-1]
                tx=post_time[index_post]
                vpre=v1-(v1-v0)*(t1-tx)/(t1-t0)
                total_voltage.append(vpre-post_voltage[index_post])
                index_post+=1
            else:
                # simultaneous events: take the direct difference
                total_time.append(post_time[index_post])
                total_voltage.append(pre_voltage[index_pre]-post_voltage[index_post])
                index_pre+=1
                index_post+=1
        # close the waveform at 0 V at the final event time
        total_voltage.append(0)
        total_time.append(max([pre_time[-1],post_time[-1]]))
        return total_time, total_voltage
class STDP(BaseProgPanel):
    """Spike-Timing Dependent Plasticity panel.

    Lets the user load pre/post spike waveforms, preview the resulting
    differential waveform, and run the STDP routine on crossbar devices.
    """
    def __init__(self, short=False):
        # short=True builds a compact variant without the Apply buttons
        super().__init__(title="STDP", \
                description="Spike-Timing Dependent Plasticity protocol.", \
                short=short)
        self.initUI()
def initUI(self):
    """Build the panel: parameter grid on the left, spike preview plots
    and the dt slider on the right, Apply buttons at the bottom."""
    vbox1=QtWidgets.QVBoxLayout()
    titleLabel = QtWidgets.QLabel(self.title)
    titleLabel.setFont(fonts.font1)
    descriptionLabel = QtWidgets.QLabel(self.description)
    descriptionLabel.setFont(fonts.font3)
    descriptionLabel.setWordWrap(True)
    isInt=QtGui.QIntValidator()
    isFloat=QtGui.QDoubleValidator()
    # labels and initial values for the three numeric parameter edits
    leftLabels=['Scale voltage', \
        'Scale time', \
        'Time step (ms)']
    leftInit= ['1',\
        '1', \
        '1']
    self.leftEdits=[]
    gridLayout=QtWidgets.QGridLayout()
    gridLayout.setColumnStretch(0,3)
    gridLayout.setColumnStretch(1,1)
    if self.short==False:
        gridLayout.setColumnStretch(7,2)
    # spike-file loading controls; the post-spike button stays disabled
    # while "Identical Spikes" is checked
    self.push_load_pre=QtWidgets.QPushButton("Load Pre Spike")
    self.push_load_pre.clicked.connect(self.load_pre)
    self.check_identical=QtWidgets.QCheckBox("Identical Spikes")
    self.check_identical.setChecked(True)
    self.check_identical.stateChanged.connect(self.handleCheckIdentical)
    self.push_load_post=QtWidgets.QPushButton("Load Post Spike")
    self.push_load_post.setEnabled(False)
    self.push_load_post.clicked.connect(self.load_post)
    self.pre_filename=QtWidgets.QLabel("Filename")
    self.post_filename=QtWidgets.QLabel("Filename")
    gridLayout.addWidget(self.push_load_pre,0,0)
    gridLayout.addWidget(self.pre_filename,0,1)
    gridLayout.addWidget(self.check_identical,1,0)
    gridLayout.addWidget(self.post_filename,2,1)
    gridLayout.addWidget(self.push_load_post,2,0)
    # one labelled line-edit row per numeric parameter
    for i in range(len(leftLabels)):
        lineLabel=QtWidgets.QLabel()
        #lineLabel.setFixedHeight(50)
        lineLabel.setText(leftLabels[i])
        gridLayout.addWidget(lineLabel, i+3,0)
        lineEdit=QtWidgets.QLineEdit()
        lineEdit.setText(leftInit[i])
        lineEdit.setValidator(isFloat)
        self.leftEdits.append(lineEdit)
        gridLayout.addWidget(lineEdit, i+3,1)
    self.leftEdits[0].textChanged.connect(self.scale_voltage)
    self.leftEdits[1].textChanged.connect(self.warp_time)
    self.check_single=QtWidgets.QCheckBox("Only single event")
    gridLayout.addWidget(self.check_single, 8,0)
    # defaults for the voltage gain and time warp factors
    self.gain=1
    self.warp=1
    vbox1.addWidget(titleLabel)
    vbox1.addWidget(descriptionLabel)
    hbox=QtWidgets.QHBoxLayout()
    vbox_left=QtWidgets.QVBoxLayout()
    vbox_left.addLayout(gridLayout)
    vbox_left.addStretch()
    hbox.addLayout(vbox_left)
    # right-hand side: pyqtgraph preview of the spikes and their difference
    vbox_spikes=QtWidgets.QVBoxLayout()
    pg.setConfigOption('background', 'w')
    pg.setConfigOption('foreground', 'k')
    view=pg.GraphicsLayoutWidget()
    # pen to draw the amplitude curves
    pen_blue=QtGui.QPen()
    pen_blue.setColor(QtCore.Qt.blue)
    # pen to draw the amplitude curves
    pen_green=QtGui.QPen()
    pen_green.setColor(QtCore.Qt.green)
    # pen to draw the amplitude curves
    pen_red=QtGui.QPen()
    pen_red.setColor(QtCore.Qt.red)
    labeltotal_style = {'color': '#000000', 'font-size': '10pt'}
    plot_height=80*APP.scalingFactor
    plot_width=300*APP.scalingFactor
    # top plot: the differential (pre - post) waveform
    self.plot_total=view.addPlot()
    self.plot_total.setMouseEnabled(False,False)
    self.curve_total=self.plot_total.plot(pen=pg.mkPen(color="00F", width=2))
    self.plot_total.getAxis('left').setLabel('Pre-Post', units='V', **labeltotal_style)
    #self.plot_total.setFixedHeight(plot_height)
    self.plot_total.getAxis('left').setGrid(50)
    self.plot_total.getAxis('left').setWidth(60)
    self.plot_total.getAxis('bottom').setGrid(50)
    view.nextRow() # go to next row and add the next plot
    # bottom plot: the individual pre (red) and post (green) spikes
    self.plot_p=view.addPlot()
    self.plot_p.setMouseEnabled(False,False)
    self.curve_pre=self.plot_p.plot(pen=pg.mkPen(color="F00", width=2))
    self.curve_post=self.plot_p.plot(pen=pg.mkPen(color="0F0", width=2))
    #self.plot_pre.setFixedHeight(plot_height)
    self.plot_p.getAxis('left').setLabel('Pre and Post', units='V', **labeltotal_style)
    self.plot_p.getAxis('left').setGrid(50)
    self.plot_p.getAxis('bottom').setGrid(50)
    self.plot_p.getAxis('left').setWidth(60)
    vbox_spikes.addWidget(view)
    # caption line showing the current dt and spike ordering
    spike_desc_lay=QtWidgets.QHBoxLayout()
    self.spikes_dt_text=QtWidgets.QLabel("dt=10ms | ")
    self.spikes_order_text=QtWidgets.QLabel("before")
    self.pre_text=QtWidgets.QLabel("Pre")
    self.pre_text.setStyleSheet("color: red")
    self.post_text=QtWidgets.QLabel("Post")
    self.post_text.setStyleSheet("color: green")
    spike_desc_lay.addStretch()
    spike_desc_lay.addWidget(self.spikes_dt_text)
    spike_desc_lay.addWidget(self.pre_text)
    spike_desc_lay.addWidget(self.spikes_order_text)
    spike_desc_lay.addWidget(self.post_text)
    spike_desc_lay.addStretch()
    vbox_spikes.addLayout(spike_desc_lay)
    # slider selecting the spike-timing offset; midpoint (50) is dt=0
    self.slider=QtWidgets.QSlider(QtCore.Qt.Horizontal, parent=self)
    self.slider.setValue(50)
    self.slider.valueChanged.connect(self.updateSpikes)
    vbox_spikes.addWidget(self.slider)
    hbox.addLayout(vbox_spikes)
    # wrap everything in a horizontally-locked scroll area
    self.vW=QtWidgets.QWidget()
    self.vW.setLayout(hbox)
    self.vW.setContentsMargins(0,0,0,0)
    self.vW.setMaximumHeight(320)
    scrlArea=QtWidgets.QScrollArea()
    scrlArea.setWidget(self.vW)
    scrlArea.setContentsMargins(0,0,0,0)
    scrlArea.setWidgetResizable(False)
    scrlArea.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
    scrlArea.installEventFilter(self)
    vbox1.addWidget(scrlArea)
    vbox1.addStretch()
    # Apply buttons are omitted in the compact ("short") variant
    if self.short==False:
        self.hboxProg=QtWidgets.QHBoxLayout()
        push_single = self.makeControlButton('Apply to One', \
                self.programOne)
        push_range = self.makeControlButton('Apply to Range', \
                self.programRange)
        push_all = self.makeControlButton('Apply to All', \
                self.programAll)
        self.hboxProg.addWidget(push_single)
        self.hboxProg.addWidget(push_range)
        self.hboxProg.addWidget(push_all)
        vbox1.addLayout(self.hboxProg)
    self.setLayout(vbox1)
    self.gridLayout=gridLayout
    # spike data state; filled in by load_pre()/load_post()
    self.pre_voltage = []
    self.pre_time = []
    self.post_voltage = []
    self.post_time = []
    self.pre_full_filename = None
    self.post_full_filename = None
    self.dt=0
    # expose the editable parameters to the panel property system
    self.registerPropertyWidget(self.leftEdits[0], 'vscale')
    self.registerPropertyWidget(self.leftEdits[1], 'tscale')
    self.registerPropertyWidget(self.leftEdits[2], 'tstep')
    self.registerPropertyWidget(self.check_identical, 'identical_spikes')
    self.registerPropertyWidget(self.check_single, 'single_event')
def updateDescription(self,value):
    # NOTE(review): self.spikes_description is never created in initUI, so
    # calling this raises AttributeError - it appears to be dead code left
    # over from an earlier layout. Confirm no external caller before removing.
    self.spikes_description.setText(str(value))
def handleCheckIdentical(self, value):
    """Slot for the 'Identical Spikes' checkbox.

    Loading a separate post spike only makes sense when the spikes are
    not identical, so the post-spike button is enabled exactly when the
    box is unchecked (value is the Qt check state; non-zero = checked).
    """
    self.push_load_post.setEnabled(not value)
def load_post(self, filePath=None):
    """Load the post-synaptic spike from a two-column (voltage, time)
    CSV file and refresh the spike preview.

    Prompts with a file dialog when *filePath* is not given; shows an
    error dialog when the file cannot be parsed.
    """
    # if no filename provided, ask for one
    if not filePath:
        openFileName = QtWidgets.QFileDialog().getOpenFileName(self,
                'Open spike file', "*.txt")[0]
        path = QtCore.QFileInfo(openFileName)
    else:
        path = QtCore.QFileInfo(filePath)
    voltages = []   # renamed from 'time' to avoid shadowing the time module
    times = []
    try:
        # ndmin=2 keeps the result 2-D even for a single-row file; without
        # it np.loadtxt returns a 1-D array and the per-row unpacking
        # below fails, rejecting a valid one-point spike file
        arraydata = np.loadtxt(path.absoluteFilePath(), dtype=float,
                delimiter=',', comments='#', ndmin=2)
        for (v, t) in arraydata:
            voltages.append(v)
            times.append(t)
        self.post_voltage = voltages
        self.post_time = times
        # once both spikes are present, recompute the shared timescale and
        # redraw with the slider centred (dt=0)
        if self.pre_voltage and self.pre_time:
            self.max_spike_time = max([self.pre_time[-1], self.post_time[-1]])
            self.slider.setValue(50)
            self.fix_spike_timescales()
            self.updateSpikes(50.0)
        self.post_full_filename = path.canonicalFilePath()
        self.post_filename.setText(path.baseName())
    except BaseException as exc:
        errMessage = QtWidgets.QMessageBox()
        errMessage.setText("Invalid spike file! " +
                "Possible problem with voltage-time series syntax.")
        errMessage.setIcon(QtWidgets.QMessageBox.Critical)
        errMessage.setWindowTitle("Error")
        errMessage.exec_()
def load_pre(self, filePath=None):
    """Load the pre-synaptic spike from a two-column (voltage, time)
    CSV file and refresh the spike preview.

    When 'Identical Spikes' is checked the same waveform is used as the
    post spike as well. Prompts with a file dialog when *filePath* is
    not given; shows an error dialog when the file cannot be parsed.
    """
    # if no filename provided, ask for one
    if not filePath:
        openFileName = QtWidgets.QFileDialog().getOpenFileName(self,
                'Open spike file', "*.txt")[0]
        path = QtCore.QFileInfo(openFileName)
    else:
        path = QtCore.QFileInfo(filePath)
    voltages = []   # renamed from 'time' to avoid shadowing the time module
    times = []
    try:
        # ndmin=2 keeps the result 2-D even for a single-row file; without
        # it np.loadtxt returns a 1-D array and the per-row unpacking
        # below fails, rejecting a valid one-point spike file
        arraydata = np.loadtxt(path.absoluteFilePath(), dtype=float,
                delimiter=',', comments='#', ndmin=2)
        for (v, t) in arraydata:
            voltages.append(v)
            times.append(t)
        self.pre_voltage = voltages
        self.pre_time = times
        if self.check_identical.isChecked():
            # identical spikes: the post spike aliases the same lists
            self.post_voltage = voltages
            self.post_time = times
            self.max_spike_time = max([self.pre_time[-1], self.post_time[-1]])
            self.slider.setValue(50)
            self.fix_spike_timescales()
            self.updateSpikes(50.0)
        elif self.post_voltage and self.post_time:
            # distinct post spike already loaded: re-align the timescales
            self.max_spike_time = max([self.pre_time[-1], self.post_time[-1]])
            self.fix_spike_timescales()
            self.slider.setValue(50)
            self.updateSpikes(50.0)
        self.pre_full_filename = path.canonicalFilePath()
        self.pre_filename.setText(path.baseName())
    except BaseException as exc:
        errMessage = QtWidgets.QMessageBox()
        errMessage.setText("Invalid spike file! " +
                "Possible problem with voltage-time series syntax.")
        errMessage.setIcon(QtWidgets.QMessageBox.Critical)
        errMessage.setWindowTitle("Error")
        errMessage.exec_()
def scale_voltage(self, value):
    # slot for the 'Scale voltage' edit: store the new gain and redraw
    self.gain=float(value)
    self.updateSpikes(self.slider.value())
def warp_time(self, value):
    # slot for the 'Scale time' edit: store the new warp factor and redraw
    self.warp=float(value)
    self.updateSpikes(self.slider.value())
def fix_spike_timescales(self):
    """Pad the shorter spike definition with a trailing 0 V sample so
    both waveforms span the same time window."""
    if self.pre_time[-1] > self.post_time[-1]:
        self.post_time.append(self.pre_time[-1])
        self.post_voltage.append(0)
    elif self.pre_time[-1] < self.post_time[-1]:
        self.pre_time.append(self.post_time[-1])
        # bug fix: this previously appended the end *time* value to the
        # voltage list; the padding sample must be 0 V, mirroring the
        # branch above
        self.pre_voltage.append(0)
def updateSpikes(self, sliderValue):
    # Updates the spike figure when the slider is moved.
    # Slider midpoint (50) means dt=0; each side spans one full spike
    # duration scaled by the warp factor.
    self.dt=self.max_spike_time*(self.slider.value()-50)/50.0*self.warp
    if self.dt<0:
        self.spikes_order_text.setText("before")
    else:
        self.spikes_order_text.setText("after")
    msg2="dt=" + str(self.dt) + " s | "
    self.spikes_dt_text.setText(msg2)
    # NOTE(review): the waveform construction below mirrors
    # ThreadWrapper.make_time_series almost line-for-line; consider
    # sharing one implementation.
    if sliderValue>50:
        # pre after post: delay the pre spike, pad the post spike's tail
        dt=self.max_spike_time*(sliderValue-50)/50.0
        pre_time=[x+dt for x in self.pre_time]
        pre_time.insert(0,0)
        pre_voltage=[0]+self.pre_voltage
        post_time=self.post_time+[self.max_spike_time+dt]
        post_voltage=self.post_voltage+[0]
    elif sliderValue<50:
        # pre before post: delay the post spike, pad the pre spike's tail
        dt=self.max_spike_time*(50-sliderValue)/50.0
        post_time=[x+dt for x in self.post_time]
        post_time.insert(0,0)
        post_voltage=[0]+self.post_voltage
        pre_time=self.pre_time+[self.max_spike_time+dt]
        pre_voltage=self.pre_voltage+[0]
    else:
        pre_time=self.pre_time
        pre_voltage=self.pre_voltage
        post_time=self.post_time
        post_voltage=self.post_voltage
    # Creates the pre and post voltage waveforms
    total_time=[0]
    total_voltage=[0]
    index_pre=1
    index_post=1
    # apply the current gain/warp settings
    pre_voltage=[x*self.gain for x in pre_voltage]
    post_voltage=[x*self.gain for x in post_voltage]
    pre_time=[x*self.warp for x in pre_time]
    post_time=[x*self.warp for x in post_time]
    # merge the two event lists in time order; at each event of one
    # waveform, linearly interpolate the other and record pre - post
    while index_pre<len(pre_time) and index_post<len(post_time):
        if pre_time[index_pre]<post_time[index_post]:
            total_time.append(pre_time[index_pre])
            v1=post_voltage[index_post]
            v0=post_voltage[index_post-1]
            t1=post_time[index_post]
            t0=post_time[index_post-1]
            tx=pre_time[index_pre]
            vpost=v1-(v1-v0)*(t1-tx)/(t1-t0)
            total_voltage.append(pre_voltage[index_pre]-vpost)
            index_pre+=1
        elif pre_time[index_pre]>post_time[index_post]:
            total_time.append(post_time[index_post])
            v1=pre_voltage[index_pre]
            v0=pre_voltage[index_pre-1]
            t1=pre_time[index_pre]
            t0=pre_time[index_pre-1]
            tx=post_time[index_post]
            vpre=v1-(v1-v0)*(t1-tx)/(t1-t0)
            total_voltage.append(vpre-post_voltage[index_post])
            index_post+=1
        else:
            # simultaneous events: take the direct difference
            total_time.append(post_time[index_post])
            total_voltage.append(pre_voltage[index_pre]-post_voltage[index_post])
            index_pre+=1
            index_post+=1
    # close the waveform at 0 V at the final event time
    total_voltage.append(0)
    #total_time.append(pre_time[-1])
    total_time.append(max([pre_time[-1],post_time[-1]]))
    # push the new data to the three preview curves
    self.curve_pre.setData(pre_time,pre_voltage)
    self.curve_post.setData(post_time,post_voltage)
    self.curve_total.setData(total_time, total_voltage)
def extractPanelData(self):
    """Extend the base panel snapshot with the spike files and slider position."""
    data = super().extractPanelData()
    data.update({
        'pre_filename': self.pre_full_filename,
        'post_filename': self.post_full_filename,
        'slider': self.slider.value(),
    })
    return data
    def setPanelData(self, data):
        """Restore panel state previously produced by extractPanelData.

        The order of operations matters: the textChanged signals are
        disconnected first so that loading the saved values does not
        trigger the handlers on a half-initialised panel, then they are
        reconnected and fired once manually to bring everything in sync.
        """
        # prevent the text edit events from firing
        # because the panel data are not fully
        # initialised
        self.leftEdits[0].textChanged.disconnect()
        self.leftEdits[1].textChanged.disconnect()
        super().setPanelData(data)
        self.load_pre(data['pre_filename'])
        # post waveform is optional; only load it if a file name was saved
        if data['post_filename']:
            self.load_post(data['post_filename'])
        self.slider.setValue(data['slider'])
        # reconnect the events above
        self.leftEdits[0].textChanged.connect(self.scale_voltage)
        self.leftEdits[1].textChanged.connect(self.warp_time)
        # and force them to fire to make sure everything is
        # up to date
        self.scale_voltage(self.leftEdits[0].text())
        self.warp_time(self.leftEdits[1].text())
        self.updateSpikes(self.slider.value())
def eventFilter(self, object, event):
if event.type()==QtCore.QEvent.Resize:
self.vW.setFixedWidth(event.size().width()-object.verticalScrollBar().width())
return False
def prepare_time_steps(self):
timeSteps=[]
if self.check_single.isChecked():
timeSteps.append(self.dt)
else:
timeSteps.append(0)
timeStep=float(self.leftEdits[2].text())/1000.0
#timeSteps.append(timeStep)
max_time=max([self.pre_time[-1],self.post_time[-1]])*self.warp
i=1
# Prepares the timesteps (dt's) for STDP measurement run.
while i*timeStep<=max_time:
timeSteps.append(i*timeStep)
timeSteps.append(i*timeStep*-1)
i+=1
#print " =========> Timesteps", timeSteps
return timeSteps
    def programOne(self):
        """Run the STDP programming routine on the currently selected device."""
        self.programDevs([[CB.word, CB.bit]])
    def programRange(self):
        """Run the STDP programming routine on the selected device range."""
        devs = makeDeviceList(True)
        self.programDevs(devs)
    def programAll(self):
        """Run the STDP programming routine on all devices in the crossbar."""
        devs = makeDeviceList(False)
        self.programDevs(devs)
    def programDevs(self, devs):
        """Start an STDP measurement run on the given devices.

        Parameters
        ----------
        devs : list of [word, bit] pairs identifying crossbar devices.

        The job code is written to the instrument *before* the parameters
        are sent (sendParams); the worker thread then performs the actual
        pulsing for every prepared time step.
        """
        job = "40"  # instrument opcode for the STDP routine
        timeSteps = self.prepare_time_steps()
        HW.ArC.write_b(job+"\n")
        self.sendParams()
        wrapper = ThreadWrapper(devs, [self.gain, self.warp, self.max_spike_time, \
            self.pre_time, self.pre_voltage, self.post_time, self.post_voltage], \
            timeSteps)
        self.execute(wrapper, wrapper.run)
def disableProgPanel(self,state):
if state==True:
self.hboxProg.setEnabled(False)
else:
self.hboxProg.setEnabled(True)
    @staticmethod
    def display(w, b, raw, parent=None):
        """Build and show a dG/G0-vs-deltaT scatter plot for one device.

        Parameters
        ----------
        w, b : word/bit line of the device (used only in the window title).
        raw : list of measurement rows; column 0 is resistance and
            column 3 a tag string.  A row tagged "before" is paired with
            the immediately following row to compute the conductance
            change; the deltaT value is parsed out of the tag text.
        parent : unused, kept for interface compatibility.

        Returns
        -------
        The QWidget holding the plot (already shown).
        """
        # matches signed decimal numbers embedded in the tag string
        reg = re.compile(r'-?[0-9\.]+')
        i = 0
        list_dt = []
        Mbefore = 0
        Mafter = 0
        dG = []
        dt = 0
        while i < len(raw):
            # find the STDP tag
            stdp_tag = str(raw[i][3])
            if "before" in stdp_tag:
                # register resistances before and after
                Mbefore = raw[i][0]
                Mafter = raw[i+1][0]
                # append delta Ts and delta Gs
                dt = float(re.findall(reg, stdp_tag)[0])
                list_dt.append(dt)
                # relative conductance change (G = 1/M)
                dG.append((1/Mafter-1/Mbefore)/(1/Mbefore))
                # consume the before/after pair in one go
                i += 2
            else:
                i += 1
        resultWindow = QtWidgets.QWidget()
        resultWindow.setGeometry(100,100,500,500)
        resultWindow.setWindowTitle("STDP: W="+ str(w) + " | B=" + str(b))
        resultWindow.setWindowIcon(Graphics.getIcon('appicon'))
        resultWindow.show()
        view = pg.GraphicsLayoutWidget()
        label_style = {'color': '#000000', 'font-size': '10pt'}
        plot_stdp = view.addPlot()
        # scatter only: no connecting line, square markers
        curve_stdp = plot_stdp.plot(pen=None, symbolPen=None, \
                symbolBrush=(0,0,255), symbol='s', symbolSize=5, pxMode=True)
        plot_stdp.getAxis('left').setLabel('dG/G0', **label_style)
        plot_stdp.getAxis('bottom').setLabel('deltaT', units='s', **label_style)
        plot_stdp.getAxis('left').setGrid(50)
        plot_stdp.getAxis('bottom').setGrid(50)
        curve_stdp.setData(np.asarray(list_dt),np.asarray(dG))
        layout = QtWidgets.QHBoxLayout()
        layout.addWidget(view)
        layout.setContentsMargins(0,0,0,0)
        resultWindow.setLayout(layout)
        return resultWindow
tags = { 'top': ModTag(tag, "STDP", STDP.display) } | PypiClean |
/Pycord-Utils-2.3.2.tar.gz/Pycord-Utils-2.3.2/pycord/features/shell.py | from discord.ext import commands
from pycord.codeblocks import Codeblock, codeblock_converter
from pycord.exception_handling import ReplResponseReactor
from pycord.features.baseclass import Feature
from pycord.paginators import PaginatorInterface, WrappedPaginator
from pycord.shell import ShellReader
class ShellFeature(Feature):
    """
    Feature containing the shell-related commands
    """
    @Feature.Command(
        parent="pyc",
        name="shell",
        aliases=["bash", "sh", "powershell", "ps1", "ps", "cmd"],
    )
    async def pyc_shell(self, ctx: commands.Context, *, argument: codeblock_converter):
        """
        Executes statements in the system shell.
        This uses the system shell as defined in $SHELL, or `/bin/bash` otherwise.
        Execution can be cancelled by closing the paginator.
        """
        # ReplResponseReactor reports errors/timeouts back on the message;
        # self.submit tracks the running task so it can be cancelled.
        async with ReplResponseReactor(ctx.message):
            with self.submit(ctx):
                with ShellReader(argument.content) as reader:
                    prefix = "```" + reader.highlight
                    paginator = WrappedPaginator(prefix=prefix, max_size=1975)
                    paginator.add_line(f"{reader.ps1} {argument.content}\n")
                    interface = PaginatorInterface(ctx.bot, paginator, owner=ctx.author)
                    self.bot.loop.create_task(interface.send_to(ctx))
                    # stream the process output line by line into the paginator
                    async for line in reader:
                        if interface.closed:
                            # paginator closed by the user: abort the run
                            return
                        await interface.add_line(line)
                # reader has exited here, so the return code is available
                await interface.add_line(f"\n[status] Return code {reader.close_code}")
    @Feature.Command(parent="pyc", name="git")
    async def pyc_git(self, ctx: commands.Context, *, argument: codeblock_converter):
        """
        Shortcut for 'pyc sh git'. Invokes the system shell.
        """
        return await ctx.invoke(
            self.pyc_shell,
            argument=Codeblock(argument.language, "git " + argument.content),
        )
    @Feature.Command(parent="pyc", name="pip")
    async def pyc_pip(self, ctx: commands.Context, *, argument: codeblock_converter):
        """
        Shortcut for 'pyc sh pip'. Invokes the system shell.
        """
        return await ctx.invoke(
            self.pyc_shell,
            argument=Codeblock(argument.language, "pip " + argument.content),
        )
/sasa_phys-0.1.tar.gz/sasa_phys-0.1/sasa_phys/stack.py | import numpy as np
from .star_product import *
from .smat_oparations import *
class Layer:
    """
    Base class of MetaLayer and NonMetaLayer.

    Records which symmetry operations (mirror, flip, rotation) should be
    applied to the layer when the stack is built.
    """

    def __init__(self):
        # no symmetry operation requested by default
        self.mirror_bool = False
        self.flip_bool = False
        self.angle = 0

    def flip(self):
        """Mark this layer to be flipped."""
        self.flip_bool = True

    def mirror(self):
        """Mark this layer to be mirrored."""
        self.mirror_bool = True

    def rotate(self, angle):
        """Set the in-plane rotation angle applied to this layer."""
        self.angle = angle
class MetaLayer(Layer):
    """
    A meta-surface inside the stack.

    Parameters
    ----------
    s_mat : L x 4 x 4 numpy array
        Externally simulated/measured S-matrix of the meta-layer.
    cladding : vector
        Refraction indices of the cladding.
    substrate : vector
        Refraction indices of the substrate.
    """

    def __init__(self, s_mat, cladding, substrate):
        super().__init__()
        self.s_mat = s_mat
        self.cladding = cladding
        self.substrate = substrate
class NonMetaLayer(Layer):
    """
    A homogeneous isotropic or anisotropic layer.

    Parameters
    ----------
    *n_vec : one or two vectors of refractive indices.  With a single
        vector the layer is treated as isotropic (n_y = n_x); with two,
        the first is the x and the second the y polarisation.
    height : layer height in micrometres; scalar or vector.

    Raises
    ------
    ValueError
        If zero or more than two refractive index vectors are given.
    """

    def __init__(self, *n_vec, height):
        super().__init__()
        self.height = height
        self.height_len = np.size(self.height)
        # BUG FIX: validate *before* indexing n_vec.  Previously an empty
        # call raised IndexError from ``n_vec[0]`` instead of the intended
        # ValueError.  (Typo "refrectiv" in the message also fixed.)
        if not 1 <= len(n_vec) <= 2:
            raise ValueError("input 1 or 2 refractive index vectors")
        self.n_x = n_vec[0]
        # isotropic material reuses n_x; anisotropic takes the second vector
        self.n_y = n_vec[-1]
class Stack:
    """
    Describes the whole stack: layers, cladding, substrate and further
    options.

    Parameters
    ----------
    layer_list : list of Layer objects
    wav_vec : vector
        The target wavelengths where the Meta-Surfaces were simulated/
        measured
    cladding : vector
        The refractive indices of the cladding.
    substrate : vector
        The refractive indices of the substrate. The first material to be
        hit by light.
    """

    def __init__(self, layer_list, wav_vec, cladding, substrate):
        self.layer_list = layer_list
        self.cladding = cladding
        self.substrate = substrate
        self.wav_vec = wav_vec
        self.wav_vec_len = len(self.wav_vec)
        # Switches for the geometric-series star product.
        # BUG FIX: these were declared as name-mangled ``self.__geo_bool``/
        # ``self.__geo_order`` while build_geo()/order()/order_up_to()
        # assigned the unmangled ``self.geo_bool``/``self.geo_order``.
        # build() therefore never saw the flag and the geometric branch was
        # unreachable.  One consistent non-mangled name is used everywhere.
        self._geo_bool = False
        self._geo_order = 5

    def create_propagator(self, layer):
        """
        Creates the propagator S-Matrix of a single layer.

        Parameters
        ----------
        layer : NonMetaLayer or MetaLayer object

        Returns
        -------
        s_mat : H x L x 4 x 4 numpy array
            propagation S-Matrix
        """
        if type(layer) is NonMetaLayer:
            s_mat = np.zeros((layer.height_len, self.wav_vec_len, 4, 4)).astype(complex)
            # phase accumulated across the layer, per polarisation
            prop_x = np.exp(2j*np.pi * np.outer(layer.height, layer.n_x/self.wav_vec).squeeze())
            prop_y = np.exp(2j*np.pi * np.outer(layer.height, layer.n_y/self.wav_vec).squeeze())
            s_mat[:, :, 0, 0] = prop_x
            s_mat[:, :, 1, 1] = prop_y
            s_mat[:, :, 2, 2] = prop_x
            s_mat[:, :, 3, 3] = prop_y
        elif type(layer) is MetaLayer:
            s_mat = layer.s_mat.reshape((1, self.wav_vec_len, 4, 4))
        else:
            raise ValueError("Stack has to consist of Meta and NonMetaLayers")
        # apply symmetry opperations
        if layer.mirror_bool:
            s_mat = mirror_smat(s_mat)
        if layer.flip_bool:
            s_mat = flip_smat(s_mat)
        if layer.angle != 0:
            s_mat = rot_smat(s_mat, layer.angle)
        return s_mat

    def create_interface(self, l_2, l_1):
        """
        Creates the interface S-Matrix for the transmission between two
        Layers.

        Parameters
        ----------
        l_1 : NonMetaLayer or MetaLayer Objects
        l_2 : NonMetaLayer or MetaLayer Objects

        Returns
        -------
        s_mat : 1 x L x 4 x 4 numpy array
            interface S-Matrix
        """
        # load the refractive indices from the layers; a MetaLayer
        # contributes its cladding (outgoing) or substrate (incoming) side
        if (type(l_1) is NonMetaLayer):
            n1_x = l_1.n_x
            n1_y = l_1.n_y
        else:
            n1_x = l_1.cladding
            n1_y = l_1.cladding
        if(type(l_2) is NonMetaLayer):
            n2_x = l_2.n_x
            n2_y = l_2.n_y
        else:
            n2_x = l_2.substrate
            n2_y = l_2.substrate
        # transmission and reflection in x and y direction (Fresnel,
        # normal incidence)
        s_mat_list = np.zeros((self.wav_vec_len, 4, 4)).astype(complex)
        # Transmission
        s_mat_list[:, 0, 0] = 2*n1_x/(n1_x + n2_x)
        s_mat_list[:, 1, 1] = 2*n1_y/(n1_y + n2_y)
        s_mat_list[:, 2, 2] = 2*n2_x/(n1_x + n2_x)
        s_mat_list[:, 3, 3] = 2*n2_y/(n1_y + n2_y)
        # Reflection
        R_x = (n1_x - n2_x)/(n1_x + n2_x)
        R_y = (n1_y - n2_y)/(n1_y + n2_y)
        s_mat_list[:, 0, 2] = R_x
        s_mat_list[:, 1, 3] = R_y
        s_mat_list[:, 2, 0] = -1*R_x
        s_mat_list[:, 3, 1] = -1*R_y
        """
        This Operrator is constructed:
        [T_x  ,   0  , R_x,  0 ],
        [ 0   ,  T_y ,  0 , R_y],
        [-1*R_x,  0  , T_x,  0 ],
        [ 0  ,-1*R_y ,  0 , T_y]
        """
        return s_mat_list.reshape((1, self.wav_vec_len, 4, 4))

    def create_interface_rot(self, l_2, l_1):
        """
        Creates the interface S-Matrix for the transmission between two
        Layers in case of rotation; uses create_interface via an
        intermediate vacuum layer so each side can be rotated separately.

        Parameters
        ----------
        l_1 : NonMetaLayer or MetaLayer Objects
        l_2 : NonMetaLayer or MetaLayer Objects

        Returns
        -------
        s_mat : Lx4x4 S-Matrix
        """
        vacuum_layer = NonMetaLayer(np.ones(self.wav_vec_len), height=None)
        s_mat1 = self.create_interface(vacuum_layer, l_2)
        s_mat2 = self.create_interface(l_1, vacuum_layer)
        s_mat = star_product_analyt(rot_smat(s_mat1, l_2.angle),
                                    rot_smat(s_mat2, l_1.angle))
        return s_mat

    def build(self):
        """
        Builds all the propagation and interface matrices and multiplies
        them with the star product.

        Returns
        -------
        s_mat : Lx4x4 or HxLx4x4 numpy array
            S-matrix describing the behavior of the whole stack. The
            dimension is HxLx4x4 when a height vector was given
        """
        # Create Layer-Objects for the cladding and substrate
        clad_layer = NonMetaLayer(self.cladding, height=None)
        subs_layer = NonMetaLayer(self.substrate, height=None)
        # add the substrate layer to the back
        self.layer_list.append(subs_layer)
        # create interface between the cladding and the first layer
        inter = self.create_interface(clad_layer, self.layer_list[0])
        s_mat_list = [inter]
        for i in range(len(self.layer_list) - 1):
            current_layer = self.layer_list[i]
            next_layer = self.layer_list[i+1]
            prop = self.create_propagator(current_layer)
            # the rotated interface is only needed when either side is
            # actually rotated
            if (current_layer.angle != 0) or (next_layer.angle != 0):
                inter = self.create_interface_rot(current_layer, next_layer)
            else:
                inter = self.create_interface(current_layer, next_layer)
            s_mat_list.append(prop)
            s_mat_list.append(inter)
        # end building loop
        if self._geo_bool:
            s_out = star_product_cascaded_geo(s_mat_list, self._geo_order).squeeze()
        else:
            s_out = star_product_cascaded(s_mat_list).squeeze()
        # remove subs_layer from the layer list
        del self.layer_list[-1]
        return s_out

    def build_geo(self, order):
        """
        A version of build() that evaluates the star product through its
        geometric-series expansion truncated at the given order.

        Parameters
        ----------
        order : int
            truncation order of the geometric series

        Returns
        -------
        s_mat : Lx4x4 or HxLx4x4 numpy array
            S-matrix describing the behavior of the whole stack. The
            dimension is HxLx4x4 when a height vector was given
        """
        self._geo_order = order
        self._geo_bool = True
        try:
            s_mat = self.build()
        finally:
            # always restore the default (exact) evaluation mode
            self._geo_bool = False
        return s_mat

    def order(self, order):
        """
        Returns the nth order S-Matrix of the star product developed via
        the geometric series (difference of two consecutive truncations).

        Parameters
        ----------
        order : int

        Returns
        -------
        s_out : H x L x 4 x 4 numpy Array
            S-Matrix contribution of exactly this order
        """
        self._geo_bool = True
        try:
            previous_smat = 0
            if order > 1:
                # calculate previous S-matrix
                self._geo_order = order - 1
                previous_smat = self.build()
            # calculate current S-matrix
            self._geo_order = order
            current_smat = self.build()
            s_out = current_smat - previous_smat
        finally:
            self._geo_bool = False
        return s_out

    def order_up_to(self, order):
        """
        Builds a list of per-order S-matrix contributions up to the target
        order (order() handles the geometric-mode bookkeeping itself).

        Parameters
        ----------
        order : int

        Returns
        -------
        s_list : list of HxLx4x4 numpy Arrays
        """
        return [self.order(i) for i in range(1, order + 1)]
/silva.app.sitemap-1.0.tar.gz/silva.app.sitemap-1.0/src/silva/app/sitemap/utils.py |
# Sentinel distinguishing "no default supplied" from an explicit None.
_marker = object()


class TupleMap(object):
    """
    A prefix tree (trie) keyed by tuples.

    Each element of a key tuple indexes one level of nested dicts; the
    value itself is stored under the reserved ``None`` key of the final
    level, so a key can be both a stored entry and a prefix of others.
    """

    def __init__(self):
        self.clear()

    def clear(self):
        """Remove every entry."""
        self._store = {}
        self._len = 0

    def add(self, key, value):
        """Store ``value`` under tuple ``key``.

        Raises KeyError if a value is already stored at exactly this key.
        """
        store = self._store
        for piece in key:
            store = store.setdefault(piece, {})
        if None in store:
            # There is already a value in the store.
            raise KeyError(key)
        store[None] = value
        self._len += 1
        return value

    def remove(self, key):
        """Remove the value stored at exactly ``key``.

        Prunes any nested dicts left empty.  Raises KeyError when the key
        (or a value at it) does not exist.
        """
        store = self._store
        components = []
        for piece in key:
            following = store.get(piece)
            if following is None:
                raise KeyError(piece)
            components.append((store, piece))
            store = following
        else:
            if None in store:
                del store[None]
            else:
                raise KeyError(None)
        # BUG FIX: keep __len__ in sync — the original never decremented.
        self._len -= 1
        # prune now-empty levels from the leaf upwards
        while components:
            lstore, lkey = components.pop()
            if len(lstore[lkey]) > 0:
                break
            del lstore[lkey]

    def remove_all(self, key):
        """Remove the whole subtree rooted at ``key`` (prefix removal)."""
        store = self._store
        parent = None
        for piece in key:
            following = store.get(piece)
            if following is None:
                raise KeyError(piece)
            parent = store
            store = following
        else:
            # BUG FIX: account for every value dropped with the subtree so
            # __len__ stays correct (the original left _len untouched).
            self._len -= self._count_values(store)
            del parent[piece]

    @staticmethod
    def _count_values(store):
        """Count the values stored in a (nested) trie level."""
        total = 0
        for piece, value in store.items():
            if piece is None:
                total += 1
            else:
                total += TupleMap._count_values(value)
        return total

    def get(self, key, default=None, fallback=False):
        """Look up ``key``; return ``(value, depth)``.

        ``depth`` is how many key pieces were consumed.  With
        ``fallback=True`` the value stored at the longest matching prefix
        is returned when the full key is absent.
        """
        store = self._store
        default_index = 0
        index = 0
        for index, piece in enumerate(key):
            following = store.get(piece)
            if fallback:
                # Update default if fallback is on
                default_fallback = store.get(None)
                if default_fallback is not None:
                    default = default_fallback
                    default_index = index
            if following is None:
                # Not found, return default.
                return default, default_index
            store = following
        # Look for value or return default.
        value = store.get(None)
        if value is not None:
            return value, index + 1
        return default, default_index

    def list(self):
        """Return all stored values (order unspecified)."""
        result = []

        def walk(level):
            for piece, value in level.items():  # BUG FIX: py2-only iteritems()
                if piece is None:
                    result.append(value)
                else:
                    walk(value)

        walk(self._store)
        return result

    def __getitem__(self, key):
        value = self.get(key, _marker)[0]
        if value is _marker:
            raise KeyError(key)
        return value

    def __len__(self):
        return self._len
# if __name__ == '__main__':
# from pprint import pprint as pp
# tm = TupleMap()
# paths = ['/path/to/something', '/path/to/something/else', '/path/to/nothing']
# for p in paths:
# tm.add(p.split('/'), True)
# pp(tm._store)
# pp(tm.get('/path/to/something/else/to'.split('/'), fallback=True))
# pp(tm.get('/path/to'.split('/'), fallback=True))
# #tm.remove_all('/path/to/something'.split('/'))
# #pp(tm._store) | PypiClean |
/nni_daily-1.5.2005180104-py3-none-manylinux1_x86_64.whl/nni_daily-1.5.2005180104.data/data/nni/node_modules/argparse/lib/action_container.js | 'use strict';
var format = require('util').format;
// Constants
var c = require('./const');
var $$ = require('./utils');
//Actions
var ActionHelp = require('./action/help');
var ActionAppend = require('./action/append');
var ActionAppendConstant = require('./action/append/constant');
var ActionCount = require('./action/count');
var ActionStore = require('./action/store');
var ActionStoreConstant = require('./action/store/constant');
var ActionStoreTrue = require('./action/store/true');
var ActionStoreFalse = require('./action/store/false');
var ActionVersion = require('./action/version');
var ActionSubparsers = require('./action/subparsers');
// Errors
var argumentErrorHelper = require('./argument/error');
/**
 * new ActionContainer(options)
 *
 * Action container. Parent for [[ArgumentParser]] and [[ArgumentGroup]]
 *
 * ##### Options:
 *
 * - `description` -- A description of what the program does
 * - `prefixChars` -- Characters that prefix optional arguments
 * - `argumentDefault` -- The default value for all arguments
 * - `conflictHandler` -- The conflict handler to use for duplicate arguments
 **/
var ActionContainer = module.exports = function ActionContainer(options) {
  options = options || {};

  this.description = options.description;
  this.argumentDefault = options.argumentDefault;
  this.prefixChars = options.prefixChars || '';
  this.conflictHandler = options.conflictHandler;

  // set up registries
  this._registries = {};

  // register actions under their public keyword names
  this.register('action', null, ActionStore);
  this.register('action', 'store', ActionStore);
  this.register('action', 'storeConst', ActionStoreConstant);
  this.register('action', 'storeTrue', ActionStoreTrue);
  this.register('action', 'storeFalse', ActionStoreFalse);
  this.register('action', 'append', ActionAppend);
  this.register('action', 'appendConst', ActionAppendConstant);
  this.register('action', 'count', ActionCount);
  this.register('action', 'help', ActionHelp);
  this.register('action', 'version', ActionVersion);
  this.register('action', 'parsers', ActionSubparsers);

  // raise an exception if the conflict handler is invalid
  this._getHandler();

  // action storage
  this._actions = [];
  this._optionStringActions = {};

  // groups
  this._actionGroups = [];
  this._mutuallyExclusiveGroups = [];

  // defaults storage
  this._defaults = {};

  // determines whether an "option" looks like a negative number
  // -1, -1.5 -5e+4
  this._regexpNegativeNumber = new RegExp('^[-]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?$');

  // whether or not there are any optionals that look like negative
  // numbers -- uses a list so it can be shared and edited
  this._hasNegativeNumberOptionals = [];
};
// Groups must be required, then ActionContainer already defined
var ArgumentGroup = require('./argument/group');
var MutuallyExclusiveGroup = require('./argument/exclusive');
//
// Registration methods
//
/**
 * ActionContainer#register(registryName, value, object) -> Void
 * - registryName (String) : object type action|type
 * - value (string) : keyword
 * - object (Object|Function) : handler
 *
 * Register handlers
 **/
ActionContainer.prototype.register = function (registryName, value, object) {
  // lazily create the per-type registry on first use
  this._registries[registryName] = this._registries[registryName] || {};
  this._registries[registryName][value] = object;
};

// Look up `value` in the named registry; `defaultValue` (null when
// omitted) is returned when no handler is registered under that keyword.
ActionContainer.prototype._registryGet = function (registryName, value, defaultValue) {
  if (arguments.length < 3) {
    defaultValue = null;
  }
  return this._registries[registryName][value] || defaultValue;
};
//
// Namespace default accessor methods
//

/**
 * ActionContainer#setDefaults(options) -> Void
 * - options (object):hash of options see [[Action.new]]
 *
 * Set defaults
 **/
ActionContainer.prototype.setDefaults = function (options) {
  options = options || {};
  for (var property in options) {
    if ($$.has(options, property)) {
      this._defaults[property] = options[property];
    }
  }

  // if these defaults match any existing arguments, replace the previous
  // default on the object with the new one
  this._actions.forEach(function (action) {
    if ($$.has(options, action.dest)) {
      action.defaultValue = options[action.dest];
    }
  });
};

/**
 * ActionContainer#getDefault(dest) -> Mixed
 * - dest (string): action destination
 *
 * Return action default value
 **/
ActionContainer.prototype.getDefault = function (dest) {
  // start from the container-level default, if any
  var result = $$.has(this._defaults, dest) ? this._defaults[dest] : null;

  // an action-level default for the same dest takes precedence
  this._actions.forEach(function (action) {
    if (action.dest === dest && $$.has(action, 'defaultValue')) {
      result = action.defaultValue;
    }
  });

  return result;
};
//
// Adding argument actions
//
/**
 * ActionContainer#addArgument(args, options) -> Object
 * - args (String|Array): argument key, or array of argument keys
 * - options (Object): action objects see [[Action.new]]
 *
 * #### Examples
 * - addArgument([ '-f', '--foo' ], { action: 'store', defaultValue: 1, ... })
 * - addArgument([ 'bar' ], { action: 'store', nargs: 1, ... })
 * - addArgument('--baz', { action: 'store', nargs: 1, ... })
 **/
ActionContainer.prototype.addArgument = function (args, options) {
  options = options || {};

  // accept a single option string as shorthand for a one-element array
  // (NOTE: removed a dead self-assignment `args = args;` present before)
  if (typeof args === 'string') {
    args = [ args ];
  }
  if (!Array.isArray(args)) {
    throw new TypeError('addArgument first argument should be a string or an array');
  }
  if (typeof options !== 'object' || Array.isArray(options)) {
    throw new TypeError('addArgument second argument should be a hash');
  }

  // if no positional args are supplied or only one is supplied and
  // it doesn't look like an option string, parse a positional argument
  if (!args || args.length === 1 && this.prefixChars.indexOf(args[0][0]) < 0) {
    if (args && !!options.dest) {
      throw new Error('dest supplied twice for positional argument');
    }
    options = this._getPositional(args, options);

  // otherwise, we're adding an optional argument
  } else {
    options = this._getOptional(args, options);
  }

  // if no default was supplied, use the parser-level default
  if (typeof options.defaultValue === 'undefined') {
    var dest = options.dest;
    if ($$.has(this._defaults, dest)) {
      options.defaultValue = this._defaults[dest];
    } else if (typeof this.argumentDefault !== 'undefined') {
      options.defaultValue = this.argumentDefault;
    }
  }

  // create the action object, and add it to the parser
  var ActionClass = this._popActionClass(options);
  if (typeof ActionClass !== 'function') {
    throw new Error(format('Unknown action "%s".', ActionClass));
  }
  var action = new ActionClass(options);

  // throw an error if the action type is not callable
  var typeFunction = this._registryGet('type', action.type, action.type);
  if (typeof typeFunction !== 'function') {
    throw new Error(format('"%s" is not callable', typeFunction));
  }

  return this._addAction(action);
};
/**
 * ActionContainer#addArgumentGroup(options) -> ArgumentGroup
 * - options (Object): hash of options see [[ArgumentGroup.new]]
 *
 * Create new arguments groups
 **/
ActionContainer.prototype.addArgumentGroup = function (options) {
  var group = new ArgumentGroup(this, options);
  // keep track of the group so help formatting and merging can find it
  this._actionGroups.push(group);
  return group;
};

/**
 * ActionContainer#addMutuallyExclusiveGroup(options) -> ArgumentGroup
 * - options (Object): {required: false}
 *
 * Create new mutual exclusive groups
 **/
ActionContainer.prototype.addMutuallyExclusiveGroup = function (options) {
  var group = new MutuallyExclusiveGroup(this, options);
  this._mutuallyExclusiveGroups.push(group);
  return group;
};
// Register a fully constructed action on this container: resolves option
// string conflicts, indexes the action by each of its option strings and
// flags negative-number-looking options. Returns the action.
ActionContainer.prototype._addAction = function (action) {
  var self = this;

  // resolve any conflicts
  this._checkConflict(action);

  // add to actions list
  this._actions.push(action);
  action.container = this;

  // index the action by any option strings it has
  action.optionStrings.forEach(function (optionString) {
    self._optionStringActions[optionString] = action;
  });

  // set the flag if any option strings look like negative numbers
  action.optionStrings.forEach(function (optionString) {
    if (optionString.match(self._regexpNegativeNumber)) {
      if (!self._hasNegativeNumberOptionals.some(Boolean)) {
        self._hasNegativeNumberOptionals.push(true);
      }
    }
  });

  // return the created action
  return action;
};

// Remove a previously added action from this container's action list
// (no-op when the action is not present).
ActionContainer.prototype._removeAction = function (action) {
  var actionIndex = this._actions.indexOf(action);
  if (actionIndex >= 0) {
    this._actions.splice(actionIndex, 1);
  }
};
// Merge all actions of another container into this one, preserving group
// membership: groups are matched by title (created here when missing) and
// mutually exclusive groups are recreated.
ActionContainer.prototype._addContainerActions = function (container) {
  // collect groups by titles
  var titleGroupMap = {};
  this._actionGroups.forEach(function (group) {
    if (titleGroupMap[group.title]) {
      throw new Error(format('Cannot merge actions - two groups are named "%s".', group.title));
    }
    titleGroupMap[group.title] = group;
  });

  // map each action to its group
  var groupMap = {};
  function actionHash(action) {
    // unique (hopefully?) string suitable as dictionary key
    return action.getName();
  }
  container._actionGroups.forEach(function (group) {
    // if a group with the title exists, use that, otherwise
    // create a new group matching the container's group
    if (!titleGroupMap[group.title]) {
      titleGroupMap[group.title] = this.addArgumentGroup({
        title: group.title,
        description: group.description
      });
    }

    // map the actions to their new group
    group._groupActions.forEach(function (action) {
      groupMap[actionHash(action)] = titleGroupMap[group.title];
    });
  }, this);

  // add container's mutually exclusive groups
  // NOTE: if add_mutually_exclusive_group ever gains title= and
  // description= then this code will need to be expanded as above
  var mutexGroup;
  container._mutuallyExclusiveGroups.forEach(function (group) {
    mutexGroup = this.addMutuallyExclusiveGroup({
      required: group.required
    });
    // map the actions to their new mutex group
    group._groupActions.forEach(function (action) {
      groupMap[actionHash(action)] = mutexGroup;
    });
  }, this); // forEach takes a 'this' argument

  // add all actions to this container or their group
  container._actions.forEach(function (action) {
    var key = actionHash(action);
    if (groupMap[key]) {
      groupMap[key]._addAction(action);
    } else {
      this._addAction(action);
    }
  });
};
// Normalize the options hash for a positional argument: forbids
// `required`, infers it from nargs, and sets dest / empty optionStrings.
ActionContainer.prototype._getPositional = function (dest, options) {
  if (Array.isArray(dest)) {
    dest = dest[0];
  }
  // make sure required is not specified
  if (options.required) {
    throw new Error('"required" is an invalid argument for positionals.');
  }

  // mark positional arguments as required if at least one is
  // always required
  if (options.nargs !== c.OPTIONAL && options.nargs !== c.ZERO_OR_MORE) {
    options.required = true;
  }
  if (options.nargs === c.ZERO_OR_MORE && typeof options.defaultValue === 'undefined') {
    options.required = true;
  }

  // return the keyword arguments with no option strings
  options.dest = dest;
  options.optionStrings = [];
  return options;
};
// Normalize the options hash for an optional argument: validates the
// prefix of every option string, separates long options and infers
// `dest` from the first long (or first) option string when not given.
ActionContainer.prototype._getOptional = function (args, options) {
  var prefixChars = this.prefixChars;
  var optionStrings = [];
  var optionStringsLong = [];

  // determine short and long option strings
  args.forEach(function (optionString) {
    // error on strings that don't start with an appropriate prefix
    if (prefixChars.indexOf(optionString[0]) < 0) {
      throw new Error(format('Invalid option string "%s": must start with a "%s".',
        optionString,
        prefixChars
      ));
    }

    // strings starting with two prefix characters are long options
    optionStrings.push(optionString);
    if (optionString.length > 1 && prefixChars.indexOf(optionString[1]) >= 0) {
      optionStringsLong.push(optionString);
    }
  });

  // infer dest, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
  var dest = options.dest || null;
  delete options.dest;

  if (!dest) {
    var optionStringDest = optionStringsLong.length ? optionStringsLong[0] : optionStrings[0];
    dest = $$.trimChars(optionStringDest, this.prefixChars);

    if (dest.length === 0) {
      throw new Error(
        format('dest= is required for options like "%s"', optionStrings.join(', '))
      );
    }
    dest = dest.replace(/-/g, '_');
  }

  // return the updated keyword arguments
  options.dest = dest;
  options.optionStrings = optionStrings;

  return options;
};
// Remove the `action` keyword from the options hash and resolve it to a
// registered action class (or `defaultValue`/null when not given).
ActionContainer.prototype._popActionClass = function (options, defaultValue) {
  defaultValue = defaultValue || null;

  var action = (options.action || defaultValue);
  delete options.action;

  var actionClass = this._registryGet('action', action, action);
  return actionClass;
};

// Resolve this container's conflictHandler name to one of the
// _handleConflict* methods; throws for unknown handler names.
ActionContainer.prototype._getHandler = function () {
  var handlerString = this.conflictHandler;
  var handlerFuncName = '_handleConflict' + $$.capitalize(handlerString);
  var func = this[handlerFuncName];
  if (typeof func === 'undefined') {
    var msg = 'invalid conflict resolution value: ' + handlerString;
    throw new Error(msg);
  } else {
    return func;
  }
};
// Detect option strings of `action` that are already registered on this
// container and delegate the conflicting pairs to the configured handler.
ActionContainer.prototype._checkConflict = function (action) {
  var optionStringActions = this._optionStringActions;
  var conflictOptionals = [];

  // find all options that conflict with this option
  // collect pairs, the string, and an existing action that it conflicts with
  action.optionStrings.forEach(function (optionString) {
    var conflOptional = optionStringActions[optionString];
    if (typeof conflOptional !== 'undefined') {
      conflictOptionals.push([ optionString, conflOptional ]);
    }
  });

  if (conflictOptionals.length > 0) {
    var conflictHandler = this._getHandler();
    conflictHandler.call(this, action, conflictOptionals);
  }
};

// conflictHandler === 'error': refuse the new action with an argument
// error naming every conflicting option string.
ActionContainer.prototype._handleConflictError = function (action, conflOptionals) {
  var conflicts = conflOptionals.map(function (pair) { return pair[0]; });
  conflicts = conflicts.join(', ');
  throw argumentErrorHelper(
    action,
    format('Conflicting option string(s): %s', conflicts)
  );
};

// conflictHandler === 'resolve': strip the conflicting option strings
// from the older actions; actions left without any option string are
// removed from their container entirely.
ActionContainer.prototype._handleConflictResolve = function (action, conflOptionals) {
  // remove all conflicting options
  var self = this;
  conflOptionals.forEach(function (pair) {
    var optionString = pair[0];
    var conflictingAction = pair[1];

    // remove the conflicting option string
    var i = conflictingAction.optionStrings.indexOf(optionString);
    if (i >= 0) {
      conflictingAction.optionStrings.splice(i, 1);
    }
    delete self._optionStringActions[optionString];

    // if the option now has no option string, remove it from the
    // container holding it
    if (conflictingAction.optionStrings.length === 0) {
      conflictingAction.container._removeAction(conflictingAction);
    }
  });
};
/purity-fb-1.12.3.tar.gz/purity-fb-1.12.3/purity_fb/purity_fb_1dot8/__init__.py | from __future__ import absolute_import
# import models into sdk package
from .models.admin import Admin
from .models.admin_api_token import AdminApiToken
from .models.admin_cache import AdminCache
from .models.admin_cache_response import AdminCacheResponse
from .models.admin_response import AdminResponse
from .models.alert import Alert
from .models.alert_response import AlertResponse
from .models.alert_watcher import AlertWatcher
from .models.alert_watcher_response import AlertWatcherResponse
from .models.alert_watcher_test import AlertWatcherTest
from .models.alert_watcher_test_response import AlertWatcherTestResponse
from .models.array_http_performance import ArrayHttpPerformance
from .models.array_http_performance_response import ArrayHttpPerformanceResponse
from .models.array_performance import ArrayPerformance
from .models.array_performance_response import ArrayPerformanceResponse
from .models.array_response import ArrayResponse
from .models.array_s3_performance import ArrayS3Performance
from .models.array_s3_performance_response import ArrayS3PerformanceResponse
from .models.array_space import ArraySpace
from .models.array_space_response import ArraySpaceResponse
from .models.blade import Blade
from .models.blade_response import BladeResponse
from .models.bucket import Bucket
from .models.bucket_patch import BucketPatch
from .models.bucket_performance import BucketPerformance
from .models.bucket_performance_response import BucketPerformanceResponse
from .models.bucket_post import BucketPost
from .models.bucket_response import BucketResponse
from .models.bucket_s3_performance import BucketS3Performance
from .models.bucket_s3_performance_response import BucketS3PerformanceResponse
from .models._built_in import BuiltIn
from .models._built_in_with_id import BuiltInWithId
from .models.certificate import Certificate
from .models.certificate_group import CertificateGroup
from .models.certificate_group_response import CertificateGroupResponse
from .models.certificate_group_use import CertificateGroupUse
from .models.certificate_group_use_response import CertificateGroupUseResponse
from .models.certificate_post import CertificatePost
from .models.certificate_response import CertificateResponse
from .models.certificate_use import CertificateUse
from .models.certificate_use_response import CertificateUseResponse
from .models.client_performance import ClientPerformance
from .models.client_performance_response import ClientPerformanceResponse
from .models.directory_service import DirectoryService
from .models.directory_service_response import DirectoryServiceResponse
from .models.directory_service_role import DirectoryServiceRole
from .models.directory_service_roles_response import DirectoryServiceRolesResponse
from .models.directoryservice_nfs import DirectoryserviceNfs
from .models.directoryservice_smb import DirectoryserviceSmb
from .models.dns import Dns
from .models.dns_response import DnsResponse
from .models.error_response import ErrorResponse
from .models.file_system import FileSystem
from .models.file_system_performance import FileSystemPerformance
from .models.file_system_performance_response import FileSystemPerformanceResponse
from .models.file_system_response import FileSystemResponse
from .models.file_system_snapshot import FileSystemSnapshot
from .models.file_system_snapshot_response import FileSystemSnapshotResponse
from .models._fixed_reference import FixedReference
from .models._fixed_reference_with_id import FixedReferenceWithId
from .models.hardware import Hardware
from .models.hardware_connector import HardwareConnector
from .models.hardware_connector_response import HardwareConnectorResponse
from .models.hardware_response import HardwareResponse
from .models.link_aggregation_group import LinkAggregationGroup
from .models.link_aggregation_group_patch import LinkAggregationGroupPatch
from .models.link_aggregation_group_response import LinkAggregationGroupResponse
from .models.log_download_response import LogDownloadResponse
from .models.login_response import LoginResponse
from .models.member import Member
from .models.member_response import MemberResponse
from .models.network_interface import NetworkInterface
from .models.network_interface_response import NetworkInterfaceResponse
from .models.nfs_rule import NfsRule
from .models.object_response import ObjectResponse
from .models.object_store_access_key import ObjectStoreAccessKey
from .models.object_store_access_key_response import ObjectStoreAccessKeyResponse
from .models.object_store_account import ObjectStoreAccount
from .models.object_store_account_response import ObjectStoreAccountResponse
from .models.object_store_user import ObjectStoreUser
from .models.object_store_user_response import ObjectStoreUserResponse
from .models.objectstoreaccesskey import Objectstoreaccesskey
from .models.pagination_info import PaginationInfo
from .models.policy import Policy
from .models.policy_objects import PolicyObjects
from .models.policy_objects_response import PolicyObjectsResponse
from .models.policy_patch import PolicyPatch
from .models._policy_reference_with_id import PolicyReferenceWithId
from .models.policy_response import PolicyResponse
from .models.protocol_rule import ProtocolRule
from .models.pure_array import PureArray
from .models.pure_error import PureError
from .models.pure_object import PureObject
from .models.quotas_group import QuotasGroup
from .models.quotas_group_response import QuotasGroupResponse
from .models.quotas_setting import QuotasSetting
from .models.quotas_setting_response import QuotasSettingResponse
from .models.quotas_user import QuotasUser
from .models.quotas_user_response import QuotasUserResponse
from .models.quotasgroup_group import QuotasgroupGroup
from .models.quotasuser_user import QuotasuserUser
from .models.reference import Reference
from .models._resource import Resource
from .models._resource_rule import ResourceRule
from .models._resource_type import ResourceType
from .models.smb_rule import SmbRule
from .models.smtp import Smtp
from .models.smtp_response import SmtpResponse
from .models.snapshot_suffix import SnapshotSuffix
from .models.space import Space
from .models.subnet import Subnet
from .models.subnet_response import SubnetResponse
from .models.support import Support
from .models.support_remote_assist_paths import SupportRemoteAssistPaths
from .models.support_response import SupportResponse
from .models.test_result import TestResult
from .models.test_result_response import TestResultResponse
from .models.version_response import VersionResponse
# import apis into sdk package
from .apis.admins_api import AdminsApi
from .apis.admins_cache_api import AdminsCacheApi
from .apis.alert_watchers_api import AlertWatchersApi
from .apis.alerts_api import AlertsApi
from .apis.arrays_api import ArraysApi
from .apis.authentication_api import AuthenticationApi
from .apis.blade_api import BladeApi
from .apis.buckets_api import BucketsApi
from .apis.certificate_groups_api import CertificateGroupsApi
from .apis.certificates_api import CertificatesApi
from .apis.directory_services_api import DirectoryServicesApi
from .apis.dns_api import DnsApi
from .apis.file_system_snapshots_api import FileSystemSnapshotsApi
from .apis.file_systems_api import FileSystemsApi
from .apis.hardware_api import HardwareApi
from .apis.hardware_connectors_api import HardwareConnectorsApi
from .apis.link_aggregation_groups_api import LinkAggregationGroupsApi
from .apis.logs_api import LogsApi
from .apis.network_interfaces_api import NetworkInterfacesApi
from .apis.object_store_access_keys_api import ObjectStoreAccessKeysApi
from .apis.object_store_accounts_api import ObjectStoreAccountsApi
from .apis.object_store_users_api import ObjectStoreUsersApi
from .apis.policies_api import PoliciesApi
from .apis.quotas_groups_api import QuotasGroupsApi
from .apis.quotas_settings_api import QuotasSettingsApi
from .apis.quotas_users_api import QuotasUsersApi
from .apis.smtp_api import SmtpApi
from .apis.subnets_api import SubnetsApi
from .apis.support_api import SupportApi
from .apis.usage_groups_api import UsageGroupsApi
from .apis.usage_users_api import UsageUsersApi
from .apis.version_api import VersionApi
# import ApiClient
from .api_client import ApiClient
from .configuration import Configuration
configuration = Configuration()

# --- pulumi_oci/jms/get_fleet_java_migration_analysis_result.py ---
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetFleetJavaMigrationAnalysisResultResult',
'AwaitableGetFleetJavaMigrationAnalysisResultResult',
'get_fleet_java_migration_analysis_result',
'get_fleet_java_migration_analysis_result_output',
]
@pulumi.output_type
class GetFleetJavaMigrationAnalysisResultResult:
    """
    A collection of values returned by getFleetJavaMigrationAnalysisResult.
    """
    def __init__(__self__, application_execution_type=None, application_name=None, application_path=None, bucket=None, fleet_id=None, host_name=None, id=None, java_migration_analysis_result_id=None, managed_instance_id=None, metadata=None, namespace=None, object_lists=None, object_storage_upload_dir_path=None, source_jdk_version=None, target_jdk_version=None, time_created=None, work_request_id=None):
        # Every argument is runtime type-checked when non-falsy and then
        # stored on the output object with pulumi.set, as required by the
        # @pulumi.output_type protocol.
        _arguments = (
            ('application_execution_type', application_execution_type, str),
            ('application_name', application_name, str),
            ('application_path', application_path, str),
            ('bucket', bucket, str),
            ('fleet_id', fleet_id, str),
            ('host_name', host_name, str),
            ('id', id, str),
            ('java_migration_analysis_result_id', java_migration_analysis_result_id, str),
            ('managed_instance_id', managed_instance_id, str),
            ('metadata', metadata, str),
            ('namespace', namespace, str),
            ('object_lists', object_lists, list),
            ('object_storage_upload_dir_path', object_storage_upload_dir_path, str),
            ('source_jdk_version', source_jdk_version, str),
            ('target_jdk_version', target_jdk_version, str),
            ('time_created', time_created, str),
            ('work_request_id', work_request_id, str),
        )
        for _key, _value, _expected in _arguments:
            if _value and not isinstance(_value, _expected):
                raise TypeError("Expected argument '%s' to be a %s" % (_key, _expected.__name__))
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="applicationExecutionType")
    def application_execution_type(self) -> str:
        """
        Execution type of the application (e.g. a deployed or installed WAR/EAR).
        """
        return pulumi.get(self, "application_execution_type")

    @property
    @pulumi.getter(name="applicationName")
    def application_name(self) -> str:
        """
        Name of the application the Java migration analysis ran against.
        """
        return pulumi.get(self, "application_name")

    @property
    @pulumi.getter(name="applicationPath")
    def application_path(self) -> str:
        """
        Installation path of the application the Java migration analysis ran against.
        """
        return pulumi.get(self, "application_path")

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        Object storage bucket holding the migration analysis results.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="fleetId")
    def fleet_id(self) -> str:
        """
        OCID of the fleet.
        """
        return pulumi.get(self, "fleet_id")

    @property
    @pulumi.getter(name="hostName")
    def host_name(self) -> str:
        """
        Hostname of the managed instance that hosts the analysed application.
        """
        return pulumi.get(self, "host_name")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="javaMigrationAnalysisResultId")
    def java_migration_analysis_result_id(self) -> str:
        return pulumi.get(self, "java_migration_analysis_result_id")

    @property
    @pulumi.getter(name="managedInstanceId")
    def managed_instance_id(self) -> str:
        """
        OCID of the managed instance.
        """
        return pulumi.get(self, "managed_instance_id")

    @property
    @pulumi.getter
    def metadata(self) -> str:
        """
        Additional info reserved for future use.
        """
        return pulumi.get(self, "metadata")

    @property
    @pulumi.getter
    def namespace(self) -> str:
        """
        Object storage namespace holding the migration analysis results.
        """
        return pulumi.get(self, "namespace")

    @property
    @pulumi.getter(name="objectLists")
    def object_lists(self) -> Sequence[str]:
        """
        Names of the object storage objects that contain the analysis results.
        """
        return pulumi.get(self, "object_lists")

    @property
    @pulumi.getter(name="objectStorageUploadDirPath")
    def object_storage_upload_dir_path(self) -> str:
        """
        Directory path inside the bucket that contains the analysis results.
        """
        return pulumi.get(self, "object_storage_upload_dir_path")

    @property
    @pulumi.getter(name="sourceJdkVersion")
    def source_jdk_version(self) -> str:
        """
        JDK version the application currently runs on.
        """
        return pulumi.get(self, "source_jdk_version")

    @property
    @pulumi.getter(name="targetJdkVersion")
    def target_jdk_version(self) -> str:
        """
        JDK version the application is to be migrated to.
        """
        return pulumi.get(self, "target_jdk_version")

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        Time at which the result was compiled.
        """
        return pulumi.get(self, "time_created")

    @property
    @pulumi.getter(name="workRequestId")
    def work_request_id(self) -> str:
        """
        OCID of the work request that produced this analysis.
        """
        return pulumi.get(self, "work_request_id")
class AwaitableGetFleetJavaMigrationAnalysisResultResult(GetFleetJavaMigrationAnalysisResultResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        """Resolve immediately (nothing is actually awaited) to a plain result object."""
        if False:
            yield self
        _fields = (
            'application_execution_type', 'application_name', 'application_path',
            'bucket', 'fleet_id', 'host_name', 'id',
            'java_migration_analysis_result_id', 'managed_instance_id',
            'metadata', 'namespace', 'object_lists',
            'object_storage_upload_dir_path', 'source_jdk_version',
            'target_jdk_version', 'time_created', 'work_request_id',
        )
        return GetFleetJavaMigrationAnalysisResultResult(
            **{name: getattr(self, name) for name in _fields})
def get_fleet_java_migration_analysis_result(fleet_id: Optional[str] = None,
                                             java_migration_analysis_result_id: Optional[str] = None,
                                             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetFleetJavaMigrationAnalysisResultResult:
    """
    This data source provides details about a specific Fleet Java Migration Analysis Result resource in Oracle Cloud Infrastructure Jms service.

    Retrieve Java Migration Analysis result.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_fleet_java_migration_analysis_result = oci.Jms.get_fleet_java_migration_analysis_result(fleet_id=oci_jms_fleet["test_fleet"]["id"],
        java_migration_analysis_result_id=oci_apm_synthetics_result["test_result"]["id"])
    ```

    :param str fleet_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Fleet.
    :param str java_migration_analysis_result_id: The OCID of the analysis result.
    """
    __args__ = {
        'fleetId': fleet_id,
        'javaMigrationAnalysisResultId': java_migration_analysis_result_id,
    }
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('oci:Jms/getFleetJavaMigrationAnalysisResult:getFleetJavaMigrationAnalysisResult', __args__, opts=opts, typ=GetFleetJavaMigrationAnalysisResultResult).value

    # Re-pack the raw invoke result into the awaitable wrapper type.
    _result_fields = (
        'application_execution_type', 'application_name', 'application_path',
        'bucket', 'fleet_id', 'host_name', 'id',
        'java_migration_analysis_result_id', 'managed_instance_id',
        'metadata', 'namespace', 'object_lists',
        'object_storage_upload_dir_path', 'source_jdk_version',
        'target_jdk_version', 'time_created', 'work_request_id',
    )
    return AwaitableGetFleetJavaMigrationAnalysisResultResult(
        **{field: pulumi.get(__ret__, field) for field in _result_fields})
@_utilities.lift_output_func(get_fleet_java_migration_analysis_result)
def get_fleet_java_migration_analysis_result_output(fleet_id: Optional[pulumi.Input[str]] = None,
                                                    java_migration_analysis_result_id: Optional[pulumi.Input[str]] = None,
                                                    opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetFleetJavaMigrationAnalysisResultResult]:
    """
    This data source provides details about a specific Fleet Java Migration Analysis Result resource in Oracle Cloud Infrastructure Jms service.

    Retrieve Java Migration Analysis result.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_fleet_java_migration_analysis_result = oci.Jms.get_fleet_java_migration_analysis_result(fleet_id=oci_jms_fleet["test_fleet"]["id"],
        java_migration_analysis_result_id=oci_apm_synthetics_result["test_result"]["id"])
    ```

    :param str fleet_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Fleet.
    :param str java_migration_analysis_result_id: The OCID of the analysis result.
    """
    # Body intentionally empty: lift_output_func wraps the plain data-source
    # function above so this variant accepts pulumi Inputs / returns an Output.
    ...
# --- binstar_build_client/worker/utils/process_wrappers.py ---
import logging
import psutil
import requests
import subprocess
import os
import signal
# True when running under native Windows; selects job objects over
# POSIX process groups for child-process cleanup below.
WIN_32 = os.name == 'nt'
if WIN_32:
    # pywin32 modules — only importable (and only needed) on Windows.
    import win32job
    import pywintypes
log = logging.getLogger('binstar.build')
class DockerBuildProcess(object):
    """Adapter that gives a docker container a subprocess.Popen-like API
    (kill / wait / poll / readline) so the build loop can drive either
    a local process or a container uniformly.
    """

    def __init__(self, cli, cont):
        self.cli = cli
        self.cont = cont
        # Attach right away so the container's combined stdout/stderr can be
        # consumed incrementally via readline().
        self.stream = self.cli.attach(cont, stream=True, stdout=True, stderr=True)
        self.pid = 'docker container'

    def kill(self):
        """Forcibly stop the container."""
        self.cli.kill(self.cont)

    def wait(self):
        """Block until the container exits and return its exit status."""
        return self.cli.wait(self.cont)

    def remove(self):
        """Delete the container together with its volumes."""
        self.cli.remove_container(self.cont, v=True)

    def readline(self):
        """Return the next chunk of output, or b'' once the stream is exhausted."""
        try:
            return next(self.stream)
        except StopIteration:
            return b''

    def poll(self):
        """Return the exit status if the container has finished, else None."""
        try:
            return self.cli.wait(self.cont, timeout=0.1)
        except requests.exceptions.ReadTimeout:
            # Still running: the short wait timed out.
            return None
def create_job(hProcess):
    """Create a Win32 job object with kill-on-close semantics and assign
    *hProcess* to it, so every descendant dies when the job handle closes.
    Returns the job handle (keep it alive for the process's lifetime).
    """
    job = win32job.CreateJobObject(None, "")
    limits = win32job.QueryInformationJobObject(job, win32job.JobObjectExtendedLimitInformation)
    limits['BasicLimitInformation']['LimitFlags'] = win32job.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
    win32job.SetInformationJobObject(job, win32job.JobObjectExtendedLimitInformation, limits)
    win32job.AssignProcessToJobObject(job, hProcess)
    return job
class BuildProcess(subprocess.Popen):
    """subprocess.Popen subclass that can reliably kill the whole build
    process tree: a Win32 job object on Windows, a POSIX process group
    elsewhere, plus a psutil walk of surviving children as a final sweep.
    """

    def __init__(self, args, cwd):
        # On POSIX, start the child in its own process group so the whole
        # group can later be signalled at once; preexec_fn is unsupported on
        # Windows, where a job object (below) serves the same purpose.
        if WIN_32:
            preexec_fn = None
        else:
            preexec_fn = os.setpgrp
        super(BuildProcess, self).__init__(args=args, cwd=cwd,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.STDOUT,
                                           preexec_fn=preexec_fn
                                           )
        if WIN_32:
            # Attach the new process to a kill-on-close job object so any
            # grandchildren it spawns die together with it.
            self.job = create_job(self._handle)
        else:
            self.job = None

    def kill_job(self):
        # Terminate the Win32 job object; no-op on POSIX where job is None.
        if self.job is None:
            return
        log.warning("Kill win32 JobObject handle: {0}".format(self.job))
        try:
            win32job.TerminateJobObject(self.job, 1)
        except pywintypes.error as err:
            # Best effort: log and continue; kill() still sweeps children.
            log.warning("Could not terminate job object")
            log.warning(err)

    def kill_pg(self):
        # Send SIGTERM to the child's process group (POSIX only).
        if WIN_32:
            return
        try:
            pgid = os.getpgid(self.pid)
        except OSError as err:
            # Process may already be gone; nothing to signal.
            log.warning("Could not get process group for pid {}".format(self.pid))
            log.warning(err)
            return
        log.warning("Kill posix process group pgid: {0}".format(pgid))
        try:
            os.killpg(pgid, signal.SIGTERM)
        except OSError as err:
            log.warning("Could not kill process group for pid {}".format(self.pid))
            log.warning(err)

    def kill(self):
        '''Kill all processes and child processes'''
        # Snapshot the child tree *before* killing anything, so children that
        # get re-parented after the parent dies can still be found and killed.
        try:
            log.warning("Kill Tree parent pid: {0}".format(self.pid))
            parent = psutil.Process(self.pid)
            children = parent.children(recursive=True)
        except psutil.NoSuchProcess:
            log.warning("Parent pid {0} is already dead".format(self.pid))
            # Already dead
            parent = None
            children = []
        # Platform-specific group kill first, then an explicit sweep of any
        # survivors found via psutil.
        self.kill_job()
        self.kill_pg()
        if parent and parent.is_running():
            log.info("BuildProcess.kill: parent pid {} is being killed".format(parent.pid))
            super(BuildProcess, self).kill()
        for child in children:
            if child.is_running():
                log.info("BuildProcess.kill: child pid {} is being killed".format(child.pid))
                child.kill()

    def readline(self):
        # Convenience passthrough to the captured (merged stdout+stderr) pipe.
        return self.stdout.readline()
# --- fro_AI/face_evoLve/util/extract_feature_v2.py ---
import torch
import cv2
import numpy as np
import os
import matplotlib.pyplot as plt
def l2_norm(input, axis=1):
    """Return *input* scaled to unit L2 norm along dimension *axis*."""
    magnitude = input.norm(2, axis, keepdim=True)
    return input / magnitude
def extract_feature(img_root, backbone, model_root, device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu"), tta = True):
    """Extract an L2-normalised face embedding for a single image.

    The image is resized to 128x128, centre-cropped to 112x112, converted
    BGR->RGB, and fed through *backbone* (with an optional horizontally
    flipped copy for test-time augmentation, summed before normalisation).

    :param img_root: path to the input face image
    :param backbone: torch.nn.Module to load the checkpoint into
    :param model_root: path to the backbone state_dict checkpoint
    :param device: device to run inference on (defaults to first CUDA GPU
        if available, else CPU; evaluated once at import time)
    :param tta: if True, average-style TTA with the horizontal flip
    :return: 1xD L2-normalised feature tensor (on CPU)
    """
    # pre-requisites
    assert os.path.exists(img_root)
    print('Testing Data Root:', img_root)
    assert os.path.exists(model_root)
    print('Backbone Model Root:', model_root)

    def _to_input_tensor(face):
        # HWC uint8 RGB -> 1x3x112x112 float32 scaled to roughly [-1, 1).
        chw = face.swapaxes(1, 2).swapaxes(0, 1)
        batch = np.reshape(chw, [1, 3, 112, 112])
        batch = np.array(batch, dtype=np.float32)
        batch = (batch - 127.5) / 128.0
        return torch.from_numpy(batch)

    # load image and resize to [128, 128]
    img = cv2.imread(img_root)
    resized = cv2.resize(img, (128, 128))

    # center crop 112x112 from the 128x128 resize
    start = (128 - 112) // 2
    end = start + 112
    ccropped = resized[start:end, start:end]
    ccropped = ccropped[..., ::-1]  # BGR to RGB

    # flip image horizontally for test-time augmentation
    flipped = cv2.flip(ccropped, 1)

    ccropped = _to_input_tensor(ccropped)
    flipped = _to_input_tensor(flipped)

    # load backbone from a checkpoint; map_location lets a GPU-trained
    # checkpoint load on a CPU-only host (and vice versa)
    print("Loading Backbone Checkpoint '{}'".format(model_root))
    backbone.load_state_dict(torch.load(model_root, map_location=device))
    backbone.to(device)

    # extract features
    backbone.eval()  # set to evaluation mode
    with torch.no_grad():
        if tta:
            emb_batch = backbone(ccropped.to(device)).cpu() + backbone(flipped.to(device)).cpu()
            features = l2_norm(emb_batch)
        else:
            features = l2_norm(backbone(ccropped.to(device)).cpu())
    return features
# --- cdktf_cdktf_provider_google_beta/data_google_compute_region_instance_template/__init__.py ---
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class DataGoogleComputeRegionInstanceTemplate(
_cdktf_9a9027ec.TerraformDataSource,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplate",
):
'''Represents a {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template google_compute_region_instance_template}.'''
def __init__(
self,
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
filter: typing.Optional[builtins.str] = None,
id: typing.Optional[builtins.str] = None,
most_recent: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
name: typing.Optional[builtins.str] = None,
project: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
'''Create a new {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template google_compute_region_instance_template} Data Source.
:param scope: The scope in which to define this construct.
:param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
:param filter: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#filter DataGoogleComputeRegionInstanceTemplate#filter}.
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#id DataGoogleComputeRegionInstanceTemplate#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param most_recent: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#most_recent DataGoogleComputeRegionInstanceTemplate#most_recent}.
:param name: The name of the instance template. If you leave this blank, Terraform will auto-generate a unique name. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#name DataGoogleComputeRegionInstanceTemplate#name}
:param project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#project DataGoogleComputeRegionInstanceTemplate#project}
:param region: The region in which the instance template is located. If it is not provided, the provider region is used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#region DataGoogleComputeRegionInstanceTemplate#region}
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0445be5f52a77d335997618ebeee5d1ceab1d03828e756af5675194186f193ce)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
config = DataGoogleComputeRegionInstanceTemplateConfig(
filter=filter,
id=id,
most_recent=most_recent,
name=name,
project=project,
region=region,
connection=connection,
count=count,
depends_on=depends_on,
for_each=for_each,
lifecycle=lifecycle,
provider=provider,
provisioners=provisioners,
)
jsii.create(self.__class__, self, [scope, id_, config])
@jsii.member(jsii_name="resetFilter")
def reset_filter(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetFilter", []))
@jsii.member(jsii_name="resetId")
def reset_id(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetId", []))
@jsii.member(jsii_name="resetMostRecent")
def reset_most_recent(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMostRecent", []))
@jsii.member(jsii_name="resetName")
def reset_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetName", []))
@jsii.member(jsii_name="resetProject")
def reset_project(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetProject", []))
@jsii.member(jsii_name="resetRegion")
def reset_region(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRegion", []))
@jsii.member(jsii_name="synthesizeAttributes")
def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
@jsii.python.classproperty
@jsii.member(jsii_name="tfResourceType")
def TF_RESOURCE_TYPE(cls) -> builtins.str:
return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
@builtins.property
@jsii.member(jsii_name="advancedMachineFeatures")
def advanced_machine_features(
self,
) -> "DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesList":
return typing.cast("DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesList", jsii.get(self, "advancedMachineFeatures"))
@builtins.property
@jsii.member(jsii_name="canIpForward")
def can_ip_forward(self) -> _cdktf_9a9027ec.IResolvable:
return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "canIpForward"))
    # ------------------------------------------------------------------
    # Computed (read-only) attributes resolved from the matched instance
    # template.  Complex blocks are exposed through typed jsii list
    # wrappers; scalar attributes come back as strings, numbers, string
    # maps, or IResolvable tokens.
    # ------------------------------------------------------------------
    @builtins.property
    @jsii.member(jsii_name="confidentialInstanceConfig")
    def confidential_instance_config(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigList":
        '''Computed ``confidential_instance_config`` block, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigList", jsii.get(self, "confidentialInstanceConfig"))

    @builtins.property
    @jsii.member(jsii_name="description")
    def description(self) -> builtins.str:
        '''Computed template description.'''
        return typing.cast(builtins.str, jsii.get(self, "description"))

    @builtins.property
    @jsii.member(jsii_name="disk")
    def disk(self) -> "DataGoogleComputeRegionInstanceTemplateDiskList":
        '''Computed ``disk`` blocks, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskList", jsii.get(self, "disk"))

    @builtins.property
    @jsii.member(jsii_name="enableDisplay")
    def enable_display(self) -> _cdktf_9a9027ec.IResolvable:
        '''Computed ``enable_display`` flag (resolved at synth/apply time).'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "enableDisplay"))

    @builtins.property
    @jsii.member(jsii_name="guestAccelerator")
    def guest_accelerator(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateGuestAcceleratorList":
        '''Computed ``guest_accelerator`` blocks, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateGuestAcceleratorList", jsii.get(self, "guestAccelerator"))

    @builtins.property
    @jsii.member(jsii_name="instanceDescription")
    def instance_description(self) -> builtins.str:
        '''Computed description applied to instances created from the template.'''
        return typing.cast(builtins.str, jsii.get(self, "instanceDescription"))

    @builtins.property
    @jsii.member(jsii_name="labels")
    def labels(self) -> _cdktf_9a9027ec.StringMap:
        '''Computed label map (string keys/values).'''
        return typing.cast(_cdktf_9a9027ec.StringMap, jsii.get(self, "labels"))

    @builtins.property
    @jsii.member(jsii_name="machineType")
    def machine_type(self) -> builtins.str:
        '''Computed machine type of the template.'''
        return typing.cast(builtins.str, jsii.get(self, "machineType"))

    @builtins.property
    @jsii.member(jsii_name="metadata")
    def metadata(self) -> _cdktf_9a9027ec.StringMap:
        '''Computed metadata key/value map.'''
        return typing.cast(_cdktf_9a9027ec.StringMap, jsii.get(self, "metadata"))

    @builtins.property
    @jsii.member(jsii_name="metadataFingerprint")
    def metadata_fingerprint(self) -> builtins.str:
        '''Computed fingerprint of the metadata map.'''
        return typing.cast(builtins.str, jsii.get(self, "metadataFingerprint"))

    @builtins.property
    @jsii.member(jsii_name="metadataStartupScript")
    def metadata_startup_script(self) -> builtins.str:
        '''Computed startup script stored in metadata.'''
        return typing.cast(builtins.str, jsii.get(self, "metadataStartupScript"))

    @builtins.property
    @jsii.member(jsii_name="minCpuPlatform")
    def min_cpu_platform(self) -> builtins.str:
        '''Computed minimum CPU platform.'''
        return typing.cast(builtins.str, jsii.get(self, "minCpuPlatform"))

    @builtins.property
    @jsii.member(jsii_name="namePrefix")
    def name_prefix(self) -> builtins.str:
        '''Computed name prefix of the template.'''
        return typing.cast(builtins.str, jsii.get(self, "namePrefix"))

    @builtins.property
    @jsii.member(jsii_name="networkInterface")
    def network_interface(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceList":
        '''Computed ``network_interface`` blocks, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkInterfaceList", jsii.get(self, "networkInterface"))

    @builtins.property
    @jsii.member(jsii_name="networkPerformanceConfig")
    def network_performance_config(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigList":
        '''Computed ``network_performance_config`` block, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigList", jsii.get(self, "networkPerformanceConfig"))

    @builtins.property
    @jsii.member(jsii_name="reservationAffinity")
    def reservation_affinity(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateReservationAffinityList":
        '''Computed ``reservation_affinity`` block, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateReservationAffinityList", jsii.get(self, "reservationAffinity"))

    @builtins.property
    @jsii.member(jsii_name="resourcePolicies")
    def resource_policies(self) -> typing.List[builtins.str]:
        '''Computed list of resource policy names.'''
        return typing.cast(typing.List[builtins.str], jsii.get(self, "resourcePolicies"))

    @builtins.property
    @jsii.member(jsii_name="scheduling")
    def scheduling(self) -> "DataGoogleComputeRegionInstanceTemplateSchedulingList":
        '''Computed ``scheduling`` block, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateSchedulingList", jsii.get(self, "scheduling"))

    @builtins.property
    @jsii.member(jsii_name="selfLink")
    def self_link(self) -> builtins.str:
        '''Computed self link URL of the template.'''
        return typing.cast(builtins.str, jsii.get(self, "selfLink"))

    @builtins.property
    @jsii.member(jsii_name="serviceAccount")
    def service_account(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateServiceAccountList":
        '''Computed ``service_account`` block, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateServiceAccountList", jsii.get(self, "serviceAccount"))

    @builtins.property
    @jsii.member(jsii_name="shieldedInstanceConfig")
    def shielded_instance_config(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigList":
        '''Computed ``shielded_instance_config`` block, as a typed list wrapper.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigList", jsii.get(self, "shieldedInstanceConfig"))

    @builtins.property
    @jsii.member(jsii_name="tags")
    def tags(self) -> typing.List[builtins.str]:
        '''Computed network tags.'''
        return typing.cast(typing.List[builtins.str], jsii.get(self, "tags"))

    @builtins.property
    @jsii.member(jsii_name="tagsFingerprint")
    def tags_fingerprint(self) -> builtins.str:
        '''Computed fingerprint of the tag list.'''
        return typing.cast(builtins.str, jsii.get(self, "tagsFingerprint"))

    # ------------------------------------------------------------------
    # Raw configured input values.  These return the argument exactly as
    # the user supplied it (``None`` when the argument was never set),
    # independent of the resolved/computed value above.
    # ------------------------------------------------------------------
    @builtins.property
    @jsii.member(jsii_name="filterInput")
    def filter_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "filterInput"))

    @builtins.property
    @jsii.member(jsii_name="idInput")
    def id_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))

    @builtins.property
    @jsii.member(jsii_name="mostRecentInput")
    def most_recent_input(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
        return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "mostRecentInput"))

    @builtins.property
    @jsii.member(jsii_name="nameInput")
    def name_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))

    @builtins.property
    @jsii.member(jsii_name="projectInput")
    def project_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "projectInput"))

    @builtins.property
    @jsii.member(jsii_name="regionInput")
    def region_input(self) -> typing.Optional[builtins.str]:
        return typing.cast(typing.Optional[builtins.str], jsii.get(self, "regionInput"))

    # ------------------------------------------------------------------
    # Settable data-source arguments.  Each setter runtime-type-checks
    # its value (only when __debug__ is true, i.e. not under ``python -O``)
    # before forwarding it to the jsii kernel.
    # ------------------------------------------------------------------
    @builtins.property
    @jsii.member(jsii_name="filter")
    def filter(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "filter"))

    @filter.setter
    def filter(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__9377170382217cf2da6c8c6c2f3947c59739c40747b96f581cb4f127f12bc55f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "filter", value)

    @builtins.property
    @jsii.member(jsii_name="id")
    def id(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "id"))

    @id.setter
    def id(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__85ffc496216e7d54f1b49457b15ef6300affe34aa81046701e22239b2210551a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "id", value)

    @builtins.property
    @jsii.member(jsii_name="mostRecent")
    def most_recent(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
        return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "mostRecent"))

    @most_recent.setter
    def most_recent(
        self,
        value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__082471c1e41e8b4b84f0255e878273f96bbd17d3ef4a6994741fe47205461674)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "mostRecent", value)

    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "name"))

    @name.setter
    def name(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f634f60c7aa7c22b6ad4a20ec87bdc821b364f0e264d82b8a7b2ec39785538f6)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "name", value)

    @builtins.property
    @jsii.member(jsii_name="project")
    def project(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "project"))

    @project.setter
    def project(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ca0583ccb9ed584b7e02b9f984bef46ec3e19f5468ec83bb7d5807fd4845e164)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "project", value)

    @builtins.property
    @jsii.member(jsii_name="region")
    def region(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "region"))

    @region.setter
    def region(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__55ef2b478c9ffb791b51cd5d2f092e292b0847d0a6d398635a2a05f682287cad)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "region", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures:
    '''Field-less jsii struct for the computed ``advanced_machine_features`` attribute.

    The data source only reads this block, so the struct declares no
    settable fields (``name_mapping`` is empty); instances compare equal
    by their (always empty) value map.
    '''

    def __init__(self) -> None:
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Equal only to instances of the same class with the same values.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesList",
):
    '''Typed list wrapper over the computed ``advanced_machine_features`` blocks.

    ``get(index)`` returns a lazily-created output reference for one
    element; the ``_terraform_*`` properties are internal plumbing used
    by the cdktf ComplexList base class.
    '''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__270f21f41796a803333fe41531e87bce7822cb59d9fa5241a0989b59c5953807)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__aae884b8450c166417b1b1b7398adaccd2e8a7b151c1416d9fa7227178483a33)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__e71ded2b196bb4490aabcfd71fc0d0b77e4882c9dd2d98915ace7640ec43c626)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__96518d0d7a969faa4359328f2b3b5418ddc0b04aba0071866479b937aa1c4e27)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__0da2d5cc4b6fd5a6b8ac7e312c297c0efc416891dec85c9b3c2617ba677e7106)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesOutputReference",
):
    '''Output reference for one computed ``advanced_machine_features`` element.

    Exposes the element's attributes (``enable_nested_virtualization``,
    ``threads_per_core``, ``visible_core_count``) as read-only jsii
    properties.
    '''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__1f167d0552f5a6fe81277f191da82191d2bcca34125f72538eb3794fba1eeee3)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="enableNestedVirtualization")
    def enable_nested_virtualization(self) -> _cdktf_9a9027ec.IResolvable:
        '''Computed ``enable_nested_virtualization`` flag.'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "enableNestedVirtualization"))

    @builtins.property
    @jsii.member(jsii_name="threadsPerCore")
    def threads_per_core(self) -> jsii.Number:
        '''Computed ``threads_per_core`` value.'''
        return typing.cast(jsii.Number, jsii.get(self, "threadsPerCore"))

    @builtins.property
    @jsii.member(jsii_name="visibleCoreCount")
    def visible_core_count(self) -> jsii.Number:
        '''Computed ``visible_core_count`` value.'''
        return typing.cast(jsii.Number, jsii.get(self, "visibleCoreCount"))

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures]:
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ebde7f995f587176e41b8778f2f4dfda0078e11f40358c0705d4297744e70411)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig:
    '''Field-less jsii struct for the computed ``confidential_instance_config`` attribute.

    The data source only reads this block, so the struct declares no
    settable fields (``name_mapping`` is empty); instances compare equal
    by their (always empty) value map.
    '''

    def __init__(self) -> None:
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Equal only to instances of the same class with the same values.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigList",
):
    '''Typed list wrapper over the computed ``confidential_instance_config`` blocks.

    ``get(index)`` returns a lazily-created output reference for one
    element; the ``_terraform_*`` properties are internal plumbing used
    by the cdktf ComplexList base class.
    '''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__313b81cead38eca5584f95ba9e9c634c81747038289f3450242514b845df8614)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__6d362bf5d081ccc7f0c954844a6ffbf222d8c3fb5354456db63b9f85fceef5e5)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7b50115387e0fba90a6cb645f6509af30e51fce6f50f34e7781fc685d6a3061a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3a39bf8737e7650843eb4619f68f198cb18a51eaf63556d39d144332eeb87891)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__d859d606a8ef61ff12462a1f67aa53e704782cfeb24d6f006a7d89d58e53b03e)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigOutputReference",
):
    '''Output reference for one computed ``confidential_instance_config`` element.

    Exposes the element's ``enable_confidential_compute`` attribute as a
    read-only jsii property.
    '''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__9a4bae0fa27b3dc60dedc4b779cc9cf2d3db2746292c5e2039082815cba8bb99)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="enableConfidentialCompute")
    def enable_confidential_compute(self) -> _cdktf_9a9027ec.IResolvable:
        '''Computed ``enable_confidential_compute`` flag.'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "enableConfidentialCompute"))

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig]:
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__de1aec36dbaf33c9bb9dfd6662161add8cd5c8c53f79bbbc6879a1438d729bbf)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateConfig",
    jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
    name_mapping={
        "connection": "connection",
        "count": "count",
        "depends_on": "dependsOn",
        "for_each": "forEach",
        "lifecycle": "lifecycle",
        "provider": "provider",
        "provisioners": "provisioners",
        "filter": "filter",
        "id": "id",
        "most_recent": "mostRecent",
        "name": "name",
        "project": "project",
        "region": "region",
    },
)
class DataGoogleComputeRegionInstanceTemplateConfig(
    _cdktf_9a9027ec.TerraformMetaArguments,
):
    '''Configuration struct for the ``google_compute_region_instance_template`` data source.

    Combines the standard Terraform meta-arguments (connection, count,
    depends_on, ...) with the data source's own optional arguments
    (filter, id, most_recent, name, project, region).  All fields are
    optional; only explicitly supplied values are stored in ``_values``
    and each accessor returns ``None`` for an unset field.
    '''

    def __init__(
        self,
        *,
        connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
        count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
        depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
        for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
        lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
        provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
        provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
        filter: typing.Optional[builtins.str] = None,
        id: typing.Optional[builtins.str] = None,
        most_recent: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
        name: typing.Optional[builtins.str] = None,
        project: typing.Optional[builtins.str] = None,
        region: typing.Optional[builtins.str] = None,
    ) -> None:
        '''
        :param connection: 
        :param count: 
        :param depends_on: 
        :param for_each: 
        :param lifecycle: 
        :param provider: 
        :param provisioners: 
        :param filter: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#filter DataGoogleComputeRegionInstanceTemplate#filter}.
        :param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#id DataGoogleComputeRegionInstanceTemplate#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        :param most_recent: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#most_recent DataGoogleComputeRegionInstanceTemplate#most_recent}.
        :param name: The name of the instance template. If you leave this blank, Terraform will auto-generate a unique name. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#name DataGoogleComputeRegionInstanceTemplate#name}
        :param project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#project DataGoogleComputeRegionInstanceTemplate#project}
        :param region: The region in which the instance template is located. If it is not provided, the provider region is used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#region DataGoogleComputeRegionInstanceTemplate#region}
        '''
        # Accept a plain dict for lifecycle and coerce it to the struct type.
        if isinstance(lifecycle, dict):
            lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
        # Runtime type checks are skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__179c23af75c3264dc0d66483cbbca36c9c26a1e725bd9fef39e578ede8b0fe25)
            check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
            check_type(argname="argument count", value=count, expected_type=type_hints["count"])
            check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
            check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
            check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
            check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
            check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
            check_type(argname="argument filter", value=filter, expected_type=type_hints["filter"])
            check_type(argname="argument id", value=id, expected_type=type_hints["id"])
            check_type(argname="argument most_recent", value=most_recent, expected_type=type_hints["most_recent"])
            check_type(argname="argument name", value=name, expected_type=type_hints["name"])
            check_type(argname="argument project", value=project, expected_type=type_hints["project"])
            check_type(argname="argument region", value=region, expected_type=type_hints["region"])
        # Only store arguments the caller actually supplied, so unset
        # fields stay absent from the serialized struct.
        self._values: typing.Dict[builtins.str, typing.Any] = {}
        if connection is not None:
            self._values["connection"] = connection
        if count is not None:
            self._values["count"] = count
        if depends_on is not None:
            self._values["depends_on"] = depends_on
        if for_each is not None:
            self._values["for_each"] = for_each
        if lifecycle is not None:
            self._values["lifecycle"] = lifecycle
        if provider is not None:
            self._values["provider"] = provider
        if provisioners is not None:
            self._values["provisioners"] = provisioners
        if filter is not None:
            self._values["filter"] = filter
        if id is not None:
            self._values["id"] = id
        if most_recent is not None:
            self._values["most_recent"] = most_recent
        if name is not None:
            self._values["name"] = name
        if project is not None:
            self._values["project"] = project
        if region is not None:
            self._values["region"] = region

    @builtins.property
    def connection(
        self,
    ) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("connection")
        return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)

    @builtins.property
    def count(
        self,
    ) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("count")
        return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)

    @builtins.property
    def depends_on(
        self,
    ) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("depends_on")
        return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)

    @builtins.property
    def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
        '''
        :stability: experimental
        '''
        result = self._values.get("for_each")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)

    @builtins.property
    def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
        '''
        :stability: experimental
        '''
        result = self._values.get("lifecycle")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)

    @builtins.property
    def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provider")
        return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)

    @builtins.property
    def provisioners(
        self,
    ) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
        '''
        :stability: experimental
        '''
        result = self._values.get("provisioners")
        return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)

    @builtins.property
    def filter(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#filter DataGoogleComputeRegionInstanceTemplate#filter}.'''
        result = self._values.get("filter")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def id(self) -> typing.Optional[builtins.str]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#id DataGoogleComputeRegionInstanceTemplate#id}.

        Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
        If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
        '''
        result = self._values.get("id")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def most_recent(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
        '''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#most_recent DataGoogleComputeRegionInstanceTemplate#most_recent}.'''
        result = self._values.get("most_recent")
        return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)

    @builtins.property
    def name(self) -> typing.Optional[builtins.str]:
        '''The name of the instance template. If you leave this blank, Terraform will auto-generate a unique name.

        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#name DataGoogleComputeRegionInstanceTemplate#name}
        '''
        result = self._values.get("name")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def project(self) -> typing.Optional[builtins.str]:
        '''The ID of the project in which the resource belongs.

        If it is not provided, the provider project is used.

        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#project DataGoogleComputeRegionInstanceTemplate#project}
        '''
        result = self._values.get("project")
        return typing.cast(typing.Optional[builtins.str], result)

    @builtins.property
    def region(self) -> typing.Optional[builtins.str]:
        '''The region in which the instance template is located. If it is not provided, the provider region is used.

        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google-beta/4.80.0/docs/data-sources/google_compute_region_instance_template#region DataGoogleComputeRegionInstanceTemplate#region}
        '''
        result = self._values.get("region")
        return typing.cast(typing.Optional[builtins.str], result)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Equal only to instances of the same class with the same values.
        return isinstance(rhs, self.__class__) and rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        return "DataGoogleComputeRegionInstanceTemplateConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDisk",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateDisk:
    '''Empty struct backing the computed ``disk`` attribute; it declares no settable fields.'''

    def __init__(self) -> None:
        # No configurable fields, so the backing dict stays empty.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        is_equal = rhs == self
        return not is_equal

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateDisk({rendered})"
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey:
    '''Empty struct backing the computed ``disk.disk_encryption_key`` attribute; it declares no settable fields.'''

    def __init__(self) -> None:
        # No configurable fields, so the backing dict stays empty.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        is_equal = rhs == self
        return not is_equal

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey({rendered})"
class DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyList",
):
    '''jsii proxy over the computed ``disk_encryption_key`` list; ``get()`` returns a per-index output reference.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__6425fcfda2b4c17fec99c70b0355db94f12011800eef2d3c45889a978fe4add8)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a109be4dcb8377dcb4ab4562c90af1af1944c891d9e5e42ec9a990e12d9f882f)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__564eb73b6e8a6ee51dbb418f0953c147c28b591b7a92adb1380c6f88a22acc81)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__aae2c86912373ceaaa36b78256487a20aa7e68c43189a51adeac933cd262dedd)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c7e76463f762bfd37dc8dce3c48366a7ea6a8dafc988f15b6306807447775013)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyOutputReference",
):
    '''Per-index view of one computed ``disk_encryption_key`` element; attribute reads go through the jsii kernel.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__e2c7447f1525c6b02c7457f8717ef9fc40e05111d134075f068efa95570b3212)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="kmsKeySelfLink")
    def kms_key_self_link(self) -> builtins.str:
        '''Read-only ``kms_key_self_link`` attribute resolved via jsii.'''
        return typing.cast(builtins.str, jsii.get(self, "kmsKeySelfLink"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey]:
        '''The plain struct value this reference wraps, if any.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c68f0eef93578d773a89480e4073c5ce15df9e25632ee64668973e7823f62121)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
class DataGoogleComputeRegionInstanceTemplateDiskList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskList",
):
    '''jsii proxy over the computed ``disk`` list; ``get()`` returns a per-index output reference.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__1d377c1961de674bfd33bd0dbee81c1da082261b264a8e2d74f154714cd18c95)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateDiskOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__48cfa10fafb18c7ad72549d13501116b4b0fd7e011f3ddd922b4de357ad22c81)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c9d234e18cb4f8ff621af580ed2cc41333efb6d1e6bffdcf80d80e08e9a66d4d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__709dcb879e0900eb3771c8ff41b914de96dddc47ceda9601776a9aae26de5a31)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3e41aba5a35092d87620d8d9a3e912405dcc5a54d9e5bf178a74abe3c2f0a0a9)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateDiskOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskOutputReference",
):
    '''Per-index view of one computed ``disk`` element; every attribute below is a read-only jsii lookup.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__931c8da5d1986c171ebd6ebb826de308b033fb7641e7914d3dfe42296776ea37)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="autoDelete")
    def auto_delete(self) -> _cdktf_9a9027ec.IResolvable:
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "autoDelete"))
    @builtins.property
    @jsii.member(jsii_name="boot")
    def boot(self) -> _cdktf_9a9027ec.IResolvable:
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "boot"))
    @builtins.property
    @jsii.member(jsii_name="deviceName")
    def device_name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "deviceName"))
    @builtins.property
    @jsii.member(jsii_name="diskEncryptionKey")
    def disk_encryption_key(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyList:
        '''Nested computed ``disk_encryption_key`` blocks, wrapped in their own list proxy.'''
        return typing.cast(DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyList, jsii.get(self, "diskEncryptionKey"))
    @builtins.property
    @jsii.member(jsii_name="diskName")
    def disk_name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "diskName"))
    @builtins.property
    @jsii.member(jsii_name="diskSizeGb")
    def disk_size_gb(self) -> jsii.Number:
        return typing.cast(jsii.Number, jsii.get(self, "diskSizeGb"))
    @builtins.property
    @jsii.member(jsii_name="diskType")
    def disk_type(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "diskType"))
    @builtins.property
    @jsii.member(jsii_name="interface")
    def interface(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "interface"))
    @builtins.property
    @jsii.member(jsii_name="labels")
    def labels(self) -> _cdktf_9a9027ec.StringMap:
        return typing.cast(_cdktf_9a9027ec.StringMap, jsii.get(self, "labels"))
    @builtins.property
    @jsii.member(jsii_name="mode")
    def mode(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "mode"))
    @builtins.property
    @jsii.member(jsii_name="provisionedIops")
    def provisioned_iops(self) -> jsii.Number:
        return typing.cast(jsii.Number, jsii.get(self, "provisionedIops"))
    @builtins.property
    @jsii.member(jsii_name="resourcePolicies")
    def resource_policies(self) -> typing.List[builtins.str]:
        return typing.cast(typing.List[builtins.str], jsii.get(self, "resourcePolicies"))
    @builtins.property
    @jsii.member(jsii_name="source")
    def source(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "source"))
    @builtins.property
    @jsii.member(jsii_name="sourceImage")
    def source_image(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "sourceImage"))
    @builtins.property
    @jsii.member(jsii_name="sourceImageEncryptionKey")
    def source_image_encryption_key(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyList":
        '''Nested computed ``source_image_encryption_key`` blocks, wrapped in their own list proxy.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyList", jsii.get(self, "sourceImageEncryptionKey"))
    @builtins.property
    @jsii.member(jsii_name="sourceSnapshot")
    def source_snapshot(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "sourceSnapshot"))
    @builtins.property
    @jsii.member(jsii_name="sourceSnapshotEncryptionKey")
    def source_snapshot_encryption_key(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyList":
        '''Nested computed ``source_snapshot_encryption_key`` blocks, wrapped in their own list proxy.'''
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyList", jsii.get(self, "sourceSnapshotEncryptionKey"))
    @builtins.property
    @jsii.member(jsii_name="type")
    def type(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "type"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateDisk]:
        '''The plain struct value this reference wraps, if any.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateDisk], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDisk],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__2e929ce5732320e943d22d8db254b6de76e9e9e8b98db83d03b26dcf453771fe)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey:
    '''Empty struct backing the computed ``disk.source_image_encryption_key`` attribute; it declares no settable fields.'''

    def __init__(self) -> None:
        # No configurable fields, so the backing dict stays empty.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        is_equal = rhs == self
        return not is_equal

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey({rendered})"
class DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyList",
):
    '''jsii proxy over the computed ``source_image_encryption_key`` list; ``get()`` returns a per-index output reference.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5662c2647a1ecc2a85f5a10ccb254ced87db34d8c252a8e5817d05e35755ef0d)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__98bb11c74508bbd44d96b67ebc422d9010898b5ea4d9a755bbce4296dd307467)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__d25507bce96f9fc52bc537d5fce4eba14bfdbeb71860e911e0d8372de7c5d64b)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__586d8b813888d952cce1092f775af326badd90a79b583b37e02fec91f34b7ef0)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__bcecdd405063903a2b05edf089aa4f15c626b2eb0b09e49b291ad4369926bc78)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyOutputReference",
):
    '''Per-index view of one computed ``source_image_encryption_key`` element; attribute reads go through the jsii kernel.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__0ecf3cfe37d89d7ca4758063253ad9a4de35838af5a9ae46b34eba74493fea6a)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="kmsKeySelfLink")
    def kms_key_self_link(self) -> builtins.str:
        '''Read-only ``kms_key_self_link`` attribute resolved via jsii.'''
        return typing.cast(builtins.str, jsii.get(self, "kmsKeySelfLink"))
    @builtins.property
    @jsii.member(jsii_name="kmsKeyServiceAccount")
    def kms_key_service_account(self) -> builtins.str:
        '''Read-only ``kms_key_service_account`` attribute resolved via jsii.'''
        return typing.cast(builtins.str, jsii.get(self, "kmsKeyServiceAccount"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey]:
        '''The plain struct value this reference wraps, if any.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__fdaefa1f92d5ac98ee5a90b086f6847c31842d8e459836329886f0fad793f976)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey:
    '''Empty struct backing the computed ``disk.source_snapshot_encryption_key`` attribute; it declares no settable fields.'''

    def __init__(self) -> None:
        # No configurable fields, so the backing dict stays empty.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        is_equal = rhs == self
        return not is_equal

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey({rendered})"
class DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyList",
):
    '''jsii proxy over the computed ``source_snapshot_encryption_key`` list; ``get()`` returns a per-index output reference.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__1846f9f09808f12ad195907a0d41480dac567521e5844ca96a085b590d0f8879)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__df9b4d9b7b8b9110698644d867f08cb211888d986f5c17863e5a677b6e957d77)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__4980f66b48566aff282a591fb5b5ffcfae87a690b018d57079aee4d030e06f98)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3d9333419fc21a545c98f696fd86d3ba028c1e3c689c0c2fe423d74033cab76e)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c60b478bccd6e47413c67acd602fccd46844c87822d4fb79cb4e1b10034293dd)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyOutputReference",
):
    '''Per-index view of one computed ``source_snapshot_encryption_key`` element; attribute reads go through the jsii kernel.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Runtime argument validation against the generated stub; skipped under ``python -O``.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7287c1a6abc6a12964e0f3dd4c1df927f9c3a2fb9c5fe8efe8d4b442d54dd2ad)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        # Construction is delegated to the jsii kernel.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="kmsKeySelfLink")
    def kms_key_self_link(self) -> builtins.str:
        '''Read-only ``kms_key_self_link`` attribute resolved via jsii.'''
        return typing.cast(builtins.str, jsii.get(self, "kmsKeySelfLink"))
    @builtins.property
    @jsii.member(jsii_name="kmsKeyServiceAccount")
    def kms_key_service_account(self) -> builtins.str:
        '''Read-only ``kms_key_service_account`` attribute resolved via jsii.'''
        return typing.cast(builtins.str, jsii.get(self, "kmsKeyServiceAccount"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey]:
        '''The plain struct value this reference wraps, if any.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3d9812be2a1231f8aa42037c5a85aaa14730757178a78767057d2b60f95dd94a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateGuestAccelerator",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateGuestAccelerator:
    '''Generated jsii struct that declares no settable fields (``name_mapping`` is empty).'''

    def __init__(self) -> None:
        '''Create an empty struct; there are no configurable properties.'''
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Instances are equal when they share a class and hold equal values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Logical inverse of ``__eq__``.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(
            "{}={}".format(key, repr(val)) for key, val in self._values.items()
        )
        return "DataGoogleComputeRegionInstanceTemplateGuestAccelerator(%s)" % rendered
class DataGoogleComputeRegionInstanceTemplateGuestAcceleratorList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateGuestAcceleratorList",
):
    '''Generated jsii proxy for a list of guest-accelerator complex objects; ``get`` yields per-item output references.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__fdabeb0f74505f96f669cd42ce62cc7c5d0577e0339b71169be0f180dddfdc41)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateGuestAcceleratorOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__128719fd656324b78d5e822e0dd90f0d70e9fdd32f8f9684d5e1504acacb07cf)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateGuestAcceleratorOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__aa4e521c7eab22475668c4550efad71d4ddec0364e99bdc06d01062f78ca620f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__e8ede773b18e86993b8b8cdd493dde7d6797a7fa4fc2561f475aaf186c0a392e)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__289390970e507815e670a56da9c385213b18f7147a49f7c181d4ce825e20f7da)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateGuestAcceleratorOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateGuestAcceleratorOutputReference",
):
    '''Generated jsii output reference exposing one guest-accelerator item's attributes.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__437c5fdd80a2f70c5e4f8abaa409d6ac2669d848f78698e2a3bf6b247441be99)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="count")
    def count(self) -> jsii.Number:
        '''The ``count`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(jsii.Number, jsii.get(self, "count"))

    @builtins.property
    @jsii.member(jsii_name="type")
    def type(self) -> builtins.str:
        '''The ``type`` attribute, read via the jsii runtime (read-only here).'''
        # NOTE: generated name shadows the ``type`` builtin inside this class body.
        return typing.cast(builtins.str, jsii.get(self, "type"))

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateGuestAccelerator]:
        '''The struct value backing this output reference, or ``None`` if unset.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateGuestAccelerator], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateGuestAccelerator],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__45ea3ee5f039de3112a4d2aca107fea0db54e71c2c19bcd1a6c3361d6f58bb1f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterface",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateNetworkInterface:
    '''Generated jsii struct that declares no settable fields (``name_mapping`` is empty).'''

    def __init__(self) -> None:
        '''Create an empty struct; there are no configurable properties.'''
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Instances are equal when they share a class and hold equal values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Logical inverse of ``__eq__``.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(
            "{}={}".format(key, repr(val)) for key, val in self._values.items()
        )
        return "DataGoogleComputeRegionInstanceTemplateNetworkInterface(%s)" % rendered
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig:
    '''Generated jsii struct that declares no settable fields (``name_mapping`` is empty).'''

    def __init__(self) -> None:
        '''Create an empty struct; there are no configurable properties.'''
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Instances are equal when they share a class and hold equal values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Logical inverse of ``__eq__``.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(
            "{}={}".format(key, repr(val)) for key, val in self._values.items()
        )
        return "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig(%s)" % rendered
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigList",
):
    '''Generated jsii proxy for a list of access-config complex objects; ``get`` yields per-item output references.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__222b7dd8c76aa0b7527e6a74b16d0728df4117f78eef66f01fa1b3b236b7ee68)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__0afe3380928a882219901b0d62057a48574b35022becefaf11540c5a65a70d2e)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__1a300cd75f8ebebb361eaa839eb81aab8118d4a0a69ffbaa5fd85b53f0bec040)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__77bb8fccee7182547a5903c16ed5d1a951ab55be5f82aa90f2670b043d1699bd)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__442b9c3df29b0fa859f24afe4d7cb89e7126c88daea922396475dc7cbf33b3b2)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigOutputReference",
):
    '''Generated jsii output reference exposing one access-config item's attributes.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__20336661d03ce316794bc6131d925789924fb012ba3c5164e0a664f4268bb1b9)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="natIp")
    def nat_ip(self) -> builtins.str:
        '''The ``natIp`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "natIp"))

    @builtins.property
    @jsii.member(jsii_name="networkTier")
    def network_tier(self) -> builtins.str:
        '''The ``networkTier`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "networkTier"))

    @builtins.property
    @jsii.member(jsii_name="publicPtrDomainName")
    def public_ptr_domain_name(self) -> builtins.str:
        '''The ``publicPtrDomainName`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "publicPtrDomainName"))

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig]:
        '''The struct value backing this output reference, or ``None`` if unset.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c1a371706f9eff4186ff4ebfd4abcbde3c40186a02176b3d931a317e6d8ea447)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange:
    '''Generated jsii struct that declares no settable fields (``name_mapping`` is empty).'''

    def __init__(self) -> None:
        '''Create an empty struct; there are no configurable properties.'''
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Instances are equal when they share a class and hold equal values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Logical inverse of ``__eq__``.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(
            "{}={}".format(key, repr(val)) for key, val in self._values.items()
        )
        return "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange(%s)" % rendered
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeList",
):
    '''Generated jsii proxy for a list of alias-IP-range complex objects; ``get`` yields per-item output references.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__616817ba84588cd47b6c785adda0b15790464aa85f2ba49e393647bb2df9bf26)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ba47853d256ca8899c84ddfddc47cfd08bd73af2eb47edd6c234dbbdba476243)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__59578361ddedecb318c4a05893e50d64cb3117ecb2f3c18914165cc2a7755504)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__2506faf45cfe3d0cc15aab0da6362b1076fa768f7e8fadbd3539524f07b32085)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ebcd564b75021034756f50fbd05100fa42bfbc2063005042396eb6e06b29a588)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeOutputReference",
):
    '''Generated jsii output reference exposing one alias-IP-range item's attributes.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__78ea2e21936eb98d8fe50bf61bdc81e2edad89d5271c9734d659be0465a06aa6)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="ipCidrRange")
    def ip_cidr_range(self) -> builtins.str:
        '''The ``ipCidrRange`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "ipCidrRange"))

    @builtins.property
    @jsii.member(jsii_name="subnetworkRangeName")
    def subnetwork_range_name(self) -> builtins.str:
        '''The ``subnetworkRangeName`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "subnetworkRangeName"))

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange]:
        '''The struct value backing this output reference, or ``None`` if unset.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__0d7ad95835fe1f206be1db28f51421d4ce0e99f03f0177da3b3c142257a2c5bd)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig:
    '''Generated jsii struct that declares no settable fields (``name_mapping`` is empty).'''

    def __init__(self) -> None:
        '''Create an empty struct; there are no configurable properties.'''
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        '''Instances are equal when they share a class and hold equal values.'''
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        '''Logical inverse of ``__eq__``.'''
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(
            "{}={}".format(key, repr(val)) for key, val in self._values.items()
        )
        return "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig(%s)" % rendered
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigList",
):
    '''Generated jsii proxy for a list of IPv6 access-config complex objects; ``get`` yields per-item output references.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ddf014ee458a02642b363eefc9dd6b49f1d7bfd72ca950d643c166405dcbbf93)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ebe5166037b1d63466ff137fb457e197dbf50ca48f1fae53b28a13cdf70321ab)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__599f39215f7414bc06f1a91fa5866cd5a0cfa75f91eef15f334008c9de71f2be)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5a2f9dba55b9ad094efafbfd1896790c95c75c7423c2f4a9db68aebb13e0eb80)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__613d934ce31f561bd63cedce929534b0938d20014baa9296decb088a20578e8f)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigOutputReference",
):
    '''Generated jsii output reference exposing one IPv6 access-config item's attributes.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f001c976a563b66cb9396924c188db793ed20e777610bf5ee9986d2e05f5d0b6)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])

    @builtins.property
    @jsii.member(jsii_name="externalIpv6")
    def external_ipv6(self) -> builtins.str:
        '''The ``externalIpv6`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "externalIpv6"))

    @builtins.property
    @jsii.member(jsii_name="externalIpv6PrefixLength")
    def external_ipv6_prefix_length(self) -> builtins.str:
        '''The ``externalIpv6PrefixLength`` attribute; note the generated type is ``str``, not a number.'''
        return typing.cast(builtins.str, jsii.get(self, "externalIpv6PrefixLength"))

    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        '''The ``name`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "name"))

    @builtins.property
    @jsii.member(jsii_name="networkTier")
    def network_tier(self) -> builtins.str:
        '''The ``networkTier`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "networkTier"))

    @builtins.property
    @jsii.member(jsii_name="publicPtrDomainName")
    def public_ptr_domain_name(self) -> builtins.str:
        '''The ``publicPtrDomainName`` attribute, read via the jsii runtime (read-only here).'''
        return typing.cast(builtins.str, jsii.get(self, "publicPtrDomainName"))

    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig]:
        '''The struct value backing this output reference, or ``None`` if unset.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig], jsii.get(self, "internalValue"))

    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__661ea64fdf39a2f24ecbd3c3ad3d483298fcf303001c13602010b5598b032d4d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceList",
):
    '''Generated jsii proxy for a list of network-interface complex objects; ``get`` yields per-item output references.'''

    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation is skipped when Python runs with -O (``__debug__`` is False).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__16f5e3201a577edd2bc8a332fee232a267b586fddd50f79438a1c2068bebde4f)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])

    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__894eda52be1e9cafed5924f885a7e70f6fdf370d145c64acb37abcbbe10ef119)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkInterfaceOutputReference", jsii.invoke(self, "get", [index]))

    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))

    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__9c70e69646bdcd74d51e726f1ffdcd69bf3713da0c52b31c0ddf26160ad656b0)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)

    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))

    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__e9d89f7de8a2b949cec337841aff194e9ec35db5d03703ddd8d6f8efa2310dfa)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)

    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))

    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__58703a4312d7b8baa109bf4c2f5251dcb2ae9420a2f5fac76ac92fe7831d69ef)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateNetworkInterfaceOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkInterfaceOutputReference",
):
    '''Generated reference to one item of the computed ``network_interface`` list.

    Every property below is read-only and resolved through the jsii
    runtime (``jsii.get``); nested blocks (access_config, alias_ip_range,
    ipv6_access_config) are exposed as their own generated list wrappers.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__4b8a2724281380348a44ab189e92e0b083c50bd17d673c7c263da24397642ae4)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="accessConfig")
    def access_config(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigList:
        return typing.cast(DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigList, jsii.get(self, "accessConfig"))
    @builtins.property
    @jsii.member(jsii_name="aliasIpRange")
    def alias_ip_range(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeList:
        return typing.cast(DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeList, jsii.get(self, "aliasIpRange"))
    @builtins.property
    @jsii.member(jsii_name="ipv6AccessConfig")
    def ipv6_access_config(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigList:
        return typing.cast(DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigList, jsii.get(self, "ipv6AccessConfig"))
    @builtins.property
    @jsii.member(jsii_name="ipv6AccessType")
    def ipv6_access_type(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "ipv6AccessType"))
    @builtins.property
    @jsii.member(jsii_name="name")
    def name(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "name"))
    @builtins.property
    @jsii.member(jsii_name="network")
    def network(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "network"))
    @builtins.property
    @jsii.member(jsii_name="networkIp")
    def network_ip(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "networkIp"))
    @builtins.property
    @jsii.member(jsii_name="nicType")
    def nic_type(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "nicType"))
    @builtins.property
    @jsii.member(jsii_name="queueCount")
    def queue_count(self) -> jsii.Number:
        return typing.cast(jsii.Number, jsii.get(self, "queueCount"))
    @builtins.property
    @jsii.member(jsii_name="stackType")
    def stack_type(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "stackType"))
    @builtins.property
    @jsii.member(jsii_name="subnetwork")
    def subnetwork(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "subnetwork"))
    @builtins.property
    @jsii.member(jsii_name="subnetworkProject")
    def subnetwork_project(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "subnetworkProject"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterface]:
        # Snapshot of the whole item as its struct type, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterface], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterface],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__aa9ac5d53f38c069b15fccd416b41fcb4148e27cc18335e0a39a70aa261a9fcd)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig:
    '''Empty generated struct for the computed ``network_performance_config`` block.

    This data-source struct declares no members of its own
    (``name_mapping`` is empty); values are read via the matching
    OutputReference class instead.
    '''

    def __init__(self) -> None:
        # Keep an empty value map so equality and repr behave like the
        # other generated structs in this module.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig({rendered})"
class DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigList",
):
    '''Generated jsii list wrapper over the computed ``network_performance_config`` attribute.

    Holds only references to the parent resource and attribute name; each
    item is produced on demand through :meth:`get` via the jsii runtime.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation is skipped under ``python -O``.
            type_hints = typing.get_type_hints(_typecheckingstub__8c439964d61a0f1f572a1395ad4cae216eb2aa47ac3b2cc83f0ace7049c34462)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__72a230ac2f3dc50b6c23b6c9058735dfb0dc5b5f892eec34c69aba3c66408fe8)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__613f3405244717eb160fd08b03c1f73bc5d2ad62495e2d2c7a851a80dc7987bc)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3d08e0d2276a58953c750576377a6dd5333eadd311cc2fd91b41c9c5961ff165)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__96f0b778f0f7b2d47ccd688ac3ee10c27c8cae3e4645c4e3a3461f9b71b65cf6)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigOutputReference",
):
    '''Generated reference to one item of the computed ``network_performance_config`` list.

    Exposes the read-only ``total_egress_bandwidth_tier`` attribute,
    resolved through the jsii runtime (``jsii.get``).
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__65ffdc08082cc7084cb279418e176b1bcda049cc89d77d2a479f10dc881066a4)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="totalEgressBandwidthTier")
    def total_egress_bandwidth_tier(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "totalEgressBandwidthTier"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig]:
        # Snapshot of the whole item as its struct type, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__1b6f4d0ea37a34cbf9c428e54ce3c7a2b86081366dd7547798875b5e076071f7)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateReservationAffinity",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateReservationAffinity:
    '''Empty generated struct for the computed ``reservation_affinity`` block.

    This data-source struct declares no members of its own
    (``name_mapping`` is empty); values are read via the matching
    OutputReference class instead.
    '''

    def __init__(self) -> None:
        # Keep an empty value map so equality and repr behave like the
        # other generated structs in this module.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateReservationAffinity({rendered})"
class DataGoogleComputeRegionInstanceTemplateReservationAffinityList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateReservationAffinityList",
):
    '''Generated jsii list wrapper over the computed ``reservation_affinity`` attribute.

    Holds only references to the parent resource and attribute name; each
    item is produced on demand through :meth:`get` via the jsii runtime.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation is skipped under ``python -O``.
            type_hints = typing.get_type_hints(_typecheckingstub__463cbfb7e4e5de1d887bf9951013671b9da75d5c6d3f50ed44c119fe0d0594f0)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateReservationAffinityOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c19985995c174e15a77a560850f511eb78596749ca7cfa2ddac2d09fb2f2698a)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateReservationAffinityOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f056e49ef502c3a09e38b2a8c378294288002f76fdc525cb5d6c0c62a8978c5a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__dc6edb0ae62ff6de12c585d69a544c228770163628bdf4df60e5b32c97ba4895)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__b81d59310856fbd48a15aacda1b59a1050978a0bab0e725f5947e51f23ac0d64)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateReservationAffinityOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateReservationAffinityOutputReference",
):
    '''Generated reference to one item of the computed ``reservation_affinity`` list.

    Exposes the read-only ``type`` attribute and the nested
    ``specific_reservation`` block, resolved through the jsii runtime
    (``jsii.get``).
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f58d9ee3f30fe0f9547b918913e750cc6ee059fac04fd99808a06d35cd7480ba)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="specificReservation")
    def specific_reservation(
        self,
    ) -> "DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationList":
        return typing.cast("DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationList", jsii.get(self, "specificReservation"))
    @builtins.property
    @jsii.member(jsii_name="type")
    def type(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "type"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinity]:
        # Snapshot of the whole item as its struct type, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinity], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinity],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a9e20c4624f38f81710d11bebb1ea182df9050bd73d2f666966b0241099f9482)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation:
    '''Empty generated struct for the computed ``specific_reservation`` block.

    This data-source struct declares no members of its own
    (``name_mapping`` is empty); values are read via the matching
    OutputReference class instead.
    '''

    def __init__(self) -> None:
        # Keep an empty value map so equality and repr behave like the
        # other generated structs in this module.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation({rendered})"
class DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationList",
):
    '''Generated jsii list wrapper over the computed ``specific_reservation`` attribute.

    Holds only references to the parent resource and attribute name; each
    item is produced on demand through :meth:`get` via the jsii runtime.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation is skipped under ``python -O``.
            type_hints = typing.get_type_hints(_typecheckingstub__52b16e49cb5ed0ef5a1dad9bac9078222bb6eea150da90f7d4e7aa52286be721)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__83332ddb1f3ed9c60d6ed18ff7f208a942a79dbf428839cef01be95a764634c7)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__34b4088938efc6d4f99247d9119ae856a3b523a4828f625d3fa9102c24b8aeda)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__db8a41025affee9a14d3dd043d5609202db046ab93187dfbde0ce745cb3485c8)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f8651206dd660245f2585cc8c296ad6ee01073541662d6e3efac063f6a6e2e61)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationOutputReference",
):
    '''Generated reference to one item of the computed ``specific_reservation`` list.

    Exposes the read-only ``key`` and ``values`` attributes, resolved
    through the jsii runtime (``jsii.get``).
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__9c8243ef74fbb77f138c78c7b82f6b7a5dffcc10763cdcc343520c2589a9fee3)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="key")
    def key(self) -> builtins.str:
        return typing.cast(builtins.str, jsii.get(self, "key"))
    @builtins.property
    @jsii.member(jsii_name="values")
    def values(self) -> typing.List[builtins.str]:
        return typing.cast(typing.List[builtins.str], jsii.get(self, "values"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation]:
        # Snapshot of the whole item as its struct type, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5c08328a137fccc2fa2c129ca332d1d1c466bce5c430aef958e617d5bfa853b7)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateScheduling",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateScheduling:
    '''Empty generated struct for the computed ``scheduling`` block.

    This data-source struct declares no members of its own
    (``name_mapping`` is empty); values are read via the matching
    OutputReference class instead.
    '''

    def __init__(self) -> None:
        # Keep an empty value map so equality and repr behave like the
        # other generated structs in this module.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateScheduling({rendered})"
class DataGoogleComputeRegionInstanceTemplateSchedulingList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingList",
):
    '''Generated jsii list wrapper over the computed ``scheduling`` attribute.

    Holds only references to the parent resource and attribute name; each
    item is produced on demand through :meth:`get` via the jsii runtime.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation is skipped under ``python -O``.
            type_hints = typing.get_type_hints(_typecheckingstub__97854e26560ed7de0e468d2656da7b58173246f2c87980e8f1ec76664552123b)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateSchedulingOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__59453c5fbc54343e700f2c2055d8202c694039a0bee6a94ba986e43bd9c8e8ad)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        return typing.cast("DataGoogleComputeRegionInstanceTemplateSchedulingOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__f7c663d88d9d3232320f5a238e8e1486097bd6cec58b8ed915d7e9fd1c11a210)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a35ed9c52c2d074565a134e4f89787e0da8009a57b0033dc42d564ce6715bd74)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__ab7953552333e4244744275ae293b1961c398217d6cc6d67951fdd1257f2ce07)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout:
    '''Empty generated struct for the computed ``local_ssd_recovery_timeout`` block.

    This data-source struct declares no members of its own
    (``name_mapping`` is empty); values are read via the matching
    OutputReference class instead.
    '''

    def __init__(self) -> None:
        # Keep an empty value map so equality and repr behave like the
        # other generated structs in this module.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        if not isinstance(rhs, self.__class__):
            return False
        return self._values == rhs._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout({rendered})"
class DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutList",
):
    '''jsii proxy over the computed list of ``local_ssd_recovery_timeout`` blocks.

    Items are resolved lazily through the jsii kernel; use :meth:`get` to
    obtain an output reference for one index.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation runs only under __debug__ (skipped with -O);
        # expected types come from the generated typechecking stub.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__12d3752dc22d08545adf3bfa1554130f9e888b38b77f93ab7ac2d4d68288b080)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        # Instantiate the backing jsii object with the validated arguments.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__41dec3e337d4750e5f24046b1eb98e44e5e105ca4667bf01429c9f0c06e57c9d)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        # Delegate the lookup to the jsii kernel's "get" member.
        return typing.cast("DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__38192640f78fe860d6d6e83ac873572b17ad65769e4b0a7f60869c7f1fb76de5)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3698216655e739db4184dc36214c3562a45605ac5bc6c7b26c450048c89e37dd)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__a49afada5445c4009b9b533d6555fae793543916d608506aeeec5faf3ec55fb7)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutOutputReference",
):
    '''jsii proxy exposing one resolved ``local_ssd_recovery_timeout`` item.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Validation only under __debug__; types come from the generated stub.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5ddee32d9b7afe1b580592640e74a805331afd2e601e1553275f9256a9fcdc8b)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="nanos")
    def nanos(self) -> jsii.Number:
        '''Computed ``nanos`` attribute, read through the jsii kernel.'''
        return typing.cast(jsii.Number, jsii.get(self, "nanos"))
    @builtins.property
    @jsii.member(jsii_name="seconds")
    def seconds(self) -> jsii.Number:
        '''Computed ``seconds`` attribute, read through the jsii kernel.'''
        return typing.cast(jsii.Number, jsii.get(self, "seconds"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout]:
        # Struct snapshot of this item, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__740dce54ccc9bf5c5c6e6da6591730f81ed7734c3c711df49e7d14161b6acc36)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration:
    '''Data struct for the computed ``max_run_duration`` block.

    No configurable fields are declared (``name_mapping`` is empty), so the
    backing ``_values`` mapping stays empty; the dunders implement the common
    jsii struct protocol.
    '''

    def __init__(self) -> None:
        # Empty mapping keeps the shared equality/repr protocol uniform.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, other: typing.Any) -> builtins.bool:
        return isinstance(other, self.__class__) and self._values == other._values

    def __ne__(self, other: typing.Any) -> builtins.bool:
        return not (other == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration({rendered})"
class DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationList",
):
    '''jsii proxy over the computed list of ``max_run_duration`` blocks.

    Items are resolved lazily through the jsii kernel; use :meth:`get` to
    obtain an output reference for one index.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation runs only under __debug__ (skipped with -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__555004bb1acee97be3e541feaa15afb80d66b19fba1fb032b4815f30bc7e6127)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__90e3e98efaaa1bb3252fd368450f2bc63bcab2c2e1c73babc8832f576747f35e)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        # Delegate the lookup to the jsii kernel's "get" member.
        return typing.cast("DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__de95472714a52803518c04f4a4796510662679ca9e3c78817a04d571ff07527d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__1e2b2d100d94f8bdedd834765112ed7863a47e662e2891875c46ed6324efd71d)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__b8058360755a10a337efb3a221d1f0e49c3d6aa84afc558e9c6f7fd1509c96ce)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationOutputReference",
):
    '''jsii proxy exposing one resolved ``max_run_duration`` item.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Validation only under __debug__; types come from the generated stub.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__8cecb66c8f476178ce380e9b3cd50263fa85b0c07748bbd51938572e9989a05e)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="nanos")
    def nanos(self) -> jsii.Number:
        '''Computed ``nanos`` attribute, read through the jsii kernel.'''
        return typing.cast(jsii.Number, jsii.get(self, "nanos"))
    @builtins.property
    @jsii.member(jsii_name="seconds")
    def seconds(self) -> jsii.Number:
        '''Computed ``seconds`` attribute, read through the jsii kernel.'''
        return typing.cast(jsii.Number, jsii.get(self, "seconds"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration]:
        # Struct snapshot of this item, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__c115b3bfb24cd7dd4b96dc7a5732361410018743c22dd8ee3e331dea4a5d1479)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities:
    '''Data struct for the computed ``node_affinities`` block.

    No configurable fields are declared (``name_mapping`` is empty), so the
    backing ``_values`` mapping stays empty; the dunders implement the common
    jsii struct protocol.
    '''

    def __init__(self) -> None:
        # Empty mapping keeps the shared equality/repr protocol uniform.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, other: typing.Any) -> builtins.bool:
        return isinstance(other, self.__class__) and self._values == other._values

    def __ne__(self, other: typing.Any) -> builtins.bool:
        return not (other == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities({rendered})"
class DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesList",
):
    '''jsii proxy over the computed list of ``node_affinities`` blocks.

    Items are resolved lazily through the jsii kernel; use :meth:`get` to
    obtain an output reference for one index.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation runs only under __debug__ (skipped with -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__247ac4293e58493ce0645c79758c49ad1cb50d5bf1b070590455ecd4df8a3ff0)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__d9af4bf260d9ebe928543291942eeadcb357a264b42c2e312a8a344185427d26)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        # Delegate the lookup to the jsii kernel's "get" member.
        return typing.cast("DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__3a5747ca70f748c78d86ba23a51c1be208d39bb3e15804e39a3a08df86d7cc59)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7ef977d6e884a47584ff7087000c0b67b6138a7680968539c1a3ef1c28ad5587)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__62edb1797edd436ba5b18b49f09ea9147d39388bbc83299564cec337839d273c)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesOutputReference",
):
    '''jsii proxy exposing one resolved ``node_affinities`` item.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Validation only under __debug__; types come from the generated stub.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__03ec9069b83aebbbc5dc32c29059c7ce9b04e902c096c3d5e5ff6552e2bbb293)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="key")
    def key(self) -> builtins.str:
        '''Computed ``key`` attribute, read through the jsii kernel.'''
        return typing.cast(builtins.str, jsii.get(self, "key"))
    @builtins.property
    @jsii.member(jsii_name="operator")
    def operator(self) -> builtins.str:
        '''Computed ``operator`` attribute, read through the jsii kernel.'''
        return typing.cast(builtins.str, jsii.get(self, "operator"))
    @builtins.property
    @jsii.member(jsii_name="values")
    def values(self) -> typing.List[builtins.str]:
        '''Computed ``values`` attribute, read through the jsii kernel.'''
        return typing.cast(typing.List[builtins.str], jsii.get(self, "values"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities]:
        # Struct snapshot of this item, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__4dcdabf07c6907405f38a68515ed61eb98e5186f1a73cba026213adbe1a4f6fb)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
class DataGoogleComputeRegionInstanceTemplateSchedulingOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateSchedulingOutputReference",
):
    '''jsii proxy exposing one resolved ``scheduling`` block.

    All properties are read-only computed attributes fetched through the
    jsii kernel; nested blocks are surfaced as their generated List proxies.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Validation only under __debug__; types come from the generated stub.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__d565a5e2929136ca4d76c4d019158ae011b75dc19c260abce73f73bfbf39d13f)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="automaticRestart")
    def automatic_restart(self) -> _cdktf_9a9027ec.IResolvable:
        '''Computed ``automatic_restart`` attribute (boolean token).'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "automaticRestart"))
    @builtins.property
    @jsii.member(jsii_name="instanceTerminationAction")
    def instance_termination_action(self) -> builtins.str:
        '''Computed ``instance_termination_action`` attribute.'''
        return typing.cast(builtins.str, jsii.get(self, "instanceTerminationAction"))
    @builtins.property
    @jsii.member(jsii_name="localSsdRecoveryTimeout")
    def local_ssd_recovery_timeout(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutList:
        '''Nested ``local_ssd_recovery_timeout`` blocks as a list proxy.'''
        return typing.cast(DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutList, jsii.get(self, "localSsdRecoveryTimeout"))
    @builtins.property
    @jsii.member(jsii_name="maintenanceInterval")
    def maintenance_interval(self) -> builtins.str:
        '''Computed ``maintenance_interval`` attribute.'''
        return typing.cast(builtins.str, jsii.get(self, "maintenanceInterval"))
    @builtins.property
    @jsii.member(jsii_name="maxRunDuration")
    def max_run_duration(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationList:
        '''Nested ``max_run_duration`` blocks as a list proxy.'''
        return typing.cast(DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationList, jsii.get(self, "maxRunDuration"))
    @builtins.property
    @jsii.member(jsii_name="minNodeCpus")
    def min_node_cpus(self) -> jsii.Number:
        '''Computed ``min_node_cpus`` attribute.'''
        return typing.cast(jsii.Number, jsii.get(self, "minNodeCpus"))
    @builtins.property
    @jsii.member(jsii_name="nodeAffinities")
    def node_affinities(
        self,
    ) -> DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesList:
        '''Nested ``node_affinities`` blocks as a list proxy.'''
        return typing.cast(DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesList, jsii.get(self, "nodeAffinities"))
    @builtins.property
    @jsii.member(jsii_name="onHostMaintenance")
    def on_host_maintenance(self) -> builtins.str:
        '''Computed ``on_host_maintenance`` attribute.'''
        return typing.cast(builtins.str, jsii.get(self, "onHostMaintenance"))
    @builtins.property
    @jsii.member(jsii_name="preemptible")
    def preemptible(self) -> _cdktf_9a9027ec.IResolvable:
        '''Computed ``preemptible`` attribute (boolean token).'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "preemptible"))
    @builtins.property
    @jsii.member(jsii_name="provisioningModel")
    def provisioning_model(self) -> builtins.str:
        '''Computed ``provisioning_model`` attribute.'''
        return typing.cast(builtins.str, jsii.get(self, "provisioningModel"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateScheduling]:
        # Struct snapshot of this block, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateScheduling], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateScheduling],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__917db9c6762fa851274946c828c3b208c5230a25e9a21e86e02d1618c0328fe3)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateServiceAccount",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateServiceAccount:
    '''Data struct for the computed ``service_account`` block.

    No configurable fields are declared (``name_mapping`` is empty), so the
    backing ``_values`` mapping stays empty; the dunders implement the common
    jsii struct protocol.
    '''

    def __init__(self) -> None:
        # Empty mapping keeps the shared equality/repr protocol uniform.
        self._values: typing.Dict[builtins.str, typing.Any] = {}

    def __eq__(self, other: typing.Any) -> builtins.bool:
        return isinstance(other, self.__class__) and self._values == other._values

    def __ne__(self, other: typing.Any) -> builtins.bool:
        return not (other == self)

    def __repr__(self) -> str:
        rendered = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return f"DataGoogleComputeRegionInstanceTemplateServiceAccount({rendered})"
class DataGoogleComputeRegionInstanceTemplateServiceAccountList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateServiceAccountList",
):
    '''jsii proxy over the computed list of ``service_account`` blocks.

    Items are resolved lazily through the jsii kernel; use :meth:`get` to
    obtain an output reference for one index.
    '''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Argument validation runs only under __debug__ (skipped with -O).
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__66f3bbd6bea1934eeee1275e8a20f55a578cba14b13fbe4cc6b408a2cb92b8bd)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateServiceAccountOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5d7c5aadf783d1833dce4e17fdf7eed27bf216c5a36a6611b216ca95a212711b)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        # Delegate the lookup to the jsii kernel's "get" member.
        return typing.cast("DataGoogleComputeRegionInstanceTemplateServiceAccountOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__0e200d32968b8ad9bfd642234154bc1a0e6a8263efffb7ad382d3b6d46dc5e34)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__cfcbd63a5bd08102d341260e73345fc00f10c59d0f8a4bb45d6c0f85f3516e15)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__98aaf1660283a3073dd845933576ae8ce083579a52e9524785c9d8d432f90a9b)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateServiceAccountOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateServiceAccountOutputReference",
):
    '''jsii proxy exposing one resolved ``service_account`` item.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        # Validation only under __debug__; types come from the generated stub.
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__4b7ec21479681c6c3d2c3c896a702b978641503879cc0372ef2612a7bf57ad57)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="email")
    def email(self) -> builtins.str:
        '''Computed ``email`` attribute, read through the jsii kernel.'''
        return typing.cast(builtins.str, jsii.get(self, "email"))
    @builtins.property
    @jsii.member(jsii_name="scopes")
    def scopes(self) -> typing.List[builtins.str]:
        '''Computed ``scopes`` attribute, read through the jsii kernel.'''
        return typing.cast(typing.List[builtins.str], jsii.get(self, "scopes"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateServiceAccount]:
        # Struct snapshot of this item, or None when unset.
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateServiceAccount], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateServiceAccount],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__7269df35f44820a3113aba0b2f9b93248bd04b5620c8c812ae9ab952582c3a5a)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
@jsii.data_type(
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig",
    jsii_struct_bases=[],
    name_mapping={},
)
class DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig:
    '''Empty jsii data struct (no mapped attributes); equality and repr are value-based on ``_values``.'''
    def __init__(self) -> None:
        # No constructor arguments: name_mapping above is empty, so _values stays empty.
        self._values: typing.Dict[builtins.str, typing.Any] = {}
    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Value equality: same concrete class and identical _values dict.
        return isinstance(rhs, self.__class__) and rhs._values == self._values
    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        return not (rhs == self)
    def __repr__(self) -> str:
        return "DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig(%s)" % ", ".join(
            k + "=" + repr(v) for k, v in self._values.items()
        )
class DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigList(
    _cdktf_9a9027ec.ComplexList,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigList",
):
    '''jsii-backed list whose items are accessed as ``...ShieldedInstanceConfigOutputReference`` objects via :meth:`get`.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        wraps_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation; skipped entirely when Python runs with -O.
            type_hints = typing.get_type_hints(_typecheckingstub__c373f1ac2909071c31a14981a140fbe8c453b251a4f578621affc63dacbc62b1)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
        # Instantiate the corresponding object on the jsii (JavaScript) side.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
    @jsii.member(jsii_name="get")
    def get(
        self,
        index: jsii.Number,
    ) -> "DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigOutputReference":
        '''
        :param index: the index of the item to return.
        '''
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__dd3f55f94ba428af17b730ad9efdd82722a44893de6e9d75a8df092c6f9c331e)
            check_type(argname="argument index", value=index, expected_type=type_hints["index"])
        # Delegates to the jsii-side "get" member; returns a typed output reference.
        return typing.cast("DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigOutputReference", jsii.invoke(self, "get", [index]))
    @builtins.property
    @jsii.member(jsii_name="terraformAttribute")
    def _terraform_attribute(self) -> builtins.str:
        '''The attribute on the parent resource this class is referencing.'''
        return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
    @_terraform_attribute.setter
    def _terraform_attribute(self, value: builtins.str) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__565408d8e00aaa0a53f1b9490f6b08ff75efdba59cb5bcdb19f9cd49962cce7b)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformAttribute", value)
    @builtins.property
    @jsii.member(jsii_name="terraformResource")
    def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
        '''The parent resource.'''
        return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
    @_terraform_resource.setter
    def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__14bc62f1ccf458e64ec9ca1df408fac32b0dab4ca98aa8972c0e9b03cc314374)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "terraformResource", value)
    @builtins.property
    @jsii.member(jsii_name="wrapsSet")
    def _wraps_set(self) -> builtins.bool:
        '''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
        return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
    @_wraps_set.setter
    def _wraps_set(self, value: builtins.bool) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__8bcdeb4c2aa455d399da8f1085f83bdd39d9de8145fa1e461e67f74cdc65033b)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "wrapsSet", value)
class DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigOutputReference(
    _cdktf_9a9027ec.ComplexObject,
    metaclass=jsii.JSIIMeta,
    jsii_type="@cdktf/provider-google-beta.dataGoogleComputeRegionInstanceTemplate.DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigOutputReference",
):
    '''Typed accessor for one ``shielded_instance_config`` item; all attributes are read through jsii.'''
    def __init__(
        self,
        terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
        terraform_attribute: builtins.str,
        complex_object_index: jsii.Number,
        complex_object_is_from_set: builtins.bool,
    ) -> None:
        '''
        :param terraform_resource: The parent resource.
        :param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: the index of this item in the list.
        :param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
        '''
        if __debug__:
            # Runtime argument validation; skipped entirely when Python runs with -O.
            type_hints = typing.get_type_hints(_typecheckingstub__160d45965290ff6cd69c902137b6ac8cba85dd6af4ba8a9c44924a32630a6003)
            check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
            check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
            check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
            check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
        # Instantiate the corresponding object on the jsii (JavaScript) side.
        jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
    @builtins.property
    @jsii.member(jsii_name="enableIntegrityMonitoring")
    def enable_integrity_monitoring(self) -> _cdktf_9a9027ec.IResolvable:
        '''Read-only ``enableIntegrityMonitoring`` attribute, returned as an unresolved token (IResolvable).'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "enableIntegrityMonitoring"))
    @builtins.property
    @jsii.member(jsii_name="enableSecureBoot")
    def enable_secure_boot(self) -> _cdktf_9a9027ec.IResolvable:
        '''Read-only ``enableSecureBoot`` attribute, returned as an unresolved token (IResolvable).'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "enableSecureBoot"))
    @builtins.property
    @jsii.member(jsii_name="enableVtpm")
    def enable_vtpm(self) -> _cdktf_9a9027ec.IResolvable:
        '''Read-only ``enableVtpm`` attribute, returned as an unresolved token (IResolvable).'''
        return typing.cast(_cdktf_9a9027ec.IResolvable, jsii.get(self, "enableVtpm"))
    @builtins.property
    @jsii.member(jsii_name="internalValue")
    def internal_value(
        self,
    ) -> typing.Optional[DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig]:
        '''The backing struct for this output reference, or ``None`` if unset.'''
        return typing.cast(typing.Optional[DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig], jsii.get(self, "internalValue"))
    @internal_value.setter
    def internal_value(
        self,
        value: typing.Optional[DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig],
    ) -> None:
        if __debug__:
            type_hints = typing.get_type_hints(_typecheckingstub__5015cd4a05b0b1e876282de1b58758ef00a6251a457ac00bcdbf4508bbe24260)
            check_type(argname="argument value", value=value, expected_type=type_hints["value"])
        jsii.set(self, "internalValue", value)
# Explicit public API of this generated module (consumed by ``from ... import *``).
__all__ = [
    "DataGoogleComputeRegionInstanceTemplate",
    "DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures",
    "DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesList",
    "DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeaturesOutputReference",
    "DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig",
    "DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigList",
    "DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfigOutputReference",
    "DataGoogleComputeRegionInstanceTemplateConfig",
    "DataGoogleComputeRegionInstanceTemplateDisk",
    "DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey",
    "DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyList",
    "DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKeyOutputReference",
    "DataGoogleComputeRegionInstanceTemplateDiskList",
    "DataGoogleComputeRegionInstanceTemplateDiskOutputReference",
    "DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey",
    "DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyList",
    "DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKeyOutputReference",
    "DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey",
    "DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyList",
    "DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKeyOutputReference",
    "DataGoogleComputeRegionInstanceTemplateGuestAccelerator",
    "DataGoogleComputeRegionInstanceTemplateGuestAcceleratorList",
    "DataGoogleComputeRegionInstanceTemplateGuestAcceleratorOutputReference",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterface",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigList",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfigOutputReference",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeList",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRangeOutputReference",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigList",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfigOutputReference",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceList",
    "DataGoogleComputeRegionInstanceTemplateNetworkInterfaceOutputReference",
    "DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig",
    "DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigList",
    "DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfigOutputReference",
    "DataGoogleComputeRegionInstanceTemplateReservationAffinity",
    "DataGoogleComputeRegionInstanceTemplateReservationAffinityList",
    "DataGoogleComputeRegionInstanceTemplateReservationAffinityOutputReference",
    "DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation",
    "DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationList",
    "DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservationOutputReference",
    "DataGoogleComputeRegionInstanceTemplateScheduling",
    "DataGoogleComputeRegionInstanceTemplateSchedulingList",
    "DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout",
    "DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutList",
    "DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeoutOutputReference",
    "DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration",
    "DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationList",
    "DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDurationOutputReference",
    "DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities",
    "DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesList",
    "DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinitiesOutputReference",
    "DataGoogleComputeRegionInstanceTemplateSchedulingOutputReference",
    "DataGoogleComputeRegionInstanceTemplateServiceAccount",
    "DataGoogleComputeRegionInstanceTemplateServiceAccountList",
    "DataGoogleComputeRegionInstanceTemplateServiceAccountOutputReference",
    "DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig",
    "DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigList",
    "DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfigOutputReference",
]
# Generated jsii boilerplate; exact semantics are defined by the jsii
# ``publication`` helper module — NOTE(review): not visible from this file.
publication.publish()
# ---------------------------------------------------------------------------
# Type checking stubs.
#
# Each ``_typecheckingstub__<hash>`` function below is only referenced as an
# argument to ``typing.get_type_hints(...)`` inside the ``if __debug__:``
# guards of the classes above: it carries the parameter annotations that
# ``check_type`` validates against.  The bodies are intentionally ``pass``.
# ---------------------------------------------------------------------------
def _typecheckingstub__0445be5f52a77d335997618ebeee5d1ceab1d03828e756af5675194186f193ce(
    scope: _constructs_77d1e7e8.Construct,
    id_: builtins.str,
    *,
    filter: typing.Optional[builtins.str] = None,
    id: typing.Optional[builtins.str] = None,
    most_recent: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
    name: typing.Optional[builtins.str] = None,
    project: typing.Optional[builtins.str] = None,
    region: typing.Optional[builtins.str] = None,
    connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
    count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
    depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
    for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
    lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
    provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
    provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__9377170382217cf2da6c8c6c2f3947c59739c40747b96f581cb4f127f12bc55f(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__85ffc496216e7d54f1b49457b15ef6300affe34aa81046701e22239b2210551a(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__082471c1e41e8b4b84f0255e878273f96bbd17d3ef4a6994741fe47205461674(
    value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__f634f60c7aa7c22b6ad4a20ec87bdc821b364f0e264d82b8a7b2ec39785538f6(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__ca0583ccb9ed584b7e02b9f984bef46ec3e19f5468ec83bb7d5807fd4845e164(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__55ef2b478c9ffb791b51cd5d2f092e292b0847d0a6d398635a2a05f682287cad(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__270f21f41796a803333fe41531e87bce7822cb59d9fa5241a0989b59c5953807(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__aae884b8450c166417b1b1b7398adaccd2e8a7b151c1416d9fa7227178483a33(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__e71ded2b196bb4490aabcfd71fc0d0b77e4882c9dd2d98915ace7640ec43c626(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__96518d0d7a969faa4359328f2b3b5418ddc0b04aba0071866479b937aa1c4e27(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__0da2d5cc4b6fd5a6b8ac7e312c297c0efc416891dec85c9b3c2617ba677e7106(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__1f167d0552f5a6fe81277f191da82191d2bcca34125f72538eb3794fba1eeee3(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__ebde7f995f587176e41b8778f2f4dfda0078e11f40358c0705d4297744e70411(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateAdvancedMachineFeatures],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__313b81cead38eca5584f95ba9e9c634c81747038289f3450242514b845df8614(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__6d362bf5d081ccc7f0c954844a6ffbf222d8c3fb5354456db63b9f85fceef5e5(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__7b50115387e0fba90a6cb645f6509af30e51fce6f50f34e7781fc685d6a3061a(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__3a39bf8737e7650843eb4619f68f198cb18a51eaf63556d39d144332eeb87891(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__d859d606a8ef61ff12462a1f67aa53e704782cfeb24d6f006a7d89d58e53b03e(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__9a4bae0fa27b3dc60dedc4b779cc9cf2d3db2746292c5e2039082815cba8bb99(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__de1aec36dbaf33c9bb9dfd6662161add8cd5c8c53f79bbbc6879a1438d729bbf(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateConfidentialInstanceConfig],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__179c23af75c3264dc0d66483cbbca36c9c26a1e725bd9fef39e578ede8b0fe25(
    *,
    connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
    count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
    depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
    for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
    lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
    provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
    provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
    filter: typing.Optional[builtins.str] = None,
    id: typing.Optional[builtins.str] = None,
    most_recent: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
    name: typing.Optional[builtins.str] = None,
    project: typing.Optional[builtins.str] = None,
    region: typing.Optional[builtins.str] = None,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__6425fcfda2b4c17fec99c70b0355db94f12011800eef2d3c45889a978fe4add8(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__a109be4dcb8377dcb4ab4562c90af1af1944c891d9e5e42ec9a990e12d9f882f(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__564eb73b6e8a6ee51dbb418f0953c147c28b591b7a92adb1380c6f88a22acc81(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__aae2c86912373ceaaa36b78256487a20aa7e68c43189a51adeac933cd262dedd(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__c7e76463f762bfd37dc8dce3c48366a7ea6a8dafc988f15b6306807447775013(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__e2c7447f1525c6b02c7457f8717ef9fc40e05111d134075f068efa95570b3212(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__c68f0eef93578d773a89480e4073c5ce15df9e25632ee64668973e7823f62121(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskDiskEncryptionKey],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__1d377c1961de674bfd33bd0dbee81c1da082261b264a8e2d74f154714cd18c95(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__48cfa10fafb18c7ad72549d13501116b4b0fd7e011f3ddd922b4de357ad22c81(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__c9d234e18cb4f8ff621af580ed2cc41333efb6d1e6bffdcf80d80e08e9a66d4d(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__709dcb879e0900eb3771c8ff41b914de96dddc47ceda9601776a9aae26de5a31(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__3e41aba5a35092d87620d8d9a3e912405dcc5a54d9e5bf178a74abe3c2f0a0a9(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__931c8da5d1986c171ebd6ebb826de308b033fb7641e7914d3dfe42296776ea37(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__2e929ce5732320e943d22d8db254b6de76e9e9e8b98db83d03b26dcf453771fe(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDisk],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__5662c2647a1ecc2a85f5a10ccb254ced87db34d8c252a8e5817d05e35755ef0d(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__98bb11c74508bbd44d96b67ebc422d9010898b5ea4d9a755bbce4296dd307467(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__d25507bce96f9fc52bc537d5fce4eba14bfdbeb71860e911e0d8372de7c5d64b(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__586d8b813888d952cce1092f775af326badd90a79b583b37e02fec91f34b7ef0(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__bcecdd405063903a2b05edf089aa4f15c626b2eb0b09e49b291ad4369926bc78(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__0ecf3cfe37d89d7ca4758063253ad9a4de35838af5a9ae46b34eba74493fea6a(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__fdaefa1f92d5ac98ee5a90b086f6847c31842d8e459836329886f0fad793f976(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceImageEncryptionKey],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__1846f9f09808f12ad195907a0d41480dac567521e5844ca96a085b590d0f8879(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__df9b4d9b7b8b9110698644d867f08cb211888d986f5c17863e5a677b6e957d77(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__4980f66b48566aff282a591fb5b5ffcfae87a690b018d57079aee4d030e06f98(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__3d9333419fc21a545c98f696fd86d3ba028c1e3c689c0c2fe423d74033cab76e(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__c60b478bccd6e47413c67acd602fccd46844c87822d4fb79cb4e1b10034293dd(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__7287c1a6abc6a12964e0f3dd4c1df927f9c3a2fb9c5fe8efe8d4b442d54dd2ad(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__3d9812be2a1231f8aa42037c5a85aaa14730757178a78767057d2b60f95dd94a(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateDiskSourceSnapshotEncryptionKey],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__fdabeb0f74505f96f669cd42ce62cc7c5d0577e0339b71169be0f180dddfdc41(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__128719fd656324b78d5e822e0dd90f0d70e9fdd32f8f9684d5e1504acacb07cf(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__aa4e521c7eab22475668c4550efad71d4ddec0364e99bdc06d01062f78ca620f(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__e8ede773b18e86993b8b8cdd493dde7d6797a7fa4fc2561f475aaf186c0a392e(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__289390970e507815e670a56da9c385213b18f7147a49f7c181d4ce825e20f7da(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__437c5fdd80a2f70c5e4f8abaa409d6ac2669d848f78698e2a3bf6b247441be99(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__45ea3ee5f039de3112a4d2aca107fea0db54e71c2c19bcd1a6c3361d6f58bb1f(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateGuestAccelerator],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__222b7dd8c76aa0b7527e6a74b16d0728df4117f78eef66f01fa1b3b236b7ee68(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__0afe3380928a882219901b0d62057a48574b35022becefaf11540c5a65a70d2e(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__1a300cd75f8ebebb361eaa839eb81aab8118d4a0a69ffbaa5fd85b53f0bec040(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__77bb8fccee7182547a5903c16ed5d1a951ab55be5f82aa90f2670b043d1699bd(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__442b9c3df29b0fa859f24afe4d7cb89e7126c88daea922396475dc7cbf33b3b2(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__20336661d03ce316794bc6131d925789924fb012ba3c5164e0a664f4268bb1b9(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__c1a371706f9eff4186ff4ebfd4abcbde3c40186a02176b3d931a317e6d8ea447(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAccessConfig],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__616817ba84588cd47b6c785adda0b15790464aa85f2ba49e393647bb2df9bf26(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__ba47853d256ca8899c84ddfddc47cfd08bd73af2eb47edd6c234dbbdba476243(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__59578361ddedecb318c4a05893e50d64cb3117ecb2f3c18914165cc2a7755504(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__2506faf45cfe3d0cc15aab0da6362b1076fa768f7e8fadbd3539524f07b32085(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__ebcd564b75021034756f50fbd05100fa42bfbc2063005042396eb6e06b29a588(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__78ea2e21936eb98d8fe50bf61bdc81e2edad89d5271c9734d659be0465a06aa6(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__0d7ad95835fe1f206be1db28f51421d4ce0e99f03f0177da3b3c142257a2c5bd(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceAliasIpRange],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__ddf014ee458a02642b363eefc9dd6b49f1d7bfd72ca950d643c166405dcbbf93(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__ebe5166037b1d63466ff137fb457e197dbf50ca48f1fae53b28a13cdf70321ab(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__599f39215f7414bc06f1a91fa5866cd5a0cfa75f91eef15f334008c9de71f2be(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__5a2f9dba55b9ad094efafbfd1896790c95c75c7423c2f4a9db68aebb13e0eb80(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__613d934ce31f561bd63cedce929534b0938d20014baa9296decb088a20578e8f(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__f001c976a563b66cb9396924c188db793ed20e777610bf5ee9986d2e05f5d0b6(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__661ea64fdf39a2f24ecbd3c3ad3d483298fcf303001c13602010b5598b032d4d(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterfaceIpv6AccessConfig],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__16f5e3201a577edd2bc8a332fee232a267b586fddd50f79438a1c2068bebde4f(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__894eda52be1e9cafed5924f885a7e70f6fdf370d145c64acb37abcbbe10ef119(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__9c70e69646bdcd74d51e726f1ffdcd69bf3713da0c52b31c0ddf26160ad656b0(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__e9d89f7de8a2b949cec337841aff194e9ec35db5d03703ddd8d6f8efa2310dfa(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__58703a4312d7b8baa109bf4c2f5251dcb2ae9420a2f5fac76ac92fe7831d69ef(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__4b8a2724281380348a44ab189e92e0b083c50bd17d673c7c263da24397642ae4(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__aa9ac5d53f38c069b15fccd416b41fcb4148e27cc18335e0a39a70aa261a9fcd(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkInterface],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__8c439964d61a0f1f572a1395ad4cae216eb2aa47ac3b2cc83f0ace7049c34462(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__72a230ac2f3dc50b6c23b6c9058735dfb0dc5b5f892eec34c69aba3c66408fe8(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__613f3405244717eb160fd08b03c1f73bc5d2ad62495e2d2c7a851a80dc7987bc(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__3d08e0d2276a58953c750576377a6dd5333eadd311cc2fd91b41c9c5961ff165(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__96f0b778f0f7b2d47ccd688ac3ee10c27c8cae3e4645c4e3a3461f9b71b65cf6(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__65ffdc08082cc7084cb279418e176b1bcda049cc89d77d2a479f10dc881066a4(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    complex_object_index: jsii.Number,
    complex_object_is_from_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__1b6f4d0ea37a34cbf9c428e54ce3c7a2b86081366dd7547798875b5e076071f7(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateNetworkPerformanceConfig],
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__463cbfb7e4e5de1d887bf9951013671b9da75d5c6d3f50ed44c119fe0d0594f0(
    terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
    terraform_attribute: builtins.str,
    wraps_set: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__c19985995c174e15a77a560850f511eb78596749ca7cfa2ddac2d09fb2f2698a(
    index: jsii.Number,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__f056e49ef502c3a09e38b2a8c378294288002f76fdc525cb5d6c0c62a8978c5a(
    value: builtins.str,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__dc6edb0ae62ff6de12c585d69a544c228770163628bdf4df60e5b32c97ba4895(
    value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__b81d59310856fbd48a15aacda1b59a1050978a0bab0e725f5947e51f23ac0d64(
    value: builtins.bool,
) -> None:
    """Type checking stubs"""
    pass
def _typecheckingstub__f58d9ee3f30fe0f9547b918913e750cc6ee059fac04fd99808a06d35cd7480ba(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a9e20c4624f38f81710d11bebb1ea182df9050bd73d2f666966b0241099f9482(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinity],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__52b16e49cb5ed0ef5a1dad9bac9078222bb6eea150da90f7d4e7aa52286be721(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__83332ddb1f3ed9c60d6ed18ff7f208a942a79dbf428839cef01be95a764634c7(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__34b4088938efc6d4f99247d9119ae856a3b523a4828f625d3fa9102c24b8aeda(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__db8a41025affee9a14d3dd043d5609202db046ab93187dfbde0ce745cb3485c8(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f8651206dd660245f2585cc8c296ad6ee01073541662d6e3efac063f6a6e2e61(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__9c8243ef74fbb77f138c78c7b82f6b7a5dffcc10763cdcc343520c2589a9fee3(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5c08328a137fccc2fa2c129ca332d1d1c466bce5c430aef958e617d5bfa853b7(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateReservationAffinitySpecificReservation],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__97854e26560ed7de0e468d2656da7b58173246f2c87980e8f1ec76664552123b(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__59453c5fbc54343e700f2c2055d8202c694039a0bee6a94ba986e43bd9c8e8ad(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f7c663d88d9d3232320f5a238e8e1486097bd6cec58b8ed915d7e9fd1c11a210(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a35ed9c52c2d074565a134e4f89787e0da8009a57b0033dc42d564ce6715bd74(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ab7953552333e4244744275ae293b1961c398217d6cc6d67951fdd1257f2ce07(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__12d3752dc22d08545adf3bfa1554130f9e888b38b77f93ab7ac2d4d68288b080(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__41dec3e337d4750e5f24046b1eb98e44e5e105ca4667bf01429c9f0c06e57c9d(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__38192640f78fe860d6d6e83ac873572b17ad65769e4b0a7f60869c7f1fb76de5(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3698216655e739db4184dc36214c3562a45605ac5bc6c7b26c450048c89e37dd(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a49afada5445c4009b9b533d6555fae793543916d608506aeeec5faf3ec55fb7(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5ddee32d9b7afe1b580592640e74a805331afd2e601e1553275f9256a9fcdc8b(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__740dce54ccc9bf5c5c6e6da6591730f81ed7734c3c711df49e7d14161b6acc36(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingLocalSsdRecoveryTimeout],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__555004bb1acee97be3e541feaa15afb80d66b19fba1fb032b4815f30bc7e6127(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__90e3e98efaaa1bb3252fd368450f2bc63bcab2c2e1c73babc8832f576747f35e(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__de95472714a52803518c04f4a4796510662679ca9e3c78817a04d571ff07527d(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1e2b2d100d94f8bdedd834765112ed7863a47e662e2891875c46ed6324efd71d(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b8058360755a10a337efb3a221d1f0e49c3d6aa84afc558e9c6f7fd1509c96ce(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8cecb66c8f476178ce380e9b3cd50263fa85b0c07748bbd51938572e9989a05e(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c115b3bfb24cd7dd4b96dc7a5732361410018743c22dd8ee3e331dea4a5d1479(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingMaxRunDuration],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__247ac4293e58493ce0645c79758c49ad1cb50d5bf1b070590455ecd4df8a3ff0(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d9af4bf260d9ebe928543291942eeadcb357a264b42c2e312a8a344185427d26(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3a5747ca70f748c78d86ba23a51c1be208d39bb3e15804e39a3a08df86d7cc59(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7ef977d6e884a47584ff7087000c0b67b6138a7680968539c1a3ef1c28ad5587(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__62edb1797edd436ba5b18b49f09ea9147d39388bbc83299564cec337839d273c(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__03ec9069b83aebbbc5dc32c29059c7ce9b04e902c096c3d5e5ff6552e2bbb293(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4dcdabf07c6907405f38a68515ed61eb98e5186f1a73cba026213adbe1a4f6fb(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateSchedulingNodeAffinities],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d565a5e2929136ca4d76c4d019158ae011b75dc19c260abce73f73bfbf39d13f(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__917db9c6762fa851274946c828c3b208c5230a25e9a21e86e02d1618c0328fe3(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateScheduling],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__66f3bbd6bea1934eeee1275e8a20f55a578cba14b13fbe4cc6b408a2cb92b8bd(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5d7c5aadf783d1833dce4e17fdf7eed27bf216c5a36a6611b216ca95a212711b(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0e200d32968b8ad9bfd642234154bc1a0e6a8263efffb7ad382d3b6d46dc5e34(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cfcbd63a5bd08102d341260e73345fc00f10c59d0f8a4bb45d6c0f85f3516e15(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__98aaf1660283a3073dd845933576ae8ce083579a52e9524785c9d8d432f90a9b(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4b7ec21479681c6c3d2c3c896a702b978641503879cc0372ef2612a7bf57ad57(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7269df35f44820a3113aba0b2f9b93248bd04b5620c8c812ae9ab952582c3a5a(
value: typing.Optional[DataGoogleComputeRegionInstanceTemplateServiceAccount],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c373f1ac2909071c31a14981a140fbe8c453b251a4f578621affc63dacbc62b1(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__dd3f55f94ba428af17b730ad9efdd82722a44893de6e9d75a8df092c6f9c331e(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__565408d8e00aaa0a53f1b9490f6b08ff75efdba59cb5bcdb19f9cd49962cce7b(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__14bc62f1ccf458e64ec9ca1df408fac32b0dab4ca98aa8972c0e9b03cc314374(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8bcdeb4c2aa455d399da8f1085f83bdd39d9de8145fa1e461e67f74cdc65033b(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__160d45965290ff6cd69c902137b6ac8cba85dd6af4ba8a9c44924a32630a6003(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5015cd4a05b0b1e876282de1b58758ef00a6251a457ac00bcdbf4508bbe24260(
    value: typing.Optional[DataGoogleComputeRegionInstanceTemplateShieldedInstanceConfig],
) -> None:
    """Type checking stubs"""
    # Fix: removed stray "| PypiClean" text that had been fused onto this line
    # (dataset-extraction artifact); it made the module unparseable.
    pass
/nni_yds-0.3.7-py3-none-any.whl/nni_yds-0.3.7.data/data/nni/node_modules/isemail/lib/isemail.js | var Dns = require('dns');
// Shared constants: severity thresholds, diagnosis categories/codes, and
// parser component/context identifiers used by the state machine below.
var internals = {
    defaultThreshold: 16,
    maxIPv6Groups: 8,
    // Each category value is the inclusive UPPER bound of its band of
    // diagnosis codes (1, 7, 15, 31, 63, 127, 255 - one less than a power
    // of two), so a diagnosis can be mapped to a category by comparison.
    categories: {
        valid: 1,
        dnsWarn: 7,
        rfc5321: 15,
        cfws: 31,
        deprecated: 63,
        rfc5322: 127,
        error: 255
    },
    // Diagnosis codes, ordered by increasing severity; each group falls
    // inside the matching category band above.
    diagnoses: {
        // Address is valid
        valid: 0,
        // Address is valid, but the DNS check failed
        dnsWarnNoMXRecord: 5,
        dnsWarnNoRecord: 6,
        // Address is valid for SMTP but has unusual elements
        rfc5321TLD: 9,
        rfc5321TLDNumeric: 10,
        rfc5321QuotedString: 11,
        rfc5321AddressLiteral: 12,
        // Address is valid for message, but must be modified for envelope
        cfwsComment: 17,
        cfwsFWS: 18,
        // Address contains deprecated elements, but may still be valid in some contexts
        deprecatedLocalPart: 33,
        deprecatedFWS: 34,
        deprecatedQTEXT: 35,
        deprecatedQP: 36,
        deprecatedComment: 37,
        deprecatedCTEXT: 38,
        deprecatedIPv6: 39,
        deprecatedCFWSNearAt: 49,
        // Address is only valid according to broad definition in RFC 5322, but is otherwise invalid
        rfc5322Domain: 65,
        rfc5322TooLong: 66,
        rfc5322LocalTooLong: 67,
        rfc5322DomainTooLong: 68,
        rfc5322LabelTooLong: 69,
        rfc5322DomainLiteral: 70,
        rfc5322DomainLiteralOBSDText: 71,
        rfc5322IPv6GroupCount: 72,
        rfc5322IPv62x2xColon: 73,
        rfc5322IPv6BadCharacter: 74,
        rfc5322IPv6MaxGroups: 75,
        rfc5322IPv6ColonStart: 76,
        rfc5322IPv6ColonEnd: 77,
        // Address is invalid for any purpose
        errExpectingDTEXT: 129,
        errNoLocalPart: 130,
        errNoDomain: 131,
        errConsecutiveDots: 132,
        errATEXTAfterCFWS: 133,
        errATEXTAfterQS: 134,
        errATEXTAfterDomainLiteral: 135,
        errExpectingQPair: 136,
        errExpectingATEXT: 137,
        errExpectingQTEXT: 138,
        errExpectingCTEXT: 139,
        errBackslashEnd: 140,
        errDotStart: 141,
        errDotEnd: 142,
        errDomainHyphenStart: 143,
        errDomainHyphenEnd: 144,
        errUnclosedQuotedString: 145,
        errUnclosedComment: 146,
        errUnclosedDomainLiteral: 147,
        errFWSCRLFx2: 148,
        errFWSCRLFEnd: 149,
        errCRNoLF: 150,
        errUnknownTLD: 160,
        errDomainTooShort: 161
    },
    // Parser states: which part of the address (or which CFWS context) the
    // state machine in isEmail() is currently inside.
    components: {
        localpart: 0,
        domain: 1,
        literal: 2,
        contextComment: 3,
        contextFWS: 4,
        contextQuotedString: 5,
        contextQuotedPair: 6
    }
};
// $lab:coverage:off$
// Defer a callback to the next turn of the event loop: use process.nextTick
// when running under Node, otherwise fall back to a zero-delay setTimeout
// (e.g. in browsers). Excluded from coverage because only one branch can
// ever execute in a given environment.
internals.defer = typeof process !== 'undefined' && process && typeof process.nextTick === 'function' ?
    process.nextTick.bind(process) :
    function (callback) {
        return setTimeout(callback, 0);
    };
// $lab:coverage:on$
// US-ASCII visible characters not valid for atext
// (http://tools.ietf.org/html/rfc5322#section-3.2.3)
// Note the "\\" escape: the actual character set is ()<>[]:;@\,."
var SPECIALS = '()<>[]:;@\\,."';
// A silly little optimized function generator.
// Builds an O(1) membership test over char codes 0x00-0xFF for the given
// character set: returns a function (code) -> boolean backed by a
// precomputed lookup table.
var optimizeLookup = function optimizeLookup (string) {
    var lookup = new Array(0x100);
    // Default every possible octet to "not a member".
    for (var i = 0xff; i >= 0; --i) {
        lookup[i] = false;
    }
    // Mark the characters of `string` as members.
    // Fix: restart the loop counter at 0. Previously this loop reused `i`,
    // which the loop above left at -1, so the first iteration called
    // string.charCodeAt(-1) (NaN) and set a stray "NaN" property on the
    // table. It happened to be harmless for integer code lookups, but it
    // relied on that accident.
    for (var j = 0, il = string.length; j < il; ++j) {
        lookup[string.charCodeAt(j)] = true;
    }
    // Generate a specialized function so each call is a bare array access.
    var body = 'return function (code) {\n';
    body += '    return lookup[code];\n';
    body += '}';
    return (new Function('lookup', body))(lookup);
};
// Precomputed membership test for SPECIALS, keyed by char code.
var specialsLookup = optimizeLookup(SPECIALS);
// This matches valid IPv4 addresses from the end of a string
var IPv4_REGEX =
    /\b(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)$/;
// Matches a single IPv6 group: zero to four hex digits (empty groups occur
// around a "::" elision when splitting on ":").
var IPv6_REGEX = /^[a-fA-F\d]{0,4}$/;
// Bound test() so it can be passed directly as an Array#every callback.
var IPv6_REGEX_TEST = IPv6_REGEX.test.bind(IPv6_REGEX);
// NOTE(review): not referenced in this chunk; presumably used later in the
// file - verify before removing.
var hasOwn = Object.prototype.hasOwnProperty;
/**
* Check that an email address conforms to RFCs 5321, 5322 and others
*
* We distinguish clearly between a Mailbox as defined by RFC 5321 and an
* addr-spec as defined by RFC 5322. Depending on the context, either can be
* regarded as a valid email address. The RFC 5321 Mailbox specification is
* more restrictive (comments, white space and obsolete forms are not allowed).
*
* @param {string} email The email address to check.
* @param {Object} options The (optional) options:
* {boolean} checkDNS If true then will check DNS for MX records. If
* true this call to isEmail _will_ be asynchronous.
* {*} errorLevel Determines the boundary between valid and invalid
* addresses. Status codes above this number will be returned as-is, status
* codes below will be returned as valid. Thus the calling program can
* simply look for diagnoses.valid if it is only interested in whether an
* address is valid or not. The errorLevel will determine how "picky"
* isEmail() is about the address. If omitted or passed as false then
* isEmail() will return true or false rather than an integer error or
* warning. NB Note the difference between errorLevel = false and
* errorLevel = 0.
* @param {function(number|boolean)} callback The (optional) callback handler.
* @return {*}
*/
var isEmail = function isEmail (email, options, callback) {
if (typeof options === 'function') {
callback = options;
options = {};
}
if (!options) {
options = {};
}
if (typeof callback !== 'function') {
if (options.checkDNS) {
throw new TypeError('expected callback function for checkDNS option');
}
callback = null;
}
var diagnose;
var threshold;
if (typeof options.errorLevel === 'number') {
diagnose = true;
threshold = options.errorLevel;
}
else {
diagnose = !!options.errorLevel;
threshold = internals.diagnoses.valid;
}
if (options.tldWhitelist) {
if (typeof options.tldWhitelist === 'string') {
options.tldWhitelist = [options.tldWhitelist];
} else if (typeof options.tldWhitelist !== 'object') {
throw new TypeError('expected array or object tldWhitelist');
}
}
if (options.minDomainAtoms && (options.minDomainAtoms !== ((+options.minDomainAtoms) | 0) || options.minDomainAtoms < 0)) {
throw new TypeError('expected positive integer minDomainAtoms');
}
var maxResult = internals.diagnoses.valid;
var updateResult = function updateResult (value) {
if (value > maxResult) {
maxResult = value;
}
};
var context = {
now: internals.components.localpart,
prev: internals.components.localpart,
stack: [internals.components.localpart]
};
var token;
var prevToken = '';
var charCode = 0;
var parseData = {
local: '',
domain: ''
};
var atomData = {
locals: [''],
domains: ['']
};
var elementCount = 0;
var elementLength = 0;
var crlfCount = 0;
var hyphenFlag = false;
var assertEnd = false;
var emailLength = email.length;
for (var i = 0, il = emailLength; i < il; ++i) {
// Token is used outside the loop, must declare similarly
token = email[i];
switch (context.now) {
// Local-part
case internals.components.localpart:
// http://tools.ietf.org/html/rfc5322#section-3.4.1
// local-part = dot-atom / quoted-string / obs-local-part
//
// dot-atom = [CFWS] dot-atom-text [CFWS]
//
// dot-atom-text = 1*atext *("." 1*atext)
//
// quoted-string = [CFWS]
// DQUOTE *([FWS] qcontent) [FWS] DQUOTE
// [CFWS]
//
// obs-local-part = word *("." word)
//
// word = atom / quoted-string
//
// atom = [CFWS] 1*atext [CFWS]
switch (token) {
// Comment
case '(':
if (elementLength === 0) {
// Comments are OK at the beginning of an element
updateResult(elementCount === 0 ? internals.diagnoses.cfwsComment : internals.diagnoses.deprecatedComment);
}
else {
updateResult(internals.diagnoses.cfwsComment);
// Cannot start a comment in an element, should be end
assertEnd = true;
}
context.stack.push(context.now);
context.now = internals.components.contextComment;
break;
// Next dot-atom element
case '.':
if (elementLength === 0) {
// Another dot, already?
updateResult(elementCount === 0 ? internals.diagnoses.errDotStart : internals.diagnoses.errConsecutiveDots);
}
else {
// The entire local-part can be a quoted string for RFC 5321; if one atom is quoted it's an RFC 5322 obsolete form
if (assertEnd) {
updateResult(internals.diagnoses.deprecatedLocalPart);
}
// CFWS & quoted strings are OK again now we're at the beginning of an element (although they are obsolete forms)
assertEnd = false;
elementLength = 0;
++elementCount;
parseData.local += token;
atomData.locals[elementCount] = '';
}
break;
// Quoted string
case '"':
if (elementLength === 0) {
// The entire local-part can be a quoted string for RFC 5321; if one atom is quoted it's an RFC 5322 obsolete form
updateResult(elementCount === 0 ? internals.diagnoses.rfc5321QuotedString : internals.diagnoses.deprecatedLocalPart);
parseData.local += token;
atomData.locals[elementCount] += token;
++elementLength;
// Quoted string must be the entire element
assertEnd = true;
context.stack.push(context.now);
context.now = internals.components.contextQuotedString;
}
else {
updateResult(internals.diagnoses.errExpectingATEXT);
}
break;
// Folding white space
case '\r':
if (emailLength === ++i || email[i] !== '\n') {
// Fatal error
updateResult(internals.diagnoses.errCRNoLF);
break;
}
// Fallthrough
case ' ':
case '\t':
if (elementLength === 0) {
updateResult(elementCount === 0 ? internals.diagnoses.cfwsFWS : internals.diagnoses.deprecatedFWS);
}
else {
// We can't start FWS in the middle of an element, better be end
assertEnd = true;
}
context.stack.push(context.now);
context.now = internals.components.contextFWS;
prevToken = token;
break;
case '@':
// At this point we should have a valid local-part
// $lab:coverage:off$
if (context.stack.length !== 1) {
throw new Error('unexpected item on context stack');
}
// $lab:coverage:on$
if (parseData.local.length === 0) {
// Fatal error
updateResult(internals.diagnoses.errNoLocalPart);
}
else if (elementLength === 0) {
// Fatal error
updateResult(internals.diagnoses.errDotEnd);
}
// http://tools.ietf.org/html/rfc5321#section-4.5.3.1.1 the maximum total length of a user name or other local-part is 64
// octets
else if (parseData.local.length > 64) {
updateResult(internals.diagnoses.rfc5322LocalTooLong);
}
// http://tools.ietf.org/html/rfc5322#section-3.4.1 comments and folding white space SHOULD NOT be used around "@" in the
// addr-spec
//
// http://tools.ietf.org/html/rfc2119
// 4. SHOULD NOT this phrase, or the phrase "NOT RECOMMENDED" mean that there may exist valid reasons in particular
// circumstances when the particular behavior is acceptable or even useful, but the full implications should be understood
// and the case carefully weighed before implementing any behavior described with this label.
else if (context.prev === internals.components.contextComment || context.prev === internals.components.contextFWS) {
updateResult(internals.diagnoses.deprecatedCFWSNearAt);
}
// Clear everything down for the domain parsing
context.now = internals.components.domain;
context.stack[0] = internals.components.domain;
elementCount = 0;
elementLength = 0;
assertEnd = false; // CFWS can only appear at the end of the element
break;
// ATEXT
default:
// http://tools.ietf.org/html/rfc5322#section-3.2.3
// atext = ALPHA / DIGIT / ; Printable US-ASCII
// "!" / "#" / ; characters not including
// "$" / "%" / ; specials. Used for atoms.
// "&" / "'" /
// "*" / "+" /
// "-" / "/" /
// "=" / "?" /
// "^" / "_" /
// "`" / "{" /
// "|" / "}" /
// "~"
if (assertEnd) {
// We have encountered atext where it is no longer valid
switch (context.prev) {
case internals.components.contextComment:
case internals.components.contextFWS:
updateResult(internals.diagnoses.errATEXTAfterCFWS);
break;
case internals.components.contextQuotedString:
updateResult(internals.diagnoses.errATEXTAfterQS);
break;
// $lab:coverage:off$
default:
throw new Error('more atext found where none is allowed, but unrecognized prev context: ' + context.prev);
// $lab:coverage:on$
}
}
else {
context.prev = context.now;
charCode = token.charCodeAt(0);
// Especially if charCode == 10
if (charCode < 33 || charCode > 126 || specialsLookup(charCode)) {
// Fatal error
updateResult(internals.diagnoses.errExpectingATEXT);
}
parseData.local += token;
atomData.locals[elementCount] += token;
++elementLength;
}
}
break;
case internals.components.domain:
// http://tools.ietf.org/html/rfc5322#section-3.4.1
// domain = dot-atom / domain-literal / obs-domain
//
// dot-atom = [CFWS] dot-atom-text [CFWS]
//
// dot-atom-text = 1*atext *("." 1*atext)
//
// domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
//
// dtext = %d33-90 / ; Printable US-ASCII
// %d94-126 / ; characters not including
// obs-dtext ; "[", "]", or "\"
//
// obs-domain = atom *("." atom)
//
// atom = [CFWS] 1*atext [CFWS]
// http://tools.ietf.org/html/rfc5321#section-4.1.2
// Mailbox = Local-part "@" ( Domain / address-literal )
//
// Domain = sub-domain *("." sub-domain)
//
// address-literal = "[" ( IPv4-address-literal /
// IPv6-address-literal /
// General-address-literal ) "]"
// ; See Section 4.1.3
// http://tools.ietf.org/html/rfc5322#section-3.4.1
// Note: A liberal syntax for the domain portion of addr-spec is
// given here. However, the domain portion contains addressing
// information specified by and used in other protocols (e.g.,
// [RFC1034], [RFC1035], [RFC1123], [RFC5321]). It is therefore
// incumbent upon implementations to conform to the syntax of
// addresses for the context in which they are used.
//
// is_email() author's note: it's not clear how to interpret this in
// the context of a general email address validator. The conclusion I
// have reached is this: "addressing information" must comply with
// RFC 5321 (and in turn RFC 1035), anything that is "semantically
// invisible" must comply only with RFC 5322.
switch (token) {
// Comment
case '(':
if (elementLength === 0) {
// Comments at the start of the domain are deprecated in the text, comments at the start of a subdomain are obs-domain
// http://tools.ietf.org/html/rfc5322#section-3.4.1
updateResult(elementCount === 0 ? internals.diagnoses.deprecatedCFWSNearAt : internals.diagnoses.deprecatedComment);
}
else {
// We can't start a comment mid-element, better be at the end
assertEnd = true;
updateResult(internals.diagnoses.cfwsComment);
}
context.stack.push(context.now);
context.now = internals.components.contextComment;
break;
// Next dot-atom element
case '.':
if (elementLength === 0) {
// Another dot, already? Fatal error.
updateResult(elementCount === 0 ? internals.diagnoses.errDotStart : internals.diagnoses.errConsecutiveDots);
}
else if (hyphenFlag) {
// Previous subdomain ended in a hyphen. Fatal error.
updateResult(internals.diagnoses.errDomainHyphenEnd);
}
else if (elementLength > 63) {
// Nowhere in RFC 5321 does it say explicitly that the domain part of a Mailbox must be a valid domain according to the
// DNS standards set out in RFC 1035, but this *is* implied in several places. For instance, wherever the idea of host
// routing is discussed the RFC says that the domain must be looked up in the DNS. This would be nonsense unless the
// domain was designed to be a valid DNS domain. Hence we must conclude that the RFC 1035 restriction on label length
// also applies to RFC 5321 domains.
//
// http://tools.ietf.org/html/rfc1035#section-2.3.4
// labels 63 octets or less
updateResult(internals.diagnoses.rfc5322LabelTooLong);
}
// CFWS is OK again now we're at the beginning of an element (although
// it may be obsolete CFWS)
assertEnd = false;
elementLength = 0;
++elementCount;
atomData.domains[elementCount] = '';
parseData.domain += token;
break;
// Domain literal
case '[':
if (parseData.domain.length === 0) {
// Domain literal must be the only component
assertEnd = true;
++elementLength;
context.stack.push(context.now);
context.now = internals.components.literal;
parseData.domain += token;
atomData.domains[elementCount] += token;
parseData.literal = '';
}
else {
// Fatal error
updateResult(internals.diagnoses.errExpectingATEXT);
}
break;
// Folding white space
case '\r':
if (emailLength === ++i || email[i] !== '\n') {
// Fatal error
updateResult(internals.diagnoses.errCRNoLF);
break;
}
// Fallthrough
case ' ':
case '\t':
if (elementLength === 0) {
updateResult(elementCount === 0 ? internals.diagnoses.deprecatedCFWSNearAt : internals.diagnoses.deprecatedFWS);
}
else {
// We can't start FWS in the middle of an element, so this better be the end
updateResult(internals.diagnoses.cfwsFWS);
assertEnd = true;
}
context.stack.push(context.now);
context.now = internals.components.contextFWS;
prevToken = token;
break;
// This must be ATEXT
default:
// RFC 5322 allows any atext...
// http://tools.ietf.org/html/rfc5322#section-3.2.3
// atext = ALPHA / DIGIT / ; Printable US-ASCII
// "!" / "#" / ; characters not including
// "$" / "%" / ; specials. Used for atoms.
// "&" / "'" /
// "*" / "+" /
// "-" / "/" /
// "=" / "?" /
// "^" / "_" /
// "`" / "{" /
// "|" / "}" /
// "~"
// But RFC 5321 only allows letter-digit-hyphen to comply with DNS rules
// (RFCs 1034 & 1123)
// http://tools.ietf.org/html/rfc5321#section-4.1.2
// sub-domain = Let-dig [Ldh-str]
//
// Let-dig = ALPHA / DIGIT
//
// Ldh-str = *( ALPHA / DIGIT / "-" ) Let-dig
//
if (assertEnd) {
// We have encountered ATEXT where it is no longer valid
switch (context.prev) {
case internals.components.contextComment:
case internals.components.contextFWS:
updateResult(internals.diagnoses.errATEXTAfterCFWS);
break;
case internals.components.literal:
updateResult(internals.diagnoses.errATEXTAfterDomainLiteral);
break;
// $lab:coverage:off$
default:
throw new Error('more atext found where none is allowed, but unrecognized prev context: ' + context.prev);
// $lab:coverage:on$
}
}
charCode = token.charCodeAt(0);
// Assume this token isn't a hyphen unless we discover it is
hyphenFlag = false;
if (charCode < 33 || charCode > 126 || specialsLookup(charCode)) {
// Fatal error
updateResult(internals.diagnoses.errExpectingATEXT);
}
else if (token === '-') {
if (elementLength === 0) {
// Hyphens cannot be at the beginning of a subdomain, fatal error
updateResult(internals.diagnoses.errDomainHyphenStart);
}
hyphenFlag = true;
}
// Check if it's a neither a number nor a latin letter
else if (charCode < 48 || charCode > 122 || (charCode > 57 && charCode < 65) || (charCode > 90 && charCode < 97)) {
// This is not an RFC 5321 subdomain, but still OK by RFC 5322
updateResult(internals.diagnoses.rfc5322Domain);
}
parseData.domain += token;
atomData.domains[elementCount] += token;
++elementLength;
}
break;
// Domain literal
case internals.components.literal:
// http://tools.ietf.org/html/rfc5322#section-3.4.1
// domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
//
// dtext = %d33-90 / ; Printable US-ASCII
// %d94-126 / ; characters not including
// obs-dtext ; "[", "]", or "\"
//
// obs-dtext = obs-NO-WS-CTL / quoted-pair
switch (token) {
// End of domain literal
case ']':
if (maxResult < internals.categories.deprecated) {
// Could be a valid RFC 5321 address literal, so let's check
// http://tools.ietf.org/html/rfc5321#section-4.1.2
// address-literal = "[" ( IPv4-address-literal /
// IPv6-address-literal /
// General-address-literal ) "]"
// ; See Section 4.1.3
//
// http://tools.ietf.org/html/rfc5321#section-4.1.3
// IPv4-address-literal = Snum 3("." Snum)
//
// IPv6-address-literal = "IPv6:" IPv6-addr
//
// General-address-literal = Standardized-tag ":" 1*dcontent
//
// Standardized-tag = Ldh-str
// ; Standardized-tag MUST be specified in a
// ; Standards-Track RFC and registered with IANA
//
// dcontent = %d33-90 / ; Printable US-ASCII
// %d94-126 ; excl. "[", "\", "]"
//
// Snum = 1*3DIGIT
// ; representing a decimal integer
// ; value in the range 0 through 255
//
// IPv6-addr = IPv6-full / IPv6-comp / IPv6v4-full / IPv6v4-comp
//
// IPv6-hex = 1*4HEXDIG
//
// IPv6-full = IPv6-hex 7(":" IPv6-hex)
//
// IPv6-comp = [IPv6-hex *5(":" IPv6-hex)] "::"
// [IPv6-hex *5(":" IPv6-hex)]
// ; The "::" represents at least 2 16-bit groups of
// ; zeros. No more than 6 groups in addition to the
// ; "::" may be present.
//
// IPv6v4-full = IPv6-hex 5(":" IPv6-hex) ":" IPv4-address-literal
//
// IPv6v4-comp = [IPv6-hex *3(":" IPv6-hex)] "::"
// [IPv6-hex *3(":" IPv6-hex) ":"]
// IPv4-address-literal
// ; The "::" represents at least 2 16-bit groups of
// ; zeros. No more than 4 groups in addition to the
// ; "::" and IPv4-address-literal may be present.
var index = -1;
var addressLiteral = parseData.literal;
var matchesIP = IPv4_REGEX.exec(addressLiteral);
// Maybe extract IPv4 part from the end of the address-literal
if (matchesIP) {
index = matchesIP.index;
if (index !== 0) {
// Convert IPv4 part to IPv6 format for futher testing
addressLiteral = addressLiteral.slice(0, index) + '0:0';
}
}
if (index === 0) {
// Nothing there except a valid IPv4 address, so...
updateResult(internals.diagnoses.rfc5321AddressLiteral);
}
else if (addressLiteral.slice(0, 5).toLowerCase() !== 'ipv6:') {
updateResult(internals.diagnoses.rfc5322DomainLiteral);
}
else {
var match = addressLiteral.slice(5);
var maxGroups = internals.maxIPv6Groups;
var groups = match.split(':');
index = match.indexOf('::');
if (!~index) {
// Need exactly the right number of groups
if (groups.length !== maxGroups) {
updateResult(internals.diagnoses.rfc5322IPv6GroupCount);
}
}
else if (index !== match.lastIndexOf('::')) {
updateResult(internals.diagnoses.rfc5322IPv62x2xColon);
}
else {
if (index === 0 || index === match.length - 2) {
// RFC 4291 allows :: at the start or end of an address with 7 other groups in addition
++maxGroups;
}
if (groups.length > maxGroups) {
updateResult(internals.diagnoses.rfc5322IPv6MaxGroups);
}
else if (groups.length === maxGroups) {
// Eliding a single "::"
updateResult(internals.diagnoses.deprecatedIPv6);
}
}
// IPv6 testing strategy
if (match[0] === ':' && match[1] !== ':') {
updateResult(internals.diagnoses.rfc5322IPv6ColonStart);
}
else if (match[match.length - 1] === ':' && match[match.length - 2] !== ':') {
updateResult(internals.diagnoses.rfc5322IPv6ColonEnd);
}
else if (groups.every(IPv6_REGEX_TEST)) {
updateResult(internals.diagnoses.rfc5321AddressLiteral);
}
else {
updateResult(internals.diagnoses.rfc5322IPv6BadCharacter);
}
}
}
else {
updateResult(internals.diagnoses.rfc5322DomainLiteral);
}
parseData.domain += token;
atomData.domains[elementCount] += token;
++elementLength;
context.prev = context.now;
context.now = context.stack.pop();
break;
case '\\':
updateResult(internals.diagnoses.rfc5322DomainLiteralOBSDText);
context.stack.push(context.now);
context.now = internals.components.contextQuotedPair;
break;
// Folding white space
case '\r':
if (emailLength === ++i || email[i] !== '\n') {
updateResult(internals.diagnoses.errCRNoLF);
break;
}
// Fallthrough
case ' ':
case '\t':
updateResult(internals.diagnoses.cfwsFWS);
context.stack.push(context.now);
context.now = internals.components.contextFWS;
prevToken = token;
break;
// DTEXT
default:
// http://tools.ietf.org/html/rfc5322#section-3.4.1
// dtext = %d33-90 / ; Printable US-ASCII
// %d94-126 / ; characters not including
// obs-dtext ; "[", "]", or "\"
//
// obs-dtext = obs-NO-WS-CTL / quoted-pair
//
// obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
// %d11 / ; characters that do not
// %d12 / ; include the carriage
// %d14-31 / ; return, line feed, and
// %d127 ; white space characters
charCode = token.charCodeAt(0);
// '\r', '\n', ' ', and '\t' have already been parsed above
if (charCode > 127 || charCode === 0 || token === '[') {
// Fatal error
updateResult(internals.diagnoses.errExpectingDTEXT);
break;
}
else if (charCode < 33 || charCode === 127) {
updateResult(internals.diagnoses.rfc5322DomainLiteralOBSDText);
}
parseData.literal += token;
parseData.domain += token;
atomData.domains[elementCount] += token;
++elementLength;
}
break;
// Quoted string
case internals.components.contextQuotedString:
// http://tools.ietf.org/html/rfc5322#section-3.2.4
// quoted-string = [CFWS]
// DQUOTE *([FWS] qcontent) [FWS] DQUOTE
// [CFWS]
//
// qcontent = qtext / quoted-pair
switch (token) {
// Quoted pair
case '\\':
context.stack.push(context.now);
context.now = internals.components.contextQuotedPair;
break;
// Folding white space. Spaces are allowed as regular characters inside a quoted string - it's only FWS if we include '\t' or '\r\n'
case '\r':
if (emailLength === ++i || email[i] !== '\n') {
// Fatal error
updateResult(internals.diagnoses.errCRNoLF);
break;
}
// Fallthrough
case '\t':
// http://tools.ietf.org/html/rfc5322#section-3.2.2
// Runs of FWS, comment, or CFWS that occur between lexical tokens in
// a structured header field are semantically interpreted as a single
// space character.
// http://tools.ietf.org/html/rfc5322#section-3.2.4
// the CRLF in any FWS/CFWS that appears within the quoted-string [is]
// semantically "invisible" and therefore not part of the
// quoted-string
parseData.local += ' ';
atomData.locals[elementCount] += ' ';
++elementLength;
updateResult(internals.diagnoses.cfwsFWS);
context.stack.push(context.now);
context.now = internals.components.contextFWS;
prevToken = token;
break;
// End of quoted string
case '"':
parseData.local += token;
atomData.locals[elementCount] += token;
++elementLength;
context.prev = context.now;
context.now = context.stack.pop();
break;
// QTEXT
default:
// http://tools.ietf.org/html/rfc5322#section-3.2.4
// qtext = %d33 / ; Printable US-ASCII
// %d35-91 / ; characters not including
// %d93-126 / ; "\" or the quote character
// obs-qtext
//
// obs-qtext = obs-NO-WS-CTL
//
// obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
// %d11 / ; characters that do not
// %d12 / ; include the carriage
// %d14-31 / ; return, line feed, and
// %d127 ; white space characters
charCode = token.charCodeAt(0);
if (charCode > 127 || charCode === 0 || charCode === 10) {
updateResult(internals.diagnoses.errExpectingQTEXT);
}
else if (charCode < 32 || charCode === 127) {
updateResult(internals.diagnoses.deprecatedQTEXT);
}
parseData.local += token;
atomData.locals[elementCount] += token;
++elementLength;
}
// http://tools.ietf.org/html/rfc5322#section-3.4.1
// If the string can be represented as a dot-atom (that is, it contains
// no characters other than atext characters or "." surrounded by atext
// characters), then the dot-atom form SHOULD be used and the quoted-
// string form SHOULD NOT be used.
break;
// Quoted pair
case internals.components.contextQuotedPair:
// http://tools.ietf.org/html/rfc5322#section-3.2.1
// quoted-pair = ("\" (VCHAR / WSP)) / obs-qp
//
// VCHAR = %d33-126 ; visible (printing) characters
// WSP = SP / HTAB ; white space
//
// obs-qp = "\" (%d0 / obs-NO-WS-CTL / LF / CR)
//
// obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
// %d11 / ; characters that do not
// %d12 / ; include the carriage
// %d14-31 / ; return, line feed, and
// %d127 ; white space characters
//
// i.e. obs-qp = "\" (%d0-8, %d10-31 / %d127)
charCode = token.charCodeAt(0);
if (charCode > 127) {
// Fatal error
updateResult(internals.diagnoses.errExpectingQPair);
}
else if ((charCode < 31 && charCode !== 9) || charCode === 127) {
// ' ' and '\t' are allowed
updateResult(internals.diagnoses.deprecatedQP);
}
// At this point we know where this qpair occurred so we could check to see if the character actually needed to be quoted at all.
// http://tools.ietf.org/html/rfc5321#section-4.1.2
// the sending system SHOULD transmit the form that uses the minimum quoting possible.
context.prev = context.now;
// End of qpair
context.now = context.stack.pop();
token = '\\' + token;
switch (context.now) {
case internals.components.contextComment:
break;
case internals.components.contextQuotedString:
parseData.local += token;
atomData.locals[elementCount] += token;
// The maximum sizes specified by RFC 5321 are octet counts, so we must include the backslash
elementLength += 2;
break;
case internals.components.literal:
parseData.domain += token;
atomData.domains[elementCount] += token;
// The maximum sizes specified by RFC 5321 are octet counts, so we must include the backslash
elementLength += 2;
break;
// $lab:coverage:off$
default:
throw new Error('quoted pair logic invoked in an invalid context: ' + context.now);
// $lab:coverage:on$
}
break;
// Comment
case internals.components.contextComment:
// http://tools.ietf.org/html/rfc5322#section-3.2.2
// comment = "(" *([FWS] ccontent) [FWS] ")"
//
// ccontent = ctext / quoted-pair / comment
switch (token) {
// Nested comment
case '(':
// Nested comments are ok
context.stack.push(context.now);
context.now = internals.components.contextComment;
break;
// End of comment
case ')':
context.prev = context.now;
context.now = context.stack.pop();
break;
// Quoted pair
case '\\':
context.stack.push(context.now);
context.now = internals.components.contextQuotedPair;
break;
// Folding white space
case '\r':
if (emailLength === ++i || email[i] !== '\n') {
// Fatal error
updateResult(internals.diagnoses.errCRNoLF);
break;
}
// Fallthrough
case ' ':
case '\t':
updateResult(internals.diagnoses.cfwsFWS);
context.stack.push(context.now);
context.now = internals.components.contextFWS;
prevToken = token;
break;
// CTEXT
default:
// http://tools.ietf.org/html/rfc5322#section-3.2.3
// ctext = %d33-39 / ; Printable US-ASCII
// %d42-91 / ; characters not including
// %d93-126 / ; "(", ")", or "\"
// obs-ctext
//
// obs-ctext = obs-NO-WS-CTL
//
// obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
// %d11 / ; characters that do not
// %d12 / ; include the carriage
// %d14-31 / ; return, line feed, and
// %d127 ; white space characters
charCode = token.charCodeAt(0);
if (charCode > 127 || charCode === 0 || charCode === 10) {
// Fatal error
updateResult(internals.diagnoses.errExpectingCTEXT);
break;
}
else if (charCode < 32 || charCode === 127) {
updateResult(internals.diagnoses.deprecatedCTEXT);
}
}
break;
// Folding white space
case internals.components.contextFWS:
// http://tools.ietf.org/html/rfc5322#section-3.2.2
// FWS = ([*WSP CRLF] 1*WSP) / obs-FWS
// ; Folding white space
// But note the erratum:
// http://www.rfc-editor.org/errata_search.php?rfc=5322&eid=1908:
// In the obsolete syntax, any amount of folding white space MAY be
// inserted where the obs-FWS rule is allowed. This creates the
// possibility of having two consecutive "folds" in a line, and
// therefore the possibility that a line which makes up a folded header
// field could be composed entirely of white space.
//
// obs-FWS = 1*([CRLF] WSP)
if (prevToken === '\r') {
if (token === '\r') {
// Fatal error
updateResult(internals.diagnoses.errFWSCRLFx2);
break;
}
if (++crlfCount > 1) {
// Multiple folds => obsolete FWS
updateResult(internals.diagnoses.deprecatedFWS);
}
else {
crlfCount = 1;
}
}
switch (token) {
case '\r':
if (emailLength === ++i || email[i] !== '\n') {
// Fatal error
updateResult(internals.diagnoses.errCRNoLF);
}
break;
case ' ':
case '\t':
break;
default:
if (prevToken === '\r') {
// Fatal error
updateResult(internals.diagnoses.errFWSCRLFEnd);
}
crlfCount = 0;
// End of FWS
context.prev = context.now;
context.now = context.stack.pop();
// Look at this token again in the parent context
--i;
}
prevToken = token;
break;
// Unexpected context
// $lab:coverage:off$
default:
throw new Error('unknown context: ' + context.now);
// $lab:coverage:on$
} // Primary state machine
if (maxResult > internals.categories.rfc5322) {
// Fatal error, no point continuing
break;
}
} // Token loop
// Check for errors
if (maxResult < internals.categories.rfc5322) {
// Fatal errors
if (context.now === internals.components.contextQuotedString) {
updateResult(internals.diagnoses.errUnclosedQuotedString);
}
else if (context.now === internals.components.contextQuotedPair) {
updateResult(internals.diagnoses.errBackslashEnd);
}
else if (context.now === internals.components.contextComment) {
updateResult(internals.diagnoses.errUnclosedComment);
}
else if (context.now === internals.components.literal) {
updateResult(internals.diagnoses.errUnclosedDomainLiteral);
}
else if (token === '\r') {
updateResult(internals.diagnoses.errFWSCRLFEnd);
}
else if (parseData.domain.length === 0) {
updateResult(internals.diagnoses.errNoDomain);
}
else if (elementLength === 0) {
updateResult(internals.diagnoses.errDotEnd);
}
else if (hyphenFlag) {
updateResult(internals.diagnoses.errDomainHyphenEnd);
}
// Other errors
else if (parseData.domain.length > 255) {
// http://tools.ietf.org/html/rfc5321#section-4.5.3.1.2
// The maximum total length of a domain name or number is 255 octets.
updateResult(internals.diagnoses.rfc5322DomainTooLong);
}
else if (parseData.local.length + parseData.domain.length + /* '@' */ 1 > 254) {
// http://tools.ietf.org/html/rfc5321#section-4.1.2
// Forward-path = Path
//
// Path = "<" [ A-d-l ":" ] Mailbox ">"
//
// http://tools.ietf.org/html/rfc5321#section-4.5.3.1.3
// The maximum total length of a reverse-path or forward-path is 256 octets (including the punctuation and element separators).
//
// Thus, even without (obsolete) routing information, the Mailbox can only be 254 characters long. This is confirmed by this verified
// erratum to RFC 3696:
//
// http://www.rfc-editor.org/errata_search.php?rfc=3696&eid=1690
// However, there is a restriction in RFC 2821 on the length of an address in MAIL and RCPT commands of 254 characters. Since
// addresses that do not fit in those fields are not normally useful, the upper limit on address lengths should normally be considered
// to be 254.
updateResult(internals.diagnoses.rfc5322TooLong);
}
else if (elementLength > 63) {
// http://tools.ietf.org/html/rfc1035#section-2.3.4
// labels 63 octets or less
updateResult(internals.diagnoses.rfc5322LabelTooLong);
}
else if (options.minDomainAtoms && atomData.domains.length < options.minDomainAtoms) {
updateResult(internals.diagnoses.errDomainTooShort);
}
else if (options.tldWhitelist) {
var tldAtom = atomData.domains[elementCount];
if (Array.isArray(options.tldWhitelist)) {
var tldValid = false;
for (i = 0, il = options.tldWhitelist.length; i < il; ++i) {
if (tldAtom === options.tldWhitelist[i]) {
tldValid = true;
break;
}
}
if (!tldValid) {
updateResult(internals.diagnoses.errUnknownTLD);
}
}
else if (!hasOwn.call(options.tldWhitelist, tldAtom)) {
updateResult(internals.diagnoses.errUnknownTLD);
}
}
} // Check for errors
var dnsPositive = false;
var finishImmediately = false;
var finish = function finish () {
if (!dnsPositive && maxResult < internals.categories.dnsWarn) {
// Per RFC 5321, domain atoms are limited to letter-digit-hyphen, so we only need to check code <= 57 to check for a digit
var code = atomData.domains[elementCount].charCodeAt(0);
if (code <= 57) {
updateResult(internals.diagnoses.rfc5321TLDNumeric);
}
else if (elementCount === 0) {
updateResult(internals.diagnoses.rfc5321TLD);
}
}
if (maxResult < threshold) {
maxResult = internals.diagnoses.valid;
}
var finishResult = diagnose ? maxResult : maxResult < internals.defaultThreshold;
if (callback) {
if (finishImmediately) {
callback(finishResult);
} else {
internals.defer(callback.bind(null, finishResult));
}
}
return finishResult;
}; // Finish
if (options.checkDNS && maxResult < internals.categories.dnsWarn) {
// http://tools.ietf.org/html/rfc5321#section-2.3.5
// Names that can be resolved to MX RRs or address (i.e., A or AAAA) RRs (as discussed in Section 5) are permitted, as are CNAME RRs whose
// targets can be resolved, in turn, to MX or address RRs.
//
// http://tools.ietf.org/html/rfc5321#section-5.1
// The lookup first attempts to locate an MX record associated with the name. If a CNAME record is found, the resulting name is processed
// as if it were the initial name. ... If an empty list of MXs is returned, the address is treated as if it was associated with an implicit
// MX RR, with a preference of 0, pointing to that host.
//
// isEmail() author's note: We will regard the existence of a CNAME to be sufficient evidence of the domain's existence. For performance
// reasons we will not repeat the DNS lookup for the CNAME's target, but we will raise a warning because we didn't immediately find an MX
// record.
if (elementCount === 0) {
// Checking TLD DNS only works if you explicitly check from the root
parseData.domain += '.';
}
var dnsDomain = parseData.domain;
Dns.resolveMx(dnsDomain, function resolveDNS (err, mxRecords) {
// If we have a fatal error, then we must assume that there are no records
if (err && err.code !== Dns.NODATA) {
updateResult(internals.diagnoses.dnsWarnNoRecord);
return finish();
}
if (mxRecords && mxRecords.length) {
dnsPositive = true;
return finish();
}
var count = 3;
var done = false;
updateResult(internals.diagnoses.dnsWarnNoMXRecord);
var handleRecords = function handleRecords (err, records) {
if (done) {
return;
}
--count;
if (records && records.length) {
done = true;
return finish();
}
if (count === 0) {
// No usable records for the domain can be found
updateResult(internals.diagnoses.dnsWarnNoRecord);
done = true;
finish();
}
};
Dns.resolveCname(dnsDomain, handleRecords);
Dns.resolve4(dnsDomain, handleRecords);
Dns.resolve6(dnsDomain, handleRecords);
});
finishImmediately = true;
}
else {
var result = finish();
finishImmediately = true;
return result;
} // CheckDNS
};
// Expose a shallow copy of the internal diagnosis code table so callers can
// inspect the codes without being able to mutate the internals object.
isEmail.diagnoses = (function exportDiagnoses() {

    var copy = {};
    var names = Object.keys(internals.diagnoses);
    for (var i = 0; i < names.length; ++i) {
        copy[names[i]] = internals.diagnoses[names[i]];
    }

    return copy;
})();

module.exports = isEmail;
/hipims_io-0.6.2.tar.gz/hipims_io-0.6.2/hipims_io/Landcover.py | import numpy as np
from hydro_raster.Raster import Raster
from . import indep_functions as indep_f
class Landcover:
    """Landcover data aligned to a model grid, used to derive grid parameters.

    Attributes:
        mask_header: dict describing the georeference of the mask grid.
        mask_dict: dict with keys 'value' and 'index' giving landcover type
            values and their cell indices, respectively.
        subs_in: indices of valid (non-NaN) DEM cells; only set when a DEM
            raster is supplied.
    """

    def __init__(self, ras_data, dem_ras=None):
        """Create a Landcover object.

        Args:
            ras_data: filename of a landcover raster, or a Raster-like object
                exposing a ``header`` attribute.
            dem_ras: optional DEM Raster; when given, the landcover data is
                resampled onto the DEM grid so shapes match.

        Raises:
            TypeError: if ras_data is neither a filename nor a Raster object.
        """
        if isinstance(ras_data, str):
            obj_landcover = Raster(ras_data)
        elif hasattr(ras_data, 'header'):
            obj_landcover = ras_data
        else:
            # Previously this fell through and raised an obscure NameError on
            # the undefined local obj_landcover; fail fast with a clear message.
            raise TypeError('ras_data must be a filename string or a Raster '
                            'object with a header attribute')
        if hasattr(dem_ras, 'header'):
            # Resample the landcover data onto the DEM grid.
            self.mask_dict = indep_f._mask2dict(obj_landcover, dem_ras.header)
            self.mask_header = dem_ras.header
            self.subs_in = np.where(~np.isnan(dem_ras.array))
        else:
            self.mask_dict = indep_f._mask2dict(obj_landcover)
            self.mask_header = obj_landcover.header

    def get_mask_array(self):
        """Return the landcover mask as a 2D array shaped like the grid."""
        array_shape = (self.mask_header['nrows'], self.mask_header['ncols'])
        mask_array = indep_f._dict2grid(self.mask_dict, array_shape)
        return mask_array

    def to_grid_parameter(self, param_value, land_value, default_value=0):
        """Map landcover classes to parameter values on the grid.

        Args:
            param_value: scalar, or a list of scalars (one per entry of
                land_value).
            land_value: landcover id(s) matching param_value: scalar, list of
                scalars, or list of lists.
            default_value: value assigned to cells whose landcover id is not
                listed in land_value.

        Returns:
            numpy array of parameter values, same shape as the mask grid.
        """
        mask_array = self.get_mask_array()  # landcover class id per cell
        param_array = mask_array * 0 + default_value
        if isinstance(param_value, list):
            for one_value, one_ids in zip(param_value, land_value):
                ind = np.isin(mask_array, one_ids)
                param_array[ind] = one_value
        else:
            ind = np.isin(mask_array, land_value)
            param_array[ind] = param_value
        return param_array
return param_array | PypiClean |
/maze_rl-0.2.0-py3-none-any.whl/maze/train/trainers/es/distributed/es_dummy_distributed_rollouts.py | from typing import Generator, Optional, Union
from maze.core.agent.policy import Policy
from maze.core.agent.torch_model import TorchModel
from maze.core.annotations import override
from maze.core.env.structured_env import StructuredEnv
from maze.core.wrappers.log_stats_wrapper import LogStatsWrapper
from maze.core.wrappers.observation_normalization.normalization_strategies.base import StructuredStatisticsType
from maze.core.wrappers.time_limit_wrapper import TimeLimitWrapper
from maze.train.trainers.es.distributed.es_distributed_rollouts import ESDistributedRollouts, ESRolloutResult
from maze.train.trainers.es.distributed.es_rollout_wrapper import ESRolloutWorkerWrapper
from maze.train.trainers.es.es_shared_noise_table import SharedNoiseTable
class ESDummyDistributedRollouts(ESDistributedRollouts):
    """Synchronous, single-process stand-in for the distributed ES rollout backend.

    Rollouts are executed one after another in the calling process instead of
    being farmed out to remote workers.
    """

    def __init__(self, env: StructuredEnv, n_eval_rollouts: int, shared_noise: SharedNoiseTable,
                 agent_instance_seed: int):
        # Wrap the env with step limiting and statistics logging, then turn it
        # into an ES rollout worker bound to the shared noise table.
        wrapped = LogStatsWrapper.wrap(TimeLimitWrapper.wrap(env))
        self.env = ESRolloutWorkerWrapper.wrap(env=wrapped, shared_noise=shared_noise,
                                               agent_instance_seed=agent_instance_seed)
        self.n_eval_rollouts = n_eval_rollouts

    @override(ESDistributedRollouts)
    def generate_rollouts(self,
                          policy: Union[Policy, TorchModel],
                          max_steps: Optional[int],
                          noise_stddev: float,
                          normalization_stats: StructuredStatisticsType
                          ) -> Generator[ESRolloutResult, None, None]:
        """Yield a fixed number of evaluation rollouts first, then an endless
        stream of training rollouts."""
        self.env.set_max_episode_steps(max_steps)
        if normalization_stats:
            self.env.set_normalization_statistics(normalization_stats)

        remaining_evals = self.n_eval_rollouts
        while remaining_evals > 0:
            remaining_evals -= 1
            yield self.env.generate_evaluation(policy)

        while True:
            yield self.env.generate_training(policy, noise_stddev)
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/storagecache/get_required_aml_fs_subnets_size.py |
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from ._inputs import *
__all__ = [
'GetRequiredAmlFSSubnetsSizeResult',
'AwaitableGetRequiredAmlFSSubnetsSizeResult',
'get_required_aml_fs_subnets_size',
'get_required_aml_fs_subnets_size_output',
]
@pulumi.output_type
class GetRequiredAmlFSSubnetsSizeResult:
    """
    Information about the number of available IP addresses that are required for the AML file system.
    """
    def __init__(__self__, filesystem_subnet_size=None):
        # NOTE(review): the truthiness guard means 0 and None skip the type
        # check; this matches the Pulumi code generator's standard pattern.
        if filesystem_subnet_size and not isinstance(filesystem_subnet_size, int):
            raise TypeError("Expected argument 'filesystem_subnet_size' to be a int")
        # Store through pulumi.set so the @pulumi.output_type machinery
        # (not a plain instance attribute) manages the value.
        pulumi.set(__self__, "filesystem_subnet_size", filesystem_subnet_size)

    @property
    @pulumi.getter(name="filesystemSubnetSize")
    def filesystem_subnet_size(self) -> Optional[int]:
        """
        The number of available IP addresses that are required for the AML file system.
        """
        # pulumi.getter maps this snake_case property onto the API's
        # camelCase field name "filesystemSubnetSize".
        return pulumi.get(self, "filesystem_subnet_size")
class AwaitableGetRequiredAmlFSSubnetsSizeResult(GetRequiredAmlFSSubnetsSizeResult):
    """Awaitable variant of the invoke result, usable with ``await``."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns __await__ into a generator function,
        # which is what makes this object awaitable; the generator finishes
        # immediately and "returns" a plain result object.
        if False:
            yield self
        return GetRequiredAmlFSSubnetsSizeResult(
            filesystem_subnet_size=self.filesystem_subnet_size)
def get_required_aml_fs_subnets_size(sku: Optional[pulumi.InputType['SkuName']] = None,
                                     storage_capacity_ti_b: Optional[float] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRequiredAmlFSSubnetsSizeResult:
    """
    Get the number of available IP addresses needed for the AML file system information provided.
    Azure REST API version: 2023-05-01.


    :param pulumi.InputType['SkuName'] sku: SKU for the resource.
    :param float storage_capacity_ti_b: The size of the AML file system, in TiB.
    :param pulumi.InvokeOptions opts: options controlling how the invoke is performed.
    """
    # Translate Python snake_case argument names to the REST API's camelCase keys.
    __args__ = dict()
    __args__['sku'] = sku
    __args__['storageCapacityTiB'] = storage_capacity_ti_b
    # Merge the caller's options onto the provider-wide invoke defaults.
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Synchronously invoke the Azure Native data-source function and unwrap the value.
    __ret__ = pulumi.runtime.invoke('azure-native:storagecache:getRequiredAmlFSSubnetsSize', __args__, opts=opts, typ=GetRequiredAmlFSSubnetsSizeResult).value

    return AwaitableGetRequiredAmlFSSubnetsSizeResult(
        filesystem_subnet_size=pulumi.get(__ret__, 'filesystem_subnet_size'))
@_utilities.lift_output_func(get_required_aml_fs_subnets_size)
def get_required_aml_fs_subnets_size_output(sku: Optional[pulumi.Input[Optional[pulumi.InputType['SkuName']]]] = None,
                                            storage_capacity_ti_b: Optional[pulumi.Input[Optional[float]]] = None,
                                            opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetRequiredAmlFSSubnetsSizeResult]:
    """
    Get the number of available IP addresses needed for the AML file system information provided.
    Azure REST API version: 2023-05-01.


    :param pulumi.InputType['SkuName'] sku: SKU for the resource.
    :param float storage_capacity_ti_b: The size of the AML file system, in TiB.
    """
    # The body is intentionally empty: @_utilities.lift_output_func wraps the
    # plain invoke above, lifting its arguments and result into pulumi.Output.
    ...
/agent_link-0.1.2.tar.gz/agent_link-0.1.2/agent_link/agent_link/modules/.settings/figma_v2/plugin/plugins/dist/functions/encode-images.js | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
/**
 * Figma's `figma.createImage()` only accepts PNG, JPEG and GIF, so webp
 * images are re-encoded to PNG via an HTML canvas round-trip.
 * Borrowed from https://www.figma.com/plugin-docs/working-with-images/
 */
export const transformWebpToPNG = (bytes) => {
    const canvas = document.createElement("canvas");
    const ctx = canvas.getContext("2d");
    // Decode the webp bytes into raw pixels, then serialize them as PNG.
    return decode(canvas, ctx, bytes).then((pixels) => encode(canvas, ctx, pixels));
};
// Encode pixels back into an actual PNG file: paint the ImageData onto the
// canvas and ask the canvas to serialize itself via canvas.toBlob().
function encode(canvas, ctx, imageData) {
    ctx.putImageData(imageData, 0, 0);
    return new Promise((resolve, reject) => {
        canvas.toBlob((blob) => {
            // toBlob() invokes its callback with null when encoding fails;
            // without this guard readAsArrayBuffer(null) would throw inside
            // the callback and the promise would never settle.
            if (!blob) {
                reject(new Error("Canvas could not be serialized to a blob"));
                return;
            }
            const reader = new FileReader();
            reader.onload = () => resolve(new Uint8Array(reader.result));
            reader.onerror = () => reject(new Error("Could not read from blob"));
            reader.readAsArrayBuffer(blob);
        });
    });
}
// Decode an image by drawing it into an HTML canvas, which lets us read the
// individual pixels back out as ImageData.
function decode(canvas, ctx, bytes) {
    return __awaiter(this, void 0, void 0, function* () {
        const url = URL.createObjectURL(new Blob([bytes]));
        try {
            const image = yield new Promise((resolve, reject) => {
                const img = new Image();
                img.onload = () => resolve(img);
                // Reject with a real Error instead of undefined so failures
                // are diagnosable upstream.
                img.onerror = () => reject(new Error("Could not decode image bytes"));
                img.src = url;
            });
            canvas.width = image.width;
            canvas.height = image.height;
            ctx.drawImage(image, 0, 0);
            return ctx.getImageData(0, 0, image.width, image.height);
        }
        finally {
            // Release the blob URL; otherwise every decoded image leaks
            // memory for the lifetime of the document.
            URL.revokeObjectURL(url);
        }
    });
}
/react-in-jupyter-0.5.tar.gz/react-in-jupyter-0.5/react_jupyter/setup.js | window.REACT_JUPYTER_SETUP_LOADED = true;
/**
 * RequireJS dependencies
 *
 * Lazily loads d3-require (with React aliases) exactly once and resolves the
 * requested library through it.
 */
let d3requirePromise = null;

window.d3require = (library) => {
    if (d3requirePromise === null) {
        // Cache the in-flight promise (not just the eventual value) so that
        // concurrent first calls do not load and configure d3-require more
        // than once.
        d3requirePromise = new Promise((resolve) => {
            require(["https://cdn.jsdelivr.net/npm/d3-require@1"], (d3) => {
                resolve(
                    d3.require.alias({
                        react: "react@16/umd/react.production.min.js",
                        "react-dom": "react-dom@16/umd/react-dom.production.min.js"
                    })
                );
            });
        });
    }
    return d3requirePromise.then((d3require) => d3require(library));
};
/**
 * Execute Python code in the notebook kernel and wait for the response.
 * Resolves with the accumulated stdout text on success; rejects with an
 * Error built from the kernel's exception name and message on failure.
 */
window.python = function(pythonCode) {
    return new Promise((resolve, reject) => {
        let streamOutput = []; // accumulated "stream" (stdout) message chunks
        let errorOutput = null; // details of the last "error" iopub message, if any
        const callbacks = {
            shell: {
                reply: msg => {
                    // The execute_reply on the shell channel signals that
                    // execution finished; settle the promise accordingly.
                    if (msg.msg_type === "execute_reply") {
                        if (msg.content.status === "ok") {
                            resolve(streamOutput.join(""));
                        } else if (msg.content.status === "error") {
                            reject(new Error(errorOutput.exception + ": " + errorOutput.message));
                        } else {
                            throw new Error("Unknown status " + msg.content.status);
                        }
                    }
                },
                payload: {
                    set_next_input: x => {
                        // Emitted by e.g. %load magics; unsupported here.
                        throw new Error("Don't know what to do with a set_next_input message.");
                    }
                }
            },
            iopub: {
                output: msg => {
                    if (msg.msg_type === "stream") {
                        streamOutput.push(msg.content.text);
                    } else if (msg.msg_type === "error") {
                        // Remember the error details so the shell reply
                        // handler can reject with them.
                        errorOutput = {
                            exception: msg.content.ename,
                            message: msg.content.evalue,
                            traceback: msg.content.traceback
                        };
                    } else {
                        throw new Error("Received output with unknown type " + msg.msg_type);
                    }
                },
                clear_output: msg => {
                    // IPython's clear_output() discards what we collected so far.
                    streamOutput = [];
                }
            },
            input: msg => {
                // stdin requests (e.g. Python's input()) are not supported.
                throw new Error("Don't know what to do with an input message.");
            }
        };
        Jupyter.notebook.kernel.execute(pythonCode, callbacks, { silent: false });
    });
};
/**
 * Fetch a Python variable from the server: serialize it to JSON on the
 * kernel side, print it, and parse the printed result here.
 */
window.fetchVariable = async function (variableName) {
    const pythonSource = `
from react_jupyter import CustomJSONEncoder
import json
print(json.dumps(${variableName}, cls=CustomJSONEncoder))`.trim();
    const raw = await window.python(pythonSource);
    return JSON.parse(raw.trim());
};
/**
 * Enable syntax highlighting for JSX cells
 */
// Cells whose first line matches "%%jsx" get the magic_jsx CodeMirror mode.
Jupyter.CodeCell.options_default.highlight_modes.magic_jsx = { reg: ["^%%jsx"] };
/**
 * Collects teardown callbacks and runs them all on cleanup(), after which
 * the list is emptied. Child scopes are cleaned up with their parent.
 */
class CleanupManager {
    constructor() {
        this._pending = [];
    }

    /** Queue a callback to run at cleanup time. */
    register(callback) {
        this._pending.push(callback);
    }

    /** Run every queued callback (in registration order), then forget them. */
    cleanup() {
        for (const cb of this._pending) {
            cb();
        }
        this._pending = [];
    }

    /** Create a nested manager that is cleaned up whenever this one is. */
    childScope() {
        const child = new CleanupManager();
        this.register(() => child.cleanup());
        return child;
    }
}
/**
 * A Cell handles rendering React trees into a notebook output area and
 * cleaning up when the output is re-executed or the cell is deleted.
 */
class Cell {
    constructor(element) {
        this.elem = element;
        this.outputDiv = this._findParentOutputDiv(element);
        // DOM ancestors: the notebook cell element, and the container that
        // holds all cells (watched to detect this cell's deletion).
        this.notebookElement = this.outputDiv.parentElement.parentElement;
        this.notebookContainer = this.notebookElement.parentElement;
        this.cleanupManager = new CleanupManager();
        this._listenForCleanup();
    }

    // Render a React element tree into this cell's element.
    render(tree) {
        d3require("react-dom").then(ReactDOM => {
            ReactDOM.render(tree, this.elem);
        });
    }

    // Render errorMessage as a red, monospace error box.
    renderError(errorMessage) {
        d3require("react").then(React => {
            this.render(
                React.createElement(
                    "div",
                    {
                        style: {
                            borderLeft: "3px solid red",
                            padding: "0.5em 1em",
                            backgroundColor: "rgba(255, 0, 0, 0.1)"
                        }
                    },
                    [
                        React.createElement("strong", {}, ["Error"]),
                        React.createElement("div", { style: { whiteSpace: "pre", fontFamily: "monospace" } }, [
                            errorMessage
                        ])
                    ]
                )
            );
        });
    }

    // Run and discard all registered cleanup callbacks (observers included).
    _cleanup() {
        this.cleanupManager.cleanup();
    }

    _listenForCleanup() {
        // Re-executing the cell replaces the children of the output div, so a
        // childList mutation there means this render is stale.
        const reExecuteDetector = new MutationObserver(this._cleanup.bind(this));
        reExecuteDetector.observe(this.outputDiv, { childList: true });
        this.cleanupManager.register(() => reExecuteDetector.disconnect());
        // Deleting the cell removes notebookElement from the container.
        const cellDeletionDetector = new MutationObserver(mutationRecord => {
            for (let mutation of mutationRecord) {
                if (mutation.type === "childList") {
                    for (let node of mutation.removedNodes) {
                        if (node === this.notebookElement) {
                            return this._cleanup(mutationRecord);
                        }
                    }
                }
            }
        });
        cellDeletionDetector.observe(this.notebookContainer, { childList: true });
        this.cleanupManager.register(() => cellDeletionDetector.disconnect());
    }

    // Walk up the DOM to the enclosing element with class "output".
    _findParentOutputDiv(el) {
        let candidate = el;
        while (candidate) {
            candidate = candidate.parentElement;
            if (candidate.className === "output") {
                return candidate;
            }
        }
        throw Error("parent output div not found");
    }
}
window.Cell = Cell;
/**
 * A simple publish/subscribe registry of named values. Listeners declare the
 * set of variables they depend on and are re-invoked whenever any of those
 * variables is published.
 */
class Registry {
    constructor() {
        this.registry = new Map();     // variable name -> current value
        this.dependencies = new Set(); // { variables: Set, callback } records
    }

    /**
     * Publish several [variable, value] pairs at once. Each listener is
     * notified at most once, even if several of its variables changed.
     */
    publishMany(pairs) {
        for (const [variable, value] of pairs) {
            this.registry.set(variable, value);
        }
        const changedVariables = pairs.map((p) => p[0]);
        for (const { variables, callback } of this.dependencies) {
            // .some() rather than .find(): find() returns the matching name
            // itself, so a falsy-named variable (e.g. "") would be missed.
            if (changedVariables.some((v) => variables.has(v))) {
                this.call(variables, callback);
            }
        }
    }

    /** Invoke callback with a {variable: currentValue} object. */
    call(variables, callback) {
        const values = {};
        for (const v of variables) {
            values[v] = this.get(v);
        }
        callback(values);
    }

    /** Publish a single variable. */
    publish(variable, value) {
        this.publishMany([[variable, value]]);
    }

    /** Current value of a variable (undefined if never published). */
    get(variable) {
        return this.registry.get(variable);
    }

    /**
     * Subscribe to a set of variables. The callback fires immediately with
     * the current values and again on every relevant publish.
     * Returns an unsubscribe function.
     */
    listen(variables, callback) {
        variables = new Set(variables);
        const obj = { variables, callback };
        this.dependencies.add(obj);
        this.call(variables, callback);
        return () => this.dependencies.delete(obj);
    }
}
window.registry = new Registry();
/**
 * Registry entry for the width of a code cell, so cells can auto-update
 * when the notebook window is resized.
 */
let previousCellWidth = null;

function updateCellWidth() {
    const innerCell = document.getElementsByClassName("inner_cell")[0];
    // NOTE(review): 18.8 presumably compensates for the cell's horizontal
    // padding/border -- confirm against the notebook CSS.
    const width = Math.round(innerCell.clientWidth - 18.8);
    if (width === previousCellWidth) {
        return;
    }
    previousCellWidth = width;
    registry.publish("width", width);
}

updateCellWidth();
window.addEventListener("resize", updateCellWidth);
/holoviews_samap-2.0.0.tar.gz/holoviews_samap-2.0.0/holoviews_samap/core/sheetcoords.py | import numpy as np
from .boundingregion import BoundingBox
from .util import datetime_types
# Note about the 'bounds-master' approach we have adopted
# =======================================================
#
# Our current approach is a "bounds-master" approach, where we trust
# the user's specified x width, and choose the nearest density and y
# height to make that possible. The advantage of this is that when we
# change the density (which is often), each such simulation is the
# best possible approximation to the given area. Generally, the area
# is much more meaningful than the density, so this approach makes
# sense. Plus, the y height is usually the same as the x width, so
# it's not usually a problem that the y height is not respected. The
# disadvantage is that the user's area can only be trusted in one
# dimension, because of wanting to avoid the complication of separate
# xdensity and ydensity, which makes this approach very difficult to
# explain to the user.
#
# The other approach is density-master: trust the user's specified
# density as-is, and simply choose the nearest area that fits that
# density. The advantages are that (a) it's very simple to describe
# and reason about, and (b) if a user asks for a different area, they
# get a true subsection of the same simulation they would have gotten
# at the larger area. The disadvantage is that the simulation isn't
# the best approximation of the given area that it could be -- e.g. at
# low densities, the sheet area could be quite significantly different
# than the one the user requested. Plus, if we took this approach
# seriously, then we'd let the density specify the matrix coordinate
# system entirely, including the origin, which would mean that the
# actual area would often be offset from the intended one, which is
# even worse. Differences between the area and the offset could cause
# severe problems in the alignment of projections between sheets with
# different densities, which would make low-density versions of
# hierarchical models behave very strangely.
class SheetCoordinateSystem(object):
    """
    Provides methods to allow conversion between sheet and matrix
    coordinates.
    """
    def __get_xdensity(self):
        return self.__xdensity
    def __get_ydensity(self):
        return self.__ydensity
    def __get_shape(self):
        return self.__shape
    xdensity = property(__get_xdensity, doc="""
        The spacing between elements in an underlying matrix
        representation, in the x direction.""")
    ydensity = property(__get_ydensity, doc="""
        The spacing between elements in an underlying matrix
        representation, in the y direction.""")
    # (rows, cols) of the underlying matrix; computed once in __init__.
    shape = property(__get_shape)
    # Determines the unit of time densities are defined relative to
    # when one or both axes are datetime types
    _time_unit = 'us'
    def __init__(self,bounds,xdensity,ydensity=None):
        """
        Store the bounds (as l,b,r,t in an array), xdensity, and
        ydensity.
        If ydensity is not specified, it is assumed that the specified
        xdensity is nominal and that the true xdensity should be
        calculated. The top and bottom bounds are adjusted so that the
        ydensity is equal to the xdensity.
        If both xdensity and ydensity are specified, these and the
        bounds are taken to be exact and are not adjusted.
        """
        # NOTE: ydensity=0 (not just None) also triggers equalization,
        # because of the truthiness test below.
        if not ydensity:
            bounds,xdensity = self.__equalize_densities(bounds,xdensity)
        self.bounds = bounds
        self.__set_xdensity(xdensity)
        self.__set_ydensity(ydensity or xdensity)
        self.lbrt = np.array(bounds.lbrt())
        # Slice is defined later in this module; the name is resolved at
        # call time, so the forward reference is safe.
        r1,r2,c1,c2 = Slice._boundsspec2slicespec(self.lbrt,self)
        self.__shape = (r2-r1,c2-c1)
    # xstep and ystep allow division to be avoided for speed reasons
    def __set_xdensity(self,density):
        self.__xdensity=density
        self.__xstep = 1.0/density
    def __set_ydensity(self,density):
        self.__ydensity=density
        self.__ystep = 1.0/density
    def __equalize_densities(self,nominal_bounds,nominal_density):
        """
        Calculate the true density along x, and adjust the top and
        bottom bounds so that the density along y will be equal.
        Returns (adjusted_bounds, true_density)
        """
        left,bottom,right,top = nominal_bounds.lbrt()
        width = right-left; height = top-bottom
        center_y = bottom + height/2.0
        # True density is not equal to the nominal_density when
        # nominal_density*(right-left) is not an integer.
        true_density = int(nominal_density*(width))/float(width)
        # Round the y extent to a whole number of cells at true_density,
        # keeping the vertical center fixed.
        n_cells = round(height*true_density,0)
        adjusted_half_height = n_cells/true_density/2.0
        return (BoundingBox(points=((left, center_y-adjusted_half_height),
                                    (right, center_y+adjusted_half_height))),
                true_density)
    def sheet2matrix(self,x,y):
        """
        Convert a point (x,y) in Sheet coordinates to continuous
        matrix coordinates.
        Returns (float_row,float_col), where float_row corresponds to
        y, and float_col to x.
        Valid for scalar or array x and y.
        Note about Bounds For a Sheet with
        BoundingBox(points=((-0.5,-0.5),(0.5,0.5))) and density=3,
        x=-0.5 corresponds to float_col=0.0 and x=0.5 corresponds to
        float_col=3.0. float_col=3.0 is not inside the matrix
        representing this Sheet, which has the three columns
        (0,1,2). That is, x=-0.5 is inside the BoundingBox but x=0.5
        is outside. Similarly, y=0.5 is inside (at row 0) but y=-0.5
        is outside (at row 3) (it's the other way round for y because
        the matrix row index increases as y decreases).
        """
        # First translate to (left,top), which is [0,0] in the matrix,
        # then scale to the size of the matrix. The y coordinate needs
        # to be flipped, because the points are moving down in the
        # sheet as the y index increases in the matrix.
        xdensity = self.__xdensity
        # Datetime axes: convert the density to a timedelta step in
        # self._time_unit and divide instead of multiplying.
        if ((isinstance(x, np.ndarray) and x.dtype.kind == 'M') or
            isinstance(x, datetime_types)):
            xdensity = np.timedelta64(int(round(1./xdensity)), self._time_unit)
            float_col = (x-self.lbrt[0]) / xdensity
        else:
            float_col = (x-self.lbrt[0]) * xdensity
        ydensity = self.__ydensity
        if ((isinstance(y, np.ndarray) and y.dtype.kind == 'M') or
            isinstance(y, datetime_types)):
            ydensity = np.timedelta64(int(round(1./ydensity)), self._time_unit)
            float_row = (self.lbrt[3]-y) / ydensity
        else:
            float_row = (self.lbrt[3]-y) * ydensity
        return float_row, float_col
    def sheet2matrixidx(self,x,y):
        """
        Convert a point (x,y) in sheet coordinates to the integer row
        and column index of the matrix cell in which that point falls,
        given a bounds and density. Returns (row,column).
        Note that if coordinates along the right or bottom boundary
        are passed into this function, the returned matrix coordinate
        of the boundary will be just outside the matrix, because the
        right and bottom boundaries are exclusive.
        Valid for scalar or array x and y.
        """
        r,c = self.sheet2matrix(x,y)
        r = np.floor(r)
        c = np.floor(c)
        # Arrays get vectorized casting; scalars go through plain int().
        if hasattr(r,'astype'):
            return r.astype(int), c.astype(int)
        else:
            return int(r),int(c)
    def matrix2sheet(self,float_row,float_col):
        """
        Convert a floating-point location (float_row,float_col) in
        matrix coordinates to its corresponding location (x,y) in
        sheet coordinates.
        Valid for scalar or array float_row and float_col.
        Inverse of sheet2matrix().
        """
        xoffset = float_col*self.__xstep
        if isinstance(self.lbrt[0], datetime_types):
            xoffset = np.timedelta64(int(round(xoffset)), self._time_unit)
        x = self.lbrt[0] + xoffset
        yoffset = float_row*self.__ystep
        if isinstance(self.lbrt[3], datetime_types):
            yoffset = np.timedelta64(int(round(yoffset)), self._time_unit)
        y = self.lbrt[3] - yoffset
        return x, y
    def matrixidx2sheet(self,row,col):
        """
        Return (x,y) where x and y are the floating point coordinates
        of the *center* of the given matrix cell (row,col). If the
        matrix cell represents a 0.2 by 0.2 region, then the center
        location returned would be 0.1,0.1.
        NOTE: This is NOT the strict mathematical inverse of
        sheet2matrixidx(), because sheet2matrixidx() discards all but
        the integer portion of the continuous matrix coordinate.
        Valid only for scalar or array row and col.
        """
        x,y = self.matrix2sheet((row+0.5), (col+0.5))
        # Rounding allows easier comparison with user specified values
        if not isinstance(x, datetime_types):
            x = np.around(x,10)
        if not isinstance(y, datetime_types):
            y = np.around(y,10)
        return x, y
    def closest_cell_center(self,x,y):
        """
        Given arbitrary sheet coordinates, return the sheet coordinates
        of the center of the closest unit.
        """
        return self.matrixidx2sheet(*self.sheet2matrixidx(x,y))
    def sheetcoordinates_of_matrixidx(self):
        """
        Return x,y where x is a vector of sheet coordinates
        representing the x-center of each matrix cell, and y
        represents the corresponding y-center of the cell.
        """
        rows,cols = self.shape
        return self.matrixidx2sheet(np.arange(rows), np.arange(cols))
class Slice(np.ndarray):
    """
    Represents a slice of a SheetCoordinateSystem; i.e., an array
    specifying the row and column start and end points for a submatrix
    of the SheetCoordinateSystem.
    The slice is created from the supplied bounds by calculating the
    slice that corresponds most closely to the specified bounds.
    Therefore, the slice does not necessarily correspond exactly to
    the specified bounds. The bounds that do exactly correspond to the
    slice are available via the 'bounds' attribute.
    Note that the slice does not respect the bounds of the
    SheetCoordinateSystem, and that actions such as translate() also
    do not respect the bounds. To ensure that the slice is within the
    SheetCoordinateSystem's bounds, use crop_to_sheet().
    """
    __slots__ = []
    def compute_bounds(self,scs):
        """Return the BoundingBox (in sheet coordinates) that corresponds
        exactly to this slice on the given SheetCoordinateSystem."""
        spec = self._slicespec2boundsspec(self,scs)
        return BoundingBox(points=spec)
    def __new__(cls, bounds, sheet_coordinate_system, force_odd=False,
                min_matrix_radius=1):
        """
        Create a slice of the given sheet_coordinate_system from the
        specified bounds.
        """
        if force_odd:
            slicespec=Slice._createoddslicespec(bounds,sheet_coordinate_system,
                                                min_matrix_radius)
        else:
            slicespec=Slice._boundsspec2slicespec(bounds.lbrt(),sheet_coordinate_system)
        # Using numpy.int32 for legacy reasons
        a = np.array(slicespec, dtype=np.int32, copy=False).view(cls)
        return a
    def submatrix(self,matrix):
        """
        Return the submatrix of the given matrix specified by this
        slice.
        Equivalent to computing the intersection between the
        SheetCoordinateSystem's bounds and the bounds, and returning
        the corresponding submatrix of the given matrix.
        The submatrix is just a view into the sheet_matrix; it is not
        an independent copy.
        """
        return matrix[self[0]:self[1],self[2]:self[3]]
    @staticmethod
    def findinputslice(coord, sliceshape, sheetshape):
        """
        Gets the matrix indices of a slice within an array of size
        sheetshape from a sliceshape, positioned at coord.
        """
        # NOTE(review): under Python 3, n_cols/2 and n_rows/2 are float
        # divisions; the results are only truncated to ints when stored
        # into the int32 Slice by set(). For negative intermediates,
        # truncation differs from Python 2's floor division -- confirm
        # the intended semantics.
        center_row, center_col = coord
        n_rows, n_cols = sliceshape
        sheet_rows, sheet_cols = sheetshape
        c1 = -min(0, center_col-n_cols/2)  # assuming odd shape (n_cols/2)
        r1 = -min(0, center_row-n_rows/2)  # top and bottom
        c2 = -max(-n_cols, center_col-sheet_cols-n_cols/2)
        r2 = -max(-n_rows, center_row-sheet_rows-n_rows/2)
        return (r1, r2, c1, c2)
    def positionlesscrop(self,x,y,sheet_coord_system):
        """
        Return the correct slice for a weights/mask matrix at this
        ConnectionField's location on the sheet (i.e. for getting the
        correct submatrix of the weights or mask in case the unit is
        near the edge of the sheet).
        """
        slice_inds = self.findinputslice(
            sheet_coord_system.sheet2matrixidx(x,y),
            self.shape_on_sheet(), sheet_coord_system.shape)
        self.set(slice_inds)
    def positionedcrop(self,x,y,sheet_coord_system):
        """
        Offset the bounds_template to this cf's location and store the
        result in the 'bounds' attribute.
        Also stores the input_sheet_slice for access by C.
        """
        cf_row,cf_col = sheet_coord_system.sheet2matrixidx(x,y)
        bounds_x,bounds_y=self.compute_bounds(sheet_coord_system).centroid()
        b_row,b_col=sheet_coord_system.sheet2matrixidx(bounds_x,bounds_y)
        row_offset = cf_row-b_row
        col_offset = cf_col-b_col
        self.translate(row_offset,col_offset)
    def translate(self, r, c):
        "Translate the slice by the given number of rows and columns."
        # In-place ndarray arithmetic: shifts (r1,r2) by r and (c1,c2) by c.
        self+=[r,r,c,c]
    def set(self,slice_specification):
        "Set this slice from some iterable that specifies (r1,r2,c1,c2)."
        self.put([0,1,2,3],slice_specification) # pylint: disable-msg=E1101
    def shape_on_sheet(self):
        "Return the shape of the array of the Slice on its sheet."
        return self[1]-self[0],self[3]-self[2]
    def crop_to_sheet(self,sheet_coord_system):
        "Crop the slice to the SheetCoordinateSystem's bounds."
        maxrow,maxcol = sheet_coord_system.shape
        self[0] = max(0,self[0])
        self[1] = min(maxrow,self[1])
        self[2] = max(0,self[2])
        self[3] = min(maxcol,self[3])
    @staticmethod
    def _createoddslicespec(bounds,scs,min_matrix_radius):
        """
        Create the 'odd' Slice that best approximates the specified
        sheet-coordinate bounds.
        The supplied bounds are translated to have a center at the
        center of one of the sheet's units (we arbitrarily use the
        center unit), and then these bounds are converted to a slice
        in such a way that the slice exactly includes all units whose
        centers are within the bounds (see boundsspec2slicespec()).
        However, to ensure that the bounds are treated symmetrically,
        we take the right and bottom bounds and reflect these about
        the center of the slice (i.e. we take the 'xradius' to be
        right_col-center_col and the 'yradius' to be
        bottom_col-center_row). Hence, if the bounds happen to go
        through units, if the units are included on the right and
        bottom bounds, they will be included on the left and top
        bounds. This ensures that the slice has odd dimensions.
        """
        bounds_xcenter,bounds_ycenter=bounds.centroid()
        sheet_rows,sheet_cols = scs.shape
        # NOTE(review): sheet_rows/2 is float division under Python 3;
        # confirm parity with the Python 2 integer-division behaviour.
        center_row,center_col = sheet_rows/2,sheet_cols/2
        unit_xcenter,unit_ycenter=scs.matrixidx2sheet(center_row,
                                                      center_col)
        # Shift the bounds template so it is centered on the chosen unit
        # (this mutates the caller's bounds object).
        bounds.translate(unit_xcenter-bounds_xcenter,
                         unit_ycenter-bounds_ycenter)
        r1,r2,c1,c2 = Slice._boundsspec2slicespec(bounds.lbrt(),scs)
        xrad=max(c2-center_col-1,min_matrix_radius)
        yrad=max(r2-center_row-1,min_matrix_radius)
        r2=center_row+yrad+1
        c2=center_col+xrad+1
        r1=center_row-yrad
        c1=center_col-xrad
        return (r1,r2,c1,c2)
    @staticmethod
    def _boundsspec2slicespec(boundsspec,scs):
        """
        Convert an iterable boundsspec (supplying l,b,r,t of a
        BoundingRegion) into a Slice specification.
        Includes all units whose centers are within the specified
        sheet-coordinate bounds specified by boundsspec.
        Exact inverse of _slicespec2boundsspec().
        """
        l,b,r,t = boundsspec
        # Continuous matrix coordinates of the top-left and bottom-right
        # corners of the bounds.
        t_m,l_m = scs.sheet2matrix(l,t)
        b_m,r_m = scs.sheet2matrix(r,b)
        # Shift by half a cell so that only units whose *centers* fall
        # inside the bounds are included.
        l_idx = int(np.ceil(l_m-0.5))
        t_idx = int(np.ceil(t_m-0.5))
        # CBENHANCEMENT: Python 2.6's math.trunc()?
        r_idx = int(np.floor(r_m+0.5))
        b_idx = int(np.floor(b_m+0.5))
        return t_idx,b_idx,l_idx,r_idx
    @staticmethod
    def _slicespec2boundsspec(slicespec,scs):
        """
        Convert an iterable slicespec (supplying r1,r2,c1,c2 of a
        Slice) into a BoundingRegion specification.
        Exact inverse of _boundsspec2slicespec().
        """
        r1,r2,c1,c2 = slicespec
        left,bottom = scs.matrix2sheet(r2,c1)
        right, top = scs.matrix2sheet(r1,c2)
        return ((left,bottom),(right,top))
# /PEATDB-2.3.tar.gz/PEATDB-2.3/pKaTool/dist_geom.py
from Tkinter import *
import numpy
import numpy.linalg
import sys
import math
def average(l):
    """Return the arithmetic mean of the numbers in *l*.

    Uses the builtin sum() instead of a manual accumulation loop.
    Raises ZeroDivisionError on an empty sequence, matching the
    original behaviour.
    """
    return sum(l) / float(len(l))
def avg_sq(l):
    """Return the sum of the elements divided by the squared length.

    NOTE: despite the name this is NOT the mean of squares; the
    historical behaviour (sum / n**2) is preserved for callers.
    """
    total = 0.0
    for value in l:
        total += value
    return total / float(len(l) ** 2)
#
# ----
#
class distance_optimisation:
    """Tk control window for distance-geometry fitting of titratable
    groups: embeds the group-group distance matrix in 3D and searches
    for matching group arrangements in a loaded PDB structure or in a
    pKa interaction-energy matrix.

    NOTE(review): Python 2 code (print statements, dict.has_key);
    the titration_curves argument is only used by the commented-out
    Menubutton block below -- confirm it is still needed."""
    def __init__(self,distance_matrix,titration_curves):
        """Set up the control window and the PyOPenGL window"""
        #
        # Variables
        #
        self.P=None # Var for Protool instance
        #
        # Open window
        #
        self.Dcontrol=Toplevel()
        self.Dcontrol.geometry('+500+200')
        self.Dcontrol.title('Distance geometry control')
        #
        # Text window
        #
        self.coord_text=Text(self.Dcontrol,background='white',
                             foreground='black',
                             state=NORMAL,
                             exportselection=1,
                             width=60,
                             height=20)#,
        #yscrollcommand= detail_yscrollbar.set,
        # xscrollcommand=detail_xscrollbar.set)
        self.coord_text.grid(row=0,column=0,columnspan=3)
        self.specific_residue = StringVar()
        self.specific_residue.set('Chose residue')
        #
        # Effective dielectric constant used to scale distances
        #
        self.eps=DoubleVar()
        self.eps.set(20)
        self.eps_scale=Scale(self.Dcontrol,from_=1,to=80,resolution=1,
                             orient='horizontal',relief='ridge',
                             variable=self.eps,
                             label='Effective dielectric constant')
        self.eps_scale.grid(row=8,column=0,columnspan=3)
        #
        Button(self.Dcontrol,text='Do new DG',command=self.update_all).grid(row=9,column=0)
        #
        Button(self.Dcontrol,text='Load PDB',command=self.load_PDB).grid(row=10,column=0)
        #
        Button(self.Dcontrol,text='Find match in PDB',command=self.find_match).grid(row=10,column=1)
        #
        Button(self.Dcontrol,text='Load pKa matrix file',command=self.load_matrix).grid(row=11,column=0)
        #
        Button(self.Dcontrol,text='Find match in matrix',command=self.find_matrix_match).grid(row=11,column=1)
        #
        Button(self.Dcontrol,text='Find match with specific residue',command=self.find_match_ineraction_with_specific_residue).grid(row=12,column=0)
        #
        Entry(self.Dcontrol,text='Residue',textvariable=self.specific_residue).grid(row=12,column=1)
        #
        # mb = Menubutton(self.Dcontrol, textvariable=self.specific_residue)
        # menu = Menu(mb,tearoff=0)
        # mb['menu'] = menu
        # for i in range(len(titration_curves.keys())):
        #     menu.add_radiobutton(label=titration_curves.keys()[i],variable=self.specific_residue, value=titration_curves.keys()[i], indicatoron=1)
        # mb.grid(row=12,column=1)
        #
        #
        # Window for PDB search results
        #
        self.PDB_text=Text(self.Dcontrol,background='white',
                           foreground='black',
                           state=NORMAL,
                           exportselection=1,
                           width=60,
                           height=20)#,
        #yscrollcommand= detail_yscrollbar.set,
        # xscrollcommand=detail_xscrollbar.set)
        self.PDB_text.grid(row=13,column=0,columnspan=3)
        #
        # Do the first opt
        #
        self.distance_matrix=distance_matrix
        self.update_all()
        #
        # Start the OpenGL
        #
        #X=OGL(self.coords,self)
        #glutMainLoop()
        return
    #
    # ----
    #
    def update_all(self):
        """Do new distance geometry"""
        self.coord_text.delete(1.0,END)
        #
        # Set the initial eps
        #
        matrix=self.set_eps(self.distance_matrix.copy(),self.eps.get())
        self.DG=distance_geometry(matrix)
        new_dist_matrix=self.DG.do_triangle_smoothing()
        self.coords=self.DG.construct_metric_matrix()
        #
        # Insert the coordinates
        #
        groups=self.coords.keys()
        groups.sort()
        self.coord_text.insert(END,'%10s %7s %7s %7s\n' %('Group','X','Y','Z'))
        for group in groups:
            self.coord_text.insert(END,'%10s %7.2f %7.2f %7.2f\n' %(group,
                                                                    self.coords[group]['X'],
                                                                    self.coords[group]['Y'],
                                                                    self.coords[group]['Z']))
        #
        # Insert distances (upper triangle only)
        #
        for g1 in range(len(groups)):
            for g2 in range(g1,len(groups)):
                group=groups[g1]
                group2=groups[g2]
                if g1==g2:
                    continue
                self.coord_text.insert(END,'%10s %10s %7.2f\n' %(group,group2,new_dist_matrix[group][group2]))
    #
    # ----
    #
    def set_eps(self,matrix,eps):
        """Set a new eps for all interactions"""
        new_matrix={}
        for A in matrix.keys():
            new_matrix[A]={}
            for B in matrix[A].keys():
                if matrix[A][B]==0:
                    new_matrix[A][B]=0
                elif matrix[A][B]<999.9:
                    new_matrix[A][B]=matrix[A][B]/eps
                else:
                    # distances of 1000 are infinite
                    new_matrix[A][B]=matrix[A][B]
        return new_matrix
    #
    # ----
    #
    def load_PDB(self):
        """Load a PDB file"""
        import tkFileDialog, os
        filename=tkFileDialog.askopenfilename(defaultextension='.pdb',
                                              initialdir=os.getcwd(),
                                              parent=self.Dcontrol,
                                              filetypes=[("PDB file","*.pdb"),
                                                         ("All files","*.*")])
        if filename:
            import Protool
            self.P=Protool.structureIO()
            self.P.readpdb(filename)
        return
    #
    # -----
    #
    def find_match(self):
        """Find a match for the coordinates in the PDB"""
        if not self.P:
            return
        #
        # Number of groups from distance geometry
        #
        groups=self.coords.keys()
        num_groups=len(groups)
        #
        # Get the titratable groups in the protein
        #
        self.P.get_titratable_groups()
        tit_grps=self.P.titratable_groups
        tres=tit_grps.keys()
        tres.sort()
        #
        # Compute the geometric center of each titratable group's atoms
        #
        grp_centers={}
        for res in tres:
            name=self.P.resname(res)
            if not name in ['ARG','LYS','GLU','ASP','HIS']:
                continue
            for group in tit_grps[res]:
                atoms=[]
                for atom in group['atoms']:
                    atomname=res+':'+atom
                    if self.P.atoms.has_key(atomname):
                        atoms.append(self.P.GetPosition(atomname))
                #
                # Get avg coordinate
                #
                avg=numpy.zeros([3])
                for c in atoms:
                    avg=avg+c
                avg=avg/float(len(atoms))
                #
                # Construct group name
                #
                gname=res+':'+group['name']
                grp_centers[gname]=avg
        #
        # Construct all permutations and find best match
        #
        # Number of groups to fit is num_groups
        #
        prot_groups=grp_centers.keys()
        prot_groups.sort()
        print 'Protein groups'
        print prot_groups
        self.permutations=self.construct_permutations(num_groups,prot_groups)
        #
        # Load the coordinates from the distance geometry calc
        #
        fit_coords=numpy.zeros([num_groups,3])
        count=0
        for group in groups:
            fit_coords[count][0]=self.coords[group]['X']
            fit_coords[count][1]=self.coords[group]['Y']
            fit_coords[count][2]=self.coords[group]['Z']
            count=count+1
        #
        # Search for good matches in all group_centers.
        # NOTE(review): eval(perm) evaluates the stringified permutation
        # back into a list -- the strings are self-generated so this is
        # safe, but fragile; fit_results below is never used.
        #
        print 'Number of permutations to search: %d' %(len(self.permutations.keys()))
        fit_results={}
        big_count=0
        import sys
        for perm in self.permutations.keys():
            ref_coords=numpy.zeros([num_groups,3])
            count=0
            for group_name in eval(perm):
                ref_coords[count]=grp_centers[group_name]
                count=count+1
            #
            # Do the fit (superpose returns the RMS of the best fit)
            #
            rot_mat,trans_vec,rms,rtv,ftv=self.P.superpose(ref_coords,fit_coords)
            self.permutations[perm]=rms
            big_count=big_count+1
            streng='\b\b\b\b\b\b\b\b\b\b %8d' %big_count
            print streng,
            sys.stdout.flush()
        #
        # Get the best ten solutions
        #
        sols=[]
        rmsds=self.permutations.values()
        rmsds.sort()
        print rmsds[:10]
        for rms in rmsds:
            if rms:
                for perm in self.permutations.keys():
                    if not perm in sols:
                        if self.permutations[perm]==rms:
                            sols.append(perm)
                    if len(sols)==10:
                        break
        #
        # Print them
        #
        self.PDB_text.delete(1.0,END)
        for sol in sols:
            self.PDB_text.insert(END,'%30s %7.2f\n' %(sol,self.permutations[sol]))
        # NOTE(review): hard-coded residue triplets below look like
        # debugging output for one particular protein -- confirm they
        # should remain.
        x=[[':0035:',':0078:',':0172:'],[':0035:',':0172:',':0078:'],
           [':0078:',':0035:',':0172:'],[':0078:',':0172:',':0035:'],
           [':0172:',':0035:',':0078:'],[':0172:',':0078:',':0035:']]
        for p in x:
            self.PDB_text.insert(END,'\n%30s %7.2f\n' %(str(p),self.permutations[str(p)]))
        return
    #
    # -----
    #
    def load_matrix(self):
        """Load a matrix file"""
        import tkFileDialog, os
        filename=tkFileDialog.askopenfilename(defaultextension='.MATRIX.DAT',
                                              initialdir=os.getcwd(),
                                              parent=self.Dcontrol,
                                              filetypes=[("pKa MATRIX file","*.MATRIX.DAT"),
                                                         ("All files","*.*")])
        if filename:
            import pKaIO
            self.M=pKaIO.pKaIO()
            self.pkamatrix=self.M.read_matrix(filename)
            # Debug dump of the charged-charged interaction energies
            for res1 in self.pkamatrix.keys():
                for res2 in self.pkamatrix.keys():
                    print res1,res2,self.pkamatrix[res1][res2][0]-self.pkamatrix[res1][res2][1]-self.pkamatrix[res1][res2][2]+self.pkamatrix[res1][res2][3]
        return
    #
    # ----
    #
    def find_matrix_match(self):
        """Find a match in the energies in pkamatrix"""
        #
        # Complete distances
        #
        distmat=self.distance_matrix.copy()
        for group in distmat.keys():
            distmat[group][group]=0.0
        #
        # Number of groups in distance matrix
        #
        groups=distmat.keys()
        num_groups=len(groups)
        #
        # Make it into a Numeric matrix with energies
        # (243.4*ln(10)/distance converts a distance back to an
        # interaction energy, inverse of the relation noted below)
        #
        import math
        EM=numpy.zeros([num_groups,num_groups])
        for x in range(num_groups):
            for y in range(num_groups):
                if distmat[groups[x]][groups[y]]>0.0:
                    EM[x][y]=(243.4*math.log(10))/distmat[groups[x]][groups[y]]
                else:
                    EM[x][y]=0.0
        #distance=243.3*math.log(10)/(eps*E)
        self.PDB_text.insert(END,'REF: %30s %7.2f\n' %(groups,0.0))
        EMT = EM.transpose() # not really necessary as EM is symmetric
        print 'Energy matrix',EM
        print 'Transposed energy matrix',EMT
        #
        # Now get all the groups in the protein (TYRs are excluded)
        #
        tmp_titgrps=self.pkamatrix.keys()
        tmp_titgrps.sort()
        titgrps=[]
        for grp in tmp_titgrps:
            if not 'TYR' in grp:
                titgrps.append(grp)
        print titgrps
        permutations=self.construct_permutations(num_groups,titgrps)
        #
        # Calculate the differences between the matrices
        #
        diffs=[]
        for perm in permutations.keys():
            # print perm
            this_perm=eval(perm)
            this_matrix=numpy.zeros([len(this_perm),len(this_perm)])
            count1=0
            for res1 in this_perm:
                count2=0
                for res2 in this_perm:
                    if res1==res2:
                        this_matrix[count1][count2]=0.0
                    else:
                        # takes the average of the two interaction energies
                        this_matrix[count1][count2]=((self.pkamatrix[res1][res2][0]-self.pkamatrix[res1][res2][1]-self.pkamatrix[res1][res2][2]+self.pkamatrix[res1][res2][3])
                                                     +(self.pkamatrix[res2][res1][0]-self.pkamatrix[res2][res1][1]-self.pkamatrix[res2][res1][2]+self.pkamatrix[res2][res1][3]))/2
                        # this_matrix[count1][count2]=self.pkamatrix[res1][res2][0]-self.pkamatrix[res1][res2][1]-self.pkamatrix[res1][res2][2]+self.pkamatrix[res1][res2][3]
                        # this_matrix[count1][count2]=min((self.pkamatrix[res1][res2][0]-self.pkamatrix[res1][res2][1]-self.pkamatrix[res1][res2][2]+self.pkamatrix[res1][res2][3]),(self.pkamatrix[res2][res1][0]-self.pkamatrix[res2][res1][1]-self.pkamatrix[res2][res1][2]+self.pkamatrix[res2][res1][3]))
                    count2=count2+1
                count1=count1+1
            #
            # Get the difference (sum of absolute deviations)
            #
            diff=EM-this_matrix
            sum=0.0
            for x in diff:
                for y in x:
                    sum=sum+abs(y)
            #
            # Calculate Frobenius inner product
            # (actually the Frobenius norm of the difference matrix)
            #
            product = numpy.dot(EMT,this_matrix)
            # normThis_matrix = math.sqrt(numpy.trace(numpy.dot(this_matrix.transpose(),this_matrix)))
            Frobenius = math.sqrt(numpy.trace(numpy.dot(diff.transpose(),diff)))
            #Frobenius = numpy.trace(product)
            diffs.append([Frobenius,sum,perm])
        #
        # Sort and report (best = smallest Frobenius norm first)
        #
        diffs.sort()
        #diffs.reverse()
        for fro,sum,sol in diffs:#[:10]:
            self.PDB_text.insert(END,'%30s %7.2f %7.2f\n' %(sol,sum,fro))
        self.PDB_text.insert(END,'Number of permutations: %d' %(len(permutations.keys())))
        return
    #
    # ----
    #
    def find_match_ineraction_with_specific_residue(self):
        """Rank all groups in the pKa matrix by how closely their average
        interaction energy with the chosen residue matches the single
        fitted pairwise energy. Only valid when exactly two titrating
        groups were fitted.

        NOTE(review): the bare except below silently returns when no
        matrix has been loaded (self.pkamatrix missing) -- it also hides
        any other error; confirm this is intentional."""
        if not len(self.distance_matrix.keys()) == 2:
            print 'Method can only be used when two titrating groups are fitted'
            return
        groups = self.distance_matrix.keys()
        print 'groups ',groups
        this_energy = (243.4*math.log(10))/self.distance_matrix[groups[0]][groups[1]]
        this_res = self.specific_residue.get()
        try:
            print 'all ',self.pkamatrix.keys()
            all_groups = self.pkamatrix.keys()
        except:
            return
        if not this_res in all_groups:
            print 'residue',this_res,' not found'
            return
        diffs = []
        for g in all_groups:
            E1 = self.pkamatrix[g][this_res][0]-self.pkamatrix[g][this_res][1]-self.pkamatrix[g][this_res][2]+self.pkamatrix[g][this_res][3]
            E2 = self.pkamatrix[this_res][g][0]-self.pkamatrix[this_res][g][1]-self.pkamatrix[this_res][g][2]+self.pkamatrix[this_res][g][3]
            E=(E1+E2)/2
            print 'E',E,'E1',E1,'E2',E2, 'this_energy',this_energy
            diff = abs(this_energy-E)
            diffs.append([diff,g,E])
        diffs.sort()
        print diffs
        self.PDB_text.insert(END,'Residue, Energy, Diff\n')
        for diff,group,E in diffs[:100]:
            self.PDB_text.insert(END,'%10s %7.2f %7.2f\n' %(group,E,diff))
        return
    def construct_permutations(self,permutation_size,choices):
        """Construct all permutations of a combination of <permutation_size> items of <choices>"""
        print 'Construct all permutations of a combination of %d items of %d' %(permutation_size,len(choices))
        permutations={}
        # x[level] is the list of choices at each position; count is a
        # mixed-radix counter enumerating every index combination.
        x=[]
        count=[]
        for level in range(permutation_size):
            x.append(choices)
            count.append(0)
        done=None
        while not done:
            this_perm=[]
            #
            # Construct this permutation
            #
            for pos in range(len(count)):
                c_value=count[pos]
                if not x[pos][c_value] in this_perm:
                    this_perm.append(x[pos][c_value])
            #
            # Is this a valid permutation? (all elements distinct)
            #
            if len(this_perm)==permutation_size:
                this_perm=str(this_perm)
                if not permutations.has_key(this_perm):
                    permutations[this_perm]=None
            #
            # Increment count (with carry into the next position)
            #
            count[0]=count[0]+1
            for pos in range(len(count)):
                if count[pos]==len(x[pos]):
                    if pos+1==len(count):
                        done=1
                        break
                    count[pos+1]=count[pos+1]+1
                    count[pos]=0
        return permutations
#
# ------
#
class distance_geometry:
    """Classical distance-geometry embedding.

    D_matrix is a symmetric dict-of-dicts where D_matrix[A][B] is the
    distance between groups A and B. The workflow is: triangle-smooth
    the distances, build the metric (Gram) matrix, then embed every
    group in 3D using the eigenvectors of the three largest
    eigenvalues.
    """
    def __init__(self,D_matrix):
        """Store the distance matrix and fill in the self-distances."""
        self.D_matrix=D_matrix
        self.complete_matrix()
        print(self.D_matrix)
    def complete_matrix(self):
        """Fill in a distance of 0 for each group with itself."""
        for group in self.D_matrix.keys():
            self.D_matrix[group][group]=0.0
    def do_triangle_smoothing(self):
        """Make the distances mutually consistent.

        For every ordered triple (A,B,C) the distance AC is clamped
        into the triangle-inequality interval [AB-BC, AB+BC]; full
        passes repeat until nothing changes. Returns the distance
        matrix (which is also modified in place).

        Bug fix: the original code never set the `changed` flag, so the
        clamped AC values were never written back and smoothing was a
        no-op; the flag is now set whenever AC is adjusted.
        """
        anychange=1
        count=1
        while anychange:
            anychange=None
            for A in self.D_matrix.keys():
                for B in self.D_matrix.keys():
                    if B==A:
                        continue
                    for C in self.D_matrix.keys():
                        if C==B or C==A:
                            continue
                        # Flag for changes to this triple
                        changed=None
                        # Get the three pairwise distances
                        AB=self.D_matrix[A][B]
                        AC=self.D_matrix[A][C]
                        BC=self.D_matrix[B][C]
                        # Upper bound: AC cannot be bigger than AB+BC
                        if AC>AB+BC:
                            AC=AB+BC
                            changed=1
                        # Lower bound: AC must be at least AB-BC
                        if AC<AB-BC:
                            AC=AB-BC
                            changed=1
                        # Write the adjusted values back symmetrically
                        if changed:
                            self.D_matrix[A][B]=AB
                            self.D_matrix[B][A]=AB
                            self.D_matrix[A][C]=AC
                            self.D_matrix[C][A]=AC
                            self.D_matrix[B][C]=BC
                            self.D_matrix[C][B]=BC
                            anychange=1
            print('Triangle smoothing round %3d done' %count)
            count=count+1
        print('Triangle smoothing converged')
        print(self.D_matrix)
        print('')
        return self.D_matrix
    def construct_metric_matrix(self):
        """Construct the metric (Gram) matrix and embed the groups in 3D.

        Returns {group: {'X':..,'Y':..,'Z':..}} built from the
        eigenvectors belonging to the three largest eigenvalues.
        Requires at least three groups (done[2] is indexed below),
        exactly as in the original implementation.
        """
        import math
        # Stable row/column order for the matrix; sorted() instead of
        # list.sort() so this also works where dict.keys() is a view.
        groups=sorted(self.D_matrix.keys())
        # Squared distance of each group to the centre of coordinates
        centre_dist_sq={}
        for A in groups:
            # term1: average squared distance from A to every group
            sq=[]
            for B in groups:
                sq.append(math.pow(self.D_matrix[A][B],2))
            term1=average(sq)
            # term2: sum of all unordered-pair squared distances / N^2
            term2=[]
            for x in range(1,len(groups)):
                for y in range(0,x):
                    term2.append(math.pow(self.D_matrix[groups[x]][groups[y]],2))
            print(term2)
            total=0.0
            for i in term2:
                total=total+i
            term2=total/float(math.pow(len(groups),2))
            print(term2)
            centre_dist_sq[A]=term1-term2
            print('Distance to centre %s %s' %(A,centre_dist_sq[A]))
        # Metric matrix: G[i][j] = (d_i0^2 + d_j0^2 - d_ij^2)/2
        numgroups=len(groups)
        metric=numpy.zeros([numgroups,numgroups])
        for x in range(len(groups)):
            for y in range(len(groups)):
                A=groups[x]
                B=groups[y]
                metric[x][y]=(centre_dist_sq[A]+centre_dist_sq[B]-pow(self.D_matrix[A][B],2.0))/2.0
        print('Metric matrix')
        print(metric)
        print('')
        vals,vecs=numpy.linalg.eig(metric)
        print('')
        print('Eigenvalues')
        print(vals)
        print('Eigenvectors')
        print(vecs)
        # Indices of the three largest eigenvalues
        l=[]
        for val in vals:
            l.append(val)
        l.sort()
        l.reverse()
        done=[]
        for best_val in l[:3]:
            for x in range(len(vals)):
                if best_val==vals[x] and not x in done:
                    done.append(x)
        print(done)
        # Coordinate = eigenvector component scaled by sqrt(|eigenvalue|)
        count=0
        coords={}
        for group in groups:
            coords[group]={}
            coords[group]['X']=vecs[count][done[0]]*math.sqrt(abs(l[0]))
            coords[group]['Y']=vecs[count][done[1]]*math.sqrt(abs(l[1]))
            coords[group]['Z']=vecs[count][done[2]]*math.sqrt(abs(l[2]))
            print('%s %5.2f %5.2f %5.2f' %(group,coords[group]['X'],coords[group]['Y'],coords[group]['Z']))
            count=count+1
        return coords
# Some api in the chain is translating the keystrokes to this octal string
# so instead of saying: ESCAPE = 27, we use the following.
ESCAPE = '\033'
# Number of the glut window.
window = 0
# Rotation angle for the triangle.
rtri = 0.0
# Rotation angle for the quadrilateral.
rquad = 0.0
# NOTE(review): rtri/rquad appear to be leftovers from the GLUT demo
# this viewer was based on; they are not referenced in the visible code.
class OGL(Frame):
    def __init__(self,coords,parent):
        """Store animation state and hand the coordinates to main().

        parent is the distance_optimisation instance (its Tk event loop
        is pumped from the GL draw callback).
        NOTE(review): Frame.__init__ is never called even though OGL
        subclasses Frame -- presumably intentional since the Tk part
        below is commented out; confirm before relying on widget
        behaviour.
        """
        self.parent=parent
        #
        # Vars
        #
        self.x_center=1
        self.y_center=1
        self.z_center=1
        self.count=0                     # animation step counter
        self.count_add=-1                # per-step counter increment (sign flips at limits)
        self.add=numpy.array([0,0,-1])   # per-frame translation applied to every sphere
        #
        #
        self.main(coords) # draw the spheres
        #self.sphere_center=numpy.array([-1.5,0.0,-6.0])
        #self.add2=-numpy.array([0,0,0.1])
        #
        # Open a tkinter window
        #
        #self.geom_control=Toplevel()
        #self.geom_control.title('Tkinter')
        #self.geom_control.geometry('+700+100')
        #self.X=DoubleVar()
        #self.X.set(-1.5)
        #Scale(self.geom_control,variable=self.X,from_=-10,to=10,resolution=0.1).grid(row=0,column=0)
        #Button(self.geom_control,text='Quit',command=self.exit_application).grid(row=1,column=0)
        return
#
# ----
#
    def exit_application(self,event=None):
        """Quit the application"""
        # NOTE(review): self.geom_control is only created in the
        # commented-out block of __init__, so this would currently raise
        # AttributeError -- confirm whether this handler is reachable.
        self.geom_control.destroy()
        import time
        time.sleep(1)
        glutDestroyWindow(window) #This apparently destroys everything...
        return
#
# ------
#
    def DrawGLScene(self):
        """GLUT draw callback: translate every sphere by self.add, draw
        them, swap buffers, then pump the parent's Tk event loop.

        NOTE(review): relies on GL/GLUT names (glClear, glutSwapBuffers,
        ...) being available at module level -- no OpenGL import is
        visible in this file; confirm the embedding app provides them.
        """
        #
        # The main drawing function.
        #
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) # Clear The Screen And The Depth Buffer
        #
        # Colours (one fixed colour per sphere, up to six spheres)
        #
        cols=[[1,0,0],
              [0,1,0],
              [0,0,1],
              [1,1,0],
              [1,0,1],
              [0,1,1]]
        #
        # Draw the spheres
        #
        count=0
        for group in self.coords.keys():
            self.coords[group]=self.coords[group]+self.add
            center=self.coords[group]
            self.count=self.count+self.count_add
            self.sphere(center,0.1,cols[count])
            count=count+1
        #
        # Update counter: bounce the animation direction at the limits
        #
        if self.count<-70 or self.count>0:
            self.count_add=-self.count_add
            self.add=-1*self.add
        #
        # since this is double buffered, swap the buffers to display what just got drawn.
        #
        glutSwapBuffers()
        #
        # Sleep (throttles the frame rate to ~10 fps)
        #
        import time
        time.sleep(0.1)
        #
        # Update Tkinter
        #
        self.parent.Dcontrol.update()
        return
#
# -----
#
# A general OpenGL initialization function. Sets all of the initial parameters.
def InitGL(self,Width, Height): # We call this right after our OpenGL window is created.
glClearColor(0.0, 0.0, 0.0, 0.0) # This Will Clear The Background Color To Black
glClearDepth(1.0) # Enables Clearing Of The Depth Buffer
glDepthFunc(GL_LESS) # The Type Of Depth Test To Do
glEnable(GL_DEPTH_TEST) # Enables Depth Testing
glShadeModel(GL_SMOOTH) # Enables Smooth Color Shading
glMatrixMode(GL_PROJECTION)
glLoadIdentity() # Reset The Projection Matrix
# Calculate The Aspect Ratio Of The Window
gluPerspective(45.0, float(Width)/float(Height), 0.1, 100.0)
glMatrixMode(GL_MODELVIEW)
return
# The function called when our window is resized (which shouldn't happen if you enable fullscreen, below)
def ReSizeGLScene(self,Width, Height):
if Height == 0: # Prevent A Divide By Zero If The Window Is Too Small
Height = 1
glViewport(0, 0, Width, Height) # Reset The Current Viewport And Perspective Transformation
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(45.0, float(Width)/float(Height), 0.1, 100.0)
glMatrixMode(GL_MODELVIEW)
return
#
# ----
#
def sphere(self,position,radius,rgb):
glLoadIdentity() # Reset The View
glTranslatef(position[0],position[1],position[2]) # Move to the position
quadratic=gluNewQuadric()
glColor3f(rgb[0],rgb[1],rgb[2]) # Set The Color To Blue
gluSphere(quadratic,radius,100,100)
return
# The function called whenever a key is pressed. Note the use of Python tuples to pass in: (key, x, y)
def keyPressed(self,*args):
# If escape is pressed, kill everything.
if args[0] == ESCAPE:
sys.exit()
def main(self,coords):
"""Set up everything"""
self.coords={}
for group in coords.keys():
self.coords[group]=numpy.array([coords[group]['X'],coords[group]['Y'],coords[group]['Z']])
#
#
#
global window
glutInit(sys.argv)
# Select type of Display mode:
# Double buffer
# RGBA color
# Alpha components supported
# Depth buffer
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH)
# get a 640 x 480 window
glutInitWindowSize(640, 480)
# the window starts at the upper left corner of the screen
glutInitWindowPosition(0, 0)
# Okay, like the C version we retain the window id to use when closing, but for those of you new
# to Python (like myself), remember this assignment would make the variable local and not global
# if it weren't for the global declaration at the start of main.
window = glutCreateWindow("3D representation")
# Register the drawing function with glut, BUT in Python land, at least using PyOpenGL, we need to
# set the function pointer and invoke a function to actually register the callback, otherwise it
# would be very much like the C version of the code.
glutDisplayFunc(self.DrawGLScene)
# Uncomment this line to get full screen.
# glutFullScreen()
# When we are doing nothing, redraw the scene.
glutIdleFunc(self.DrawGLScene)
# Register the function called when our window is resized.
glutReshapeFunc(self.ReSizeGLScene)
# Register the function called when the keyboard is pressed.
glutKeyboardFunc(self.keyPressed)
# Initialize our window.
self.InitGL(640, 480)
return
#
# ---------
# | PypiClean |
/django3_auth_saml2-0.6.1-py3-none-any.whl/django3_auth_saml2/backends.py | from django.contrib.auth.models import User
from django.contrib.auth.backends import RemoteUserBackend
from django.core.handlers.wsgi import WSGIRequest
from saml2.response import AuthnResponse
class SAML2DottedEmailUserBackend(RemoteUserBackend):
    """
    Backend for deployments where the SAML2 User name is the email address
    (the django3_auth_saml2 package uses the email address as the username
    by default).  On user creation the email field is filled in from the
    username, and the first/last name fields are derived as well -- but
    only when the username looks like "[email protected]".
    """

    def configure_user(self, request, user: User):
        """Populate email (and name fields, when derivable) on a new User."""
        username = user.username
        user.email = username
        local_part = username.partition('@')[0]
        name_parts = local_part.split('.')
        if len(name_parts) == 2:
            user.first_name, user.last_name = (part.title() for part in name_parts)
        user.save()
        return super().configure_user(request, user)
class SAML2AttrUserBackend(RemoteUserBackend):
    """
    Do not use email as the User name. Use the SAML2 attributes to configure
    the username, first name, and last name values. This presumes that the
    SAML2 SSO system has been setup to provide the attributes:

        * first_name
        * last_name
        * email

    The User name will be set to <first_name>.<last_name> in lower-case.
    """

    def authenticate(self, request: WSGIRequest, remote_user: str) -> User:
        """
        Formulate the User name as "<first_name>.<last_name>" from the
        SAML2 identity attributes, then defer to the standard remote-user
        authentication flow.

        Raises PermissionError when either name attribute is missing from
        the SAML2 response.
        """
        saml2_auth_resp: AuthnResponse = request.META['SAML2_AUTH_RESPONSE']
        user_ident = saml2_auth_resp.get_identity()
        # Keep the try block narrow: only the attribute lookups may raise
        # KeyError here.  Previously super().authenticate() was inside the
        # try as well, so a KeyError raised anywhere in the auth pipeline
        # was misreported as a missing SAML2 attribute.
        try:
            first_name = user_ident['first_name'][0].lower()
            last_name = user_ident['last_name'][0].lower()
        except KeyError as exc:
            missing_attr = exc.args[0]
            be_name = self.__class__.__name__
            raise PermissionError(
                f"SAML2 backend {be_name} missing attribute: {missing_attr}"
            ) from exc
        remote_user = f"{first_name}.{last_name}"
        return super().authenticate(request, remote_user)

    def configure_user(self, request: WSGIRequest, user: User) -> User:
        """
        Called only when a new User is created.  Uses the SAML2 identity to
        configure additional properties of the User:

            * first_name
            * last_name
            * email

        Raises PermissionError when the email attribute is missing.
        """
        saml2_auth_resp: AuthnResponse = request.META['SAML2_AUTH_RESPONSE']
        user_ident = saml2_auth_resp.get_identity()
        # The username was built as "first.last" by authenticate() above.
        user.first_name, user.last_name = map(str.title, user.username.split('.'))
        try:
            user.email = user_ident['email'][0]
        except KeyError as exc:
            missing_attr = exc.args[0]
            be_name = self.__class__.__name__
            raise PermissionError(
                f"SAML2 backend {be_name} missing attribute: {missing_attr}"
            ) from exc
        user.save()
        return user
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/request/AlipayOpenMiniInnerversionGrayRollbackRequest.py | import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayOpenMiniInnerversionGrayRollbackModel import AlipayOpenMiniInnerversionGrayRollbackModel
class AlipayOpenMiniInnerversionGrayRollbackRequest(object):
    """Request object for the 'alipay.open.mini.innerversion.gray.rollback'
    OpenAPI call.

    Holds the business payload (biz_model / biz_content) plus the common
    gateway parameters (version, terminal info, callback URLs, user-defined
    extras) and renders them into the flat dict expected by the gateway.
    """

    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Accept either a ready model instance or a plain dict that can be
        # converted into one.
        if isinstance(value, AlipayOpenMiniInnerversionGrayRollbackModel):
            self._biz_content = value
        else:
            self._biz_content = AlipayOpenMiniInnerversionGrayRollbackModel.from_alipay_dict(value)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Silently ignore anything that is not a dict (existing SDK behaviour).
        if not isinstance(value, dict):
            return
        self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach an extra free-form text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def get_params(self):
        """Render the request into the flat parameter dict sent to the gateway."""
        params = dict()
        params[P_METHOD] = 'alipay.open.mini.innerversion.gray.rollback'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        # The simple pass-through parameters all follow the same pattern, so
        # copy them in one loop instead of five hand-written if-blocks.
        for attr in ('terminal_type', 'terminal_info', 'prod_code', 'notify_url', 'return_url'):
            value = getattr(self, attr)
            if value:
                params[attr] = value
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """This API takes no file parameters; always returns an empty dict."""
        multipart_params = dict()
        return multipart_params
/django-mdeditor-0.1.20.tar.gz/django-mdeditor-0.1.20/mdeditor/static/mdeditor/js/lib/codemirror/addon/hint/show-hint.js |
// UMD wrapper: attach to CommonJS / AMD module systems, or the global CodeMirror.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
  "use strict";
  // CSS class names for hint list entries and the currently selected entry.
  var HINT_ELEMENT_CLASS        = "CodeMirror-hint";
  var ACTIVE_HINT_ELEMENT_CLASS = "CodeMirror-hint-active";
  // This is the old interface, kept around for now to stay
  // backwards-compatible: forwards to the cm.showHint extension below.
  CodeMirror.showHint = function(cm, getHints, options) {
    if (!getHints) return cm.showHint(options);
    if (options && options.async) getHints.async = true;
    var newOpts = {hint: getHints};
    if (options) for (var prop in options) newOpts[prop] = options[prop];
    return cm.showHint(newOpts);
  };
  // Monotonic id used to discard results of superseded async hint requests.
  var asyncRunID = 0;
  // Invoke a hint getter, supporting both synchronous getters and async
  // (callback-style) ones; `then` receives the hints.  Stale async answers
  // (a newer request was started meanwhile) are silently dropped.
  function retrieveHints(getter, cm, options, then) {
    if (getter.async) {
      var id = ++asyncRunID;
      getter(cm, function(hints) {
        if (asyncRunID == id) then(hints);
      }, options);
    } else {
      then(getter(cm, options));
    }
  }
  // Entry point: cm.showHint(options) starts a completion session.
  CodeMirror.defineExtension("showHint", function(options) {
    // We want a single cursor position.
    if (this.listSelections().length > 1 || this.somethingSelected()) return;
    if (this.state.completionActive) this.state.completionActive.close();
    var completion = this.state.completionActive = new Completion(this, options);
    var getHints = completion.options.hint;
    if (!getHints) return;
    CodeMirror.signal(this, "startCompletion", this);
    return retrieveHints(getHints, this, completion.options, function(hints) { completion.showHints(hints); });
  });
function Completion(cm, options) {
this.cm = cm;
this.options = this.buildOptions(options);
this.widget = this.onClose = null;
}
Completion.prototype = {
close: function() {
if (!this.active()) return;
this.cm.state.completionActive = null;
if (this.widget) this.widget.close();
if (this.onClose) this.onClose();
CodeMirror.signal(this.cm, "endCompletion", this.cm);
},
active: function() {
return this.cm.state.completionActive == this;
},
pick: function(data, i) {
var completion = data.list[i];
if (completion.hint) completion.hint(this.cm, data, completion);
else this.cm.replaceRange(getText(completion), completion.from || data.from,
completion.to || data.to, "complete");
CodeMirror.signal(data, "pick", completion);
this.close();
},
showHints: function(data) {
if (!data || !data.list.length || !this.active()) return this.close();
if (this.options.completeSingle && data.list.length == 1)
this.pick(data, 0);
else
this.showWidget(data);
},
showWidget: function(data) {
this.widget = new Widget(this, data);
CodeMirror.signal(data, "shown");
var debounce = 0, completion = this, finished;
var closeOn = this.options.closeCharacters;
var startPos = this.cm.getCursor(), startLen = this.cm.getLine(startPos.line).length;
var requestAnimationFrame = window.requestAnimationFrame || function(fn) {
return setTimeout(fn, 1000/60);
};
var cancelAnimationFrame = window.cancelAnimationFrame || clearTimeout;
function done() {
if (finished) return;
finished = true;
completion.close();
completion.cm.off("cursorActivity", activity);
if (data) CodeMirror.signal(data, "close");
}
function update() {
if (finished) return;
CodeMirror.signal(data, "update");
retrieveHints(completion.options.hint, completion.cm, completion.options, finishUpdate);
}
function finishUpdate(data_) {
data = data_;
if (finished) return;
if (!data || !data.list.length) return done();
if (completion.widget) completion.widget.close();
completion.widget = new Widget(completion, data);
}
function clearDebounce() {
if (debounce) {
cancelAnimationFrame(debounce);
debounce = 0;
}
}
function activity() {
clearDebounce();
var pos = completion.cm.getCursor(), line = completion.cm.getLine(pos.line);
if (pos.line != startPos.line || line.length - pos.ch != startLen - startPos.ch ||
pos.ch < startPos.ch || completion.cm.somethingSelected() ||
(pos.ch && closeOn.test(line.charAt(pos.ch - 1)))) {
completion.close();
} else {
debounce = requestAnimationFrame(update);
if (completion.widget) completion.widget.close();
}
}
this.cm.on("cursorActivity", activity);
this.onClose = done;
},
buildOptions: function(options) {
var editor = this.cm.options.hintOptions;
var out = {};
for (var prop in defaultOptions) out[prop] = defaultOptions[prop];
if (editor) for (var prop in editor)
if (editor[prop] !== undefined) out[prop] = editor[prop];
if (options) for (var prop in options)
if (options[prop] !== undefined) out[prop] = options[prop];
return out;
}
};
function getText(completion) {
if (typeof completion == "string") return completion;
else return completion.text;
}
function buildKeyMap(completion, handle) {
var baseMap = {
Up: function() {handle.moveFocus(-1);},
Down: function() {handle.moveFocus(1);},
PageUp: function() {handle.moveFocus(-handle.menuSize() + 1, true);},
PageDown: function() {handle.moveFocus(handle.menuSize() - 1, true);},
Home: function() {handle.setFocus(0);},
End: function() {handle.setFocus(handle.length - 1);},
Enter: handle.pick,
Tab: handle.pick,
Esc: handle.close
};
var custom = completion.options.customKeys;
var ourMap = custom ? {} : baseMap;
function addBinding(key, val) {
var bound;
if (typeof val != "string")
bound = function(cm) { return val(cm, handle); };
// This mechanism is deprecated
else if (baseMap.hasOwnProperty(val))
bound = baseMap[val];
else
bound = val;
ourMap[key] = bound;
}
if (custom)
for (var key in custom) if (custom.hasOwnProperty(key))
addBinding(key, custom[key]);
var extra = completion.options.extraKeys;
if (extra)
for (var key in extra) if (extra.hasOwnProperty(key))
addBinding(key, extra[key]);
return ourMap;
}
function getHintElement(hintsElement, el) {
while (el && el != hintsElement) {
if (el.nodeName.toUpperCase() === "LI" && el.parentNode == hintsElement) return el;
el = el.parentNode;
}
}
function Widget(completion, data) {
this.completion = completion;
this.data = data;
var widget = this, cm = completion.cm;
var hints = this.hints = document.createElement("ul");
hints.className = "CodeMirror-hints";
this.selectedHint = data.selectedHint || 0;
var completions = data.list;
for (var i = 0; i < completions.length; ++i) {
var elt = hints.appendChild(document.createElement("li")), cur = completions[i];
var className = HINT_ELEMENT_CLASS + (i != this.selectedHint ? "" : " " + ACTIVE_HINT_ELEMENT_CLASS);
if (cur.className != null) className = cur.className + " " + className;
elt.className = className;
if (cur.render) cur.render(elt, data, cur);
else elt.appendChild(document.createTextNode(cur.displayText || getText(cur)));
elt.hintId = i;
}
var pos = cm.cursorCoords(completion.options.alignWithWord ? data.from : null);
var left = pos.left, top = pos.bottom, below = true;
hints.style.left = left + "px";
hints.style.top = top + "px";
// If we're at the edge of the screen, then we want the menu to appear on the left of the cursor.
var winW = window.innerWidth || Math.max(document.body.offsetWidth, document.documentElement.offsetWidth);
var winH = window.innerHeight || Math.max(document.body.offsetHeight, document.documentElement.offsetHeight);
(completion.options.container || document.body).appendChild(hints);
var box = hints.getBoundingClientRect(), overlapY = box.bottom - winH;
if (overlapY > 0) {
var height = box.bottom - box.top, curTop = pos.top - (pos.bottom - box.top);
if (curTop - height > 0) { // Fits above cursor
hints.style.top = (top = pos.top - height) + "px";
below = false;
} else if (height > winH) {
hints.style.height = (winH - 5) + "px";
hints.style.top = (top = pos.bottom - box.top) + "px";
var cursor = cm.getCursor();
if (data.from.ch != cursor.ch) {
pos = cm.cursorCoords(cursor);
hints.style.left = (left = pos.left) + "px";
box = hints.getBoundingClientRect();
}
}
}
var overlapX = box.right - winW;
if (overlapX > 0) {
if (box.right - box.left > winW) {
hints.style.width = (winW - 5) + "px";
overlapX -= (box.right - box.left) - winW;
}
hints.style.left = (left = pos.left - overlapX) + "px";
}
cm.addKeyMap(this.keyMap = buildKeyMap(completion, {
moveFocus: function(n, avoidWrap) { widget.changeActive(widget.selectedHint + n, avoidWrap); },
setFocus: function(n) { widget.changeActive(n); },
menuSize: function() { return widget.screenAmount(); },
length: completions.length,
close: function() { completion.close(); },
pick: function() { widget.pick(); },
data: data
}));
if (completion.options.closeOnUnfocus) {
var closingOnBlur;
cm.on("blur", this.onBlur = function() { closingOnBlur = setTimeout(function() { completion.close(); }, 100); });
cm.on("focus", this.onFocus = function() { clearTimeout(closingOnBlur); });
}
var startScroll = cm.getScrollInfo();
cm.on("scroll", this.onScroll = function() {
var curScroll = cm.getScrollInfo(), editor = cm.getWrapperElement().getBoundingClientRect();
var newTop = top + startScroll.top - curScroll.top;
var point = newTop - (window.pageYOffset || (document.documentElement || document.body).scrollTop);
if (!below) point += hints.offsetHeight;
if (point <= editor.top || point >= editor.bottom) return completion.close();
hints.style.top = newTop + "px";
hints.style.left = (left + startScroll.left - curScroll.left) + "px";
});
CodeMirror.on(hints, "dblclick", function(e) {
var t = getHintElement(hints, e.target || e.srcElement);
if (t && t.hintId != null) {widget.changeActive(t.hintId); widget.pick();}
});
CodeMirror.on(hints, "click", function(e) {
var t = getHintElement(hints, e.target || e.srcElement);
if (t && t.hintId != null) {
widget.changeActive(t.hintId);
if (completion.options.completeOnSingleClick) widget.pick();
}
});
CodeMirror.on(hints, "mousedown", function() {
setTimeout(function(){cm.focus();}, 20);
});
CodeMirror.signal(data, "select", completions[0], hints.firstChild);
return true;
}
Widget.prototype = {
close: function() {
if (this.completion.widget != this) return;
this.completion.widget = null;
this.hints.parentNode.removeChild(this.hints);
this.completion.cm.removeKeyMap(this.keyMap);
var cm = this.completion.cm;
if (this.completion.options.closeOnUnfocus) {
cm.off("blur", this.onBlur);
cm.off("focus", this.onFocus);
}
cm.off("scroll", this.onScroll);
},
pick: function() {
this.completion.pick(this.data, this.selectedHint);
},
changeActive: function(i, avoidWrap) {
if (i >= this.data.list.length)
i = avoidWrap ? this.data.list.length - 1 : 0;
else if (i < 0)
i = avoidWrap ? 0 : this.data.list.length - 1;
if (this.selectedHint == i) return;
var node = this.hints.childNodes[this.selectedHint];
node.className = node.className.replace(" " + ACTIVE_HINT_ELEMENT_CLASS, "");
node = this.hints.childNodes[this.selectedHint = i];
node.className += " " + ACTIVE_HINT_ELEMENT_CLASS;
if (node.offsetTop < this.hints.scrollTop)
this.hints.scrollTop = node.offsetTop - 3;
else if (node.offsetTop + node.offsetHeight > this.hints.scrollTop + this.hints.clientHeight)
this.hints.scrollTop = node.offsetTop + node.offsetHeight - this.hints.clientHeight + 3;
CodeMirror.signal(this.data, "select", this.data.list[this.selectedHint], node);
},
screenAmount: function() {
return Math.floor(this.hints.clientHeight / this.hints.firstChild.offsetHeight) || 1;
}
};
CodeMirror.registerHelper("hint", "auto", function(cm, options) {
var helpers = cm.getHelpers(cm.getCursor(), "hint"), words;
if (helpers.length) {
for (var i = 0; i < helpers.length; i++) {
var cur = helpers[i](cm, options);
if (cur && cur.list.length) return cur;
}
} else if (words = cm.getHelper(cm.getCursor(), "hintWords")) {
if (words) return CodeMirror.hint.fromList(cm, {words: words});
} else if (CodeMirror.hint.anyword) {
return CodeMirror.hint.anyword(cm, options);
}
});
CodeMirror.registerHelper("hint", "fromList", function(cm, options) {
var cur = cm.getCursor(), token = cm.getTokenAt(cur);
var found = [];
for (var i = 0; i < options.words.length; i++) {
var word = options.words[i];
if (word.slice(0, token.string.length) == token.string)
found.push(word);
}
if (found.length) return {
list: found,
from: CodeMirror.Pos(cur.line, token.start),
to: CodeMirror.Pos(cur.line, token.end)
};
});
CodeMirror.commands.autocomplete = CodeMirror.showHint;
var defaultOptions = {
hint: CodeMirror.hint.auto,
completeSingle: true,
alignWithWord: true,
closeCharacters: /[\s()\[\]{};:>,]/,
closeOnUnfocus: true,
completeOnSingleClick: false,
container: null,
customKeys: null,
extraKeys: null
};
CodeMirror.defineOption("hintOptions", null);
}); | PypiClean |
/raptus.article.table-2.0b14.zip/raptus.article.table-2.0b14/raptus/article/table/browser/configlet.py | import transaction
from Acquisition import aq_inner
from Products.Five.browser import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.ZCatalog.ZCatalog import ZCatalog
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone import PloneMessageFactory as _p
from Products.statusmessages.interfaces import IStatusMessage
from raptus.article.core import RaptusArticleMessageFactory as _
from raptus.article.table.interfaces import IDefinitions, IStyles, ITable
from raptus.article.table.utils import parseColumn
class Configlet(BrowserView):
    """Control panel view to manage the table definitions stored on the
    portal: adds, updates and removes definitions, then renders the
    configlet template.
    """
    template = ViewPageTemplateFile('configlet.pt')

    def __call__(self):
        """Handle an optional save request, then prepare the template data."""
        self.request.set('disable_border', True)
        self.styles = IStyles(self.context).styles()
        self._definitions = IDefinitions(self.context)
        if self.request.form.has_key('raptus_article_table_save'):
            self.setProperties()
        self.definitions = []
        raw_definitions = self._definitions.getAvailableDefinitions()
        for name, definition in raw_definitions.iteritems():
            # Always offer at least one (empty) column row for editing.
            if not len(definition['columns']) or self.request.form.has_key('definition_columns_%s_add_column' % definition['name']):
                definition['columns'].append({})
            definition['blocked'] = self.checkBlocked(name)
            definition['id'] = name
            self.definitions.append(definition)
        self.new_definition = {
            'name': self.request.form.get('new_definition_name', ''),
            'columns': self.request.form.get('new_definition_columns', []),
            'style': self.request.form.get('new_definition_style', '')
        }
        if not len(self.new_definition['columns']) or self.request.form.has_key('new_definition_columns_add_column'):
            self.new_definition['columns'].append({})
        return self.template()

    def _formatColumns(self, columns):
        """Return copies of the submitted columns, skipping unnamed ones
        and those marked for deletion."""
        formatted = []
        for column in columns:
            if not column['name'].strip() or column.get('delete', 0):
                continue
            column = column.copy()
            formatted.append(column)
        return formatted

    def setProperties(self):
        """Apply the submitted form: add a new definition, then update or
        remove the existing ones.  On a parse failure the transaction is
        aborted and an error message is shown."""
        new = self.request.form.get('new_definition', None)
        columns = self.request.form.get('new_definition_columns', [])
        error = 0
        if new and new['name']:
            try:
                self._definitions.addDefinition(new['name'], new['style'], self._formatColumns(columns))
            except Exception:
                # A bare "except:" would also swallow KeyboardInterrupt and
                # SystemExit; catch Exception instead.
                transaction.abort()
                error = _(u'Unable to parse the columns field of the definition to be added')
        modify = self.request.form.get('definitions', [])[:]
        for definition in modify:
            if definition.has_key('delete'):
                # Removed definitions need no further processing (the
                # original checked has_key('delete') twice in a row).
                self._definitions.removeDefinition(definition['id'])
                continue
            try:
                columns = self.request.form.get('definition_columns_%s' % definition['origname'], [])
                self._definitions.addDefinition(definition['name'], definition['style'], self._formatColumns(columns), definition['id'])
            except Exception:
                transaction.abort()
                error = _(u'Unable to parse the columns field of one of the definitions to be modified')
        statusmessage = IStatusMessage(self.request)
        if error:
            statusmessage.addStatusMessage(error, 'error')
        else:
            statusmessage.addStatusMessage(_p(u'Changes saved.'), 'info')

    def checkBlocked(self, definition):
        """Return True if any table already uses this definition, in which
        case the user may not modify it.
        """
        catalog = getToolByName(self.context, 'portal_catalog')
        return len(catalog.unrestrictedSearchResults(object_provides=ITable.__identifier__, getDefinition=definition)) > 0
/creme-crm-2.4.4.tar.gz/creme-crm-2.4.4/mediagenerator/filters/i18n.py | from hashlib import sha1
from django.apps import apps
from django.conf import settings
from django.http import HttpRequest
from django.utils import translation
from django.utils.encoding import smart_str
from django.views.i18n import JavaScriptCatalog
from mediagenerator.generators.bundles.base import Filter
# Languages to generate catalogs for: every configured language when i18n is
# enabled, otherwise just the single default language code.
if settings.USE_I18N:
    LANGUAGES = [code for code, _ in settings.LANGUAGES]
else:
    LANGUAGES = (settings.LANGUAGE_CODE,)
class I18N(Filter):
    """Media generator filter emitting a JavaScript translation catalog
    (plus LANGUAGE_CODE / LANGUAGE_BIDI globals and the hgettext/hngettext
    helpers) for every configured language.
    """
    takes_input = False

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        assert self.filetype == 'js', (
            f'I18N only supports compilation to js. '
            f'The parent filter expects "{self.filetype}".')

    def get_variations(self):
        """The only variation axis is the language of the catalog."""
        return {'language': LANGUAGES}

    def get_output(self, variation):
        language = variation['language']
        yield self._generate(language)

    def get_dev_output(self, name, variation):
        language = variation['language']
        assert language == name
        return self._generate(language)

    def get_dev_output_names(self, variation):
        """Yield (name, version-hash) pairs for the dev server."""
        language = variation['language']
        content = self._generate(language)
        # smart_str() returns text on Python 3, but sha1() requires bytes:
        # encode explicitly (the original passed the str straight in, which
        # raises TypeError).  Renamed the local so it no longer shadows the
        # builtin hash().
        digest = sha1(smart_str(content).encode('utf-8')).hexdigest()
        yield language, digest

    def _generate(self, language):
        """Build the JavaScript source of the catalog for *language*."""
        language_bidi = language.split('-')[0] in settings.LANGUAGES_BIDI
        # Add some JavaScript data
        content = f'var LANGUAGE_CODE = "{language}";\n'
        content += f'var LANGUAGE_BIDI = {"true" if language_bidi else "false"};\n'
        translation.activate(language)
        content += JavaScriptCatalog(
            packages=[app_config.name for app_config in apps.app_configs.values()],
        ).get(HttpRequest()).content.decode()
        # The hgettext() function just calls gettext() internally,
        # but it won't get indexed by makemessages.
        content += '\nwindow.hgettext = function(text) { return gettext(text); };\n'
        # Add a similar hngettext() function
        content += (
            'window.hngettext = function(singular, plural, count) {'
            ' return ngettext(singular, plural, count); '
            '};\n'
        )
        return content
/mycloudbackup-0.1.0.tar.gz/mycloudbackup-0.1.0/README.rst | =============
MyCloudBackup
=============
We use dozens of cloud services on a daily basis. Most of them are a wonderful
thing, making our lifes easier and bringing great functionality.
But should you trust these services with all your valued private data?
What if one of those services should go down, cease operations, or be hacked?
This program gets all of your precious data from your cloud services, and
backs it up to a storage of your choice (your own hard drive, Dropbox, Amazon S3,
an FTP Server, ...) so you always have a backup if something happens.
You can select the services you use, and back them up to the storage of your
choice.
MCB works from the command line, and also has a GUI.
Services
========
MyCloudBackup supports the following cloud services:
* Google Gmail - Back up all your Gmail mails into mbox files; does not preserve tags
* Google Calendar - Back up all your calendars as ical files
* Dropbox - Back up your entire Dropbox folder
* Email (Imap) - Back up any IMAP-accesible email account into widely used mbox files
* Github - Copy all your repositories and their issues
* Evernote - Back up all your notes and files
* Facebook - Back up your Facebook conversations, wall posts, photos, etc.
Outputs
=======
MyCloudBackup supports the following outputs (backup targets):
* Filesystem - Backup to your own computer
* Dropbox - Back up to your Dropbox account
* FTP - Back up to a remote FTP server
Soon to come:
* Amazon S3
...
Installation
============
You can install MyCloudBackup by using setuptools and pip
Install setuptools: http://pypi.python.org/pypi/setuptools
On linux systems you would do this:
sudo pip install mycloudbackup
---------------------------------
-IMPORTANT NOTE - PYTHON VERSION-
---------------------------------
MCB runs well with Python3, but many of the dependencies exist only for Python2.
So you should run MCB with python2.
If you have multiple Python versions on your system,
be sure to use the Python 2 pip for installing.
If your system's default version is Python 3,
you also need to start MCB with the python2 executable.
for example:
python2 mcb
python2 mcb-gui
Usage
=====
Usage - GUI
===========
Start the GUI with mcb-gui
The GUI is a work in progress, but the essentials work.
You can already add new services and run the backup process.
Usage - CLI
===========
Run mcb -h to show help.
You can easily add configuration and run backups with the CLI.
================================================================================
Feedback and Development
========================
MCB is under the New BSD License (see LICENSE.txt).
Bug reports, suggestions and contributions are very welcome.
Development happens at https://github.com/theduke/mycloudbackup .
Dependencies
============
These will automatically be installed by pip.
Required python packages:
* pyyaml - For config files (http://pyyaml.org)
* requests To ease working with various APIs(http://python-requests.org)
DropboxService:
* Dropbox python library: https://www.dropbox.com/developers/reference/sdk
GithubService:
* GitPython: https://github.com/gitpython-developers
EvernoteService:
* evernote: https://github.com/evernote/evernote-sdk-python
Plugin System
=============
MCB has a modular plugin system, that makes it easy to add new services and
outputs.
Will write documentation on how to write plugins soon.
For now, just copy an existing one and adapt it.
Starting point for services: mcb/services/github.py
for outputs: mcb/outputs/dropbox.py
Contributors
============
Christoph Herzog - [email protected]
| PypiClean |
/ADPTC_LIB-0.0.7.tar.gz/ADPTC_LIB-0.0.7/README.md | <!--
* @Description:
* @Author: SongJ
* @Date: 2020-12-29 13:52:28
* @LastEditTime: 2021-04-12 10:44:01
* @LastEditors: SongJ
-->
## 自适应密度峰值树聚类(Adaptive Density Peak Tree Clustering)
本算法是在快速搜索与发现密度峰值聚类算法(Clustering by fast search and find of density peaks)CFSFDP的基础上进行改进的成果,主要解决的问题有:
- 手动选择聚类中心
- 单簇多密度峰值导致类簇误分
- 面向时空数据聚类时,无法顾及时空耦合
### 原理:
通过CFSFDP算法的核心概念:局部密度和斥群值,构建密度峰值树,通过直达点、连通点和切割点分离子树,达到类簇划分的目的。
<img src="https://cdn.jsdelivr.net/gh/SuilandCoder/PicStorage//img/image-20210409210616098.png" alt="image-20210409210616098" style="zoom: 80%;" />
![image-20210409210731545](https://cdn.jsdelivr.net/gh/SuilandCoder/PicStorage//img/image-20210409210731545.png)
![image-20210409212843640](https://cdn.jsdelivr.net/gh/SuilandCoder/PicStorage//img/image-20210409212843640.png)
### 使用方法:
#### 1. 安装:
```python
pip install ADPTC-LIB
```
#### 2. 空间数据聚类:
```python
import numpy as np
from ADPTC_LIB.cluster import ADPTC
from ADPTC_LIB import visual
X = np.loadtxt(r"../test_data/Aggregation.txt", delimiter="\t")
X = X[:,[0,1]]
atdpc_obj = ADPTC(X)
atdpc_obj.clustering(2)
visual.show_result(atdpc_obj.labels,X,np.array(list(atdpc_obj.core_points)))
```
![image-20210410095608378](https://cdn.jsdelivr.net/gh/SuilandCoder/PicStorage//img/image-20210410095608378.png)
#### 3. 空间属性数据聚类:
```python
from ADPTC_LIB.cluster import ADPTC
from ADPTC_LIB import visual
import xarray as xr
import os
import numpy as np
filePath = os.path.join(r'Z:\regions_daily_010deg\\05\\2013.nc')
dataset = xr.open_dataset(filePath)
pre_ds = dataset['precipitation']
lon = pre_ds.lon
lat = pre_ds.lat
lon_range = lon[(lon>-30)&(lon<70)]
lat_range = lat[(lat>30)&(lat<90)]
var = pre_ds.sel(lon=lon_range,lat = lat_range)
var = var.resample(time='1M',skipna=True).sum()
var_t = var.sel(time=var.time[0])
reduced = var_t.coarsen(lon=5).mean().coarsen(lat=5).mean()
data_nc = np.array(reduced)
spatial_eps=4
attr_eps=8
density_metric='gauss'
spre = ADPTC(data_nc)
spre.spacial_clustering_raster(spatial_eps,attr_eps,density_metric,knn_num=100,leaf_size=3000,connect_eps=0.9)
visual.show_result_2d(reduced,spre.labels)
```
![image-20210410104300578](https://cdn.jsdelivr.net/gh/SuilandCoder/PicStorage//img/image-20210410104300578.png)
#### 4. 时空属性聚类:
```python
from ADPTC_LIB.cluster import ADPTC
from ADPTC_LIB import visual
import xarray as xr
import numpy as np
temp= xr.open_dataset(r'Z:\MSWX\temp\2020.nc')
temp_2020 = temp['air_temperature']
lon = temp_2020.lon
lat = temp_2020.lat
time = temp_2020.time
lon_range = lon[(lon>70)&(lon<140)]
lat_range = lat[(lat>15)&(lat<55)]
var = temp_2020.sel(lon=lon_range,lat = lat_range)
reduced = var.coarsen(lon=5).mean().coarsen(lat=5).mean()
data_nc = np.array(reduced)
s_eps = 5
t_eps = 1
attr_eps = 2.5
density_metric='gauss'
spre = ADPTC(data_nc)
spre.st_clustering_raster(s_eps,t_eps,attr_eps,density_metric,knn_num=100,leaf_size=3000,connect_eps=0.9)
visual.show_result_3d(reduced,spre,[70, 140, 15, 50],[0,12],21)
```
![image-20210412095947596](https://cdn.jsdelivr.net/gh/SuilandCoder/PicStorage//img/image-20210412095947596.png) | PypiClean |
/Products.Archetypes-1.16.6.tar.gz/Products.Archetypes-1.16.6/Products/Archetypes/examples/ComplexType.py | from Products.Archetypes import atapi
from Products.Archetypes import Field
from SimpleType import SimpleType
from Products.Archetypes.config import PKG_NAME
from zope.i18nmessageid import MessageFactory
# Names of the stock Archetypes field classes this example type instantiates.
fields = ['StringField',
          'FileField', 'TextField', 'DateTimeField', 'LinesField',
          'IntegerField', 'FloatField', 'FixedPointField',
          'BooleanField', 'ImageField'
          ]

# One instance of every field class above, named after its lowercased class name.
field_instances = [getattr(Field, field_name)(field_name.lower())
                   for field_name in fields]
# Schema combining one instance of each basic field (built above) with a set of
# hand-configured fields that exercise vocabularies, append-only text areas,
# rich text and references; the standard extensible metadata schema is appended.
schema = atapi.Schema(tuple(field_instances) + (
    atapi.LinesField('selectionlinesfield1',
                     vocabulary='_get_selection_vocab',
                     enforceVocabulary=1,
                     widget=atapi.SelectionWidget(label='Selection'),
                     ),
    atapi.LinesField('selectionlinesfield2',
                     vocabulary='_get_selection_vocab',
                     widget=atapi.SelectionWidget(
                         label='Selection', i18n_domain="attesti18n"),
                     ),
    atapi.LinesField('selectionlinesfield3',
                     vocabulary='_get_selection_vocab2',
                     widget=atapi.MultiSelectionWidget(
                         label='MultiSelection', i18n_domain="attesti18n"),
                     ),
    atapi.TextField('textarea_appendonly',
                    widget=atapi.TextAreaWidget(label='TextArea',
                                                append_only=1,),
                    ),
    atapi.TextField('textarea_appendonly_timestamp',
                    widget=atapi.TextAreaWidget(label='TextArea',
                                                append_only=1,
                                                timestamp=1,),
                    ),
    atapi.TextField('textarea_maxlength',
                    widget=atapi.TextAreaWidget(label='TextArea',
                                                maxlength=20,),
                    ),
    atapi.TextField('richtextfield',
                    allowable_content_types=('text/plain',
                                             'text/structured',
                                             'text/restructured',
                                             'text/html',
                                             'application/msword'),
                    widget=atapi.RichWidget(label='rich'),
                    ),
    atapi.ReferenceField('referencefield',
                         relationship='complextype',
                         widget=atapi.ReferenceWidget(addable=1),
                         allowed_types=('ComplexType', ),
                         multiValued=1,
                         ),
)) + atapi.ExtensibleMetadata.schema

# Message factories used to mark vocabulary terms as translatable, in two
# distinct i18n domains (exercised by the vocabularies below).
_domain1 = MessageFactory('domain1')
_domain2 = MessageFactory('domain2')
class ComplexType(SimpleType):
    """A simple archetype"""

    # Extend the parent's schema with the fields defined at module level.
    schema = SimpleType.schema + schema

    archetype_name = meta_type = "ComplexType"
    portal_type = 'ComplexType'

    def _get_selection_vocab(self):
        # Vocabulary for the single-selection fields; mixes plain unicode
        # terms with translatable messages from two i18n domains.
        return atapi.DisplayList((
            ('foo', u'Foo'),
            ('complex', u'C\xf6mpl\xe8x'),
            ('bar', _domain1(u'Bar')),
            ('hello', _domain2(u'Hello')),
        ))

    def _get_selection_vocab2(self):
        # Vocabulary for the multi-selection field.
        return atapi.DisplayList((
            ('foo2', u'Foo 2'),
            ('complex2', u'C\xf6mpl\xe8x 2'),
            ('bar2', _domain1(u'Bar 2')),
            ('hello2', _domain2(u'Hello 2')),
        ))
# Register the content type with the Archetypes machinery under this package.
atapi.registerType(ComplexType, PKG_NAME)
/azure_ai_ml-1.9.0-py3-none-any.whl/azure/ai/ml/_restclient/v2023_04_01_preview/aio/operations/_featurestore_entity_containers_operations.py | from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._featurestore_entity_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_entity_request, build_list_request
T = TypeVar('T')
# Signature of the optional ``cls`` response hook accepted by every operation:
# (pipeline_response, deserialized_result, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class FeaturestoreEntityContainersOperations:
    """FeaturestoreEntityContainersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.machinelearningservices.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # All collaborators are injected by the generated service client and
        # stored as-is; this class owns no state of its own.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        workspace_name: str,
        skip: Optional[str] = None,
        tags: Optional[str] = None,
        list_view_type: Optional[Union[str, "_models.ListViewType"]] = None,
        page_size: Optional[int] = 20,
        name: Optional[str] = None,
        description: Optional[str] = None,
        created_by: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterable["_models.FeaturestoreEntityContainerResourceArmPaginatedResult"]:
        """List featurestore entity containers.

        List featurestore entity containers.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param skip: Continuation token for pagination.
        :type skip: str
        :param tags: Comma-separated list of tag names (and optionally values). Example:
         tag1,tag2=value2.
        :type tags: str
        :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly,
         ListViewType.All]View type for including/excluding (for example) archived entities.
        :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType
        :param page_size: page size.
        :type page_size: int
        :param name: name for the featurestore entity.
        :type name: str
        :param description: description for the featurestore entity.
        :type description: str
        :param created_by: createdBy user name.
        :type created_by: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either
         FeaturestoreEntityContainerResourceArmPaginatedResult or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerResourceArmPaginatedResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2023-04-01-preview")  # type: str

        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FeaturestoreEntityContainerResourceArmPaginatedResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            # Build the first-page request from the templated URL, or a
            # follow-up request from the server-provided ``next_link``.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    api_version=api_version,
                    skip=skip,
                    tags=tags,
                    list_view_type=list_view_type,
                    page_size=page_size,
                    name=name,
                    description=description,
                    created_by=created_by,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    workspace_name=workspace_name,
                    api_version=api_version,
                    skip=skip,
                    tags=tags,
                    list_view_type=list_view_type,
                    page_size=page_size,
                    name=name,
                    description=description,
                    created_by=created_by,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link already encodes the continuation; re-issue as GET.
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand (next_link, items) to the pager.
            deserialized = self._deserialize("FeaturestoreEntityContainerResourceArmPaginatedResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
                request,
                stream=False,
                **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities"}  # type: ignore
    async def _delete_initial(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        workspace_name: str,
        name: str,
        **kwargs: Any
    ) -> None:
        # First (non-polling) leg of the delete long-running operation;
        # ``begin_delete`` wraps this and polls to completion.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2023-04-01-preview")  # type: str

        request = build_delete_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            api_version=api_version,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # 202 carries the headers the poller uses to track the operation.
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
            response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))

        if cls:
            return cls(pipeline_response, None, response_headers)

    _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"}  # type: ignore
    @distributed_trace_async
    async def begin_delete(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        workspace_name: str,
        name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Delete container.

        Delete container.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param name: Container name. This is case-sensitive.
        :type name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2023-04-01-preview")  # type: str
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial DELETE and keep the raw
            # pipeline response for the poller (cls is the identity here).
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                name=name,
                api_version=api_version,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Rehydrate a poller from a previously saved continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"}  # type: ignore
    @distributed_trace_async
    async def get_entity(
        self,
        resource_group_name: str,
        workspace_name: str,
        name: str,
        **kwargs: Any
    ) -> "_models.FeaturestoreEntityContainer":
        """Get container.

        Get container.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param name: Container name. This is case-sensitive.
        :type name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FeaturestoreEntityContainer, or the result of cls(response)
        :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FeaturestoreEntityContainer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2023-04-01-preview")  # type: str

        request = build_get_entity_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            api_version=api_version,
            template_url=self.get_entity.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_entity.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        workspace_name: str,
        name: str,
        body: "_models.FeaturestoreEntityContainer",
        **kwargs: Any
    ) -> "_models.FeaturestoreEntityContainer":
        # First (non-polling) leg of the create-or-update long-running
        # operation; ``begin_create_or_update`` wraps this and polls.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FeaturestoreEntityContainer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        api_version = kwargs.pop('api_version', "2023-04-01-preview")  # type: str
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(body, 'FeaturestoreEntityContainer')

        request = build_create_or_update_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            name=name,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            template_url=self._create_or_update_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 200:
            deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response)

        if response.status_code == 201:
            # 201 additionally carries the async-operation polling headers.
            response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout'))
            response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))

            deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, response_headers)

        return deserialized

    _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"}  # type: ignore
    @distributed_trace_async
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        name: str,
        body: "_models.FeaturestoreEntityContainer",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.FeaturestoreEntityContainer"]:
        """Create or update container.

        Create or update container.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param name: Container name. This is case-sensitive.
        :type name: str
        :param body: Container entity to create or update.
        :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityContainer or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        api_version = kwargs.pop('api_version', "2023-04-01-preview")  # type: str
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FeaturestoreEntityContainer"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: issue the initial PUT and keep the raw pipeline
            # response for the poller (cls is the identity here).
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                workspace_name=workspace_name,
                name=name,
                body=body,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Rehydrate a poller from a previously saved continuation token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"}  # type: ignore
/panda3d-1.10.13-cp38-cp38-manylinux2010_i686.whl/direct/plugin_standalone/make_osx_bundle.py | import getopt
import sys
import os
import glob
import shutil
import direct
from panda3d.core import Filename, DSearchPath, getModelPath, ExecutionEnvironment
def usage(code, msg=''):
    """Print the module usage text (its docstring) plus *msg* to stderr and exit.

    :param code: process exit status passed to ``sys.exit``.
    :param msg: optional extra message. May be any object (the ``__main__``
        block passes a ``getopt.error`` instance); it is converted with
        ``str()`` before writing.
    """
    # __doc__ can be None when the module has no docstring; guard against it.
    sys.stderr.write(__doc__ or '')
    # str() here fixes a crash: the caller passes a getopt.error exception,
    # and `exception + '\n'` raises TypeError.
    sys.stderr.write(str(msg) + '\n')
    sys.exit(code)
def makeBundle(startDir):
    """Assemble a Panda3D.app bundle inside *startDir*.

    Locates the ``panda3d_mac`` executable on $PATH and the ``panda3d.icns``
    icon on the model search path, then builds the standard macOS bundle
    layout (Info.plist, MacOS/, Resources/) and copies the pieces in.
    Raises Exception when either input file cannot be found.
    """
    fstartDir = Filename.fromOsSpecific(startDir)

    # Search for panda3d_mac along $PATH.
    path = DSearchPath()
    if 'PATH' in os.environ:
        path.appendPath(os.environ['PATH'])
    path.appendPath(os.defpath)
    panda3d_mac = path.findFile('panda3d_mac')
    if not panda3d_mac:
        raise Exception("Couldn't find panda3d_mac on path.")

    # Construct a search path to look for the images.
    search = DSearchPath()

    # First on the path: an explicit $PLUGIN_IMAGES env var.
    if ExecutionEnvironment.hasEnvironmentVariable('PLUGIN_IMAGES'):
        search.appendDirectory(Filename.expandFrom('$PLUGIN_IMAGES'))

    # Next on the path: the models/plugin_images directory within the
    # current directory.
    search.appendDirectory('models/plugin_images')

    # Finally on the path: models/plugin_images within the model
    # search path.
    for dir in getModelPath().getDirectories():
        search.appendDirectory(Filename(dir, 'plugin_images'))

    # Now find the icon file on the above search path.
    icons = search.findFile('panda3d.icns')
    if not icons:
        raise Exception("Couldn't find panda3d.icns on model-path.")

    # Generate the bundle directory structure
    rootFilename = Filename(fstartDir)
    bundleFilename = Filename(rootFilename, 'Panda3D.app')
    if os.path.exists(bundleFilename.toOsSpecific()):
        # Start from a clean slate if a previous bundle exists.
        shutil.rmtree(bundleFilename.toOsSpecific())
    plistFilename = Filename(bundleFilename, 'Contents/Info.plist')
    plistFilename.makeDir()
    exeFilename = Filename(bundleFilename, 'Contents/MacOS/panda3d_mac')
    exeFilename.makeDir()
    iconFilename = Filename(bundleFilename, 'Contents/Resources/panda3d.icns')
    iconFilename.makeDir()

    # Copy in Info.plist, the icon file, and the compiled executable.
    shutil.copyfile(Filename(fstartDir, "panda3d_mac.plist").toOsSpecific(), plistFilename.toOsSpecific())
    shutil.copyfile(icons.toOsSpecific(), iconFilename.toOsSpecific())
    print('%s %s' % (panda3d_mac, exeFilename))
    shutil.copyfile(panda3d_mac.toOsSpecific(), exeFilename.toOsSpecific())
    # The copied binary must be executable.
    os.chmod(exeFilename.toOsSpecific(), 0o755)

    # All done!
    bundleFilename.touch()
    print(bundleFilename.toOsSpecific())
if __name__ == '__main__':
    # Parse command-line flags; only -h (help) is recognised, and no
    # positional arguments are accepted.
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'h')
    except getopt.error as msg:
        usage(1, msg)

    for opt, arg in opts:
        if opt == '-h':
            usage(0)

    if args:
        usage(1, 'No arguments are expected.')

    # The bundle is created next to this script.
    startDir = os.path.split(sys.argv[0])[0]
    makeBundle(startDir)
/ai_traineree-0.5.2-py3-none-any.whl/ai_traineree/agents/agent_utils.py | import torch
import torch.nn as nn
from torch import Tensor
# Small constant used to avoid division by zero when normalizing.
EPS = 1e-7
def soft_update(target: nn.Module, source: nn.Module, tau: float) -> None:
    """Polyak-average *source* into *target* in place: t <- (1 - tau) * t + tau * s."""
    for target_param, source_param in zip(target.parameters(), source.parameters()):
        blended = target_param.data * (1.0 - tau) + source_param.data * tau
        target_param.data.copy_(blended)  # type: ignore
def hard_update(target: nn.Module, source: nn.Module):
    """Overwrite every parameter of *target* with the matching one of *source*."""
    for target_param, source_param in zip(target.parameters(), source.parameters()):
        target_param.data.copy_(source_param.data)  # type: ignore
def compute_gae(rewards: Tensor, dones: Tensor, values: Tensor, next_value: Tensor, gamma=0.99, lamb=0.9) -> Tensor:
    """Compute advantages via Generalized Advantage Estimation (GAE).

    Walks the trajectory backwards, accumulating discounted TD errors; the
    accumulation is cut at terminal steps (``dones``).
    """
    # Append the bootstrap value so values[t + 1] exists for every step t.
    all_values = torch.cat((values, next_value[None, ...]))
    not_done = 1 - dones.int()
    # One-step TD errors for every transition.
    td_errors = rewards + gamma * all_values[1:] * not_done - all_values[:-1]
    decay = gamma * lamb
    advantages = torch.zeros_like(all_values)
    for step in range(len(rewards) - 1, -1, -1):
        advantages[step] = td_errors[step] + decay * not_done[step] * advantages[step + 1]
    # Drop the trailing slot that only held the recursion's zero seed.
    return advantages[:-1]
def normalize(t: Tensor, dim: int = 0) -> Tensor:
    """Standardize *t* to zero mean and unit std along *dim* (default 0).

    The std is clamped from below by EPS to avoid dividing by zero.
    """
    mean = t.mean(dim=dim, keepdim=True)
    std = torch.clamp(t.std(dim=dim, keepdim=True), EPS)
    # keepdim keeps the reduced axis so the result broadcasts back over t;
    # for dim=0 this yields the same values/shape as the non-keepdim form.
    return (t - mean) / std
def revert_norm_returns(rewards: Tensor, dones: Tensor, gamma: float = 0.99) -> Tensor:
    """Compute discounted returns (reset at terminals) and standardize them.

    Parameters:
        rewards: Rewards to discount. Expected shape (..., 1)
        dones: Tensor with termination flag. Expected ints {0, 1} in shape (..., 1)
        gamma: Discount factor.
    """
    running = torch.zeros(rewards.shape[1:], dtype=rewards.dtype, device=rewards.device)
    returns = torch.zeros_like(rewards).float()
    last = returns.shape[0] - 1
    # Accumulate backwards; a done flag zeroes the running return.
    for offset, (reward, done) in enumerate(zip(reversed(rewards), reversed(dones.int()))):
        running = reward + gamma * running * (1 - done)
        returns[last - offset] = running
    return normalize(returns, dim=0)
/pano_airflow-2.7.1-py3-none-any.whl/airflow/utils/helpers.py | from __future__ import annotations
import copy
import re
import signal
import warnings
from datetime import datetime
from functools import reduce
from itertools import filterfalse, tee
from typing import TYPE_CHECKING, Any, Callable, Generator, Iterable, Mapping, MutableMapping, TypeVar, cast
from airflow.configuration import conf
from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
from airflow.utils.context import Context
from airflow.utils.module_loading import import_string
from airflow.utils.types import NOTSET
if TYPE_CHECKING:
import jinja2
from airflow.models.taskinstance import TaskInstance
# Valid characters for task/DAG keys: word characters, dots and dashes.
KEY_REGEX = re.compile(r"^[\w.-]+$")
# Group keys are stricter: dots are not allowed.
GROUP_KEY_REGEX = re.compile(r"^[\w-]+$")
# Runs of uppercase letters (except at the start), for CamelCase -> snake_case.
CAMELCASE_TO_SNAKE_CASE_REGEX = re.compile(r"(?!^)([A-Z]+)")

T = TypeVar("T")
S = TypeVar("S")
def validate_key(k: str, max_length: int = 250):
    """Validates value used as a key.

    :raises TypeError: if *k* is not a string.
    :raises AirflowException: if *k* is too long or contains characters other
        than alphanumerics, dashes, dots and underscores.
    """
    if not isinstance(k, str):
        raise TypeError(f"The key has to be a string and is {type(k)}:{k}")
    if len(k) > max_length:
        raise AirflowException(f"The key has to be less than {max_length} characters")
    if KEY_REGEX.match(k) is None:
        message = (
            f"The key {k!r} has to be made of alphanumeric characters, dashes, "
            "dots and underscores exclusively"
        )
        raise AirflowException(message)
def validate_group_key(k: str, max_length: int = 200):
    """Validates value used as a group key.

    :raises TypeError: if *k* is not a string.
    :raises AirflowException: if *k* is too long or contains characters other
        than alphanumerics, dashes and underscores.
    """
    if not isinstance(k, str):
        raise TypeError(f"The key has to be a string and is {type(k)}:{k}")
    if len(k) > max_length:
        raise AirflowException(f"The key has to be less than {max_length} characters")
    if GROUP_KEY_REGEX.match(k) is None:
        message = (
            f"The key {k!r} has to be made of alphanumeric characters, dashes "
            "and underscores exclusively"
        )
        raise AirflowException(message)
def alchemy_to_dict(obj: Any) -> dict | None:
    """Transforms a SQLAlchemy model instance into a dictionary.

    Datetime column values are serialized with ``isoformat()``; a falsy *obj*
    yields ``None``.
    """
    if not obj:
        return None
    as_dict = {}
    for column in obj.__table__.columns:
        value = getattr(obj, column.name)
        as_dict[column.name] = value.isoformat() if isinstance(value, datetime) else value
    return as_dict
def ask_yesno(question: str, default: bool | None = None) -> bool:
    """Prompt on stdin until the user answers yes/no.

    An empty answer returns *default* when one is given; otherwise the user
    is re-prompted.
    """
    affirmative = {"yes", "y"}
    negative = {"no", "n"}

    print(question)
    while True:
        answer = input().lower()
        if not answer and default is not None:
            return default
        if answer in affirmative:
            return True
        if answer in negative:
            return False
        print("Please respond with y/yes or n/no.")
def prompt_with_timeout(question: str, timeout: int, default: bool | None = None) -> bool:
    """Ask the user a question and timeout if they don't respond.

    Uses SIGALRM, so this only works on Unix and in the main thread.

    :param question: text shown to the user.
    :param timeout: seconds to wait before raising AirflowException.
    :param default: answer assumed for empty input, if not None.
    """

    def handler(signum, frame):
        raise AirflowException(f"Timeout {timeout}s reached")

    signal.signal(signal.SIGALRM, handler)
    signal.alarm(timeout)
    try:
        return ask_yesno(question, default)
    finally:
        # Always cancel the pending alarm, even when answered in time.
        signal.alarm(0)
def is_container(obj: Any) -> bool:
    """Return True when *obj* is iterable but not a string."""
    return not isinstance(obj, str) and hasattr(obj, "__iter__")
def as_tuple(obj: Any) -> tuple:
    """Return *obj* converted to a tuple if it is a container, else wrapped in one."""
    return tuple(obj) if is_container(obj) else (obj,)
def chunks(items: list[T], chunk_size: int) -> Generator[list[T], None, None]:
    """Yield consecutive slices of *items*, each at most *chunk_size* long.

    :raises ValueError: if *chunk_size* is not positive (raised on iteration,
        since this is a generator).
    """
    if chunk_size <= 0:
        raise ValueError("Chunk size must be a positive integer")
    start = 0
    while start < len(items):
        yield items[start : start + chunk_size]
        start += chunk_size
def reduce_in_chunks(fn: Callable[[S, list[T]], S], iterable: list[T], initializer: S, chunk_size: int = 0):
    """Reduce *iterable* chunk-by-chunk: each chunk of *chunk_size* items is
    passed to the reducer *fn* in turn, starting from *initializer*.

    A *chunk_size* of 0 processes the whole list as a single chunk.
    """
    if not iterable:
        return initializer
    size = chunk_size or len(iterable)
    return reduce(fn, chunks(iterable, size), initializer)
def as_flattened_list(iterable: Iterable[Iterable[T]]) -> list[T]:
    """Flatten one nesting level of *iterable*.

    >>> as_flattened_list((('blue', 'red'), ('green', 'yellow', 'pink')))
    ['blue', 'red', 'green', 'yellow', 'pink']
    """
    flattened: list[T] = []
    for inner in iterable:
        flattened.extend(inner)
    return flattened
def parse_template_string(template_string: str) -> tuple[str | None, jinja2.Template | None]:
    """Classify *template_string*: return (string, None) for a plain format
    string, or (None, Template) when it contains Jinja markers ("{{")."""
    import jinja2

    if "{{" not in template_string:
        return template_string, None
    return None, jinja2.Template(template_string)
def render_log_filename(ti: TaskInstance, try_number, filename_template) -> str:
    """Render the log filename for one attempt of a task instance.

    :param ti: task instance the log belongs to
    :param try_number: try_number of the task
    :param filename_template: either a Jinja template (contains ``{{``) or a
        ``str.format``-style template string
    """
    str_template, jinja_template = parse_template_string(filename_template)
    if jinja_template:
        # Jinja mode: render with the full task context plus the try number.
        context = ti.get_template_context()
        context["try_number"] = try_number
        return render_template_to_string(jinja_template, context)
    # Plain-string mode: classic str.format with a fixed set of fields.
    return str_template.format(
        dag_id=ti.dag_id,
        task_id=ti.task_id,
        execution_date=ti.execution_date.isoformat(),
        try_number=try_number,
    )
def convert_camel_to_snake(camel_str: str) -> str:
    """Converts CamelCase to snake_case.

    Inserts an underscore before each boundary matched by the module-level
    ``CAMELCASE_TO_SNAKE_CASE_REGEX`` pattern, then lowercases the result.
    """
    return CAMELCASE_TO_SNAKE_CASE_REGEX.sub(r"_\1", camel_str).lower()
def merge_dicts(dict1: dict, dict2: dict) -> dict:
    """
    Merge two dicts recursively, returning new dict (input dict is not mutated).

    Lists are not concatenated. Items in dict2 overwrite those also found in dict1.

    :param dict1: base dict
    :param dict2: dict whose items take precedence on key collisions
    :return: a new merged dict; neither input is modified
    """
    merged = dict1.copy()
    for k, v in dict2.items():
        # Recurse only when *both* sides hold a dict. Previously only `v` was
        # checked, so a non-dict value in dict1 paired with a dict in dict2
        # crashed with AttributeError instead of being overwritten as the
        # docstring promises.
        if k in merged and isinstance(merged[k], dict) and isinstance(v, dict):
            merged[k] = merge_dicts(merged[k], v)
        else:
            merged[k] = v
    return merged
def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> tuple[Iterable[T], Iterable[T]]:
    """Split *iterable* into (falsey, truthy) streams according to *pred*.

    Both results are lazy iterators backed by ``itertools.tee``, so the input
    is only consumed as the outputs are consumed.
    """
    left, right = tee(iterable)
    rejected = filterfalse(pred, left)
    accepted = filter(pred, right)
    return rejected, accepted
def chain(*args, **kwargs):
    """This function is deprecated. Please use `airflow.models.baseoperator.chain`."""
    # Warn at the caller's stack frame, then delegate to the canonical
    # implementation, resolved lazily by dotted path to avoid an import cycle.
    warnings.warn(
        "This function is deprecated. Please use `airflow.models.baseoperator.chain`.",
        RemovedInAirflow3Warning,
        stacklevel=2,
    )
    return import_string("airflow.models.baseoperator.chain")(*args, **kwargs)
def cross_downstream(*args, **kwargs):
    """This function is deprecated. Please use `airflow.models.baseoperator.cross_downstream`."""
    # Warn at the caller's stack frame, then delegate to the canonical
    # implementation, resolved lazily by dotted path to avoid an import cycle.
    warnings.warn(
        "This function is deprecated. Please use `airflow.models.baseoperator.cross_downstream`.",
        RemovedInAirflow3Warning,
        stacklevel=2,
    )
    return import_string("airflow.models.baseoperator.cross_downstream")(*args, **kwargs)
def build_airflow_url_with_query(query: dict[str, Any]) -> str:
    """
    Build airflow url using base_url and default_view and provided query

    For example:
    'http://0.0.0.0:8000/base/graph?dag_id=my-task&root=&execution_date=2020-10-27T10%3A59%3A25.615587

    :param query: query parameters to encode into the generated URL

    NOTE(review): requires an active Flask application context for
    ``url_for`` to resolve — confirm all call sites run inside one.
    """
    import flask

    # The configured default view (e.g. "graph") selects the Flask endpoint.
    view = conf.get_mandatory_value("webserver", "dag_default_view").lower()
    return flask.url_for(f"Airflow.{view}", **query)
# The 'template' argument is typed as Any because the jinja2.Template is too
# dynamic to be effectively type-checked.
def render_template(template: Any, context: MutableMapping[str, Any], *, native: bool) -> Any:
    """Render a Jinja2 template with given Airflow context.

    The default implementation of ``jinja2.Template.render()`` converts the
    input context into dict eagerly many times, which triggers deprecation
    messages in our custom context class. This takes the implementation apart
    and retain the context mapping without resolving instead.

    :param template: A Jinja2 template to render.
    :param context: The Airflow task context to render the template with.
    :param native: If set to *True*, render the template into a native type. A
        DAG can enable this with ``render_template_as_native_obj=True``.
    :returns: The render result.
    """
    # Shallow copy so the caller's mapping is not mutated by the update below.
    context = copy.copy(context)
    env = template.environment
    if template.globals:
        # Template globals act as defaults only; never shadow context keys.
        context.update((k, v) for k, v in template.globals.items() if k not in context)
    try:
        # Drive the compiled render function directly so the context mapping is
        # passed through without being eagerly resolved into a plain dict.
        nodes = template.root_render_func(env.context_class(env, context, template.name, template.blocks))
    except Exception:
        env.handle_exception()  # Rewrite traceback to point to the template.
    if native:
        import jinja2.nativetypes

        # Native mode may produce non-string results (ints, lists, ...).
        return jinja2.nativetypes.native_concat(nodes)
    return "".join(nodes)
def render_template_to_string(template: jinja2.Template, context: Context) -> str:
    """Shorthand to ``render_template(native=False)`` with better typing support."""
    # The cast only narrows the static type; it performs no runtime conversion.
    return render_template(template, cast(MutableMapping[str, Any], context), native=False)
def render_template_as_native(template: jinja2.Template, context: Context) -> Any:
    """Shorthand to ``render_template(native=True)`` with better typing support."""
    # The cast only narrows the static type; it performs no runtime conversion.
    return render_template(template, cast(MutableMapping[str, Any], context), native=True)
def exactly_one(*args) -> bool:
    """
    Returns True if exactly one of *args is "truthy", and False otherwise.

    If user supplies an iterable, we raise ValueError and force them to unpack.

    :raises ValueError: if the first positional argument is itself a non-string
        iterable, which almost certainly means a missing ``*`` at the call site
    """
    if not args:
        # No arguments means zero truthy values, so not "exactly one".
        # (Previously this crashed with IndexError on args[0].)
        return False
    first = args[0]
    # Same check as is_container(): iterable, but not a string.
    if hasattr(first, "__iter__") and not isinstance(first, str):
        raise ValueError(
            "Not supported for iterable args. Use `*` to unpack your iterable in the function call."
        )
    return sum(map(bool, args)) == 1
def at_most_one(*args) -> bool:
    """
    Returns True if at most one of *args is "truthy", and False otherwise.

    NOTSET is treated the same as None.
    """
    # NOTE(review): an earlier docstring claimed a ValueError is raised for
    # iterable args (as exactly_one() does), but no such guard exists in this
    # body — confirm whether one should be added.

    def is_set(val):
        # The NOTSET sentinel counts as unset, exactly like any falsy value.
        if val is NOTSET:
            return False
        else:
            return bool(val)

    return sum(map(is_set, args)) in (0, 1)
def prune_dict(val: Any, mode="strict"):
    """
    Given dict ``val``, returns new dict based on ``val`` with all
    empty elements removed.

    What constitutes "empty" is controlled by the ``mode`` parameter. If mode is 'strict'
    then only ``None`` elements will be removed. If mode is ``truthy``, then element ``x``
    will be removed if ``bool(x) is False``.
    """

    def is_empty(x):
        # Mode is validated lazily, on the first element actually examined.
        if mode == "strict":
            return x is None
        if mode == "truthy":
            return not x
        raise ValueError("allowable values for `mode` include 'truthy' and 'strict'")

    if isinstance(val, dict):
        pruned_dict = {}
        for key, value in val.items():
            if is_empty(value):
                continue
            if isinstance(value, (list, dict)):
                nested = prune_dict(value, mode=mode)
                # Containers that became empty after pruning are dropped too.
                if nested:
                    pruned_dict[key] = nested
            else:
                pruned_dict[key] = value
        return pruned_dict
    if isinstance(val, list):
        pruned_list = []
        for value in val:
            if is_empty(value):
                continue
            if isinstance(value, (list, dict)):
                nested = prune_dict(value, mode=mode)
                if nested:
                    pruned_list.append(nested)
            else:
                pruned_list.append(value)
        return pruned_list
    # Scalars pass through untouched.
    return val
def prevent_duplicates(kwargs1: dict[str, Any], kwargs2: Mapping[str, Any], *, fail_reason: str) -> None:
    """Ensure *kwargs1* and *kwargs2* do not contain common keys.

    :param fail_reason: prefix used in the raised error message
    :raises TypeError: If common keys are found.
    """
    common = set(kwargs1) & set(kwargs2)
    if not common:
        return
    if len(common) == 1:
        raise TypeError(f"{fail_reason} argument: {common.pop()}")
    raise TypeError(f"{fail_reason} arguments: {', '.join(sorted(common))}")
/PhotoRename-1.0.8.tar.gz/PhotoRename-1.0.8/photorename/photorename.py |
import argparse
import os
import os.path
import sys
from hashlib import md5, sha224, sha512
# Command-line interface: input/output selection plus hash-algorithm flags
# (--md5 / --sha224 / --sha512) consumed by rename() below.
parser = argparse.ArgumentParser(
    description="Rename photos to give them more generic and unified names")
parser.add_argument("-v", "--verbose",
                    help="increase output verbosity", action="store_true")
# NOTE(review): --all is parsed but never consulted anywhere in this module —
# confirm whether bulk renaming is implemented elsewhere or simply missing.
parser.add_argument(
    "-a", "--all", help="rename all files in the current directory", action="store_true")
parser.add_argument("-i", "--input_file",
                    help="set the input file to be renamed", type=str)
parser.add_argument("-o", "--output_path",
                    help="set the output files location", type=str)
parser.add_argument(
    "--md5", help="use md5 to rename output file", action="store_true")
parser.add_argument(
    "--sha224", help="use sha224 to rename output file", action="store_true")
parser.add_argument(
    "--sha512", help="use sha512 to rename output file", action="store_true")
# Parsed eagerly at import time; the functions below read this module-level
# namespace instead of taking parameters.
args = parser.parse_args()
def verbose(args, string):
    """Print *string* only when the --verbose flag was supplied."""
    if not args.verbose:
        return
    print(string)
def get_md5_string(input_file):
    """Return the hex MD5 digest of *input_file* (a bytes-like value)."""
    return md5(input_file).hexdigest()
def get_sha224_string(input_file):
    """Return the hex SHA-224 digest of *input_file* (a bytes-like value)."""
    return sha224(input_file).hexdigest()
def get_sha512_string(input_file):
    """Return the hex SHA-512 digest of *input_file* (a bytes-like value)."""
    return sha512(input_file).hexdigest()
def rename():
    """Rename ``args.input_file`` to a hash-derived name in the output path.

    The digest is computed over the input *path string*, not the file
    contents — NOTE(review): presumably intentional (stable name per path),
    but confirm it isn't meant to hash the file's bytes.

    Exits with status 1 when no input file or no hash algorithm is given, or
    when the rename itself fails.
    """
    try:
        verbose(args, "Input File: " + os.path.realpath(args.input_file))
        in_f = os.path.realpath(args.input_file)
    except TypeError:
        # realpath(None) raises TypeError when -i/--input_file was omitted.
        print("You must specify an input file.")
        sys.exit(1)

    _, f_ext = os.path.splitext(in_f)

    if args.output_path:
        output_path = os.path.realpath(args.output_path)
    else:
        output_path = os.getcwd()

    # Pick the digest according to the flag. Previously, when no hash flag was
    # given, `out_f` was never assigned and the function crashed with
    # UnboundLocalError instead of reporting the problem.
    encoded = args.input_file.encode('utf-8')
    if args.md5:
        digest = get_md5_string(encoded)
    elif args.sha224:
        digest = get_sha224_string(encoded)
    elif args.sha512:
        digest = get_sha512_string(encoded)
    else:
        print("You must specify a hash algorithm (--md5, --sha224 or --sha512).")
        sys.exit(1)

    # os.path.join replaces the old hand-built "//" + "\\" separators, which
    # only produced sensible paths on Windows.
    out_f = os.path.join(output_path, digest + f_ext)

    try:
        verbose(args, "Output File: " + out_f)
        os.rename(in_f, out_f)
    except IOError as error:
        print(error)
        sys.exit(1)
/smartautomatic_server_frontend-20220907.2-py3-none-any.whl/sas_frontend/frontend_es5/425a2114.js | "use strict";(self.webpackChunksmartautomatic_server_frontend=self.webpackChunksmartautomatic_server_frontend||[]).push([[1359],{25516:function(e,t,r){r.d(t,{i:function(){return n}});var n=function(e){return function(t){return{kind:"method",placement:"prototype",key:t.key,descriptor:{set:function(e){this["__".concat(String(t.key))]=e},get:function(){return this["__".concat(String(t.key))]},enumerable:!0,configurable:!0},finisher:function(r){var n=r.prototype.connectedCallback;r.prototype.connectedCallback=function(){if(n.call(this),this[t.key]){var r=this.renderRoot.querySelector(e);if(!r)return;r.scrollTop=this[t.key]}}}}}}},1359:function(e,t,r){r(91156);var n,i,o,a,s=r(37500),l=r(33310),c=r(8636),d=r(14516),u=r(7323),f=r(25516),p=r(87744),h=(r(2315),r(48932),r(52039),r(98734)),m=r(51346);function v(e){return v="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},v(e)}function y(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}function b(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function k(e,t){return k=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e},k(e,t)}function w(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var r,n=x(e);if(t){var i=x(this).constructor;r=Reflect.construct(n,arguments,i)}else r=n.apply(this,arguments);return g(this,r)}}function g(e,t){if(t&&("object"===v(t)||"function"==typeof t))return t;if(void 0!==t)throw new TypeError("Derived constructors may only return 
object or undefined");return E(e)}function E(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function x(e){return x=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)},x(e)}function P(){P=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(n){t.forEach((function(t){var i=t.placement;if(t.kind===n&&("static"===i||"prototype"===i)){var o="static"===i?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var n=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===n?void 0:n.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],n=[],i={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,i)}),this),e.forEach((function(e){if(!O(e))return r.push(e);var t=this.decorateElement(e,i);r.push(t.element),r.push.apply(r,t.extras),n.push.apply(n,t.finishers)}),this),!t)return{elements:r,finishers:n};var o=this.decorateConstructor(r,t);return n.push.apply(n,o.finishers),o.finishers=n,o},addElementPlacement:function(e,t,r){var n=t[e.placement];if(!r&&-1!==n.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");n.push(e.key)},decorateElement:function(e,t){for(var r=[],n=[],i=e.decorators,o=i.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,i[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&n.push(l.finisher);var c=l.extras;if(c){for(var 
d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:n,extras:r}},decorateConstructor:function(e,t){for(var r=[],n=t.length-1;n>=0;n--){var i=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[n])(i)||i);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return T(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?T(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=A(e.key),n=String(e.placement);if("static"!==n&&"prototype"!==n&&"own"!==n)throw 
new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+n+'"');var i=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:n,descriptor:Object.assign({},i)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(i,"get","The property descriptor of a field descriptor"),this.disallowProperty(i,"set","The property descriptor of a field descriptor"),this.disallowProperty(i,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:_(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=_(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var n=(0,t[r])(e);if(void 0!==n){if("function"!=typeof n)throw new TypeError("Finishers must return a constructor.");e=n}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function C(e){var 
t,r=A(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var n={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(n.decorators=e.decorators),"field"===e.kind&&(n.initializer=e.value),n}function S(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function O(e){return e.decorators&&e.decorators.length}function D(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function _(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function A(e){var t=function(e,t){if("object"!==v(e)||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var n=r.call(e,t||"default");if("object"!==v(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"===v(t)?t:String(t)}function T(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}var j,R,z,F,B,H,I,M,K;!function(e,t,r,n){var i=P();if(n)for(var o=0;o<n.length;o++)i=n[o](i);var a=t((function(e){i.initializeInstanceElements(e,s.elements)}),r),s=i.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},n=0;n<e.length;n++){var i,o=e[n];if("method"===o.kind&&(i=t.find(r)))if(D(o.descriptor)||D(i.descriptor)){if(O(o)||O(i))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");i.descriptor=o.descriptor}else{if(O(o)){if(O(i))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");i.decorators=o.decorators}S(o,i)}else 
t.push(o)}return t}(a.d.map(C)),e);i.initializeClassElements(a.F,s.elements),i.runClassFinishers(a.F,s.finishers)}([(0,l.Mo)("ha-tab")],(function(e,t){var r=function(t){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&k(e,t)}(n,t);var r=w(n);function n(){var t;b(this,n);for(var i=arguments.length,o=new Array(i),a=0;a<i;a++)o[a]=arguments[a];return t=r.call.apply(r,[this].concat(o)),e(E(t)),t}return n}(t);return{F:r,d:[{kind:"field",decorators:[(0,l.Cb)({type:Boolean,reflect:!0})],key:"active",value:function(){return!1}},{kind:"field",decorators:[(0,l.Cb)({type:Boolean,reflect:!0})],key:"narrow",value:function(){return!1}},{kind:"field",decorators:[(0,l.Cb)()],key:"name",value:void 0},{kind:"field",decorators:[(0,l.GC)("mwc-ripple")],key:"_ripple",value:void 0},{kind:"field",decorators:[(0,l.SB)()],key:"_shouldRenderRipple",value:function(){return!1}},{kind:"method",key:"render",value:function(){return(0,s.dy)(n||(n=y(['\n <div\n tabindex="0"\n role="tab"\n aria-selected=',"\n aria-label=","\n @focus=","\n @blur=","\n @mousedown=","\n @mouseup=","\n @mouseenter=","\n @mouseleave=","\n @touchstart=","\n @touchend=","\n @touchcancel=","\n @keydown=","\n >\n ",'\n <span class="name">',"</span>\n ","\n </div>\n "])),this.active,(0,m.o)(this.name),this.handleRippleFocus,this.handleRippleBlur,this.handleRippleActivate,this.handleRippleDeactivate,this.handleRippleMouseEnter,this.handleRippleMouseLeave,this.handleRippleActivate,this.handleRippleDeactivate,this.handleRippleDeactivate,this._handleKeyDown,this.narrow?(0,s.dy)(i||(i=y(['<slot name="icon"></slot>']))):"",this.name,this._shouldRenderRipple?(0,s.dy)(o||(o=y(["<mwc-ripple></mwc-ripple>"]))):"")}},{kind:"field",key:"_rippleHandlers",value:function(){var e=this;return new h.A((function(){return 
e._shouldRenderRipple=!0,e._ripple}))}},{kind:"method",key:"_handleKeyDown",value:function(e){13===e.keyCode&&e.target.click()}},{kind:"method",decorators:[(0,l.hO)({passive:!0})],key:"handleRippleActivate",value:function(e){this._rippleHandlers.startPress(e)}},{kind:"method",key:"handleRippleDeactivate",value:function(){this._rippleHandlers.endPress()}},{kind:"method",key:"handleRippleMouseEnter",value:function(){this._rippleHandlers.startHover()}},{kind:"method",key:"handleRippleMouseLeave",value:function(){this._rippleHandlers.endHover()}},{kind:"method",key:"handleRippleFocus",value:function(){this._rippleHandlers.startFocus()}},{kind:"method",key:"handleRippleBlur",value:function(){this._rippleHandlers.endFocus()}},{kind:"get",static:!0,key:"styles",value:function(){return(0,s.iv)(a||(a=y(["\n div {\n padding: 0 32px;\n display: flex;\n flex-direction: column;\n text-align: center;\n box-sizing: border-box;\n align-items: center;\n justify-content: center;\n width: 100%;\n height: var(--header-height);\n cursor: pointer;\n position: relative;\n outline: none;\n }\n\n .name {\n white-space: nowrap;\n overflow: hidden;\n text-overflow: ellipsis;\n max-width: 100%;\n }\n\n :host([active]) {\n color: var(--primary-color);\n }\n\n :host(:not([narrow])[active]) div {\n border-bottom: 2px solid var(--primary-color);\n }\n\n :host([narrow]) {\n min-width: 0;\n display: flex;\n justify-content: center;\n overflow: hidden;\n }\n\n :host([narrow]) div {\n padding: 0 4px;\n }\n "])))}}]}}),s.oi);function $(e){return $="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},$(e)}function U(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}function L(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function N(e,t){return 
N=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e},N(e,t)}function q(e){var t=function(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}();return function(){var r,n=ne(e);if(t){var i=ne(this).constructor;r=Reflect.construct(n,arguments,i)}else r=n.apply(this,arguments);return G(this,r)}}function G(e,t){if(t&&("object"===$(t)||"function"==typeof t))return t;if(void 0!==t)throw new TypeError("Derived constructors may only return object or undefined");return W(e)}function W(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function Z(){Z=function(){return e};var e={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(e,t){["method","field"].forEach((function(r){t.forEach((function(t){t.kind===r&&"own"===t.placement&&this.defineClassElement(e,t)}),this)}),this)},initializeClassElements:function(e,t){var r=e.prototype;["method","field"].forEach((function(n){t.forEach((function(t){var i=t.placement;if(t.kind===n&&("static"===i||"prototype"===i)){var o="static"===i?e:r;this.defineClassElement(o,t)}}),this)}),this)},defineClassElement:function(e,t){var r=t.descriptor;if("field"===t.kind){var n=t.initializer;r={enumerable:r.enumerable,writable:r.writable,configurable:r.configurable,value:void 0===n?void 0:n.call(e)}}Object.defineProperty(e,t.key,r)},decorateClass:function(e,t){var r=[],n=[],i={static:[],prototype:[],own:[]};if(e.forEach((function(e){this.addElementPlacement(e,i)}),this),e.forEach((function(e){if(!V(e))return r.push(e);var t=this.decorateElement(e,i);r.push(t.element),r.push.apply(r,t.extras),n.push.apply(n,t.finishers)}),this),!t)return{elements:r,finishers:n};var o=this.decorateConstructor(r,t);return 
n.push.apply(n,o.finishers),o.finishers=n,o},addElementPlacement:function(e,t,r){var n=t[e.placement];if(!r&&-1!==n.indexOf(e.key))throw new TypeError("Duplicated element ("+e.key+")");n.push(e.key)},decorateElement:function(e,t){for(var r=[],n=[],i=e.decorators,o=i.length-1;o>=0;o--){var a=t[e.placement];a.splice(a.indexOf(e.key),1);var s=this.fromElementDescriptor(e),l=this.toElementFinisherExtras((0,i[o])(s)||s);e=l.element,this.addElementPlacement(e,t),l.finisher&&n.push(l.finisher);var c=l.extras;if(c){for(var d=0;d<c.length;d++)this.addElementPlacement(c[d],t);r.push.apply(r,c)}}return{element:e,finishers:n,extras:r}},decorateConstructor:function(e,t){for(var r=[],n=t.length-1;n>=0;n--){var i=this.fromClassDescriptor(e),o=this.toClassDescriptor((0,t[n])(i)||i);if(void 0!==o.finisher&&r.push(o.finisher),void 0!==o.elements){e=o.elements;for(var a=0;a<e.length-1;a++)for(var s=a+1;s<e.length;s++)if(e[a].key===e[s].key&&e[a].placement===e[s].placement)throw new TypeError("Duplicated element ("+e[a].key+")")}}return{elements:e,finishers:r}},fromElementDescriptor:function(e){var t={kind:e.kind,key:e.key,placement:e.placement,descriptor:e.descriptor};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),"field"===e.kind&&(t.initializer=e.initializer),t},toElementDescriptors:function(e){var t;if(void 0!==e)return(t=e,function(e){if(Array.isArray(e))return e}(t)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(t)||function(e,t){if(e){if("string"==typeof e)return te(e,t);var r=Object.prototype.toString.call(e).slice(8,-1);return"Object"===r&&e.constructor&&(r=e.constructor.name),"Map"===r||"Set"===r?Array.from(e):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?te(e,t):void 0}}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() 
method.")}()).map((function(e){var t=this.toElementDescriptor(e);return this.disallowProperty(e,"finisher","An element descriptor"),this.disallowProperty(e,"extras","An element descriptor"),t}),this)},toElementDescriptor:function(e){var t=String(e.kind);if("method"!==t&&"field"!==t)throw new TypeError('An element descriptor\'s .kind property must be either "method" or "field", but a decorator created an element descriptor with .kind "'+t+'"');var r=ee(e.key),n=String(e.placement);if("static"!==n&&"prototype"!==n&&"own"!==n)throw new TypeError('An element descriptor\'s .placement property must be one of "static", "prototype" or "own", but a decorator created an element descriptor with .placement "'+n+'"');var i=e.descriptor;this.disallowProperty(e,"elements","An element descriptor");var o={kind:t,key:r,placement:n,descriptor:Object.assign({},i)};return"field"!==t?this.disallowProperty(e,"initializer","A method descriptor"):(this.disallowProperty(i,"get","The property descriptor of a field descriptor"),this.disallowProperty(i,"set","The property descriptor of a field descriptor"),this.disallowProperty(i,"value","The property descriptor of a field descriptor"),o.initializer=e.initializer),o},toElementFinisherExtras:function(e){return{element:this.toElementDescriptor(e),finisher:Y(e,"finisher"),extras:this.toElementDescriptors(e.extras)}},fromClassDescriptor:function(e){var t={kind:"class",elements:e.map(this.fromElementDescriptor,this)};return Object.defineProperty(t,Symbol.toStringTag,{value:"Descriptor",configurable:!0}),t},toClassDescriptor:function(e){var t=String(e.kind);if("class"!==t)throw new TypeError('A class descriptor\'s .kind property must be "class", but a decorator created a class descriptor with .kind "'+t+'"');this.disallowProperty(e,"key","A class descriptor"),this.disallowProperty(e,"placement","A class descriptor"),this.disallowProperty(e,"descriptor","A class descriptor"),this.disallowProperty(e,"initializer","A class 
descriptor"),this.disallowProperty(e,"extras","A class descriptor");var r=Y(e,"finisher");return{elements:this.toElementDescriptors(e.elements),finisher:r}},runClassFinishers:function(e,t){for(var r=0;r<t.length;r++){var n=(0,t[r])(e);if(void 0!==n){if("function"!=typeof n)throw new TypeError("Finishers must return a constructor.");e=n}}return e},disallowProperty:function(e,t,r){if(void 0!==e[t])throw new TypeError(r+" can't have a ."+t+" property.")}};return e}function J(e){var t,r=ee(e.key);"method"===e.kind?t={value:e.value,writable:!0,configurable:!0,enumerable:!1}:"get"===e.kind?t={get:e.value,configurable:!0,enumerable:!1}:"set"===e.kind?t={set:e.value,configurable:!0,enumerable:!1}:"field"===e.kind&&(t={configurable:!0,writable:!0,enumerable:!0});var n={kind:"field"===e.kind?"field":"method",key:r,placement:e.static?"static":"field"===e.kind?"own":"prototype",descriptor:t};return e.decorators&&(n.decorators=e.decorators),"field"===e.kind&&(n.initializer=e.value),n}function Q(e,t){void 0!==e.descriptor.get?t.descriptor.get=e.descriptor.get:t.descriptor.set=e.descriptor.set}function V(e){return e.decorators&&e.decorators.length}function X(e){return void 0!==e&&!(void 0===e.value&&void 0===e.writable)}function Y(e,t){var r=e[t];if(void 0!==r&&"function"!=typeof r)throw new TypeError("Expected '"+t+"' to be a function");return r}function ee(e){var t=function(e,t){if("object"!==$(e)||null===e)return e;var r=e[Symbol.toPrimitive];if(void 0!==r){var n=r.call(e,t||"default");if("object"!==$(n))return n;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(e,"string");return"symbol"===$(t)?t:String(t)}function te(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}function re(e,t,r){return re="undefined"!=typeof Reflect&&Reflect.get?Reflect.get:function(e,t,r){var n=function(e,t){for(;!Object.prototype.hasOwnProperty.call(e,t)&&null!==(e=ne(e)););return e}(e,t);if(n){var 
i=Object.getOwnPropertyDescriptor(n,t);return i.get?i.get.call(r):i.value}},re(e,t,r||e)}function ne(e){return ne=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)},ne(e)}!function(e,t,r,n){var i=Z();if(n)for(var o=0;o<n.length;o++)i=n[o](i);var a=t((function(e){i.initializeInstanceElements(e,s.elements)}),r),s=i.decorateClass(function(e){for(var t=[],r=function(e){return"method"===e.kind&&e.key===o.key&&e.placement===o.placement},n=0;n<e.length;n++){var i,o=e[n];if("method"===o.kind&&(i=t.find(r)))if(X(o.descriptor)||X(i.descriptor)){if(V(o)||V(i))throw new ReferenceError("Duplicated methods ("+o.key+") can't be decorated.");i.descriptor=o.descriptor}else{if(V(o)){if(V(i))throw new ReferenceError("Decorators can't be placed on different accessors with for the same property ("+o.key+").");i.decorators=o.decorators}Q(o,i)}else t.push(o)}return t}(a.d.map(J)),e);i.initializeClassElements(a.F,s.elements),i.runClassFinishers(a.F,s.finishers)}([(0,l.Mo)("hass-tabs-subpage")],(function(e,t){var r=function(t){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&N(e,t)}(n,t);var r=q(n);function n(){var t;L(this,n);for(var i=arguments.length,o=new Array(i),a=0;a<i;a++)o[a]=arguments[a];return t=r.call.apply(r,[this].concat(o)),e(W(t)),t}return n}(t);return{F:r,d:[{kind:"field",decorators:[(0,l.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,l.Cb)({type:Boolean})],key:"supervisor",value:function(){return!1}},{kind:"field",decorators:[(0,l.Cb)({attribute:!1})],key:"localizeFunc",value:void 0},{kind:"field",decorators:[(0,l.Cb)({type:String,attribute:"back-path"})],key:"backPath",value:void 0},{kind:"field",decorators:[(0,l.Cb)()],key:"backCallback",value:void 
0},{kind:"field",decorators:[(0,l.Cb)({type:Boolean,attribute:"main-page"})],key:"mainPage",value:function(){return!1}},{kind:"field",decorators:[(0,l.Cb)({attribute:!1})],key:"route",value:void 0},{kind:"field",decorators:[(0,l.Cb)({attribute:!1})],key:"tabs",value:void 0},{kind:"field",decorators:[(0,l.Cb)({type:Boolean,reflect:!0})],key:"narrow",value:function(){return!1}},{kind:"field",decorators:[(0,l.Cb)({type:Boolean,reflect:!0,attribute:"is-wide"})],key:"isWide",value:function(){return!1}},{kind:"field",decorators:[(0,l.Cb)({type:Boolean,reflect:!0})],key:"rtl",value:function(){return!1}},{kind:"field",decorators:[(0,l.SB)()],key:"_activeTab",value:void 0},{kind:"field",decorators:[(0,f.i)(".content")],key:"_savedScrollPos",value:void 0},{kind:"field",key:"_getTabs",value:function(){var e=this;return(0,d.Z)((function(t,r,n,i,o,a,l){var c=t.filter((function(t){return(!t.component||t.core||(0,u.p)(e.hass,t.component))&&(!t.advancedOnly||n)}));if(c.length<2){if(1===c.length){var d=c[0];return[d.translationKey?l(d.translationKey):d.name]}return[""]}return c.map((function(t){return(0,s.dy)(j||(j=U(["\n <a href=",">\n <ha-tab\n .hass=","\n .active=","\n .narrow=","\n .name=","\n >\n ","\n </ha-tab>\n </a>\n "])),t.path,e.hass,t.path===(null==r?void 0:r.path),e.narrow,t.translationKey?l(t.translationKey):t.name,t.iconPath?(0,s.dy)(R||(R=U(['<ha-svg-icon\n slot="icon"\n .path=',"\n ></ha-svg-icon>"])),t.iconPath):"")}))}))}},{kind:"method",key:"willUpdate",value:function(e){var t=this;if(e.has("route")&&(this._activeTab=this.tabs.find((function(e){return"".concat(t.route.prefix).concat(t.route.path).includes(e.path)}))),e.has("hass")){var n=e.get("hass");n&&n.language===this.hass.language||(this.rtl=(0,p.HE)(this.hass))}re(ne(r.prototype),"willUpdate",this).call(this,e)}},{kind:"method",key:"render",value:function(){var e,t,r=this._getTabs(this.tabs,this._activeTab,null===(e=this.hass.userData)||void 0===e?void 
0:e.showAdvanced,this.hass.config.components,this.hass.language,this.narrow,this.localizeFunc||this.hass.localize),n=r.length>1;return(0,s.dy)(z||(z=U(['\n <div class="toolbar">\n ',"\n ","\n ",'\n <div id="toolbar-icon">\n <slot name="toolbar-icon"></slot>\n </div>\n </div>\n <div\n class="content ','"\n @scroll=','\n >\n <slot></slot>\n </div>\n <div id="fab" class=','>\n <slot name="fab"></slot>\n </div>\n '])),this.mainPage||!this.backPath&&null!==(t=history.state)&&void 0!==t&&t.root?(0,s.dy)(F||(F=U(["\n <ha-menu-button\n .saserver=","\n .hass=","\n .narrow=","\n ></ha-menu-button>\n "])),this.supervisor,this.hass,this.narrow):this.backPath?(0,s.dy)(B||(B=U(["\n <a href=",">\n <ha-icon-button-arrow-prev\n .hass=","\n ></ha-icon-button-arrow-prev>\n </a>\n "])),this.backPath,this.hass):(0,s.dy)(H||(H=U(["\n <ha-icon-button-arrow-prev\n .hass=","\n @click=","\n ></ha-icon-button-arrow-prev>\n "])),this.hass,this._backTapped),this.narrow||!n?(0,s.dy)(I||(I=U(['<div class="main-title">\n <slot name="header">',"</slot>\n </div>"])),n?"":r[0]):"",n?(0,s.dy)(M||(M=U(['\n <div id="tabbar" class=',">\n ","\n </div>\n "])),(0,c.$)({"bottom-bar":this.narrow}),r):"",(0,c.$)({tabs:n}),this._saveScrollPos,(0,c.$)({tabs:n}))}},{kind:"method",decorators:[(0,l.hO)({passive:!0})],key:"_saveScrollPos",value:function(e){this._savedScrollPos=e.target.scrollTop}},{kind:"method",key:"_backTapped",value:function(){this.backCallback?this.backCallback():history.back()}},{kind:"get",static:!0,key:"styles",value:function(){return(0,s.iv)(K||(K=U(['\n :host {\n display: block;\n height: 100%;\n background-color: var(--primary-background-color);\n }\n\n :host([narrow]) {\n width: 100%;\n position: fixed;\n }\n\n ha-menu-button {\n margin-right: 24px;\n }\n\n .toolbar {\n display: flex;\n align-items: center;\n font-size: 20px;\n height: var(--header-height);\n background-color: var(--sidebar-background-color);\n font-weight: 400;\n border-bottom: 1px solid var(--divider-color);\n padding: 
0 16px;\n box-sizing: border-box;\n }\n .toolbar a {\n color: var(--sidebar-text-color);\n text-decoration: none;\n }\n .bottom-bar a {\n width: 25%;\n }\n\n #tabbar {\n display: flex;\n font-size: 14px;\n overflow: hidden;\n }\n\n #tabbar > a {\n overflow: hidden;\n max-width: 45%;\n }\n\n #tabbar.bottom-bar {\n position: absolute;\n bottom: 0;\n left: 0;\n padding: 0 16px;\n box-sizing: border-box;\n background-color: var(--sidebar-background-color);\n border-top: 1px solid var(--divider-color);\n justify-content: space-around;\n z-index: 2;\n font-size: 12px;\n width: 100%;\n padding-bottom: env(safe-area-inset-bottom);\n }\n\n #tabbar:not(.bottom-bar) {\n flex: 1;\n justify-content: center;\n }\n\n :host(:not([narrow])) #toolbar-icon {\n min-width: 40px;\n }\n\n ha-menu-button,\n ha-icon-button-arrow-prev,\n ::slotted([slot="toolbar-icon"]) {\n display: flex;\n flex-shrink: 0;\n pointer-events: auto;\n color: var(--sidebar-icon-color);\n }\n\n .main-title {\n flex: 1;\n max-height: var(--header-height);\n line-height: 20px;\n color: var(--sidebar-text-color);\n margin: var(--main-title-margin, 0 0 0 24px);\n }\n\n .content {\n position: relative;\n width: calc(\n 100% - env(safe-area-inset-left) - env(safe-area-inset-right)\n );\n margin-left: env(safe-area-inset-left);\n margin-right: env(safe-area-inset-right);\n height: calc(100% - 1px - var(--header-height));\n height: calc(\n 100% - 1px - var(--header-height) - env(safe-area-inset-bottom)\n );\n overflow: auto;\n -webkit-overflow-scrolling: touch;\n }\n\n :host([narrow]) .content.tabs {\n height: calc(100% - 2 * var(--header-height));\n height: calc(\n 100% - 2 * var(--header-height) - env(safe-area-inset-bottom)\n );\n }\n\n #fab {\n position: fixed;\n right: calc(16px + env(safe-area-inset-right));\n bottom: calc(16px + env(safe-area-inset-bottom));\n z-index: 1;\n }\n :host([narrow]) #fab.tabs {\n bottom: calc(84px + env(safe-area-inset-bottom));\n }\n #fab[is-wide] {\n bottom: 24px;\n right: 24px;\n 
}\n :host([rtl]) #fab {\n right: auto;\n left: calc(16px + env(safe-area-inset-left));\n }\n :host([rtl][is-wide]) #fab {\n bottom: 24px;\n left: 24px;\n right: auto;\n }\n '])))}}]}}),s.oi)}}]); | PypiClean |
/dsp_ml-0.1.8-py3-none-any.whl/dsp/utils/metrics.py | import re
import string
import unicodedata
from collections import Counter
from dsp.utils.utils import print_message
def EM(prediction, answers_list):
    """Return the best exact-match score of *prediction* against any gold answer."""
    # isinstance instead of `type(x) == list`: accepts list subclasses too.
    assert isinstance(answers_list, list)
    return max(em_score(prediction, ans) for ans in answers_list)
def F1(prediction, answers_list):
    """Return the best token-level F1 of *prediction* against any gold answer."""
    # isinstance instead of `type(x) == list`: accepts list subclasses too.
    assert isinstance(answers_list, list)
    return max(f1_score(prediction, ans) for ans in answers_list)
def HotPotF1(prediction, answers_list):
    """Return the best HotPotQA-style F1 of *prediction* against any gold answer."""
    # isinstance instead of `type(x) == list`: accepts list subclasses too.
    assert isinstance(answers_list, list)
    return max(hotpot_f1_score(prediction, ans) for ans in answers_list)
def nF1(history, prediction, answers_list, return_recall=False):
    """Return the best novel-token F1 (or recall, if *return_recall*) of
    *prediction* against any gold answer, discounting tokens already in
    *history* and stopwords."""
    # isinstance instead of `type(x) == list`: accepts list subclasses too.
    assert isinstance(answers_list, list)
    return max(novel_f1_score(history, prediction, ans, return_recall=return_recall)
               for ans in answers_list)
def normalize_text(s):
    """Normalize *s* for answer comparison.

    Applies, in order: Unicode NFD normalization, lowercasing, punctuation
    removal, English article removal (a/an/the), and whitespace collapsing.
    """
    text = unicodedata.normalize('NFD', s).lower()
    text = ''.join(ch for ch in text if ch not in string.punctuation)
    text = re.sub(r'\b(a|an|the)\b', ' ', text)
    return ' '.join(text.split())
def em_score(prediction, ground_truth):
    """Exact match between prediction and ground truth after normalization."""
    norm_prediction = normalize_text(prediction)
    norm_ground_truth = normalize_text(ground_truth)
    return norm_prediction == norm_ground_truth
# See: https://github.com/hotpotqa/hotpot/blob/master/hotpot_evaluate_v1.py
# See: https://rajpurkar.github.io/SQuAD-explorer/ under Evaluation Script
# See: QReCC's
def f1_score(prediction, ground_truth):
    """Token-level F1 between the normalized prediction and ground truth.

    Returns 0 when there is no token overlap — including the both-empty edge
    case, where QReCC and SQuAD-2.0 would award 1.0; we return 0 for
    uniformity (a message is printed when this rare case occurs).
    """
    pred_tokens = normalize_text(prediction).split()
    gold_tokens = normalize_text(ground_truth).split()
    overlap = sum((Counter(pred_tokens) & Counter(gold_tokens)).values())

    if not pred_tokens and not gold_tokens:
        # Unlike most tasks, QReCC and SQuAD-2.0 assign 1.0 in this edge case. We don't for uniformity.
        print_message(
            "\n#> F1 Metric: Rare edge case of len(prediction_tokens) == len(ground_truth_tokens) == 0.\n")

    if overlap == 0:
        return 0

    precision = float(overlap) / len(pred_tokens)
    recall = float(overlap) / len(gold_tokens)
    return (2 * precision * recall) / (precision + recall)
def hotpot_f1_score(prediction, ground_truth):
    """HotPotQA-style token F1.

    Special answers ('yes'/'no'/'noanswer') score 0 unless they match the
    other side exactly; otherwise this is plain token-level F1.
    """
    norm_prediction = normalize_text(prediction)
    norm_ground_truth = normalize_text(ground_truth)

    special = ('yes', 'no', 'noanswer')
    if norm_prediction != norm_ground_truth and (
            norm_prediction in special or norm_ground_truth in special):
        return 0

    pred_tokens = norm_prediction.split()
    gold_tokens = norm_ground_truth.split()
    overlap = sum((Counter(pred_tokens) & Counter(gold_tokens)).values())
    if overlap == 0:
        return 0

    precision = float(overlap) / len(pred_tokens)
    recall = float(overlap) / len(gold_tokens)
    return (2 * precision * recall) / (precision + recall)
def precision_score(prediction, ground_truth):
    """Token-level precision of the normalized prediction against the ground truth.

    Returns 0 when no prediction token appears in the ground truth.
    """
    prediction_tokens = normalize_text(prediction).split()
    ground_truth_tokens = normalize_text(ground_truth).split()
    common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
    num_same = sum(common.values())

    if len(prediction_tokens) == len(ground_truth_tokens) == 0:
        # Unlike most tasks, QReCC and SQuAD-2.0 assign 1.0 in this edge case. We don't for uniformity.
        # Bug fix: the message was copy-pasted from f1_score and mislabelled
        # this diagnostic as "F1 Metric".
        print_message(
            "\n#> Precision Metric: Rare edge case of len(prediction_tokens) == len(ground_truth_tokens) == 0.\n")

    if num_same == 0:
        return 0

    precision = 1.0 * num_same / len(prediction_tokens)
    return precision
# Source: https://gist.github.com/sebleier/554280
# NLTK-style English stopword list; novel_f1_score treats these (together with
# the dialogue history) as non-novel tokens and excludes them from scoring.
stopwords = ["i", "me", "my", "myself", "we", "our", "ours", "ourselves", "you", "your", "yours", "yourself",
             "yourselves", "he", "him", "his", "himself", "she", "her", "hers", "herself", "it", "its", "itself",
             "they", "them", "their", "theirs", "themselves", "what", "which", "who", "whom", "this", "that", "these",
             "those", "am", "is", "are", "was", "were", "be", "been", "being", "have", "has", "had", "having", "do",
             "does", "did", "doing", "a", "an", "the", "and", "but", "if", "or", "because", "as", "until", "while",
             "of", "at", "by", "for", "with", "about", "against", "between", "into", "through", "during", "before",
             "after", "above", "below", "to", "from", "up", "down", "in", "out", "on", "off", "over", "under", "again",
             "further", "then", "once", "here", "there", "when", "where", "why", "how", "all", "any", "both", "each",
             "few", "more", "most", "other", "some", "such", "no", "nor", "not", "only", "own", "same", "so", "than",
             "too", "very", "s", "t", "can", "will", "just", "don", "should", "now"]
def novel_f1_score(history, prediction, ground_truth, return_recall=False):
    """F1 (or recall, if *return_recall*) computed only over "novel" tokens:
    tokens that are neither stopwords nor already present in the normalized
    dialogue *history*.
    """
    non_novel = set(normalize_text(history).split() + stopwords)

    pred_tokens = [t for t in normalize_text(prediction).split()
                   if t not in non_novel]
    gold_tokens = [t for t in normalize_text(ground_truth).split()
                   if t not in non_novel]

    overlap = sum((Counter(pred_tokens) & Counter(gold_tokens)).values())
    if overlap == 0:
        return 0

    precision = float(overlap) / len(pred_tokens)
    recall = float(overlap) / len(gold_tokens)
    if return_recall:
        return recall
    return (2 * precision * recall) / (precision + recall)
/internet_ml-3.1.0-py3-none-any.whl/internet_ml/NLP/no_context/QA.py | from typing import Any, List, Tuple
import os
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent.parent) + "/tools/NLP/data")
sys.path.append(str(Path(__file__).parent.parent.parent) + "/tools/NLP")
sys.path.append(str(Path(__file__).parent.parent.parent) + "/tools")
sys.path.append(str(Path(__file__).parent.parent.parent) + "/utils")
import config
import dotenv
import internet
import openai
from ChatGPT import Chatbot
from transformers import pipeline
dotenv.load_dotenv()
def answer(
    query: str,
    model: str = "openai-chatgpt",
    GOOGLE_SEARCH_API_KEY: str = "",
    GOOGLE_SEARCH_ENGINE_ID: str = "",
    OPENAI_API_KEY: str = "",
    CHATGPT_SESSION_TOKEN: str = "",
) -> tuple[Any, list[str]]:
    """Answer *query* using Google search results as grounding context.

    The query is run through a Google Custom Search (via the local
    ``internet`` helper); the concatenated result texts become the context
    handed to the chosen backend:

    * ``openai-chatgpt``          -- reverse-engineered ChatGPT session
    * ``openai-text-davinci-003`` -- OpenAI completion API
    * ``hf-<model-id>``           -- HuggingFace question-answering pipeline

    Unrecognised model names fall back to ``openai-chatgpt``.  Returns a
    ``(answer, sources)`` tuple.

    NOTE(review): if *model* starts with ``openai-`` but is neither supported
    name, the function falls through and implicitly returns None -- confirm
    whether that is intended before relying on it.
    """
    # Fall back to environment variables for any credential not passed in.
    if OPENAI_API_KEY == "":
        OPENAI_API_KEY = str(os.environ.get("OPENAI_API_KEY"))
    openai.api_key = OPENAI_API_KEY
    if CHATGPT_SESSION_TOKEN == "":
        CHATGPT_SESSION_TOKEN = str(os.environ.get("CHATGPT_SESSION_TOKEN"))
    if not (model.startswith("openai-") or model.startswith("hf-")):
        model = "openai-chatgpt"  # Default
    # results[0]: list of page texts used as context; results[1]: returned
    # alongside the answer (presumably source URLs -- confirm against
    # internet.Google).
    results: tuple[list[str], list[str]] = internet.Google(
        query, GOOGLE_SEARCH_API_KEY, GOOGLE_SEARCH_ENGINE_ID
    ).google()
    context: str = str(" ".join([str(string) for string in results[0]]))
    print(f"context: {context}")
    if model.startswith("openai-"):
        if model == "openai-chatgpt":
            # ChatGPT: context truncated to 4000 characters to fit the prompt.
            prompt = f'Use the context: {context[:4000]} and answer the question: "{query}" with the context and prior knowledge. Also write at the very least long answers.'
            chatbot = Chatbot(
                {"session_token": CHATGPT_SESSION_TOKEN},
                conversation_id=None,
                parent_id=None,
            )
            response = chatbot.ask(
                prompt=prompt,
                conversation_id=None,
                parent_id=None,
            )
            return (response["message"], results[1])
        else:
            if model == "openai-text-davinci-003":
                # text-davinci-003: context truncated to 3000 characters.
                # NOTE(review): max_tokens is len(context) -- a *character*
                # count, not a token count; verify this is intended.
                prompt = f'Use the context: {context[:3000]} and answer the question: "{query}" with the context and prior knowledge. Also write at the very least long answers.'
                response = openai.Completion.create(
                    model="text-davinci-003",
                    prompt=prompt,
                    max_tokens=len(context),
                    n=1,
                    stop=None,
                    temperature=0.5,
                )
                return (response.choices[0].text, results[1])
            # TODO: add support for other OpenAI models later
    else:
        # HuggingFace: strip the "hf-" prefix to obtain the hub model id.
        model = model.replace("hf-", "", 1)
        qa_model = pipeline("question-answering", model=model)
        response = qa_model(question=query, context=context)
        return (response["answer"], results[1])
# NOTE(review): this demo invocation runs at *import* time and performs live
# network requests (Google search + model download/inference).  Consider
# guarding it with `if __name__ == "__main__":`.
print(
    answer(
        query="What is the newest pokemon game?",
        model="hf-deepset/xlm-roberta-large-squad2",
    )
)
/ansible-base-2.10.17.tar.gz/ansible-base-2.10.17/lib/ansible/plugins/callback/minimal.py |
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: minimal
type: stdout
short_description: minimal Ansible screen output
version_added: historical
description:
- This is the default output callback used by the ansible command (ad-hoc)
'''
from ansible.plugins.callback import CallbackBase
from ansible import constants as C
class CallbackModule(CallbackBase):

    '''
    This is the default callback interface, which simply prints messages
    to stdout when new callback events are received.
    '''

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'
    CALLBACK_NAME = 'minimal'

    def _command_generic_msg(self, host, result, caption):
        ''' output the result of a command run '''
        pieces = [
            "%s | %s | rc=%s >>\n" % (host, caption, result.get('rc', -1)),
            result.get('stdout', ''),
            result.get('stderr', ''),
            result.get('msg', ''),
        ]
        return ''.join(pieces) + "\n"

    def v2_runner_on_failed(self, result, ignore_errors=False):
        ''' print a failure, using the raw command form for raw modules '''
        self._handle_exception(result._result)
        self._handle_warnings(result._result)

        host = result._host.get_name()
        if result._task.action in C.MODULE_NO_JSON and 'module_stderr' not in result._result:
            msg = self._command_generic_msg(host, result._result, "FAILED")
        else:
            msg = "%s | FAILED! => %s" % (host, self._dump_results(result._result, indent=4))
        self._display.display(msg, color=C.COLOR_ERROR)

    def v2_runner_on_ok(self, result):
        ''' print a success, colored by whether the task reported a change '''
        self._clean_results(result._result, result._task.action)
        self._handle_warnings(result._result)

        changed = result._result.get('changed', False)
        color = C.COLOR_CHANGED if changed else C.COLOR_OK
        state = 'CHANGED' if changed else 'SUCCESS'

        host = result._host.get_name()
        if result._task.action in C.MODULE_NO_JSON and 'ansible_job_id' not in result._result:
            msg = self._command_generic_msg(host, result._result, state)
        else:
            msg = "%s | %s => %s" % (host, state, self._dump_results(result._result, indent=4))
        self._display.display(msg, color=color)

    def v2_runner_on_skipped(self, result):
        self._display.display("%s | SKIPPED" % (result._host.get_name()), color=C.COLOR_SKIP)

    def v2_runner_on_unreachable(self, result):
        msg = "%s | UNREACHABLE! => %s" % (result._host.get_name(),
                                           self._dump_results(result._result, indent=4))
        self._display.display(msg, color=C.COLOR_UNREACHABLE)

    def v2_on_file_diff(self, result):
        # Only display when a non-empty diff is present.
        diff = result._result.get('diff')
        if diff:
            self._display.display(self._get_diff(diff))
/jsonschema-extended-0.6.tar.gz/jsonschema-extended-0.6/jsonschema_extended/cli.py | from __future__ import absolute_import
import argparse
import json
import sys
from jsonschema_extended._reflect import namedAny
from jsonschema_extended.validators import validator_for
def _namedAnyWithDefault(name):
    """Resolve *name* to an object, defaulting to the ``jsonschema`` package
    namespace when no dotted path is given."""
    qualified = name if "." in name else "jsonschema." + name
    return namedAny(qualified)
def _json_file(path):
with open(path) as file:
return json.load(file)
# Command-line definition for the JSON Schema validation CLI.
parser = argparse.ArgumentParser(
    description="JSON Schema Validation CLI",
)
# Each -i/--instance is loaded from disk immediately (type=_json_file) and
# appended to the "instances" list.
parser.add_argument(
    "-i", "--instance",
    action="append",
    dest="instances",
    type=_json_file,
    help=(
        # Bug fix: the implicit string concatenation was missing a trailing
        # space, rendering as "filename.json)to validate" in --help output.
        "a path to a JSON instance (i.e. filename.json) "
        "to validate (may be specified multiple times)"
    ),
)
# Format string applied to each validation error via str.format(error=...).
parser.add_argument(
    "-F", "--error-format",
    default="{error.instance}: {error.message}\n",
    help=(
        "the format to use for each error output message, specified in "
        "a form suitable for passing to str.format, which will be called "
        "with 'error' for each error"
    ),
)
# A dotted path or a short registered name, resolved by _namedAnyWithDefault.
# When omitted, parse_args derives a validator from the schema itself.
parser.add_argument(
    "-V", "--validator",
    type=_namedAnyWithDefault,
    help=(
        "the fully qualified object name of a validator to use, or, for "
        "validators that are registered with jsonschema, simply the name "
        "of the class."
    ),
)
# Positional argument: the schema document, loaded from disk.
parser.add_argument(
    "schema",
    help="the JSON Schema to validate with (i.e. filename.schema)",
    type=_json_file,
)
def parse_args(args):
    """Parse CLI *args* into a plain dict.

    An empty argument list falls back to ``--help`` so usage is printed.
    When no validator was given on the command line, one is chosen from the
    schema via ``validator_for``.
    """
    namespace = parser.parse_args(args=args or ["--help"])
    parsed = vars(namespace)
    if parsed["validator"] is None:
        parsed["validator"] = validator_for(parsed["schema"])
    return parsed
def main(args=None):
    """CLI entry point; exits with a truthy status when validation fails.

    *args* defaults to the current ``sys.argv[1:]`` at call time.  The
    original default of ``args=sys.argv[1:]`` was evaluated once at import
    time, freezing whatever argv held when the module was first loaded.
    """
    if args is None:
        args = sys.argv[1:]
    sys.exit(run(arguments=parse_args(args=args)))
def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
    """Validate every instance in *arguments*, writing each error to *stderr*.

    Returns a truthy value if any instance failed validation (used by main()
    as the process exit status).
    """
    fmt = arguments["error_format"]
    validator = arguments["validator"](schema=arguments["schema"])
    validator.check_schema(arguments["schema"])

    error_count = 0
    for instance in arguments["instances"] or ():
        for error in validator.iter_errors(instance):
            stderr.write(fmt.format(error=error))
            error_count += 1
    return error_count > 0
/SHET-1.0.1.tar.gz/SHET-1.0.1/src/client/client.py | from twisted.internet.protocol import ReconnectingClientFactory
from twisted.internet.defer import Deferred
from twisted.internet import reactor
from shet import commands
from shet import ShetProtocol
from shet.command_runner import command
from shet import path as shetpath
import os
from types import MethodType
import collections
import uuid
class Decorator(object):
    """Base for the shet_* method-marking decorators.

    Supports both bare usage (``@shet_action``) and named usage
    (``@shet_action("path")``).  The wrapped callable is stored under the
    attribute named by ``func_name`` so subclasses can rename it.
    """

    func_name = "f"

    def __init__(self, f):
        # Named usage passes the SHET path (a string); bare usage passes the
        # function itself.
        self.has_func = not isinstance(f, basestring)

        if self.has_func:
            self.name = None
            setattr(self, self.func_name, f)
        else:
            # Consistency fix: store the placeholder under func_name.  The
            # original unconditionally set ``self.f``, leaving subclasses
            # that rename func_name (e.g. shet_property's "get") without
            # their attribute until the decorator was applied.
            setattr(self, self.func_name, None)
            self.name = f

    def __call__(self, *args, **kwargs):
        if self.has_func:
            # Already holds the wrapped callable: delegate the call to it.
            return getattr(self, self.func_name)(*args, **kwargs)
        else:
            # Named usage: this call supplies the function being decorated.
            setattr(self, self.func_name, args[0])
            # Bug fix: mark the function as present (the original set False,
            # so any later call clobbered the stored function with its first
            # argument instead of invoking it).
            self.has_func = True
            return self
class shet_action(Decorator):
    # Marks a method as a SHET action.  The callable is stored on ``.f`` (the
    # Decorator default); ShetClient.__init__ registers it via add_action,
    # using the optional name argument (or the attribute name) as the path.
    pass
class shet_property(Decorator):
    # Marks a method as a SHET property getter; the wrapped callable is
    # stored on ``.get`` (see func_name).  Attach the setter afterwards with
    # ``@<prop>.set``.
    func_name = "get"

    def __init__(self, f):
        Decorator.__init__(self, f)
        # The setter is attached later via the .set decorator below.
        self.set_f = None

    def set(self, f):
        # Decorator for the property's setter.  Returns *f* unchanged so the
        # method remains usable under its own name.
        self.set_f = f
        return f
class PingingShetProtocol(ShetProtocol):
    """A SHET protocol that pings the server every factory.ping_interval seconds.
    If the last ping does not return by the time the next one is scheduled, the
    connection is dropped.
    """

    def connectionMade(self):
        # Make do_ping think it was successful the first time it is called.
        self.ping_returned = True
        self.ping_timer = reactor.callLater(self.factory.ping_interval, self.do_ping)

    def connectionLost(self, reason):
        # Stop the ping loop so the reactor holds no stale delayed call.
        if self.ping_timer.active():
            self.ping_timer.cancel()

    def on_ping_return(self, *args):
        """Called when a ping is returned from the server."""
        self.ping_returned = True

    def do_ping(self):
        """Check the status of the last ping. If it returned, ping again and set up
        another delayed call. If it didn't return, drop the connection.
        """
        if self.ping_returned:
            # Bug fix: mark the ping as outstanding until on_ping_return
            # fires.  The original never set this to False, so a missed ping
            # could never be detected and dead connections were never dropped,
            # contrary to the documented behaviour.
            self.ping_returned = False
            d = self.send_ping()
            d.addCallbacks(self.on_ping_return, self.on_ping_return)
            self.ping_timer = reactor.callLater(self.factory.ping_interval, self.do_ping)
        else:
            self.transport.loseConnection()

    def send_ping(self, *args):
        """Send a ping to the server, returning a deferred."""
        return self.send_command_with_callback(commands.ping, *args)
class ShetClientProtocol(PingingShetProtocol):
    """The low-level SHET client protocol.
    This must be used with the ShetClient as a factory - use that to
    implement clients.
    """

    def connectionMade(self):
        """Set up the connection - initialise all properties, events etc.
        and send all queued items.
        """
        self.factory.resetDelay()
        # Start the ping loop before anything else.
        PingingShetProtocol.connectionMade(self)
        # Publish ourselves on the factory so queued operations go live.
        self.factory.client = self
        self.factory.on_connect()
        self.send_register(self.factory.connection_id)
        # Set up properties
        for prop_path in self.factory.properties:
            self.send_mkprop(prop_path)
        # Set up events
        for event_path in self.factory.events:
            self.send_mkevent(event_path)
        # Set up watched events.
        for event_path in self.factory.watched_events:
            self.send_watch(event_path)
        # Set up actions.
        for event_path in self.factory.actions:
            self.send_mkaction(event_path)
        # Perform any queued get operations.
        for path, d in self.factory.get_queue:
            self.send_get(path).chainDeferred(d)
        self.factory.get_queue = []
        # Perform any queued set operations.
        for path, value, d in self.factory.set_queue:
            self.send_set(path, value).chainDeferred(d)
        self.factory.set_queue = []
        # Perform any queued raise operations.
        for path, args, d in self.factory.raise_queue:
            self.send_raise(path, *args).chainDeferred(d)
        self.factory.raise_queue = []
        # Perform any queued call operations.
        for path, args, d in self.factory.call_queue:
            self.send_call(path, *args).chainDeferred(d)
        self.factory.call_queue = []

    def connectionLost(self, reason):
        """Called on disconnect."""
        PingingShetProtocol.connectionLost(self, reason)
        self.factory.client = None
        self.factory.on_disconnect()

    # Thin wrappers: one outgoing SHET command each, returning a Deferred.
    def send_register(self, connection_id):
        return self.send_command_with_callback(commands.register, connection_id)

    # Properties
    def send_mkprop(self, path):
        return self.send_command_with_callback(commands.mkprop, path)

    def send_rmprop(self, path):
        return self.send_command_with_callback(commands.rmprop, path)

    def send_get(self, path):
        return self.send_command_with_callback(commands.get, path)

    def send_set(self, path, value):
        return self.send_command_with_callback(commands.set, path, value)

    # Events
    def send_mkevent(self, path):
        return self.send_command_with_callback(commands.mkevent, path)

    def send_rmevent(self, path):
        return self.send_command_with_callback(commands.rmevent, path)

    def send_raise(self, path, *args):
        return self.send_command_with_callback(commands._raise,
                                               path, *args)

    def send_watch(self, path):
        return self.send_command_with_callback(commands.watch, path)

    def send_ignore(self, path):
        return self.send_command_with_callback(commands.ignore, path)

    # Actions
    def send_mkaction(self, path):
        return self.send_command_with_callback(commands.mkaction, path)

    def send_rmaction(self, path):
        return self.send_command_with_callback(commands.rmaction, path)

    def send_call(self, path, *args):
        return self.send_command_with_callback(commands.call, path, *args)

    # Incoming commands from the server, dispatched via @command.
    @command(commands.ping)
    def cmd_ping(self, *args):
        # Echo the ping arguments straight back.
        return args

    @command(commands.getprop)
    def cmd_getprop(self, path):
        # Server is reading one of our hosted properties.
        return self.factory._get_property(path)

    @command(commands.setprop)
    def cmd_setprop(self, path, value):
        # Server is writing one of our hosted properties.
        return self.factory._set_property(path, value)

    @command(commands.event)
    def cmd_event(self, path, *args):
        # Iterate a copy of the watch list so that callbacks which add or
        # remove watches don't disturb this iteration.
        for event in list(self.factory.watched_events[path]):
            if event.callback is not None:
                event.callback(*args)

    @command(commands.eventdeleted)
    def cmd_eventdeleted(self, path):
        # A watched event disappeared on the server side.
        for event in list(self.factory.watched_events[path]):
            if event.delete_callback is not None:
                event.delete_callback()

    @command(commands.eventcreated)
    def cmd_eventcreated(self, path):
        # A watched event (re)appeared on the server side.
        for event in list(self.factory.watched_events[path]):
            if event.create_callback is not None:
                event.create_callback()

    @command(commands.docall)
    def cmd_docall(self, path, *args):
        # Server is invoking one of our hosted actions.
        return self.factory.actions[path].call(*args)
class ShetClient(ReconnectingClientFactory):
"""A SHET client.
Subclass this to add functionality, or possibly use it as-is.
"""
protocol = ShetClientProtocol
root = '/'
# Maximum reconnection delay.
maxDelay = 5
# Time between sending pings to the server.
ping_interval = 30
def __init__(self):
self.connection_id = str(uuid.uuid1())
self.properties = {}
self.events = {}
self.watched_events = collections.defaultdict(list)
self.actions = {}
self.get_queue = []
self.set_queue = []
self.raise_queue = []
self.call_queue = []
self.client = None
for name in dir(self):
attr = getattr(self, name)
if isinstance(attr, shet_action):
action_name = attr.name or name
self.add_action(action_name, MethodType(attr.f, self, self.__class__))
setattr(self, name, MethodType(attr.f, self, self.__class__))
elif isinstance(attr, shet_property):
prop_name = attr.name or name
self.add_property(prop_name,
MethodType(attr.get, self, self.__class__),
MethodType(attr.set_f, self, self.__class__))
setattr(self, name, MethodType(attr.get, self, self.__class__),)
def on_connect(self):
"""Called when the client connects to the server.
"""
pass
def on_disconnect(self):
"""Called when the client disconnects from the server.
"""
def reset(self):
"""Unregister everything!
"""
for prop in self.properties.values():
self.remove_property(prop)
for event in self.events.values():
self.remove_event(event)
for action in self.actions.values():
self.remove_action(action)
for event in self.watched_events:
for watch in event:
self.unwatch_event(watch)
def relative_path(self, path):
if path.startswith('/'):
return path
else:
return shetpath.join(self.root, path)
def add_property(self, path, get_callback, set_callback):
"""Create a property.
@param path The path of the property.
@param get_callback Function called with no args to get
the value of the property.
@param set_callback Function called with a single argument to
set the property.
@return Object representing the property.
Pass to remove_property() to remove.
"""
path = self.relative_path(path)
prop = Property(path, get_callback, set_callback)
self.properties[path] = prop
if self.client is not None:
self.client.send_mkprop(path)
return prop
def remove_property(self, prop):
"""Remove a property.
@param prop Object returned from add_property().
"""
del self.properties[prop.path]
if self.client is not None:
self.client.send_rmprop(prop.path)
def add_event(self, path):
"""Create an event.
@param path The path to the event.
@return An Event object.
Pass this to remove_event() to remove the event.
Call this like a function to raise the event.
"""
path = self.relative_path(path)
def _raise(*args):
return self._raise(path, *args)
event = Event(path, _raise)
self.events[path] = event
if self.client is not None:
self.client.send_mkevent(path)
return event
def remove_event(self, event):
"""Remove an event.
@param event Object returned from add_event().
"""
del self.events[event.path]
if self.client is not None:
self.client.send_rmevent(event.path)
def watch_event(self, path, callback, create_callback=None, delete_callback=None):
"""Watch an event on the server.
@param path Path to the event.
@param callback Called when the event is raised.
@param delete_callback Called if the event is deleted.
@return An object that can be passed to unwatch_event()
to stop watching this event.
"""
path = self.relative_path(path)
if not self.watched_events[path] and self.client is not None:
self.client.send_watch(path)
watch = Watch(path, callback, create_callback, delete_callback)
self.watched_events[path].append(watch)
return watch
def wait_for(self, path):
"""Wait for an event to fire on the server.
@param path Path to the event.
@return A deferred that will be called on the event being fired.
"""
d = Deferred()
# We can't define the callback function without having the event.
event = self.watch_event(path, None)
def callback(*args):
d.callback(args)
self.unwatch_event(event)
# Inject the callback.
event.callback = callback
return d
def unwatch_event(self, event):
"""Stop watching an event.
@param event an object returned from watch_event().
"""
self.watched_events[event.path].remove(event)
# TODO: possibly unwatch event.
def add_action(self, path, callback):
"""Create an action.
@param path Path to the action.
@param callback Called when the action is called.
@return An object that can be passed to remove_action()
to remove this action.
"""
path = self.relative_path(path)
action = Action(path, callback)
self.actions[path] = action
if self.client is not None:
self.client.send_mkaction(path)
return action
def remove_action(self, action):
"""Remove an action.
@param action The action to remove, returned from add_action().
"""
del self.actions[action.path]
if self.client is not None:
self.client.send_rmaction(action.path)
def call(self, path, *args):
"""Call an action on the server.
@param path Path to the action.
@param *args the arguments of the action.
@return The Deferred result of the action.
"""
path = self.relative_path(path)
if self.client is not None:
return self.client.send_call(path, *args)
else:
d = Deferred()
self.call_queue.append((path, args, d))
return d
def get(self, path):
"""Get a property on the server.
@param path The path to the property.
@return The Deferred value of the property.
"""
path = self.relative_path(path)
if self.client is not None:
return self.client.send_get(path)
else:
d = Deferred()
self.get_queue.append((path, d))
return d
def set(self, path, value):
"""Set a property on the server.
@param path The path to the property.
@param value The new value.
@return Deferred success/failure.
"""
path = self.relative_path(path)
if self.client is not None:
return self.client.send_set(path, value)
else:
d = Deferred()
self.set_queue.append((path, value, d))
return d
def act(self, path, *args):
"""Either call an action, or set a property.
If the path starts with "action:", it is called as an action.
If the path starts with "property:", it is set as a property.
Otherwise, it is assumed to be an action.
"""
if path.startswith("action:"):
path = path.partition(":")[2].strip()
self.call(path, *args)
elif path.startswith("property:"):
path = path.partition(":")[2].strip()
self.set(path, *args)
else:
self.call(path, *args)
def _raise(self, path, *args):
if self.client is not None:
return self.client.send_raise(path, *args)
else:
d = Deferred()
self.raise_queue.append((path, args, d))
return d
def _get_property(self, path):
return self.properties[path].get()
def _set_property(self, path, value):
return self.properties[path].set(value)
def install(self, address=None, port=None):
"""Install this instance into the twisted reactor.
Use this if you want to run some other service in parallel.
@param address address to connect to; defaults to $SHET_HOST or localhost.
@param port the port to use; defaults to $SHET_PORT or 11235
"""
address = address or os.getenv("SHET_HOST") or "localhost"
port = int(port or os.getenv("SHET_PORT") or 11235)
reactor.connectTCP(address, port, self)
def run(self, address=None, port=None):
    """Run this instance of the client until stop() is called.

    nb: This calls twisted.internet.reactor.run(), so it blocks.
    @param address address to connect to; defaults to $SHET_HOST or localhost.
    @param port the port to use; defaults to $SHET_PORT or 11235
    """
    self.install(address=address, port=port)
    reactor.run()
def stop(self):
    """Stop the client by shutting down the global twisted reactor.

    nb: This calls twisted.internet.reactor.stop()
    """
    reactor.stop()
# Internal objects that the client programmer doesn't need to know about.
class Node(object):
    """Marker base class for every kind of node in the SHET tree."""
    pass
class Property(Node):
    """A readable/writable value at *path*, backed by a pair of callbacks."""

    def __init__(self, path, get_callback, set_callback):
        self.path = path
        self.get_callback = get_callback
        self.set_callback = set_callback

    def get(self):
        """Return the current value by invoking the getter callback."""
        return self.get_callback()

    def set(self, value):
        """Store *value* by invoking the setter callback."""
        self.set_callback(value)
class Event(Node):
    """An event registered at *path*; raising it invokes *raise_callback*."""

    def __init__(self, path, raise_callback):
        self.path = path
        self.raise_callback = raise_callback

    def _raise(self, *args):
        """Fire the event, forwarding any arguments to the callback."""
        self.raise_callback(*args)

    # Events may also be raised by calling the object directly.
    __call__ = _raise
class Watch(Node):
    """Bookkeeping for a watched path.

    Holds the callbacks fired when the watched node changes value, is
    created, or is deleted.
    """

    def __init__(self, path, callback, create_callback, delete_callback):
        self.path = path
        self.callback = callback
        self.create_callback = create_callback
        self.delete_callback = delete_callback
class Action(Node):
    """A callable action registered at *path*."""

    def __init__(self, path, callback):
        self.path = path
        self.callback = callback

    def call(self, *args):
        """Invoke the action's callback and return its result."""
        return self.callback(*args)
/easy_boto3-0.1.7.tar.gz/easy_boto3-0.1.7/README.md | [![Upload Python Package](https://github.com/jermwatt/easy_boto3/actions/workflows/python-publish.yml/badge.svg)](https://github.com/jermwatt/easy_boto3/actions/workflows/python-publish.yml)
# `boto3` made easy
`easy_boto3` simplifies `boto3` usage by adding a command line interface (CLI) and an abridged Python API that let you easily create, manage, and tear down AWS resources using `boto3` and `awscli` through a simple, easy-to-use, and easy-to-refactor `.yaml` configuration file.
### Contents
- [`boto3` made easy](#boto3-made-easy)
- [Contents](#contents)
- [Installation](#installation)
- [Using `easy_boto3` CLI](#using-easy_boto3-cli)
- [Managing ec2 instances](#managing-ec2-instances)
- [Creating an ec2 instance with cloudwatch alarm](#creating-an-ec2-instance-with-cloudwatch-alarm)
- [Show instance cloud\_init logs](#show-instance-cloud_init-logs)
- [Show instance syslog logs](#show-instance-syslog-logs)
- [Listing ec2 instances](#listing-ec2-instances)
- [Stopping an ec2 instance](#stopping-an-ec2-instance)
- [Starting a stopped ec2 instance](#starting-a-stopped-ec2-instance)
- [Terminating ec2 instances by id](#terminating-ec2-instances-by-id)
- [Managing AWS profiles](#managing-aws-profiles)
- [List all AWS profiles in `~/.aws/credentials`](#list-all-aws-profiles-in-awscredentials)
- [List active AWS profile (currently used by `easy_boto3`)](#list-active-aws-profile-currently-used-by-easy_boto3)
- [Set active AWS profile (currently used by `easy_boto3`)](#set-active-aws-profile-currently-used-by-easy_boto3)
- [Using `easy_boto3`'s Python API](#using-easy_boto3s-python-api)
- [Creating an ec2 instance](#creating-an-ec2-instance)
## Installation
You can install `easy_boto3` via `pip` as
```bash
pip install easy-boto3
```
## Using `easy_boto3` CLI
### Managing ec2 instances
#### Creating an ec2 instance with cloudwatch alarm
`easy_boto3` allows you to translate a standard `boto3` pythonic infrastructure task — like instantiating an `ec2` instance with an attached `cloudwatch` CPU usage alarm — from a complex pythonic implementation like the following
```python
import boto3
# read in aws_access_key_id and aws_secret_access_key based on input profile_name using boto3
session = boto3.Session(profile_name=profile_name)
# create ec2 controller from session
ec2_controller = session.resource('ec2')
# read in startup script
with open(startup_script_path, 'r') as file:
startup_script = file.read()
# create a new EC2 instance
instances = ec2_controller.create_instances(
ImageId='ami-03f65b8614a860c29',
InstanceName='example_worker',
NetworkInterfaces=[{
'DeviceIndex': 0,
'Groups': ['sg-1ed8w56f12347f63d'],
'AssociatePublicIpAddress': True}],
UserData=startup_script,
TagSpecifications=[{'ResourceType': 'instance',
'Tags': [{'Key': 'Name', 'Value': 'example_worker'}]}],
InstanceType='t2.micro',
KeyName=<ssh_key_name>,
)
# wait for the instance to enter running state
instances[0].wait_until_running()
instance_id = instances[0].id
# create cloud watch client
cloudwatch_client = session.client('cloudwatch')
# enable detailed monitoring for the instance
ec2_client.monitor_instances(InstanceIds=[instance_id])
# create alarm
result = cloudwatch_client.put_metric_alarm(
AlarmName=cpu_alarm_name,
ComparisonOperator='GreaterThanOrEqualToThreshold',
EvaluationPeriods=1,
MetricName='CPUUtilization',
Namespace='AWS/EC2',
Period=60,
Statistic='Average',
Threshold=threshold_value,
Dimensions=[
{
'Name': 'InstanceId',
'Value': instance_id
},
],
)
```
into an easier-to-reuse, easier-to-refactor `.yaml` configuration file that declares the same task with the same `boto3` option syntax. For example, the above task can be accomplished using the following analogous `.yaml` configuration file:
```yaml
aws_profile: <your profile name in config/credentials of ~/.aws>
ec2_instance:
instance_details:
InstanceName: example_worker
InstanceType: t2.micro
ImageId: ami-03f65b8614a860c29
BlockDeviceMappings:
DeviceName: /dev/sda1
Ebs:
DeleteOnTermination: true
VolumeSize: 8
VolumeType: gp2
Groups:
- <your security group>
ssh_details:
Config:
User: ubuntu
IdentityFile: <path to ssh key>
ForwardAgent: yes
Options:
add_to_known_hosts: true
test_connection: true
script_details:
filepath: <path_to_startup>
inject_aws_creds: true
ssh_forwarding: true
github_host: true
alarm_details:
ComparisonOperator: GreaterThanOrEqualToThreshold
EvaluationPeriods: 1
MetricName: CPUUtilization
Namespace: AWS/EC2
Period: 60
Statistic: Average
Threshold: 0.99
```
Using `easy_boto3` and this configuration `config.yaml` the same task - instantiating an `ec2` instance - can be accomplished via the command line as follows:
```bash
easy_boto3 ec2 create config.yaml
```
#### Show instance cloud_init logs
```bash
easy_boto3 ec2 check_cloud_init_logs <instance_id>
```
#### Show instance syslog logs
```bash
easy_boto3 ec2 check_syslog <instance_id>
```
#### Listing ec2 instances
You can use `easy_boto3` to easily see (all / running / stopped / terminated) instances in your AWS account as follows.
See all instances
```bash
easy_boto3 ec2 list_all
```
See just running instances
```bash
easy_boto3 ec2 list_running
```
The output of this command gives the instance id, name, type, and state of each instance in your account - looking like this
```bash
{'instance_id': 'instance_id', 'instance_state': 'running', 'instance_type': 't2.micro'}
```
You can filter by state - running, stopped, terminated - as follows
```bash
easy_boto3 ec2 list_running
```
```bash
easy_boto3 ec2 list_stopped
```
```bash
easy_boto3 ec2 list_terminated
```
#### Stopping an ec2 instance
```bash
easy_boto3 ec2 stop <instance_id>
```
#### Starting a stopped ec2 instance
```bash
easy_boto3 ec2 start <instance_id>
```
#### Terminating ec2 instances by id
You can use `easy_boto3` CLI to terminate an ec2 instance by id as follows
```bash
easy_boto3 ec2 terminate <instance_id>
```
Note: by default this will delete any cloudwatch alarms associated with the instance.
### Managing AWS profiles
You can use `easy_boto3` CLI to manage AWS profiles as follows
#### List all AWS profiles in `~/.aws/credentials`
```bash
easy_boto3 profile list_all
```
#### List active AWS profile (currently used by `easy_boto3`)
```bash
easy_boto3 profile list_active
```
#### Set active AWS profile (currently used by `easy_boto3`)
```bash
easy_boto3 profile set <profile_name>
```
## Using `easy_boto3`'s Python API
In addition to config driven command line use, `easy_boto3` also offers a simplified python API that makes creating and managing AWS resources with `boto3` easier.
### Creating an ec2 instance
In this example an ec2 instance of user-specified type and AMI is created.
Note `block_device_mappings` and `UserData` startup bash script are optional.
```python
from easy_boto3 import set_profile
from easy_boto3.startup_script_management import read_startup_script
from easy_boto3.ec2_instance_management import launch_instance
# set aws profile - optional - set to 'default' profile by default
set_profile.set('my_aws_profile') # -> returns None if profile is valid
# read in startup script from file
UserData = read_startup_script('./path/to/startup.sh')
# build ec2 launch instance command
InstanceName = 'example_worker'
InstanceType = 't2.micro'
ImageId = 'ami-03f65b8614a860c29'
Groups = ['my_security_group_id']
BlockDeviceMappings = [
{
'DeviceName': '/dev/sda1',
'Ebs': {
'VolumeSize': 300,
'VolumeType': 'gp2'
}
}
]
KeyName = 'my_ssh_key_name'
# launch instance
launch_result = launch_instance(KeyName=KeyName,
InstanceName=InstanceName,
InstanceType=InstanceType,
ImageId=ImageId,
Groups=Groups,
BlockDeviceMappings=BlockDeviceMappings,
UserData=UserData)
# wait for the instance to enter running state
launch_result.wait_until_running()
# get instance id
instance_id = launch_result[0].id
```
Further uses of the Python API can be found in the `examples/python_api` directory.
| PypiClean |
/Wallace-0.9.1.tar.gz/Wallace-0.9.1/wallace/db/base/sql/table.py | import functools
from wallace.config import GetDBConn
from wallace.db.base.sql.writer import QueryWriter
from wallace.errors import DoesNotExist, SetupError, ValidationError
def catch_missing_table_name(f):
    """Decorator ensuring the table is configured before *f* runs.

    Raises SetupError(405) when ``table_name`` is unset, and lazily builds
    the class-level QueryWriter on first use.
    """
    @functools.wraps(f)
    def wrapper(cls, *args, **kwargs):
        if not cls.table_name:
            raise SetupError(405, 'must set table_name')
        if not cls._query_writer:
            # Built once and cached on the class for all later calls.
            cls._query_writer = QueryWriter(cls.table_name)
        return f(cls, *args, **kwargs)
    return wrapper
class SqlTable(object):
    """Classmethod-based CRUD helper bound to a single SQL table.

    Subclasses set ``table_name`` (and optionally ``db_name``); every query
    method then builds SQL through a lazily-created QueryWriter and runs it
    on the configured connection.
    """

    db = GetDBConn()
    db_name = None
    table_name = None
    _query_writer = None

    @classmethod
    @catch_missing_table_name
    def delete(cls, **kw):
        """Delete all rows matching the given filters."""
        query, params = cls._query_writer.delete(**kw)
        cls.db.execute(query, params)

    @classmethod
    @catch_missing_table_name
    def exists(cls, **kw):
        """Return True if any row matches the given filters."""
        query, params = cls._query_writer.exists(**kw)
        row = cls.db.fetchone(query, params)
        return row.get('exists')

    @classmethod
    @catch_missing_table_name
    def fetch_one(cls, **kw):
        """Return a single row matching the filters."""
        query, params = cls._query_writer.select(**kw)
        return cls.db.fetchone(query, params)

    @classmethod
    @catch_missing_table_name
    def find_one(cls, **kw):
        """Return the unique row matching the filters.

        Raises ValidationError(407) when no filters are given or the match
        is not unique, and DoesNotExist(406) when nothing matches.
        """
        if not kw:
            raise ValidationError(407)
        # limit=2 is enough to detect a non-unique match cheaply.
        rows = cls.select(limit=2, **kw)
        if not rows:
            raise DoesNotExist(406)
        if len(rows) != 1:
            raise ValidationError(407, 'expected a unique result')
        return rows[0]

    @classmethod
    @catch_missing_table_name
    def insert(cls, **kw):
        """Insert a new row with the given column values."""
        query, params = cls._query_writer.insert(**kw)
        cls.db.execute(query, params)

    @classmethod
    @catch_missing_table_name
    def select(cls, **kw):
        """Return all rows matching the given filters."""
        query, params = cls._query_writer.select(**kw)
        return cls.db.fetchall(query, params)

    @classmethod
    @catch_missing_table_name
    def update(cls, new_data, **kw):
        """Apply *new_data* to the rows matching the filters."""
        query, params = cls._query_writer.update(new_data, **kw)
        cls.db.execute(query, params)

    @classmethod
    @catch_missing_table_name
    def _for_testing_only(cls):
        # Exists solely so tests can exercise the decorator's setup checks.
        pass
/TurboGears-1.5.1.tar.gz/TurboGears-1.5.1/turbogears/i18n/data/nb.py |
languages={'vi': 'vietnamesisk', 'el': 'gresk', 'eo': 'esperanto', 'en': 'engelsk', 'zh': 'kinesisk', 'af': 'afrikaans', 'sw': 'swahili', 'ca': 'katalansk', 'am': 'amharisk', 'gu': 'gujarati', 'sv': 'svensk', 'cs': 'tsjekkisk', 'ar': 'arabisk', 'ga': 'irsk', 'eu': 'baskisk', 'et': 'estisk', 'gl': 'galicisk', 'id': 'indonesisk', 'es': 'spansk', 'ru': 'russisk', 'nl': 'nederlandsk', 'nn': 'norsk nynorsk', 'no': 'norsk', 'nb': u'norsk bokm\xe5l', 'tr': 'tyrkisk', 'lv': 'latvisk', 'lt': 'litauisk', 'th': 'thai', 'ti': 'tigrinya', 'it': 'italiensk', 'so': 'somalisk', 'he': 'hebraisk', 'is': 'islandsk', 'pl': 'polsk', 'haw': 'hawaiisk', 'be': 'hviterussisk', 'fr': 'fransk', 'bg': 'bulgarsk', 'sl': 'slovensk', 'hr': 'kroatisk', 'iu': 'inuktitut', 'bn': 'bengali', 'de': 'tysk', 'da': 'dansk', 'fa': 'persisk', 'hi': 'hindi', 'fi': 'finsk', 'hy': 'armensk', 'hu': 'ungarsk', 'ja': 'japansk', 'fo': u'f\xe6r\xf8ysk', 'om': 'oromo', 'ro': 'rumensk', 'gv': 'manx', 'pt': 'portugisisk', 'sr': 'serbisk', 'sq': 'albansk', 'ko': 'koreansk', 'kn': 'kannada', 'mk': 'makedonsk', 'kl': 'kalaallisut', 'sk': 'slovakisk', 'mt': 'maltesisk', 'sh': 'serbokroatisk', 'kw': 'kornisk', 'uk': 'ukrainsk', 'mr': 'marathi', 'ta': 'tamil'}
countries={'BD': 'Bangladesh', 'BE': 'Belgia', 'BF': 'Burkina Faso', 'BG': 'Bulgaria', 'BA': 'Bosnia og Hercegovina', 'BB': 'Barbados', 'WF': 'Wallis og Futuna', 'BM': 'Bermuda', 'BN': 'Brunei Darussalam', 'BO': 'Bolivia', 'BH': 'Bahrain', 'BI': 'Burundi', 'BJ': 'Benin', 'BT': 'Bhutan', 'JM': 'Jamaica', 'BV': u'Bouvet\xf8ya', 'BW': 'Botswana', 'WS': 'Samoa', 'BR': 'Brasil', 'BS': 'Bahamas', 'BY': 'Hviterussland', 'BZ': 'Belize', 'RU': u'Den russiske f\xf8derasjon', 'RW': 'Rwanda', 'TL': u'\xd8st-Timor', 'RE': 'Reunion', 'TM': 'Turkmenistan', 'TJ': 'Tadsjikistan', 'RO': 'Romania', 'TK': 'Tokelau', 'GW': 'Guinea-Bissau', 'GU': 'Guam', 'GT': 'Guatemala', 'GS': u'S\xf8r-Georgia og S\xf8r-Sandwich-\xf8yene', 'GR': 'Hellas', 'GQ': 'Ekvatorial-Guinea', 'GP': 'Guadeloupe', 'JP': 'Japan', 'GY': 'Guyana', 'GF': 'Fransk Guyana', 'GE': 'Georgia', 'GD': 'Grenada', 'GB': 'Storbritannia', 'GA': 'Gabon', 'SV': 'El Salvador', 'GN': 'Guinea', 'GM': 'Gambia', 'GL': u'Gr\xf8nland', 'GI': 'Gibraltar', 'GH': 'Ghana', 'OM': 'Oman', 'TN': 'Tunisia', 'JO': 'Jordan', 'SP': 'Serbia', 'HR': 'Kroatia', 'HT': 'Haiti', 'HU': 'Ungarn', 'HK': 'Hong Kong S.A.R. (Kina)', 'HN': 'Honduras', 'HM': u'Heard- og McDonalds\xf8yene', 'VE': 'Venezuela', 'PR': 'Puerto Rico', 'PS': 'Palestinsk territorium', 'PW': 'Palau', 'PT': 'Portugal', 'SJ': 'Svalbard og Jan Mayen', 'PY': 'Paraguay', 'IQ': 'Irak', 'PA': 'Panama', 'PF': 'Fransk Polynesia', 'PG': 'Papua Ny-Guinea', 'PE': 'Peru', 'PK': 'Pakistan', 'PH': 'Filippinene', 'PN': 'Pitcairn', 'PL': 'Polen', 'PM': 'St. Pierre og Miquelon', 'ZM': 'Zambia', 'EH': 'Vest-Sahara', 'EE': 'Estland', 'EG': 'Egypt', 'ZA': u'S\xf8r-Afrika', 'EC': 'Ecuador', 'IT': 'Italia', 'VN': 'Vietnam', 'SB': u'Salomon\xf8yene', 'ET': 'Etiopia', 'SO': 'Somalia', 'ZW': 'Zimbabwe', 'SA': 'Saudi Arabia', 'ES': 'Spania', 'ER': 'Eritrea', 'MD': 'Moldova', 'MG': 'Madagaskar', 'MA': 'Marokko', 'MC': 'Monaco', 'UZ': 'Usbekistan', 'MM': 'Myanmar', 'ML': 'Mali', 'MO': 'Macao S.A.R. 
(Kina)', 'MN': 'Mongolia', 'MH': u'Marshall\xf8yene', 'MK': 'Makedonia, Republikken', 'MU': 'Mauritius', 'MT': 'Malta', 'MW': 'Malawi', 'MV': 'Maldivene', 'MQ': 'Martinique', 'MP': 'Nord-Marianene', 'MS': 'Montserrat', 'MR': 'Mauritania', 'UG': 'Uganda', 'MY': 'Malaysia', 'MX': 'Mexico', 'IL': 'Israel', 'FR': 'Frankrike', 'IO': u'Britiske omr\xe5der i det indiske hav', 'SH': 'Saint Helena', 'FI': 'Finland', 'FJ': 'Fiji', 'FK': u'Falklands\xf8yene (Malvinas)', 'FM': u'Mikronesiaf\xf8derasjonen', 'FO': u'F\xe6r\xf8yene', 'NI': 'Nicaragua', 'NL': 'Nederland', 'NO': 'Norge', 'NA': 'Namibia', 'VU': 'Vanuatu', 'NC': 'Ny-Caledonia', 'NE': 'Niger', 'NF': u'Norfolk\xf8yene', 'NG': 'Nigeria', 'NZ': 'New Zealand', 'NP': 'Nepal', 'NR': 'Nauru', 'NU': 'Niue', 'CK': u'Cook\xf8yene', 'CI': 'Elfenbenskysten', 'CH': 'Sveits', 'CO': 'Colombia', 'CN': 'Kina', 'CM': 'Kamerun', 'CL': 'Chile', 'CC': u'Kokos\xf8yene (Keeling\xf8yene)', 'CA': 'Canada', 'CG': 'Kongo', 'CF': 'Den sentralafrikanske republikk', 'CD': 'Kongo, Den demokratiske republikken', 'CZ': 'Tsjekkia', 'CY': 'Kypros', 'CX': u'Christmas\xf8ya', 'CR': 'Costa Rica', 'Fallback': 'en', 'CV': 'Kapp Verde', 'CU': 'Cuba', 'SZ': 'Swaziland', 'SY': 'Syria', 'KG': 'Kirgisistan', 'KE': 'Kenya', 'SR': 'Surinam', 'KI': 'Kiribati', 'KH': 'Kambodsja', 'KN': 'St. Christopher og Nevis', 'KM': 'Komorene', 'ST': 'Sao Tome og Principe', 'SK': 'Slovakia', 'KR': u'Korea (S\xf8r)', 'SI': 'Slovenia', 'KP': 'Korea (Nord)', 'KW': 'Kuwait', 'SN': 'Senegal', 'SM': 'San Marino', 'SL': 'Sierra Leone', 'SC': 'Seychellene', 'KZ': 'Kasakhstan', 'KY': u'Cayman\xf8yene', 'SG': 'Singapore', 'SE': 'Sverige', 'SD': 'Sudan', 'DO': 'Den dominikanske republikk', 'DM': 'Dominica', 'DJ': 'Djibouti', 'DK': 'Danmark', 'VG': u'Jomfru\xf8yene (britisk)', 'DE': 'Tyskland', 'YE': 'Yemen', 'DZ': 'Algerie', 'US': 'Sambandsstatane', 'UY': 'Uruguay', 'YU': 'Jugoslavia', 'YT': 'Mayotte', 'UM': u'USAs mindre \xf8yer', 'LB': 'Libanon', 'LC': 'St. 
Lucia', 'LA': 'Laos, Den folkedemokratiske republikken', 'TV': 'Tuvalu', 'TW': 'Taiwan', 'TT': 'Trinidad og Tobago', 'TR': 'Tyrkia', 'LK': 'Sri Lanka', 'LI': 'Liechtenstein', 'LV': 'Latvia', 'TO': 'Tonga', 'LT': 'Litauen', 'LU': 'Luxembourg', 'LR': 'Liberia', 'LS': 'Lesotho', 'TH': 'Thailand', 'TF': u'Franske s\xf8romr\xe5der', 'TG': 'Togo', 'TD': 'Tchad', 'TC': u'Turks- og Caicos\xf8yene', 'LY': 'Libya', 'VA': 'Vatikanstaten', 'VC': 'St. Vincent og Grenadinene', 'AE': 'De forente arabiske emiratene', 'AD': 'Andorra', 'AG': 'Antigua og Barbuda', 'AF': 'Afghanistan', 'AI': 'Anguilla', 'VI': 'U.S. Virgin Islands', 'IS': 'Island', 'IR': 'Iran', 'AM': 'Armenia', 'AL': 'Albania', 'AO': 'Angola', 'AN': 'De nederlandske antiller', 'AQ': 'Antarktis', 'AS': 'Amerikansk Samoa', 'AR': 'Argentina', 'AU': 'Australia', 'AT': u'\xd8sterrike', 'AW': 'Aruba', 'IN': 'India', 'TZ': 'Tanzania', 'AZ': 'Aserbajdsjan', 'IE': 'Irland', 'ID': 'Indonesia', 'UA': 'Ukraina', 'QA': 'Qatar', 'MZ': 'Mosambik'}
# Norwegian Bokmål (nb) CLDR-derived calendar and number-format data.
months=['januar', 'februar', 'mars', 'april', 'mai', 'juni', 'juli', 'august', 'september', 'oktober', 'november', 'desember']
abbrMonths=['jan', 'feb', 'mar', 'apr', 'mai', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'des']
# Weekday names, Monday first.
days=['mandag', 'tirsdag', 'onsdag', 'torsdag', 'fredag', u'l\xf8rdag', u's\xf8ndag']
abbrDays=['ma', 'ti', 'on', 'to', 'fr', u'l\xf8', u's\xf8']
# strftime-style patterns; the doubled-% placeholders are later filled with the name lists above.
dateFormats={'medium': '%d. %%(abbrmonthname)s. %Y', 'full': '%%(dayname)s %d. %%(monthname)s %Y', 'long': '%d. %%(monthname)s %Y', 'short': '%d.%m.%y'}
# Locale symbols for number formatting (group separator is NBSP, decimal is comma).
numericSymbols={'group': u'\xa0', 'nativeZeroDigit': '0', 'exponential': 'E', 'perMille': u'\u2030', 'nan': u'\ufffd', 'decimal': ',', 'percentSign': '%', 'list': ';', 'patternDigit': '#', 'plusSign': '+', 'infinity': u'\u221e', 'minusSign': '-'}
/pdc-updater-0.9.3.tar.gz/pdc-updater-0.9.3/pdcupdater/handlers/__init__.py | import abc
import fedmsg.utils
def load_handlers(config):
""" Import and instantiate all handlers listed in the given config. """
for import_path in config['pdcupdater.handlers']:
cls = fedmsg.utils.load_class(import_path)
handler = cls(config)
yield handler
class BaseHandler(object):
""" An abstract base class for handlers to enforce API. """
__metaclass__ = abc.ABCMeta
def __init__(self, config):
self.config = config
def construct_topics(self, config):
# Don't use the environment when using STOMP
if config.get('stomp_uri'):
return [
'.'.join([config['topic_prefix'], topic])
for topic in self.topic_suffixes
]
else:
return [
'.'.join([config['topic_prefix'], config['environment'],
topic])
for topic in self.topic_suffixes
]
@abc.abstractproperty
def topic_suffixes(self):
pass
@abc.abstractmethod
def can_handle(self, pdc, msg):
""" Return True or False if this handler can handle this message. """
pass
@abc.abstractmethod
def handle(self, pdc, msg):
""" Handle a fedmsg and update PDC if necessary. """
pass
@abc.abstractmethod
def audit(self, pdc):
""" This is intended to be called from a cronjob once every few days
and is meant to (in a read-only fashion) check that what PDC thinks is
true about a service, is actually true.
It is expected to take a long time to run.
It should return a two lists. The first should be a list of items
present in PDC but not in the other service. The second should be a
list of items present in the other service, but not in PDC. Those lists
will be sewn together into an email to the releng group.
"""
pass
@abc.abstractmethod
def initialize(self, pdc):
""" This needs to be called only once when pdc-updater is first
installed. It should query the original data source and initialize PDC
with a base layer of data.
It is expected to take a very long time to run.
"""
pass | PypiClean |
/Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/jax/output/HTML-CSS/fonts/STIX/fontdata-extra.js | (function(h){var q="2.7.9";var n=h.FONTDATA.DELIMITERS;var l="STIXGeneral",i="STIXGeneral-bold",k="STIXVariants",o="STIXNonUnicode",f="STIXSizeOneSym",d="STIXSizeTwoSym",b="STIXSizeThreeSym",a="STIXSizeFourSym",p="STIXSizeFiveSym",j="STIXIntegralsD";var m="H",g="V";var e={61:{dir:m,HW:[[0.685,l]],stretch:{rep:[61,l]}},8606:{dir:m,HW:[[0.926,l]],stretch:{left:[8606,l],rep:[8722,l]}},8608:{dir:m,HW:[[0.926,l]],stretch:{right:[8608,l],rep:[8722,l]}},8612:{dir:m,HW:[[0.926,l]],stretch:{left:[8592,k],rep:[8722,l],right:[8867,i,0,0.1,0.6]}},8613:{dir:g,HW:[[0.816,l]],stretch:{bot:[95,l,0.05,-0.01,0.8],ext:[9168,l],top:[8593,l]}},8614:{dir:m,HW:[[1,l]],stretch:{left:[57526,o],rep:[8722,l],right:[8594,l]}},8615:{dir:g,HW:[[0.816,l]],stretch:{top:[8868,i,0.04,0,0.6],ext:[9168,l],bot:[8595,l]}},8624:{dir:g,HW:[[0.818,l]],stretch:{top:[8624,l],ext:[9168,l,0.152]}},8625:{dir:g,HW:[[0.818,l]],stretch:{top:[8625,l],ext:[9168,l,-0.195]}},8636:{dir:m,HW:[[0.955,l]],stretch:{left:[8636,l],rep:[8722,l]}},8637:{dir:m,HW:[[0.955,l]],stretch:{left:[8637,l],rep:[8722,l]}},8638:{dir:g,HW:[[0.818,l]],stretch:{top:[8638,l],ext:[9168,l]}},8639:{dir:g,HW:[[0.818,l]],stretch:{top:[8639,l],ext:[9168,l]}},8640:{dir:m,HW:[[0.955,l]],stretch:{right:[8640,l],rep:[8722,l]}},8641:{dir:m,HW:[[0.955,l]],stretch:{right:[8641,l],rep:[8722,l]}},8642:{dir:g,HW:[[0.818,l]],stretch:{bot:[8642,l],ext:[9168,l]}},8643:{dir:g,HW:[[0.818,l]],stretch:{bot:[8643,l],ext:[9168,l]}},8666:{dir:m,HW:[[0.926,l]],stretch:{left:[8666,l],rep:[8801,l]}},8667:{dir:m,HW:[[0.926,l]],stretch:{right:[8667,l],rep:[8801,l]}},8747:{dir:g,HW:[[0.607,l],[0.979,j]],stretch:{top:[8992,f],ext:[9134,f],bot:[8993,f]}},9140:{dir:m,HW:[[0.926,l],[1.063,f],[1.606,d],[2.147,b],[2.692,a],[3.237,p]],stretch:{left:[8976,l],rep:[8722,l,0,0.12],right:[172,l]}},9141:{dir:m,HW:[[0.926,l],[1.063,f],[1.606,d],[2.147,b],[2.692,a]
,[3.237,p]],stretch:{left:[10813,l,0,0.12],rep:[8722,l,0,0,0,0.12],right:[10812,l,0,0.12]}},9180:{dir:m,HW:[[0.926,f],[1,l],[1.46,d],[1.886,b],[2.328,a],[3.237,p]],stretch:{left:[57659,o],right:[57660,o],rep:[57674,o]}},9181:{dir:m,HW:[[0.926,f],[1,l],[1.46,d],[1.886,b],[2.328,a],[3.237,p]],stretch:{left:[57661,o],right:[57662,o],rep:[57675,o]}},9184:{dir:m,HW:[[1,l],[1.46,f],[1.886,d],[2.312,b],[2.738,a],[3.164,p]],stretch:{left:[57613,o,-0.1,-0.1],rep:[8722,l,0,0.05],right:[57612,o,0,-0.1],fullExtenders:true}},9185:{dir:m,HW:[[1,l],[1.46,f],[1.886,d],[2.312,b],[2.738,a],[3.164,p]],stretch:{left:[57612,o,-0.1,0.1],rep:[8722,l,0,-0.1,0,0.1],right:[57613,o,0,0.1],fullExtenders:true}},10502:{dir:m,HW:[[0.926,l]],stretch:{left:[8656,l],rep:[61,l],right:[10980,l,0,-0.09]}},10503:{dir:m,HW:[[0.926,l]],stretch:{left:[8872,l,0,-0.09],rep:[61,l],right:[8658,l]}},10574:{dir:m,HW:[],stretch:{left:[8636,l],rep:[8722,l],right:[8640,l]}},10575:{dir:g,HW:[[0.818,l]],stretch:{top:[8638,l],ext:[9168,l],bot:[8642,l]}},10576:{dir:m,HW:[[0.926,l]],stretch:{left:[8637,l],rep:[8722,l],right:[8641,l]}},10577:{dir:g,HW:[[0.818,l]],stretch:{top:[8639,l],ext:[9168,l],bot:[8643,l]}},10586:{dir:m,HW:[[0.926,l]],stretch:{left:[8636,l],rep:[8722,l],right:[8867,i,0,0.1,0.6]}},10587:{dir:m,HW:[[0.926,l]],stretch:{left:[57526,o],rep:[8722,l],right:[8640,l]}},10588:{dir:g,HW:[[0.818,l]],stretch:{bot:[95,l,0.05,-0.01,0.8],ext:[9168,l],top:[8638,l]}},10589:{dir:g,HW:[[0.818,l]],stretch:{top:[8868,i,0.04,0,0.6],ext:[9168,l],bot:[8642,l]}},10590:{dir:m,HW:[[0.926,l]],stretch:{left:[8637,l],rep:[8722,l],right:[8867,i,0,0.1,0.6]}},10591:{dir:m,HW:[[0.926,l]],stretch:{left:[57526,o],rep:[8722,l],right:[8641,l]}},10592:{dir:g,HW:[[0.818,l]],stretch:{bot:[95,l,0.05,-0.01,0.8],ext:[9168,l],top:[8639,l]}},10593:{dir:g,HW:[[0.818,l]],stretch:{top:[8868,i,0.04,0,0.6],ext:[9168,l],bot:[8643,l]}},711:{dir:m,HW:[[0.333,l],[0.56,f],[0.979,d],[1.458,b],[1.886,a],[2.328,p]]},717:{dir:m,HW:[[0.334,l]],stretch:{rep:[71
7,l]}},759:{dir:m,HW:[[0.558,f],[0.977,d],[1.458,b],[1.886,a],[2.328,p]]},8607:{dir:g,HW:[[0.816,l]],stretch:{ext:[9168,l],top:[8607,l]}},8609:{dir:g,HW:[[0.816,l]],stretch:{ext:[9168,l],bot:[8609,l]}},8616:{dir:g,HW:[[0.816,l]],stretch:{top:[8593,l],ext:[9168,l],bot:[10515,l]}},8617:{dir:m,HW:[[0.926,l]],stretch:{left:[8592,l],rep:[8722,l],right:[57525,o]}},8618:{dir:m,HW:[[0.926,l]],stretch:{left:[57524,o],rep:[8722,l],right:[8594,l]}},8626:{dir:g,HW:[[0.818,l]],stretch:{bot:[8626,l],ext:[9168,l,0.152]}},8627:{dir:g,HW:[[0.818,l]],stretch:{bot:[8627,l],ext:[9168,l,-0.195]}},8628:{dir:m,HW:[[0.926,l]],stretch:{rep:[8722,l,0,0.4],right:[8628,l]}},8629:{dir:g,HW:[[0.818,l]],stretch:{bot:[8629,l],ext:[9168,l,0.57]}},8651:{dir:m,HW:[[0.926,l]],stretch:{left:[10602,l],rep:[61,l],right:[10605,l]}},8652:{dir:m,HW:[[0.926,l]],stretch:{left:[10603,l],rep:[61,l],right:[10604,l]}},8672:{dir:m,HW:[[0.926,l]],stretch:{left:[8672,l],rep:[57633,o,0,0,0,0,0.1],fullExtenders:true}},8673:{dir:g,HW:[[0.818,l]],stretch:{ext:[57645,o],top:[8673,l],fullExtenders:true}},8674:{dir:m,HW:[[0.926,l]],stretch:{right:[8674,k],rep:[57646,o,0,0,0,0,0.1],fullExtenders:true}},8675:{dir:g,HW:[[0.818,l]],stretch:{ext:[57644,o],bot:[8675,l],fullExtenders:true}},8676:{dir:m,HW:[[0.926,l]],stretch:{left:[8676,l],rep:[8722,l]}},8677:{dir:m,HW:[[0.926,l]],stretch:{right:[8677,l],rep:[8722,l]}},8701:{dir:m,HW:[[0.926,l]],stretch:{left:[8701,l],rep:[8722,l]}},8702:{dir:m,HW:[[0.926,l]],stretch:{right:[8702,l],rep:[8722,l]}},8703:{dir:m,HW:[[0.926,l]],stretch:{left:[8701,l],rep:[8722,l],right:[8702,l]}},10214:{dir:g,HW:[[0.93,l],[1.23,f],[1.845,d],[2.46,b],[3.075,a]],stretch:{top:[9555,l],ext:[9553,l],bot:[9561,l]}},10215:{dir:g,HW:[[0.93,l],[1.23,f],[1.845,d],[2.46,b],[3.075,a]],stretch:{top:[9558,l],ext:[9553,l],bot:[9564,l]}},10218:{dir:g,HW:[[0.931,l],[1.23,f],[1.845,d],[2.461,b],[3.075,a]]},10219:{dir:g,HW:[[0.931,l],[1.23,f],[1.845,d],[2.461,b],[3.075,a]]},10506:{dir:g,HW:[[0.818,l]],stretch:{top:[105
06,l],ext:[57653,o]}},10507:{dir:g,HW:[[0.818,l]],stretch:{bot:[10507,l],ext:[57653,o]}},10514:{dir:g,HW:[[0.818,l]],stretch:{top:[10514,l],ext:[9168,l]}},10515:{dir:g,HW:[[0.818,l]],stretch:{bot:[10515,l],ext:[9168,l]}},10578:{dir:m,HW:[[0.926,l]],stretch:{left:[10578,l],rep:[8722,l]}},10579:{dir:m,HW:[[0.926,l]],stretch:{right:[10579,l],rep:[8722,l]}},10580:{dir:g,HW:[[0.818,l]],stretch:{top:[10580,l],ext:[9168,l]}},10581:{dir:g,HW:[[0.818,l]],stretch:{bot:[10581,l],ext:[9168,l]}},10582:{dir:m,HW:[[0.926,l]],stretch:{left:[10582,l],rep:[8722,l]}},10583:{dir:m,HW:[[0.926,l]],stretch:{right:[10583,l],rep:[8722,l]}},10584:{dir:g,HW:[[0.818,l]],stretch:{top:[10584,l],ext:[9168,l]}},10585:{dir:g,HW:[[0.818,l]],stretch:{bot:[10585,l],ext:[9168,l]}},10624:{dir:g,HW:[[0.874,l]],stretch:{ext:[10624,l]}},10647:{dir:g,HW:[[0.932,l]],stretch:{top:[57613,o,0.1,0.05],ext:[9168,l,-0.1],bot:[57612,o,0.1]}},10648:{dir:g,HW:[[0.932,l]],stretch:{top:[57612,o,-0.1,0.05],ext:[9168,l],bot:[57613,o,-0.1]}}};for(var c in e){if(e.hasOwnProperty(c)){n[c]=e[c]}}MathJax.Ajax.loadComplete(h.fontDir+"/fontdata-extra.js")})(MathJax.OutputJax["HTML-CSS"]); | PypiClean |
/msgraph-sdk-1.0.0a3.tar.gz/msgraph-sdk-1.0.0a3/msgraph/generated/groups/item/sites/item/term_store/sets/item/children/item/relations/item/relation_item_request_builder.py | from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, Union
from ............models.o_data_errors import o_data_error
from ............models.term_store import relation
from .from_term import from_term_request_builder
from .set import set_request_builder
from .to_term import to_term_request_builder
class RelationItemRequestBuilder():
"""
Provides operations to manage the relations property of the microsoft.graph.termStore.term entity.
"""
def from_term(self) -> from_term_request_builder.FromTermRequestBuilder:
    """
    Provides operations to manage the fromTerm property of the microsoft.graph.termStore.relation entity.
    """
    builder_cls = from_term_request_builder.FromTermRequestBuilder
    return builder_cls(self.request_adapter, self.path_parameters)
def set(self) -> set_request_builder.SetRequestBuilder:
    """
    Provides operations to manage the set property of the microsoft.graph.termStore.relation entity.
    """
    builder_cls = set_request_builder.SetRequestBuilder
    return builder_cls(self.request_adapter, self.path_parameters)
def to_term(self) -> to_term_request_builder.ToTermRequestBuilder:
    """
    Provides operations to manage the toTerm property of the microsoft.graph.termStore.relation entity.
    """
    builder_cls = to_term_request_builder.ToTermRequestBuilder
    return builder_cls(self.request_adapter, self.path_parameters)
def __init__(self, request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
    """
    Instantiates a new RelationItemRequestBuilder and sets the default values.
    Args:
        pathParameters: The raw url or the Url template parameters for the request.
        requestAdapter: The request adapter to use to execute the requests.
    """
    if path_parameters is None:
        raise Exception("path_parameters cannot be undefined")
    if request_adapter is None:
        raise Exception("request_adapter cannot be undefined")
    # Url template to use to build the URL for the current request builder
    self.url_template: str = "{+baseurl}/groups/{group%2Did}/sites/{site%2Did}/termStore/sets/{set%2Did}/children/{term%2Did}/relations/{relation%2Did}{?%24select,%24expand}"
    # Normalise the raw url / parameter dict into the template-parameter map.
    self.path_parameters = get_path_parameters(path_parameters)
    self.request_adapter = request_adapter
def create_delete_request_information(self, request_configuration: Optional[RelationItemRequestBuilderDeleteRequestConfiguration] = None) -> RequestInformation:
    """
    Delete navigation property relations for groups
    Args:
        requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
    Returns: RequestInformation
    """
    info = RequestInformation()
    info.url_template = self.url_template
    info.path_parameters = self.path_parameters
    info.http_method = Method.DELETE
    if request_configuration:
        # Fold caller-supplied headers and middleware options into the request.
        info.add_request_headers(request_configuration.headers)
        info.add_request_options(request_configuration.options)
    return info
def create_get_request_information(self, request_configuration: Optional[RelationItemRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
    """
    To indicate which terms are related to the current term as either pinned or reused.
    Args:
        requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
    Returns: RequestInformation
    """
    info = RequestInformation()
    info.url_template = self.url_template
    info.path_parameters = self.path_parameters
    info.http_method = Method.GET
    info.headers["Accept"] = "application/json"
    if request_configuration:
        # Fold caller-supplied headers, query parameters and options in.
        info.add_request_headers(request_configuration.headers)
        info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
        info.add_request_options(request_configuration.options)
    return info
def create_patch_request_information(self, body: Optional[relation.Relation] = None, request_configuration: Optional[RelationItemRequestBuilderPatchRequestConfiguration] = None) -> RequestInformation:
    """
    Update the navigation property relations in groups
    Args:
        body:
        requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
    Returns: RequestInformation
    """
    if body is None:
        raise Exception("body cannot be undefined")
    info = RequestInformation()
    info.url_template = self.url_template
    info.path_parameters = self.path_parameters
    info.http_method = Method.PATCH
    info.headers["Accept"] = "application/json"
    if request_configuration:
        info.add_request_headers(request_configuration.headers)
        info.add_request_options(request_configuration.options)
    # Serialise the payload as JSON via the adapter's serialization writer.
    info.set_content_from_parsable(self.request_adapter, "application/json", body)
    return info
async def delete(self,request_configuration: Optional[RelationItemRequestBuilderDeleteRequestConfiguration] = None, response_handler: Optional[ResponseHandler] = None) -> None:
"""
Delete navigation property relations for groups
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
responseHandler: Response handler to use in place of the default response handling provided by the core service
"""
request_info = self.create_delete_request_information(
request_configuration
)
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
return await self.request_adapter.send_no_response_content_async(request_info, response_handler, error_mapping)
async def get(self,request_configuration: Optional[RelationItemRequestBuilderGetRequestConfiguration] = None, response_handler: Optional[ResponseHandler] = None) -> Optional[relation.Relation]:
"""
To indicate which terms are related to the current term as either pinned or reused.
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
responseHandler: Response handler to use in place of the default response handling provided by the core service
Returns: Optional[relation.Relation]
"""
request_info = self.create_get_request_information(
request_configuration
)
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
return await self.request_adapter.send_async(request_info, relation.Relation, response_handler, error_mapping)
async def patch(self,body: Optional[relation.Relation] = None, request_configuration: Optional[RelationItemRequestBuilderPatchRequestConfiguration] = None, response_handler: Optional[ResponseHandler] = None) -> Optional[relation.Relation]:
"""
Update the navigation property relations in groups
Args:
body:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
responseHandler: Response handler to use in place of the default response handling provided by the core service
Returns: Optional[relation.Relation]
"""
if body is None:
raise Exception("body cannot be undefined")
request_info = self.create_patch_request_information(
body, request_configuration
)
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
return await self.request_adapter.send_async(request_info, relation.Relation, response_handler, error_mapping)
    @dataclass
    class RelationItemRequestBuilderDeleteRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        Used by delete()/create_delete_request_information(); all fields are optional.
        """
        # Request headers sent with the DELETE call
        headers: Optional[Dict[str, str]] = None
        # Middleware request options applied to the DELETE call
        options: Optional[List[RequestOption]] = None
@dataclass
class RelationItemRequestBuilderGetQueryParameters():
"""
To indicate which terms are related to the current term as either pinned or reused.
"""
# Expand related entities
expand: Optional[List[str]] = None
# Select properties to be returned
select: Optional[List[str]] = None
def get_query_parameter(self,original_name: Optional[str] = None) -> str:
"""
Maps the query parameters names to their encoded names for the URI template parsing.
Args:
originalName: The original query parameter name in the class.
Returns: str
"""
if original_name is None:
raise Exception("original_name cannot be undefined")
if original_name == "expand":
return "%24expand"
if original_name == "select":
return "%24select"
return original_name
    @dataclass
    class RelationItemRequestBuilderGetRequestConfiguration():
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        Used by get()/create_get_request_information(); all fields are optional.
        """
        # Request headers sent with the GET call
        headers: Optional[Dict[str, str]] = None
        # Middleware request options applied to the GET call
        options: Optional[List[RequestOption]] = None
        # OData query parameters ($expand/$select) for the GET call
        query_parameters: Optional[RelationItemRequestBuilder.RelationItemRequestBuilderGetQueryParameters] = None
@dataclass
class RelationItemRequestBuilderPatchRequestConfiguration():
"""
Configuration for the request such as headers, query parameters, and middleware options.
"""
# Request headers
headers: Optional[Dict[str, str]] = None
# Request options
options: Optional[List[RequestOption]] = None | PypiClean |
# ==== azure/iot/percept/extensions/ops/non_max_suppression.py (from azure_percept-0.0.13 wheel) ====
import logging as log
import numpy as np
from mo.front.common.partial_infer.utils import int64_array
from mo.front.extractor import bool_to_str
from mo.graph.graph import Node, Graph
from mo.middle.passes.convert_data_type import np_data_type_to_destination_type
from mo.ops.op import Op
from mo.utils.error import Error
class NonMaxSuppression(Op):
    """Model Optimizer wrapper for the NonMaxSuppression operation (opset1/3/4/5).

    Registers shape inference (``infer``) and output type inference
    (``type_infer``); the number of output ports depends on the opset.
    """
    op = 'NonMaxSuppression'
    def __init__(self, graph: Graph, attrs: dict):
        """Register the op with its default attributes and pick output port count by opset."""
        mandatory_props = {
            'type': self.op,
            'op': self.op,
            'version': 'opset5',
            'infer': self.infer,
            'output_type': np.int64,
            'box_encoding': 'corner',
            'in_ports_count': 5,
            'sort_result_descending': 1,
            # Port 2 (max_output_boxes_per_class) must be converted to int64.
            'force_precision_in_ports': {
                2: 'int64'},
            'type_infer': self.type_infer,
        }
        super().__init__(graph, mandatory_props, attrs)
        version = self.get_opset()
        # opset5 introduced two extra outputs: selected scores and valid-output count.
        if version in ['opset1', 'opset3', 'opset4']:
            self.attrs['out_ports_count'] = 1
        elif version == 'opset5':
            self.attrs['out_ports_count'] = 3
        else:
            raise Error('Unsupported operation opset version "{}"'.format(version))
    def backend_attrs(self):
        """Return the attributes serialized to IR, which differ per opset."""
        version = self.get_opset()
        if version in ['opset3', 'opset4', 'opset5']:
            return [('sort_result_descending', lambda node: bool_to_str(node, 'sort_result_descending')),
                    'box_encoding',
                    ('output_type', lambda node: np_data_type_to_destination_type(node.output_type))]
        elif version == 'opset1':
            # opset1 has no configurable output_type.
            return [('sort_result_descending', lambda node: bool_to_str(node, 'sort_result_descending')),
                    'box_encoding']
        else:
            raise Error('Unsupported operation opset version "{}"'.format(version))
    @staticmethod
    def infer(node: Node):
        """Infer output shapes from the boxes/scores inputs.

        boxes is expected as [num_batches, num_boxes, 4] and scores as
        [num_batches, num_classes, num_boxes]; output 0 is
        [max_number_of_boxes, 3] (batch, class, box index triples).
        """
        num_of_inputs = len(node.in_ports())
        opset = node.get_opset()
        # opset5 added the optional 'soft_nms_sigma' input (port 5).
        max_num_of_inputs = 6 if opset == 'opset5' else 5
        input_msg_fmt = 'NonMaxSuppression node {} from {} must have from 2 to {} inputs'
        inputs_msg = input_msg_fmt.format(node.soft_get('name', node.id), opset, max_num_of_inputs)
        assert 2 <= num_of_inputs <= max_num_of_inputs, inputs_msg
        boxes_shape = node.in_port(0).data.get_shape()
        assert boxes_shape is not None, 'The shape of tensor with boxes is not defined'
        scores_shape = node.in_port(1).data.get_shape()
        assert scores_shape is not None, 'The shape of tensor with scores is not defined'
        assert len(boxes_shape) == 3, 'Length of tensors with boxes must be equal to 3'
        assert len(scores_shape) == 3, 'Length of tensors with scores must be equal to 3'
        # According to the specification of the operation NonMaxSuppression,
        # the input 'max_output_boxes_per_class' (port 2) is optional, with default value 0.
        if num_of_inputs >= 3:
            max_output_boxes_per_class = node.in_port(2).data.get_value()
        else:
            max_output_boxes_per_class = 0
        # A zero/None limit means "no limit": fall back to the number of input boxes.
        if not max_output_boxes_per_class:
            log.info('Set default "max_output_boxes_per_class" for node {} to number of boxes'.format(node.name))
            max_output_boxes_per_class = boxes_shape[1]
        num_classes = scores_shape[1]
        num_input_boxes = boxes_shape[1]
        assert scores_shape[2] == num_input_boxes, 'Number of boxes mismatch'
        # opset4+ clamps per class first, then multiplies out batches and classes;
        # earlier opsets clamp against the combined product.
        if node.get_opset() in ['opset4', 'opset5']:
            max_number_of_boxes = min(num_input_boxes, max_output_boxes_per_class) * boxes_shape[0] * num_classes
        else:
            max_number_of_boxes = min(num_input_boxes, boxes_shape[0] * max_output_boxes_per_class * num_classes)
        node.out_port(0).data.set_shape(int64_array([max_number_of_boxes, 3]))
        if opset == 'opset5':
            # Only set shapes for the extra outputs that are actually connected.
            num_of_outputs = len([port for port in node.out_ports().values() if not port.disconnected()])
            if num_of_outputs >= 2 and node.has_port('out', 1):
                node.out_port(1).data.set_shape(int64_array([max_number_of_boxes, 3]))
            if num_of_outputs >= 3 and node.has_port('out', 2):
                # NOTE(review): int64_array(1) produces a 0-d array; other MO code
                # uses int64_array([1]) for a 1-element shape — confirm intended.
                node.out_port(2).data.set_shape(int64_array(1))
    @staticmethod
    def type_infer(node):
        """Assign output element types: indices use node.output_type, scores are f32."""
        opset = node.get_opset()
        if opset == 'opset5':
            node.out_port(0).set_data_type(node.output_type)
            if node.has_port('out', 1):
                node.out_port(1).set_data_type(np.float32)
            if node.has_port('out', 2):
                node.out_port(2).set_data_type(np.int64)
        elif opset in ['opset3', 'opset4']:
            node.out_port(0).set_data_type(node.output_type)
        else:
            # opset1 always emits int64 indices.
            node.out_port(0).set_data_type(np.int64)
/django_xprez-0.2.3-py3-none-any.whl/xprez/static/xprez/admin/libs/rangy/rangy-highlighter.min.js | !function(e,t){"function"==typeof define&&define.amd?define(["./rangy-core"],e):"undefined"!=typeof module&&"object"==typeof exports?module.exports=e(require("rangy")):e(t.rangy)}(function(e){return e.createModule("Highlighter",["ClassApplier"],function(e){function t(e,t){return e.characterRange.start-t.characterRange.start}function n(e,t){return t?e.getElementById(t):l(e)}function r(e,t){this.type=e,this.converterCreator=t}function i(e,t){f[e]=new r(e,t)}function a(e){var t=f[e];if(t instanceof r)return t.create();throw new Error("Highlighter type '"+e+"' is not valid")}function s(e,t){this.start=e,this.end=t}function h(e,t,n,r,i,a){i?(this.id=i,d=Math.max(d,i+1)):this.id=d++,this.characterRange=t,this.doc=e,this.classApplier=n,this.converter=r,this.containerElementId=a||null,this.applied=!1}function o(e,t){t=t||"textContent",this.doc=e||document,this.classAppliers={},this.highlights=[],this.converter=a(t)}var c=e.dom,g=c.arrayContains,l=c.getBody,u=e.util.createOptions,p=e.util.forEach,d=1,f={};r.prototype.create=function(){var e=this.converterCreator();return e.type=this.type,e},e.registerHighlighterType=i,s.prototype={intersects:function(e){return this.start<e.end&&this.end>e.start},isContiguousWith:function(e){return this.start==e.end||this.end==e.start},union:function(e){return new s(Math.min(this.start,e.start),Math.max(this.end,e.end))},intersection:function(e){return new s(Math.max(this.start,e.start),Math.min(this.end,e.end))},getComplements:function(e){var t=[];if(this.start>=e.start){if(this.end<=e.end)return[];t.push(new s(e.end,this.end))}else t.push(new s(this.start,Math.min(this.end,e.start))),this.end>e.end&&t.push(new s(e.end,this.end));return t},toString:function(){return"[CharacterRange("+this.start+", "+this.end+")]"}},s.fromCharacterRange=function(e){return new s(e.start,e.end)};var R={rangeToCharacterRange:function(e,t){var 
n=e.getBookmark(t);return new s(n.start,n.end)},characterRangeToRange:function(t,n,r){var i=e.createRange(t);return i.moveToBookmark({start:n.start,end:n.end,containerNode:r}),i},serializeSelection:function(e,t){for(var n=e.getAllRanges(),r=n.length,i=[],a=1==r&&e.isBackward(),s=0,h=n.length;h>s;++s)i[s]={characterRange:this.rangeToCharacterRange(n[s],t),backward:a};return i},restoreSelection:function(e,t,n){e.removeAllRanges();for(var r,i,a,s=e.win.document,h=0,o=t.length;o>h;++h)i=t[h],a=i.characterRange,r=this.characterRangeToRange(s,i.characterRange,n),e.addRange(r,i.backward)}};i("textContent",function(){return R}),i("TextRange",function(){var t;return function(){if(!t){var n=e.modules.TextRange;if(!n)throw new Error("TextRange module is missing.");if(!n.supported)throw new Error("TextRange module is present but not supported.");t={rangeToCharacterRange:function(e,t){return s.fromCharacterRange(e.toCharacterRange(t))},characterRangeToRange:function(t,n,r){var i=e.createRange(t);return i.selectCharacters(r,n.start,n.end),i},serializeSelection:function(e,t){return e.saveCharacterRanges(t)},restoreSelection:function(e,t,n){e.restoreCharacterRanges(n,t)}}}return t}}()),h.prototype={getContainerElement:function(){return n(this.doc,this.containerElementId)},getRange:function(){return this.converter.characterRangeToRange(this.doc,this.characterRange,this.getContainerElement())},fromRange:function(e){this.characterRange=this.converter.rangeToCharacterRange(e,this.getContainerElement())},getText:function(){return this.getRange().toString()},containsElement:function(e){return this.getRange().containsNodeContents(e.firstChild)},unapply:function(){this.classApplier.undoToRange(this.getRange()),this.applied=!1},apply:function(){this.classApplier.applyToRange(this.getRange()),this.applied=!0},getHighlightElements:function(){return this.classApplier.getElementsWithClassIntersectingRange(this.getRange())},toString:function(){return"[Highlight(ID: "+this.id+", class: 
"+this.classApplier.className+", character range: "+this.characterRange.start+" - "+this.characterRange.end+")]"}},o.prototype={addClassApplier:function(e){this.classAppliers[e.className]=e},getHighlightForElement:function(e){for(var t=this.highlights,n=0,r=t.length;r>n;++n)if(t[n].containsElement(e))return t[n];return null},removeHighlights:function(e){for(var t,n=0,r=this.highlights.length;r>n;++n)t=this.highlights[n],g(e,t)&&(t.unapply(),this.highlights.splice(n--,1))},removeAllHighlights:function(){this.removeHighlights(this.highlights)},getIntersectingHighlights:function(e){var t=[],n=this.highlights;return p(e,function(e){p(n,function(n){e.intersectsRange(n.getRange())&&!g(t,n)&&t.push(n)})}),t},highlightCharacterRanges:function(t,n,r){var i,a,o,c=this.highlights,g=this.converter,l=this.doc,d=[],f=t?this.classAppliers[t]:null;r=u(r,{containerElementId:null,exclusive:!0});var R,v,m,C=r.containerElementId,w=r.exclusive;C&&(R=this.doc.getElementById(C),R&&(v=e.createRange(this.doc),v.selectNodeContents(R),m=new s(0,v.toString().length)));var y,E,T,x,A,H;for(i=0,a=n.length;a>i;++i)if(y=n[i],A=[],m&&(y=y.intersection(m)),y.start!=y.end){for(o=0;o<c.length;++o)T=!1,C==c[o].containerElementId&&(E=c[o].characterRange,x=f==c[o].classApplier,H=!x&&w,(E.intersects(y)||E.isContiguousWith(y))&&(x||H)&&(H&&p(E.getComplements(y),function(e){A.push(new h(l,e,c[o].classApplier,g,null,C))}),T=!0,x&&(y=E.union(y)))),T?(d.push(c[o]),c[o]=new h(l,E.union(y),f,g,null,C)):A.push(c[o]);f&&A.push(new h(l,y,f,g,null,C)),this.highlights=c=A}p(d,function(e){e.unapply()});var I=[];return p(c,function(e){e.applied||(e.apply(),I.push(e))}),I},highlightRanges:function(t,n,r){var i=[],a=this.converter;r=u(r,{containerElement:null,exclusive:!0});var s,h=r.containerElement,o=h?h.id:null;return h&&(s=e.createRange(h),s.selectNodeContents(h)),p(n,function(e){var 
t=h?s.intersection(e):e;i.push(a.rangeToCharacterRange(t,h||l(e.getDocument())))}),this.highlightCharacterRanges(t,i,{containerElementId:o,exclusive:r.exclusive})},highlightSelection:function(t,r){var i=this.converter,a=t?this.classAppliers[t]:!1;r=u(r,{containerElementId:null,selection:e.getSelection(this.doc),exclusive:!0});var h=r.containerElementId,o=r.exclusive,c=r.selection,g=c.win.document,l=n(g,h);if(!a&&t!==!1)throw new Error("No class applier found for class '"+t+"'");var d=i.serializeSelection(c,l),f=[];p(d,function(e){f.push(s.fromCharacterRange(e.characterRange))});var R=this.highlightCharacterRanges(t,f,{containerElementId:h,exclusive:o});return i.restoreSelection(c,d,l),R},unhighlightSelection:function(t){t=t||e.getSelection(this.doc);var n=this.getIntersectingHighlights(t.getAllRanges());return this.removeHighlights(n),t.removeAllRanges(),n},getHighlightsInSelection:function(t){return t=t||e.getSelection(this.doc),this.getIntersectingHighlights(t.getAllRanges())},selectionOverlapsHighlight:function(e){return this.getHighlightsInSelection(e).length>0},serialize:function(e){var n,r,i,s,h=this,o=h.highlights;return o.sort(t),e=u(e,{serializeHighlightText:!1,type:h.converter.type}),n=e.type,i=n!=h.converter.type,i&&(s=a(n)),r=["type:"+n],p(o,function(t){var n,a=t.characterRange;i&&(n=t.getContainerElement(),a=s.rangeToCharacterRange(h.converter.characterRangeToRange(h.doc,a,n),n));var o=[a.start,a.end,t.id,t.classApplier.className,t.containerElementId];e.serializeHighlightText&&o.push(t.getText()),r.push(o.join("$"))}),r.join("|")},deserialize:function(e){var t,r,i,o=e.split("|"),c=[],g=o[0],l=!1;if(!g||!(t=/^type:(\w+)$/.exec(g)))throw new Error("Serialized highlights are invalid.");r=t[1],r!=this.converter.type&&(i=a(r),l=!0),o.shift();for(var u,p,d,f,R,v,m=o.length;m-->0;){if(v=o[m].split("$"),d=new 
s(+v[0],+v[1]),f=v[4]||null,l&&(R=n(this.doc,f),d=this.converter.rangeToCharacterRange(i.characterRangeToRange(this.doc,d,R),R)),u=this.classAppliers[v[3]],!u)throw new Error("No class applier found for class '"+v[3]+"'");p=new h(this.doc,d,u,this.converter,parseInt(v[2]),f),p.apply(),c.push(p)}this.highlights=c}},e.Highlighter=o,e.createHighlighter=function(e,t){return new o(e,t)}}),e},this); | PypiClean |
# ==== projectZ/async_client.py (from projectZ.py-1.1.6.8 sdist) ====
from .utils import exceptions, objects
from .utils.generator import Generator
from .utils.headers import Headers
from .async_socket import AsyncSocket, AsyncCallBacks
from json import dumps, loads
from sys import maxsize
from random import randint
from aiohttp import ClientSession, MultipartWriter
from asyncio import get_event_loop, new_event_loop, create_task
from io import BytesIO
from aiofiles.threadpool.binary import AsyncBufferedReader
from typing import Union, Optional
gen = Generator()
class AsyncClient(AsyncSocket, AsyncCallBacks):
	def __init__(self, deviceId: str = None, socket_debug: bool = False, run_socket: bool = True, language: str = "en-US", country_code: str = "en", time_zone: int = 180):
		"""Create a projectZ API client.

		Args:
			deviceId: device identifier sent in request headers; a random one is generated when omitted.
			socket_debug: enable verbose logging in the websocket layer.
			run_socket: when False, login/logout and socket helpers skip the websocket entirely.
			language / country_code / time_zone: locale values forwarded in request headers.
		"""
		self.api = 'https://api.projz.com'
		self.deviceId = deviceId if deviceId else gen.deviceId()
		# Empty profile until login() succeeds (holds sid/uid used by other calls).
		self.profile = objects.User()
		self.language = language
		self.country_code = country_code
		self.time_zone = time_zone
		self.run_socket = run_socket
		self.session = ClientSession()
		# Both mixin bases are initialised explicitly (no cooperative super() chain here);
		# the socket signs its frames via self.parse_headers.
		AsyncSocket.__init__(self, headers=self.parse_headers, debug=socket_debug)
		AsyncCallBacks.__init__(self)
	def __del__(self):
		"""Best-effort close of the aiohttp session on garbage collection.

		NOTE(review): driving an event loop from __del__ is fragile — the running
		loop may already be closed at interpreter shutdown, and get_event_loop()
		is deprecated outside a running loop. Prefer calling close_session()
		explicitly; confirm before relying on this finaliser.
		"""
		try:
			loop = get_event_loop()
			loop.run_until_complete(self.close_session())
		except RuntimeError:
			# No usable loop in this thread: make a fresh one just for cleanup.
			loop = new_event_loop()
			loop.run_until_complete(self.close_session())
	async def close_session(self):
		"""Close the underlying aiohttp ClientSession if it is still open."""
		if not self.session.closed: await self.session.close()
def parse_headers(self, endpoint: str, data = None, content_type: str = 'application/json') -> dict:
h = Headers(deviceId=self.deviceId, sid=self.profile.sid, time_zone=self.time_zone, country_code=self.country_code, language=self.language)
head = h.get_persistent_headers()
head.update(h.Headers())
head.update({"Content-Type": content_type} if content_type else {})
head["HJTRFS"] = gen.signature(path=endpoint, headers=head, body=data or bytes())
return head
	async def upload_media(self, file: AsyncBufferedReader, fileType: str, target: int = 1, returnType: str = 'object', duration: int = 0):
		"""Upload an image or audio file and return the resulting media descriptor.

		Args:
			file: an aiofiles binary reader opened on the media file.
			fileType: "image" or "audio"; anything else raises WrongType.
			target: server-side upload target id (callers pass 8 for images, 10 for audio).
			returnType: 'object' wraps the response in objects.Media, anything else returns the raw dict.
			duration: media duration, forwarded as a query parameter (milliseconds — TODO confirm).
		"""
		if fileType == "audio":
			t = "audio/aac"
		elif fileType == "image":
			t = "image/jpg"
		else: raise exceptions.WrongType(fileType)
		file_content = await file.read()
		# Build the multipart body by hand so the exact bytes are available for
		# request signing (parse_headers hashes the body).
		content = BytesIO()
		writer = MultipartWriter()
		part = writer.append(file_content, {"Content-Type": t})
		part.set_content_disposition("form-data", name="media", filename=file.name)
		await writer.write(objects.CopyToBufferWriter(content))
		endpoint = f"/v1/media/upload?target={target}&duration={duration}"
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, content_type=f"multipart/form-data; boundary={writer.boundary}", data=content.getvalue()), data=content.getvalue()) as response:
			if response.status != 200:return exceptions.CheckException(await response.text())
			else:
				return objects.Media(loads(await response.text())) if returnType == 'object' else loads(await response.text())
	async def login(self, email: str, password: str):
		"""Authenticate with email/password, store the profile (sid/uid) and,
		when run_socket is enabled, open the websocket connection.
		"""
		data = dumps({
			"authType": 1,
			"email": email,
			"password": password
		})
		endpoint = '/v1/auth/login'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			if response.status != 200: return exceptions.CheckException(await response.text())
			else:
				self.profile = objects.User(loads(await response.text()))
				if self.run_socket:await self.connect()
				return self.profile
	async def logout(self):
		"""Log out, reset the cached profile and close the websocket if running."""
		endpoint = '/v1/auth/logout'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			if response.status != 200: return exceptions.CheckException(await response.text())
			else:
				self.profile = objects.User()
				if self.run_socket:await self.disconnect()
				return response.status
	async def Online(self):
		"""Start the background online-presence loop (no-op without the socket).

		Returns the created task, or the existing one if already running.
		"""
		if not self.run_socket:return
		if self.online_loop_active: return
		self.online_loop_active = create_task(self.online_loop())
		return self.online_loop_active
	async def Offline(self):
		"""Cancel the online-presence loop started by Online(), if any."""
		if not self.run_socket:return
		if self.online_loop_active:
			self.online_loop_active.cancel()
			self.online_loop_active = None
		return self.online_loop_active
	async def join_chat(self, chatId: int):
		"""Join a chat thread. Returns the HTTP status on success."""
		endpoint = f'/v1/chat/threads/{chatId}/members'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def leave_chat(self, chatId: int):
		"""Leave a chat thread. Returns the HTTP status on success."""
		endpoint = f'/v1/chat/threads/{chatId}/members'
		async with self.session.delete(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def get_from_link(self, link: str):
		"""Resolve a share link into its target object (user/chat/circle/...)."""
		data = dumps({"link": link})
		endpoint = f"/v1/links/path"
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else objects.FromLink(loads(await response.text()))
async def get_link(self, userId: int = None, chatId: int = None, circleId: int = None, blogId: int = None):
data = {
"objectId": 0,
"objectType": 0,
"parentId": 0,
"circleIdForCircleAnnouncement": 0,
"parentType": 0
}
if userId:
data["path"] = f"user/{userId}"
elif chatId:
data["path"] = f"chat/{chatId}"
elif circleId:
data["objectType"] = 5
data["objectId"] = circleId
data["path"] = f"circle/{circleId}"
elif blogId:
data["path"] = f"blog/{blogId}"
else:
raise exceptions.WrongType(fileType)
data = dumps(data)
endpoint = '/v1/links/share'
async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
return exceptions.CheckException(await response.text()) if response.status != 200 else objects.FromLink(loads(await response.text()))
	async def get_my_chats(self, start: int = 0, size: int = 20, type: str = 'managed'):
		"""List chat threads the logged-in user has joined (paginated)."""
		endpoint = f'/v1/chat/joined-threads?start={start}&size={size}&type={type}'
		async with self.session.get(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else objects.Thread(loads(await response.text()))
	async def send_message(self, chatId: int, message: str = None, file: AsyncBufferedReader = None, fileType: str = None, file_duration: int = None, message_type: int = 1, replyTo: int = None, pollId: int = None, diceId: int = None):
		"""Send a text or media message over the websocket (requires run_socket).

		Exactly one of ``message`` or ``file`` must be given; media files are
		uploaded first via upload_media(). Silently returns None when the
		socket is disabled.
		"""
		if not self.run_socket:return
		data = {
			"threadId": chatId,
			"uid": self.profile.uid,
			# Random sequence id to identify this frame.
			"seqId": randint(0, maxsize),
			"extensions": {}
		}
		if message:
			data["content"]=message
			data["type"]=message_type
		elif file:
			# Message type 2 = image, 6 = audio; upload targets differ likewise.
			data["type"]= 2 if fileType == "image" else 6
			data["media"] = await self.upload_media(file=file, fileType=fileType, target= 8 if fileType == "image" else 10, returnType='dict', duration=file_duration*1000 if file_duration else 0)
		else:
			raise exceptions.WrongType('Specify the "message" or "file" argument')
		# Optional extensions: reply threading, polls and dice games.
		if replyTo: data["extensions"]["replyMessageId"] = replyTo
		if pollId: data["extensions"]["pollId"] = pollId
		if diceId: data["extensions"]["diceId"] = diceId
		resp = await self.send(t=1, data=data, threadId=chatId)
		return resp
	async def send_verify_code(self, email: str, country_code: str = None):
		"""Request an email verification code (purpose 1 = registration).

		Falls back to the client's configured country code when none is given.
		"""
		data = dumps({
			"authType": 1,
			"purpose": 1,
			"email": email,
			"password": "",
			"phoneNumber": "",
			"securityCode": "",
			"invitationCode": "",
			"secret": "",
			"gender": 0,
			"birthday": "1990-01-01",
			"requestToBeReactivated": False,
			"countryCode": country_code if country_code else self.country_code,
			"suggestedCountryCode": country_code.upper() if country_code else self.country_code.upper(),
			"ignoresDisabled": True,
			"rawDeviceIdThree": gen.generate_device_id_three()
		})
		endpoint = '/v1/auth/request-security-validation'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def register(self, email: str, password: str, code: str, icon: Union[AsyncBufferedReader, objects.Media], country_code: str = None, invitation_code: str = None, nickname: str = 'XsarzyBest', tag_line: str = 'projectZ', gender: int = 100, birthday: str = '1990-01-01'):
		"""Register a new account using the code from send_verify_code().

		``icon`` may be a pre-uploaded objects.Media or a file reader, in which
		case it is uploaded here first.
		"""
		data = dumps({
			"authType": 1,
			"purpose": 1,
			"email": email,
			"password": password,
			"securityCode": code,
			"invitationCode": invitation_code or "",
			"nickname": nickname,
			"tagLine": tag_line,
			"icon": icon.json if isinstance(icon, objects.Media) else await self.upload_media(icon, returnType='dict', fileType="image"),
			"nameCardBackground": None,
			"gender": gender,
			"birthday": birthday,
			"requestToBeReactivated": False,
			"countryCode": country_code if country_code else self.country_code,
			"suggestedCountryCode": country_code.upper() if country_code else self.country_code.upper(),
			"ignoresDisabled": True,
			"rawDeviceIdThree": gen.generate_device_id_three()
		})
		endpoint = '/v1/auth/register'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def visit(self, userId):
		"""Record a profile visit to the given user."""
		endpoint = f'/v1/users/profile/{userId}/visit'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def add_to_favorites(self, userId: Union[list, int]):
		"""Add one user id, or a list of them, to membership favorites."""
		userIds = userId if isinstance(userId, list) else [userId]
		data = dumps({"targetUids": userIds})
		endpoint = '/v1/users/membership/favorites'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def report(self, userId: int, message: str, images: Union[AsyncBufferedReader, list[AsyncBufferedReader]], flagType: int = 100):
		"""File a report (flag) against a user, attaching one or more images.

		Each image is uploaded via upload_media() before the flag is submitted.
		"""
		media = list()
		# Normalise to a list of readers; anything else is rejected.
		if isinstance(images, AsyncBufferedReader):images=[images]
		elif isinstance(images, list):pass
		else:raise exceptions.WrongType()
		data = {
			"objectId": userId,
			"objectType": 4,
			"flagType": flagType,
			"message": message,
		}
		for image in images:
			media.append(await self.upload_media(image, returnType='dict', fileType="image"))
		data["mediaList"] = media
		data = dumps(data)
		endpoint = f'/v1/flags'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def delete_message(self, chatId: int, messageId: int):
		"""Delete a single message from a chat thread."""
		endpoint = f'/v1/chat/threads/{chatId}/messages/{messageId}'
		async with self.session.delete(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def kick(self, chatId: int, userId: int, denyEntry: bool = False, removeContent: bool = False):
		"""Remove a member from a chat; optionally block re-entry and purge their content."""
		endpoint = f"/v1/chat/threads/{chatId}/members/{userId}?block={str(denyEntry).lower()}&removeContent={str(removeContent).lower()}"
		async with self.session.delete(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def pin_chat(self, chatId):
		"""Pin a chat thread in the joined-threads list."""
		endpoint = f'/v1/chat/threads/{chatId}/pin'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def apply_bubble(self, chatId: int, bubbleColor: str):
		"""Apply a chat-bubble color to the given thread."""
		data = dumps({"threadId": chatId, "bubbleColor": bubbleColor})
		endpoint = f'/v1/chat/apply-bubble'
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
async def invite_to_co_host(self, chatId: int, userId: Union[list, int]):
#TODO
if isinstance(userId, int): userIds = [userId]
elif isinstance(userId, list): userIds = userId
else:raise exceptions.WrongType('Specify the "message" or "file" argument')
data = dumps({"coHostUids": userIds})
endpoint = f"/v1/chat/threads/{chatId}/invite-co-host"
async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def invite_to_host(self, chatId: int, userId: int):
		"""Invite a user to become the host of a chat thread."""
		endpoint = f"/v1/chat/threads/{chatId}/invite-host/{userId}"
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def accept_co_host(self, chatId: int):
		"""Accept a pending co-host invitation for the given chat."""
		endpoint = f"/v1/chat/threads/{chatId}/accept-as-co-host"
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def accept_host(self, chatId: int):
		"""Accept a pending host invitation for the given chat."""
		endpoint = f"/v1/chat/threads/{chatId}/accept-as-host"
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
	async def auto_offline(self, chatId: int, switch: bool = False):
		"""Enable or disable the chat's auto-offline behaviour."""
		endpoint = f"/v1/chat/threads/{chatId}/auto-offline/{'disable' if switch == False else 'enable'}"
		async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
			return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
async def check_in(self):
endpoint = f"/v1/users/check-in"
async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
response = exceptions.CheckException(await response.text()) if response.status != 200 else objects.OrderInfo(loads(await response.text()))
await self.claim_transfer_orders(orderId=response.orderId)
return response
async def claim_transfer_orders(self, orderId: int):
endpoint = f"/biz/v3/transfer-orders/{orderId}/claim"
async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
return exceptions.CheckException(await response.text()) if response.status != 200 else loads(await response.text())#response.status
async def claim_gift_boxes(self, orderId: int):
endpoint = f"/v1/gift-boxes/{orderId}/claim"
async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
async def get_transfer_order_info(self, orderId: int):
    """Fetch info for transfer order *orderId* (the API uses POST); returns decoded JSON on success."""
    path = f"/biz/v1/transfer-orders/{orderId}"
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def send_coins(self, wallet_password: int, userId: int, amount: int, title: str = "All the best!"):
    """Send *amount* coins to *userId*, authorizing with the wallet password; returns decoded JSON on success."""
    payload = dumps({
        "toObjectId": userId,
        "amount": f"{amount}000000000000000000",  # amount scaled by 10**18 (fixed-point string)
        "paymentPassword": str(wallet_password),
        "toObjectType": 4,
        "currencyType": 100,
        "title": title
    })
    path = f"/biz/v1/gift-boxes"
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path, data=payload), data=payload) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def online_chat_status(self, chatId: int, online: bool = True):
    """Set this account's party online status in chat *chatId* (1=online, 2=offline)."""
    payload = dumps({"partyOnlineStatus": 1 if online else 2})
    path = f"/v1/chat/threads/{chatId}/party-online-status"
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path, data=payload), data=payload) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def get_user_info(self, userId: int):
    """Fetch the profile of *userId*; returns an objects.UserProfile on success."""
    path = f'/v1/users/profile/{userId}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.UserProfile(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_circles(self, type: str = 'recommend', categoryId: int = 0, size: int = 10):
    """List circles of the given *type*/*categoryId*; returns an objects.CirclesList on success."""
    path = f'/v1/circles?type={type}&categoryId={categoryId}&size={size}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.CirclesList(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_blocked_users(self):
    """Fetch the ids of users this account has blocked; returns objects.BlockedUsers on success."""
    path = '/v1/users/block-uids'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.BlockedUsers(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_blogs(self, type: str = 'recommend', size: int = 10):
    """List blogs of the given *type*; returns objects.Blogs on success."""
    path = f'/v1/blogs?type={type}&size={size}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.Blogs(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def mark_as_read(self, chatId: int):
    """Mark chat *chatId* as read; returns the HTTP status on success."""
    path = f'/v1/chat/threads/{chatId}/mark-as-read'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def get_chat_threads(self, chatId: int):
    """Fetch the thread record of chat *chatId*; returns the decoded JSON body on success."""
    path = f'/v1/chat/threads/{chatId}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def get_online_chat_members(self, chatId: int):
    """List members currently online in chat *chatId*; returns the decoded JSON body on success."""
    path = f'/v1/chat/threads/{chatId}/online-members'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def get_chat_messages(self, chatId: int, size: int = 10):
    """Fetch up to *size* recent messages of chat *chatId*; returns the decoded JSON body on success."""
    path = f'/v1/chat/threads/{chatId}/messages?size={size}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def get_mention_candidates(self, chatId: int, size: int = 10, queryWord: str = ''):
    """Search chat *chatId* for users mentionable by *queryWord*; returns objects.MentionCandidates on success."""
    path = f'/v1/chat/threads/{chatId}/mention-candidates?size={size}&queryWord={queryWord}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.MentionCandidates(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_comments(self, userId: int, type: int = 4, replyId: int= 0, size: int = 30, onlyPinned: int = 0):
    """List comments whose parent is *userId* (parentType=*type*); returns objects.Comments on success."""
    path = f'/v1/comments?parentId={userId}&parentType={type}&replyId={replyId}&size={size}&onlyPinned={onlyPinned}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.Comments(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def block(self, userId: int):
    """Block the user *userId*; returns the HTTP status on success."""
    path = f'/v1/users/block/{userId}'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def unblock(self, userId: int):
    """Unblock the user *userId*; returns the HTTP status on success."""
    path = f'/v1/users/block/{userId}'
    async with self.session.delete(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def accept_chat_invitation(self, chatId: int):
    """Accept the invitation to join chat *chatId*; returns the HTTP status on success."""
    path = f'/v1/chat/threads/{chatId}/accept-invitation'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def join_circle(self, circleId):
    """Join circle *circleId* (joinMethod=1); returns the HTTP status on success."""
    payload = dumps({"joinMethod": 1})
    path = f'/v1/circles/{circleId}/members'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path, data=payload), data=payload) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def leave_circle(self, circleId):
    """Leave circle *circleId*; returns the HTTP status on success."""
    path = f'/v1/circles/{circleId}/members'
    async with self.session.delete(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def get_circle_info(self, circleId: int):
    """Fetch circle *circleId*; returns an objects.Circle on success."""
    path = f'/v1/circles/{circleId}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.Circle(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_chat_info(self, chatId: int):
    """Fetch chat *chatId*; returns an objects.ChatInfo on success."""
    path = f'/v1/chat/threads/{chatId}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.ChatInfo(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def follow(self, userId: int):
    """Follow the user *userId*; returns the HTTP status on success."""
    path = f'/v1/users/membership/{userId}'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def unfollow(self, userId: int):
    """Unfollow the user *userId*; returns the HTTP status on success."""
    path = f'/v1/users/membership/{userId}'
    async with self.session.delete(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def like(self, commentId: int = None, blogId: int = None, stickerId: int = 65956773102028339):
    """React with *stickerId* to a comment (objectType=3) or a blog (objectType=2).

    Raises WrongType when neither commentId nor blogId is given.
    Returns the HTTP status on success.
    """
    if commentId:
        target_type, target_id = 3, commentId
    elif blogId:
        target_type, target_id = 2, blogId
    else:
        raise exceptions.WrongType()
    payload = dumps({
        "createdTime": 0,
        "stickerId": stickerId,
        "count": 0,
        "justAddTimeMs": 0,
        "objectType": target_type,
        "objectId": target_id
    })
    path = f'/v1/reactions'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path, data=payload), data=payload) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def unlike(self, commentId: int = None, blogId: int = None, stickerId: int = 65956773102028339):
    """Remove the *stickerId* reaction from a comment or a blog.

    Raises WrongType when neither commentId nor blogId is given.
    Returns the HTTP status on success.
    """
    if commentId:
        path = f'/v1/reactions?objectId={commentId}&objectType=3&stickerId={stickerId}'
    elif blogId:
        path = f'/v1/reactions?objectId={blogId}&objectType=2&stickerId={stickerId}'
    else:
        raise exceptions.WrongType()
    async with self.session.delete(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def invite_to_chat(self, userId: Union[int, list], chatId: int):
    """Invite one user id or a list of user ids to chat *chatId*; returns the HTTP status on success."""
    if isinstance(userId, int):
        invited = [userId]
    elif isinstance(userId, list):
        invited = userId
    else:
        raise exceptions.WrongType()
    payload = dumps({"invitedUids": invited})
    path = f'/v1/chat/threads/{chatId}/members-invite'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path, data=payload), data=payload) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def get_my_invitation_code(self):
    """Fetch this account's invitation code; returns objects.invitationCode on success."""
    path = f'/v1/users/multi-invitation-code'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.invitationCode(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_circles_members(self, circleId: int, size: int = 30, type :str = 'normal', pageToken: str = None):
    """List members of circle *circleId*; returns objects.CirclesMembers on success.

    Note: 'isExcludeManger' is the spelling the API expects on the wire.
    """
    path = f'/v1/circles/{circleId}/members?type={type}&size={size}&isExcludeManger=false'
    if pageToken:
        path += f'&pageToken={pageToken}'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.CirclesMembers(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def get_baners(self):
    """Fetch the banner list (sic: 'baners' kept for API compatibility); returns objects.Baners on success."""
    path = '/v2/banners'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.Baners(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def activate_wallet(self, wallet_password: str, code: str, email: str = None):
    """Activate the wallet with a payment password and the emailed security code.

    Fix: the original expression embedded a ``NotLoggined`` *instance* into the
    JSON payload when no email was available (which made ``dumps`` fail with a
    TypeError); now the exception is raised instead.
    """
    identity = email if email else self.profile.email
    if not identity:
        raise exceptions.NotLoggined('You are not authorized')
    data = dumps({
        "authType": 1,
        "identity": identity,
        "paymentPassword": wallet_password,
        "securityCode": code
    })
    endpoint = '/biz/v1/wallet/0/activate'
    async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
        return exceptions.CheckException(await response.text()) if response.status != 200 else response.status
async def activate_shop(self):
    """Activate this account's store; returns objects.ActivateShop on success."""
    path = '/biz/v1/activate-store'
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.ActivateShop(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def wallet_info(self):
    """Fetch this account's wallet; returns objects.WalletInfo on success."""
    path = '/biz/v1/wallet'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.WalletInfo(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def my_nfts(self):
    """Fetch this account's NFT count; returns objects.Nfts on success."""
    path = '/biz/v1/nfts/count'
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.Nfts(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def comment(self, message: str, userId: int = None, blogId: int = None, replyId: dict = None):
    """Post *message* as a comment on a user wall (userId) or a blog (blogId).

    :param replyId: optional dict describing the comment being replied to;
        assumes keys 'commentId' and 'userId' -- TODO confirm against callers.
    Raises WrongType when neither userId nor blogId is given.
    Returns objects.Comments on success.
    """
    data = {
        "commentId": 0,
        "status": 1,
        "parentId": userId,
        "replyId": 0,
        "circleId": 0,
        "uid": 0,
        "content": message,
        "mediaList": [],
        "commentType": 1,
        "subComments": [],
        "subCommentsCount": 0,
        "isPinned": False
    }
    # parentType 4 targets a user wall, 2 a blog (mirrors get_comments).
    if userId:
        data['parentType'] = 4
    elif blogId:
        data['parentType'] = 2
    else:
        raise exceptions.WrongType()
    if replyId:
        data['replyId'] = replyId['commentId']
        data['extensions'] = {"replyToUid": replyId['userId'], "contentStatus": 1}
    data = dumps(data)
    endpoint = f'/v1/comments'
    async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
        return exceptions.CheckException(await response.text()) if response.status != 200 else objects.Comments(loads(await response.text()))
async def get_alerts(self, groupId: int = 3, size: int = 30):
    """Fetch up to *size* alerts for *groupId*; returns the decoded JSON body on success.

    Fix: the original signature omitted ``self`` even though the body uses
    ``self.session`` / ``self.api`` / ``self.parse_headers``, so any call as a
    method bound the instance to ``groupId`` and then crashed.
    """
    endpoint = f"/v1/alerts?groupId={groupId}&size={size}"
    async with self.session.get(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint)) as response:
        return exceptions.CheckException(await response.text()) if response.status != 200 else loads(await response.text())
async def get_moods(self):
    """Fetch the list of available moods; returns the decoded JSON body on success."""
    path = f"/v1/moods"
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def change_password(self, oldPassword: str, newPassword: str):
    """Change the account password; returns the HTTP status on success."""
    payload = dumps({"oldPassword": oldPassword, "newPassword": newPassword})
    path = "/v1/auth/change-password"
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path, data=payload), data=payload) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def get_message_info(self, chatId: int, messageId: int):
    """Fetch message *messageId* from chat *chatId*; returns the decoded JSON body on success."""
    path = f"/v1/chat/threads/{chatId}/messages/{messageId}"
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def delete_chat(self, chatId: int):
    """Delete chat *chatId*; returns the HTTP status on success."""
    path = f"/v1/chat/threads/{chatId}"
    async with self.session.delete(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return resp.status
        return exceptions.CheckException(await resp.text())
async def qivotes_chat(self, chatId: int):
    """Cast a qi-vote for chat *chatId*; returns the decoded JSON body on success.

    Fix: the payload previously used ``{chatId}`` -- a one-element *set*
    literal -- for "objectId", which json.dumps cannot serialize (TypeError).
    """
    data = dumps({
        "uid": 0,
        "objectId": chatId,
        "objectType": 1,
        "timezone": self.time_zone,
        "votedCount": 1,
        "votedDate": 0,
        "createdTime": 0,
        "lastVoteTime": 0
    })
    endpoint = "/v1/qivotes"
    async with self.session.post(f"{self.api}{endpoint}", headers=self.parse_headers(endpoint=endpoint, data=data), data=data) as response:
        return exceptions.CheckException(await response.text()) if response.status != 200 else loads(await response.text())
async def get_user_tasks(self):
    """Fetch this account's task list; returns the decoded JSON body on success."""
    path = f"/v2/user-tasks"
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
async def get_my_gifts(self, size: int = 60):
    """List up to *size* received transfer orders; returns objects.Gifts on success."""
    path = f"/biz/v2/transfer-orders?size={size}"
    async with self.session.get(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return objects.Gifts(loads(await resp.text()))
        return exceptions.CheckException(await resp.text())
async def gift_withdrawn(self, orderId):
    """Withdraw the gift-box order *orderId*; returns the decoded JSON body on success."""
    path = f"/biz/v1/gift-boxes/{orderId}/withdrawn"
    async with self.session.post(f"{self.api}{path}", headers=self.parse_headers(endpoint=path)) as resp:
        if resp.status == 200:
            return loads(await resp.text())
        return exceptions.CheckException(await resp.text())
/elastalert2-2.13.2-py3-none-any.whl/elastalert/__init__.py | import copy
import elastalert.eql as eql
from elasticsearch import Elasticsearch
from elasticsearch import RequestsHttpConnection
from elasticsearch.client import _make_path
from elasticsearch.client import query_params
from elasticsearch.exceptions import TransportError
class ElasticSearchClient(Elasticsearch):
    """ Extension of low level :class:`Elasticsearch` client with additional version resolving features """

    def __init__(self, conf):
        """
        :arg conf: es_conn_config dictionary. Ref. :func:`~util.build_es_conn_config`
        """
        super(ElasticSearchClient, self).__init__(host=conf.get('es_host'),
                                                  hosts=conf.get('es_hosts'),
                                                  port=conf['es_port'],
                                                  url_prefix=conf['es_url_prefix'],
                                                  use_ssl=conf['use_ssl'],
                                                  verify_certs=conf['verify_certs'],
                                                  ca_certs=conf['ca_certs'],
                                                  ssl_show_warn=conf['ssl_show_warn'],
                                                  connection_class=RequestsHttpConnection,
                                                  http_auth=conf['http_auth'],
                                                  headers=conf['headers'],
                                                  timeout=conf['es_conn_timeout'],
                                                  send_get_body_as=conf['send_get_body_as'],
                                                  client_cert=conf['client_cert'],
                                                  client_key=conf['client_key'])
        # Keep a shallow private copy so later mutation of the caller's dict
        # does not change what `conf` reports.
        self._conf = copy.copy(conf)
        # Lazily resolved on first access of `es_version`.
        self._es_version = None

    @property
    def conf(self):
        """
        Returns the provided es_conn_config used when initializing the class instance.
        """
        return self._conf

    @property
    def es_version(self):
        """
        Returns the reported version from the Elasticsearch server (cached after first lookup).
        """
        if self._es_version is None:
            self._es_version = util.get_version_from_cluster_info(self)
        return self._es_version

    def is_atleastseven(self):
        """
        Returns True when the Elasticsearch server version >= 7
        """
        return int(self.es_version.split(".")[0]) >= 7

    def is_atleasteight(self):
        """
        Returns True when the Elasticsearch server version >= 8
        """
        return int(self.es_version.split(".")[0]) >= 8

    def resolve_writeback_index(self, writeback_index, doc_type):
        """Map a writeback doc_type to its per-type index name; unknown types use the base index."""
        if doc_type == 'silence':
            return writeback_index + '_silence'
        elif doc_type == 'past_elastalert':
            return writeback_index + '_past'
        elif doc_type == 'elastalert_status':
            return writeback_index + '_status'
        elif doc_type == 'elastalert_error':
            return writeback_index + '_error'
        return writeback_index

    @query_params(
        "_source",
        "_source_excludes",
        "_source_includes",
        "allow_no_indices",
        "allow_partial_search_results",
        "analyze_wildcard",
        "analyzer",
        "batched_reduce_size",
        "ccs_minimize_roundtrips",
        "default_operator",
        "df",
        "docvalue_fields",
        "expand_wildcards",
        "explain",
        "from_",
        "ignore_throttled",
        "ignore_unavailable",
        "lenient",
        "max_concurrent_shard_requests",
        "pre_filter_shard_size",
        "preference",
        "q",
        "request_cache",
        "rest_total_hits_as_int",
        "routing",
        "scroll",
        "search_type",
        "seq_no_primary_term",
        "size",
        "sort",
        "stats",
        "stored_fields",
        "suggest_field",
        "suggest_mode",
        "suggest_size",
        "suggest_text",
        "terminate_after",
        "timeout",
        "track_scores",
        "track_total_hits",
        "typed_keys",
        "version",
    )
    def search(self, body=None, index=None, doc_type=None, params=None, headers=None):
        # This implementation of search is nearly identical to the base class with the following exceptions:
        # 1. If the request body contains an EQL query, the body will be restructured to support the EQL API.
        # 2. The path will be set to the EQL API endpoint, if #1 is true.
        # 3. The scroll and _source_includes params will be dropped if #1 is true, since the EQL API doesn't support them.
        # 4. The size param will be moved to a body parameter instead of a top-level param if #1 is true.
        # 5. The results will be converted from EQL API format into the standard search format.
        # from is a reserved word so it cannot be used, use from_ instead
        if "from_" in params:
            params["from"] = params.pop("from_")
        path = _make_path(index, doc_type, "_search")
        eql_body = eql.format_request(body)
        if eql_body is not None:
            path = path.replace('/_search', '/_eql/search')
            body = eql_body
            if 'size' in params:
                body['size'] = int(params.pop('size'))
            if 'scroll' in params:
                params.pop('scroll')
            if '_source_includes' in params:
                params.pop('_source_includes')
        results = self.transport.perform_request(
            "POST",
            path,
            params=params,
            headers=headers,
            body=body,
        )
        # Converts EQL-format hits in place; a no-op for regular search results.
        # (Fix: removed a stray trailing semicolon left over from the original.)
        eql.format_results(results)
        return results
/JoUtil-1.3.3-py3-none-any.whl/JoTools/utils/LivpUtil.py |
# 处理 iphone 拍照后得到的 livp 格式的图片,解压为 heic + mov 并进一步解析为 jpg
import os
from ..utils.ZipUtil import ZipUtil
import whatimage
import pyheif
import traceback
from PIL import Image
class LivpUtil():
    """Helpers for iPhone ``.livp`` photos: unzip to HEIC + MOV, then convert the HEIC to JPEG."""

    @staticmethod
    def unzip_to_heic(file_path, save_folder):
        """Extract a .livp archive into *save_folder* and return the path of its .heic member.

        Fix: search every extracted member for the ``.heic`` suffix instead of
        assuming the archive holds exactly two entries; raises ValueError when
        no .heic member is present.
        """
        if not str(file_path).endswith(".livp") or not os.path.isfile(file_path):
            raise ValueError("error livp file path")
        # All member paths produced by the extraction.
        file_path_list = ZipUtil.unzip_file(file_path, save_folder)
        for each_path in file_path_list:
            if each_path.endswith('.heic'):
                return each_path
        raise ValueError("no .heic member found in livp archive")

    @staticmethod
    def decodeImage(bytesIo, save_path):
        """Decode HEIC bytes and save them as a JPEG at *save_path* (best effort: errors are printed, not raised)."""
        try:
            fmt = whatimage.identify_image(bytesIo)
            if fmt in ['heic']:
                heif = pyheif.read_heif(bytesIo)
                image = Image.frombytes(mode=heif.mode, size=heif.size, data=heif.data)
                image.save(save_path, format="jpeg")
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; keep the best-effort behaviour
            # but only for ordinary exceptions.
            traceback.print_exc()

    @staticmethod
    def read_image_file_rb(file_path):
        """Read *file_path* in binary mode and return its bytes."""
        with open(file_path, 'rb') as f:
            file_data = f.read()
        return file_data

    @staticmethod
    def heic_to_jpg(heic_file_path, save_path):
        """Convert the .heic file at *heic_file_path* into a .jpg written to *save_path*."""
        data = LivpUtil.read_image_file_rb(heic_file_path)
        LivpUtil.decodeImage(data, save_path)
if __name__ == "__main__":
    # Batch-convert every .livp photo in livp_dir: extract the .heic member
    # into temp_folder, then decode it to a .jpg in save_folder.
    livp_dir = r"D:\AppData\baiduwangpan\20221015_宜兴出差"
    temp_folder = r"C:\Users\14271\Desktop\del"
    save_folder = r"D:\AppData\baiduwangpan\20221015_宜兴出差"
    for each_name in os.listdir(livp_dir):
        file_path = os.path.join(livp_dir, each_name)
        # Step 1: unpack the archive and get the .heic member's path.
        uzip_path = LivpUtil.unzip_to_heic(file_path, temp_folder)
        # Step 2: decode the .heic into a .jpg (replace the 'heic' suffix with 'jpg').
        uzip_name = os.path.split(uzip_path)[1]
        save_path = os.path.join(save_folder, uzip_name[:-4] + 'jpg')
        LivpUtil.heic_to_jpg(uzip_path, save_path)
/cloudberry-django-freeradius-1234.0.1.0.tar.gz/cloudberry-django-freeradius-1234.0.1.0/django_freeradius/migrations/0003_auto_20170727_1613.py | from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename RadiusAccounting's `acct_*` fields to plain names and align its
    columns with the stock FreeRADIUS `radacct` schema.

    NOTE(review): this is an applied migration -- its operations must not be
    changed, only documented.
    """

    dependencies = [
        ('django_freeradius', '0002_radiusaccounting_acct_update_time'),
    ]

    operations = [
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='rad_acct_id',
            new_name='id',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_unique_id',
            new_name='unique_id',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_authentic',
            new_name='authentication',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_input_octets',
            new_name='input_octets',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_output_octets',
            new_name='output_octets',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_session_id',
            new_name='session_id',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_session_time',
            new_name='session_time',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_start_time',
            new_name='start_time',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_stop_time',
            new_name='stop_time',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_terminate_cause',
            new_name='terminate_cause',
        ),
        migrations.RenameField(
            model_name='radiusaccounting',
            old_name='acct_update_time',
            new_name='update_time',
        ),
        migrations.RemoveField(
            model_name='radiusaccounting',
            name='acct_start_delay',
        ),
        migrations.RemoveField(
            model_name='radiusaccounting',
            name='acct_stop_delay',
        ),
        migrations.RemoveField(
            model_name='radiusaccounting',
            name='xascend_session_svrkey',
        ),
        migrations.RemoveField(
            model_name='radiusaccounting',
            name='created',
        ),
        migrations.RemoveField(
            model_name='radiusaccounting',
            name='modified',
        ),
        migrations.AddField(
            model_name='radiusaccounting',
            name='interval',
            field=models.IntegerField(blank=True, db_column='acctinterval', null=True, verbose_name='interval'),
        ),
        # NOTE(review): `called_station_id` maps to db_column 'callingstationid'
        # and `calling_station_id` to 'calledstationid' below -- this looks
        # swapped; confirm against the upstream project before relying on it.
        migrations.AlterField(
            model_name='radiusaccounting',
            name='called_station_id',
            field=models.CharField(blank=True, db_column='callingstationid', null=True, max_length=50, verbose_name='called station ID'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='calling_station_id',
            field=models.CharField(blank=True, db_column='calledstationid', null=True, max_length=50, verbose_name='calling station ID'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='connection_info_start',
            field=models.CharField(blank=True, db_column='connectinfo_start', max_length=50, null=True, verbose_name='connection info start'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='connection_info_stop',
            field=models.CharField(blank=True, db_column='connectinfo_stop', max_length=50, null=True, verbose_name='connection info stop'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='framed_ip_address',
            field=models.CharField(blank=True, db_column='framedipaddress', db_index=False, max_length=15, verbose_name='framed IP address'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='framed_protocol',
            field=models.CharField(blank=True, db_column='framedprotocol', max_length=32, null=True, verbose_name='framed protocol'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='groupname',
            field=models.CharField(blank=True, max_length=64, null=True, verbose_name='group name'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='nas_port_id',
            field=models.CharField(blank=True, db_column='nasportid', max_length=15, null=True, verbose_name='NAS port ID'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='nas_port_type',
            field=models.CharField(blank=True, db_column='nasporttype', max_length=32, null=True, verbose_name='NAS port type'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='id',
            field=models.BigAutoField(db_column='radacctid', primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='realm',
            field=models.CharField(blank=True, max_length=64, null=True, verbose_name='realm'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='service_type',
            field=models.CharField(blank=True, db_column='servicetype', max_length=32, null=True, verbose_name='service type'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='username',
            field=models.CharField(blank=True, db_index=True, max_length=64, null=True, verbose_name='username'),
        ),
        migrations.AlterField(
            model_name='radiusaccounting',
            name='nas_ip_address',
            field=models.GenericIPAddressField(db_column='nasipaddress', db_index=False, verbose_name='NAS IP address'),
        )
    ]
/trytond-6.8.3.tar.gz/trytond-6.8.3/doc/tutorial/module/setup_database.rst | .. _tutorial-module-setup-database:
Initialize the database
=======================
By default Tryton, use an SQLite database stored in the folder :file:`db` of
your home directory.
This can be changed in the ``database`` section of the `configuration
<topics-configuration>`.
Now creating a Tryton database is only a matter of executing the following
commands:
.. code-block:: console
$ mkdir ~/db
$ touch ~/db/test.sqlite
$ trytond-admin -d test --all
You will be prompted to set the administrator email and password.
Once the database is initialized you can run the Tryton server:
.. code-block:: console
$ trytond
When you connect to the database with a Tryton client, you will be greeted by
the module configuration wizard.
We will continue with :ref:`the anatomy of the module <tutorial-module-anatomy>`.
| PypiClean |
/winevtrc-20220106.tar.gz/winevtrc-20220106/docs/sources/eventlog-providers/Provider-Microsoft-Windows-WMP-Setup_WM.md | ## Microsoft-Windows-WMP-Setup_WM
Seen on:
* Windows 10 (1511, 1607, 1703, 1709, 1803, 1809, 1903, 1909, 2004, 20H2)
* Windows 11 (21H2)
* Windows 7
* Windows 8.0
* Windows 8.1
<table border="1" class="docutils">
<tbody>
<tr>
<td><b>Log source(s):</b></td>
<td>Microsoft-Windows-WMP-Setup_WM</td>
</tr>
<tr>
<td><b>Identifier:</b></td>
<td>{0d759f0f-cff9-4902-8867-eb9e29d7a98b}</td>
</tr>
<tr>
<td><b>Event message file(s):</b></td>
<td>%programfiles%\windows media player\setup_wm.exe</td>
</tr>
</tbody>
</table>
| PypiClean |
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/eslint/lib/rules/max-params.js | "use strict";
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const astUtils = require("./utils/ast-utils");
const { upperCaseFirst } = require("../shared/string-utils");
//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------
/** @type {import('../shared/types').Rule} */
module.exports = {
meta: {
type: "suggestion",
docs: {
description: "enforce a maximum number of parameters in function definitions",
recommended: false,
url: "https://eslint.org/docs/rules/max-params"
},
schema: [
{
oneOf: [
{
type: "integer",
minimum: 0
},
{
type: "object",
properties: {
maximum: {
type: "integer",
minimum: 0
},
max: {
type: "integer",
minimum: 0
}
},
additionalProperties: false
}
]
}
],
messages: {
exceed: "{{name}} has too many parameters ({{count}}). Maximum allowed is {{max}}."
}
},
create(context) {
const sourceCode = context.getSourceCode();
const option = context.options[0];
let numParams = 3;
if (
typeof option === "object" &&
(Object.prototype.hasOwnProperty.call(option, "maximum") || Object.prototype.hasOwnProperty.call(option, "max"))
) {
numParams = option.maximum || option.max;
}
if (typeof option === "number") {
numParams = option;
}
/**
* Checks a function to see if it has too many parameters.
* @param {ASTNode} node The node to check.
* @returns {void}
* @private
*/
function checkFunction(node) {
if (node.params.length > numParams) {
context.report({
loc: astUtils.getFunctionHeadLoc(node, sourceCode),
node,
messageId: "exceed",
data: {
name: upperCaseFirst(astUtils.getFunctionNameWithKind(node)),
count: node.params.length,
max: numParams
}
});
}
}
return {
FunctionDeclaration: checkFunction,
ArrowFunctionExpression: checkFunction,
FunctionExpression: checkFunction
};
}
}; | PypiClean |
/py3rosmsgs-1.13-py3-none-any.whl/rospy/rostime.py | import sys
import threading
import time
import traceback
import rospy.exceptions
import genpy
## /time support. This hooks into the rospy Time representation and
## allows it to be overridden with data from the /time topic.

# True once ROS time has been initialized.
_rostime_initialized = False
# Latest time value received from the /time topic, or None when unset.
_rostime_current = None
# Condition variable guarding/notifying updates to the module state above.
_rostime_cond = threading.Condition()
# subclass genpy to provide abstraction layer
class Duration(genpy.Duration):
    """
    Duration represents the ROS 'duration' primitive type, which
    consists of two integers: seconds and nanoseconds. The Duration
    class allows you to add and subtract Duration instances, including
    adding and subtracting from L{Time} instances.

    Usage::

      five_seconds = Duration(5)
      five_nanoseconds = Duration(0, 5)

      print 'Fields are', five_seconds.secs, five_seconds.nsecs

      # Duration arithmetic
      ten_seconds = five_seconds + five_seconds
      five_secs_ago = rospy.Time.now() - five_seconds # Time minus Duration is a Time

      true_val = ten_second > five_seconds
    """
    # No extra per-instance state beyond what genpy.Duration stores.
    __slots__ = []

    def __init__(self, secs=0, nsecs=0):
        """
        Create new Duration instance. secs and nsecs are integers and
        correspond to the ROS 'duration' primitive type.

        @param secs: seconds
        @type  secs: int
        @param nsecs: nanoseconds
        @type  nsecs: int
        """
        super(Duration, self).__init__(secs, nsecs)

    def __repr__(self):
        # Rendered as total nanoseconds, e.g. rospy.Duration[5000000000].
        return 'rospy.Duration[%d]' % self.to_nsec()
class Time(genpy.Time):
    """ROS 'time' primitive type: integer seconds since the epoch plus nanoseconds.

    Use the L{Time.now()} factory to obtain the current ROS time (wall clock or
    simulated clock) and L{from_sec()} to build an instance from a
    time.time()-style float.  Subtracting two Times yields a Duration; adding a
    Duration to a Time yields a new Time.

    Usage::
        now = rospy.Time.now()
        zero_time = rospy.Time()
        print('Fields are', now.secs, now.nsecs)
        # Time arithmetic
        five_secs_ago = now - rospy.Duration(5)  # Time minus Duration is a Time
        five_seconds = now - five_secs_ago       # Time minus Time is a Duration
        true_val = now > five_secs_ago
        # NOTE: in general, you will want to avoid using time.time() in ROS code
        import time
        py_time = rospy.Time.from_sec(time.time())
    """
    __slots__ = []

    def __init__(self, secs=0, nsecs=0):
        """Create a Time from integer seconds and nanoseconds.

        Prefer the static L{from_sec()} and L{now()} factory methods.

        @param secs: seconds since epoch
        @type  secs: int
        @param nsecs: nanoseconds since seconds (since epoch)
        @type  nsecs: int
        """
        super().__init__(secs, nsecs)

    def __repr__(self):
        total_nsecs = self.to_nsec()
        return 'rospy.Time[%d]' % total_nsecs

    @staticmethod
    def now():
        """Return a L{Time} for the current time (wall clock or simulated clock).

        Strongly preferred over reading the wall clock directly and wrapping
        the result yourself.

        @return: current time
        @rtype: L{Time}
        """
        return get_rostime()

    @classmethod
    def from_seconds(cls, float_secs):
        """Deprecated alias retained for backwards compatibility; use from_sec().

        @param float_secs: time value in time.time() format
        @type  float_secs: float
        @return: Time instance for the specified time
        @rtype: L{Time}
        """
        return cls.from_sec(float_secs)
def _set_rostime(t):
    """Callback to update ROS time from a ROS topic.

    Normalizes the incoming stamp to the rospy L{Time} subclass, publishes it
    as the current simulated time, and wakes any thread waiting on the time
    condition variable.

    @param t: new current time
    @type  t: genpy.Time or L{Time}
    @raise ValueError: if t is not a Time instance
    """
    if isinstance(t, genpy.Time):
        # re-wrap into the rospy subclass (rospy Time is itself a genpy.Time)
        t = Time(t.secs, t.nsecs)
    elif not isinstance(t, Time):
        raise ValueError("must be Time instance: %s"%t.__class__)
    global _rostime_current
    _rostime_current = t
    # `with` acquires/releases safely (the old acquire() inside try could call
    # release() on an unheld lock); notify_all replaces the deprecated notifyAll.
    with _rostime_cond:
        _rostime_cond.notify_all()
def get_rostime():
    """Get the current time as a L{Time} object.

    Returns the simulated clock value when one is active, otherwise the wall
    clock.

    @return: current time as a L{rospy.Time} object
    @rtype: L{Time}
    @raise rospy.exceptions.ROSInitException: if ROS time is not initialized
    """
    if not _rostime_initialized:
        raise rospy.exceptions.ROSInitException("time is not initialized. Have you called init_node()?")
    # Snapshot the global once: switch_to_wallclock() may set it to None from
    # another thread, so a check-then-reread could return None instead of a Time.
    current = _rostime_current
    if current is not None:
        # sim time is active
        return current
    # wallclock fallback: split float seconds into integer secs/nsecs
    float_secs = time.time()
    secs = int(float_secs)
    nsecs = int((float_secs - secs) * 1000000000)
    return Time(secs, nsecs)
def get_time():
    """Get the current time as float seconds (same format as time.time()).

    @return: time in secs (time.time() format)
    @rtype: float
    """
    current = Time.now()
    return current.to_sec()
def set_rostime_initialized(val):
    """
    Internal use.
    Mark rostime as initialized (or not). This flag lets other routines
    (e.g. get_rostime) raise an exception if rostime is used before the
    underlying system has been initialized.
    @param val: value for initialization state
    @type  val: bool
    """
    global _rostime_initialized
    _rostime_initialized = val
def is_rostime_initialized():
    """
    Internal use.
    Report the flag set by set_rostime_initialized().
    @return: True if rostime has been initialized
    @rtype: bool
    """
    return _rostime_initialized
def get_rostime_cond():
    """
    Internal API for helper routines that need to block until time updates;
    the condition is notified whenever the ROS time value or source changes.
    @return: rostime conditional var
    @rtype: threading.Condition
    """
    return _rostime_cond
def is_wallclock():
    """Internal use for ROS-time routines.

    @return: True if ROS is currently using wallclock time (no sim time set)
    @rtype: bool
    """
    # `is None` instead of `== None`: identity test is the correct idiom and
    # avoids routing through genpy's overloaded equality operator.
    return _rostime_current is None
def switch_to_wallclock():
    """Internal use.

    Switch ROS back to wallclock time (clears the simulated time) and wake
    any thread waiting on the time condition. This is mainly for testing
    purposes.
    """
    global _rostime_current
    _rostime_current = None
    # `with` acquires/releases safely (the old acquire() inside try could call
    # release() on an unheld lock); notify_all replaces the deprecated notifyAll.
    with _rostime_cond:
        _rostime_cond.notify_all()
def wallsleep(duration):
    """Internal use: platform-dependent wall-clock sleep.

    On Windows, time.sleep raises IOError when a signal is caught — even after
    the signal has been handled by a callback, it still raises IOError once
    handling completes (see https://code.ros.org/trac/ros/ticket/3421), so the
    exception is swallowed there.  Other platforms (cygwin included) sleep
    normally.
    """
    if sys.platform != 'win32':
        time.sleep(duration)
        return
    try:
        time.sleep(duration)
    except IOError:
        # interrupted by a handled signal on Windows — safe to ignore
        pass
/golive-django-openapi-0.3.0.tar.gz/golive-django-openapi-0.3.0/golive_django_openapi/openapi/django/model_pdm_meta/base.py |
# Public names exported by this module.
__all__ = [
    "ConfigUnset",
    "BaseModelPDMMeta",
]
import re
import typing
from typing import Type
from pydantic import Extra, PrivateAttr, ValidationError, Field
from django.db.models import Model, Field as django_field
from golive_django_openapi.utils.cls_utils import *
from golive_django_openapi.utils.logger_utils import *
from ...models.base import pdm
from .convert import *
from .prop import *
from ..model_pdm import BaseModelPDM
# Module-level logger bound to this module's dotted name.
logger = get_bound_logger(__name__)
class ConfigUnset:
    """Sentinel type marking a modelPDMConfig parameter as not configured."""
class BaseModelPDMMeta(pdm):
    """Configuration for exporting a django model as a pydantic model (PDM).

    Collects which table columns and which "props" (specially decorated model
    methods) participate, then builds the resulting pydantic model class via
    :meth:`init_config`.
    """
    # Which base class the generated PDM uses; normally left untouched.
    base_pdm: Type[pdm] = Field(BaseModelPDM)
    # Whitelist: only these table columns are included.
    column_include: tuple[typing.Union[str, django_field], ...] = Field(ConfigUnset)
    # Blacklist: these table columns are excluded.
    column_exclude: tuple[typing.Union[str, django_field], ...] = Field(ConfigUnset)
    # Whitelist: only these props are included.
    prop_include: tuple[str, ...] = Field(ConfigUnset)
    # Blacklist: these props are excluded.
    prop_exclude: tuple[str, ...] = Field(ConfigUnset)
    # "Slot" dict for the generated subclass:
    # after a PDM has been built from base_pdm (columns first, then props),
    # these entries are spliced in when generating the final subclass.
    # Used to patch the validation of individual fields.
    sub_pdm_slot_dict: dict[str, typing.Any] = Field(ConfigUnset)
    # Force every field of the generated PDM to be optional.
    all_optional: bool = Field(False)
    # Flag used for getter switching; note it must be unique across all
    # model-PDMs of a single django model.
    switch_flag: str = Field("")
    # ---- Internal state below: users must never configure these manually ----
    # The django model this meta belongs to.
    _django_model = PrivateAttr()
    # Table columns actually used: {column name: type definition}.
    _columns: dict[str, typing.Any] = PrivateAttr()
    # Props actually used: {prop name: PropMeta instance}.
    _props: dict[str, PropMeta] = PrivateAttr()
    # Final set of usable keys (union of column and prop names).
    _keys: set[str] = PrivateAttr()
    # The generated pydantic model class.
    _pdm: Type[pdm] = PrivateAttr()
    # Regex used to derive a prop's name from its method name.
    _prop_method_regex: str = PrivateAttr(r"^(.+?)$")
    class Config:
        # Reject unknown config keys; allow arbitrary (non-pydantic) types
        # such as django field instances as values.
        extra = Extra.forbid
        arbitrary_types_allowed = True
    def prepare_columns(self, model):
        """Entries of .column_* may be configured as strings; convert them to django fields here."""
        def _inner(target_name: str):
            r = set()
            target = getattr(self, target_name)
            if target is ConfigUnset:
                return
            for c in target:
                assert isinstance(c, (str, django_field)), \
                    f"pass django field name or field to {target_name}, not {type(c)=}"
                if isinstance(c, str):
                    try:
                        # resolve the model attribute (a descriptor) to its underlying field
                        c = getattr(getattr(model, c), "field")
                    except (NameError, AttributeError) as e:
                        assert 0, f"column name {c} not exists in {model}: {e}"
                r.add(c)
            setattr(self, target_name, tuple(r))
        _inner("column_include")
        _inner("column_exclude")
    def prepare_props(self, model):
        """Validate/normalize the .prop_* configuration entries.

        NOTE(review): this looks unfinished — ``r`` is collected but never
        written back, so only the ConfigUnset short-circuit has any effect.
        Confirm intent before relying on it.
        """
        def _inner(target_name: str):
            r = set()
            target = getattr(self, target_name)
            if target is ConfigUnset:
                return
        _inner("prop_include")
        _inner("prop_exclude")
    @staticmethod
    def raise_mutually_exclusive(a, b):
        """The values of two mutually exclusive fields must not intersect."""
        if a is not ConfigUnset and b is not ConfigUnset:
            intersection = set(a).intersection(b)
            if intersection:
                raise ValueError(f"{a} and {b} are mutually exclusive and can't have intersection: {intersection}")
    def column_need(self, c):
        """Return whether column *c* is selected by the include/exclude rules.

        :param c: django model field
        :return: True if the column should be part of the generated PDM
        """
        if self.column_include is not ConfigUnset:
            return c in self.column_include
        return any([
            self.column_exclude is not ConfigUnset and c not in self.column_exclude,
            self.column_include is ConfigUnset and self.column_exclude is ConfigUnset
        ])
    def prop_need(self, c: str):
        """Return whether prop *c* is selected by the include/exclude rules.

        :param c: the prop's actual name
        :return: True if the prop should be part of the generated PDM
        """
        if self.prop_include is not ConfigUnset:
            return c in self.prop_include
        return any([
            self.prop_exclude is not ConfigUnset and c not in self.prop_exclude,
            self.prop_include is ConfigUnset and self.prop_exclude is ConfigUnset
        ])
    def get_columns(self, model):
        """Populate ._columns from the model's concrete fields.

        :param model: django model class
        :return: None
        """
        self.raise_mutually_exclusive(self.column_include, self.column_exclude)
        assert safe_issubclass(model, Model)
        r = {}
        for field in model._meta.fields:
            if not self.column_need(field):
                continue
            t, field_info = get_field_validator(field)
            r[field.attname] = (t, field_info)
        self._columns = r
    def get_prop_validator(self, prop):
        """Resolve the validator (type) for a prop; subclasses must implement."""
        raise NotImplementedError
    def get_props(self, model):
        """Populate ._props from the model's decorated prop methods.

        :param model: django model class
        :return: None
        """
        self.raise_mutually_exclusive(self.prop_include, self.prop_exclude)
        assert safe_issubclass(model, Model)
        r = {}
        for prop_method_name in dir(model):
            try:
                suppose_prop_name = re.compile(self._prop_method_regex).findall(prop_method_name)[0]
                if not suppose_prop_name:
                    assert 0
            except:
                # name does not match the prop regex -> not a prop candidate
                continue
            v = getattr(model, prop_method_name)  # v is the method
            meta_of_v = getattr(v, "prop_meta", None)
            if not meta_of_v:
                continue  # without meta, a method of the model will never become a prop
            assert callable(v), f"bad prop {prop_method_name=} in {model}, callable required."
            if not getattr(meta_of_v, "_name", None):
                meta_of_v._name = suppose_prop_name
            if not self.prop_need(meta_of_v._name):
                continue
            meta_of_v._validator = self.get_prop_validator(v)
            meta_of_v._method = v
            r[meta_of_v._name] = meta_of_v
        self._props = r
    def get_props_type_dict(self) -> dict[str, typing.Any]:
        """Return {prop name: validator/type} for every prop in ._props."""
        r = {}
        for prop_name, prop_meta in self._props.items():
            r[prop_name] = prop_meta._validator
        return r
    def check_integrity(self, model):
        """Ensure columns and props do not overlap; duplicates must be resolved explicitly."""
        intersection = set(self._props.keys()).intersection(self._columns.keys())
        if intersection:
            raise ValueError(f"duplication in columns and props {intersection} at {model}")
        self._keys = set(list(self._props.keys()) + list(self._columns.keys()))
    @classmethod
    def get_pdm_model_name(cls, model, *args):
        """Build a unique class name for the newly generated PDM."""
        return "__".join([model.__module__, model.__name__, *args]).replace(".", "_") + "__PDM"
    @classmethod
    def plugin_names(cls) -> set[str]:
        # NOTE(review): ``cls.plugins`` is not defined on this base class;
        # presumably subclasses provide it — confirm.
        r = [p.flag for p in cls.plugins]
        assert len(r) == len(set(r)), f"duplicated plugins in {cls}: {r}"
        return set(r)
    @classmethod
    def init_config(cls, model, c: Type) -> Type[BaseModelPDM]:
        """Load the user-supplied config class *c* and build the PDM for *model*.

        :param model: django model class
        :param c: configuration class whose public attributes feed this meta
        :return: the generated pydantic model class (with ._meta attached)
        """
        d = {}
        if c:
            d = {k: getattr(c, k) for k in dir(c) if not k.startswith("_") and k not in ("mro",)}
        assert isinstance(d, dict), f"misconfigured for {model} -> {c=}"
        try:
            meta = cls(**d)
        except ValidationError as e:
            assert 0, f"bad configuration argument for {model} in {c}: {str(e)}"
        meta.prepare_columns(model)
        meta.prepare_props(model)
        meta.get_columns(model)
        meta.get_props(model)
        meta.check_integrity(model)
        slot = {}
        if meta.sub_pdm_slot_dict is not ConfigUnset:
            # slot keys must refer to existing columns/props
            if not set(meta.sub_pdm_slot_dict.keys()).issubset(meta._keys):
                diff = set(meta.sub_pdm_slot_dict.keys()).difference(meta._keys)
                raise ValueError(f"{meta.sub_pdm_slot_dict} contains keys "
                                 f"which are not existed in columns or props: {diff}")
            slot = meta.sub_pdm_slot_dict
        meta._django_model = model
        # build the PDM: columns first, then props, then slot overrides
        intermediate_model_pdm = meta.base_pdm.sub({
            **meta._columns,
            **meta.get_props_type_dict(),
            **slot
        }, title=cls.get_pdm_model_name(model, c.__name__), all_optional=meta.all_optional)
        model_pdm = intermediate_model_pdm
        model_pdm._meta = meta
        return model_pdm
/ultracart_rest_sdk-4.0.173-py3-none-any.whl/ultracart/model/coupon_item_search_results_response.py | import re # noqa: F401
import sys # noqa: F401
from ultracart.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from ultracart.exceptions import ApiAttributeError
def lazy_import():
    """Import model dependencies lazily and publish them into module globals.

    Deferring these imports breaks circular-import cycles between generated
    model modules.
    """
    from ultracart.model.coupon_item_search_result import CouponItemSearchResult
    from ultracart.model.error import Error
    from ultracart.model.response_metadata import ResponseMetadata
    from ultracart.model.warning import Warning
    globals().update(
        CouponItemSearchResult=CouponItemSearchResult,
        Error=Error,
        ResponseMetadata=ResponseMetadata,
        Warning=Warning,
    )
class CouponItemSearchResultsResponse(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No attribute of this model is restricted to an enum of values.
    allowed_values = {
    }

    # No length/range/regex validations apply to this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Generated flag: this model itself may not take the JSON value null.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'error': (Error,),  # noqa: E501
            'metadata': (ResponseMetadata,),  # noqa: E501
            'search_results': ([CouponItemSearchResult],),  # noqa: E501
            'success': (bool,),  # noqa: E501
            'warning': (Warning,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator: this model does not participate in polymorphism.
        return None

    attribute_map = {
        'error': 'error',  # noqa: E501
        'metadata': 'metadata',  # noqa: E501
        'search_results': 'search_results',  # noqa: E501
        'success': 'success',  # noqa: E501
        'warning': 'warning',  # noqa: E501
    }

    # No server-generated read-only properties on this model.
    read_only_vars = {
    }

    # Not a composed (allOf/anyOf/oneOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """CouponItemSearchResultsResponse - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            error (Error): [optional]  # noqa: E501
            metadata (ResponseMetadata): [optional]  # noqa: E501
            search_results ([CouponItemSearchResult]): search_results. [optional]  # noqa: E501
            success (bool): Indicates if API call was successful. [optional]  # noqa: E501
            warning (Warning): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', True)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Deserialization path: allocate without running __init__ so read-only
        # attributes coming from the server can be assigned directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            for arg in args:
                if isinstance(arg, dict):
                    kwargs.update(arg)
                else:
                    raise ApiTypeError(
                        "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                            args,
                            self.__class__.__name__,
                        ),
                        path_to_item=_path_to_item,
                        valid_classes=(self.__class__,),
                    )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes always present on instances.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """CouponItemSearchResultsResponse - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            error (Error): [optional]  # noqa: E501
            metadata (ResponseMetadata): [optional]  # noqa: E501
            search_results ([CouponItemSearchResult]): search_results. [optional]  # noqa: E501
            success (bool): Indicates if API call was successful. [optional]  # noqa: E501
            warning (Warning): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            for arg in args:
                if isinstance(arg, dict):
                    kwargs.update(arg)
                else:
                    raise ApiTypeError(
                        "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                            args,
                            self.__class__.__name__,
                        ),
                        path_to_item=_path_to_item,
                        valid_classes=(self.__class__,),
                    )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
/tf_serving_client-0.1.9-py3-none-any.whl/tf_serving_client/tensorflow_serving/apis/regression_pb2.py |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow_serving.apis import input_pb2 as tensorflow__serving_dot_apis_dot_input__pb2
from tensorflow_serving.apis import model_pb2 as tensorflow__serving_dot_apis_dot_model__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='tensorflow_serving/apis/regression.proto',
package='tensorflow.serving',
syntax='proto3',
serialized_pb=_b('\n(tensorflow_serving/apis/regression.proto\x12\x12tensorflow.serving\x1a#tensorflow_serving/apis/input.proto\x1a#tensorflow_serving/apis/model.proto\"\x1b\n\nRegression\x12\r\n\x05value\x18\x01 \x01(\x02\"G\n\x10RegressionResult\x12\x33\n\x0bregressions\x18\x01 \x03(\x0b\x32\x1e.tensorflow.serving.Regression\"p\n\x11RegressionRequest\x12\x31\n\nmodel_spec\x18\x01 \x01(\x0b\x32\x1d.tensorflow.serving.ModelSpec\x12(\n\x05input\x18\x02 \x01(\x0b\x32\x19.tensorflow.serving.Input\"}\n\x12RegressionResponse\x12\x31\n\nmodel_spec\x18\x02 \x01(\x0b\x32\x1d.tensorflow.serving.ModelSpec\x12\x34\n\x06result\x18\x01 \x01(\x0b\x32$.tensorflow.serving.RegressionResultB\x03\xf8\x01\x01\x62\x06proto3')
,
dependencies=[tensorflow__serving_dot_apis_dot_input__pb2.DESCRIPTOR,tensorflow__serving_dot_apis_dot_model__pb2.DESCRIPTOR,])
_REGRESSION = _descriptor.Descriptor(
name='Regression',
full_name='tensorflow.serving.Regression',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='tensorflow.serving.Regression.value', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=138,
serialized_end=165,
)
_REGRESSIONRESULT = _descriptor.Descriptor(
name='RegressionResult',
full_name='tensorflow.serving.RegressionResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='regressions', full_name='tensorflow.serving.RegressionResult.regressions', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=167,
serialized_end=238,
)
_REGRESSIONREQUEST = _descriptor.Descriptor(
name='RegressionRequest',
full_name='tensorflow.serving.RegressionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='model_spec', full_name='tensorflow.serving.RegressionRequest.model_spec', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input', full_name='tensorflow.serving.RegressionRequest.input', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=240,
serialized_end=352,
)
_REGRESSIONRESPONSE = _descriptor.Descriptor(
name='RegressionResponse',
full_name='tensorflow.serving.RegressionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='model_spec', full_name='tensorflow.serving.RegressionResponse.model_spec', index=0,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='result', full_name='tensorflow.serving.RegressionResponse.result', index=1,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=354,
serialized_end=479,
)
_REGRESSIONRESULT.fields_by_name['regressions'].message_type = _REGRESSION
_REGRESSIONREQUEST.fields_by_name['model_spec'].message_type = tensorflow__serving_dot_apis_dot_model__pb2._MODELSPEC
_REGRESSIONREQUEST.fields_by_name['input'].message_type = tensorflow__serving_dot_apis_dot_input__pb2._INPUT
_REGRESSIONRESPONSE.fields_by_name['model_spec'].message_type = tensorflow__serving_dot_apis_dot_model__pb2._MODELSPEC
_REGRESSIONRESPONSE.fields_by_name['result'].message_type = _REGRESSIONRESULT
DESCRIPTOR.message_types_by_name['Regression'] = _REGRESSION
DESCRIPTOR.message_types_by_name['RegressionResult'] = _REGRESSIONRESULT
DESCRIPTOR.message_types_by_name['RegressionRequest'] = _REGRESSIONREQUEST
DESCRIPTOR.message_types_by_name['RegressionResponse'] = _REGRESSIONRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Regression = _reflection.GeneratedProtocolMessageType('Regression', (_message.Message,), dict(
DESCRIPTOR = _REGRESSION,
__module__ = 'tensorflow_serving.apis.regression_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.Regression)
))
_sym_db.RegisterMessage(Regression)
RegressionResult = _reflection.GeneratedProtocolMessageType('RegressionResult', (_message.Message,), dict(
DESCRIPTOR = _REGRESSIONRESULT,
__module__ = 'tensorflow_serving.apis.regression_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.RegressionResult)
))
_sym_db.RegisterMessage(RegressionResult)
RegressionRequest = _reflection.GeneratedProtocolMessageType('RegressionRequest', (_message.Message,), dict(
DESCRIPTOR = _REGRESSIONREQUEST,
__module__ = 'tensorflow_serving.apis.regression_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.RegressionRequest)
))
_sym_db.RegisterMessage(RegressionRequest)
RegressionResponse = _reflection.GeneratedProtocolMessageType('RegressionResponse', (_message.Message,), dict(
DESCRIPTOR = _REGRESSIONRESPONSE,
__module__ = 'tensorflow_serving.apis.regression_pb2'
# @@protoc_insertion_point(class_scope:tensorflow.serving.RegressionResponse)
))
_sym_db.RegisterMessage(RegressionResponse)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
# @@protoc_insertion_point(module_scope) | PypiClean |
/tensorflow_io-0.33.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl/tensorflow_io/python/experimental/avro_record_dataset_ops.py | """_AvroRecordDataset"""
import tensorflow as tf
from tensorflow_io.python.ops import core_ops
# Default read-ahead buffer handed to the underlying Avro reader op.
_DEFAULT_READER_BUFFER_SIZE_BYTES = 256 * 1024  # 256 KB
# Empty string = no explicit reader schema supplied (presumably the writer's
# embedded schema is then used -- TODO confirm against the kernel op).
_DEFAULT_READER_SCHEMA = ""
# From https://github.com/tensorflow/tensorflow/blob/v2.0.0/tensorflow/python/data/ops/readers.py
def _require(condition: bool, err_msg: str = None) -> None:
"""Checks if the specified condition is true else raises exception
Args:
condition: The condition to test
err_msg: If specified, it's the error message to use if condition is not true.
Raises:
ValueError: Raised when the condition is false
Returns:
None
"""
if not condition:
raise ValueError(err_msg)
# copied from https://github.com/tensorflow/tensorflow/blob/
# 3095681b8649d9a828afb0a14538ace7a998504d/tensorflow/python/data/ops/readers.py#L36
def _create_or_validate_filenames_dataset(filenames):
    """Normalize *filenames* into a ``tf.data.Dataset`` of scalar string filenames.

    A dataset argument is validated (must yield scalar ``tf.string`` elements)
    and returned as-is; anything else is converted to a flat string tensor and
    wrapped via ``from_tensor_slices``.
    """
    if isinstance(filenames, tf.data.Dataset):
        if tf.compat.v1.data.get_output_types(filenames) != tf.string:
            raise TypeError(
                "`filenames` must be a `tf.data.Dataset` of `tf.string` elements."
            )
        element_shape = tf.compat.v1.data.get_output_shapes(filenames)
        if not element_shape.is_compatible_with(tf.TensorShape([])):
            raise TypeError(
                "`filenames` must be a `tf.data.Dataset` of scalar `tf.string` "
                "elements."
            )
        return filenames

    tensor = tf.convert_to_tensor(filenames, dtype_hint=tf.string)
    if tensor.dtype != tf.string:
        raise TypeError(
            "`filenames` must be a `tf.Tensor` of dtype `tf.string` dtype."
            " Got {}".format(tensor.dtype)
        )
    flat = tf.reshape(tensor, [-1], name="flat_filenames")
    return tf.data.Dataset.from_tensor_slices(flat)
# copied from https://github.com/tensorflow/tensorflow/blob/
# 3095681b8649d9a828afb0a14538ace7a998504d/tensorflow/python/data/ops/readers.py#L67
def _create_dataset_reader(
    dataset_creator,
    filenames,
    cycle_length=None,
    num_parallel_calls=None,
    deterministic=None,
    block_length=1,
):
    """Build a reader dataset over *filenames*, optionally interleaving files.

    When ``cycle_length`` is None every file is consumed sequentially via
    ``flat_map``; otherwise ``cycle_length`` files are read concurrently and
    their records interleaved, ``block_length`` consecutive records per file
    at a time (as with ``tf.data.Dataset.interleave``).

    Args:
        dataset_creator: Callable mapping a filename tensor to a dataset of
            records (initializer for the underlying Avro record dataset).
        filenames: A ``tf.data.Dataset`` of filenames to read.
        cycle_length: Number of files processed in parallel by interleave.
        num_parallel_calls: Number of threads spawned by the interleave call.
        deterministic: Whether the interleaved output order is deterministic.
        block_length: Number of consecutive records pulled from each file
            before cycling to the next one. Defaults to 1.

    Returns:
        A dataset with the (possibly interleaved) parsed records.
    """
    def _read_one(filename):
        filename = tf.convert_to_tensor(filename, tf.string, name="filename")
        return dataset_creator(filename)

    if cycle_length is None:
        return filenames.flat_map(_read_one)

    return filenames.interleave(
        _read_one,
        cycle_length=cycle_length,
        num_parallel_calls=num_parallel_calls,
        block_length=block_length,
        deterministic=deterministic,
    )
class _AvroRecordDataset(tf.data.Dataset):
    """A `Dataset` comprising records from one or more AvroRecord files."""

    def __init__(self, filenames, buffer_size=None, reader_schema=None):
        """Creates a `AvroRecordDataset`.

        Args:
          filenames: A `tf.string` tensor containing one or more filenames.
          buffer_size: (Optional.) A `tf.int64` scalar representing the number of
            bytes in the read buffer. 0 means no buffering.
          reader_schema: (Optional.) A `tf.string` scalar
            representing the reader schema or None
        """
        self._filenames = filenames
        # Fall back to the module default (256 KB) when no buffer size is given.
        self._buffer_size = _AvroRecordDataset.__optional_param_to_tensor(
            "buffer_size",
            buffer_size,
            argument_default=_DEFAULT_READER_BUFFER_SIZE_BYTES,
        )
        # Empty-string default means no explicit reader schema was supplied.
        self._reader_schema = _AvroRecordDataset.__optional_param_to_tensor(
            "reader_schema",
            reader_schema,
            argument_default=_DEFAULT_READER_SCHEMA,
            argument_dtype=tf.dtypes.string,
        )
        # Instantiate the kernel-level dataset op and hand its variant tensor
        # to the tf.data.Dataset base class.
        variant_tensor = core_ops.io_avro_record_dataset(
            self._filenames, self._buffer_size, self._reader_schema
        )
        super().__init__(variant_tensor)

    # Copied from https://github.com/tensorflow/tensorflow/blob/f40a875355557483aeae60ffcf757fc9626c752b
    # /tensorflow/python/data/util/convert.py#L26-L35
    @staticmethod
    def __optional_param_to_tensor(
        argument_name,
        argument_value,
        argument_default=0,
        argument_dtype=tf.dtypes.int64,
    ):
        """Convert an optional argument to a tensor, or use the given default."""
        if argument_value is not None:
            return tf.convert_to_tensor(
                argument_value, dtype=argument_dtype, name=argument_name
            )
        return tf.constant(argument_default, dtype=argument_dtype, name=argument_name)

    @property
    def element_spec(self):
        # Each element is a scalar string: one serialized Avro record.
        return tf.TensorSpec([], tf.dtypes.string)

    def _inputs(self):
        # Leaf dataset: no upstream input datasets.
        return []
class AvroRecordDataset(tf.data.Dataset):
    """A `Dataset` comprising records from one or more AvroRecord files."""

    def __init__(
        self,
        filenames,
        buffer_size=None,
        num_parallel_reads=None,
        num_parallel_calls=None,
        reader_schema=None,
        deterministic=True,
        block_length=1,
    ):
        """Creates a `AvroRecordDataset` to read one or more AvroRecord files.

        Args:
          filenames: A `tf.string` tensor or `tf.data.Dataset` containing one or
            more filenames.
          buffer_size: (Optional.) A `tf.int64` scalar representing the number of
            bytes in the read buffer. If your input pipeline is I/O bottlenecked,
            consider setting this parameter to a value 1-100 MBs. If `None`, a
            sensible default for both local and remote file systems is used.
          num_parallel_reads: (Optional.) A `tf.int64` scalar representing the
            number of files to read in parallel. If greater than one, the records
            of files read in parallel are outputted in an interleaved order. If
            your input pipeline is I/O bottlenecked, consider setting this
            parameter to a value greater than one to parallelize the I/O. If
            `None`, files will be read sequentially. Must be equal to or greater
            than `num_parallel_calls`: it becomes the `cycle_length` of the
            underlying `Dataset.interleave`, which must be at least the number
            of worker threads.
          num_parallel_calls: (Optional.) Number of threads to spawn. Must be
            `None`, `tf.data.experimental.AUTOTUNE`, or greater than 0, and less
            than or equal to `num_parallel_reads`.
          reader_schema: (Optional.) A `tf.string` scalar representing the reader
            schema or None.
          deterministic: (Optional.) A boolean controlling whether determinism
            should be traded for performance by allowing elements to be produced
            out of order. Defaults to `True`.
          block_length: Number of consecutive records pulled from each file
            before cycling to the next one. Defaults to 1.

        Raises:
          TypeError: If any argument does not have the expected type.
          ValueError: If any argument does not have the expected shape.
        """
        _require(
            num_parallel_calls is None
            or num_parallel_calls == tf.data.experimental.AUTOTUNE
            or num_parallel_calls > 0,
            f"num_parallel_calls: {num_parallel_calls} must be set to None, "
            f"tf.data.experimental.AUTOTUNE, or greater than 0",
        )
        if num_parallel_calls is not None:
            _require(
                num_parallel_reads is not None
                and (
                    num_parallel_reads >= num_parallel_calls
                    or num_parallel_reads == tf.data.experimental.AUTOTUNE
                ),
                f"num_parallel_reads: {num_parallel_reads} must be greater than or equal to "
                f"num_parallel_calls: {num_parallel_calls} or set to tf.data.experimental.AUTOTUNE",
            )

        filenames = _create_or_validate_filenames_dataset(filenames)
        self._filenames = filenames
        self._buffer_size = buffer_size
        self._num_parallel_reads = num_parallel_reads
        self._num_parallel_calls = num_parallel_calls
        self._reader_schema = reader_schema
        # Remember `deterministic` so `_clone` can propagate it; the previous
        # implementation dropped it and shifted `block_length` into the
        # `deterministic` positional slot when cloning.
        self._deterministic = deterministic
        self._block_length = block_length

        def read_multiple_files(filenames):
            return _AvroRecordDataset(filenames, buffer_size, reader_schema)

        self._impl = _create_dataset_reader(
            read_multiple_files,
            filenames,
            cycle_length=num_parallel_reads,
            num_parallel_calls=num_parallel_calls,
            deterministic=deterministic,
            block_length=block_length,
        )
        variant_tensor = self._impl._variant_tensor  # pylint: disable=protected-access
        super().__init__(variant_tensor)

    def _clone(
        self,
        filenames=None,
        buffer_size=None,
        num_parallel_reads=None,
        num_parallel_calls=None,
        reader_schema=None,
        block_length=None,
    ):
        """Return a copy of this dataset, overriding any argument that is given.

        Arguments are forwarded by keyword: the original positional call
        passed `block_length` into `__init__`'s `deterministic` parameter.
        """
        return AvroRecordDataset(
            filenames or self._filenames,
            buffer_size=buffer_size or self._buffer_size,
            num_parallel_reads=num_parallel_reads or self._num_parallel_reads,
            num_parallel_calls=num_parallel_calls or self._num_parallel_calls,
            reader_schema=reader_schema or self._reader_schema,
            deterministic=self._deterministic,
            block_length=block_length or self._block_length,
        )

    def _inputs(self):
        return self._impl._inputs()  # pylint: disable=protected-access

    @property
    def element_spec(self):
        # Each element is one serialized Avro record: a scalar string.
        return tf.TensorSpec([], tf.dtypes.string)
/crunch-cli-1.8.2.tar.gz/crunch-cli-1.8.2/crunch/main.py | import os
import logging
import click
from . import command, constants, utils, api, library, tester
# Shared API session; initialised by the top-level `cli` group before any
# sub-command runs. Every command reads this module-level handle.
session = None
# Global debug flag mirroring the --debug option / DEBUG env var.
debug = False
@click.group()
@click.option("--debug", "enable_debug", envvar=constants.DEBUG_ENV_VAR, is_flag=True, help="Enable debug output.")
@click.option("--api-base-url", envvar=constants.API_BASE_URL_ENV_VAR, default=constants.API_BASE_URL_DEFAULT, help="Set the API base url.")
@click.option("--web-base-url", envvar=constants.WEB_BASE_URL_ENV_VAR, default=constants.WEB_BASE_URL_DEFAULT, help="Set the Web base url.")
@click.option("--notebook", is_flag=True, help="Tell the CLI you are running the command while inside a notebook.")
def cli(
    enable_debug: bool,
    api_base_url: str,
    web_base_url: str,
    notebook: bool,
):
    """Root command group: stores the global options and opens the shared
    API session that every sub-command uses."""
    global debug
    debug = enable_debug
    global session
    # Session is shared through the module-level `session` global.
    session = utils.CustomSession(
        web_base_url,
        api_base_url,
        debug,
        notebook,
    )
@cli.command(help="Setup a workspace directory with the latest submission of you code.")
@click.option("--token", "clone_token", required=True, help="Clone token to use.")
@click.option("--submission", "submission_number", required=False, type=int, help="Submission number to clone. (latest if not specified)")
@click.option("--no-data", is_flag=True, help="Do not download the data. (faster)")
@click.option("--no-model", is_flag=True, help="Do not download the model of the cloned submission.")
@click.option("--force", "-f", is_flag=True, help="Deleting the old directory (if any).")
@click.option("--model-directory", "model_directory_path", default="resources", show_default=True, help="Directory where your model is stored.")
@click.argument("project-name", required=True)
@click.argument("directory", default="{projectName}")
def setup(
clone_token: str,
submission_number: str,
no_data: bool,
no_model: bool,
force: bool,
project_name: str,
directory: str,
model_directory_path: str,
):
directory = directory.replace("{projectName}", project_name)
command.setup(
session,
clone_token=clone_token,
submission_number=submission_number,
project_name=project_name,
directory=directory,
model_directory=model_directory_path,
force=force,
no_model=no_model,
)
if not no_data:
os.chdir(directory)
try:
command.download(session, force=True)
except api.CurrentCrunchNotFoundException:
command.download_no_data_available()
print("\n---")
print(f"Success! Your environment has been correctly setup.")
print(f"Next recommended actions:")
print(f" - To get inside your workspace directory, run: cd {directory}")
print(f" - To see all of the available commands of the CrunchDAO CLI, run: crunch --help")
@cli.command(help="Send the new submission of your code.")
@click.option("-m", "--message", prompt=True, default="", help="Specify the change of your code. (like a commit message)")
@click.option("--main-file", "main_file_path", default="main.py", show_default=True, help="Entrypoint of your code.")
@click.option("--model-directory", "model_directory_path", default="resources", show_default=True, help="Directory where your model is stored.")
@click.option("--export", "export_path", show_default=True, type=str, help="Copy the `.tar` to the specified file.")
@click.option("--no-pip-freeze", is_flag=True, help="Do not do a `pip freeze` to know preferred packages version.")
def push(
message: str,
main_file_path: str,
model_directory_path: str,
export_path: str,
no_pip_freeze: bool,
):
utils.change_root()
converted = False
if not os.path.exists(main_file_path):
print(f"missing {main_file_path}")
file_name, _ = os.path.splitext(os.path.basename(main_file_path))
dirpath = os.path.dirname(main_file_path)
path_without_extension = os.path.join(dirpath, file_name)
notebook_file_path = f"{path_without_extension}.ipynb"
if not os.path.exists(notebook_file_path):
raise click.Abort()
main_file_path = constants.CONVERTED_MAIN_PY
command.convert(notebook_file_path, main_file_path)
converted = True
try:
submission = command.push(
session,
message=message,
main_file_path=main_file_path,
model_directory_path=model_directory_path,
export_path=export_path,
include_installed_packages_version=not no_pip_freeze
)
if submission:
command.push_summary(submission, session)
finally:
if converted:
os.unlink(main_file_path)
@cli.command(help="Test your code locally.")
@click.option("--main-file", "main_file_path", default="main.py", show_default=True, help="Entrypoint of your code.")
@click.option("--model-directory", "model_directory_path", default="resources", show_default=True, help="Directory where your model is stored.")
@click.option("--no-force-first-train", is_flag=True, help="Do not force the train at the first loop.")
@click.option("--train-frequency", default=1, show_default=True, help="Train interval.")
@click.option("--skip-library-check", is_flag=True, help="Skip forbidden library check.")
def test(
main_file_path: str,
model_directory_path: str,
no_force_first_train: bool,
train_frequency: int,
skip_library_check: bool
):
utils.change_root()
tester.install_logger()
if not skip_library_check and os.path.exists(constants.REQUIREMENTS_TXT):
library.scan(session, requirements_file=constants.REQUIREMENTS_TXT)
logging.warn('')
command.test(
session,
main_file_path=main_file_path,
model_directory_path=model_directory_path,
force_first_train=not no_force_first_train,
train_frequency=train_frequency,
)
@cli.command(help="Download the data locally.")
def download():
utils.change_root()
try:
command.download(session)
except api.CurrentCrunchNotFoundException:
command.download_no_data_available()
@cli.command(help="Convert a notebook to a python script.")
@click.option("--override", is_flag=True, help="Force overwrite of the python file.")
@click.argument("notebook-file-path", required=True)
@click.argument("python-file-path", default="main.py")
def convert(
override: bool,
notebook_file_path: str,
python_file_path: str,
):
command.convert(
notebook_file_path=notebook_file_path,
python_file_path=python_file_path,
override=override,
)
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    cli()
/MACS_virtual_experiment-0.168.tar.gz/MACS_virtual_experiment-0.168/MACS_virtual_experiment/scripting/macs_to_mcstas.py |
from __future__ import print_function
import sys
import re
import numpy as np
import os
import glob
import os.path
# Base name used for all generated parameter files, scripts and output dirs.
baseName='tiO2_110'
# Maximum number of concurrent background mcrun jobs per batch.
num_threads = 12

#number simulated neutrons
# n_mono: neutrons per monochromator run; n: neutrons per kidney/sample run.
n_mono = 1e6
n=1e6
def getParams(ng0file):
    """Extract instrument-simulation parameters from a MACS ``.ng0`` scan file.

    The header of a .ng0 file contains a ``#ScanDescr`` line (colon-separated
    ``key=value`` fields, including ``Npts``, ``Counts`` and the scan
    ``Range``) and a ``#Columns`` line naming the data columns; the numeric
    rows below it are the measured points.

    Args:
        ng0file: Path of the .ng0 file to parse.

    Returns:
        Tuple ``(param_dictionary, N, Counts, scan_type, scan_min, scan_max,
        data)`` where:
            param_dictionary: Reserved for instrument parameters; currently
                always empty (kept for interface compatibility).
            N: Number of points declared in the scan description.
            Counts: Monitor counts per point.
            scan_type: Name of the scanned motor (e.g. ``A3``).
            scan_min, scan_max: Minimum / maximum of the measured A3 column.
            data: Structured numpy array of the data rows, keyed by the
                column labels.

    Raises:
        ValueError: If the ``#ScanDescr`` or ``#Columns`` header is missing.
    """
    param_dictionary = {}

    with open(ng0file, 'r') as f:
        lines = f.readlines()

    # The header rows have no fixed position; scan until both are found.
    # Blank lines are skipped instead of crashing on tokens[0].
    scan_descr_index = None
    label_index = None
    for i, line in enumerate(lines):
        tokens = line.split()
        if not tokens:
            continue
        if tokens[0] == '#ScanDescr':
            scan_descr_index = i
        elif tokens[0] == '#Columns':
            label_index = i
        if scan_descr_index is not None and label_index is not None:
            break
    if scan_descr_index is None or label_index is None:
        raise ValueError(
            str(ng0file) + ': missing #ScanDescr or #Columns header line'
        )

    labels = lines[label_index].split()[1:]

    # The scan description is a colon-separated list of key=value entries,
    # e.g. ...:Npts=41:Counts=1e5:Range=A3=-90.0,90.0:...
    ScanDescr = lines[scan_descr_index].split(':')
    for i in range(len(ScanDescr)):
        key = ScanDescr[i].split('=')[0]
        if key == 'Npts':
            N_i = i
        elif key == 'Counts':
            Counts_i = i
        elif key == 'Range':
            scan_i = i

    # Re-read the numeric body with the labels found above; the '#' header
    # lines are skipped automatically by genfromtxt's comment handling.
    data = np.genfromtxt(ng0file, names=labels)

    N = int(ScanDescr[N_i].split('=')[1])
    Counts = int(float(ScanDescr[Counts_i].split('=')[1]))
    scan = ScanDescr[scan_i].split('=')
    scan_type = scan[1]
    scan_min = np.min(data['A3'])
    scan_max = np.max(data['A3'])

    return param_dictionary, N, Counts, scan_type, scan_min, scan_max, data
class ProgressBar(object):
    """Simple in-place terminal progress bar.

    ``%(name)d`` placeholders in the format string are widened so that the
    rendered numbers line up with the number of digits of ``total``.
    """

    DEFAULT = 'Progress: %(bar)s %(percent)3d%%'
    FULL = '%(bar)s %(current)d/%(total)d (%(percent)3d%%) %(remaining)d to go'

    def __init__(self, total, width=40, fmt=DEFAULT, symbol='=',
                 output=sys.stderr):
        assert len(symbol) == 1
        self.total = total
        self.width = width
        self.symbol = symbol
        self.output = output
        # Pad every %(name)d field to the digit count of `total`.
        width_spec = '%dd' % len(str(total))
        self.fmt = re.sub(r'(?P<name>%\(.+?\))d', r'\g<name>' + width_spec, fmt)
        self.current = 0

    def __call__(self):
        """Redraw the bar in place (carriage return, no newline)."""
        fraction = self.current / float(self.total)
        filled = int(self.width * fraction)
        bar = '[' + self.symbol * filled + ' ' * (self.width - filled) + ']'
        fields = {
            'total': self.total,
            'bar': bar,
            'current': self.current,
            'percent': fraction * 100,
            'remaining': self.total - self.current,
        }
        print('\r' + self.fmt % fields, file=self.output, end='')

    def done(self):
        """Snap the bar to 100% and terminate the line."""
        self.current = self.total
        self()
        print('', file=self.output)
def writeParams(param_file, param_dict, data):
    """Write the parameter file for a kidney (sample) simulation.

    Only the parameters that vary between scans are written; every other
    instrument parameter keeps the default compiled into the McStas
    instrument. The file is closed before returning.

    Args:
        param_file: Writable file object for the parameter file.
        param_dict: Unused; kept for interface compatibility.
        data: Structured array / mapping (as returned by ``getParams``) with
            at least the ``Kidney``, ``Ei`` and ``Ef`` columns.
    """
    kidney_angle = np.mean(data['Kidney'])
    Ei = np.mean(data['Ei'])
    Ef = np.mean(data['Ef'])

    param_file.write('A3_angle=0 \n')
    param_file.write('kidney_angle=' + str(kidney_angle) + ' \n')
    param_file.write('EM=' + str(Ei) + '\n')
    param_file.write('EF_all=' + str(Ef) + '\n')
    # The remaining instrument parameters (HF, VF, MPL, slit sizes, ...)
    # were intentionally disabled in an earlier revision; the instrument
    # defaults are used instead. Closing here restores the close() call that
    # was accidentally swallowed when that block was commented out.
    param_file.close()
def writeParams_mono(param_file, param_dict, data):
    """Write the parameter file for a monochromator simulation.

    Only the incident and final energies are written; the other
    monochromator parameters keep the defaults compiled into the McStas
    instrument. The file is closed before returning (callers may call
    ``close()`` again; that is a harmless no-op).

    Args:
        param_file: Writable file object for the parameter file.
        param_dict: Unused; kept for interface compatibility.
        data: Structured array / mapping (as returned by ``getParams``) with
            at least the ``Ei`` and ``Ef`` columns.
    """
    Ei = np.mean(data['Ei'])
    Ef = np.mean(data['Ef'])

    param_file.write('EM=' + str(Ei) + '\n')
    param_file.write('EF_all=' + str(Ef) + '\n')
    # The remaining parameters were intentionally disabled in an earlier
    # revision; the close() below restores the call that was accidentally
    # swallowed when that block was commented out.
    param_file.close()
def speed_writeScriptLine(script_file, angle, Ei_i, Ef, baseName, speed_param, N, A3min, A3max):
    """Append one kidney-simulation mcrun call to the batch script.

    Args:
        script_file: Writable file object for the shell script.
        angle: Kidney angle of this scan (used in directory and file names).
        Ei_i: Incident energy in meV.
        Ef: Final energy in meV (already rounded by the caller; previously
            this function re-read the module-level ``data`` global instead
            of using this parameter — same value, now explicit).
        baseName: Base name shared by all generated files.
        speed_param: When True, append ``&`` so the job runs in the
            background, allowing several simulations to run concurrently.
        N: Number of A3 points in the scan.
        A3min: First A3 angle of the scan.
        A3max: Last A3 angle of the scan.

    Note:
        Still relies on the module-level ``n`` (neutron count) global.
        A dead ``else`` branch referencing an undefined ``Steps`` name was
        removed; it was unreachable behind a constant ``if 1==1`` guard.
    """
    # Background the job unless this call is a batch synchronisation point.
    token = ' &' if speed_param else ''
    script_file.write(
        'mcrun -d ' + baseName + '_Ei=' + str(round(Ei_i, 3)) + 'meV_A4=' + str(round(angle, 3))
        + ' -p ' + baseName + '_param_' + str(round(angle, 3)) + '_Ei=' + str(round(Ei_i, 3))
        + 'meV_Ef=' + str(round(Ef, 3)) + 'meV.txt -n ' + str(n) + ' -N' + str(N)
        + ' A3_angle=' + str(A3min) + ',' + str(A3max)
        + ' MACS_sample_kidney.instr >/dev/null' + token + '\n'
    )
def speed_writeScriptLine_monochromator(script_file, Ei, angle, baseName, speed_param, n_mono=1e6):
    """Append one monochromator mcrun call to the batch script.

    Args:
        script_file: Writable file object for the shell script.
        Ei: Incident energy in meV (names the output directory and file).
        angle: Kidney angle; currently unused but kept for interface
            compatibility with existing callers.
        baseName: Base name shared by all generated files.
        speed_param: When True, append ``&`` so the job runs in the background.
        n_mono: Number of simulated neutrons.

    Note:
        A dead duplicate ``else`` branch behind a constant ``if 1==1`` guard
        was removed; it wrote the exact same line.
    """
    # Background the job unless this call is a batch synchronisation point.
    token = ' &' if speed_param else ''
    energy_tag = str(round(Ei, 3))
    script_file.write(
        'mcrun -d ' + baseName + '_monochromator_Ei_' + energy_tag
        + ' -p ' + baseName + '_monochromator_' + energy_tag + 'meV.txt'
        + ' -n ' + str(n_mono) + ' MACS_monochromator.instr' + token + '\n'
    )
#This first bit should only be done if the monochromator Ei's haven't been simulated already
#First gather a list of all .ng0 files in the directory.
ng0_files = glob.glob('*.ng0')
#Loop through and create necessary monochromator script file to be run first.
if not os.path.exists('MACS_monochromator.c'):
    # Warn (but do not abort) when the monochromator instrument has not been
    # compiled yet; the generated scripts would fail at run time.
    print('Monochromator must be compiled first!')
    print('Try running mcrun -c -n 1e7 MACS_monochromator.instr')
    #quit()
Ei_vals = []
mono_script_file = open(baseName+'_monochromator.sh','w')
i=0
for file in ng0_files:
    param_dictionary,N,Counts,scan_type,scan_min,scan_max,data = getParams(file)
    Ei = np.mean(data['Ei'])
    angle = np.mean(data['Kidney'])
    # Write a parameter file for this Ei only once, remembering seen energies.
    if not os.path.exists(baseName+'_monochromator_'+str(round(Ei,3))+'meV.txt'):
        Ei_vals.append(Ei)
        mono_param_file = open(baseName+'_monochromator_'+str(round(Ei,3))+'meV.txt','w')
        writeParams_mono(mono_param_file,param_dictionary,data)
        mono_param_file.close()
    # NOTE(review): an Ei appended in the branch above is already in Ei_vals
    # here, so its mcrun line is never added to the script -- confirm whether
    # that is intentional or the two membership checks should be merged.
    if Ei not in Ei_vals:
        # Every num_threads-th entry runs in the foreground so that at most
        # num_threads simulations execute concurrently.
        if i%num_threads==0:
            speed_param=False
        else:
            speed_param=True
        Ei_vals.append(Ei)
        #Add this value to the script
        speed_writeScriptLine_monochromator(mono_script_file, Ei, angle, baseName, speed_param,n_mono=1e6,)
        i=i+1
#Monochromator sim should be ready at this point.
mono_script_file.close()
os.system('chmod +x '+baseName+'_monochromator.sh')
#Prepare the kidney scans, almost always a particular kidney angle with A3 scan.
kidney_script_file = open(baseName+'.sh','w')
i=1
progress= ProgressBar(len(ng0_files),fmt=ProgressBar.FULL)
for file in ng0_files:
    param_dictionary,N,Counts,scan_type,scan_min,scan_max,data = getParams(file)
    #Check to make sure this is an A3 scan- other types not supported.
    if scan_type != 'a3' and scan_type!='A3':
        # Warn but continue: the generated line will still assume an A3 scan.
        print(' ')
        print('Deteted Scan type: '+str(scan_type))
        print('Warning- scan types that are not A3 are not yet supported')
    #Write the param file.
    angle = np.mean(data['Kidney'])
    param_file_name = baseName+'_param_'+str(round(angle,3))+'_Ei='+str(round(np.mean(data['Ei']),3))+'meV_Ef='+str(round(np.mean(data['Ef']),3))+'meV.txt'
    param_file = open(param_file_name,'w')
    writeParams(param_file, param_dictionary, data)
    # Throttle: every num_threads-th mcrun call runs in the foreground.
    if i%num_threads==0:
        speed_param=False
    else:
        speed_param=True
    speed_writeScriptLine(kidney_script_file, angle,np.mean(data['Ei']),round(np.mean(data['Ef']),3), baseName, speed_param,N,scan_min,scan_max)
    i=i+1
    progress.current+=1
    progress()
kidney_script_file.close()
os.system('chmod +x '+baseName+'.sh')
/chronos-python-1.2.1.tar.gz/chronos-python-1.2.1/bin/chronos-nagios.py |
# The MIT License (MIT)
#
# Copyright (c) 2014 Asher Feldman
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import sys
import re
import argparse
import logging
import chronos
def match_prefix(prefixes=(), job=''):
    """Return True if *job* matches any of the given regex prefixes.

    Each entry of *prefixes* is treated as a regular expression anchored at
    the beginning of the job name.

    Args:
        prefixes: Iterable of regex prefix patterns (default: none).
        job: Chronos job name to test.
    """
    # Tuple default avoids the shared-mutable-default pitfall of `[]`.
    return any(re.search('^' + prefix, job) for prefix in prefixes)
def main():
    """Query Chronos and emit a Nagios-style status line and exit code.

    Exit codes follow the Nagios convention: 0 OK, 1 WARNING, 2 CRITICAL,
    3 UNKNOWN.
    """
    parser = argparse.ArgumentParser(description="Monitor the status of Chronos Jobs")
    parser.add_argument("--hostname", metavar="<host:port>", required=True,
                        help="hostname and port of the Chronos instance")
    parser.add_argument("--prefix", metavar="job-prefix", required=False, action="append",
                        help="if set, only check jobs matching this prefix")
    parser.add_argument("--exclude", metavar="job-prefix", required=False, action="append",
                        help="if set, exclude jobs matching this prefix")
    parser.add_argument("--warn", metavar="#", default=1,
                        help="warn if at least this number of jobs are currently failed")
    parser.add_argument("--crit", metavar="#", default=1,
                        help="critical if at least this number of jobs are currently failed")
    args = parser.parse_args()

    fails = []
    ok = []
    unknown = []

    c = chronos.connect(args.hostname)
    cjobs = c.list()
    if not isinstance(cjobs, list):
        print("UNKNOWN: error querying chronos")
        sys.exit(3)

    for job in cjobs:
        # Disabled jobs are never scheduled; ignore them.
        if job['disabled']:
            continue
        if isinstance(args.prefix, list):
            if not match_prefix(args.prefix, job['name']):
                continue
        if isinstance(args.exclude, list):
            if match_prefix(args.exclude, job['name']):
                continue
        # A job whose last error is newer than its last success is failed;
        # one that has succeeded at least once is OK; anything else has not
        # produced any result yet.
        # Previously the names were .encode('ascii')-ed (a Python 2 leftover)
        # which leaked b'...' prefixes into the alert text on Python 3.
        if job['lastError'] > job['lastSuccess']:
            fails.append(job['name'])
        elif job['lastSuccess']:
            ok.append(job['name'])
        else:
            unknown.append(job['name'])

    if len(unknown) > 0:
        umsg = "(%d waiting for execution or with no data)" % len(unknown)
    else:
        umsg = ''

    # NOTE(review): when 0 < len(fails) < --warn, no message is printed and
    # the implicit return exits 0 -- confirm whether that silent-OK case is
    # intentional.
    if len(fails) == 0:
        print("OK: %d jobs succeeded on last run %s" % (len(ok), umsg))
        sys.exit(0)
    elif len(fails) >= int(args.crit):
        print("CRITICAL: %d failed jobs: %s %s" % (len(fails), str(fails).strip('[]'), umsg))
        sys.exit(2)
    elif len(fails) >= int(args.warn):
        print("WARNING: %d failed jobs: %s %s" % (len(fails), str(fails).strip('[]'), umsg))
        sys.exit(1)
if __name__ == "__main__":
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level="WARN")
main() | PypiClean |
/django-widgy-0.9.2.tar.gz/django-widgy-0.9.2/widgy/static/widgy/js/lib/ckeditor/plugins/a11yhelp/dialogs/lang/ca.js | /*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("a11yhelp","ca",{title:"Instruccions d'Accessibilitat",contents:"Continguts de l'Ajuda. Per tancar aquest quadre de diàleg premi ESC.",legend:[{name:"General",items:[{name:"Editor de barra d'eines",legend:"Premi ${toolbarFocus} per desplaçar-se per la barra d'eines. Vagi en el següent i anterior grup de barra d'eines amb TAB i SHIFT-TAB. Vagi en el següent i anterior botó de la barra d'eines amb RIGHT ARROW i LEFT ARROW. Premi SPACE o ENTER per activar el botó de la barra d'eines."},
{name:"Editor de quadre de diàleg",legend:"Dins d'un quadre de diàleg, premi la tecla TAB per desplaçar-se al següent camp del quadre de diàleg, premi SHIFT + TAB per desplaçar-se a l'anterior camp, premi ENTER per acceptar el quadre de diàleg, premi ESC per cancel·lar el quadre de diàleg. Per els quadres de diàleg que tenen diverses pestanyes, premi ALT + F10 per anar a la llista de pestanyes. Després podrà desplaçar-se a la següent pestanya amb TAB o RIGHT ARROW. Anar a la pestanya anterior amb SHIFT + TAB o LEFT ARROW. Premi SPACE o ENTER per seleccionar la pestanya."},
{name:"Editor de menú contextual",legend:"Premi ${contextMenu} o APPLICATION KEY per obrir el menú contextual. Després desplacis a la següent opció del menú amb TAB o DOWN ARROW. Desplacis a l'anterior opció amb SHIFT+TAB o UP ARROW. Premi SPACE o ENTER per seleccionar l'opció del menú. Obri el submenú de l'actual opció utilitzant SPACE o ENTER o RIGHT ARROW. Pot tornar a l'opció del menú pare amb ESC o LEFT ARROW. Tanqui el menú contextual amb ESC."},{name:"Editor de caixa de llista",legend:"Dins d'un quadre de llista, desplacis al següent element de la llista amb TAB o DOWN ARROW. Desplacis a l'anterior element de la llista amb SHIFT + TAB o UP ARROW. Premi SPACE o ENTER per seleccionar l'opció de la llista. Premi ESC per tancar el quadre de llista."},
{name:"Editor de barra de ruta de l'element",legend:"Premi ${elementsPathFocus} per anar als elements de la barra de ruta. Desplacis al botó de l'element següent amb TAB o RIGHT ARROW. Desplacis a l'anterior botó amb SHIFT+TAB o LEFT ARROW. Premi SPACE o ENTER per seleccionar l'element a l'editor."}]},{name:"Ordres",items:[{name:"Desfer ordre",legend:"Premi ${undo}"},{name:"Refer ordre",legend:"Premi ${redo}"},{name:"Ordre negreta",legend:"Premi ${bold}"},{name:"Ordre cursiva",legend:"Premi ${italic}"},
{name:"Ordre subratllat",legend:"Premi ${underline}"},{name:"Ordre enllaç",legend:"Premi ${link}"},{name:"Ordre amagar barra d'eines",legend:"Premi ${toolbarCollapse}"},{name:"Ordre per accedir a l'anterior espai enfocat",legend:"Premi ${accessPreviousSpace} per accedir a l'enfocament d'espai més proper inabastable abans del símbol d'intercalació, per exemple: dos elements HR adjacents. Repetiu la combinació de tecles per arribar a enfocaments d'espais distants."},{name:"Ordre per accedir al següent espai enfocat",
legend:"Premi ${accessNextSpace} per accedir a l'enfocament d'espai més proper inabastable després del símbol d'intercalació, per exemple: dos elements HR adjacents. Repetiu la combinació de tecles per arribar a enfocaments d'espais distants."},{name:"Ajuda d'accessibilitat",legend:"Premi ${a11yHelp}"}]}],backspace:"Retrocés",tab:"Tabulació",enter:"Intro",shift:"Majúscules",ctrl:"Ctrl",alt:"Alt",pause:"Pausa",capslock:"Bloqueig de majúscules",escape:"Escape",pageUp:"Pàgina Amunt",pageDown:"Pàgina Avall",
end:"Fi",home:"Inici",leftArrow:"Fletxa Esquerra",upArrow:"Fletxa Amunt",rightArrow:"Fletxa Dreta",downArrow:"Fletxa Avall",insert:"Inserir","delete":"Eliminar",leftWindowKey:"Tecla Windows Esquerra",rightWindowKey:"Tecla Windows Dreta",selectKey:"Tecla Seleccionar",numpad0:"Teclat Numèric 0",numpad1:"Teclat Numèric 1",numpad2:"Teclat Numèric 2",numpad3:"Teclat Numèric 3",numpad4:"Teclat Numèric 4",numpad5:"Teclat Numèric 5",numpad6:"Teclat Numèric 6",numpad7:"Teclat Numèric 7",numpad8:"Teclat Numèric 8",
numpad9:"Teclat Numèric 9",multiply:"Multiplicació",add:"Suma",subtract:"Resta",decimalPoint:"Punt Decimal",divide:"Divisió",f1:"F1",f2:"F2",f3:"F3",f4:"F4",f5:"F5",f6:"F6",f7:"F7",f8:"F8",f9:"F9",f10:"F10",f11:"F11",f12:"F12",numLock:"Bloqueig Teclat Numèric",scrollLock:"Bloqueig de Desplaçament",semiColon:"Punt i Coma",equalSign:"Símbol Igual",comma:"Coma",dash:"Guió",period:"Punt",forwardSlash:"Barra Diagonal",graveAccent:"Accent Obert",openBracket:"Claudàtor Obert",backSlash:"Barra Invertida",
closeBracket:"Claudàtor Tancat",singleQuote:"Cometa Simple"}); | PypiClean |
/decentriq_platform-0.21.0-py3-none-any.whl/decentriq_platform/dataset_sink/__init__.py | from google.protobuf.json_format import MessageToDict
from ..proto import serialize_length_delimited, ComputeNodeFormat, parse_length_delimited
from ..node import Node
from typing import Optional, List
from .proto import (
DatasetSinkWorkerConfiguration,
SinkInput
)
__docformat__ = "restructuredtext"
__pdoc__ = {
"proto": False,
}
class DatasetSink(Node):
    """
    Compute node that re-encrypts its input data with a user-provided key
    and registers the result as a dataset entry in the database.

    With this node, the output of a computation can be stored as a
    stand-alone dataset and later published to other Data Clean Rooms.
    """

    def __init__(
        self,
        name: str,
        inputs: List[SinkInput],
        encryption_key_dependency: str,
        dataset_import_id: Optional[str] = None,
        is_key_hex_encoded: bool = False,
    ) -> None:
        """
        Create a dataset sink node.

        **Parameters**:
        - `name`: A human-readable identifier of the node.
        - `inputs`: The data sources to read from. Every dataset derived
          from these inputs is encrypted with the provided key.
          An input is either a single raw input (e.g. a whole ZIP archive)
          or a selection of files inside an input ZIP; a separate dataset
          is created for each selected file.
        - `encryption_key_dependency`: Id of the node providing the
          encryption key — usually a data node to which the binary key
          material is published.
        - `dataset_import_id`: Identifier of the associated `DatasetImport`;
          may be omitted when this node is used directly.
        - `is_key_hex_encoded`: Whether the key material served by the key
          node is hex-encoded; when set, the node decodes it first.
        """
        worker_configuration = DatasetSinkWorkerConfiguration(
            inputs=inputs,
            encryptionKeyDependency=encryption_key_dependency,
            datasetImportId=dataset_import_id,
            isKeyHexEncoded=is_key_hex_encoded,
        )
        # The node depends on every input plus the node serving the key.
        node_dependencies = [sink_input.dependency for sink_input in inputs]
        node_dependencies.append(encryption_key_dependency)
        super().__init__(
            name,
            config=serialize_length_delimited(worker_configuration),
            enclave_type="decentriq.dataset-sink-worker",
            dependencies=node_dependencies,
            output_format=ComputeNodeFormat.ZIP,
        )
class DatasetSinkWorkerDecoder:
    """Decodes a serialized `DatasetSinkWorkerConfiguration` into a dict."""

    def decode(self, config: bytes):
        # Parse the length-delimited protobuf, then render it as a plain dict.
        configuration = DatasetSinkWorkerConfiguration()
        parse_length_delimited(config, configuration)
        return MessageToDict(configuration)
from .helpers import store_computation_result_as_dataset
__all__ = [
"DatasetSink",
"store_computation_result_as_dataset"
] | PypiClean |
/trackian-homebew-facebook-business-14.0.0.tar.gz/trackian-homebew-facebook-business-14.0.0/facebook_business/adobjects/adstudy.py |
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.abstractobject import AbstractObject
from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject
from facebook_business.adobjects.objectparser import ObjectParser
from facebook_business.api import FacebookRequest
from facebook_business.typechecker import TypeChecker
"""
This class is auto-generated.
For any issues or feature requests related to this class, please let us know on
github and we'll fix in our codegen framework. We'll not be able to accept
pull request for this class.
"""
class AdStudy(
    AbstractCrudObject,
):
    """CRUD wrapper for the Graph API `AdStudy` node (lift / split-test studies).

    All endpoint methods share the same request-dispatch plumbing — attach
    params/fields, then either enqueue on a batch, return a pending request,
    or execute — which is factored into the private `_dispatch_request`
    helper below.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isAdStudy = True
        super(AdStudy, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Graph API field names for this node.
        business = 'business'
        canceled_time = 'canceled_time'
        cooldown_start_time = 'cooldown_start_time'
        created_by = 'created_by'
        created_time = 'created_time'
        description = 'description'
        end_time = 'end_time'
        id = 'id'
        name = 'name'
        observation_end_time = 'observation_end_time'
        results_first_available_date = 'results_first_available_date'
        start_time = 'start_time'
        type = 'type'
        updated_by = 'updated_by'
        updated_time = 'updated_time'
        cells = 'cells'
        client_business = 'client_business'
        confidence_level = 'confidence_level'
        objectives = 'objectives'
        viewers = 'viewers'

    class Type:
        # Allowed values for the `type` parameter/field.
        continuous_lift_config = 'CONTINUOUS_LIFT_CONFIG'
        geo_lift = 'GEO_LIFT'
        lift = 'LIFT'
        split_test = 'SPLIT_TEST'

    # @deprecated get_endpoint function is deprecated
    @classmethod
    def get_endpoint(cls):
        return 'ad_studies'

    # @deprecated api_create is being deprecated
    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.adobjects.business import Business
        return Business(api=self._api, fbid=parent_id).create_ad_study(fields, params, batch, success, failure, pending)

    @staticmethod
    def _warn_if_callbacks_without_batch(batch, success, failure):
        # success/failure callbacks are only honored on batched requests;
        # warn when they are supplied without a batch.
        from facebook_business.utils import api_utils
        if batch is None and (success is not None or failure is not None):
            api_utils.warning('`success` and `failure` callback only work for batch call.')

    def _dispatch_request(self, request, fields, params, batch, success, failure, pending):
        # Shared tail of every endpoint method: attach params/fields, then
        # batch the request, return it pending, or execute it immediately.
        request.add_params(params)
        request.add_fields(fields)
        if batch is not None:
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Delete this ad study (DELETE on the node)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        request = FacebookRequest(
            node_id=self['id'],
            method='DELETE',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker({}, {}),
            target_class=AbstractCrudObject,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Read this ad study (GET on the node)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        request = FacebookRequest(
            node_id=self['id'],
            method='GET',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker({}, {}),
            target_class=AdStudy,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Update this ad study (POST on the node)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        param_types = {
            'cells': 'list<Object>',
            'client_business': 'string',
            'confidence_level': 'float',
            'cooldown_start_time': 'int',
            'description': 'string',
            'end_time': 'int',
            'name': 'string',
            'objectives': 'list<Object>',
            'observation_end_time': 'int',
            'start_time': 'int',
            'type': 'type_enum',
            'viewers': 'list<int>',
        }
        enums = {
            'type_enum': AdStudy.Type.__dict__.values(),
        }
        request = FacebookRequest(
            node_id=self['id'],
            method='POST',
            endpoint='/',
            api=self._api,
            param_checker=TypeChecker(param_types, enums),
            target_class=AdStudy,
            api_type='NODE',
            response_parser=ObjectParser(reuse_object=self),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    def get_cells(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch the study's cells (GET on the /cells edge)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        from facebook_business.adobjects.adstudycell import AdStudyCell
        request = FacebookRequest(
            node_id=self['id'],
            method='GET',
            endpoint='/cells',
            api=self._api,
            param_checker=TypeChecker({}, {}),
            target_class=AdStudyCell,
            api_type='EDGE',
            response_parser=ObjectParser(target_class=AdStudyCell, api=self._api),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    def get_instances(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch private lift study instances (GET on the /instances edge)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        from facebook_business.adobjects.privateliftstudyinstance import PrivateLiftStudyInstance
        request = FacebookRequest(
            node_id=self['id'],
            method='GET',
            endpoint='/instances',
            api=self._api,
            param_checker=TypeChecker({}, {}),
            target_class=PrivateLiftStudyInstance,
            api_type='EDGE',
            response_parser=ObjectParser(target_class=PrivateLiftStudyInstance, api=self._api),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    def create_instance(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Create a private lift study instance (POST on the /instances edge)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        from facebook_business.adobjects.privateliftstudyinstance import PrivateLiftStudyInstance
        param_types = {
            'breakdown_key': 'map',
        }
        request = FacebookRequest(
            node_id=self['id'],
            method='POST',
            endpoint='/instances',
            api=self._api,
            param_checker=TypeChecker(param_types, {}),
            target_class=PrivateLiftStudyInstance,
            api_type='EDGE',
            response_parser=ObjectParser(target_class=PrivateLiftStudyInstance, api=self._api),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    def get_objectives(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch the study's objectives (GET on the /objectives edge)."""
        self._warn_if_callbacks_without_batch(batch, success, failure)
        from facebook_business.adobjects.adstudyobjective import AdStudyObjective
        request = FacebookRequest(
            node_id=self['id'],
            method='GET',
            endpoint='/objectives',
            api=self._api,
            param_checker=TypeChecker({}, {}),
            target_class=AdStudyObjective,
            api_type='EDGE',
            response_parser=ObjectParser(target_class=AdStudyObjective, api=self._api),
        )
        return self._dispatch_request(request, fields, params, batch, success, failure, pending)

    # Declared field -> type mapping used by the SDK's typechecker.
    _field_types = {
        'business': 'Business',
        'canceled_time': 'datetime',
        'cooldown_start_time': 'datetime',
        'created_by': 'User',
        'created_time': 'datetime',
        'description': 'string',
        'end_time': 'datetime',
        'id': 'string',
        'name': 'string',
        'observation_end_time': 'datetime',
        'results_first_available_date': 'string',
        'start_time': 'datetime',
        'type': 'string',
        'updated_by': 'User',
        'updated_time': 'datetime',
        'cells': 'list<Object>',
        'client_business': 'string',
        'confidence_level': 'float',
        'objectives': 'list<Object>',
        'viewers': 'list<int>',
    }

    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        field_enum_info['Type'] = AdStudy.Type.__dict__.values()
        return field_enum_info
/bluebell_akn-2.3.0-py3-none-any.whl/bluebell/akn.py |
from collections import defaultdict
import re
class TreeNode(object):
    """A parse-tree node: the matched text, its offset in the input, and
    its child elements. Iterating a node yields its child elements."""

    def __init__(self, text, offset, elements):
        self.text = text
        self.offset = offset
        self.elements = elements

    def __iter__(self):
        return iter(self.elements)
class TreeNode1(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode1, self).__init__(text, offset, elements)
self.judgmentBody = elements[0]
self.conclusions = elements[1]
self.attachments = elements[2]
class TreeNode2(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode2, self).__init__(text, offset, elements)
self.introduction = elements[0]
self.background = elements[1]
self.arguments = elements[2]
self.remedies = elements[3]
self.motivation = elements[4]
self.decision = elements[5]
class TreeNode3(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode3, self).__init__(text, offset, elements)
self.preface = elements[0]
self.preamble = elements[1]
self.body = elements[2]
self.conclusions = elements[3]
self.attachments = elements[4]
class TreeNode4(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode4, self).__init__(text, offset, elements)
self.preface = elements[0]
self.preamble = elements[1]
self.mainBody = elements[2]
self.conclusions = elements[3]
self.attachments = elements[4]
class TreeNode5(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode5, self).__init__(text, offset, elements)
self.preface = elements[0]
self.debateBody = elements[1]
self.conclusions = elements[2]
self.attachments = elements[3]
class TreeNode6(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode6, self).__init__(text, offset, elements)
self.hier_element_name = elements[0]
self.attrs = elements[1]
self.heading = elements[2]
self.eol = elements[3]
self.body = elements[4]
class TreeNode7(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode7, self).__init__(text, offset, elements)
self.indent = elements[0]
self.subheading = elements[1]
self.content = elements[2]
self.dedent = elements[3]
class TreeNode8(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode8, self).__init__(text, offset, elements)
self.num = elements[0]
self.heading = elements[1]
class TreeNode9(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode9, self).__init__(text, offset, elements)
self.space = elements[1]
self.content = elements[2]
class TreeNode10(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode10, self).__init__(text, offset, elements)
self.num_content = elements[1]
class TreeNode11(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode11, self).__init__(text, offset, elements)
self.space = elements[0]
self.heading_content = elements[2]
class TreeNode12(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode12, self).__init__(text, offset, elements)
self.space = elements[0]
self.content = elements[1]
class TreeNode13(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode13, self).__init__(text, offset, elements)
self.indent = elements[0]
self.content = elements[1]
self.dedent = elements[2]
class TreeNode14(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode14, self).__init__(text, offset, elements)
self.indent = elements[0]
self.content = elements[1]
self.dedent = elements[2]
class TreeNode15(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode15, self).__init__(text, offset, elements)
self.indent = elements[0]
self.content = elements[1]
self.dedent = elements[2]
class TreeNode16(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode16, self).__init__(text, offset, elements)
self.speech_container_name = elements[0]
self.attrs = elements[1]
self.heading = elements[2]
self.eol = elements[3]
self.body = elements[4]
class TreeNode17(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode17, self).__init__(text, offset, elements)
self.indent = elements[0]
self.subheading = elements[1]
self.content = elements[2]
self.dedent = elements[3]
class TreeNode18(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode18, self).__init__(text, offset, elements)
self.speech_group_name = elements[0]
self.attrs = elements[1]
self.heading = elements[2]
self.eol = elements[3]
self.body = elements[4]
class TreeNode19(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode19, self).__init__(text, offset, elements)
self.indent = elements[0]
self.subheading = elements[1]
self.speech_from = elements[2]
self.content = elements[3]
self.dedent = elements[4]
class TreeNode20(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode20, self).__init__(text, offset, elements)
self.space = elements[1]
self.content = elements[2]
self.eol = elements[3]
class TreeNode21(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode21, self).__init__(text, offset, elements)
self.preface_marker = elements[0]
self.content = elements[1]
class TreeNode22(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode22, self).__init__(text, offset, elements)
self.block_element = elements[2]
class TreeNode23(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode23, self).__init__(text, offset, elements)
self.preamble_marker = elements[0]
self.content = elements[1]
class TreeNode24(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode24, self).__init__(text, offset, elements)
self.block_element = elements[1]
class TreeNode25(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode25, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode26(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode26, self).__init__(text, offset, elements)
self.hier_block_indent = elements[2]
class TreeNode27(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode27, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode28(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode28, self).__init__(text, offset, elements)
self.hier_block_indent = elements[2]
class TreeNode29(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode29, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode30(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode30, self).__init__(text, offset, elements)
self.speech_container_indent = elements[2]
class TreeNode31(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode31, self).__init__(text, offset, elements)
self.conclusions_marker = elements[0]
self.content = elements[1]
class TreeNode32(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode32, self).__init__(text, offset, elements)
self.block_element = elements[1]
class TreeNode33(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode33, self).__init__(text, offset, elements)
self.introduction_marker = elements[0]
self.content = elements[1]
class TreeNode34(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode34, self).__init__(text, offset, elements)
self.hier_block_indent = elements[7]
class TreeNode35(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode35, self).__init__(text, offset, elements)
self.background_marker = elements[0]
self.content = elements[1]
class TreeNode36(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode36, self).__init__(text, offset, elements)
self.hier_block_indent = elements[6]
class TreeNode37(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode37, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode38(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode38, self).__init__(text, offset, elements)
self.hier_block_indent = elements[5]
class TreeNode39(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode39, self).__init__(text, offset, elements)
self.remedies_marker = elements[0]
self.content = elements[1]
class TreeNode40(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode40, self).__init__(text, offset, elements)
self.hier_block_indent = elements[4]
class TreeNode41(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode41, self).__init__(text, offset, elements)
self.motivation_marker = elements[0]
self.content = elements[1]
class TreeNode42(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode42, self).__init__(text, offset, elements)
self.hier_block_indent = elements[3]
class TreeNode43(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode43, self).__init__(text, offset, elements)
self.decision_marker = elements[0]
self.content = elements[1]
class TreeNode44(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode44, self).__init__(text, offset, elements)
self.hier_block_indent = elements[2]
class TreeNode45(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode45, self).__init__(text, offset, elements)
self.attachment_marker = elements[0]
self.heading = elements[1]
self.eol = elements[2]
self.indented = elements[3]
self.content = elements[4]
class TreeNode46(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode46, self).__init__(text, offset, elements)
self.indent = elements[0]
self.subheading = elements[1]
self.content = elements[2]
self.attachments = elements[3]
self.dedent = elements[4]
class TreeNode47(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode47, self).__init__(text, offset, elements)
self.hier_block_element = elements[1]
class TreeNode48(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode48, self).__init__(text, offset, elements)
self.hier_block_indent = elements[1]
class TreeNode49(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode49, self).__init__(text, offset, elements)
self.space = elements[0]
self.content = elements[1]
class TreeNode50(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode50, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode51(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode51, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode52(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode52, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode53(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode53, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode54(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode54, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode55(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode55, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode56(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode56, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode57(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode57, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode58(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode58, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode59(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode59, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode60(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode60, self).__init__(text, offset, elements)
self.indent = elements[0]
self.content = elements[1]
self.dedent = elements[2]
class TreeNode61(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode61, self).__init__(text, offset, elements)
self.indent = elements[0]
self.content = elements[1]
self.dedent = elements[2]
class TreeNode62(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode62, self).__init__(text, offset, elements)
self.speech_block_name = elements[0]
self.attrs = elements[1]
self.space = elements[2]
self.content = elements[3]
self.eol = elements[4]
class TreeNode63(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode63, self).__init__(text, offset, elements)
self.body = elements[1]
self.eol = elements[2]
class TreeNode64(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode64, self).__init__(text, offset, elements)
self.space = elements[0]
self.content = elements[1]
class TreeNode65(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode65, self).__init__(text, offset, elements)
self.body = elements[1]
self.eol = elements[2]
class TreeNode66(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode66, self).__init__(text, offset, elements)
self.space = elements[0]
self.content = elements[1]
class TreeNode67(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode67, self).__init__(text, offset, elements)
self.body = elements[1]
self.eol = elements[2]
class TreeNode68(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode68, self).__init__(text, offset, elements)
self.space = elements[0]
self.content = elements[1]
class TreeNode69(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode69, self).__init__(text, offset, elements)
self.attrs = elements[1]
self.space = elements[2]
self.content = elements[3]
self.eol = elements[4]
class TreeNode70(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode70, self).__init__(text, offset, elements)
self.content = elements[1]
self.eol = elements[2]
class TreeNode71(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode71, self).__init__(text, offset, elements)
self.attrs = elements[1]
self.eol = elements[2]
self.indent = elements[3]
self.intro = elements[4]
self.items = elements[5]
self.wrapup = elements[6]
self.dedent = elements[7]
class TreeNode72(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode72, self).__init__(text, offset, elements)
self.line = elements[1]
self.footnotes = elements[2]
class TreeNode73(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode73, self).__init__(text, offset, elements)
self.line = elements[0]
self.footnotes = elements[1]
class TreeNode74(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode74, self).__init__(text, offset, elements)
self.heading = elements[1]
self.eol = elements[2]
self.content = elements[3]
class TreeNode75(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode75, self).__init__(text, offset, elements)
self.indent = elements[0]
self.subheading = elements[1]
self.children = elements[2]
self.dedent = elements[3]
class TreeNode76(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode76, self).__init__(text, offset, elements)
self.attrs = elements[1]
self.eol = elements[2]
self.indent = elements[3]
self.items = elements[4]
self.dedent = elements[5]
class TreeNode77(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode77, self).__init__(text, offset, elements)
self.initial = elements[1]
self.content = elements[2]
class TreeNode78(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode78, self).__init__(text, offset, elements)
self.indent = elements[0]
self.siblings = elements[1]
self.dedent = elements[2]
class TreeNode79(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode79, self).__init__(text, offset, elements)
self.attrs = elements[1]
self.eol = elements[2]
self.indent = elements[3]
self.rows = elements[4]
self.dedent = elements[5]
class TreeNode80(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode80, self).__init__(text, offset, elements)
self.eol = elements[1]
self.indent = elements[2]
self.cells = elements[3]
self.dedent = elements[4]
class TreeNode81(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode81, self).__init__(text, offset, elements)
self.name = elements[0]
self.attrs = elements[1]
self.eol = elements[2]
self.content = elements[3]
class TreeNode82(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode82, self).__init__(text, offset, elements)
self.indent = elements[0]
self.content = elements[1]
self.dedent = elements[2]
class TreeNode83(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode83, self).__init__(text, offset, elements)
self.classes = elements[0]
self.pairs = elements[1]
class TreeNode84(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode84, self).__init__(text, offset, elements)
self.first = elements[1]
self.rest = elements[3]
class TreeNode85(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode85, self).__init__(text, offset, elements)
self.attr = elements[2]
class TreeNode86(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode86, self).__init__(text, offset, elements)
self.attr_name = elements[0]
self.value = elements[1]
class TreeNode87(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode87, self).__init__(text, offset, elements)
self.space = elements[0]
self.attr_value = elements[1]
class TreeNode88(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode88, self).__init__(text, offset, elements)
self.attrs = elements[1]
self.eol = elements[2]
self.indent = elements[3]
self.content = elements[4]
self.dedent = elements[5]
class TreeNode89(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode89, self).__init__(text, offset, elements)
self.space = elements[1]
self.marker = elements[2]
self.eol = elements[4]
self.indent = elements[5]
self.content = elements[6]
self.dedent = elements[7]
class TreeNode90(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode90, self).__init__(text, offset, elements)
self.inline_marker = elements[1]
class TreeNode91(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode91, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode92(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode92, self).__init__(text, offset, elements)
self.inline = elements[1]
class TreeNode93(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode93, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode94(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode94, self).__init__(text, offset, elements)
self.inline = elements[1]
class TreeNode95(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode95, self).__init__(text, offset, elements)
self.content = elements[1]
class TreeNode96(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode96, self).__init__(text, offset, elements)
self.inline = elements[1]
class TreeNode97(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode97, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.content = elements[2]
self.inline_close = elements[3]
class TreeNode98(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode98, self).__init__(text, offset, elements)
self.content = elements[3]
class TreeNode99(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode99, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.href = elements[3]
self.content = elements[4]
self.inline_close = elements[5]
class TreeNode100(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode100, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.content = elements[2]
self.inline_close = elements[3]
class TreeNode101(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode101, self).__init__(text, offset, elements)
self.inline_nested = elements[1]
class TreeNode102(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode102, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.content = elements[2]
self.inline_close = elements[3]
class TreeNode103(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode103, self).__init__(text, offset, elements)
self.inline_nested = elements[1]
class TreeNode104(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode104, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.href = elements[2]
self.content = elements[4]
self.inline_close = elements[5]
class TreeNode105(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode105, self).__init__(text, offset, elements)
self.inline_nested = elements[1]
class TreeNode106(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode106, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.space = elements[2]
self.marker = elements[3]
self.inline_close = elements[4]
class TreeNode107(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode107, self).__init__(text, offset, elements)
self.inline_open = elements[0]
self.tag = elements[1]
self.standard_inline_marker = elements[1]
self.attrs = elements[2]
self.content = elements[4]
self.inline_close = elements[5]
class TreeNode108(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode108, self).__init__(text, offset, elements)
self.inline_nested = elements[1]
class TreeNode109(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode109, self).__init__(text, offset, elements)
self.newline = elements[0]
class TreeNode110(TreeNode):
def __init__(self, text, offset, elements):
super(TreeNode110, self).__init__(text, offset, elements)
self.eol = elements[1]
class TreeNode111(TreeNode):
    """Parse-tree node whose second child is exposed as `eol`."""

    def __init__(self, text, offset, elements):
        TreeNode.__init__(self, text, offset, elements)
        self.eol = elements[1]
class ParseError(SyntaxError):
    """Raised by the parser when the input text does not match the grammar."""
# Unique sentinel returned by every _read_* method on a failed match;
# compared with `is` so it can never collide with a real parse result.
FAILURE = object()
class Grammar(object):
    """One memoized `_read_<rule>` matcher per grammar rule.

    Each method consumes input from ``self._offset`` and returns a
    TreeNode (possibly retyped with a mixin from ``self._types``) or the
    FAILURE sentinel.  The ``_input``/``_offset``/``_cache``/``_failure``
    attributes are initialized elsewhere (not shown in this chunk).
    """

    # Pre-compiled single-character classes used by the matchers below.
    # Several are textually identical (REGEX_1/6/9/10/13/15 are all
    # '^[^\n]') because the generator emits one constant per grammar
    # position rather than de-duplicating.
    REGEX_1 = re.compile('^[^\\n]')
    REGEX_2 = re.compile('^[^ \\n|{}.]')
    REGEX_3 = re.compile('^[^ \\n|{}]')
    REGEX_4 = re.compile('^[^\\n|}]')
    REGEX_5 = re.compile('^[^ \\n]')
    REGEX_6 = re.compile('^[^\\n]')
    REGEX_7 = re.compile('^[^*/_{\\n\\\\]')
    REGEX_8 = re.compile('^[^*/_{\\n\\\\}]')
    REGEX_9 = re.compile('^[^\\n]')
    REGEX_10 = re.compile('^[^\\n]')
    REGEX_11 = re.compile('^[\\n]')
    REGEX_12 = re.compile('^[^ \\n]')
    REGEX_13 = re.compile('^[^\\n]')
    REGEX_14 = re.compile('^[^ \\n]')
    REGEX_15 = re.compile('^[^\\n]')
def _read_root(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['root'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
chunk0, max0 = None, self._offset + 4
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'noop':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'noop\'')
self._cache['root'][index0] = (address0, self._offset)
return address0
    def _read_judgment(self):
        """Parse rule `judgment`: judgmentBody conclusions? attachments?.

        Returns a TreeNode1 (retyped with the Judgment mixin from
        ``self._types``) on success, or FAILURE with the offset rewound.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation: reuse any previous result at this offset.
        cached = self._cache['judgment'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_judgmentBody()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # Optional `conclusions`: on failure, substitute an empty
            # placeholder node and rewind the offset.
            address2 = self._read_conclusions()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index3 = self._offset
                # Optional `attachments`, same placeholder treatment.
                address3 = self._read_attachments()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index3:index3], index3, [])
                    self._offset = index3
                if address3 is not FAILURE:
                    elements0.append(address3)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode1(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Graft the user-supplied Judgment mixin onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Judgment', (cls0, self._types.Judgment), {})
        self._cache['judgment'][index0] = (address0, self._offset)
        return address0
    def _read_judgmentBody(self):
        """Parse rule `judgmentBody`: six optional sections in fixed order
        (introduction? background? arguments? remedies? motivation? decision?).

        Each failed optional section is replaced by an empty placeholder
        node with the offset rewound, so the sequence as a whole can only
        fail via a sub-rule that consumes input and then fails.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['judgmentBody'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        index2 = self._offset
        address1 = self._read_introduction()
        if address1 is FAILURE:
            address1 = TreeNode(self._input[index2:index2], index2, [])
            self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index3 = self._offset
            address2 = self._read_background()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index4 = self._offset
                address3 = self._read_arguments()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index4:index4], index4, [])
                    self._offset = index4
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    index5 = self._offset
                    address4 = self._read_remedies()
                    if address4 is FAILURE:
                        address4 = TreeNode(self._input[index5:index5], index5, [])
                        self._offset = index5
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        index6 = self._offset
                        address5 = self._read_motivation()
                        if address5 is FAILURE:
                            address5 = TreeNode(self._input[index6:index6], index6, [])
                            self._offset = index6
                        if address5 is not FAILURE:
                            elements0.append(address5)
                            address6 = FAILURE
                            index7 = self._offset
                            address6 = self._read_decision()
                            if address6 is FAILURE:
                                address6 = TreeNode(self._input[index7:index7], index7, [])
                                self._offset = index7
                            if address6 is not FAILURE:
                                elements0.append(address6)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode2(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Retype the node with the JudgmentBody mixin from self._types.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'JudgmentBody', (cls0, self._types.JudgmentBody), {})
        self._cache['judgmentBody'][index0] = (address0, self._offset)
        return address0
def _read_act(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['act'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
address0 = self._read_hierarchical_structure()
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Act', (cls0, self._types.Act), {})
self._cache['act'][index0] = (address0, self._offset)
return address0
def _read_bill(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['bill'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
address0 = self._read_hierarchical_structure()
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Bill', (cls0, self._types.Bill), {})
self._cache['bill'][index0] = (address0, self._offset)
return address0
    def _read_hierarchical_structure(self):
        """Parse rule `hierarchical_structure`:
        preface? preamble? body conclusions? attachments?.

        Only `body` is mandatory; each optional part is replaced by an
        empty placeholder node on failure, with the offset rewound.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hierarchical_structure'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        index2 = self._offset
        address1 = self._read_preface()
        if address1 is FAILURE:
            address1 = TreeNode(self._input[index2:index2], index2, [])
            self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index3 = self._offset
            address2 = self._read_preamble()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # `body` is required — no placeholder fallback here.
                address3 = self._read_body()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    index4 = self._offset
                    address4 = self._read_conclusions()
                    if address4 is FAILURE:
                        address4 = TreeNode(self._input[index4:index4], index4, [])
                        self._offset = index4
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        index5 = self._offset
                        address5 = self._read_attachments()
                        if address5 is FAILURE:
                            address5 = TreeNode(self._input[index5:index5], index5, [])
                            self._offset = index5
                        if address5 is not FAILURE:
                            elements0.append(address5)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode3(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Retype with the HierarchicalStructure mixin from self._types.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'HierarchicalStructure', (cls0, self._types.HierarchicalStructure), {})
        self._cache['hierarchical_structure'][index0] = (address0, self._offset)
        return address0
def _read_debateReport(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['debateReport'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
address0 = self._read_open_structure()
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'DebateReport', (cls0, self._types.DebateReport), {})
self._cache['debateReport'][index0] = (address0, self._offset)
return address0
def _read_doc(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['doc'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
address0 = self._read_open_structure()
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Doc', (cls0, self._types.Doc), {})
self._cache['doc'][index0] = (address0, self._offset)
return address0
def _read_statement(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['statement'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
address0 = self._read_open_structure()
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Statement', (cls0, self._types.Statement), {})
self._cache['statement'][index0] = (address0, self._offset)
return address0
    def _read_open_structure(self):
        """Parse rule `open_structure`:
        preface? preamble? mainBody conclusions? attachments?.

        Structurally identical to `hierarchical_structure` except the
        required middle element is `mainBody` instead of `body`.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['open_structure'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        index2 = self._offset
        address1 = self._read_preface()
        if address1 is FAILURE:
            address1 = TreeNode(self._input[index2:index2], index2, [])
            self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index3 = self._offset
            address2 = self._read_preamble()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # `mainBody` is required — no placeholder fallback.
                address3 = self._read_mainBody()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    index4 = self._offset
                    address4 = self._read_conclusions()
                    if address4 is FAILURE:
                        address4 = TreeNode(self._input[index4:index4], index4, [])
                        self._offset = index4
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        index5 = self._offset
                        address5 = self._read_attachments()
                        if address5 is FAILURE:
                            address5 = TreeNode(self._input[index5:index5], index5, [])
                            self._offset = index5
                        if address5 is not FAILURE:
                            elements0.append(address5)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode4(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Retype with the OpenStructure mixin from self._types.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'OpenStructure', (cls0, self._types.OpenStructure), {})
        self._cache['open_structure'][index0] = (address0, self._offset)
        return address0
def _read_debate(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['debate'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
address0 = self._read_debate_structure()
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Debate', (cls0, self._types.Debate), {})
self._cache['debate'][index0] = (address0, self._offset)
return address0
    def _read_debate_structure(self):
        """Parse rule `debate_structure`:
        preface? debateBody conclusions? attachments?.

        Only `debateBody` is mandatory; optional parts fall back to empty
        placeholder nodes with the offset rewound.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['debate_structure'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        index2 = self._offset
        address1 = self._read_preface()
        if address1 is FAILURE:
            address1 = TreeNode(self._input[index2:index2], index2, [])
            self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # `debateBody` is required — no placeholder fallback.
            address2 = self._read_debateBody()
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index3 = self._offset
                address3 = self._read_conclusions()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index3:index3], index3, [])
                    self._offset = index3
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    index4 = self._offset
                    address4 = self._read_attachments()
                    if address4 is FAILURE:
                        address4 = TreeNode(self._input[index4:index4], index4, [])
                        self._offset = index4
                    if address4 is not FAILURE:
                        elements0.append(address4)
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode5(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Retype with the DebateStructure mixin from self._types.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'DebateStructure', (cls0, self._types.DebateStructure), {})
        self._cache['debate_structure'][index0] = (address0, self._offset)
        return address0
def _read_hier_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['hier_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_crossheading()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_hier_element_block()
if address0 is FAILURE:
self._offset = index1
self._cache['hier_element'][index0] = (address0, self._offset)
return address0
    def _read_hier_element_block(self):
        """Parse rule `hier_element_block`:
        hier_element_name block_attrs? hier_element_heading? eol
        (indent subheading? hier_block_element* dedent)?.

        The optional indented body is itself a sequence (TreeNode7); the
        outer result is a TreeNode6 retyped with the HierElement mixin.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hier_element_block'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_hier_element_name()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # Optional `block_attrs`: empty placeholder on failure.
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index3 = self._offset
                # Optional `hier_element_heading`.
                address3 = self._read_hier_element_heading()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index3:index3], index3, [])
                    self._offset = index3
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    address4 = self._read_eol()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        index4 = self._offset
                        # Optional indented body: indent subheading?
                        # hier_block_element* dedent.
                        index5, elements1 = self._offset, []
                        address6 = FAILURE
                        address6 = self._read_indent()
                        if address6 is not FAILURE:
                            elements1.append(address6)
                            address7 = FAILURE
                            index6 = self._offset
                            address7 = self._read_subheading()
                            if address7 is FAILURE:
                                address7 = TreeNode(self._input[index6:index6], index6, [])
                                self._offset = index6
                            if address7 is not FAILURE:
                                elements1.append(address7)
                                address8 = FAILURE
                                # Zero-or-more repetition (remaining0 starts at 0).
                                remaining0, index7, elements2, address9 = 0, self._offset, [], True
                                while address9 is not FAILURE:
                                    address9 = self._read_hier_block_element()
                                    if address9 is not FAILURE:
                                        elements2.append(address9)
                                        remaining0 -= 1
                                if remaining0 <= 0:
                                    address8 = TreeNode(self._input[index7:self._offset], index7, elements2)
                                    self._offset = self._offset
                                else:
                                    address8 = FAILURE
                                if address8 is not FAILURE:
                                    elements1.append(address8)
                                    address10 = FAILURE
                                    address10 = self._read_dedent()
                                    if address10 is not FAILURE:
                                        elements1.append(address10)
                                    else:
                                        elements1 = None
                                        self._offset = index5
                                else:
                                    elements1 = None
                                    self._offset = index5
                            else:
                                elements1 = None
                                self._offset = index5
                        else:
                            elements1 = None
                            self._offset = index5
                        if elements1 is None:
                            address5 = FAILURE
                        else:
                            address5 = TreeNode7(self._input[index5:self._offset], index5, elements1)
                            self._offset = self._offset
                        # The whole indented body is optional.
                        if address5 is FAILURE:
                            address5 = TreeNode(self._input[index4:index4], index4, [])
                            self._offset = index4
                        if address5 is not FAILURE:
                            elements0.append(address5)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode6(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Retype with the HierElement mixin from self._types.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'HierElement', (cls0, self._types.HierElement), {})
        self._cache['hier_element_block'][index0] = (address0, self._offset)
        return address0
    def _read_hier_element_heading(self):
        """Parse rule `hier_element_heading`:
        hier_element_heading_num? hier_element_heading_heading?.

        Both parts are optional, so this rule always succeeds (possibly
        consuming nothing); the node is retyped with the
        HierElementHeading mixin.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hier_element_heading'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        index2 = self._offset
        address1 = self._read_hier_element_heading_num()
        if address1 is FAILURE:
            address1 = TreeNode(self._input[index2:index2], index2, [])
            self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index3 = self._offset
            address2 = self._read_hier_element_heading_heading()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address2 is not FAILURE:
                elements0.append(address2)
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode8(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Retype with the HierElementHeading mixin from self._types.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'HierElementHeading', (cls0, self._types.HierElementHeading), {})
        self._cache['hier_element_heading'][index0] = (address0, self._offset)
        return address0
    def _read_hier_element_heading_num(self):
        """Parse rule `hier_element_heading_num`:
        !hier_element_heading_heading space (!hier_element_heading_heading num_content)+.

        The `!X` parts are negative lookaheads: the sub-rule is attempted,
        the offset is always restored, and the result is inverted.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hier_element_heading_num'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        index2 = self._offset
        # Negative lookahead: succeed (with an empty node) only when a
        # heading does NOT start here; never consumes input.
        address1 = self._read_hier_element_heading_heading()
        self._offset = index2
        if address1 is FAILURE:
            address1 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
            self._offset = self._offset
        else:
            address1 = FAILURE
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            address2 = self._read_space()
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # One-or-more repetition (remaining0 starts at 1).
                remaining0, index3, elements1, address4 = 1, self._offset, [], True
                while address4 is not FAILURE:
                    index4, elements2 = self._offset, []
                    address5 = FAILURE
                    index5 = self._offset
                    # Same negative lookahead guards every repeated char.
                    address5 = self._read_hier_element_heading_heading()
                    self._offset = index5
                    if address5 is FAILURE:
                        address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address5 = FAILURE
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        address6 = FAILURE
                        address6 = self._read_num_content()
                        if address6 is not FAILURE:
                            elements2.append(address6)
                        else:
                            elements2 = None
                            self._offset = index4
                    else:
                        elements2 = None
                        self._offset = index4
                    if elements2 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode10(self._input[index4:self._offset], index4, elements2)
                        self._offset = self._offset
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index3:self._offset], index3, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode9(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        self._cache['hier_element_heading_num'][index0] = (address0, self._offset)
        return address0
    def _read_hier_element_heading_heading(self):
        """Parse rule `hier_element_heading_heading`:
        space '-' heading_content.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hier_element_heading_heading'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_space()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal '-' match (one character).
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == '-':
                address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address2 = FAILURE
                # Track the furthest failure point for error reporting.
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'-\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_heading_content()
                if address3 is not FAILURE:
                    elements0.append(address3)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode11(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        self._cache['hier_element_heading_heading'][index0] = (address0, self._offset)
        return address0
    def _read_heading_content(self):
        """Parse rule `heading_content`: (space inline+) / &eol.

        First alternative: a space followed by one-or-more inline
        elements.  Fallback: a positive lookahead for end-of-line that
        succeeds with an empty node and consumes nothing.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['heading_content'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        index2, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_space()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # One-or-more repetition of `inline` (remaining0 starts at 1).
            remaining0, index3, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                address3 = self._read_inline()
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
            else:
                elements0 = None
                self._offset = index2
        else:
            elements0 = None
            self._offset = index2
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode12(self._input[index2:self._offset], index2, elements0)
            self._offset = self._offset
        if address0 is FAILURE:
            # Second alternative: positive lookahead on `eol` — parse it,
            # rewind, and emit an empty node on success.
            self._offset = index1
            index4 = self._offset
            address0 = self._read_eol()
            self._offset = index4
            if address0 is not FAILURE:
                address0 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address0 = FAILURE
            if address0 is FAILURE:
                self._offset = index1
        self._cache['heading_content'][index0] = (address0, self._offset)
        return address0
    def _read_num_content(self):
        """Parse rule `num_content`: escape / [^\\n].

        Matches an escape sequence, or failing that any single character
        other than a newline.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['num_content'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        address0 = self._read_escape()
        if address0 is FAILURE:
            # Second alternative: one character matching REGEX_1 ([^\n]).
            self._offset = index1
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 is not None and Grammar.REGEX_1.search(chunk0):
                address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address0 = FAILURE
                # Track the furthest failure point for error reporting.
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('[^\\n]')
            if address0 is FAILURE:
                self._offset = index1
        self._cache['num_content'][index0] = (address0, self._offset)
        return address0
def _read_hier_block_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['hier_block_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_hier_element()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_block_element()
if address0 is FAILURE:
self._offset = index1
self._cache['hier_block_element'][index0] = (address0, self._offset)
return address0
    def _read_hier_indent(self):
        """Parse rule `hier_indent`:
        (indent hier_element+ dedent) / hier_element.

        Matches an indented group of hierarchical elements, or a single
        un-indented one as a fallback.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hier_indent'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        index2, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_indent()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # One-or-more repetition of `hier_element`.
            remaining0, index3, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                address3 = self._read_hier_element()
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address4 = FAILURE
                address4 = self._read_dedent()
                if address4 is not FAILURE:
                    elements0.append(address4)
                else:
                    elements0 = None
                    self._offset = index2
            else:
                elements0 = None
                self._offset = index2
        else:
            elements0 = None
            self._offset = index2
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode13(self._input[index2:self._offset], index2, elements0)
            self._offset = self._offset
        if address0 is FAILURE:
            # Fallback alternative: a single bare hier_element.
            self._offset = index1
            address0 = self._read_hier_element()
            if address0 is FAILURE:
                self._offset = index1
        self._cache['hier_indent'][index0] = (address0, self._offset)
        return address0
    def _read_hier_block_indent(self):
        """Parse rule `hier_block_indent`:
        (indent hier_block_element+ dedent) / hier_block_element.

        Same shape as `hier_indent`, but over `hier_block_element`.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['hier_block_indent'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        index2, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_indent()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # One-or-more repetition of `hier_block_element`.
            remaining0, index3, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                address3 = self._read_hier_block_element()
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address4 = FAILURE
                address4 = self._read_dedent()
                if address4 is not FAILURE:
                    elements0.append(address4)
                else:
                    elements0 = None
                    self._offset = index2
            else:
                elements0 = None
                self._offset = index2
        else:
            elements0 = None
            self._offset = index2
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode14(self._input[index2:self._offset], index2, elements0)
            self._offset = self._offset
        if address0 is FAILURE:
            # Fallback alternative: a single bare hier_block_element.
            self._offset = index1
            address0 = self._read_hier_block_element()
            if address0 is FAILURE:
                self._offset = index1
        self._cache['hier_block_indent'][index0] = (address0, self._offset)
        return address0
    def _read_speech_container_indent(self):
        """Parse rule `speech_container_indent`:
        (indent speech_container+ dedent) / speech_container.

        Same shape as `hier_indent`, but over `speech_container`.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoisation per start offset.
        cached = self._cache['speech_container_indent'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        index2, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_indent()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # One-or-more repetition of `speech_container`.
            remaining0, index3, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                address3 = self._read_speech_container()
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address4 = FAILURE
                address4 = self._read_dedent()
                if address4 is not FAILURE:
                    elements0.append(address4)
                else:
                    elements0 = None
                    self._offset = index2
            else:
                elements0 = None
                self._offset = index2
        else:
            elements0 = None
            self._offset = index2
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode15(self._input[index2:self._offset], index2, elements0)
            self._offset = self._offset
        if address0 is FAILURE:
            # Fallback alternative: a single bare speech_container.
            self._offset = index1
            address0 = self._read_speech_container()
            if address0 is FAILURE:
                self._offset = index1
        self._cache['speech_container_indent'][index0] = (address0, self._offset)
        return address0
def _read_hier_element_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['hier_element_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
chunk0, max0 = None, self._offset + 6
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'ALINEA':
address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
self._offset = self._offset + 6
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ALINEA\'')
if address0 is FAILURE:
self._offset = index1
chunk1, max1 = None, self._offset + 7
if max1 <= self._input_size:
chunk1 = self._input[self._offset:max1]
if chunk1 == 'ARTICLE':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ARTICLE\'')
if address0 is FAILURE:
self._offset = index1
chunk2, max2 = None, self._offset + 4
if max2 <= self._input_size:
chunk2 = self._input[self._offset:max2]
if chunk2 == 'BOOK':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'BOOK\'')
if address0 is FAILURE:
self._offset = index1
chunk3, max3 = None, self._offset + 7
if max3 <= self._input_size:
chunk3 = self._input[self._offset:max3]
if chunk3 == 'CHAPTER':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'CHAPTER\'')
if address0 is FAILURE:
self._offset = index1
chunk4, max4 = None, self._offset + 6
if max4 <= self._input_size:
chunk4 = self._input[self._offset:max4]
if chunk4 == 'CLAUSE':
address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
self._offset = self._offset + 6
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'CLAUSE\'')
if address0 is FAILURE:
self._offset = index1
chunk5, max5 = None, self._offset + 8
if max5 <= self._input_size:
chunk5 = self._input[self._offset:max5]
if chunk5 == 'DIVISION':
address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'DIVISION\'')
if address0 is FAILURE:
self._offset = index1
chunk6, max6 = None, self._offset + 6
if max6 <= self._input_size:
chunk6 = self._input[self._offset:max6]
if chunk6 == 'INDENT':
address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
self._offset = self._offset + 6
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'INDENT\'')
if address0 is FAILURE:
self._offset = index1
chunk7, max7 = None, self._offset + 5
if max7 <= self._input_size:
chunk7 = self._input[self._offset:max7]
if chunk7 == 'LEVEL':
address0 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
self._offset = self._offset + 5
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'LEVEL\'')
if address0 is FAILURE:
self._offset = index1
chunk8, max8 = None, self._offset + 4
if max8 <= self._input_size:
chunk8 = self._input[self._offset:max8]
if chunk8 == 'LIST':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'LIST\'')
if address0 is FAILURE:
self._offset = index1
chunk9, max9 = None, self._offset + 9
if max9 <= self._input_size:
chunk9 = self._input[self._offset:max9]
if chunk9 == 'PARAGRAPH':
address0 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
self._offset = self._offset + 9
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PARAGRAPH\'')
if address0 is FAILURE:
self._offset = index1
chunk10, max10 = None, self._offset + 4
if max10 <= self._input_size:
chunk10 = self._input[self._offset:max10]
if chunk10 == 'PART':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PART\'')
if address0 is FAILURE:
self._offset = index1
chunk11, max11 = None, self._offset + 5
if max11 <= self._input_size:
chunk11 = self._input[self._offset:max11]
if chunk11 == 'POINT':
address0 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
self._offset = self._offset + 5
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'POINT\'')
if address0 is FAILURE:
self._offset = index1
chunk12, max12 = None, self._offset + 7
if max12 <= self._input_size:
chunk12 = self._input[self._offset:max12]
if chunk12 == 'PROVISO':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PROVISO\'')
if address0 is FAILURE:
self._offset = index1
chunk13, max13 = None, self._offset + 4
if max13 <= self._input_size:
chunk13 = self._input[self._offset:max13]
if chunk13 == 'RULE':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'RULE\'')
if address0 is FAILURE:
self._offset = index1
chunk14, max14 = None, self._offset + 7
if max14 <= self._input_size:
chunk14 = self._input[self._offset:max14]
if chunk14 == 'SECTION':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SECTION\'')
if address0 is FAILURE:
self._offset = index1
chunk15, max15 = None, self._offset + 10
if max15 <= self._input_size:
chunk15 = self._input[self._offset:max15]
if chunk15 == 'SUBCHAPTER':
address0 = TreeNode(self._input[self._offset:self._offset + 10], self._offset, [])
self._offset = self._offset + 10
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBCHAPTER\'')
if address0 is FAILURE:
self._offset = index1
chunk16, max16 = None, self._offset + 9
if max16 <= self._input_size:
chunk16 = self._input[self._offset:max16]
if chunk16 == 'SUBCLAUSE':
address0 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
self._offset = self._offset + 9
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBCLAUSE\'')
if address0 is FAILURE:
self._offset = index1
chunk17, max17 = None, self._offset + 11
if max17 <= self._input_size:
chunk17 = self._input[self._offset:max17]
if chunk17 == 'SUBDIVISION':
address0 = TreeNode(self._input[self._offset:self._offset + 11], self._offset, [])
self._offset = self._offset + 11
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBDIVISION\'')
if address0 is FAILURE:
self._offset = index1
chunk18, max18 = None, self._offset + 7
if max18 <= self._input_size:
chunk18 = self._input[self._offset:max18]
if chunk18 == 'SUBLIST':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBLIST\'')
if address0 is FAILURE:
self._offset = index1
chunk19, max19 = None, self._offset + 12
if max19 <= self._input_size:
chunk19 = self._input[self._offset:max19]
if chunk19 == 'SUBPARAGRAPH':
address0 = TreeNode(self._input[self._offset:self._offset + 12], self._offset, [])
self._offset = self._offset + 12
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBPARAGRAPH\'')
if address0 is FAILURE:
self._offset = index1
chunk20, max20 = None, self._offset + 7
if max20 <= self._input_size:
chunk20 = self._input[self._offset:max20]
if chunk20 == 'SUBPART':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBPART\'')
if address0 is FAILURE:
self._offset = index1
chunk21, max21 = None, self._offset + 7
if max21 <= self._input_size:
chunk21 = self._input[self._offset:max21]
if chunk21 == 'SUBRULE':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBRULE\'')
if address0 is FAILURE:
self._offset = index1
chunk22, max22 = None, self._offset + 10
if max22 <= self._input_size:
chunk22 = self._input[self._offset:max22]
if chunk22 == 'SUBSECTION':
address0 = TreeNode(self._input[self._offset:self._offset + 10], self._offset, [])
self._offset = self._offset + 10
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBSECTION\'')
if address0 is FAILURE:
self._offset = index1
chunk23, max23 = None, self._offset + 8
if max23 <= self._input_size:
chunk23 = self._input[self._offset:max23]
if chunk23 == 'SUBTITLE':
address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBTITLE\'')
if address0 is FAILURE:
self._offset = index1
chunk24, max24 = None, self._offset + 5
if max24 <= self._input_size:
chunk24 = self._input[self._offset:max24]
if chunk24 == 'TITLE':
address0 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
self._offset = self._offset + 5
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'TITLE\'')
if address0 is FAILURE:
self._offset = index1
chunk25, max25 = None, self._offset + 4
if max25 <= self._input_size:
chunk25 = self._input[self._offset:max25]
if chunk25 == 'TOME':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'TOME\'')
if address0 is FAILURE:
self._offset = index1
chunk26, max26 = None, self._offset + 12
if max26 <= self._input_size:
chunk26 = self._input[self._offset:max26]
if chunk26 == 'TRANSITIONAL':
address0 = TreeNode(self._input[self._offset:self._offset + 12], self._offset, [])
self._offset = self._offset + 12
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'TRANSITIONAL\'')
if address0 is FAILURE:
self._offset = index1
chunk27, max27 = None, self._offset + 3
if max27 <= self._input_size:
chunk27 = self._input[self._offset:max27]
if chunk27 == 'ART':
address0 = TreeNode(self._input[self._offset:self._offset + 3], self._offset, [])
self._offset = self._offset + 3
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ART\'')
if address0 is FAILURE:
self._offset = index1
chunk28, max28 = None, self._offset + 4
if max28 <= self._input_size:
chunk28 = self._input[self._offset:max28]
if chunk28 == 'CHAP':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'CHAP\'')
if address0 is FAILURE:
self._offset = index1
chunk29, max29 = None, self._offset + 4
if max29 <= self._input_size:
chunk29 = self._input[self._offset:max29]
if chunk29 == 'PARA':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PARA\'')
if address0 is FAILURE:
self._offset = index1
chunk30, max30 = None, self._offset + 3
if max30 <= self._input_size:
chunk30 = self._input[self._offset:max30]
if chunk30 == 'SEC':
address0 = TreeNode(self._input[self._offset:self._offset + 3], self._offset, [])
self._offset = self._offset + 3
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SEC\'')
if address0 is FAILURE:
self._offset = index1
chunk31, max31 = None, self._offset + 7
if max31 <= self._input_size:
chunk31 = self._input[self._offset:max31]
if chunk31 == 'SUBCHAP':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBCHAP\'')
if address0 is FAILURE:
self._offset = index1
chunk32, max32 = None, self._offset + 7
if max32 <= self._input_size:
chunk32 = self._input[self._offset:max32]
if chunk32 == 'SUBPARA':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBPARA\'')
if address0 is FAILURE:
self._offset = index1
chunk33, max33 = None, self._offset + 6
if max33 <= self._input_size:
chunk33 = self._input[self._offset:max33]
if chunk33 == 'SUBSEC':
address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
self._offset = self._offset + 6
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUBSEC\'')
if address0 is FAILURE:
self._offset = index1
self._cache['hier_element_name'][index0] = (address0, self._offset)
return address0
    def _read_speech_container(self):
        """speech_container <- speech_container_name block_attrs? hier_element_heading?
        eol (indent subheading? speech_hier_block_element* dedent)? -> TreeNode16

        Memoised packrat rule. On success the node's class is dynamically
        extended with the SpeechContainer mixin from ``self._types``.
        """
        address0, index0 = FAILURE, self._offset
        # packrat cache: reuse an earlier parse attempt at this offset
        cached = self._cache['speech_container'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # required container keyword (e.g. 'DEBATESECTION', 'QUESTIONS')
        address1 = self._read_speech_container_name()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # optional block attributes; an empty TreeNode stands in on failure
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index3 = self._offset
                # optional heading text after the keyword
                address3 = self._read_hier_element_heading()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index3:index3], index3, [])
                    self._offset = index3
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # mandatory end-of-line terminating the header line
                    address4 = self._read_eol()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        index4 = self._offset
                        # optional indented body:
                        #   indent subheading? speech_hier_block_element* dedent
                        index5, elements1 = self._offset, []
                        address6 = FAILURE
                        address6 = self._read_indent()
                        if address6 is not FAILURE:
                            elements1.append(address6)
                            address7 = FAILURE
                            index6 = self._offset
                            # optional subheading inside the body
                            address7 = self._read_subheading()
                            if address7 is FAILURE:
                                address7 = TreeNode(self._input[index6:index6], index6, [])
                                self._offset = index6
                            if address7 is not FAILURE:
                                elements1.append(address7)
                                address8 = FAILURE
                                # zero-or-more child elements (remaining0 starts at 0)
                                remaining0, index7, elements2, address9 = 0, self._offset, [], True
                                while address9 is not FAILURE:
                                    address9 = self._read_speech_hier_block_element()
                                    if address9 is not FAILURE:
                                        elements2.append(address9)
                                        remaining0 -= 1
                                if remaining0 <= 0:
                                    address8 = TreeNode(self._input[index7:self._offset], index7, elements2)
                                    self._offset = self._offset
                                else:
                                    address8 = FAILURE
                                if address8 is not FAILURE:
                                    elements1.append(address8)
                                    address10 = FAILURE
                                    # body must be closed by a dedent
                                    address10 = self._read_dedent()
                                    if address10 is not FAILURE:
                                        elements1.append(address10)
                                    else:
                                        elements1 = None
                                        self._offset = index5
                                else:
                                    elements1 = None
                                    self._offset = index5
                            else:
                                elements1 = None
                                self._offset = index5
                        else:
                            elements1 = None
                            self._offset = index5
                        if elements1 is None:
                            address5 = FAILURE
                        else:
                            address5 = TreeNode17(self._input[index5:self._offset], index5, elements1)
                            self._offset = self._offset
                        # the whole indented body is optional: substitute an
                        # empty node and rewind when it failed
                        if address5 is FAILURE:
                            address5 = TreeNode(self._input[index4:index4], index4, [])
                            self._offset = index4
                        if address5 is not FAILURE:
                            elements0.append(address5)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode16(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # graft the semantic SpeechContainer mixin onto the node's class
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'SpeechContainer', (cls0, self._types.SpeechContainer), {})
        self._cache['speech_container'][index0] = (address0, self._offset)
        return address0
def _read_speech_hier_block_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_hier_block_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_speech_container()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_speech_group()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_speech_block_element()
if address0 is FAILURE:
self._offset = index1
self._cache['speech_hier_block_element'][index0] = (address0, self._offset)
return address0
    def _read_speech_group(self):
        """speech_group <- speech_group_name block_attrs? hier_element_heading? eol
        (indent subheading? speech_from speech_hier_block_element+ dedent) -> TreeNode18

        Memoised packrat rule. Unlike speech_container, the indented body here
        is mandatory, must carry a speech_from line, and needs at least one
        child element. On success the node's class is dynamically extended
        with the SpeechGroup mixin from ``self._types``.
        """
        address0, index0 = FAILURE, self._offset
        # packrat cache: reuse an earlier parse attempt at this offset
        cached = self._cache['speech_group'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # required group keyword (e.g. 'SPEECH', 'QUESTION')
        address1 = self._read_speech_group_name()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # optional block attributes; an empty TreeNode stands in on failure
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index3 = self._offset
                # optional heading text after the keyword
                address3 = self._read_hier_element_heading()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index3:index3], index3, [])
                    self._offset = index3
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # mandatory end-of-line terminating the header line
                    address4 = self._read_eol()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        # mandatory indented body (no empty-node fallback here):
                        #   indent subheading? speech_from speech_hier_block_element+ dedent
                        index4, elements1 = self._offset, []
                        address6 = FAILURE
                        address6 = self._read_indent()
                        if address6 is not FAILURE:
                            elements1.append(address6)
                            address7 = FAILURE
                            index5 = self._offset
                            # optional subheading inside the body
                            address7 = self._read_subheading()
                            if address7 is FAILURE:
                                address7 = TreeNode(self._input[index5:index5], index5, [])
                                self._offset = index5
                            if address7 is not FAILURE:
                                elements1.append(address7)
                                address8 = FAILURE
                                # required 'FROM ...' speaker line
                                address8 = self._read_speech_from()
                                if address8 is not FAILURE:
                                    elements1.append(address8)
                                    address9 = FAILURE
                                    # one-or-more child elements (remaining0 starts at 1)
                                    remaining0, index6, elements2, address10 = 1, self._offset, [], True
                                    while address10 is not FAILURE:
                                        address10 = self._read_speech_hier_block_element()
                                        if address10 is not FAILURE:
                                            elements2.append(address10)
                                            remaining0 -= 1
                                    if remaining0 <= 0:
                                        address9 = TreeNode(self._input[index6:self._offset], index6, elements2)
                                        self._offset = self._offset
                                    else:
                                        address9 = FAILURE
                                    if address9 is not FAILURE:
                                        elements1.append(address9)
                                        address11 = FAILURE
                                        # body must be closed by a dedent
                                        address11 = self._read_dedent()
                                        if address11 is not FAILURE:
                                            elements1.append(address11)
                                        else:
                                            elements1 = None
                                            self._offset = index4
                                    else:
                                        elements1 = None
                                        self._offset = index4
                                else:
                                    elements1 = None
                                    self._offset = index4
                            else:
                                elements1 = None
                                self._offset = index4
                        else:
                            elements1 = None
                            self._offset = index4
                        if elements1 is None:
                            address5 = FAILURE
                        else:
                            address5 = TreeNode19(self._input[index4:self._offset], index4, elements1)
                            self._offset = self._offset
                        if address5 is not FAILURE:
                            elements0.append(address5)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode18(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # graft the semantic SpeechGroup mixin onto the node's class
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'SpeechGroup', (cls0, self._types.SpeechGroup), {})
        self._cache['speech_group'][index0] = (address0, self._offset)
        return address0
def _read_speech_from(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_from'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 4
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'FROM':
address1 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'FROM\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_space()
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
remaining0, index2, elements1, address4 = 1, self._offset, [], True
while address4 is not FAILURE:
address4 = self._read_inline()
if address4 is not FAILURE:
elements1.append(address4)
remaining0 -= 1
if remaining0 <= 0:
address3 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address3 = FAILURE
if address3 is not FAILURE:
elements0.append(address3)
address5 = FAILURE
address5 = self._read_eol()
if address5 is not FAILURE:
elements0.append(address5)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode20(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'From', (cls0, self._types.From), {})
self._cache['speech_from'][index0] = (address0, self._offset)
return address0
def _read_speech_container_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_container_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
chunk0, max0 = None, self._offset + 7
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'ADDRESS':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ADDRESS\'')
if address0 is FAILURE:
self._offset = index1
chunk1, max1 = None, self._offset + 11
if max1 <= self._input_size:
chunk1 = self._input[self._offset:max1]
if chunk1 == 'ADJOURNMENT':
address0 = TreeNode(self._input[self._offset:self._offset + 11], self._offset, [])
self._offset = self._offset + 11
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ADJOURNMENT\'')
if address0 is FAILURE:
self._offset = index1
chunk2, max2 = None, self._offset + 20
if max2 <= self._input_size:
chunk2 = self._input[self._offset:max2]
if chunk2 == 'ADMINISTRATIONOFOATH':
address0 = TreeNode(self._input[self._offset:self._offset + 20], self._offset, [])
self._offset = self._offset + 20
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ADMINISTRATIONOFOATH\'')
if address0 is FAILURE:
self._offset = index1
chunk3, max3 = None, self._offset + 13
if max3 <= self._input_size:
chunk3 = self._input[self._offset:max3]
if chunk3 == 'COMMUNICATION':
address0 = TreeNode(self._input[self._offset:self._offset + 13], self._offset, [])
self._offset = self._offset + 13
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'COMMUNICATION\'')
if address0 is FAILURE:
self._offset = index1
chunk4, max4 = None, self._offset + 13
if max4 <= self._input_size:
chunk4 = self._input[self._offset:max4]
if chunk4 == 'DEBATESECTION':
address0 = TreeNode(self._input[self._offset:self._offset + 13], self._offset, [])
self._offset = self._offset + 13
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'DEBATESECTION\'')
if address0 is FAILURE:
self._offset = index1
chunk5, max5 = None, self._offset + 17
if max5 <= self._input_size:
chunk5 = self._input[self._offset:max5]
if chunk5 == 'DECLARATIONOFVOTE':
address0 = TreeNode(self._input[self._offset:self._offset + 17], self._offset, [])
self._offset = self._offset + 17
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'DECLARATIONOFVOTE\'')
if address0 is FAILURE:
self._offset = index1
chunk6, max6 = None, self._offset + 21
if max6 <= self._input_size:
chunk6 = self._input[self._offset:max6]
if chunk6 == 'MINISTERIALSTATEMENTS':
address0 = TreeNode(self._input[self._offset:self._offset + 21], self._offset, [])
self._offset = self._offset + 21
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'MINISTERIALSTATEMENTS\'')
if address0 is FAILURE:
self._offset = index1
chunk7, max7 = None, self._offset + 16
if max7 <= self._input_size:
chunk7 = self._input[self._offset:max7]
if chunk7 == 'NATIONALINTEREST':
address0 = TreeNode(self._input[self._offset:self._offset + 16], self._offset, [])
self._offset = self._offset + 16
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'NATIONALINTEREST\'')
if address0 is FAILURE:
self._offset = index1
chunk8, max8 = None, self._offset + 15
if max8 <= self._input_size:
chunk8 = self._input[self._offset:max8]
if chunk8 == 'NOTICESOFMOTION':
address0 = TreeNode(self._input[self._offset:self._offset + 15], self._offset, [])
self._offset = self._offset + 15
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'NOTICESOFMOTION\'')
if address0 is FAILURE:
self._offset = index1
chunk9, max9 = None, self._offset + 14
if max9 <= self._input_size:
chunk9 = self._input[self._offset:max9]
if chunk9 == 'ORALSTATEMENTS':
address0 = TreeNode(self._input[self._offset:self._offset + 14], self._offset, [])
self._offset = self._offset + 14
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ORALSTATEMENTS\'')
if address0 is FAILURE:
self._offset = index1
chunk10, max10 = None, self._offset + 6
if max10 <= self._input_size:
chunk10 = self._input[self._offset:max10]
if chunk10 == 'PAPERS':
address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
self._offset = self._offset + 6
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PAPERS\'')
if address0 is FAILURE:
self._offset = index1
chunk11, max11 = None, self._offset + 18
if max11 <= self._input_size:
chunk11 = self._input[self._offset:max11]
if chunk11 == 'PERSONALSTATEMENTS':
address0 = TreeNode(self._input[self._offset:self._offset + 18], self._offset, [])
self._offset = self._offset + 18
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PERSONALSTATEMENTS\'')
if address0 is FAILURE:
self._offset = index1
chunk12, max12 = None, self._offset + 9
if max12 <= self._input_size:
chunk12 = self._input[self._offset:max12]
if chunk12 == 'PETITIONS':
address0 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
self._offset = self._offset + 9
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PETITIONS\'')
if address0 is FAILURE:
self._offset = index1
chunk13, max13 = None, self._offset + 12
if max13 <= self._input_size:
chunk13 = self._input[self._offset:max13]
if chunk13 == 'POINTOFORDER':
address0 = TreeNode(self._input[self._offset:self._offset + 12], self._offset, [])
self._offset = self._offset + 12
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'POINTOFORDER\'')
if address0 is FAILURE:
self._offset = index1
chunk14, max14 = None, self._offset + 7
if max14 <= self._input_size:
chunk14 = self._input[self._offset:max14]
if chunk14 == 'PRAYERS':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PRAYERS\'')
if address0 is FAILURE:
self._offset = index1
chunk15, max15 = None, self._offset + 17
if max15 <= self._input_size:
chunk15 = self._input[self._offset:max15]
if chunk15 == 'PROCEDURALMOTIONS':
address0 = TreeNode(self._input[self._offset:self._offset + 17], self._offset, [])
self._offset = self._offset + 17
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PROCEDURALMOTIONS\'')
if address0 is FAILURE:
self._offset = index1
chunk16, max16 = None, self._offset + 9
if max16 <= self._input_size:
chunk16 = self._input[self._offset:max16]
if chunk16 == 'QUESTIONS':
address0 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
self._offset = self._offset + 9
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'QUESTIONS\'')
if address0 is FAILURE:
self._offset = index1
chunk17, max17 = None, self._offset + 11
if max17 <= self._input_size:
chunk17 = self._input[self._offset:max17]
if chunk17 == 'RESOLUTIONS':
address0 = TreeNode(self._input[self._offset:self._offset + 11], self._offset, [])
self._offset = self._offset + 11
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'RESOLUTIONS\'')
if address0 is FAILURE:
self._offset = index1
chunk18, max18 = None, self._offset + 8
if max18 <= self._input_size:
chunk18 = self._input[self._offset:max18]
if chunk18 == 'ROLLCALL':
address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ROLLCALL\'')
if address0 is FAILURE:
self._offset = index1
chunk19, max19 = None, self._offset + 17
if max19 <= self._input_size:
chunk19 = self._input[self._offset:max19]
if chunk19 == 'WRITTENSTATEMENTS':
address0 = TreeNode(self._input[self._offset:self._offset + 17], self._offset, [])
self._offset = self._offset + 17
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'WRITTENSTATEMENTS\'')
if address0 is FAILURE:
self._offset = index1
self._cache['speech_container_name'][index0] = (address0, self._offset)
return address0
def _read_speech_group_name(self):
    """Match one speech-group tag name at the current offset.

    Ordered choice over the literals 'SPEECHGROUP' / 'SPEECH' /
    'QUESTION' / 'ANSWER' — first match wins, and 'SPEECHGROUP' is
    tried before its prefix 'SPEECH' so the longer literal is not
    shadowed. Machine-generated PEG parser method (NOTE(review):
    edit the grammar and regenerate rather than hand-editing this).
    Returns a TreeNode spanning the matched literal, or FAILURE with
    self._offset rewound to the start of the attempt.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['speech_group_name'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1 = self._offset
    # Alternative 1: 'SPEECHGROUP' (11 chars).
    chunk0, max0 = None, self._offset + 11
    if max0 <= self._input_size:
        chunk0 = self._input[self._offset:max0]
    if chunk0 == 'SPEECHGROUP':
        address0 = TreeNode(self._input[self._offset:self._offset + 11], self._offset, [])
        self._offset = self._offset + 11
    else:
        address0 = FAILURE
        # Furthest-failure bookkeeping: remember what was expected at
        # the right-most point reached, for error messages.
        if self._offset > self._failure:
            self._failure = self._offset
            self._expected = []
        if self._offset == self._failure:
            self._expected.append('\'SPEECHGROUP\'')
    if address0 is FAILURE:
        # Backtrack and try alternative 2: 'SPEECH' (6 chars).
        self._offset = index1
        chunk1, max1 = None, self._offset + 6
        if max1 <= self._input_size:
            chunk1 = self._input[self._offset:max1]
        if chunk1 == 'SPEECH':
            address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
            self._offset = self._offset + 6
        else:
            address0 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'SPEECH\'')
        if address0 is FAILURE:
            # Alternative 3: 'QUESTION' (8 chars).
            self._offset = index1
            chunk2, max2 = None, self._offset + 8
            if max2 <= self._input_size:
                chunk2 = self._input[self._offset:max2]
            if chunk2 == 'QUESTION':
                address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
                self._offset = self._offset + 8
            else:
                address0 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'QUESTION\'')
            if address0 is FAILURE:
                # Alternative 4: 'ANSWER' (6 chars).
                self._offset = index1
                chunk3, max3 = None, self._offset + 6
                if max3 <= self._input_size:
                    chunk3 = self._input[self._offset:max3]
                if chunk3 == 'ANSWER':
                    address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
                    self._offset = self._offset + 6
                else:
                    address0 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'ANSWER\'')
                if address0 is FAILURE:
                    # All alternatives failed: restore the offset.
                    self._offset = index1
    self._cache['speech_group_name'][index0] = (address0, self._offset)
    return address0
def _read_preface(self):
    """Parse the ``preface`` rule at the current offset.

    Sequence: a required ``preface_marker``, then zero or more block
    elements, each accepted only while neither a ``preamble_marker``
    nor a ``body_marker`` starts at the current position (two negative
    lookaheads). Machine-generated PEG parser method; results —
    including failures — are memoized per start offset. On success the
    node's class is augmented with ``self._types.Preface``; on failure
    returns FAILURE with ``self._offset`` rewound.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['preface'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_preface_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Negative lookahead: attempt preamble_marker, rewind, and
            # succeed only if it did NOT match.
            index4 = self._offset
            address4 = self._read_preamble_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                # Negative lookahead for body_marker.
                index5 = self._offset
                address5 = self._read_body_marker()
                self._offset = index5
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    address6 = self._read_block_element()
                    if address6 is not FAILURE:
                        elements2.append(address6)
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode22(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode21(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Preface', (cls0, self._types.Preface), {})
    self._cache['preface'][index0] = (address0, self._offset)
    return address0
def _read_preamble(self):
    """Parse the ``preamble`` rule at the current offset.

    Sequence: a required ``preamble_marker``, then zero or more block
    elements, each guarded by a negative lookahead for ``body_marker``.
    Machine-generated PEG parser method; results (including failures)
    are memoized per start offset. On success the node's class is
    augmented with ``self._types.Preamble``; on failure returns FAILURE
    with ``self._offset`` rewound.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['preamble'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_preamble_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Negative lookahead: body_marker must NOT match here.
            index4 = self._offset
            address4 = self._read_body_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                address5 = self._read_block_element()
                if address5 is not FAILURE:
                    elements2.append(address5)
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode24(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode23(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Preamble', (cls0, self._types.Preamble), {})
    self._cache['preamble'][index0] = (address0, self._offset)
    return address0
def _read_body(self):
    """Parse the ``body`` rule at the current offset.

    Sequence: an OPTIONAL ``body_marker`` (an empty TreeNode is
    substituted and the offset restored when it is absent), then zero
    or more ``hier_block_indent`` elements, each guarded by negative
    lookaheads for ``conclusions_marker`` and ``attachment_marker``.
    Machine-generated PEG parser method; memoized per start offset
    (failures included). On success the node's class is augmented with
    ``self._types.Body``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['body'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    # Optional body_marker: a miss yields an empty node, not FAILURE.
    index2 = self._offset
    address1 = self._read_body_marker()
    if address1 is FAILURE:
        address1 = TreeNode(self._input[index2:index2], index2, [])
        self._offset = index2
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index3, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index4, elements2 = self._offset, []
            address4 = FAILURE
            # Negative lookahead: conclusions_marker must NOT match.
            index5 = self._offset
            address4 = self._read_conclusions_marker()
            self._offset = index5
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                # Negative lookahead: attachment_marker must NOT match.
                index6 = self._offset
                address5 = self._read_attachment_marker()
                self._offset = index6
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    address6 = self._read_hier_block_indent()
                    if address6 is not FAILURE:
                        elements2.append(address6)
                    else:
                        elements2 = None
                        self._offset = index4
                else:
                    elements2 = None
                    self._offset = index4
            else:
                elements2 = None
                self._offset = index4
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode26(self._input[index4:self._offset], index4, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode25(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Body', (cls0, self._types.Body), {})
    self._cache['body'][index0] = (address0, self._offset)
    return address0
def _read_mainBody(self):
    """Parse the ``mainBody`` rule at the current offset.

    Structurally identical to ``_read_body`` (optional ``body_marker``
    then zero or more ``hier_block_indent`` elements guarded by
    negative lookaheads for ``conclusions_marker`` and
    ``attachment_marker``), but produces TreeNode27/TreeNode28 nodes
    typed as ``self._types.MainBody`` and uses its own memo cache.
    Machine-generated PEG parser method.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['mainBody'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    # Optional body_marker: a miss yields an empty node, not FAILURE.
    index2 = self._offset
    address1 = self._read_body_marker()
    if address1 is FAILURE:
        address1 = TreeNode(self._input[index2:index2], index2, [])
        self._offset = index2
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index3, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index4, elements2 = self._offset, []
            address4 = FAILURE
            # Negative lookahead: conclusions_marker must NOT match.
            index5 = self._offset
            address4 = self._read_conclusions_marker()
            self._offset = index5
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                # Negative lookahead: attachment_marker must NOT match.
                index6 = self._offset
                address5 = self._read_attachment_marker()
                self._offset = index6
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    address6 = self._read_hier_block_indent()
                    if address6 is not FAILURE:
                        elements2.append(address6)
                    else:
                        elements2 = None
                        self._offset = index4
                else:
                    elements2 = None
                    self._offset = index4
            else:
                elements2 = None
                self._offset = index4
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode28(self._input[index4:self._offset], index4, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode27(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'MainBody', (cls0, self._types.MainBody), {})
    self._cache['mainBody'][index0] = (address0, self._offset)
    return address0
def _read_debateBody(self):
    """Parse the ``debateBody`` rule at the current offset.

    Same shape as ``_read_body``/``_read_mainBody`` (optional
    ``body_marker``, then a guarded zero-or-more repetition), but the
    repeated element is ``speech_container_indent`` rather than
    ``hier_block_indent``, and the result is typed as
    ``self._types.DebateBody``. Machine-generated PEG parser method;
    memoized per start offset (failures included).
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['debateBody'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    # Optional body_marker: a miss yields an empty node, not FAILURE.
    index2 = self._offset
    address1 = self._read_body_marker()
    if address1 is FAILURE:
        address1 = TreeNode(self._input[index2:index2], index2, [])
        self._offset = index2
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index3, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index4, elements2 = self._offset, []
            address4 = FAILURE
            # Negative lookahead: conclusions_marker must NOT match.
            index5 = self._offset
            address4 = self._read_conclusions_marker()
            self._offset = index5
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                # Negative lookahead: attachment_marker must NOT match.
                index6 = self._offset
                address5 = self._read_attachment_marker()
                self._offset = index6
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    address6 = self._read_speech_container_indent()
                    if address6 is not FAILURE:
                        elements2.append(address6)
                    else:
                        elements2 = None
                        self._offset = index4
                else:
                    elements2 = None
                    self._offset = index4
            else:
                elements2 = None
                self._offset = index4
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode30(self._input[index4:self._offset], index4, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode29(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'DebateBody', (cls0, self._types.DebateBody), {})
    self._cache['debateBody'][index0] = (address0, self._offset)
    return address0
def _read_conclusions(self):
    """Parse the ``conclusions`` rule at the current offset.

    Sequence: a required ``conclusions_marker``, then zero or more
    block elements, each guarded by a negative lookahead for
    ``attachment_marker``. Machine-generated PEG parser method;
    memoized per start offset (failures included). On success the
    node's class is augmented with ``self._types.Conclusions``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['conclusions'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_conclusions_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Negative lookahead: attachment_marker must NOT match.
            index4 = self._offset
            address4 = self._read_attachment_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                address5 = self._read_block_element()
                if address5 is not FAILURE:
                    elements2.append(address5)
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode32(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode31(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Conclusions', (cls0, self._types.Conclusions), {})
    self._cache['conclusions'][index0] = (address0, self._offset)
    return address0
def _read_introduction(self):
    """Parse the ``introduction`` rule at the current offset.

    Sequence: a required ``introduction_marker``, then zero or more
    ``hier_block_indent`` elements, each guarded by SEVEN negative
    lookaheads — the element is accepted only while none of
    ``background_marker``, ``arguments_marker``, ``remedies_marker``,
    ``motivation_marker``, ``decision_marker``, ``conclusions_marker``
    or ``attachment_marker`` starts at the current position (i.e. stop
    at the next section of the judgment-like document). Machine-
    generated PEG parser method; memoized per start offset (failures
    included). On success the node's class is augmented with
    ``self._types.Introduction``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['introduction'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_introduction_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Each lookahead below: attempt the marker, rewind the
            # offset, and succeed only if it did NOT match.
            index4 = self._offset
            address4 = self._read_background_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                index5 = self._offset
                address5 = self._read_arguments_marker()
                self._offset = index5
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    index6 = self._offset
                    address6 = self._read_remedies_marker()
                    self._offset = index6
                    if address6 is FAILURE:
                        address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address6 = FAILURE
                    if address6 is not FAILURE:
                        elements2.append(address6)
                        address7 = FAILURE
                        index7 = self._offset
                        address7 = self._read_motivation_marker()
                        self._offset = index7
                        if address7 is FAILURE:
                            address7 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address7 = FAILURE
                        if address7 is not FAILURE:
                            elements2.append(address7)
                            address8 = FAILURE
                            index8 = self._offset
                            address8 = self._read_decision_marker()
                            self._offset = index8
                            if address8 is FAILURE:
                                address8 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address8 = FAILURE
                            if address8 is not FAILURE:
                                elements2.append(address8)
                                address9 = FAILURE
                                index9 = self._offset
                                address9 = self._read_conclusions_marker()
                                self._offset = index9
                                if address9 is FAILURE:
                                    address9 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                    self._offset = self._offset
                                else:
                                    address9 = FAILURE
                                if address9 is not FAILURE:
                                    elements2.append(address9)
                                    address10 = FAILURE
                                    index10 = self._offset
                                    address10 = self._read_attachment_marker()
                                    self._offset = index10
                                    if address10 is FAILURE:
                                        address10 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                        self._offset = self._offset
                                    else:
                                        address10 = FAILURE
                                    if address10 is not FAILURE:
                                        elements2.append(address10)
                                        address11 = FAILURE
                                        address11 = self._read_hier_block_indent()
                                        if address11 is not FAILURE:
                                            elements2.append(address11)
                                        else:
                                            elements2 = None
                                            self._offset = index3
                                    else:
                                        elements2 = None
                                        self._offset = index3
                                else:
                                    elements2 = None
                                    self._offset = index3
                            else:
                                elements2 = None
                                self._offset = index3
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode34(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode33(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Introduction', (cls0, self._types.Introduction), {})
    self._cache['introduction'][index0] = (address0, self._offset)
    return address0
def _read_background(self):
    """Parse the ``background`` rule at the current offset.

    Sequence: a required ``background_marker``, then zero or more
    ``hier_block_indent`` elements, each guarded by SIX negative
    lookaheads (``arguments_marker``, ``remedies_marker``,
    ``motivation_marker``, ``decision_marker``, ``conclusions_marker``,
    ``attachment_marker``) so the repetition stops at the next
    section. Machine-generated PEG parser method; memoized per start
    offset (failures included). On success the node's class is
    augmented with ``self._types.Background``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['background'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_background_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Each lookahead: attempt marker, rewind, succeed iff it
            # did NOT match.
            index4 = self._offset
            address4 = self._read_arguments_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                index5 = self._offset
                address5 = self._read_remedies_marker()
                self._offset = index5
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    index6 = self._offset
                    address6 = self._read_motivation_marker()
                    self._offset = index6
                    if address6 is FAILURE:
                        address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address6 = FAILURE
                    if address6 is not FAILURE:
                        elements2.append(address6)
                        address7 = FAILURE
                        index7 = self._offset
                        address7 = self._read_decision_marker()
                        self._offset = index7
                        if address7 is FAILURE:
                            address7 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address7 = FAILURE
                        if address7 is not FAILURE:
                            elements2.append(address7)
                            address8 = FAILURE
                            index8 = self._offset
                            address8 = self._read_conclusions_marker()
                            self._offset = index8
                            if address8 is FAILURE:
                                address8 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address8 = FAILURE
                            if address8 is not FAILURE:
                                elements2.append(address8)
                                address9 = FAILURE
                                index9 = self._offset
                                address9 = self._read_attachment_marker()
                                self._offset = index9
                                if address9 is FAILURE:
                                    address9 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                    self._offset = self._offset
                                else:
                                    address9 = FAILURE
                                if address9 is not FAILURE:
                                    elements2.append(address9)
                                    address10 = FAILURE
                                    address10 = self._read_hier_block_indent()
                                    if address10 is not FAILURE:
                                        elements2.append(address10)
                                    else:
                                        elements2 = None
                                        self._offset = index3
                                else:
                                    elements2 = None
                                    self._offset = index3
                            else:
                                elements2 = None
                                self._offset = index3
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode36(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode35(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Background', (cls0, self._types.Background), {})
    self._cache['background'][index0] = (address0, self._offset)
    return address0
def _read_arguments(self):
    """Parse the ``arguments`` rule at the current offset.

    Sequence: an OPTIONAL ``arguments_marker`` (an empty TreeNode is
    substituted when absent), then zero or more ``hier_block_indent``
    elements, each guarded by FIVE negative lookaheads
    (``remedies_marker``, ``motivation_marker``, ``decision_marker``,
    ``conclusions_marker``, ``attachment_marker``). Machine-generated
    PEG parser method; memoized per start offset (failures included).
    On success the node's class is augmented with
    ``self._types.Arguments``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['arguments'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    # Optional arguments_marker: a miss yields an empty node.
    index2 = self._offset
    address1 = self._read_arguments_marker()
    if address1 is FAILURE:
        address1 = TreeNode(self._input[index2:index2], index2, [])
        self._offset = index2
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index3, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index4, elements2 = self._offset, []
            address4 = FAILURE
            # Each lookahead: attempt marker, rewind, succeed iff it
            # did NOT match.
            index5 = self._offset
            address4 = self._read_remedies_marker()
            self._offset = index5
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                index6 = self._offset
                address5 = self._read_motivation_marker()
                self._offset = index6
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    index7 = self._offset
                    address6 = self._read_decision_marker()
                    self._offset = index7
                    if address6 is FAILURE:
                        address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address6 = FAILURE
                    if address6 is not FAILURE:
                        elements2.append(address6)
                        address7 = FAILURE
                        index8 = self._offset
                        address7 = self._read_conclusions_marker()
                        self._offset = index8
                        if address7 is FAILURE:
                            address7 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address7 = FAILURE
                        if address7 is not FAILURE:
                            elements2.append(address7)
                            address8 = FAILURE
                            index9 = self._offset
                            address8 = self._read_attachment_marker()
                            self._offset = index9
                            if address8 is FAILURE:
                                address8 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address8 = FAILURE
                            if address8 is not FAILURE:
                                elements2.append(address8)
                                address9 = FAILURE
                                address9 = self._read_hier_block_indent()
                                if address9 is not FAILURE:
                                    elements2.append(address9)
                                else:
                                    elements2 = None
                                    self._offset = index4
                            else:
                                elements2 = None
                                self._offset = index4
                        else:
                            elements2 = None
                            self._offset = index4
                    else:
                        elements2 = None
                        self._offset = index4
                else:
                    elements2 = None
                    self._offset = index4
            else:
                elements2 = None
                self._offset = index4
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode38(self._input[index4:self._offset], index4, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode37(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Arguments', (cls0, self._types.Arguments), {})
    self._cache['arguments'][index0] = (address0, self._offset)
    return address0
def _read_remedies(self):
    """Parse the ``remedies`` rule at the current offset.

    Sequence: a required ``remedies_marker``, then zero or more
    ``hier_block_indent`` elements, each guarded by FOUR negative
    lookaheads (``motivation_marker``, ``decision_marker``,
    ``conclusions_marker``, ``attachment_marker``). Machine-generated
    PEG parser method; memoized per start offset (failures included).
    On success the node's class is augmented with
    ``self._types.Remedies``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['remedies'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_remedies_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Each lookahead: attempt marker, rewind, succeed iff it
            # did NOT match.
            index4 = self._offset
            address4 = self._read_motivation_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                index5 = self._offset
                address5 = self._read_decision_marker()
                self._offset = index5
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    index6 = self._offset
                    address6 = self._read_conclusions_marker()
                    self._offset = index6
                    if address6 is FAILURE:
                        address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address6 = FAILURE
                    if address6 is not FAILURE:
                        elements2.append(address6)
                        address7 = FAILURE
                        index7 = self._offset
                        address7 = self._read_attachment_marker()
                        self._offset = index7
                        if address7 is FAILURE:
                            address7 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address7 = FAILURE
                        if address7 is not FAILURE:
                            elements2.append(address7)
                            address8 = FAILURE
                            address8 = self._read_hier_block_indent()
                            if address8 is not FAILURE:
                                elements2.append(address8)
                            else:
                                elements2 = None
                                self._offset = index3
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode40(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode39(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Remedies', (cls0, self._types.Remedies), {})
    self._cache['remedies'][index0] = (address0, self._offset)
    return address0
def _read_motivation(self):
    """Parse the ``motivation`` rule at the current offset.

    Sequence: a required ``motivation_marker``, then zero or more
    ``hier_block_indent`` elements, each guarded by THREE negative
    lookaheads (``decision_marker``, ``conclusions_marker``,
    ``attachment_marker``). Machine-generated PEG parser method;
    memoized per start offset (failures included). On success the
    node's class is augmented with ``self._types.Motivation``.
    """
    address0, index0 = FAILURE, self._offset
    # Memoized per start offset (failures are cached too).
    cached = self._cache['motivation'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_motivation_marker()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # remaining0 starts at 0: the repetition also succeeds empty.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            index3, elements2 = self._offset, []
            address4 = FAILURE
            # Each lookahead: attempt marker, rewind, succeed iff it
            # did NOT match.
            index4 = self._offset
            address4 = self._read_decision_marker()
            self._offset = index4
            if address4 is FAILURE:
                address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                self._offset = self._offset
            else:
                address4 = FAILURE
            if address4 is not FAILURE:
                elements2.append(address4)
                address5 = FAILURE
                index5 = self._offset
                address5 = self._read_conclusions_marker()
                self._offset = index5
                if address5 is FAILURE:
                    address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address5 = FAILURE
                if address5 is not FAILURE:
                    elements2.append(address5)
                    address6 = FAILURE
                    index6 = self._offset
                    address6 = self._read_attachment_marker()
                    self._offset = index6
                    if address6 is FAILURE:
                        address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address6 = FAILURE
                    if address6 is not FAILURE:
                        elements2.append(address6)
                        address7 = FAILURE
                        address7 = self._read_hier_block_indent()
                        if address7 is not FAILURE:
                            elements2.append(address7)
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
            else:
                elements2 = None
                self._offset = index3
            if elements2 is None:
                address3 = FAILURE
            else:
                address3 = TreeNode42(self._input[index3:self._offset], index3, elements2)
                self._offset = self._offset
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode41(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    if address0 is not FAILURE:
        # Dynamically mix the semantic node type into the result class.
        cls0 = type(address0)
        address0.__class__ = type(cls0.__name__ + 'Motivation', (cls0, self._types.Motivation), {})
    self._cache['motivation'][index0] = (address0, self._offset)
    return address0
def _read_decision(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['decision'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_decision_marker()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
remaining0, index2, elements1, address3 = 0, self._offset, [], True
while address3 is not FAILURE:
index3, elements2 = self._offset, []
address4 = FAILURE
index4 = self._offset
address4 = self._read_conclusions_marker()
self._offset = index4
if address4 is FAILURE:
address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
self._offset = self._offset
else:
address4 = FAILURE
if address4 is not FAILURE:
elements2.append(address4)
address5 = FAILURE
index5 = self._offset
address5 = self._read_attachment_marker()
self._offset = index5
if address5 is FAILURE:
address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
self._offset = self._offset
else:
address5 = FAILURE
if address5 is not FAILURE:
elements2.append(address5)
address6 = FAILURE
address6 = self._read_hier_block_indent()
if address6 is not FAILURE:
elements2.append(address6)
else:
elements2 = None
self._offset = index3
else:
elements2 = None
self._offset = index3
else:
elements2 = None
self._offset = index3
if elements2 is None:
address3 = FAILURE
else:
address3 = TreeNode44(self._input[index3:self._offset], index3, elements2)
self._offset = self._offset
if address3 is not FAILURE:
elements1.append(address3)
remaining0 -= 1
if remaining0 <= 0:
address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address2 = FAILURE
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode43(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Decision', (cls0, self._types.Decision), {})
self._cache['decision'][index0] = (address0, self._offset)
return address0
def _read_attachments(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['attachments'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
address1 = self._read_attachment()
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'Attachments', (cls0, self._types.Attachments), {})
self._cache['attachments'][index0] = (address0, self._offset)
return address0
    def _read_attachment(self):
        """Parse one 'attachment' node.

        Sequence: attachment marker, optional heading, eol, an optional
        indented body (subheading?, repeated hier_block_elements guarded by a
        negative attachment-marker lookahead, optional nested attachments,
        dedent), then zero or more trailing hier_block_indent groups, also
        guarded by the same lookahead.  Results are memoised per start offset
        in self._cache['attachment'].
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['attachment'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # 1. ATTACHMENT / APPENDIX / SCHEDULE / ANNEXURE keyword.
        address1 = self._read_attachment_marker()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # 2. Optional heading; an empty TreeNode stands in when absent.
            index2 = self._offset
            address2 = self._read_attachment_heading()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # 3. End of the marker line.
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # 4. Optional indented body (indent ... dedent) as a
                    #    sub-sequence; index3 lets us rewind the whole group.
                    index3 = self._offset
                    index4, elements1 = self._offset, []
                    address5 = FAILURE
                    address5 = self._read_indent()
                    if address5 is not FAILURE:
                        elements1.append(address5)
                        address6 = FAILURE
                        # Optional subheading inside the body.
                        index5 = self._offset
                        address6 = self._read_subheading()
                        if address6 is FAILURE:
                            address6 = TreeNode(self._input[index5:index5], index5, [])
                            self._offset = index5
                        if address6 is not FAILURE:
                            elements1.append(address6)
                            address7 = FAILURE
                            # Zero or more body elements, each preceded by a
                            # negative lookahead for an attachment marker.
                            remaining0, index6, elements2, address8 = 0, self._offset, [], True
                            while address8 is not FAILURE:
                                index7, elements3 = self._offset, []
                                address9 = FAILURE
                                # Lookahead: probe without consuming input.
                                index8 = self._offset
                                address9 = self._read_attachment_marker()
                                self._offset = index8
                                if address9 is FAILURE:
                                    address9 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                    self._offset = self._offset
                                else:
                                    address9 = FAILURE
                                if address9 is not FAILURE:
                                    elements3.append(address9)
                                    address10 = FAILURE
                                    address10 = self._read_hier_block_element()
                                    if address10 is not FAILURE:
                                        elements3.append(address10)
                                    else:
                                        elements3 = None
                                        self._offset = index7
                                else:
                                    elements3 = None
                                    self._offset = index7
                                if elements3 is None:
                                    address8 = FAILURE
                                else:
                                    address8 = TreeNode47(self._input[index7:self._offset], index7, elements3)
                                    self._offset = self._offset
                                if address8 is not FAILURE:
                                    elements2.append(address8)
                                    remaining0 -= 1
                            if remaining0 <= 0:
                                address7 = TreeNode(self._input[index6:self._offset], index6, elements2)
                                self._offset = self._offset
                            else:
                                address7 = FAILURE
                            if address7 is not FAILURE:
                                elements1.append(address7)
                                address11 = FAILURE
                                # Optional nested attachments inside the body.
                                index9 = self._offset
                                address11 = self._read_attachments()
                                if address11 is FAILURE:
                                    address11 = TreeNode(self._input[index9:index9], index9, [])
                                    self._offset = index9
                                if address11 is not FAILURE:
                                    elements1.append(address11)
                                    address12 = FAILURE
                                    address12 = self._read_dedent()
                                    if address12 is not FAILURE:
                                        elements1.append(address12)
                                    else:
                                        elements1 = None
                                        self._offset = index4
                                else:
                                    elements1 = None
                                    self._offset = index4
                            else:
                                elements1 = None
                                self._offset = index4
                        else:
                            elements1 = None
                            self._offset = index4
                    else:
                        elements1 = None
                        self._offset = index4
                    if elements1 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode46(self._input[index4:self._offset], index4, elements1)
                        self._offset = self._offset
                    # The whole body group is optional: substitute an empty
                    # node and rewind when it did not match.
                    if address4 is FAILURE:
                        address4 = TreeNode(self._input[index3:index3], index3, [])
                        self._offset = index3
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address13 = FAILURE
                        # 5. Zero or more trailing hier_block_indent groups,
                        #    again guarded by the attachment-marker lookahead.
                        remaining1, index10, elements4, address14 = 0, self._offset, [], True
                        while address14 is not FAILURE:
                            index11, elements5 = self._offset, []
                            address15 = FAILURE
                            index12 = self._offset
                            address15 = self._read_attachment_marker()
                            self._offset = index12
                            if address15 is FAILURE:
                                address15 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address15 = FAILURE
                            if address15 is not FAILURE:
                                elements5.append(address15)
                                address16 = FAILURE
                                address16 = self._read_hier_block_indent()
                                if address16 is not FAILURE:
                                    elements5.append(address16)
                                else:
                                    elements5 = None
                                    self._offset = index11
                            else:
                                elements5 = None
                                self._offset = index11
                            if elements5 is None:
                                address14 = FAILURE
                            else:
                                address14 = TreeNode48(self._input[index11:self._offset], index11, elements5)
                                self._offset = self._offset
                            if address14 is not FAILURE:
                                elements4.append(address14)
                                remaining1 -= 1
                        if remaining1 <= 0:
                            address13 = TreeNode(self._input[index10:self._offset], index10, elements4)
                            self._offset = self._offset
                        else:
                            address13 = FAILURE
                        if address13 is not FAILURE:
                            elements0.append(address13)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode45(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the semantic node type declared by the grammar actions.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Attachment', (cls0, self._types.Attachment), {})
        self._cache['attachment'][index0] = (address0, self._offset)
        return address0
def _read_attachment_heading(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['attachment_heading'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_space()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
remaining0, index2, elements1, address3 = 0, self._offset, [], True
while address3 is not FAILURE:
address3 = self._read_inline()
if address3 is not FAILURE:
elements1.append(address3)
remaining0 -= 1
if remaining0 <= 0:
address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address2 = FAILURE
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode49(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'AttachmentHeading', (cls0, self._types.AttachmentHeading), {})
self._cache['attachment_heading'][index0] = (address0, self._offset)
return address0
def _read_body_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['body_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 4
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'BODY':
address1 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'BODY\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode50(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['body_marker'][index0] = (address0, self._offset)
return address0
def _read_conclusions_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['conclusions_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 11
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'CONCLUSIONS':
address1 = TreeNode(self._input[self._offset:self._offset + 11], self._offset, [])
self._offset = self._offset + 11
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'CONCLUSIONS\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode51(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['conclusions_marker'][index0] = (address0, self._offset)
return address0
def _read_preamble_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['preamble_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 8
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'PREAMBLE':
address1 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PREAMBLE\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode52(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['preamble_marker'][index0] = (address0, self._offset)
return address0
def _read_preface_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['preface_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 7
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'PREFACE':
address1 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'PREFACE\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode53(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['preface_marker'][index0] = (address0, self._offset)
return address0
def _read_introduction_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['introduction_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 12
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'INTRODUCTION':
address1 = TreeNode(self._input[self._offset:self._offset + 12], self._offset, [])
self._offset = self._offset + 12
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'INTRODUCTION\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode54(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['introduction_marker'][index0] = (address0, self._offset)
return address0
def _read_background_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['background_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 10
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'BACKGROUND':
address1 = TreeNode(self._input[self._offset:self._offset + 10], self._offset, [])
self._offset = self._offset + 10
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'BACKGROUND\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode55(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['background_marker'][index0] = (address0, self._offset)
return address0
def _read_arguments_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['arguments_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 9
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'ARGUMENTS':
address1 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
self._offset = self._offset + 9
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ARGUMENTS\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode56(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['arguments_marker'][index0] = (address0, self._offset)
return address0
def _read_remedies_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['remedies_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 8
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'REMEDIES':
address1 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'REMEDIES\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode57(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['remedies_marker'][index0] = (address0, self._offset)
return address0
def _read_motivation_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['motivation_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 10
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'MOTIVATION':
address1 = TreeNode(self._input[self._offset:self._offset + 10], self._offset, [])
self._offset = self._offset + 10
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'MOTIVATION\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode58(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['motivation_marker'][index0] = (address0, self._offset)
return address0
def _read_decision_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['decision_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
chunk0, max0 = None, self._offset + 8
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'DECISION':
address1 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'DECISION\'')
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
address2 = self._read_eol()
if address2 is not FAILURE:
elements0.append(address2)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode59(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
self._cache['decision_marker'][index0] = (address0, self._offset)
return address0
def _read_attachment_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['attachment_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
chunk0, max0 = None, self._offset + 10
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'ATTACHMENT':
address0 = TreeNode(self._input[self._offset:self._offset + 10], self._offset, [])
self._offset = self._offset + 10
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ATTACHMENT\'')
if address0 is FAILURE:
self._offset = index1
chunk1, max1 = None, self._offset + 8
if max1 <= self._input_size:
chunk1 = self._input[self._offset:max1]
if chunk1 == 'APPENDIX':
address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'APPENDIX\'')
if address0 is FAILURE:
self._offset = index1
chunk2, max2 = None, self._offset + 8
if max2 <= self._input_size:
chunk2 = self._input[self._offset:max2]
if chunk2 == 'SCHEDULE':
address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SCHEDULE\'')
if address0 is FAILURE:
self._offset = index1
chunk3, max3 = None, self._offset + 8
if max3 <= self._input_size:
chunk3 = self._input[self._offset:max3]
if chunk3 == 'ANNEXURE':
address0 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
self._offset = self._offset + 8
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'ANNEXURE\'')
if address0 is FAILURE:
self._offset = index1
self._cache['attachment_marker'][index0] = (address0, self._offset)
return address0
def _read_block_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['block_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_nested_block_element()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_block_elements()
if address0 is FAILURE:
self._offset = index1
self._cache['block_element'][index0] = (address0, self._offset)
return address0
def _read_nested_block_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['nested_block_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_indent()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
remaining0, index2, elements1, address3 = 1, self._offset, [], True
while address3 is not FAILURE:
address3 = self._read_block_element()
if address3 is not FAILURE:
elements1.append(address3)
remaining0 -= 1
if remaining0 <= 0:
address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address2 = FAILURE
if address2 is not FAILURE:
elements0.append(address2)
address4 = FAILURE
address4 = self._read_dedent()
if address4 is not FAILURE:
elements0.append(address4)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode60(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'NestedBlockElement', (cls0, self._types.NestedBlockElement), {})
self._cache['nested_block_element'][index0] = (address0, self._offset)
return address0
def _read_block_elements(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['block_elements'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_block_list()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_bullet_list()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_table()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_longtitle()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_footnote()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_block_quote()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_p()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_line()
if address0 is FAILURE:
self._offset = index1
self._cache['block_elements'][index0] = (address0, self._offset)
return address0
def _read_speech_block_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_block_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_nested_speech_block_element()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_speech_block_elements()
if address0 is FAILURE:
self._offset = index1
self._cache['speech_block_element'][index0] = (address0, self._offset)
return address0
def _read_nested_speech_block_element(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['nested_speech_block_element'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_indent()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
remaining0, index2, elements1, address3 = 1, self._offset, [], True
while address3 is not FAILURE:
address3 = self._read_speech_block_element()
if address3 is not FAILURE:
elements1.append(address3)
remaining0 -= 1
if remaining0 <= 0:
address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
self._offset = self._offset
else:
address2 = FAILURE
if address2 is not FAILURE:
elements0.append(address2)
address4 = FAILURE
address4 = self._read_dedent()
if address4 is not FAILURE:
elements0.append(address4)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode61(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'NestedAltBlockElement', (cls0, self._types.NestedAltBlockElement), {})
self._cache['nested_speech_block_element'][index0] = (address0, self._offset)
return address0
def _read_speech_block_elements(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_block_elements'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_speech_block()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_block_list()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_bullet_list()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_table()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_footnote()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_p()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_line()
if address0 is FAILURE:
self._offset = index1
self._cache['speech_block_elements'][index0] = (address0, self._offset)
return address0
def _read_speech_block(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_block'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1, elements0 = self._offset, []
address1 = FAILURE
address1 = self._read_speech_block_name()
if address1 is not FAILURE:
elements0.append(address1)
address2 = FAILURE
index2 = self._offset
address2 = self._read_block_attrs()
if address2 is FAILURE:
address2 = TreeNode(self._input[index2:index2], index2, [])
self._offset = index2
if address2 is not FAILURE:
elements0.append(address2)
address3 = FAILURE
address3 = self._read_space()
if address3 is not FAILURE:
elements0.append(address3)
address4 = FAILURE
remaining0, index3, elements1, address5 = 1, self._offset, [], True
while address5 is not FAILURE:
address5 = self._read_inline()
if address5 is not FAILURE:
elements1.append(address5)
remaining0 -= 1
if remaining0 <= 0:
address4 = TreeNode(self._input[index3:self._offset], index3, elements1)
self._offset = self._offset
else:
address4 = FAILURE
if address4 is not FAILURE:
elements0.append(address4)
address6 = FAILURE
address6 = self._read_eol()
if address6 is not FAILURE:
elements0.append(address6)
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
else:
elements0 = None
self._offset = index1
if elements0 is None:
address0 = FAILURE
else:
address0 = TreeNode62(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
if address0 is not FAILURE:
cls0 = type(address0)
address0.__class__ = type(cls0.__name__ + 'SpeechBlock', (cls0, self._types.SpeechBlock), {})
self._cache['speech_block'][index0] = (address0, self._offset)
return address0
def _read_speech_block_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['speech_block_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
chunk0, max0 = None, self._offset + 5
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'SCENE':
address0 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
self._offset = self._offset + 5
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SCENE\'')
if address0 is FAILURE:
self._offset = index1
chunk1, max1 = None, self._offset + 9
if max1 <= self._input_size:
chunk1 = self._input[self._offset:max1]
if chunk1 == 'NARRATIVE':
address0 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
self._offset = self._offset + 9
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'NARRATIVE\'')
if address0 is FAILURE:
self._offset = index1
chunk2, max2 = None, self._offset + 7
if max2 <= self._input_size:
chunk2 = self._input[self._offset:max2]
if chunk2 == 'SUMMARY':
address0 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
self._offset = self._offset + 7
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'SUMMARY\'')
if address0 is FAILURE:
self._offset = index1
self._cache['speech_block_name'][index0] = (address0, self._offset)
return address0
    def _read_longtitle(self):
        """Parse rule ``longtitle``: 'LONGTITLE' (space inline+)? eol.

        On success the node is retagged with the ``Longtitle`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['longtitle'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'LONGTITLE'.
        chunk0, max0 = None, self._offset + 9
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'LONGTITLE':
            address1 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
            self._offset = self._offset + 9
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'LONGTITLE\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional group: a space followed by one or more inline elements.
            index2 = self._offset
            index3, elements1 = self._offset, []
            address3 = FAILURE
            address3 = self._read_space()
            if address3 is not FAILURE:
                elements1.append(address3)
                address4 = FAILURE
                # inline+ : at least one inline element (remaining0 counts
                # down the required minimum of 1).
                remaining0, index4, elements2, address5 = 1, self._offset, [], True
                while address5 is not FAILURE:
                    address5 = self._read_inline()
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address4 = TreeNode(self._input[index4:self._offset], index4, elements2)
                    self._offset = self._offset
                else:
                    address4 = FAILURE
                if address4 is not FAILURE:
                    elements1.append(address4)
                else:
                    elements1 = None
                    self._offset = index3
            else:
                elements1 = None
                self._offset = index3
            if elements1 is None:
                address2 = FAILURE
            else:
                address2 = TreeNode64(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            if address2 is FAILURE:
                # Group absent: substitute an empty placeholder node and
                # consume no input.
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address6 = FAILURE
                # Mandatory end-of-line terminator.
                address6 = self._read_eol()
                if address6 is not FAILURE:
                    elements0.append(address6)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode63(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic Longtitle type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Longtitle', (cls0, self._types.Longtitle), {})
        self._cache['longtitle'][index0] = (address0, self._offset)
        return address0
    def _read_subheading(self):
        """Parse rule ``subheading``: 'SUBHEADING' (space inline+)? eol.

        On success the node is retagged with the ``Subheading`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['subheading'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'SUBHEADING'.
        chunk0, max0 = None, self._offset + 10
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'SUBHEADING':
            address1 = TreeNode(self._input[self._offset:self._offset + 10], self._offset, [])
            self._offset = self._offset + 10
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'SUBHEADING\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional group: a space followed by one or more inline elements.
            index2 = self._offset
            index3, elements1 = self._offset, []
            address3 = FAILURE
            address3 = self._read_space()
            if address3 is not FAILURE:
                elements1.append(address3)
                address4 = FAILURE
                # inline+ : at least one inline element.
                remaining0, index4, elements2, address5 = 1, self._offset, [], True
                while address5 is not FAILURE:
                    address5 = self._read_inline()
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address4 = TreeNode(self._input[index4:self._offset], index4, elements2)
                    self._offset = self._offset
                else:
                    address4 = FAILURE
                if address4 is not FAILURE:
                    elements1.append(address4)
                else:
                    elements1 = None
                    self._offset = index3
            else:
                elements1 = None
                self._offset = index3
            if elements1 is None:
                address2 = FAILURE
            else:
                address2 = TreeNode66(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            if address2 is FAILURE:
                # Group absent: empty placeholder node, no input consumed.
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address6 = FAILURE
                # Mandatory end-of-line terminator.
                address6 = self._read_eol()
                if address6 is not FAILURE:
                    elements0.append(address6)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode65(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic Subheading type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Subheading', (cls0, self._types.Subheading), {})
        self._cache['subheading'][index0] = (address0, self._offset)
        return address0
    def _read_crossheading(self):
        """Parse rule ``crossheading``: 'CROSSHEADING' (space inline+)? eol.

        On success the node is retagged with the ``Crossheading`` mixin
        from ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['crossheading'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'CROSSHEADING'.
        chunk0, max0 = None, self._offset + 12
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'CROSSHEADING':
            address1 = TreeNode(self._input[self._offset:self._offset + 12], self._offset, [])
            self._offset = self._offset + 12
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'CROSSHEADING\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional group: a space followed by one or more inline elements.
            index2 = self._offset
            index3, elements1 = self._offset, []
            address3 = FAILURE
            address3 = self._read_space()
            if address3 is not FAILURE:
                elements1.append(address3)
                address4 = FAILURE
                # inline+ : at least one inline element.
                remaining0, index4, elements2, address5 = 1, self._offset, [], True
                while address5 is not FAILURE:
                    address5 = self._read_inline()
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address4 = TreeNode(self._input[index4:self._offset], index4, elements2)
                    self._offset = self._offset
                else:
                    address4 = FAILURE
                if address4 is not FAILURE:
                    elements1.append(address4)
                else:
                    elements1 = None
                    self._offset = index3
            else:
                elements1 = None
                self._offset = index3
            if elements1 is None:
                address2 = FAILURE
            else:
                address2 = TreeNode68(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            if address2 is FAILURE:
                # Group absent: empty placeholder node, no input consumed.
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address6 = FAILURE
                # Mandatory end-of-line terminator.
                address6 = self._read_eol()
                if address6 is not FAILURE:
                    elements0.append(address6)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode67(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic Crossheading type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Crossheading', (cls0, self._types.Crossheading), {})
        self._cache['crossheading'][index0] = (address0, self._offset)
        return address0
    def _read_p(self):
        """Parse rule ``p``: 'P' block_attrs? space inline+ eol.

        A paragraph marker line.  On success the node is retagged with the
        ``P`` mixin from ``self._types``; result is memoised per start
        offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['p'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'P'.
        chunk0, max0 = None, self._offset + 1
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'P':
            address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
            self._offset = self._offset + 1
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'P\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional block attributes (classes / key-value pairs).
            index2 = self._offset
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # Mandatory space separating marker from content.
                address3 = self._read_space()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # inline+ : at least one inline element of content.
                    remaining0, index3, elements1, address5 = 1, self._offset, [], True
                    while address5 is not FAILURE:
                        address5 = self._read_inline()
                        if address5 is not FAILURE:
                            elements1.append(address5)
                            remaining0 -= 1
                    if remaining0 <= 0:
                        address4 = TreeNode(self._input[index3:self._offset], index3, elements1)
                        self._offset = self._offset
                    else:
                        address4 = FAILURE
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address6 = FAILURE
                        address6 = self._read_eol()
                        if address6 is not FAILURE:
                            elements0.append(address6)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode69(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic P type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'P', (cls0, self._types.P), {})
        self._cache['p'][index0] = (address0, self._offset)
        return address0
    def _read_line(self):
        """Parse rule ``line``: !dedent inline+ eol.

        A line of inline content that must not begin at a dedent token.
        On success the node is retagged with the ``Line`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['line'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Negative lookahead (!dedent): attempt dedent, always restore the
        # offset, then invert the result — succeed with an empty node only
        # when dedent did NOT match here.
        index2 = self._offset
        address1 = self._read_dedent()
        self._offset = index2
        if address1 is FAILURE:
            address1 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
            self._offset = self._offset
        else:
            address1 = FAILURE
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # inline+ : at least one inline element.
            remaining0, index3, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                address3 = self._read_inline()
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address4 = FAILURE
                address4 = self._read_eol()
                if address4 is not FAILURE:
                    elements0.append(address4)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode70(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic Line type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Line', (cls0, self._types.Line), {})
        self._cache['line'][index0] = (address0, self._offset)
        return address0
    def _read_block_list(self):
        """Parse rule ``block_list``:

            ('BLOCKLIST' / 'ITEMS') block_attrs? eol
            indent block_list_intro? block_list_item+ block_list_wrapup?
            dedent

        On success the node is retagged with the ``BlockList`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_list'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Keyword: 'BLOCKLIST', or the synonym 'ITEMS' as a fallback.
        index2 = self._offset
        chunk0, max0 = None, self._offset + 9
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'BLOCKLIST':
            address1 = TreeNode(self._input[self._offset:self._offset + 9], self._offset, [])
            self._offset = self._offset + 9
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'BLOCKLIST\'')
        if address1 is FAILURE:
            self._offset = index2
            chunk1, max1 = None, self._offset + 5
            if max1 <= self._input_size:
                chunk1 = self._input[self._offset:max1]
            if chunk1 == 'ITEMS':
                address1 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
                self._offset = self._offset + 5
            else:
                address1 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'ITEMS\'')
            if address1 is FAILURE:
                self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional block attributes.
            index3 = self._offset
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # Indented body of the list.
                    address4 = self._read_indent()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        # Optional intro text before the items.
                        index4 = self._offset
                        address5 = self._read_block_list_intro()
                        if address5 is FAILURE:
                            address5 = TreeNode(self._input[index4:index4], index4, [])
                            self._offset = index4
                        if address5 is not FAILURE:
                            elements0.append(address5)
                            address6 = FAILURE
                            # block_list_item+ : at least one item.
                            remaining0, index5, elements1, address7 = 1, self._offset, [], True
                            while address7 is not FAILURE:
                                address7 = self._read_block_list_item()
                                if address7 is not FAILURE:
                                    elements1.append(address7)
                                    remaining0 -= 1
                            if remaining0 <= 0:
                                address6 = TreeNode(self._input[index5:self._offset], index5, elements1)
                                self._offset = self._offset
                            else:
                                address6 = FAILURE
                            if address6 is not FAILURE:
                                elements0.append(address6)
                                address8 = FAILURE
                                # Optional wrap-up text after the items.
                                index6 = self._offset
                                address8 = self._read_block_list_wrapup()
                                if address8 is FAILURE:
                                    address8 = TreeNode(self._input[index6:index6], index6, [])
                                    self._offset = index6
                                if address8 is not FAILURE:
                                    elements0.append(address8)
                                    address9 = FAILURE
                                    address9 = self._read_dedent()
                                    if address9 is not FAILURE:
                                        elements0.append(address9)
                                    else:
                                        elements0 = None
                                        self._offset = index1
                                else:
                                    elements0 = None
                                    self._offset = index1
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode71(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic BlockList type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockList', (cls0, self._types.BlockList), {})
        self._cache['block_list'][index0] = (address0, self._offset)
        return address0
    def _read_block_list_intro(self):
        """Parse rule ``block_list_intro``: !'ITEM' line footnote*.

        Introductory text inside a block list — a line that must not start
        with the 'ITEM' keyword, followed by any number of footnotes.  On
        success the node is retagged with the ``BlockListIntro`` mixin;
        result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_list_intro'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Negative lookahead (!'ITEM'): peek at the literal, always restore
        # the offset, then invert — succeed only when 'ITEM' is NOT next.
        index2 = self._offset
        chunk0, max0 = None, self._offset + 4
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'ITEM':
            address1 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
            self._offset = self._offset + 4
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'ITEM\'')
        self._offset = index2
        if address1 is FAILURE:
            address1 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
            self._offset = self._offset
        else:
            address1 = FAILURE
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            address2 = self._read_line()
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # footnote* : zero or more footnotes (minimum 0).
                remaining0, index3, elements1, address4 = 0, self._offset, [], True
                while address4 is not FAILURE:
                    address4 = self._read_footnote()
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index3:self._offset], index3, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode72(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic BlockListIntro type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockListIntro', (cls0, self._types.BlockListIntro), {})
        self._cache['block_list_intro'][index0] = (address0, self._offset)
        return address0
    def _read_block_list_wrapup(self):
        """Parse rule ``block_list_wrapup``: line footnote*.

        Trailing wrap-up text after the items of a block list.  On success
        the node is retagged with the ``BlockListWrapUp`` mixin; result is
        memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_list_wrapup'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_line()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # footnote* : zero or more footnotes (minimum 0).
            remaining0, index2, elements1, address3 = 0, self._offset, [], True
            while address3 is not FAILURE:
                address3 = self._read_footnote()
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode73(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic BlockListWrapUp type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockListWrapUp', (cls0, self._types.BlockListWrapUp), {})
        self._cache['block_list_wrapup'][index0] = (address0, self._offset)
        return address0
    def _read_block_list_item(self):
        """Parse rule ``block_list_item``:

            'ITEM' hier_element_heading? eol
            (indent subheading? block_element+ dedent)?

        A single item of a block list, with an optional indented body.  On
        success the node is retagged with the ``BlockListItem`` mixin;
        result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_list_item'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'ITEM'.
        chunk0, max0 = None, self._offset + 4
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'ITEM':
            address1 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
            self._offset = self._offset + 4
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'ITEM\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional heading (number/title) on the ITEM line.
            index2 = self._offset
            address2 = self._read_hier_element_heading()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # Optional indented body:
                    # indent subheading? block_element+ dedent.
                    index3 = self._offset
                    index4, elements1 = self._offset, []
                    address5 = FAILURE
                    address5 = self._read_indent()
                    if address5 is not FAILURE:
                        elements1.append(address5)
                        address6 = FAILURE
                        index5 = self._offset
                        address6 = self._read_subheading()
                        if address6 is FAILURE:
                            address6 = TreeNode(self._input[index5:index5], index5, [])
                            self._offset = index5
                        if address6 is not FAILURE:
                            elements1.append(address6)
                            address7 = FAILURE
                            # block_element+ : at least one block element.
                            remaining0, index6, elements2, address8 = 1, self._offset, [], True
                            while address8 is not FAILURE:
                                address8 = self._read_block_element()
                                if address8 is not FAILURE:
                                    elements2.append(address8)
                                    remaining0 -= 1
                            if remaining0 <= 0:
                                address7 = TreeNode(self._input[index6:self._offset], index6, elements2)
                                self._offset = self._offset
                            else:
                                address7 = FAILURE
                            if address7 is not FAILURE:
                                elements1.append(address7)
                                address9 = FAILURE
                                address9 = self._read_dedent()
                                if address9 is not FAILURE:
                                    elements1.append(address9)
                                else:
                                    elements1 = None
                                    self._offset = index4
                            else:
                                elements1 = None
                                self._offset = index4
                        else:
                            elements1 = None
                            self._offset = index4
                    else:
                        elements1 = None
                        self._offset = index4
                    if elements1 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode75(self._input[index4:self._offset], index4, elements1)
                        self._offset = self._offset
                    if address4 is FAILURE:
                        # Body absent: empty placeholder node, no input consumed.
                        address4 = TreeNode(self._input[index3:index3], index3, [])
                        self._offset = index3
                    if address4 is not FAILURE:
                        elements0.append(address4)
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode74(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic BlockListItem type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockListItem', (cls0, self._types.BlockListItem), {})
        self._cache['block_list_item'][index0] = (address0, self._offset)
        return address0
    def _read_bullet_list(self):
        """Parse rule ``bullet_list``:

            'BULLETS' block_attrs? eol indent bullet_list_item+ dedent

        On success the node is retagged with the ``BulletList`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['bullet_list'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'BULLETS'.
        chunk0, max0 = None, self._offset + 7
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'BULLETS':
            address1 = TreeNode(self._input[self._offset:self._offset + 7], self._offset, [])
            self._offset = self._offset + 7
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'BULLETS\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional block attributes.
            index2 = self._offset
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    address4 = self._read_indent()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        # bullet_list_item+ : at least one item.
                        remaining0, index3, elements1, address6 = 1, self._offset, [], True
                        while address6 is not FAILURE:
                            address6 = self._read_bullet_list_item()
                            if address6 is not FAILURE:
                                elements1.append(address6)
                                remaining0 -= 1
                        if remaining0 <= 0:
                            address5 = TreeNode(self._input[index3:self._offset], index3, elements1)
                            self._offset = self._offset
                        else:
                            address5 = FAILURE
                        if address5 is not FAILURE:
                            elements0.append(address5)
                            address7 = FAILURE
                            address7 = self._read_dedent()
                            if address7 is not FAILURE:
                                elements0.append(address7)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode76(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic BulletList type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BulletList', (cls0, self._types.BulletList), {})
        self._cache['bullet_list'][index0] = (address0, self._offset)
        return address0
    def _read_bullet_list_item(self):
        """Parse rule ``bullet_list_item``:

            ('*' space?)? (block_elements / eol)
            (indent block_element+ dedent)?

        A bullet item: an optional '*' marker, either inline content or a
        bare end-of-line, then an optional indented body.  On success the
        node is retagged with the ``BulletListItem`` mixin; result is
        memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['bullet_list_item'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Optional marker group: '*' followed by an optional space.
        index2 = self._offset
        index3, elements1 = self._offset, []
        address2 = FAILURE
        chunk0, max0 = None, self._offset + 1
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '*':
            address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
            self._offset = self._offset + 1
        else:
            address2 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'*\'')
        if address2 is not FAILURE:
            elements1.append(address2)
            address3 = FAILURE
            index4 = self._offset
            address3 = self._read_space()
            if address3 is FAILURE:
                address3 = TreeNode(self._input[index4:index4], index4, [])
                self._offset = index4
            if address3 is not FAILURE:
                elements1.append(address3)
            else:
                elements1 = None
                self._offset = index3
        else:
            elements1 = None
            self._offset = index3
        if elements1 is None:
            address1 = FAILURE
        else:
            address1 = TreeNode(self._input[index3:self._offset], index3, elements1)
            self._offset = self._offset
        if address1 is FAILURE:
            # Marker absent: empty placeholder node, no input consumed.
            address1 = TreeNode(self._input[index2:index2], index2, [])
            self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address4 = FAILURE
            # Ordered choice: inline content for the item, or a bare eol.
            index5 = self._offset
            address4 = self._read_block_elements()
            if address4 is FAILURE:
                self._offset = index5
                address4 = self._read_eol()
                if address4 is FAILURE:
                    self._offset = index5
            if address4 is not FAILURE:
                elements0.append(address4)
                address5 = FAILURE
                # Optional indented body: indent block_element+ dedent.
                index6 = self._offset
                index7, elements2 = self._offset, []
                address6 = FAILURE
                address6 = self._read_indent()
                if address6 is not FAILURE:
                    elements2.append(address6)
                    address7 = FAILURE
                    # block_element+ : at least one block element.
                    remaining0, index8, elements3, address8 = 1, self._offset, [], True
                    while address8 is not FAILURE:
                        address8 = self._read_block_element()
                        if address8 is not FAILURE:
                            elements3.append(address8)
                            remaining0 -= 1
                    if remaining0 <= 0:
                        address7 = TreeNode(self._input[index8:self._offset], index8, elements3)
                        self._offset = self._offset
                    else:
                        address7 = FAILURE
                    if address7 is not FAILURE:
                        elements2.append(address7)
                        address9 = FAILURE
                        address9 = self._read_dedent()
                        if address9 is not FAILURE:
                            elements2.append(address9)
                        else:
                            elements2 = None
                            self._offset = index7
                    else:
                        elements2 = None
                        self._offset = index7
                else:
                    elements2 = None
                    self._offset = index7
                if elements2 is None:
                    address5 = FAILURE
                else:
                    address5 = TreeNode78(self._input[index7:self._offset], index7, elements2)
                    self._offset = self._offset
                if address5 is FAILURE:
                    # Body absent: empty placeholder node, no input consumed.
                    address5 = TreeNode(self._input[index6:index6], index6, [])
                    self._offset = index6
                if address5 is not FAILURE:
                    elements0.append(address5)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode77(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic BulletListItem type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BulletListItem', (cls0, self._types.BulletListItem), {})
        self._cache['bullet_list_item'][index0] = (address0, self._offset)
        return address0
    def _read_table(self):
        """Parse rule ``table``:

            'TABLE' block_attrs? eol indent table_row+ dedent

        On success the node is retagged with the ``Table`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['table'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'TABLE'.
        chunk0, max0 = None, self._offset + 5
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'TABLE':
            address1 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
            self._offset = self._offset + 5
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'TABLE\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional block attributes.
            index2 = self._offset
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    address4 = self._read_indent()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        # table_row+ : at least one row.
                        remaining0, index3, elements1, address6 = 1, self._offset, [], True
                        while address6 is not FAILURE:
                            address6 = self._read_table_row()
                            if address6 is not FAILURE:
                                elements1.append(address6)
                                remaining0 -= 1
                        if remaining0 <= 0:
                            address5 = TreeNode(self._input[index3:self._offset], index3, elements1)
                            self._offset = self._offset
                        else:
                            address5 = FAILURE
                        if address5 is not FAILURE:
                            elements0.append(address5)
                            address7 = FAILURE
                            address7 = self._read_dedent()
                            if address7 is not FAILURE:
                                elements0.append(address7)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode79(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic Table type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Table', (cls0, self._types.Table), {})
        self._cache['table'][index0] = (address0, self._offset)
        return address0
    def _read_table_row(self):
        """Parse rule ``table_row``: 'TR' eol indent table_cell+ dedent.

        On success the node is retagged with the ``TableRow`` mixin from
        ``self._types``; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['table_row'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Literal 'TR'.
        chunk0, max0 = None, self._offset + 2
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'TR':
            address1 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
            self._offset = self._offset + 2
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'TR\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            address2 = self._read_eol()
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_indent()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # table_cell+ : at least one cell.
                    remaining0, index2, elements1, address5 = 1, self._offset, [], True
                    while address5 is not FAILURE:
                        address5 = self._read_table_cell()
                        if address5 is not FAILURE:
                            elements1.append(address5)
                            remaining0 -= 1
                    if remaining0 <= 0:
                        address4 = TreeNode(self._input[index2:self._offset], index2, elements1)
                        self._offset = self._offset
                    else:
                        address4 = FAILURE
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address6 = FAILURE
                        address6 = self._read_dedent()
                        if address6 is not FAILURE:
                            elements0.append(address6)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode80(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic TableRow type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'TableRow', (cls0, self._types.TableRow), {})
        self._cache['table_row'][index0] = (address0, self._offset)
        return address0
    def _read_table_cell(self):
        """Parse rule ``table_cell``:

            ('TH' / 'TC') block_attrs? eol
            (indent block_element+ dedent)?

        A header ('TH') or content ('TC') cell with an optional indented
        body.  On success the node is retagged with the ``TableCell``
        mixin; result is memoised per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['table_cell'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Ordered choice: 'TH' (header cell) or 'TC' (content cell).
        index2 = self._offset
        chunk0, max0 = None, self._offset + 2
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'TH':
            address1 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
            self._offset = self._offset + 2
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'TH\'')
        if address1 is FAILURE:
            self._offset = index2
            chunk1, max1 = None, self._offset + 2
            if max1 <= self._input_size:
                chunk1 = self._input[self._offset:max1]
            if chunk1 == 'TC':
                address1 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                self._offset = self._offset + 2
            else:
                address1 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'TC\'')
            if address1 is FAILURE:
                self._offset = index2
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Optional block attributes.
            index3 = self._offset
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # Optional indented body: indent block_element+ dedent.
                    index4 = self._offset
                    index5, elements1 = self._offset, []
                    address5 = FAILURE
                    address5 = self._read_indent()
                    if address5 is not FAILURE:
                        elements1.append(address5)
                        address6 = FAILURE
                        # block_element+ : at least one block element.
                        remaining0, index6, elements2, address7 = 1, self._offset, [], True
                        while address7 is not FAILURE:
                            address7 = self._read_block_element()
                            if address7 is not FAILURE:
                                elements2.append(address7)
                                remaining0 -= 1
                        if remaining0 <= 0:
                            address6 = TreeNode(self._input[index6:self._offset], index6, elements2)
                            self._offset = self._offset
                        else:
                            address6 = FAILURE
                        if address6 is not FAILURE:
                            elements1.append(address6)
                            address8 = FAILURE
                            address8 = self._read_dedent()
                            if address8 is not FAILURE:
                                elements1.append(address8)
                            else:
                                elements1 = None
                                self._offset = index5
                        else:
                            elements1 = None
                            self._offset = index5
                    else:
                        elements1 = None
                        self._offset = index5
                    if elements1 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode82(self._input[index5:self._offset], index5, elements1)
                        self._offset = self._offset
                    if address4 is FAILURE:
                        # Body absent: empty placeholder node, no input consumed.
                        address4 = TreeNode(self._input[index4:index4], index4, [])
                        self._offset = index4
                    if address4 is not FAILURE:
                        elements0.append(address4)
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode81(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the semantic TableCell type into the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'TableCell', (cls0, self._types.TableCell), {})
        self._cache['table_cell'][index0] = (address0, self._offset)
        return address0
    def _read_block_attrs(self):
        """Parse the grammar rule: block_attrs <- block_attr_class* block_attr_pairs?

        Returns a TreeNode83 tagged with the BlockAttrs mixin, or FAILURE.
        Results are memoized per start offset in self._cache['block_attrs'].
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse a previous parse at this offset.
        cached = self._cache['block_attrs'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Zero-or-more block_attr_class (remaining0 starts at 0 => always matches).
        remaining0, index2, elements1, address2 = 0, self._offset, [], True
        while address2 is not FAILURE:
            address2 = self._read_block_attr_class()
            if address2 is not FAILURE:
                elements1.append(address2)
                remaining0 -= 1
        if remaining0 <= 0:
            address1 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address1 = FAILURE
        if address1 is not FAILURE:
            elements0.append(address1)
            address3 = FAILURE
            index3 = self._offset
            # Optional block_attr_pairs: on failure, substitute an empty node
            # and rewind to index3 so the sequence still succeeds.
            address3 = self._read_block_attr_pairs()
            if address3 is FAILURE:
                address3 = TreeNode(self._input[index3:index3], index3, [])
                self._offset = index3
            if address3 is not FAILURE:
                elements0.append(address3)
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode83(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Dynamically mix the user-supplied BlockAttrs actions class into the node.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockAttrs', (cls0, self._types.BlockAttrs), {})
        self._cache['block_attrs'][index0] = (address0, self._offset)
        return address0
    def _read_block_attr_class(self):
        """Parse the grammar rule: block_attr_class <- '.' class_name?

        Returns an untagged TreeNode, or FAILURE. Memoized per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_attr_class'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Match the literal '.' (single-character lookahead against input size).
        chunk0, max0 = None, self._offset + 1
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '.':
            address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
            self._offset = self._offset + 1
        else:
            address1 = FAILURE
            # Track the right-most failure offset and the terminals expected there,
            # for error reporting.
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'.\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # Optional class_name: substitute an empty node on failure.
            address2 = self._read_class_name()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        self._cache['block_attr_class'][index0] = (address0, self._offset)
        return address0
    def _read_block_attr_pairs(self):
        """Parse: block_attr_pairs <- '{' block_attr? space? ('|' space? block_attr?)* '}'

        Returns a TreeNode84 (each '|'-separated repetition is a TreeNode85),
        or FAILURE. Memoized per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_attr_pairs'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Opening '{'.
        chunk0, max0 = None, self._offset + 1
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '{':
            address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
            self._offset = self._offset + 1
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'{\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # Optional leading block_attr.
            address2 = self._read_block_attr()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                index3 = self._offset
                # Optional space.
                address3 = self._read_space()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index3:index3], index3, [])
                    self._offset = index3
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # Zero-or-more '|'-separated attribute groups.
                    remaining0, index4, elements1, address5 = 0, self._offset, [], True
                    while address5 is not FAILURE:
                        index5, elements2 = self._offset, []
                        address6 = FAILURE
                        # Separator '|'.
                        chunk1, max1 = None, self._offset + 1
                        if max1 <= self._input_size:
                            chunk1 = self._input[self._offset:max1]
                        if chunk1 == '|':
                            address6 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                            self._offset = self._offset + 1
                        else:
                            address6 = FAILURE
                            if self._offset > self._failure:
                                self._failure = self._offset
                                self._expected = []
                            if self._offset == self._failure:
                                self._expected.append('\'|\'')
                        if address6 is not FAILURE:
                            elements2.append(address6)
                            address7 = FAILURE
                            index6 = self._offset
                            # Optional space after the separator.
                            address7 = self._read_space()
                            if address7 is FAILURE:
                                address7 = TreeNode(self._input[index6:index6], index6, [])
                                self._offset = index6
                            if address7 is not FAILURE:
                                elements2.append(address7)
                                address8 = FAILURE
                                index7 = self._offset
                                # Optional block_attr inside the group.
                                address8 = self._read_block_attr()
                                if address8 is FAILURE:
                                    address8 = TreeNode(self._input[index7:index7], index7, [])
                                    self._offset = index7
                                if address8 is not FAILURE:
                                    elements2.append(address8)
                                else:
                                    elements2 = None
                                    self._offset = index5
                            else:
                                elements2 = None
                                self._offset = index5
                        else:
                            elements2 = None
                            self._offset = index5
                        if elements2 is None:
                            address5 = FAILURE
                        else:
                            address5 = TreeNode85(self._input[index5:self._offset], index5, elements2)
                            self._offset = self._offset
                        if address5 is not FAILURE:
                            elements1.append(address5)
                            remaining0 -= 1
                    if remaining0 <= 0:
                        address4 = TreeNode(self._input[index4:self._offset], index4, elements1)
                        self._offset = self._offset
                    else:
                        address4 = FAILURE
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address9 = FAILURE
                        # Closing '}'.
                        chunk2, max2 = None, self._offset + 1
                        if max2 <= self._input_size:
                            chunk2 = self._input[self._offset:max2]
                        if chunk2 == '}':
                            address9 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                            self._offset = self._offset + 1
                        else:
                            address9 = FAILURE
                            if self._offset > self._failure:
                                self._failure = self._offset
                                self._expected = []
                            if self._offset == self._failure:
                                self._expected.append('\'}\'')
                        if address9 is not FAILURE:
                            elements0.append(address9)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode84(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        self._cache['block_attr_pairs'][index0] = (address0, self._offset)
        return address0
    def _read_block_attr(self):
        """Parse: block_attr <- attr_name (space attr_value)?

        Returns a TreeNode86 tagged with the BlockAttr mixin (the optional
        space+value pair is a TreeNode87), or FAILURE. Memoized per offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_attr'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_attr_name()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # Inner sequence (space attr_value); the whole sequence is optional.
            index3, elements1 = self._offset, []
            address3 = FAILURE
            address3 = self._read_space()
            if address3 is not FAILURE:
                elements1.append(address3)
                address4 = FAILURE
                address4 = self._read_attr_value()
                if address4 is not FAILURE:
                    elements1.append(address4)
                else:
                    elements1 = None
                    self._offset = index3
            else:
                elements1 = None
                self._offset = index3
            if elements1 is None:
                address2 = FAILURE
            else:
                address2 = TreeNode87(self._input[index3:self._offset], index3, elements1)
                self._offset = self._offset
            # Optional: substitute an empty node and rewind when the pair failed.
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode86(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the user-supplied BlockAttr actions class into the node.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockAttr', (cls0, self._types.BlockAttr), {})
        self._cache['block_attr'][index0] = (address0, self._offset)
        return address0
def _read_class_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['class_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0, max0 = None, self._offset + 1
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 is not None and Grammar.REGEX_2.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^ \\n|{}.]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['class_name'][index0] = (address0, self._offset)
return address0
def _read_attr_name(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['attr_name'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0, max0 = None, self._offset + 1
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 is not None and Grammar.REGEX_3.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^ \\n|{}]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['attr_name'][index0] = (address0, self._offset)
return address0
def _read_attr_value(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['attr_value'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 0, self._offset, [], True
while address1 is not FAILURE:
chunk0, max0 = None, self._offset + 1
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 is not None and Grammar.REGEX_4.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^\\n|}]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['attr_value'][index0] = (address0, self._offset)
return address0
    def _read_block_quote(self):
        """Parse: block_quote <- 'QUOTE' block_attrs? eol indent hier_block_element+ dedent

        Returns a TreeNode88 tagged with the BlockQuote mixin, or FAILURE.
        Memoized per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['block_quote'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Keyword 'QUOTE' (5-character lookahead).
        chunk0, max0 = None, self._offset + 5
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'QUOTE':
            address1 = TreeNode(self._input[self._offset:self._offset + 5], self._offset, [])
            self._offset = self._offset + 5
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'QUOTE\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            index2 = self._offset
            # Optional block_attrs.
            address2 = self._read_block_attrs()
            if address2 is FAILURE:
                address2 = TreeNode(self._input[index2:index2], index2, [])
                self._offset = index2
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_eol()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    address4 = self._read_indent()
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        # One-or-more hier_block_element (remaining0 starts at 1).
                        remaining0, index3, elements1, address6 = 1, self._offset, [], True
                        while address6 is not FAILURE:
                            address6 = self._read_hier_block_element()
                            if address6 is not FAILURE:
                                elements1.append(address6)
                                remaining0 -= 1
                        if remaining0 <= 0:
                            address5 = TreeNode(self._input[index3:self._offset], index3, elements1)
                            self._offset = self._offset
                        else:
                            address5 = FAILURE
                        if address5 is not FAILURE:
                            elements0.append(address5)
                            address7 = FAILURE
                            address7 = self._read_dedent()
                            if address7 is not FAILURE:
                                elements0.append(address7)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode88(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the user-supplied BlockQuote actions class into the node.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'BlockQuote', (cls0, self._types.BlockQuote), {})
        self._cache['block_quote'][index0] = (address0, self._offset)
        return address0
    def _read_footnote(self):
        """Parse: footnote <- 'FOOTNOTE' space [^ \\n]+ space? eol indent hier_block_element+ dedent

        Returns a TreeNode89 tagged with the Footnote mixin, or FAILURE.
        Memoized per start offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['footnote'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Keyword 'FOOTNOTE' (8-character lookahead).
        chunk0, max0 = None, self._offset + 8
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == 'FOOTNOTE':
            address1 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
            self._offset = self._offset + 8
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'FOOTNOTE\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            address2 = self._read_space()
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # One-or-more non-space, non-newline characters: the footnote marker.
                remaining0, index2, elements1, address4 = 1, self._offset, [], True
                while address4 is not FAILURE:
                    chunk1, max1 = None, self._offset + 1
                    if max1 <= self._input_size:
                        chunk1 = self._input[self._offset:max1]
                    if chunk1 is not None and Grammar.REGEX_5.search(chunk1):
                        address4 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                        self._offset = self._offset + 1
                    else:
                        address4 = FAILURE
                        if self._offset > self._failure:
                            self._failure = self._offset
                            self._expected = []
                        if self._offset == self._failure:
                            self._expected.append('[^ \\n]')
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index2:self._offset], index2, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address5 = FAILURE
                    index3 = self._offset
                    # Optional trailing space before end-of-line.
                    address5 = self._read_space()
                    if address5 is FAILURE:
                        address5 = TreeNode(self._input[index3:index3], index3, [])
                        self._offset = index3
                    if address5 is not FAILURE:
                        elements0.append(address5)
                        address6 = FAILURE
                        address6 = self._read_eol()
                        if address6 is not FAILURE:
                            elements0.append(address6)
                            address7 = FAILURE
                            address7 = self._read_indent()
                            if address7 is not FAILURE:
                                elements0.append(address7)
                                address8 = FAILURE
                                # One-or-more hier_block_element: the footnote body.
                                remaining1, index4, elements2, address9 = 1, self._offset, [], True
                                while address9 is not FAILURE:
                                    address9 = self._read_hier_block_element()
                                    if address9 is not FAILURE:
                                        elements2.append(address9)
                                        remaining1 -= 1
                                if remaining1 <= 0:
                                    address8 = TreeNode(self._input[index4:self._offset], index4, elements2)
                                    self._offset = self._offset
                                else:
                                    address8 = FAILURE
                                if address8 is not FAILURE:
                                    elements0.append(address8)
                                    address10 = FAILURE
                                    address10 = self._read_dedent()
                                    if address10 is not FAILURE:
                                        elements0.append(address10)
                                    else:
                                        elements0 = None
                                        self._offset = index1
                                else:
                                    elements0 = None
                                    self._offset = index1
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode89(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the user-supplied Footnote actions class into the node.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Footnote', (cls0, self._types.Footnote), {})
        self._cache['footnote'][index0] = (address0, self._offset)
        return address0
    def _read_inline(self):
        """Parse: inline <- non_inline_start / escape / inline_marker / [^\\n]

        Ordered choice: each alternative is tried from the same start offset;
        the offset is rewound before trying the next. The single-character
        fallback is tagged with the InlineText mixin. Memoized per offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['inline'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        address0 = self._read_non_inline_start()
        if address0 is FAILURE:
            self._offset = index1
            address0 = self._read_escape()
            if address0 is FAILURE:
                self._offset = index1
                address0 = self._read_inline_marker()
                if address0 is FAILURE:
                    self._offset = index1
                    # Last resort: any single character except newline.
                    chunk0, max0 = None, self._offset + 1
                    if max0 <= self._input_size:
                        chunk0 = self._input[self._offset:max0]
                    if chunk0 is not None and Grammar.REGEX_6.search(chunk0):
                        address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                        self._offset = self._offset + 1
                    else:
                        address0 = FAILURE
                        if self._offset > self._failure:
                            self._failure = self._offset
                            self._expected = []
                        if self._offset == self._failure:
                            self._expected.append('[^\\n]')
                    if address0 is not FAILURE:
                        # Tag the fallback character node as InlineText.
                        cls0 = type(address0)
                        address0.__class__ = type(cls0.__name__ + 'InlineText', (cls0, self._types.InlineText), {})
                    if address0 is FAILURE:
                        self._offset = index1
        self._cache['inline'][index0] = (address0, self._offset)
        return address0
def _read_non_inline_start(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['non_inline_start'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
remaining0, index1, elements0, address1 = 1, self._offset, [], True
while address1 is not FAILURE:
chunk0, max0 = None, self._offset + 1
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 is not None and Grammar.REGEX_7.search(chunk0):
address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
self._offset = self._offset + 1
else:
address1 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('[^*/_{\\n\\\\]')
if address1 is not FAILURE:
elements0.append(address1)
remaining0 -= 1
if remaining0 <= 0:
address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
self._offset = self._offset
else:
address0 = FAILURE
self._cache['non_inline_start'][index0] = (address0, self._offset)
return address0
    def _read_inline_nested(self):
        """Parse: inline_nested <- [^*/_{\\n\\\\}]+ / escape / (!'}}' inline_marker) / [^\\n]

        Like inline, but inside a nested context: '}' is excluded from the
        plain-text run, and an inline_marker only matches when not immediately
        followed by the '}}' terminator (negative lookahead). Memoized.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['inline_nested'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1 = self._offset
        # Alternative 1: one-or-more plain characters (REGEX_8 excludes '}').
        remaining0, index2, elements0, address1 = 1, self._offset, [], True
        while address1 is not FAILURE:
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 is not None and Grammar.REGEX_8.search(chunk0):
                address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address1 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('[^*/_{\\n\\\\}]')
            if address1 is not FAILURE:
                elements0.append(address1)
                remaining0 -= 1
        if remaining0 <= 0:
            address0 = TreeNode(self._input[index2:self._offset], index2, elements0)
            self._offset = self._offset
        else:
            address0 = FAILURE
        if address0 is FAILURE:
            self._offset = index1
            # Alternative 2: escape sequence.
            address0 = self._read_escape()
            if address0 is FAILURE:
                self._offset = index1
                index3 = self._offset
                # Alternative 3: negative lookahead for '}}' then inline_marker.
                index4, elements1 = self._offset, []
                address2 = FAILURE
                index5 = self._offset
                chunk1, max1 = None, self._offset + 2
                if max1 <= self._input_size:
                    chunk1 = self._input[self._offset:max1]
                if chunk1 == '}}':
                    address2 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address2 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'}}\'')
                # Lookahead never consumes input: rewind, then invert the result.
                self._offset = index5
                if address2 is FAILURE:
                    address2 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address2 = FAILURE
                if address2 is not FAILURE:
                    elements1.append(address2)
                    address3 = FAILURE
                    address3 = self._read_inline_marker()
                    if address3 is not FAILURE:
                        elements1.append(address3)
                    else:
                        elements1 = None
                        self._offset = index4
                else:
                    elements1 = None
                    self._offset = index4
                if elements1 is None:
                    address0 = FAILURE
                else:
                    address0 = TreeNode90(self._input[index4:self._offset], index4, elements1)
                    self._offset = self._offset
                if address0 is FAILURE:
                    self._offset = index3
                    # Alternative 4: any single character except newline.
                    chunk2, max2 = None, self._offset + 1
                    if max2 <= self._input_size:
                        chunk2 = self._input[self._offset:max2]
                    if chunk2 is not None and Grammar.REGEX_9.search(chunk2):
                        address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                        self._offset = self._offset + 1
                    else:
                        address0 = FAILURE
                        if self._offset > self._failure:
                            self._failure = self._offset
                            self._expected = []
                        if self._offset == self._failure:
                            self._expected.append('[^\\n]')
                    if address0 is FAILURE:
                        self._offset = index3
                if address0 is not FAILURE:
                    # Tag alternatives 3/4 with the InlineText mixin.
                    cls0 = type(address0)
                    address0.__class__ = type(cls0.__name__ + 'InlineText', (cls0, self._types.InlineText), {})
                if address0 is FAILURE:
                    self._offset = index1
        self._cache['inline_nested'][index0] = (address0, self._offset)
        return address0
def _read_inline_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['inline_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
address0 = self._read_bold()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_footnote_ref()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_image()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_italics()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_ref()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_remark()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_sup()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_sub()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_standard_inline()
if address0 is FAILURE:
self._offset = index1
address0 = self._read_underline()
if address0 is FAILURE:
self._offset = index1
self._cache['inline_marker'][index0] = (address0, self._offset)
return address0
    def _read_escape(self):
        """Parse: escape <- '\\\\' [^\\n]

        A backslash followed by any single non-newline character.
        Returns an untagged TreeNode, or FAILURE. Memoized per offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['escape'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # The backslash itself.
        chunk0, max0 = None, self._offset + 1
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '\\':
            address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
            self._offset = self._offset + 1
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'\\\\\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # The escaped character: anything but a newline.
            chunk1, max1 = None, self._offset + 1
            if max1 <= self._input_size:
                chunk1 = self._input[self._offset:max1]
            if chunk1 is not None and Grammar.REGEX_10.search(chunk1):
                address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('[^\\n]')
            if address2 is not FAILURE:
                elements0.append(address2)
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        self._cache['escape'][index0] = (address0, self._offset)
        return address0
    def _read_bold(self):
        """Parse: bold <- '**' (!'**' inline)+ '**'

        Opening marker, one-or-more inline items each guarded by a negative
        lookahead for the closing marker, then the closing marker. Returns a
        TreeNode91 tagged with the Bold mixin, or FAILURE. Memoized.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['bold'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Opening '**'.
        chunk0, max0 = None, self._offset + 2
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '**':
            address1 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
            self._offset = self._offset + 2
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'**\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # One-or-more (!'**' inline) repetitions.
            remaining0, index2, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                index3, elements2 = self._offset, []
                address4 = FAILURE
                index4 = self._offset
                # Negative lookahead: fail this repetition at the closing '**'.
                chunk1, max1 = None, self._offset + 2
                if max1 <= self._input_size:
                    chunk1 = self._input[self._offset:max1]
                if chunk1 == '**':
                    address4 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address4 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'**\'')
                # Lookahead never consumes: rewind, then invert the result.
                self._offset = index4
                if address4 is FAILURE:
                    address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address4 = FAILURE
                if address4 is not FAILURE:
                    elements2.append(address4)
                    address5 = FAILURE
                    address5 = self._read_inline()
                    if address5 is not FAILURE:
                        elements2.append(address5)
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
                if elements2 is None:
                    address3 = FAILURE
                else:
                    address3 = TreeNode92(self._input[index3:self._offset], index3, elements2)
                    self._offset = self._offset
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address6 = FAILURE
                # Closing '**'.
                chunk2, max2 = None, self._offset + 2
                if max2 <= self._input_size:
                    chunk2 = self._input[self._offset:max2]
                if chunk2 == '**':
                    address6 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address6 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'**\'')
                if address6 is not FAILURE:
                    elements0.append(address6)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode91(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the user-supplied Bold actions class into the node.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Bold', (cls0, self._types.Bold), {})
        self._cache['bold'][index0] = (address0, self._offset)
        return address0
    def _read_italics(self):
        """Parse: italics <- '//' (!'//' inline)+ '//'

        Same shape as bold but with the '//' marker. Returns a TreeNode93
        tagged with the Italics mixin, or FAILURE. Memoized per offset.
        """
        address0, index0 = FAILURE, self._offset
        cached = self._cache['italics'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Opening '//'.
        chunk0, max0 = None, self._offset + 2
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '//':
            address1 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
            self._offset = self._offset + 2
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'//\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # One-or-more (!'//' inline) repetitions.
            remaining0, index2, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                index3, elements2 = self._offset, []
                address4 = FAILURE
                index4 = self._offset
                # Negative lookahead: stop the repetition at the closing '//'.
                chunk1, max1 = None, self._offset + 2
                if max1 <= self._input_size:
                    chunk1 = self._input[self._offset:max1]
                if chunk1 == '//':
                    address4 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address4 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'//\'')
                # Lookahead never consumes: rewind, then invert the result.
                self._offset = index4
                if address4 is FAILURE:
                    address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address4 = FAILURE
                if address4 is not FAILURE:
                    elements2.append(address4)
                    address5 = FAILURE
                    address5 = self._read_inline()
                    if address5 is not FAILURE:
                        elements2.append(address5)
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
                if elements2 is None:
                    address3 = FAILURE
                else:
                    address3 = TreeNode94(self._input[index3:self._offset], index3, elements2)
                    self._offset = self._offset
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address6 = FAILURE
                # Closing '//'.
                chunk2, max2 = None, self._offset + 2
                if max2 <= self._input_size:
                    chunk2 = self._input[self._offset:max2]
                if chunk2 == '//':
                    address6 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address6 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'//\'')
                if address6 is not FAILURE:
                    elements0.append(address6)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode93(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix the user-supplied Italics actions class into the node.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Italics', (cls0, self._types.Italics), {})
        self._cache['italics'][index0] = (address0, self._offset)
        return address0
    def _read_underline(self):
        """Parse an underline inline element: '__' (!'__' inline)+ '__'.

        Generated packrat-parser rule (appears to be Canopy output — verify).
        Returns a TreeNode95 tagged with the mixin class ``self._types.Underline``
        on success, or the FAILURE sentinel with ``self._offset`` restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['underline'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        # Element 1: the opening '__' literal.
        chunk0, max0 = None, self._offset + 2
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == '__':
            address1 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
            self._offset = self._offset + 2
        else:
            address1 = FAILURE
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\'__\'')
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Element 2: one-or-more repetition of (!'__' inline).
            remaining0, index2, elements1, address3 = 1, self._offset, [], True
            while address3 is not FAILURE:
                index3, elements2 = self._offset, []
                address4 = FAILURE
                # Negative lookahead: succeed (empty node) only if '__' is NOT next.
                index4 = self._offset
                chunk1, max1 = None, self._offset + 2
                if max1 <= self._input_size:
                    chunk1 = self._input[self._offset:max1]
                if chunk1 == '__':
                    address4 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address4 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'__\'')
                self._offset = index4
                if address4 is FAILURE:
                    address4 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                    self._offset = self._offset
                else:
                    address4 = FAILURE
                if address4 is not FAILURE:
                    elements2.append(address4)
                    address5 = FAILURE
                    address5 = self._read_inline()
                    if address5 is not FAILURE:
                        elements2.append(address5)
                    else:
                        elements2 = None
                        self._offset = index3
                else:
                    elements2 = None
                    self._offset = index3
                if elements2 is None:
                    address3 = FAILURE
                else:
                    address3 = TreeNode96(self._input[index3:self._offset], index3, elements2)
                    self._offset = self._offset
                if address3 is not FAILURE:
                    elements1.append(address3)
                    remaining0 -= 1
            # remaining0 started at 1, so at least one repetition must have matched.
            if remaining0 <= 0:
                address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
                self._offset = self._offset
            else:
                address2 = FAILURE
            if address2 is not FAILURE:
                elements0.append(address2)
                address6 = FAILURE
                # Element 3: the closing '__' literal.
                chunk2, max2 = None, self._offset + 2
                if max2 <= self._input_size:
                    chunk2 = self._input[self._offset:max2]
                if chunk2 == '__':
                    address6 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
                    self._offset = self._offset + 2
                else:
                    address6 = FAILURE
                    if self._offset > self._failure:
                        self._failure = self._offset
                        self._expected = []
                    if self._offset == self._failure:
                        self._expected.append('\'__\'')
                if address6 is not FAILURE:
                    elements0.append(address6)
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode95(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied Underline type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Underline', (cls0, self._types.Underline), {})
        self._cache['underline'][index0] = (address0, self._offset)
        return address0
    def _read_remark(self):
        """Parse a remark inline element:
        inline_open '*' (!inline_close !indent !dedent ([\\n] / inline_nested))+ inline_close.

        Generated packrat-parser rule. Returns a TreeNode97 tagged with
        ``self._types.Remark`` on success, or FAILURE with the offset restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['remark'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal '*' marks the remark.
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == '*':
                address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'*\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # One-or-more repetition of the guarded content item.
                remaining0, index2, elements1, address4 = 1, self._offset, [], True
                while address4 is not FAILURE:
                    index3, elements2 = self._offset, []
                    address5 = FAILURE
                    # Negative lookahead: content must not start with inline_close.
                    index4 = self._offset
                    address5 = self._read_inline_close()
                    self._offset = index4
                    if address5 is FAILURE:
                        address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address5 = FAILURE
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        address6 = FAILURE
                        # Negative lookahead: nor with an indent token.
                        index5 = self._offset
                        address6 = self._read_indent()
                        self._offset = index5
                        if address6 is FAILURE:
                            address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address6 = FAILURE
                        if address6 is not FAILURE:
                            elements2.append(address6)
                            address7 = FAILURE
                            # Negative lookahead: nor with a dedent token.
                            index6 = self._offset
                            address7 = self._read_dedent()
                            self._offset = index6
                            if address7 is FAILURE:
                                address7 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address7 = FAILURE
                            if address7 is not FAILURE:
                                elements2.append(address7)
                                address8 = FAILURE
                                # Ordered choice: a newline char (REGEX_11, expected '[\n]')
                                # or any nested inline element.
                                index7 = self._offset
                                chunk1, max1 = None, self._offset + 1
                                if max1 <= self._input_size:
                                    chunk1 = self._input[self._offset:max1]
                                if chunk1 is not None and Grammar.REGEX_11.search(chunk1):
                                    address8 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                                    self._offset = self._offset + 1
                                else:
                                    address8 = FAILURE
                                    if self._offset > self._failure:
                                        self._failure = self._offset
                                        self._expected = []
                                    if self._offset == self._failure:
                                        self._expected.append('[\\n]')
                                if address8 is FAILURE:
                                    self._offset = index7
                                    address8 = self._read_inline_nested()
                                    if address8 is FAILURE:
                                        self._offset = index7
                                if address8 is not FAILURE:
                                    elements2.append(address8)
                                else:
                                    elements2 = None
                                    self._offset = index3
                            else:
                                elements2 = None
                                self._offset = index3
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                    if elements2 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode98(self._input[index3:self._offset], index3, elements2)
                        self._offset = self._offset
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                # remaining0 started at 1, so at least one item must have matched.
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index2:self._offset], index2, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address9 = FAILURE
                    address9 = self._read_inline_close()
                    if address9 is not FAILURE:
                        elements0.append(address9)
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode97(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied Remark type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Remark', (cls0, self._types.Remark), {})
        self._cache['remark'][index0] = (address0, self._offset)
        return address0
    def _read_image(self):
        """Parse an image inline element:
        inline_open 'IMG' space? (!inline_close REGEX_12-char)+ (!inline_close REGEX_13-char)* inline_close.

        Per the expected-token messages, REGEX_12 matches [^ \\n] (the image
        source) and REGEX_13 matches [^\\n] (presumably trailing text such as a
        caption — verify against the grammar). Returns a TreeNode99 tagged with
        ``self._types.Image`` on success, or FAILURE with the offset restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['image'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal 'IMG' keyword.
            chunk0, max0 = None, self._offset + 3
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == 'IMG':
                address2 = TreeNode(self._input[self._offset:self._offset + 3], self._offset, [])
                self._offset = self._offset + 3
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'IMG\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # Optional space: substitute an empty node when absent.
                index2 = self._offset
                address3 = self._read_space()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index2:index2], index2, [])
                    self._offset = index2
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # One-or-more repetition of (!inline_close REGEX_12-char).
                    remaining0, index3, elements1, address5 = 1, self._offset, [], True
                    while address5 is not FAILURE:
                        index4, elements2 = self._offset, []
                        address6 = FAILURE
                        # Negative lookahead: stop before the closing '}}'.
                        index5 = self._offset
                        address6 = self._read_inline_close()
                        self._offset = index5
                        if address6 is FAILURE:
                            address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address6 = FAILURE
                        if address6 is not FAILURE:
                            elements2.append(address6)
                            address7 = FAILURE
                            chunk1, max1 = None, self._offset + 1
                            if max1 <= self._input_size:
                                chunk1 = self._input[self._offset:max1]
                            if chunk1 is not None and Grammar.REGEX_12.search(chunk1):
                                address7 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                                self._offset = self._offset + 1
                            else:
                                address7 = FAILURE
                                if self._offset > self._failure:
                                    self._failure = self._offset
                                    self._expected = []
                                if self._offset == self._failure:
                                    self._expected.append('[^ \\n]')
                            if address7 is not FAILURE:
                                elements2.append(address7)
                            else:
                                elements2 = None
                                self._offset = index4
                        else:
                            elements2 = None
                            self._offset = index4
                        if elements2 is None:
                            address5 = FAILURE
                        else:
                            address5 = TreeNode(self._input[index4:self._offset], index4, elements2)
                            self._offset = self._offset
                        if address5 is not FAILURE:
                            elements1.append(address5)
                            remaining0 -= 1
                    if remaining0 <= 0:
                        address4 = TreeNode(self._input[index3:self._offset], index3, elements1)
                        self._offset = self._offset
                    else:
                        address4 = FAILURE
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address8 = FAILURE
                        # Zero-or-more repetition of (!inline_close REGEX_13-char).
                        remaining1, index6, elements3, address9 = 0, self._offset, [], True
                        while address9 is not FAILURE:
                            index7, elements4 = self._offset, []
                            address10 = FAILURE
                            index8 = self._offset
                            address10 = self._read_inline_close()
                            self._offset = index8
                            if address10 is FAILURE:
                                address10 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address10 = FAILURE
                            if address10 is not FAILURE:
                                elements4.append(address10)
                                address11 = FAILURE
                                chunk2, max2 = None, self._offset + 1
                                if max2 <= self._input_size:
                                    chunk2 = self._input[self._offset:max2]
                                if chunk2 is not None and Grammar.REGEX_13.search(chunk2):
                                    address11 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                                    self._offset = self._offset + 1
                                else:
                                    address11 = FAILURE
                                    if self._offset > self._failure:
                                        self._failure = self._offset
                                        self._expected = []
                                    if self._offset == self._failure:
                                        self._expected.append('[^\\n]')
                                if address11 is not FAILURE:
                                    elements4.append(address11)
                                else:
                                    elements4 = None
                                    self._offset = index7
                            else:
                                elements4 = None
                                self._offset = index7
                            if elements4 is None:
                                address9 = FAILURE
                            else:
                                address9 = TreeNode(self._input[index7:self._offset], index7, elements4)
                                self._offset = self._offset
                            if address9 is not FAILURE:
                                elements3.append(address9)
                                remaining1 -= 1
                        if remaining1 <= 0:
                            address8 = TreeNode(self._input[index6:self._offset], index6, elements3)
                            self._offset = self._offset
                        else:
                            address8 = FAILURE
                        if address8 is not FAILURE:
                            elements0.append(address8)
                            address12 = FAILURE
                            address12 = self._read_inline_close()
                            if address12 is not FAILURE:
                                elements0.append(address12)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode99(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied Image type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Image', (cls0, self._types.Image), {})
        self._cache['image'][index0] = (address0, self._offset)
        return address0
    def _read_sup(self):
        """Parse a superscript inline element:
        inline_open '^' (!inline_close inline_nested)+ inline_close.

        Generated packrat-parser rule. Returns a TreeNode100 tagged with
        ``self._types.Sup`` on success, or FAILURE with the offset restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['sup'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal '^' marks the superscript.
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == '^':
                address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'^\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # One-or-more repetition of (!inline_close inline_nested).
                remaining0, index2, elements1, address4 = 1, self._offset, [], True
                while address4 is not FAILURE:
                    index3, elements2 = self._offset, []
                    address5 = FAILURE
                    # Negative lookahead: stop before the closing '}}'.
                    index4 = self._offset
                    address5 = self._read_inline_close()
                    self._offset = index4
                    if address5 is FAILURE:
                        address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address5 = FAILURE
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        address6 = FAILURE
                        address6 = self._read_inline_nested()
                        if address6 is not FAILURE:
                            elements2.append(address6)
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                    if elements2 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode101(self._input[index3:self._offset], index3, elements2)
                        self._offset = self._offset
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index2:self._offset], index2, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address7 = FAILURE
                    address7 = self._read_inline_close()
                    if address7 is not FAILURE:
                        elements0.append(address7)
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode100(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied Sup type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Sup', (cls0, self._types.Sup), {})
        self._cache['sup'][index0] = (address0, self._offset)
        return address0
    def _read_sub(self):
        """Parse a subscript inline element:
        inline_open '_' (!inline_close inline_nested)+ inline_close.

        Structurally identical to ``_read_sup`` except for the '_' marker and
        node types. Returns a TreeNode102 tagged with ``self._types.Sub`` on
        success, or FAILURE with the offset restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['sub'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal '_' marks the subscript.
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == '_':
                address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'_\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # One-or-more repetition of (!inline_close inline_nested).
                remaining0, index2, elements1, address4 = 1, self._offset, [], True
                while address4 is not FAILURE:
                    index3, elements2 = self._offset, []
                    address5 = FAILURE
                    # Negative lookahead: stop before the closing '}}'.
                    index4 = self._offset
                    address5 = self._read_inline_close()
                    self._offset = index4
                    if address5 is FAILURE:
                        address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address5 = FAILURE
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        address6 = FAILURE
                        address6 = self._read_inline_nested()
                        if address6 is not FAILURE:
                            elements2.append(address6)
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                    if elements2 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode103(self._input[index3:self._offset], index3, elements2)
                        self._offset = self._offset
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index2:self._offset], index2, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address7 = FAILURE
                    address7 = self._read_inline_close()
                    if address7 is not FAILURE:
                        elements0.append(address7)
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode102(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied Sub type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Sub', (cls0, self._types.Sub), {})
        self._cache['sub'][index0] = (address0, self._offset)
        return address0
    def _read_ref(self):
        """Parse a reference inline element:
        inline_open '>' (!inline_close REGEX_14-char)* ' '? (!inline_close inline_nested)* inline_close.

        Per the expected-token message, REGEX_14 matches [^ \\n] (presumably
        the reference target — verify against the grammar); the remainder is
        nested inline content. Returns a TreeNode104 tagged with
        ``self._types.Ref`` on success, or FAILURE with the offset restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['ref'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal '>' marks the reference.
            chunk0, max0 = None, self._offset + 1
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == '>':
                address2 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                self._offset = self._offset + 1
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'>\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # Zero-or-more repetition of (!inline_close REGEX_14-char).
                remaining0, index2, elements1, address4 = 0, self._offset, [], True
                while address4 is not FAILURE:
                    index3, elements2 = self._offset, []
                    address5 = FAILURE
                    # Negative lookahead: stop before the closing '}}'.
                    index4 = self._offset
                    address5 = self._read_inline_close()
                    self._offset = index4
                    if address5 is FAILURE:
                        address5 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                        self._offset = self._offset
                    else:
                        address5 = FAILURE
                    if address5 is not FAILURE:
                        elements2.append(address5)
                        address6 = FAILURE
                        chunk1, max1 = None, self._offset + 1
                        if max1 <= self._input_size:
                            chunk1 = self._input[self._offset:max1]
                        if chunk1 is not None and Grammar.REGEX_14.search(chunk1):
                            address6 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                            self._offset = self._offset + 1
                        else:
                            address6 = FAILURE
                            if self._offset > self._failure:
                                self._failure = self._offset
                                self._expected = []
                            if self._offset == self._failure:
                                self._expected.append('[^ \\n]')
                        if address6 is not FAILURE:
                            elements2.append(address6)
                        else:
                            elements2 = None
                            self._offset = index3
                    else:
                        elements2 = None
                        self._offset = index3
                    if elements2 is None:
                        address4 = FAILURE
                    else:
                        address4 = TreeNode(self._input[index3:self._offset], index3, elements2)
                        self._offset = self._offset
                    if address4 is not FAILURE:
                        elements1.append(address4)
                        remaining0 -= 1
                if remaining0 <= 0:
                    address3 = TreeNode(self._input[index2:self._offset], index2, elements1)
                    self._offset = self._offset
                else:
                    address3 = FAILURE
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address7 = FAILURE
                    # Optional single space: substitute an empty node when absent.
                    index5 = self._offset
                    chunk2, max2 = None, self._offset + 1
                    if max2 <= self._input_size:
                        chunk2 = self._input[self._offset:max2]
                    if chunk2 == ' ':
                        address7 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                        self._offset = self._offset + 1
                    else:
                        address7 = FAILURE
                        if self._offset > self._failure:
                            self._failure = self._offset
                            self._expected = []
                        if self._offset == self._failure:
                            self._expected.append('\' \'')
                    if address7 is FAILURE:
                        address7 = TreeNode(self._input[index5:index5], index5, [])
                        self._offset = index5
                    if address7 is not FAILURE:
                        elements0.append(address7)
                        address8 = FAILURE
                        # Zero-or-more repetition of (!inline_close inline_nested).
                        remaining1, index6, elements3, address9 = 0, self._offset, [], True
                        while address9 is not FAILURE:
                            index7, elements4 = self._offset, []
                            address10 = FAILURE
                            index8 = self._offset
                            address10 = self._read_inline_close()
                            self._offset = index8
                            if address10 is FAILURE:
                                address10 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address10 = FAILURE
                            if address10 is not FAILURE:
                                elements4.append(address10)
                                address11 = FAILURE
                                address11 = self._read_inline_nested()
                                if address11 is not FAILURE:
                                    elements4.append(address11)
                                else:
                                    elements4 = None
                                    self._offset = index7
                            else:
                                elements4 = None
                                self._offset = index7
                            if elements4 is None:
                                address9 = FAILURE
                            else:
                                address9 = TreeNode105(self._input[index7:self._offset], index7, elements4)
                                self._offset = self._offset
                            if address9 is not FAILURE:
                                elements3.append(address9)
                                remaining1 -= 1
                        if remaining1 <= 0:
                            address8 = TreeNode(self._input[index6:self._offset], index6, elements3)
                            self._offset = self._offset
                        else:
                            address8 = FAILURE
                        if address8 is not FAILURE:
                            elements0.append(address8)
                            address12 = FAILURE
                            address12 = self._read_inline_close()
                            if address12 is not FAILURE:
                                elements0.append(address12)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode104(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied Ref type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'Ref', (cls0, self._types.Ref), {})
        self._cache['ref'][index0] = (address0, self._offset)
        return address0
    def _read_footnote_ref(self):
        """Parse a footnote-reference inline element:
        inline_open 'FOOTNOTE' space (!inline_close REGEX_15-char)+ inline_close.

        Per the expected-token message, REGEX_15 matches [^\\n]. Unlike the
        optional space in ``_read_image``, the space here is mandatory.
        Returns a TreeNode106 tagged with ``self._types.FootnoteRef`` on
        success, or FAILURE with the offset restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['footnote_ref'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            # Literal 'FOOTNOTE' keyword.
            chunk0, max0 = None, self._offset + 8
            if max0 <= self._input_size:
                chunk0 = self._input[self._offset:max0]
            if chunk0 == 'FOOTNOTE':
                address2 = TreeNode(self._input[self._offset:self._offset + 8], self._offset, [])
                self._offset = self._offset + 8
            else:
                address2 = FAILURE
                if self._offset > self._failure:
                    self._failure = self._offset
                    self._expected = []
                if self._offset == self._failure:
                    self._expected.append('\'FOOTNOTE\'')
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                address3 = self._read_space()
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # One-or-more repetition of (!inline_close REGEX_15-char).
                    remaining0, index2, elements1, address5 = 1, self._offset, [], True
                    while address5 is not FAILURE:
                        index3, elements2 = self._offset, []
                        address6 = FAILURE
                        # Negative lookahead: stop before the closing '}}'.
                        index4 = self._offset
                        address6 = self._read_inline_close()
                        self._offset = index4
                        if address6 is FAILURE:
                            address6 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                            self._offset = self._offset
                        else:
                            address6 = FAILURE
                        if address6 is not FAILURE:
                            elements2.append(address6)
                            address7 = FAILURE
                            chunk1, max1 = None, self._offset + 1
                            if max1 <= self._input_size:
                                chunk1 = self._input[self._offset:max1]
                            if chunk1 is not None and Grammar.REGEX_15.search(chunk1):
                                address7 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                                self._offset = self._offset + 1
                            else:
                                address7 = FAILURE
                                if self._offset > self._failure:
                                    self._failure = self._offset
                                    self._expected = []
                                if self._offset == self._failure:
                                    self._expected.append('[^\\n]')
                            if address7 is not FAILURE:
                                elements2.append(address7)
                            else:
                                elements2 = None
                                self._offset = index3
                        else:
                            elements2 = None
                            self._offset = index3
                        if elements2 is None:
                            address5 = FAILURE
                        else:
                            address5 = TreeNode(self._input[index3:self._offset], index3, elements2)
                            self._offset = self._offset
                        if address5 is not FAILURE:
                            elements1.append(address5)
                            remaining0 -= 1
                    if remaining0 <= 0:
                        address4 = TreeNode(self._input[index2:self._offset], index2, elements1)
                        self._offset = self._offset
                    else:
                        address4 = FAILURE
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address8 = FAILURE
                        address8 = self._read_inline_close()
                        if address8 is not FAILURE:
                            elements0.append(address8)
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode106(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied FootnoteRef type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'FootnoteRef', (cls0, self._types.FootnoteRef), {})
        self._cache['footnote_ref'][index0] = (address0, self._offset)
        return address0
    def _read_standard_inline(self):
        """Parse a standard inline element:
        inline_open standard_inline_marker block_attrs? ' '? (!inline_close inline_nested)* inline_close.

        The marker is one of the keywords matched by
        ``_read_standard_inline_marker``; attributes and the separating space
        are both optional. Returns a TreeNode107 tagged with
        ``self._types.StandardInline`` on success, or FAILURE with the offset
        restored.
        """
        address0, index0 = FAILURE, self._offset
        # Packrat memoization: reuse the (node, end-offset) pair cached for this offset.
        cached = self._cache['standard_inline'].get(index0)
        if cached:
            self._offset = cached[1]
            return cached[0]
        index1, elements0 = self._offset, []
        address1 = FAILURE
        address1 = self._read_inline_open()
        if address1 is not FAILURE:
            elements0.append(address1)
            address2 = FAILURE
            address2 = self._read_standard_inline_marker()
            if address2 is not FAILURE:
                elements0.append(address2)
                address3 = FAILURE
                # Optional block attributes: substitute an empty node when absent.
                index2 = self._offset
                address3 = self._read_block_attrs()
                if address3 is FAILURE:
                    address3 = TreeNode(self._input[index2:index2], index2, [])
                    self._offset = index2
                if address3 is not FAILURE:
                    elements0.append(address3)
                    address4 = FAILURE
                    # Optional single space: substitute an empty node when absent.
                    index3 = self._offset
                    chunk0, max0 = None, self._offset + 1
                    if max0 <= self._input_size:
                        chunk0 = self._input[self._offset:max0]
                    if chunk0 == ' ':
                        address4 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
                        self._offset = self._offset + 1
                    else:
                        address4 = FAILURE
                        if self._offset > self._failure:
                            self._failure = self._offset
                            self._expected = []
                        if self._offset == self._failure:
                            self._expected.append('\' \'')
                    if address4 is FAILURE:
                        address4 = TreeNode(self._input[index3:index3], index3, [])
                        self._offset = index3
                    if address4 is not FAILURE:
                        elements0.append(address4)
                        address5 = FAILURE
                        # Zero-or-more repetition of (!inline_close inline_nested).
                        remaining0, index4, elements1, address6 = 0, self._offset, [], True
                        while address6 is not FAILURE:
                            index5, elements2 = self._offset, []
                            address7 = FAILURE
                            # Negative lookahead: stop before the closing '}}'.
                            index6 = self._offset
                            address7 = self._read_inline_close()
                            self._offset = index6
                            if address7 is FAILURE:
                                address7 = TreeNode(self._input[self._offset:self._offset], self._offset, [])
                                self._offset = self._offset
                            else:
                                address7 = FAILURE
                            if address7 is not FAILURE:
                                elements2.append(address7)
                                address8 = FAILURE
                                address8 = self._read_inline_nested()
                                if address8 is not FAILURE:
                                    elements2.append(address8)
                                else:
                                    elements2 = None
                                    self._offset = index5
                            else:
                                elements2 = None
                                self._offset = index5
                            if elements2 is None:
                                address6 = FAILURE
                            else:
                                address6 = TreeNode108(self._input[index5:self._offset], index5, elements2)
                                self._offset = self._offset
                            if address6 is not FAILURE:
                                elements1.append(address6)
                                remaining0 -= 1
                        if remaining0 <= 0:
                            address5 = TreeNode(self._input[index4:self._offset], index4, elements1)
                            self._offset = self._offset
                        else:
                            address5 = FAILURE
                        if address5 is not FAILURE:
                            elements0.append(address5)
                            address9 = FAILURE
                            address9 = self._read_inline_close()
                            if address9 is not FAILURE:
                                elements0.append(address9)
                            else:
                                elements0 = None
                                self._offset = index1
                        else:
                            elements0 = None
                            self._offset = index1
                    else:
                        elements0 = None
                        self._offset = index1
                else:
                    elements0 = None
                    self._offset = index1
            else:
                elements0 = None
                self._offset = index1
        else:
            elements0 = None
            self._offset = index1
        if elements0 is None:
            address0 = FAILURE
        else:
            address0 = TreeNode107(self._input[index1:self._offset], index1, elements0)
            self._offset = self._offset
        if address0 is not FAILURE:
            # Mix in the user-supplied StandardInline type onto the node's class.
            cls0 = type(address0)
            address0.__class__ = type(cls0.__name__ + 'StandardInline', (cls0, self._types.StandardInline), {})
        self._cache['standard_inline'][index0] = (address0, self._offset)
        return address0
def _read_inline_open(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['inline_open'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
chunk0, max0 = None, self._offset + 2
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == '{{':
address0 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
self._offset = self._offset + 2
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'{{\'')
self._cache['inline_open'][index0] = (address0, self._offset)
return address0
def _read_inline_close(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['inline_close'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
chunk0, max0 = None, self._offset + 2
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == '}}':
address0 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
self._offset = self._offset + 2
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'}}\'')
self._cache['inline_close'][index0] = (address0, self._offset)
return address0
def _read_standard_inline_marker(self):
address0, index0 = FAILURE, self._offset
cached = self._cache['standard_inline_marker'].get(index0)
if cached:
self._offset = cached[1]
return cached[0]
index1 = self._offset
chunk0, max0 = None, self._offset + 4
if max0 <= self._input_size:
chunk0 = self._input[self._offset:max0]
if chunk0 == 'abbr':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'abbr\'')
if address0 is FAILURE:
self._offset = index1
chunk1, max1 = None, self._offset + 3
if max1 <= self._input_size:
chunk1 = self._input[self._offset:max1]
if chunk1 == 'def':
address0 = TreeNode(self._input[self._offset:self._offset + 3], self._offset, [])
self._offset = self._offset + 3
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'def\'')
if address0 is FAILURE:
self._offset = index1
chunk2, max2 = None, self._offset + 2
if max2 <= self._input_size:
chunk2 = self._input[self._offset:max2]
if chunk2 == 'em':
address0 = TreeNode(self._input[self._offset:self._offset + 2], self._offset, [])
self._offset = self._offset + 2
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'em\'')
if address0 is FAILURE:
self._offset = index1
chunk3, max3 = None, self._offset + 6
if max3 <= self._input_size:
chunk3 = self._input[self._offset:max3]
if chunk3 == 'inline':
address0 = TreeNode(self._input[self._offset:self._offset + 6], self._offset, [])
self._offset = self._offset + 6
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'inline\'')
if address0 is FAILURE:
self._offset = index1
chunk4, max4 = None, self._offset + 4
if max4 <= self._input_size:
chunk4 = self._input[self._offset:max4]
if chunk4 == 'term':
address0 = TreeNode(self._input[self._offset:self._offset + 4], self._offset, [])
self._offset = self._offset + 4
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'term\'')
if address0 is FAILURE:
self._offset = index1
chunk5, max5 = None, self._offset + 1
if max5 <= self._input_size:
chunk5 = self._input[self._offset:max5]
if chunk5 == '-':
address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
self._offset = self._offset + 1
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'-\'')
if address0 is FAILURE:
self._offset = index1
chunk6, max6 = None, self._offset + 1
if max6 <= self._input_size:
chunk6 = self._input[self._offset:max6]
if chunk6 == '+':
address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
self._offset = self._offset + 1
else:
address0 = FAILURE
if self._offset > self._failure:
self._failure = self._offset
self._expected = []
if self._offset == self._failure:
self._expected.append('\'+\'')
if address0 is FAILURE:
self._offset = index1
self._cache['standard_inline_marker'][index0] = (address0, self._offset)
return address0
def _read_eol(self):
    # Grammar rule: eol <- newline empty_line*
    # Generated parser code (Canopy-style): do not hand-tune the
    # offset/cache bookkeeping — behavior depends on the exact order.
    # Results are memoized per start offset in self._cache['eol'].
    address0, index0 = FAILURE, self._offset
    cached = self._cache['eol'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    address1 = self._read_newline()
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        # Zero-or-more repetition: remaining0 starts at 0, so this part
        # succeeds even when no empty lines follow.
        remaining0, index2, elements1, address3 = 0, self._offset, [], True
        while address3 is not FAILURE:
            address3 = self._read_empty_line()
            if address3 is not FAILURE:
                elements1.append(address3)
                remaining0 -= 1
        if remaining0 <= 0:
            address2 = TreeNode(self._input[index2:self._offset], index2, elements1)
            self._offset = self._offset
        else:
            address2 = FAILURE
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            # Sequence failed: rewind to the rule's start offset.
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        # TreeNode109 is the node class generated for this rule.
        address0 = TreeNode109(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    self._cache['eol'][index0] = (address0, self._offset)
    return address0
def _read_empty_line(self):
    # Grammar rule: empty_line <- newline
    # Thin alias for the `newline` rule, memoized under its own cache key.
    address0, index0 = FAILURE, self._offset
    cached = self._cache['empty_line'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    address0 = self._read_newline()
    self._cache['empty_line'][index0] = (address0, self._offset)
    return address0
def _read_space(self):
    # Grammar rule: space <- ' '+
    # One-or-more repetition: remaining0 starts at 1, so at least one
    # space character must match for the rule to succeed.
    address0, index0 = FAILURE, self._offset
    cached = self._cache['space'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    remaining0, index1, elements0, address1 = 1, self._offset, [], True
    while address1 is not FAILURE:
        chunk0, max0 = None, self._offset + 1
        if max0 <= self._input_size:
            chunk0 = self._input[self._offset:max0]
        if chunk0 == ' ':
            address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
            self._offset = self._offset + 1
        else:
            address1 = FAILURE
            # Track the rightmost failure and what was expected there,
            # for error reporting by format_error().
            if self._offset > self._failure:
                self._failure = self._offset
                self._expected = []
            if self._offset == self._failure:
                self._expected.append('\' \'')
        if address1 is not FAILURE:
            elements0.append(address1)
            remaining0 -= 1
    if remaining0 <= 0:
        address0 = TreeNode(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    else:
        address0 = FAILURE
    self._cache['space'][index0] = (address0, self._offset)
    return address0
def _read_newline(self):
    # Grammar rule: newline <- "\n"
    # Matches a single line feed; records expectation info on failure.
    address0, index0 = FAILURE, self._offset
    cached = self._cache['newline'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    chunk0, max0 = None, self._offset + 1
    if max0 <= self._input_size:
        chunk0 = self._input[self._offset:max0]
    if chunk0 == '\n':
        address0 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
        self._offset = self._offset + 1
    else:
        address0 = FAILURE
        if self._offset > self._failure:
            self._failure = self._offset
            self._expected = []
        if self._offset == self._failure:
            self._expected.append('"\\n"')
    self._cache['newline'][index0] = (address0, self._offset)
    return address0
def _read_indent(self):
    # Grammar rule: indent <- "\x0E" eol
    # "\x0E" (Shift Out) is an out-of-band marker, presumably injected by
    # a pre-processing pass to flag an indentation increase — TODO confirm.
    # NOTE(review): the original source embeds the raw control byte inside
    # the string literal (it renders as an empty string in some viewers);
    # the escape form below denotes the same one-character string.
    address0, index0 = FAILURE, self._offset
    cached = self._cache['indent'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    chunk0, max0 = None, self._offset + 1
    if max0 <= self._input_size:
        chunk0 = self._input[self._offset:max0]
    if chunk0 == '\x0E':
        address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
        self._offset = self._offset + 1
    else:
        address1 = FAILURE
        if self._offset > self._failure:
            self._failure = self._offset
            self._expected = []
        if self._offset == self._failure:
            self._expected.append('\'\\x0E\'')
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        address2 = self._read_eol()
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode110(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    self._cache['indent'][index0] = (address0, self._offset)
    return address0
def _read_dedent(self):
    # Grammar rule: dedent <- "\x0F" eol
    # "\x0F" (Shift In) is the counterpart marker to indent's "\x0E",
    # presumably flagging an indentation decrease — TODO confirm.
    # NOTE(review): the original embeds the raw control byte in the
    # literal; the escape below is the same one-character string.
    address0, index0 = FAILURE, self._offset
    cached = self._cache['dedent'].get(index0)
    if cached:
        self._offset = cached[1]
        return cached[0]
    index1, elements0 = self._offset, []
    address1 = FAILURE
    chunk0, max0 = None, self._offset + 1
    if max0 <= self._input_size:
        chunk0 = self._input[self._offset:max0]
    if chunk0 == '\x0F':
        address1 = TreeNode(self._input[self._offset:self._offset + 1], self._offset, [])
        self._offset = self._offset + 1
    else:
        address1 = FAILURE
        if self._offset > self._failure:
            self._failure = self._offset
            self._expected = []
        if self._offset == self._failure:
            self._expected.append('\'\\x0F\'')
    if address1 is not FAILURE:
        elements0.append(address1)
        address2 = FAILURE
        address2 = self._read_eol()
        if address2 is not FAILURE:
            elements0.append(address2)
        else:
            elements0 = None
            self._offset = index1
    else:
        elements0 = None
        self._offset = index1
    if elements0 is None:
        address0 = FAILURE
    else:
        address0 = TreeNode111(self._input[index1:self._offset], index1, elements0)
        self._offset = self._offset
    self._cache['dedent'][index0] = (address0, self._offset)
    return address0
class Parser(Grammar):
    """Drives the generated Grammar: parses one complete input string."""

    def __init__(self, input, actions, types):
        # `actions` and `types` are user-supplied hooks that generated
        # rules may invoke while building the tree.
        self._input = input
        self._input_size = len(input)
        self._actions = actions
        self._types = types
        self._offset = 0
        # Per-rule memoization: rule name -> {start offset: (node, end)}.
        self._cache = defaultdict(dict)
        # Rightmost failure position and the terminals expected there.
        self._failure = 0
        self._expected = []

    def parse(self):
        tree = self._read_root()
        # Success only when the root rule matched AND consumed all input.
        if tree is not FAILURE and self._offset == self._input_size:
            return tree
        if not self._expected:
            # Matched a prefix but trailing input remained.
            self._failure = self._offset
            self._expected.append('<EOF>')
        raise ParseError(format_error(self._input, self._failure, self._expected))
def format_error(input, offset, expected):
    """Build a human-readable parse error message.

    Shows the 1-based line containing *offset*, the line's text, and a
    caret column marker, e.g.::

        Line 2: expected 'x'
        def
         ^
    """
    rows = input.split('\n')
    row = 0
    consumed = 0
    # Advance whole lines (each costs its length plus the '\n') until
    # `offset` falls inside the current line.
    while consumed <= offset:
        consumed += len(rows[row]) + 1
        row += 1
    bad_line = rows[row - 1]
    line_start = consumed - (len(bad_line) + 1)
    caret = ' ' * (offset - line_start) + '^'
    header = 'Line ' + str(row) + ': expected ' + ', '.join(expected)
    return '\n'.join([header, bad_line, caret])
def parse(input, actions=None, types=None):
    """Module-level convenience entry point.

    Builds a Parser over *input* and returns the parse tree, raising
    ParseError on failure.
    """
    return Parser(input, actions, types).parse()
/loadimpact-1.1.5.tar.gz/loadimpact-1.1.5/ez_setup.py | import os
import shutil
import sys
import tempfile
import tarfile
import optparse
import subprocess
import platform
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
# Setuptools version bootstrapped when none (or an older one) is installed.
DEFAULT_VERSION = "1.1.6"
# Base URL the setuptools sdist tarball is fetched from (must end in '/').
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _check_call_py24(cmd, *args, **kwargs):
res = subprocess.call(cmd, *args, **kwargs)
class CalledProcessError(Exception):
pass
if not res == 0:
msg = "Command '%s' return non-zero exit status %d" % (cmd, res)
raise CalledProcessError(msg)
vars(subprocess).setdefault('check_call', _check_call_py24)
def _install(tarball, install_args=()):
    """Unpack *tarball* into a temp dir and run ``setup.py install`` there.

    :param tarball: path to the setuptools sdist tarball.
    :param install_args: extra arguments for ``setup.py install``
        (e.g. ``--user``).
    :return: 2 when the install step fails (used as the process exit
        code), otherwise None.  The temp dir is always removed.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        # Assumes the tarball contains a single top-level directory.
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2
    finally:
        # Restore the working directory before deleting the temp tree.
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
    """Unpack *tarball* in a temp dir and build a setuptools egg in *to_dir*.

    :param egg: expected path of the resulting egg file.
    :raises IOError: when *egg* does not exist after the build.
    """
    # extracting the tarball
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        tar = tarfile.open(tarball)
        _extractall(tar)
        tar.close()

        # going in the directory
        # Assumes the tarball contains a single top-level directory.
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)

        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
    """Download/build the setuptools egg if missing, then import from it.

    Side effects: prepends the egg to sys.path, drops any previously
    imported ``pkg_resources`` and imports ``setuptools`` from the egg.
    """
    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        tarball = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, tarball, to_dir)
    sys.path.insert(0, egg)

    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']

    import setuptools
    # Tell setuptools' own machinery which egg it was bootstrapped from.
    setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15):
    """Ensure setuptools >= *version* is importable, downloading if needed.

    Exits the process (code 2) when a conflicting setuptools is already
    imported and thus cannot be replaced in this interpreter.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    was_imported = 'pkg_resources' in sys.modules or \
        'setuptools' in sys.modules
    try:
        import pkg_resources
    except ImportError:
        # No setuptools at all: bootstrap from scratch.
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        # An acceptable version is already available; nothing to do.
        return
    except pkg_resources.VersionConflict:
        # py2.4-compatible way of reading the active exception.
        e = sys.exc_info()[1]
        if was_imported:
            sys.stderr.write(
                "The required version of setuptools (>=%s) is not available,\n"
                "and can't be installed while this script is running. Please\n"
                "install a more recent version first, using\n"
                "'easy_install -U setuptools'."
                "\n\n(Currently using %r)\n" % (version, e.args[0]))
            sys.exit(2)
        else:
            del pkg_resources, sys.modules['pkg_resources']  # reload ok
            return _do_download(version, download_base, to_dir,
                                download_delay)
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir,
                            download_delay)
def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell (which will validate
    trust). Raise an exception if the command cannot complete.
    """
    full_target = os.path.abspath(target)
    ps_script = (
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
        % {'url': url, 'target': full_target}
    )
    subprocess.check_call(['powershell', '-Command', ps_script])


def has_powershell():
    """Return True when a working ``powershell`` binary is available."""
    if platform.system() != 'Windows':
        return False
    devnull = open(os.path.devnull, 'wb')
    try:
        try:
            subprocess.check_call(
                ['powershell', '-Command', 'echo test'],
                stdout=devnull, stderr=devnull)
        except:
            # Any failure (missing binary, non-zero exit) means "not viable".
            return False
    finally:
        devnull.close()
    return True


# Probe used by get_best_downloader().
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
    """Download *url* to *target* using the ``curl`` command line tool."""
    subprocess.check_call(['curl', url, '--silent', '--output', target])


def has_curl():
    """Return True when ``curl --version`` runs successfully."""
    devnull = open(os.path.devnull, 'wb')
    try:
        try:
            subprocess.check_call(['curl', '--version'],
                                  stdout=devnull, stderr=devnull)
        except:
            # Missing binary or non-zero exit: curl is not usable.
            return False
    finally:
        devnull.close()
    return True


# Probe used by get_best_downloader().
download_file_curl.viable = has_curl
def download_file_wget(url, target):
    """Download *url* to *target* using the ``wget`` command line tool."""
    subprocess.check_call(['wget', url, '--quiet', '--output-document', target])


def has_wget():
    """Return True when ``wget --version`` runs successfully."""
    devnull = open(os.path.devnull, 'wb')
    try:
        try:
            subprocess.check_call(['wget', '--version'],
                                  stdout=devnull, stderr=devnull)
        except:
            # Missing binary or non-zero exit: wget is not usable.
            return False
    finally:
        devnull.close()
    return True


# Probe used by get_best_downloader().
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
    """
    Use Python to download the file, even though it cannot authenticate the
    connection.
    """
    try:
        from urllib.request import urlopen
    except ImportError:
        # Python 2 fallback.
        from urllib2 import urlopen
    src = None
    dst = None
    try:
        src = urlopen(url)
        # Read/write all in one block, so we don't create a corrupt file
        # if the download is interrupted.
        payload = src.read()
        dst = open(target, "wb")
        dst.write(payload)
    finally:
        for handle in (src, dst):
            if handle:
                handle.close()


# Always usable: pure-Python fallback with no external dependency.
download_file_insecure.viable = lambda: True
def get_best_downloader():
    """Return the first viable downloader, in order of preference.

    Preference: powershell (validates TLS trust), then curl, then wget,
    then the non-validating pure-Python fallback (always viable).
    """
    for candidate in (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    ):
        if candidate.viable():
            return candidate
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15,
                        downloader_factory=get_best_downloader):
    """Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.

    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.

    NOTE(review): ``delay`` is accepted (and documented) but never used in
    this function body — TODO confirm whether the pause was dropped
    intentionally.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    tgz_name = "setuptools-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        log.warn("Downloading %s", url)
        downloader = downloader_factory()
        downloader(url, saveto)
    return os.path.realpath(saveto)
def _extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
    directory and set owner, modification time and permissions on
    directories afterwards. `path' specifies a different directory
    to extract to. `members' is optional and must be a subset of the
    list returned by getmembers().

    Backport of TarFile.extractall for Python 2.4 (``self`` is a
    TarFile instance).
    """
    import copy
    import operator
    from tarfile import ExtractError
    directories = []
    if members is None:
        members = self
    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 448  # decimal for oct 0700
        self.extract(tarinfo, path)
    # Reverse sort directories.
    # Deepest-first so chmod on a parent cannot lock out children fixes.
    if sys.version_info < (2, 4):
        def sorter(dir1, dir2):
            return cmp(dir1.name, dir2.name)
        directories.sort(sorter)
        directories.reverse()
    else:
        directories.sort(key=operator.attrgetter('name'), reverse=True)
    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError:
            e = sys.exc_info()[1]
            if self.errorlevel > 1:
                raise
            else:
                # Best effort: log through tarfile's debug channel.
                self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
    """
    Parse the command line for options

    Returns the optparse ``Values`` object; positional arguments are
    ignored.  Reads sys.argv directly.
    """
    parser = optparse.OptionParser()
    parser.add_option(
        '--user', dest='user_install', action='store_true', default=False,
        help='install in user site package (requires Python 2.6 or later)')
    parser.add_option(
        '--download-base', dest='download_base', metavar="URL",
        default=DEFAULT_URL,
        help='alternative URL from where to download the setuptools package')
    parser.add_option(
        '--insecure', dest='downloader_factory', action='store_const',
        # store_const needs a factory, hence the lambda wrapper.
        const=lambda: download_file_insecure, default=get_best_downloader,
        help='Use internal, non-validating downloader'
    )
    options, args = parser.parse_args()
    # positional arguments are ignored
    return options
def main(version=DEFAULT_VERSION):
    """Install or upgrade setuptools and EasyInstall

    NOTE(review): ``version`` is accepted but not forwarded to
    download_setuptools below — TODO confirm intent.
    """
    options = _parse_args()
    tarball = download_setuptools(download_base=options.download_base,
                                  downloader_factory=options.downloader_factory)
    # _install returns 2 on failure, None on success.
    return _install(tarball, _build_install_args(options))


if __name__ == '__main__':
    sys.exit(main())
/WorkWeRoBot-1.9.2.tar.gz/WorkWeRoBot-1.9.2/workwerobot/robot.py | from __future__ import absolute_import, unicode_literals
import six
import warnings
from workwerobot.config import Config, ConfigAttribute
from workwerobot.client import Client
from workwerobot.exceptions import ConfigError
from workwerobot.parser import parse_xml, process_message
from workwerobot.replies import process_function_reply
from workwerobot.utils import (
to_binary, to_text, check_signature, get_signature, make_error_page, cached_property,
is_regex
)
try:
from inspect import signature
except ImportError:
from funcsigs import signature
__all__ = ['BaseRoBot', 'WeRoBot']
# Defaults applied when no Config object is passed to BaseRoBot.
_DEFAULT_CONFIG = dict(
    TOKEN=None,
    SERVER="auto",
    HOST="127.0.0.1",
    PORT="8888",
    SESSION_STORAGE=None,
    ENCODING_AES_KEY=None
)
class BaseRoBot(object):
    """
    BaseRoBot is the core object of the application: it maintains the
    registry of handlers and dispatches incoming messages and events.

    :param logger: the logger used for output; if ``None``, werobot's
        default logger is used
    :param config: a :class:`werobot.config.Config` object with settings

    .. note:: Prefer configuring through :class:`~werobot.config.Config`;
        all of the parameters below are **deprecated**.

    :param token: the token configured for the WeChat account **(deprecated)**
    :param enable_session: whether sessions are enabled **(deprecated)**
    :param session_storage: object used to store sessions; if ``None``,
        werobot.session.sqlitestorage.SQLiteStorage is used **(deprecated)**
    :param encoding_aes_key: the AES key used to encrypt/decrypt messages
        **(deprecated)**
    """
    # Handler categories a message/event can be dispatched to.  The keys of
    # self._handlers are built from this list.
    message_types = [
        'subscribe_event',
        'unsubscribe_event',
        'click_event',
        'view_event',
        'scancode_waitmsg_event',
        'scancode_push_event',
        'pic_sysphoto_event',
        'pic_photo_or_album_event',
        'pic_weixin_event',
        'location_select_event',
        'LOCATION_event',
        # BUG FIX: the location_event() decorator registers handlers under
        # 'location_event'; without this entry add_handler() raised
        # KeyError.  'LOCATION_event' is kept as well in case the parser
        # emits the upper-case form — TODO confirm against the parser.
        'location_event',
        'unknown_event',
        'text',
        'image',
        'link',
        'location',
        'voice',
        'unknown',
        'video'
    ]

    token = ConfigAttribute("TOKEN")
    session_storage = ConfigAttribute("SESSION_STORAGE")

    def __init__(
        self,
        token=None,
        logger=None,
        enable_session=None,
        session_storage=None,
        corp_id=None,
        encoding_aes_key=None,
        agent_id=None,
        config=None,
        **kwargs
    ):
        # One handler list per message/event type, plus a catch-all list.
        self._handlers = {k: [] for k in self.message_types}
        self._handlers['all'] = []
        self.make_error_page = make_error_page
        # BUG FIX: get_encrypted_reply() reads this flag, but it used to be
        # set (to True) only inside the ``crypto`` cached property, so
        # replying without encryption raised AttributeError.
        self.use_encryption = False

        if logger is None:
            import workwerobot.logger
            logger = workwerobot.logger.logger
        self.logger = logger

        if config is None:
            self.config = Config(_DEFAULT_CONFIG)
            self.config.update(
                TOKEN=token,
                ENCODING_AES_KEY=encoding_aes_key,
                CORP_ID=corp_id,
                AGENT_ID=agent_id
            )
            # Any extra keyword argument becomes an upper-cased config key.
            for k, v in kwargs.items():
                self.config[k.upper()] = v

            if enable_session is not None:
                warnings.warn(
                    "enable_session is deprecated."
                    "set SESSION_STORAGE to False if you want to disable Session",
                    DeprecationWarning,
                    stacklevel=2
                )
                if not enable_session:
                    self.config["SESSION_STORAGE"] = False

            if session_storage:
                self.config["SESSION_STORAGE"] = session_storage
        else:
            self.config = config

    @cached_property
    def crypto(self):
        """Lazily-built MessageCrypt; also flips ``use_encryption`` on."""
        encoding_aes_key = self.config.get("ENCODING_AES_KEY", None)
        corp_id = self.config.get("CORP_ID", None)
        if not encoding_aes_key:
            raise ConfigError(
                "You need to provide encoding_aes_key "
                "to encrypt/decrypt messages"
            )
        self.use_encryption = True
        from .crypto import MessageCrypt
        return MessageCrypt(
            token=self.config["TOKEN"],
            encoding_aes_key=encoding_aes_key,
            corp_id=corp_id
        )

    @cached_property
    def client(self):
        """Lazily-built API client bound to this robot's config."""
        return Client(self.config)

    @cached_property
    def session_storage(self):
        # SESSION_STORAGE semantics: False disables sessions entirely;
        # a falsy non-False value (None) selects the SQLite default.
        if self.config["SESSION_STORAGE"] is False:
            return None
        if not self.config["SESSION_STORAGE"]:
            from .session.sqlitestorage import SQLiteStorage
            self.config["SESSION_STORAGE"] = SQLiteStorage()
        return self.config["SESSION_STORAGE"]

    @session_storage.setter
    def session_storage(self, value):
        warnings.warn(
            "You should set session storage in config",
            DeprecationWarning,
            stacklevel=2
        )
        self.config["SESSION_STORAGE"] = value

    def handler(self, f):
        """
        Decorator registering *f* as a handler for every message and event.
        """
        self.add_handler(f, type='all')
        return f

    def text(self, f):
        """
        Decorator registering *f* as a handler for ``text`` messages.
        """
        self.add_handler(f, type='text')
        return f

    def image(self, f):
        """
        Decorator registering *f* as a handler for ``image`` messages.
        """
        self.add_handler(f, type='image')
        return f

    def location(self, f):
        """
        Decorator registering *f* as a handler for ``location`` messages.
        """
        self.add_handler(f, type='location')
        return f

    def link(self, f):
        """
        Decorator registering *f* as a handler for ``link`` messages.
        """
        self.add_handler(f, type='link')
        return f

    def voice(self, f):
        """
        Decorator registering *f* as a handler for ``voice`` messages.
        """
        self.add_handler(f, type='voice')
        return f

    def video(self, f):
        """
        Decorator registering *f* as a handler for ``video`` messages.
        """
        self.add_handler(f, type='video')
        return f

    def unknown(self, f):
        """
        Decorator registering *f* as a handler for ``unknown`` messages.
        """
        self.add_handler(f, type='unknown')
        return f

    def subscribe(self, f):
        """
        Decorator registering *f* as a handler for ``subscribe`` events.
        """
        self.add_handler(f, type='subscribe_event')
        return f

    def unsubscribe(self, f):
        """
        Decorator registering *f* as a handler for ``unsubscribe`` events.
        """
        self.add_handler(f, type='unsubscribe_event')
        return f

    def click(self, f):
        """
        Decorator registering *f* as a handler for custom-menu ``click``
        events.
        """
        self.add_handler(f, type='click_event')
        return f

    def scancode_push(self, f):
        """
        Decorator registering *f* as a handler for ``scancode_push`` events.
        """
        self.add_handler(f, type='scancode_push_event')
        return f

    def scancode_waitmsg(self, f):
        """
        Decorator registering *f* as a handler for ``scancode_waitmsg``
        events.
        """
        self.add_handler(f, type='scancode_waitmsg_event')
        return f

    def pic_sysphoto(self, f):
        """
        Decorator registering *f* as a handler for ``pic_sysphoto_event``
        (system camera photo) push events.
        """
        self.add_handler(f, type='pic_sysphoto_event')
        return f

    def pic_photo_or_album(self, f):
        """
        Decorator registering *f* as a handler for
        ``pic_photo_or_album_event`` (camera or album photo) push events.
        """
        self.add_handler(f, type='pic_photo_or_album_event')
        return f

    def pic_weixin(self, f):
        """
        Decorator registering *f* as a handler for ``pic_weixin_event``
        (WeChat album photo picker) push events.
        """
        self.add_handler(f, type='pic_weixin_event')
        return f

    def location_select(self, f):
        """
        Decorator registering *f* as a handler for ``location_select_event``
        (location picker) push events.
        """
        self.add_handler(f, type='location_select_event')
        return f

    def location_event(self, f):
        """
        Decorator registering *f* as a handler for ``location_event``
        (location report) events.
        """
        self.add_handler(f, type='location_event')
        return f

    def view(self, f):
        """
        Decorator registering *f* as a handler for ``view`` (link) events.
        """
        self.add_handler(f, type='view_event')
        return f

    def unknown_event(self, f):
        """
        Decorator registering *f* as a handler for ``unknown_event`` events.
        """
        self.add_handler(f, type='unknown_event')
        return f

    def key_click(self, key):
        """
        Convenience helper for custom-menu ``click`` events.

        **@key_click('KEYNAME')** registers a handler that only fires for
        clicks whose key equals ``KEYNAME``.
        """
        def wraps(f):
            # Call f with (message) or (message, session) depending on its
            # declared parameter count.
            argc = len(signature(f).parameters.keys())

            @self.click
            def onclick(message, session=None):
                if message.key == key:
                    return f(*[message, session][:argc])

            return f

        return wraps

    def filter(self, *args):
        """
        Convenience helper for ``text`` messages.

        Use ``@filter("xxx")``, ``@filter(re.compile("xxx"))`` or
        ``@filter("xxx", "xxx2")`` to register a handler for specific
        message content.
        """
        def wraps(f):
            self.add_filter(func=f, rules=list(args))
            return f

        return wraps

    def add_handler(self, func, type='all'):
        """
        Register a handler on this BaseRoBot instance.

        :param func: the callable to use as a handler.
        :param type: the handler category (one of ``message_types`` or
            ``'all'``).
        :return: None
        """
        if not callable(func):
            raise ValueError("{} is not callable".format(func))
        # Store the declared arity so dispatch can pass (message) or
        # (message, session) as appropriate.
        self._handlers[type].append(
            (func, len(signature(func).parameters.keys()))
        )

    def get_handlers(self, type):
        """Return the handlers for *type* plus the catch-all handlers."""
        return self._handlers.get(type, []) + self._handlers['all']

    def add_filter(self, func, rules):
        """
        Register a ``filter handler`` on this BaseRoBot.

        :param func: the handler run when one of *rules* matches.
        :param rules: a list of strings and/or compiled regex patterns to
            match the message content against.
        :return: None
        """
        if not callable(func):
            raise ValueError("{} is not callable".format(func))
        if not isinstance(rules, list):
            raise ValueError("{} is not list".format(rules))
        if len(rules) > 1:
            # Register each rule independently.
            for x in rules:
                self.add_filter(func, [x])
        else:
            target_content = rules[0]
            if isinstance(target_content, six.string_types):
                target_content = to_text(target_content)

                def _check_content(message):
                    return message.content == target_content
            elif is_regex(target_content):

                def _check_content(message):
                    return target_content.match(message.content)
            else:
                raise TypeError("%s is not a valid rule" % target_content)
            argc = len(signature(func).parameters.keys())

            @self.text
            def _f(message, session=None):
                _check_result = _check_content(message)
                if _check_result:
                    # A bool match result carries no info; pass None instead
                    # (regex handlers receive the match object).
                    if isinstance(_check_result, bool):
                        _check_result = None
                    return func(*[message, session, _check_result][:argc])

    def parse_message(
        self, body, timestamp=None, nonce=None, msg_signature=None
    ):
        """
        Parse the raw XML received, decrypting it when necessary, and
        return a WeRoBot Message.

        :param body: the body of the request sent by WeChat's servers.
        :return: WeRoBot Message
        """
        message_dict = parse_xml(body)
        if "Encrypt" in message_dict:
            xml = self.crypto.decrypt_message(
                timestamp=timestamp,
                nonce=nonce,
                msg_signature=msg_signature,
                encrypt_msg=message_dict["Encrypt"]
            )
            message_dict = parse_xml(xml)
        return process_message(message_dict)

    def get_reply(self, message):
        """
        Get the Reply object for *message* by running its handlers.

        :param message: the message to process
        :return: the resulting Reply object, or None when no handler
            replied (or a handler raised)
        """
        session_storage = self.session_storage

        id = None
        session = None
        if session_storage and hasattr(message, "source"):
            id = to_binary(message.source)
            session = session_storage[id]

        handlers = self.get_handlers(message.type)
        try:
            for handler, args_count in handlers:
                args = [message, session][:args_count]
                reply = handler(*args)
                if session_storage and id:
                    # Persist any session mutation the handler performed.
                    session_storage[id] = session
                if reply:
                    return process_function_reply(reply, message=message)
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt /
            # SystemExit are not swallowed; handler errors are still logged.
            self.logger.exception("Catch an exception")

    def get_encrypted_reply(self, message):
        """
        Run the handlers for a WeRoBot Message and return the rendered
        reply text, encrypted when encryption is configured.

        :param message: a WeRoBot Message instance.
        :return: reply (plain text)
        """
        reply = self.get_reply(message)
        if not reply:
            self.logger.warning("No handler responded message %s" % message)
            return ''
        if self.use_encryption:
            return self.crypto.encrypt_message(reply)
        else:
            return reply.render()

    def check_signature(self, timestamp, nonce, echostr, signature):
        """
        Validate a request signature.

        :param timestamp: the timestamp
        :param nonce: the random string used to build the signature
        :param signature: the signature to check
        :param echostr: the echo string to check
        :return: ``True`` when the signature is valid, ``False`` otherwise
        """
        return check_signature(
            self.config["TOKEN"], timestamp, nonce, echostr, signature
        )

    def error_page(self, f):
        """
        Register the error page shown when signature validation fails.

        Usage::

            @robot.error_page
            def make_error_page(url):
                return "<h1>This page %s is not for ordinary visitors</h1>" % url

        """
        self.make_error_page = f
        return f
class WeRoBot(BaseRoBot):
    """
    WeRoBot extends BaseRoBot with a bottle-based WSGI application that
    receives the requests sent by WeChat's servers.
    """

    @cached_property
    def wsgi(self):
        # Build (once) a bottle app exposing the robot; refuse to start
        # with no handlers registered at all.
        if not self._handlers:
            raise RuntimeError('No Handler.')
        from bottle import Bottle
        from workwerobot.contrib.bottle import make_view
        app = Bottle()
        # Catch-all route: the robot answers on any path, GET for the
        # signature echo, POST for messages.
        app.route('<t:path>', ['GET', 'POST'], make_view(self))
        return app

    def run(
        self, server=None, host=None, port=None, enable_pretty_logging=True
    ):
        """
        Run WeRoBot.

        :param server: backend passed to bottle's ``run``; see the
            `bottle docs <https://bottlepy.org/docs/dev/deployment.html#switching-the-server-backend>`_
        :param host: host address to bind to
        :param port: port to bind to
        :param enable_pretty_logging: whether to enable prettified log
            output
        """
        if enable_pretty_logging:
            from workwerobot.logger import enable_pretty_logging
            enable_pretty_logging(self.logger)
        # Fall back to config values for anything not passed explicitly.
        if server is None:
            server = self.config["SERVER"]
        if host is None:
            host = self.config["HOST"]
        if port is None:
            port = self.config["PORT"]
        try:
            self.wsgi.run(server=server, host=host, port=port)
        except KeyboardInterrupt:
            # Ctrl-C is a normal shutdown, not an error.
            exit(0)
/aiokafka-commit-0.0.7.tar.gz/aiokafka-commit-0.0.7/aiokafka/producer/message_accumulator.py | import asyncio
import collections
import copy
import time
from aiokafka.errors import (KafkaTimeoutError,
NotLeaderForPartitionError,
LeaderNotAvailableError,
ProducerClosed)
from aiokafka.record.legacy_records import LegacyRecordBatchBuilder
from aiokafka.record.default_records import DefaultRecordBatchBuilder
from aiokafka.structs import RecordMetadata
from aiokafka.util import create_future, get_running_loop
class BatchBuilder:
    """Accumulates records for a single produce batch.

    Wraps either a legacy (magic 0/1) or default (magic 2) record-batch
    builder and tracks the relative offset of each appended message.
    """

    def __init__(self, magic, batch_size, compression_type,
                 *, is_transactional):
        if magic < 2:
            # Transactions require the v2 (magic 2) record format.
            assert not is_transactional
            self._builder = LegacyRecordBatchBuilder(
                magic, compression_type, batch_size)
        else:
            # Real producer id/epoch/sequence are stamped in later via
            # _set_producer_state().
            self._builder = DefaultRecordBatchBuilder(
                magic, compression_type, is_transactional=is_transactional,
                producer_id=-1, producer_epoch=-1, base_sequence=0,
                batch_size=batch_size)
        self._relative_offset = 0
        self._buffer = None
        self._closed = False

    def append(self, *, timestamp, key, value, headers=None):
        """Add a message to the batch.

        Arguments:
            timestamp (float or None): epoch timestamp in seconds. If None,
                the timestamp will be set to the current time. If submitting to
                an 0.8.x or 0.9.x broker, the timestamp will be ignored.
            key (bytes or None): the message key. `key` and `value` may not
                both be None.
            value (bytes or None): the message value. `key` and `value` may not
                both be None.
            headers (list or None): optional record headers; defaults to
                an empty list.

        Returns:
            If the message was successfully added, returns a metadata object
            with crc, offset, size, and timestamp fields. If the batch is full
            or closed, returns None.
        """
        # BUG FIX: ``headers`` used to default to a shared mutable ``[]``;
        # a None sentinel preserves the observable behavior without the
        # mutable-default hazard.
        if headers is None:
            headers = []
        if self._closed:
            return None

        metadata = self._builder.append(
            self._relative_offset, timestamp, key, value,
            headers=headers)

        # The underlying builder returns None when it could not fit the
        # message (batch full).
        if metadata is None:
            return None

        self._relative_offset += 1
        return metadata

    def close(self):
        """Close the batch to further updates.

        Closing the batch before submitting to the producer ensures that no
        messages are added via the ``producer.send()`` interface. To gracefully
        support both the batch and individual message interfaces, leave the
        batch open. For complete control over the batch's contents, close
        before submission. Closing a batch has no effect on when it's sent to
        the broker.

        A batch may not be reopened after it's closed.
        """
        if self._closed:
            return
        self._closed = True

    def _set_producer_state(self, producer_id, producer_epoch, base_sequence):
        # Only the v2 builder carries idempotent-producer state.
        assert type(self._builder) is DefaultRecordBatchBuilder
        self._builder.set_producer_state(
            producer_id, producer_epoch, base_sequence)

    def _build(self):
        """Serialize (once) and return the batch buffer; closes the batch."""
        self.close()
        if self._buffer is None:
            self._buffer = self._builder.build()
            del self._builder  # We may only call self._builder.build() once!
        return self._buffer

    def size(self):
        """Get the size of batch in bytes."""
        if self._buffer is not None:
            return len(self._buffer)
        else:
            return self._builder.size()

    def record_count(self):
        """Get the number of records in the batch."""
        return self._relative_offset
class MessageBatch:
"""This class incapsulate operations with batch of produce messages"""
def __init__(self, tp, builder, ttl):
self._builder = builder
self._tp = tp
self._ttl = ttl
self._ctime = time.monotonic()
# Waiters
# Set when messages are delivered to Kafka based on ACK setting
self.future = create_future()
self._msg_futures = []
# Set when sender takes this batch
self._drain_waiter = create_future()
self._retry_count = 0
@property
def tp(self):
return self._tp
@property
def record_count(self):
return self._builder.record_count()
def append(self, key, value, timestamp_ms, _create_future=create_future,
headers=[]):
"""Append message (key and value) to batch
Returns:
None if batch is full
or
asyncio.Future that will resolved when message is delivered
"""
metadata = self._builder.append(
timestamp=timestamp_ms, key=key, value=value, headers=headers)
if metadata is None:
return None
future = _create_future()
self._msg_futures.append((future, metadata))
return future
def done(self, base_offset, timestamp=None, log_start_offset=None,
_record_metadata_class=RecordMetadata):
"""Resolve all pending futures"""
tp = self._tp
topic = tp.topic
partition = tp.partition
if timestamp == -1:
timestamp_type = 0
else:
timestamp_type = 1
# Set main batch future
if not self.future.done():
self.future.set_result(_record_metadata_class(
topic, partition, tp, base_offset, timestamp, timestamp_type,
log_start_offset))
# Set message futures
for future, metadata in self._msg_futures:
if future.done():
continue
# If timestamp returned by broker is -1 it means we need to take
# the timestamp sent by user.
if timestamp == -1:
timestamp = metadata.timestamp
offset = base_offset + metadata.offset
future.set_result(_record_metadata_class(
topic, partition, tp, offset, timestamp, timestamp_type,
log_start_offset))
def done_noack(self):
""" Resolve all pending futures to None """
# Faster resolve for base_offset=None case.
if not self.future.done():
self.future.set_result(None)
for future, _ in self._msg_futures:
if future.done():
continue
future.set_result(None)
    def failure(self, exception):
        """Fail the batch: set *exception* on every unresolved future."""
        if not self.future.done():
            self.future.set_exception(exception)
        for future, _ in self._msg_futures:
            if future.done():
                continue
            # we need to copy exception so traceback is not multiplied
            # https://github.com/aio-libs/aiokafka/issues/246
            future.set_exception(copy.copy(exception))
        # Consume exception to avoid warnings. We delegate this consumption
        # to user only in case of explicit batch API.
        if self._msg_futures:
            self.future.exception()
        # In case where sender fails and closes batches all waiters have to be
        # reset also.
        if not self._drain_waiter.done():
            self._drain_waiter.set_exception(exception)
    async def wait_drain(self, timeout=None):
        """Wait until all message from this batch is processed"""
        # Snapshot the attribute: reset_drain() may replace
        # ``self._drain_waiter`` while this coroutine is suspended below.
        waiter = self._drain_waiter
        await asyncio.wait([waiter], timeout=timeout)
        if waiter.done():
            waiter.result()  # Check for exception
def expired(self):
"""Check that batch is expired or not"""
return (time.monotonic() - self._ctime) > self._ttl
    def drain_ready(self):
        """Compress batch to be ready for send"""
        # Wake up anyone blocked in wait_drain() and count this send attempt.
        if not self._drain_waiter.done():
            self._drain_waiter.set_result(None)
        self._retry_count += 1
    def reset_drain(self):
        """Reset drain waiter, until we will do another retry"""
        assert self._drain_waiter.done()
        self._drain_waiter = create_future()
    def set_producer_state(self, producer_id, producer_epoch, base_sequence):
        """Stamp idempotent/transactional producer state onto the builder."""
        # Only legal before the sender has taken the batch.
        assert not self._drain_waiter.done()
        self._builder._set_producer_state(
            producer_id, producer_epoch, base_sequence)
    def get_data_buffer(self):
        """Return the serialized batch buffer, ready for the wire."""
        return self._builder._build()
    def is_empty(self):
        """True if no records were successfully appended to this batch."""
        return self._builder.record_count() == 0
    @property
    def retry_count(self):
        """Number of times the sender drained (tried to send) this batch."""
        return self._retry_count
class MessageAccumulator:
    """Accumulator of messages batched by topic-partition

    Producer adds messages to this accumulator and a background send task
    gets batches per nodes to process it.
    """
    def __init__(
            self, cluster, batch_size, compression_type, batch_ttl, *,
            txn_manager=None, loop=None):
        if loop is None:
            loop = get_running_loop()
        self._loop = loop
        # tp -> deque of MessageBatch waiting to be drained by the sender.
        self._batches = collections.defaultdict(collections.deque)
        # Batches handed to the sender but not yet delivered or failed.
        self._pending_batches = set()
        self._cluster = cluster
        self._batch_size = batch_size
        self._compression_type = compression_type
        self._batch_ttl = batch_ttl
        # Resolved when data arrives; recreated after each drain cycle.
        self._wait_data_future = loop.create_future()
        self._closed = False
        # Assume the oldest protocol until set_api_version() is called.
        self._api_version = (0, 9)
        self._txn_manager = txn_manager
        self._exception = None  # Critical exception
    def set_api_version(self, api_version):
        """Record the broker API version; drives record "magic" selection."""
        self._api_version = api_version
    async def flush(self):
        """Wait until every queued and in-flight batch is delivered."""
        waiters = []
        # Copy the containers: they may shrink while we iterate/await.
        for batches in self._batches.values():
            for batch in list(batches):
                waiters.append(batch.future)
        for batch in list(self._pending_batches):
            waiters.append(batch.future)
        if waiters:
            await asyncio.wait(waiters)
    async def flush_for_commit(self):
        """Close all open buffers and await delivery before a txn commit."""
        waiters = []
        for batches in self._batches.values():
            for batch in batches:
                # We force all buffers to close to finalize the transaction
                # scope. We should not add anything to this transaction.
                batch._builder.close()
                waiters.append(batch.future)
        for batch in self._pending_batches:
            waiters.append(batch.future)
        # Wait for all waiters to finish. We only wait for the scope we defined
        # above, other batches should not be delivered as part of this
        # transaction
        if waiters:
            await asyncio.wait(waiters)
    def fail_all(self, exception):
        """Fail every queued and pending batch with *exception*."""
        # Close all batches with this exception
        for batches in self._batches.values():
            for batch in batches:
                batch.failure(exception)
        for batch in self._pending_batches:
            batch.failure(exception)
        # Remember the critical error so add_message()/add_batch() re-raise it.
        self._exception = exception
    async def close(self):
        """Refuse new messages and wait for outstanding batches to finish."""
        self._closed = True
        await self.flush()
async def add_message(
self, tp, key, value, timeout, timestamp_ms=None,
headers=[]
):
""" Add message to batch by topic-partition
If batch is already full this method waits (`timeout` seconds maximum)
until batch is drained by send task
"""
while True:
if self._closed:
# this can happen when producer is closing but try to send some
# messages in async task
raise ProducerClosed()
if self._exception is not None:
raise copy.copy(self._exception)
pending_batches = self._batches.get(tp)
if not pending_batches:
builder = self.create_builder()
batch = self._append_batch(builder, tp)
else:
batch = pending_batches[-1]
future = batch.append(key, value, timestamp_ms, headers=headers)
if future is not None:
return future
# Batch is full, can't append data atm,
# waiting until batch per topic-partition is drained
start = time.monotonic()
await batch.wait_drain(timeout)
timeout -= time.monotonic() - start
if timeout <= 0:
raise KafkaTimeoutError()
    def data_waiter(self):
        """ Return waiter future that will be resolved when accumulator contain
        some data for drain
        """
        return self._wait_data_future
    def _pop_batch(self, tp):
        """Remove the head batch for *tp* and hand it to the sender.

        On a first (non-retry) drain this also stamps idempotent producer
        state and advances the transactional sequence number, so ordering of
        these steps must not change.
        """
        batch = self._batches[tp].popleft()
        not_retry = batch.retry_count == 0
        if self._txn_manager is not None and not_retry:
            assert self._txn_manager.has_pid(), \
                "We should have waited for it in sender routine"
            seq = self._txn_manager.sequence_number(batch.tp)
            self._txn_manager.increment_sequence_number(
                batch.tp, batch.record_count)
            batch.set_producer_state(
                producer_id=self._txn_manager.producer_id,
                producer_epoch=self._txn_manager.producer_epoch,
                base_sequence=seq)
        batch.drain_ready()
        if len(self._batches[tp]) == 0:
            del self._batches[tp]
        self._pending_batches.add(batch)
        if not_retry:
            # Remove from the pending set once delivered/failed; retried
            # batches are removed explicitly in reenqueue() instead.
            def cb(fut, batch=batch, self=self):
                self._pending_batches.remove(batch)
            batch.future.add_done_callback(cb)
        return batch
    def reenqueue(self, batch):
        """Put a failed in-flight batch back at the head of its tp queue."""
        tp = batch.tp
        self._batches[tp].appendleft(batch)
        self._pending_batches.remove(batch)
        batch.reset_drain()
def drain_by_nodes(self, ignore_nodes, muted_partitions=set()):
""" Group batches by leader to partition nodes. """
nodes = collections.defaultdict(dict)
unknown_leaders_exist = False
for tp in list(self._batches.keys()):
# Just ignoring by node is not enough, as leader can change during
# the cycle
if tp in muted_partitions:
continue
leader = self._cluster.leader_for_partition(tp)
if leader is None or leader == -1:
if self._batches[tp][0].expired():
# batch is for partition is expired and still no leader,
# so set exception for batch and pop it
batch = self._pop_batch(tp)
if leader is None:
err = NotLeaderForPartitionError()
else:
err = LeaderNotAvailableError()
batch.failure(exception=err)
unknown_leaders_exist = True
continue
elif ignore_nodes and leader in ignore_nodes:
continue
batch = self._pop_batch(tp)
# We can get an empty batch here if all `append()` calls failed
# with validation...
if not batch.is_empty():
nodes[leader][tp] = batch
else:
# XXX: use something more graceful. We just want to trigger
# delivery future here, no message futures.
batch.done_noack()
# all batches are drained from accumulator
# so create "wait data" future again for waiting new data in send
# task
if not self._wait_data_future.done():
self._wait_data_future.set_result(None)
self._wait_data_future = self._loop.create_future()
return nodes, unknown_leaders_exist
    def create_builder(self):
        """Create a BatchBuilder matching negotiated broker capabilities.

        The record format version ("magic") follows the broker API version;
        the transactional flag is set when a transactional id is configured.
        """
        if self._api_version >= (0, 11):
            magic = 2
        elif self._api_version >= (0, 10):
            magic = 1
        else:
            magic = 0
        is_transactional = False
        if self._txn_manager is not None and \
                self._txn_manager.transactional_id is not None:
            is_transactional = True
        return BatchBuilder(
            magic, self._batch_size, self._compression_type,
            is_transactional=is_transactional)
    def _append_batch(self, builder, tp):
        """Wrap *builder* in a MessageBatch, enqueue it and wake the sender."""
        # We must do this before actual add takes place to check for errors.
        if self._txn_manager is not None:
            self._txn_manager.maybe_add_partition_to_txn(tp)
        batch = MessageBatch(tp, builder, self._batch_ttl)
        self._batches[tp].append(batch)
        if not self._wait_data_future.done():
            self._wait_data_future.set_result(None)
        return batch
async def add_batch(self, builder, tp, timeout):
"""Add BatchBuilder to queue by topic-partition.
Arguments:
builder (BatchBuilder): batch object to enqueue.
tp (TopicPartition): topic and partition to enqueue this batch for.
timeout (int): time in seconds to wait for a free slot in the batch
queue.
Returns:
MessageBatch: delivery wrapper around the BatchBuilder object.
Raises:
aiokafka.errors.ProducerClosed: the accumulator has already been
closed and flushed.
aiokafka.errors.KafkaTimeoutError: the batch could not be added
within the specified timeout.
"""
if self._closed:
raise ProducerClosed()
if self._exception is not None:
raise copy.copy(self._exception)
start = time.monotonic()
while timeout > 0:
pending = self._batches.get(tp)
if pending:
await pending[-1].wait_drain(timeout=timeout)
timeout -= time.monotonic() - start
else:
batch = self._append_batch(builder, tp)
return asyncio.shield(batch.future)
raise KafkaTimeoutError() | PypiClean |
/onegov.swissvotes-1.2.5-py3-none-any.whl/onegov/swissvotes/layouts/vote.py | from cached_property import cached_property
from onegov.core.elements import Link
from onegov.swissvotes import _
from onegov.swissvotes.layouts.default import DefaultLayout
class VoteLayout(DefaultLayout):
    """Layout for the detail page of a single vote."""

    @cached_property
    def title(self):
        """The vote's short title, used as the page heading."""
        return self.model.short_title

    @cached_property
    def editbar_links(self):
        """Edit-bar actions; visible to admins and editors only."""
        if not self.request.has_role('admin', 'editor'):
            return []
        return [
            Link(
                text=_("Manage attachments"),
                url=self.request.link(self.model, name='upload'),
                attrs={'class': 'upload-icon'}
            ),
            Link(
                text=_("Delete vote"),
                url=self.request.link(self.model, name='delete'),
                attrs={'class': 'delete-icon'}
            ),
        ]

    @cached_property
    def breadcrumbs(self):
        """Breadcrumb trail: homepage, votes overview, this vote."""
        return [
            Link(_("Homepage"), self.homepage_url),
            Link(_("Votes"), self.votes_url),
            Link(self.title, '#'),
        ]
class VoteStrengthsLayout(DefaultLayout):
    """Layout for the voter strengths page of a vote."""

    @cached_property
    def title(self):
        """Static page title for the voter strengths view."""
        return _("Voter strengths")

    @cached_property
    def breadcrumbs(self):
        """Breadcrumb trail down to the voter strengths page."""
        trail = [
            Link(_("Homepage"), self.homepage_url),
            Link(_("Votes"), self.votes_url),
            Link(self.model.short_title, self.request.link(self.model)),
        ]
        trail.append(Link(self.title, '#'))
        return trail
class UploadVoteAttachemtsLayout(DefaultLayout):
    # NOTE(review): the class name misspells "Attachments"; renaming would
    # break imports elsewhere, so it is kept as-is.
    """Layout for the attachment management page of a vote."""
    @cached_property
    def title(self):
        """Static page title for the attachment management view."""
        return _("Manage attachments")
    @cached_property
    def breadcrumbs(self):
        """Breadcrumb trail down to the attachment management page."""
        return [
            Link(_("Homepage"), self.homepage_url),
            Link(_("Votes"), self.votes_url),
            Link(self.model.short_title, self.request.link(self.model)),
            Link(self.title, '#'),
        ]
class DeleteVoteLayout(DefaultLayout):
    """Layout for the delete-confirmation page of a vote."""

    @cached_property
    def title(self):
        """Static page title for the delete confirmation view."""
        return _("Delete vote")

    @cached_property
    def breadcrumbs(self):
        """Breadcrumb trail down to the delete confirmation page."""
        trail = [
            Link(_("Homepage"), self.homepage_url),
            Link(_("Votes"), self.votes_url),
        ]
        trail.append(
            Link(self.model.short_title, self.request.link(self.model)))
        trail.append(Link(self.title, '#'))
        return trail
/djed.static-0.5.tar.gz/djed.static-0.5/docs/local-components.rst | .. _local-components:
Local Components
================
If you develop your own front-end code (so-called "local components"), you
can also publish it with BowerStatic.
You can add one or more local components in this way:
.. code-block:: python
config.add_bower_component('myapp:static/myapp')
To use a local components in an application, a ``bower_components`` directory
has to been defined somewhere in the application configuration
(see :ref:`getting-started`).
Local components can be included on your HTML page like any other component:
.. code-block:: python
request.include('myapp')
This includes your front-end-code in the HTML page and all dependencies that
are defined in the ``bower.json`` file.
| PypiClean |
/python_pptx_fork-0.6.18-py3-none-any.whl/pptx/spec.py | from __future__ import absolute_import
from pptx.enum.shapes import MSO_SHAPE
GRAPHIC_DATA_URI_CHART = "http://schemas.openxmlformats.org/drawingml/2006/chart"
GRAPHIC_DATA_URI_TABLE = "http://schemas.openxmlformats.org/drawingml/2006/table"
# ============================================================================
# AutoShape type specs
# ============================================================================
autoshape_types = {
MSO_SHAPE.ACTION_BUTTON_BACK_OR_PREVIOUS: {
"basename": "Action Button: Back or Previous",
"avLst": (),
},
MSO_SHAPE.ACTION_BUTTON_BEGINNING: {
"basename": "Action Button: Beginning",
"avLst": (),
},
MSO_SHAPE.ACTION_BUTTON_CUSTOM: {"basename": "Action Button: Custom", "avLst": ()},
MSO_SHAPE.ACTION_BUTTON_DOCUMENT: {
"basename": "Action Button: Document",
"avLst": (),
},
MSO_SHAPE.ACTION_BUTTON_END: {"basename": "Action Button: End", "avLst": ()},
MSO_SHAPE.ACTION_BUTTON_FORWARD_OR_NEXT: {
"basename": "Action Button: Forward or Next",
"avLst": (),
},
MSO_SHAPE.ACTION_BUTTON_HELP: {"basename": "Action Button: Help", "avLst": ()},
MSO_SHAPE.ACTION_BUTTON_HOME: {"basename": "Action Button: Home", "avLst": ()},
MSO_SHAPE.ACTION_BUTTON_INFORMATION: {
"basename": "Action Button: Information",
"avLst": (),
},
MSO_SHAPE.ACTION_BUTTON_MOVIE: {"basename": "Action Button: Movie", "avLst": ()},
MSO_SHAPE.ACTION_BUTTON_RETURN: {"basename": "Action Button: Return", "avLst": ()},
MSO_SHAPE.ACTION_BUTTON_SOUND: {"basename": "Action Button: Sound", "avLst": ()},
MSO_SHAPE.ARC: {"basename": "Arc", "avLst": (("adj1", 16200000), ("adj2", 0))},
MSO_SHAPE.BALLOON: {
"basename": "Rounded Rectangular Callout",
"avLst": (("adj1", -20833), ("adj2", 62500), ("adj3", 16667)),
},
MSO_SHAPE.BENT_ARROW: {
"basename": "Bent Arrow",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 43750)),
},
MSO_SHAPE.BENT_UP_ARROW: {
"basename": "Bent-Up Arrow",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000)),
},
MSO_SHAPE.BEVEL: {"basename": "Bevel", "avLst": (("adj", 12500),)},
MSO_SHAPE.BLOCK_ARC: {
"basename": "Block Arc",
"avLst": (("adj1", 10800000), ("adj2", 0), ("adj3", 25000)),
},
MSO_SHAPE.CAN: {"basename": "Can", "avLst": (("adj", 25000),)},
MSO_SHAPE.CHART_PLUS: {"basename": "Chart Plus", "avLst": ()},
MSO_SHAPE.CHART_STAR: {"basename": "Chart Star", "avLst": ()},
MSO_SHAPE.CHART_X: {"basename": "Chart X", "avLst": ()},
MSO_SHAPE.CHEVRON: {"basename": "Chevron", "avLst": (("adj", 50000),)},
MSO_SHAPE.CHORD: {
"basename": "Chord",
"avLst": (("adj1", 2700000), ("adj2", 16200000)),
},
MSO_SHAPE.CIRCULAR_ARROW: {
"basename": "Circular Arrow",
"avLst": (
("adj1", 12500),
("adj2", 1142319),
("adj3", 20457681),
("adj4", 10800000),
("adj5", 12500),
),
},
MSO_SHAPE.CLOUD: {"basename": "Cloud", "avLst": ()},
MSO_SHAPE.CLOUD_CALLOUT: {
"basename": "Cloud Callout",
"avLst": (("adj1", -20833), ("adj2", 62500)),
},
MSO_SHAPE.CORNER: {
"basename": "Corner",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.CORNER_TABS: {"basename": "Corner Tabs", "avLst": ()},
MSO_SHAPE.CROSS: {"basename": "Cross", "avLst": (("adj", 25000),)},
MSO_SHAPE.CUBE: {"basename": "Cube", "avLst": (("adj", 25000),)},
MSO_SHAPE.CURVED_DOWN_ARROW: {
"basename": "Curved Down Arrow",
"avLst": (("adj1", 25000), ("adj2", 50000), ("adj3", 25000)),
},
MSO_SHAPE.CURVED_DOWN_RIBBON: {
"basename": "Curved Down Ribbon",
"avLst": (("adj1", 25000), ("adj2", 50000), ("adj3", 12500)),
},
MSO_SHAPE.CURVED_LEFT_ARROW: {
"basename": "Curved Left Arrow",
"avLst": (("adj1", 25000), ("adj2", 50000), ("adj3", 25000)),
},
MSO_SHAPE.CURVED_RIGHT_ARROW: {
"basename": "Curved Right Arrow",
"avLst": (("adj1", 25000), ("adj2", 50000), ("adj3", 25000)),
},
MSO_SHAPE.CURVED_UP_ARROW: {
"basename": "Curved Up Arrow",
"avLst": (("adj1", 25000), ("adj2", 50000), ("adj3", 25000)),
},
MSO_SHAPE.CURVED_UP_RIBBON: {
"basename": "Curved Up Ribbon",
"avLst": (("adj1", 25000), ("adj2", 50000), ("adj3", 12500)),
},
MSO_SHAPE.DECAGON: {"basename": "Decagon", "avLst": (("vf", 105146),)},
MSO_SHAPE.DIAGONAL_STRIPE: {
"basename": "Diagonal Stripe",
"avLst": (("adj", 50000),),
},
MSO_SHAPE.DIAMOND: {"basename": "Diamond", "avLst": ()},
MSO_SHAPE.DODECAGON: {"basename": "Dodecagon", "avLst": ()},
MSO_SHAPE.DONUT: {"basename": "Donut", "avLst": (("adj", 25000),)},
MSO_SHAPE.DOUBLE_BRACE: {"basename": "Double Brace", "avLst": (("adj", 8333),)},
MSO_SHAPE.DOUBLE_BRACKET: {
"basename": "Double Bracket",
"avLst": (("adj", 16667),),
},
MSO_SHAPE.DOUBLE_WAVE: {
"basename": "Double Wave",
"avLst": (("adj1", 6250), ("adj2", 0)),
},
MSO_SHAPE.DOWN_ARROW: {
"basename": "Down Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.DOWN_ARROW_CALLOUT: {
"basename": "Down Arrow Callout",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 64977)),
},
MSO_SHAPE.DOWN_RIBBON: {
"basename": "Down Ribbon",
"avLst": (("adj1", 16667), ("adj2", 50000)),
},
MSO_SHAPE.EXPLOSION1: {"basename": "Explosion", "avLst": ()},
MSO_SHAPE.EXPLOSION2: {"basename": "Explosion", "avLst": ()},
MSO_SHAPE.FLOWCHART_ALTERNATE_PROCESS: {
"basename": "Alternate process",
"avLst": (),
},
MSO_SHAPE.FLOWCHART_CARD: {"basename": "Card", "avLst": ()},
MSO_SHAPE.FLOWCHART_COLLATE: {"basename": "Collate", "avLst": ()},
MSO_SHAPE.FLOWCHART_CONNECTOR: {"basename": "Connector", "avLst": ()},
MSO_SHAPE.FLOWCHART_DATA: {"basename": "Data", "avLst": ()},
MSO_SHAPE.FLOWCHART_DECISION: {"basename": "Decision", "avLst": ()},
MSO_SHAPE.FLOWCHART_DELAY: {"basename": "Delay", "avLst": ()},
MSO_SHAPE.FLOWCHART_DIRECT_ACCESS_STORAGE: {
"basename": "Direct Access Storage",
"avLst": (),
},
MSO_SHAPE.FLOWCHART_DISPLAY: {"basename": "Display", "avLst": ()},
MSO_SHAPE.FLOWCHART_DOCUMENT: {"basename": "Document", "avLst": ()},
MSO_SHAPE.FLOWCHART_EXTRACT: {"basename": "Extract", "avLst": ()},
MSO_SHAPE.FLOWCHART_INTERNAL_STORAGE: {"basename": "Internal Storage", "avLst": ()},
MSO_SHAPE.FLOWCHART_MAGNETIC_DISK: {"basename": "Magnetic Disk", "avLst": ()},
MSO_SHAPE.FLOWCHART_MANUAL_INPUT: {"basename": "Manual Input", "avLst": ()},
MSO_SHAPE.FLOWCHART_MANUAL_OPERATION: {"basename": "Manual Operation", "avLst": ()},
MSO_SHAPE.FLOWCHART_MERGE: {"basename": "Merge", "avLst": ()},
MSO_SHAPE.FLOWCHART_MULTIDOCUMENT: {"basename": "Multidocument", "avLst": ()},
MSO_SHAPE.FLOWCHART_OFFLINE_STORAGE: {"basename": "Offline Storage", "avLst": ()},
MSO_SHAPE.FLOWCHART_OFFPAGE_CONNECTOR: {
"basename": "Off-page Connector",
"avLst": (),
},
MSO_SHAPE.FLOWCHART_OR: {"basename": "Or", "avLst": ()},
MSO_SHAPE.FLOWCHART_PREDEFINED_PROCESS: {
"basename": "Predefined Process",
"avLst": (),
},
MSO_SHAPE.FLOWCHART_PREPARATION: {"basename": "Preparation", "avLst": ()},
MSO_SHAPE.FLOWCHART_PROCESS: {"basename": "Process", "avLst": ()},
MSO_SHAPE.FLOWCHART_PUNCHED_TAPE: {"basename": "Punched Tape", "avLst": ()},
MSO_SHAPE.FLOWCHART_SEQUENTIAL_ACCESS_STORAGE: {
"basename": "Sequential Access Storage",
"avLst": (),
},
MSO_SHAPE.FLOWCHART_SORT: {"basename": "Sort", "avLst": ()},
MSO_SHAPE.FLOWCHART_STORED_DATA: {"basename": "Stored Data", "avLst": ()},
MSO_SHAPE.FLOWCHART_SUMMING_JUNCTION: {"basename": "Summing Junction", "avLst": ()},
MSO_SHAPE.FLOWCHART_TERMINATOR: {"basename": "Terminator", "avLst": ()},
MSO_SHAPE.FOLDED_CORNER: {"basename": "Folded Corner", "avLst": ()},
MSO_SHAPE.FRAME: {"basename": "Frame", "avLst": (("adj1", 12500),)},
MSO_SHAPE.FUNNEL: {"basename": "Funnel", "avLst": ()},
MSO_SHAPE.GEAR_6: {
"basename": "Gear 6",
"avLst": (("adj1", 15000), ("adj2", 3526)),
},
MSO_SHAPE.GEAR_9: {
"basename": "Gear 9",
"avLst": (("adj1", 10000), ("adj2", 1763)),
},
MSO_SHAPE.HALF_FRAME: {
"basename": "Half Frame",
"avLst": (("adj1", 33333), ("adj2", 33333)),
},
MSO_SHAPE.HEART: {"basename": "Heart", "avLst": ()},
MSO_SHAPE.HEPTAGON: {
"basename": "Heptagon",
"avLst": (("hf", 102572), ("vf", 105210)),
},
MSO_SHAPE.HEXAGON: {
"basename": "Hexagon",
"avLst": (("adj", 25000), ("vf", 115470)),
},
MSO_SHAPE.HORIZONTAL_SCROLL: {
"basename": "Horizontal Scroll",
"avLst": (("adj", 12500),),
},
MSO_SHAPE.ISOSCELES_TRIANGLE: {
"basename": "Isosceles Triangle",
"avLst": (("adj", 50000),),
},
MSO_SHAPE.LEFT_ARROW: {
"basename": "Left Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.LEFT_ARROW_CALLOUT: {
"basename": "Left Arrow Callout",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 64977)),
},
MSO_SHAPE.LEFT_BRACE: {
"basename": "Left Brace",
"avLst": (("adj1", 8333), ("adj2", 50000)),
},
MSO_SHAPE.LEFT_BRACKET: {"basename": "Left Bracket", "avLst": (("adj", 8333),)},
MSO_SHAPE.LEFT_CIRCULAR_ARROW: {
"basename": "Left Circular Arrow",
"avLst": (
("adj1", 12500),
("adj2", -1142319),
("adj3", 1142319),
("adj4", 10800000),
("adj5", 12500),
),
},
MSO_SHAPE.LEFT_RIGHT_ARROW: {
"basename": "Left-Right Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.LEFT_RIGHT_ARROW_CALLOUT: {
"basename": "Left-Right Arrow Callout",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 48123)),
},
MSO_SHAPE.LEFT_RIGHT_CIRCULAR_ARROW: {
"basename": "Left Right Circular Arrow",
"avLst": (
("adj1", 12500),
("adj2", 1142319),
("adj3", 20457681),
("adj4", 11942319),
("adj5", 12500),
),
},
MSO_SHAPE.LEFT_RIGHT_RIBBON: {
"basename": "Left Right Ribbon",
"avLst": (("adj1", 50000), ("adj2", 50000), ("adj3", 16667)),
},
MSO_SHAPE.LEFT_RIGHT_UP_ARROW: {
"basename": "Left-Right-Up Arrow",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000)),
},
MSO_SHAPE.LEFT_UP_ARROW: {
"basename": "Left-Up Arrow",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000)),
},
MSO_SHAPE.LIGHTNING_BOLT: {"basename": "Lightning Bolt", "avLst": ()},
MSO_SHAPE.LINE_CALLOUT_1: {
"basename": "Line Callout 1",
"avLst": (("adj1", 18750), ("adj2", -8333), ("adj3", 112500), ("adj4", -38333)),
},
MSO_SHAPE.LINE_CALLOUT_1_ACCENT_BAR: {
"basename": "Line Callout 1 (Accent Bar)",
"avLst": (("adj1", 18750), ("adj2", -8333), ("adj3", 112500), ("adj4", -38333)),
},
MSO_SHAPE.LINE_CALLOUT_1_BORDER_AND_ACCENT_BAR: {
"basename": "Line Callout 1 (Border and Accent Bar)",
"avLst": (("adj1", 18750), ("adj2", -8333), ("adj3", 112500), ("adj4", -38333)),
},
MSO_SHAPE.LINE_CALLOUT_1_NO_BORDER: {
"basename": "Line Callout 1 (No Border)",
"avLst": (("adj1", 18750), ("adj2", -8333), ("adj3", 112500), ("adj4", -38333)),
},
MSO_SHAPE.LINE_CALLOUT_2: {
"basename": "Line Callout 2",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 112500),
("adj6", -46667),
),
},
MSO_SHAPE.LINE_CALLOUT_2_ACCENT_BAR: {
"basename": "Line Callout 2 (Accent Bar)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 112500),
("adj6", -46667),
),
},
MSO_SHAPE.LINE_CALLOUT_2_BORDER_AND_ACCENT_BAR: {
"basename": "Line Callout 2 (Border and Accent Bar)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 112500),
("adj6", -46667),
),
},
MSO_SHAPE.LINE_CALLOUT_2_NO_BORDER: {
"basename": "Line Callout 2 (No Border)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 112500),
("adj6", -46667),
),
},
MSO_SHAPE.LINE_CALLOUT_3: {
"basename": "Line Callout 3",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_3_ACCENT_BAR: {
"basename": "Line Callout 3 (Accent Bar)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_3_BORDER_AND_ACCENT_BAR: {
"basename": "Line Callout 3 (Border and Accent Bar)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_3_NO_BORDER: {
"basename": "Line Callout 3 (No Border)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_4: {
"basename": "Line Callout 3",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_4_ACCENT_BAR: {
"basename": "Line Callout 3 (Accent Bar)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_4_BORDER_AND_ACCENT_BAR: {
"basename": "Line Callout 3 (Border and Accent Bar)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_CALLOUT_4_NO_BORDER: {
"basename": "Line Callout 3 (No Border)",
"avLst": (
("adj1", 18750),
("adj2", -8333),
("adj3", 18750),
("adj4", -16667),
("adj5", 100000),
("adj6", -16667),
("adj7", 112963),
("adj8", -8333),
),
},
MSO_SHAPE.LINE_INVERSE: {"basename": "Straight Connector", "avLst": ()},
MSO_SHAPE.MATH_DIVIDE: {
"basename": "Division",
"avLst": (("adj1", 23520), ("adj2", 5880), ("adj3", 11760)),
},
MSO_SHAPE.MATH_EQUAL: {
"basename": "Equal",
"avLst": (("adj1", 23520), ("adj2", 11760)),
},
MSO_SHAPE.MATH_MINUS: {"basename": "Minus", "avLst": (("adj1", 23520),)},
MSO_SHAPE.MATH_MULTIPLY: {"basename": "Multiply", "avLst": (("adj1", 23520),)},
MSO_SHAPE.MATH_NOT_EQUAL: {
"basename": "Not Equal",
"avLst": (("adj1", 23520), ("adj2", 6600000), ("adj3", 11760)),
},
MSO_SHAPE.MATH_PLUS: {"basename": "Plus", "avLst": (("adj1", 23520),)},
MSO_SHAPE.MOON: {"basename": "Moon", "avLst": (("adj", 50000),)},
MSO_SHAPE.NON_ISOSCELES_TRAPEZOID: {
"basename": "Non-isosceles Trapezoid",
"avLst": (("adj1", 25000), ("adj2", 25000)),
},
MSO_SHAPE.NOTCHED_RIGHT_ARROW: {
"basename": "Notched Right Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.NO_SYMBOL: {"basename": '"No" symbol', "avLst": (("adj", 18750),)},
MSO_SHAPE.OCTAGON: {"basename": "Octagon", "avLst": (("adj", 29289),)},
MSO_SHAPE.OVAL: {"basename": "Oval", "avLst": ()},
MSO_SHAPE.OVAL_CALLOUT: {
"basename": "Oval Callout",
"avLst": (("adj1", -20833), ("adj2", 62500)),
},
MSO_SHAPE.PARALLELOGRAM: {"basename": "Parallelogram", "avLst": (("adj", 25000),)},
MSO_SHAPE.PENTAGON: {"basename": "Pentagon", "avLst": (("adj", 50000),)},
MSO_SHAPE.PIE: {"basename": "Pie", "avLst": (("adj1", 0), ("adj2", 16200000))},
MSO_SHAPE.PIE_WEDGE: {"basename": "Pie", "avLst": ()},
MSO_SHAPE.PLAQUE: {"basename": "Plaque", "avLst": (("adj", 16667),)},
MSO_SHAPE.PLAQUE_TABS: {"basename": "Plaque Tabs", "avLst": ()},
MSO_SHAPE.QUAD_ARROW: {
"basename": "Quad Arrow",
"avLst": (("adj1", 22500), ("adj2", 22500), ("adj3", 22500)),
},
MSO_SHAPE.QUAD_ARROW_CALLOUT: {
"basename": "Quad Arrow Callout",
"avLst": (("adj1", 18515), ("adj2", 18515), ("adj3", 18515), ("adj4", 48123)),
},
MSO_SHAPE.RECTANGLE: {"basename": "Rectangle", "avLst": ()},
MSO_SHAPE.RECTANGULAR_CALLOUT: {
"basename": "Rectangular Callout",
"avLst": (("adj1", -20833), ("adj2", 62500)),
},
MSO_SHAPE.REGULAR_PENTAGON: {
"basename": "Regular Pentagon",
"avLst": (("hf", 105146), ("vf", 110557)),
},
MSO_SHAPE.RIGHT_ARROW: {
"basename": "Right Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.RIGHT_ARROW_CALLOUT: {
"basename": "Right Arrow Callout",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 64977)),
},
MSO_SHAPE.RIGHT_BRACE: {
"basename": "Right Brace",
"avLst": (("adj1", 8333), ("adj2", 50000)),
},
MSO_SHAPE.RIGHT_BRACKET: {"basename": "Right Bracket", "avLst": (("adj", 8333),)},
MSO_SHAPE.RIGHT_TRIANGLE: {"basename": "Right Triangle", "avLst": ()},
MSO_SHAPE.ROUNDED_RECTANGLE: {
"basename": "Rounded Rectangle",
"avLst": (("adj", 16667),),
},
MSO_SHAPE.ROUNDED_RECTANGULAR_CALLOUT: {
"basename": "Rounded Rectangular Callout",
"avLst": (("adj1", -20833), ("adj2", 62500), ("adj3", 16667)),
},
MSO_SHAPE.ROUND_1_RECTANGLE: {
"basename": "Round Single Corner Rectangle",
"avLst": (("adj", 16667),),
},
MSO_SHAPE.ROUND_2_DIAG_RECTANGLE: {
"basename": "Round Diagonal Corner Rectangle",
"avLst": (("adj1", 16667), ("adj2", 0)),
},
MSO_SHAPE.ROUND_2_SAME_RECTANGLE: {
"basename": "Round Same Side Corner Rectangle",
"avLst": (("adj1", 16667), ("adj2", 0)),
},
MSO_SHAPE.SMILEY_FACE: {"basename": "Smiley Face", "avLst": (("adj", 4653),)},
MSO_SHAPE.SNIP_1_RECTANGLE: {
"basename": "Snip Single Corner Rectangle",
"avLst": (("adj", 16667),),
},
MSO_SHAPE.SNIP_2_DIAG_RECTANGLE: {
"basename": "Snip Diagonal Corner Rectangle",
"avLst": (("adj1", 0), ("adj2", 16667)),
},
MSO_SHAPE.SNIP_2_SAME_RECTANGLE: {
"basename": "Snip Same Side Corner Rectangle",
"avLst": (("adj1", 16667), ("adj2", 0)),
},
MSO_SHAPE.SNIP_ROUND_RECTANGLE: {
"basename": "Snip and Round Single Corner Rectangle",
"avLst": (("adj1", 16667), ("adj2", 16667)),
},
MSO_SHAPE.SQUARE_TABS: {"basename": "Square Tabs", "avLst": ()},
MSO_SHAPE.STAR_10_POINT: {
"basename": "10-Point Star",
"avLst": (("adj", 42533), ("hf", 105146)),
},
MSO_SHAPE.STAR_12_POINT: {"basename": "12-Point Star", "avLst": (("adj", 37500),)},
MSO_SHAPE.STAR_16_POINT: {"basename": "16-Point Star", "avLst": (("adj", 37500),)},
MSO_SHAPE.STAR_24_POINT: {"basename": "24-Point Star", "avLst": (("adj", 37500),)},
MSO_SHAPE.STAR_32_POINT: {"basename": "32-Point Star", "avLst": (("adj", 37500),)},
MSO_SHAPE.STAR_4_POINT: {"basename": "4-Point Star", "avLst": (("adj", 12500),)},
MSO_SHAPE.STAR_5_POINT: {
"basename": "5-Point Star",
"avLst": (("adj", 19098), ("hf", 105146), ("vf", 110557)),
},
MSO_SHAPE.STAR_6_POINT: {
"basename": "6-Point Star",
"avLst": (("adj", 28868), ("hf", 115470)),
},
MSO_SHAPE.STAR_7_POINT: {
"basename": "7-Point Star",
"avLst": (("adj", 34601), ("hf", 102572), ("vf", 105210)),
},
MSO_SHAPE.STAR_8_POINT: {"basename": "8-Point Star", "avLst": (("adj", 37500),)},
MSO_SHAPE.STRIPED_RIGHT_ARROW: {
"basename": "Striped Right Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.SUN: {"basename": "Sun", "avLst": (("adj", 25000),)},
MSO_SHAPE.SWOOSH_ARROW: {
"basename": "Swoosh Arrow",
"avLst": (("adj1", 25000), ("adj2", 16667)),
},
MSO_SHAPE.TEAR: {"basename": "Teardrop", "avLst": (("adj", 100000),)},
MSO_SHAPE.TRAPEZOID: {"basename": "Trapezoid", "avLst": (("adj", 25000),)},
MSO_SHAPE.UP_ARROW: {
"basename": "Up Arrow",
"avLst": (("adj1", 50000), ("adj2", 50000)),
},
MSO_SHAPE.UP_ARROW_CALLOUT: {
"basename": "Up Arrow Callout",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 64977)),
},
MSO_SHAPE.UP_DOWN_ARROW: {
"basename": "Up-Down Arrow",
"avLst": (("adj1", 50000), ("adj1", 50000), ("adj2", 50000), ("adj2", 50000)),
},
MSO_SHAPE.UP_DOWN_ARROW_CALLOUT: {
"basename": "Up-Down Arrow Callout",
"avLst": (("adj1", 25000), ("adj2", 25000), ("adj3", 25000), ("adj4", 48123)),
},
MSO_SHAPE.UP_RIBBON: {
"basename": "Up Ribbon",
"avLst": (("adj1", 16667), ("adj2", 50000)),
},
MSO_SHAPE.U_TURN_ARROW: {
"basename": "U-Turn Arrow",
"avLst": (
("adj1", 25000),
("adj2", 25000),
("adj3", 25000),
("adj4", 43750),
("adj5", 75000),
),
},
MSO_SHAPE.VERTICAL_SCROLL: {
"basename": "Vertical Scroll",
"avLst": (("adj", 12500),),
},
MSO_SHAPE.WAVE: {"basename": "Wave", "avLst": (("adj1", 12500), ("adj2", 0))},
} | PypiClean |
/envs_manager-0.1.3.tar.gz/envs_manager-0.1.3/envs_manager/manager.py |
import os
from pathlib import Path
from envs_manager.backends.venv_interface import VEnvInterface
from envs_manager.backends.conda_like_interface import CondaLikeInterface
# Root directory under which per-backend environment trees are created.
# Overridable via the BACKENDS_ROOT_PATH environment variable.
DEFAULT_BACKENDS_ROOT_PATH = Path(
    os.environ.get(
        "BACKENDS_ROOT_PATH", str(Path.home() / ".envs-manager" / "backends")
    )
)
# Backend id used when none is given explicitly (must be a key of
# Manager.BACKENDS); overridable via ENV_BACKEND.
DEFAULT_BACKEND = os.environ.get("ENV_BACKEND", "venv")
# Default location of named environments for the default backend.
DEFAULT_ENVS_ROOT_PATH = DEFAULT_BACKENDS_ROOT_PATH / DEFAULT_BACKEND / "envs"
# Optional path to an external backend executable; None when unset.
EXTERNAL_EXECUTABLE = os.environ.get("ENV_BACKEND_EXECUTABLE", None)
class Manager:
    """
    Class to handle different Python environment and packages managers implementations.
    """

    # Maps backend id -> backend interface class.
    BACKENDS = {
        VEnvInterface.ID: VEnvInterface,
        CondaLikeInterface.ID: CondaLikeInterface,
    }

    def __init__(
        self,
        backend,
        root_path=None,
        env_name=None,
        env_directory=None,
        external_executable=None,
    ):
        """Create a manager for one environment.

        Args:
            backend: backend id, a key of ``BACKENDS``.
            root_path: base directory holding per-backend ``envs`` trees.
            env_name: name of the environment below ``root_path``.
            env_directory: explicit environment directory; takes precedence
                over ``root_path``/``env_name``.
            external_executable: optional path to the backend executable.

        Raises:
            ValueError: when neither ``env_directory`` nor both
                ``root_path`` and ``env_name`` are given.
        """
        self.backend_class = self.BACKENDS[backend]
        self.env_name = env_name
        self.root_path = root_path
        if env_directory:
            self.env_directory = str(env_directory)
        elif root_path and env_name:
            self.env_directory = root_path / backend / "envs" / env_name
        else:
            # ValueError (a subclass of the previously raised bare Exception,
            # so existing ``except Exception`` callers still work).
            raise ValueError(
                "'env_directory' or 'root_path' and 'env_name' should be provided"
            )
        # Only stringify when a value was actually given: the previous
        # unconditional ``str(external_executable)`` turned ``None`` into the
        # bogus executable path string "None".
        if external_executable is not None:
            external_executable = str(external_executable)
        self.backend_instance = self.backend_class(
            str(self.env_directory), external_executable=external_executable
        )

    def create_environment(self, packages=None, channels=None, force=False):
        """Create the environment, optionally preinstalling *packages*."""
        # ``channels`` is only forwarded when set; some backends (venv) do
        # not accept the keyword.
        if channels:
            return self.backend_instance.create_environment(
                packages, channels=channels, force=force
            )
        return self.backend_instance.create_environment(packages, force=force)

    def delete_environment(self, force=False):
        """Remove the environment from disk."""
        return self.backend_instance.delete_environment(force=force)

    def activate(self):
        """Activate the environment via the backend."""
        self.backend_instance.activate_environment()

    def deactivate(self):
        """Deactivate the environment via the backend."""
        self.backend_instance.deactivate_environment()

    def export_environment(self, export_file_path=None):
        """Write an environment specification to *export_file_path*."""
        return self.backend_instance.export_environment(
            export_file_path=export_file_path
        )

    def import_environment(self, import_file_path, force=False):
        """Recreate an environment from a previously exported spec file."""
        return self.backend_instance.import_environment(import_file_path, force=force)

    def install(self, packages=None, channels=None, force=False, capture_output=False):
        """Install *packages*, forwarding *channels* only when set."""
        if channels:
            return self.backend_instance.install_packages(
                packages=packages,
                channels=channels,
                force=force,
                capture_output=capture_output,
            )
        return self.backend_instance.install_packages(
            packages, force=force, capture_output=capture_output
        )

    def uninstall(self, packages, force=False, capture_output=False):
        """Uninstall *packages* from the environment."""
        return self.backend_instance.uninstall_packages(
            packages, force=force, capture_output=capture_output
        )

    def update(self, packages, force=False, capture_output=False):
        """Update *packages* in the environment."""
        return self.backend_instance.update_packages(
            packages, force=force, capture_output=capture_output
        )

    def list(self):
        """List packages installed in the environment."""
        return self.backend_instance.list_packages()

    @classmethod
    def list_environments(
        cls,
        backend=DEFAULT_BACKEND,
        root_path=DEFAULT_BACKENDS_ROOT_PATH,
        external_executable=None,
    ):
        """List environments known to *backend* under *root_path*."""
        return cls.BACKENDS[backend].list_environments(
            root_path, external_executable=external_executable
        )
/Muntjac-1.1.2.tar.gz/Muntjac-1.1.2/muntjac/data/util/filter/simple_string_filter.py | from muntjac.data.container import IFilter
class SimpleStringFilter(IFilter):
    """Simple string filter for matching items that start with or contain a
    specified string. The matching can be case-sensitive or case-insensitive.

    This filter also directly supports in-memory filtering. When performing
    in-memory filtering, values of other types are converted using __str__,
    but other (lazy container) implementations do not need to perform such
    conversions and might not support values of different types.

    Note that this filter might not be very efficient e.g. for database
    filtering.
    """

    def __init__(self, propertyId, filterString, ignoreCase, onlyMatchPrefix):
        """
        @param propertyId: id of the property this filter applies to
        @param filterString: string to match against the property value
        @param ignoreCase: True for case-insensitive matching
        @param onlyMatchPrefix: True to match only at the start of the value
        """
        self.propertyId = propertyId
        # Lower-case the needle once here so per-item matching does not
        # have to re-normalize it.
        if ignoreCase:
            self.filterString = filterString.lower()
        else:
            self.filterString = filterString
        self.ignoreCase = ignoreCase
        self.onlyMatchPrefix = onlyMatchPrefix

    def passesFilter(self, itemId, item):
        """Return True if the given item's property value matches the filter."""
        p = item.getItemProperty(self.propertyId)
        # Bug fix: the previous code also tested ``str(p) is None``, which is
        # always False (str() always returns a string), so it was removed.
        if p is None:
            return False
        value = str(p).lower() if self.ignoreCase else str(p)
        if self.onlyMatchPrefix:
            return value.startswith(self.filterString)
        return self.filterString in value

    def appliesToProperty(self, propertyId):
        """Return True if this filter applies to the given property."""
        return self.propertyId == propertyId

    def __eq__(self, obj):
        # Only objects of the same class can be equal.
        if not isinstance(obj, SimpleStringFilter):
            return False
        # Bug fix: the previous field-by-field comparison was asymmetric --
        # it skipped the check whenever the other object's field was None,
        # so ``a == b`` and ``b == a`` could disagree. Compare every field
        # directly instead.
        return (
            self.propertyId == obj.propertyId
            and self.filterString == obj.filterString
            and self.ignoreCase == obj.ignoreCase
            and self.onlyMatchPrefix == obj.onlyMatchPrefix
        )

    def __hash__(self):
        # Consistent with __eq__: equal filters share propertyId and
        # filterString, hence equal hashes.
        h1 = hash(self.propertyId) if self.propertyId is not None else 0
        h2 = hash(self.filterString) if self.filterString is not None else 0
        return h1 ^ h2

    def getPropertyId(self):
        """Returns the property identifier to which this filter applies.

        @return: property id
        """
        return self.propertyId

    def getFilterString(self):
        """Returns the filter string.

        Note: this method is intended only for implementations of lazy
        string filters and may change in the future.

        @return: filter string given to the constructor
        """
        return self.filterString

    def isIgnoreCase(self):
        """Returns whether the filter is case-insensitive or case-sensitive.

        Note: this method is intended only for implementations of lazy string
        filters and may change in the future.

        @return: true if performing case-insensitive filtering, false for
                 case-sensitive
        """
        return self.ignoreCase

    def isOnlyMatchPrefix(self):
        """Returns true if the filter only applies to the beginning of the value
        string, false for any location in the value.

        Note: this method is intended only for implementations of lazy string
        filters and may change in the future.

        @return: true if checking for matches at the beginning of the value only,
                 false if matching any part of value
        """
        return self.onlyMatchPrefix
/hnzhu010507-0.0.9.tar.gz/hnzhu010507-0.0.9/_pytest/_code/source.py | import ast
import inspect
import textwrap
import tokenize
import types
import warnings
from bisect import bisect_right
from typing import Iterable
from typing import Iterator
from typing import List
from typing import Optional
from typing import overload
from typing import Tuple
from typing import Union
class Source:
    """An immutable object holding a source code fragment.

    When using Source(...), the source lines are deindented.
    """

    def __init__(self, obj: object = None) -> None:
        if not obj:
            self.lines: List[str] = []
        elif isinstance(obj, Source):
            self.lines = obj.lines
        elif isinstance(obj, (tuple, list)):
            self.lines = deindent(x.rstrip("\n") for x in obj)
        elif isinstance(obj, str):
            self.lines = deindent(obj.split("\n"))
        else:
            try:
                rawcode = getrawcode(obj)
                src = inspect.getsource(rawcode)
            except TypeError:
                src = inspect.getsource(obj)  # type: ignore[arg-type]
            self.lines = deindent(src.split("\n"))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Source):
            return NotImplemented
        return self.lines == other.lines

    # Ignore type because of https://github.com/python/mypy/issues/4266.
    __hash__ = None  # type: ignore

    @overload
    def __getitem__(self, key: int) -> str:
        ...

    @overload
    def __getitem__(self, key: slice) -> "Source":
        ...

    def __getitem__(self, key: Union[int, slice]) -> Union[str, "Source"]:
        if isinstance(key, int):
            return self.lines[key]
        else:
            if key.step not in (None, 1):
                raise IndexError("cannot slice a Source with a step")
            newsource = Source()
            newsource.lines = self.lines[key.start : key.stop]
            return newsource

    def __iter__(self) -> Iterator[str]:
        return iter(self.lines)

    def __len__(self) -> int:
        return len(self.lines)

    def strip(self) -> "Source":
        """Return new Source object with trailing and leading blank lines removed."""
        start, end = 0, len(self)
        while start < end and not self.lines[start].strip():
            start += 1
        while end > start and not self.lines[end - 1].strip():
            end -= 1
        source = Source()
        source.lines[:] = self.lines[start:end]
        return source

    def indent(self, indent: str = " " * 4) -> "Source":
        """Return a copy of the source object with all lines indented by the
        given indent-string."""
        newsource = Source()
        newsource.lines = [(indent + line) for line in self.lines]
        return newsource

    def getstatement(self, lineno: int) -> "Source":
        """Return Source statement which contains the given linenumber
        (counted from 0)."""
        start, end = self.getstatementrange(lineno)
        return self[start:end]

    def getstatementrange(self, lineno: int) -> Tuple[int, int]:
        """Return (start, end) tuple which spans the minimal statement region
        which containing the given lineno."""
        if not (0 <= lineno < len(self)):
            raise IndexError("lineno out of range")
        # Bug fix (naming): the unpacked AST node was previously bound to a
        # local named "ast", shadowing the imported ast module.
        astnode, start, end = getstatementrange_ast(lineno, self)
        return start, end

    def deindent(self) -> "Source":
        """Return a new Source object deindented."""
        newsource = Source()
        newsource.lines[:] = deindent(self.lines)
        return newsource

    def __str__(self) -> str:
        return "\n".join(self.lines)
#
# helper functions
#
def findsource(obj) -> Tuple[Optional[Source], int]:
    """Locate the source of *obj* via ``inspect.findsource``.

    Returns a ``(Source, lineno)`` pair on success, or ``(None, -1)`` when
    the source cannot be determined for any reason.
    """
    try:
        raw_lines, lineno = inspect.findsource(obj)
    except Exception:
        return None, -1
    src = Source()
    src.lines = [raw.rstrip() for raw in raw_lines]
    return src, lineno
def getrawcode(obj: object, trycall: bool = True) -> types.CodeType:
    """Return the code object underlying the given function-like *obj*."""
    try:
        # Plain functions and lambdas expose their code object directly.
        return obj.__code__  # type: ignore[attr-defined,no-any-return]
    except AttributeError:
        pass
    if trycall:
        # Fall back to a callable instance's __call__, but never for classes
        # themselves (their __call__ is the instantiation machinery).
        call = getattr(obj, "__call__", None)
        if call and not isinstance(obj, type):
            return getrawcode(call, trycall=False)
    raise TypeError(f"could not get code object for {obj!r}")
def deindent(lines: Iterable[str]) -> List[str]:
    """Strip the common leading indentation from *lines*."""
    text = "\n".join(lines)
    return textwrap.dedent(text).splitlines()
def get_statement_startend2(lineno: int, node: ast.AST) -> Tuple[int, Optional[int]]:
    """Return the 0-based ``(start, end)`` line span of the statement that
    contains *lineno* within the parsed tree *node*; ``end`` is ``None``
    when the statement is the last recorded boundary."""
    # Flatten all statements and except handlers into one lineno-list.
    # AST's line numbers start indexing at 1.
    values: List[int] = []
    for x in ast.walk(node):
        if isinstance(x, (ast.stmt, ast.ExceptHandler)):
            # Before Python 3.8, the lineno of a decorated class or function pointed at the decorator.
            # Since Python 3.8, the lineno points to the class/def, so need to include the decorators.
            if isinstance(x, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
                for d in x.decorator_list:
                    values.append(d.lineno - 1)
            values.append(x.lineno - 1)
            for name in ("finalbody", "orelse"):
                val: Optional[List[ast.stmt]] = getattr(x, name, None)
                if val:
                    # Treat the finally/orelse part as its own statement.
                    # (-1 converts to 0-based; the extra -1 closes the
                    # previous statement before the keyword line.)
                    values.append(val[0].lineno - 1 - 1)
    values.sort()
    # The containing statement starts at the boundary just before lineno's
    # insertion point and ends at the following boundary (if any).
    insert_index = bisect_right(values, lineno)
    start = values[insert_index - 1]
    if insert_index >= len(values):
        end = None
    else:
        end = values[insert_index]
    return start, end
def getstatementrange_ast(
    lineno: int,
    source: Source,
    assertion: bool = False,
    astnode: Optional[ast.AST] = None,
) -> Tuple[ast.AST, int, int]:
    """Return ``(astnode, start, end)`` where ``[start, end)`` is the line
    span of the statement containing *lineno* in *source*; *source* is
    parsed when *astnode* is not supplied."""
    if astnode is None:
        content = str(source)
        # See #4260:
        # Don't produce duplicate warnings when compiling source to find AST.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            astnode = ast.parse(content, "source", "exec")

    start, end = get_statement_startend2(lineno, astnode)
    # We need to correct the end:
    # - ast-parsing strips comments
    # - there might be empty lines
    # - we might have lesser indented code blocks at the end
    if end is None:
        end = len(source.lines)

    if end > start + 1:
        # Make sure we don't span differently indented code blocks
        # by using the BlockFinder helper used which inspect.getsource() uses itself.
        block_finder = inspect.BlockFinder()
        # If we start with an indented line, put blockfinder to "started" mode.
        block_finder.started = source.lines[start][0].isspace()
        it = ((x + "\n") for x in source.lines[start:end])
        try:
            for tok in tokenize.generate_tokens(lambda: next(it)):
                block_finder.tokeneater(*tok)
        except (inspect.EndOfBlock, IndentationError):
            # BlockFinder signals the end of the block via EndOfBlock.
            end = block_finder.last + start
        except Exception:
            pass

    # The end might still point to a comment or empty line, correct it.
    while end:
        line = source.lines[end - 1].lstrip()
        if line.startswith("#") or not line:
            end -= 1
        else:
            break
    return astnode, start, end
/jay3332-discord.py-2.0.0a0.tar.gz/jay3332-discord.py-2.0.0a0/discord/stage_instance.py | from __future__ import annotations
from typing import Optional, TYPE_CHECKING
from .utils import MISSING, cached_slot_property
from .mixins import Hashable
from .errors import InvalidArgument
from .enums import StagePrivacyLevel, try_enum
__all__ = (
'StageInstance',
)
if TYPE_CHECKING:
from .types.channel import StageInstance as StageInstancePayload
from .state import ConnectionState
from .channel import StageChannel
from .guild import Guild
class StageInstance(Hashable):
    """Represents a stage instance of a stage channel in a guild.

    .. versionadded:: 2.0

    .. container:: operations

        .. describe:: x == y

            Checks if two stage instances are equal.

        .. describe:: x != y

            Checks if two stage instances are not equal.

        .. describe:: hash(x)

            Returns the stage instance's hash.

    Attributes
    -----------
    id: :class:`int`
        The stage instance's ID.
    guild: :class:`Guild`
        The guild that the stage instance is running in.
    channel_id: :class:`int`
        The ID of the channel that the stage instance is running in.
    topic: :class:`str`
        The topic of the stage instance.
    privacy_level: :class:`StagePrivacyLevel`
        The privacy level of the stage instance.
    discoverable_disabled: :class:`bool`
        Whether discoverability for the stage instance is disabled.
    """

    # __slots__ keeps instances compact; _cs_channel backs the
    # cached_slot_property below.
    __slots__ = (
        '_state',
        'id',
        'guild',
        'channel_id',
        'topic',
        'privacy_level',
        'discoverable_disabled',
        '_cs_channel',
    )

    def __init__(self, *, state: ConnectionState, guild: Guild, data: StageInstancePayload) -> None:
        self._state = state
        self.guild = guild
        self._update(data)

    def _update(self, data: StageInstancePayload) -> None:
        # Populate/refresh the instance fields from a raw payload dict.
        self.id: int = int(data['id'])
        self.channel_id: int = int(data['channel_id'])
        self.topic: str = data['topic']
        self.privacy_level: StagePrivacyLevel = try_enum(StagePrivacyLevel, data['privacy_level'])
        self.discoverable_disabled: bool = data.get('discoverable_disabled', False)

    def __repr__(self) -> str:
        return f'<StageInstance id={self.id} guild={self.guild!r} channel_id={self.channel_id} topic={self.topic!r}>'

    @cached_slot_property('_cs_channel')
    def channel(self) -> Optional[StageChannel]:
        """Optional[:class:`StageChannel`]: The channel that stage instance is running in."""
        # the returned channel will always be a StageChannel or None
        return self._state.get_channel(self.channel_id)  # type: ignore

    def is_public(self) -> bool:
        """:class:`bool`: Whether the stage instance's privacy level is public."""
        return self.privacy_level is StagePrivacyLevel.public

    async def edit(self, *, topic: str = MISSING, privacy_level: StagePrivacyLevel = MISSING, reason: Optional[str] = None) -> None:
        """|coro|

        Edits the stage instance.

        You must have the :attr:`~Permissions.manage_channels` permission to
        use this.

        Parameters
        -----------
        topic: :class:`str`
            The stage instance's new topic.
        privacy_level: :class:`StagePrivacyLevel`
            The stage instance's new privacy level.
        reason: :class:`str`
            The reason the stage instance was edited. Shows up on the audit log.

        Raises
        ------
        InvalidArgument
            If the ``privacy_level`` parameter is not the proper type.
        Forbidden
            You do not have permissions to edit the stage instance.
        HTTPException
            Editing a stage instance failed.
        """
        payload = {}

        # MISSING (not None) marks "leave unchanged" so explicit None-like
        # values can still be sent where the API allows them.
        if topic is not MISSING:
            payload['topic'] = topic

        if privacy_level is not MISSING:
            if not isinstance(privacy_level, StagePrivacyLevel):
                raise InvalidArgument('privacy_level field must be of type PrivacyLevel')

            payload['privacy_level'] = privacy_level.value

        if payload:
            await self._state.http.edit_stage_instance(self.channel_id, **payload, reason=reason)

    async def delete(self, *, reason: Optional[str] = None) -> None:
        """|coro|

        Deletes the stage instance.

        You must have the :attr:`~Permissions.manage_channels` permission to
        use this.

        Parameters
        -----------
        reason: :class:`str`
            The reason the stage instance was deleted. Shows up on the audit log.

        Raises
        ------
        Forbidden
            You do not have permissions to delete the stage instance.
        HTTPException
            Deleting the stage instance failed.
        """
        await self._state.http.delete_stage_instance(self.channel_id, reason=reason)
/LambdaTool-0.9.5.tar.gz/LambdaTool-0.9.5/lambdatool/stack_tool.py | class StackTool(object):
_cf_client = None
_stack_name = None
_stage = None
_region = None
def __init__(self, stack_name, stage, profile, region, cf_client):
"""
StackTool is a simple tool to print some specific data about a
CloudFormation stack.
Args:
stack_name - name of the stack of interest
stage - the supplied stage/environment
profile - AWS credential profile (may be None)
region - AWS region where the stack was created
Returns:
not a damn thing
Raises:
SystemError - if everything isn't just right
"""
try:
self._stack_name = stack_name
self._stage = stage
self._region = region
self._cf_client = cf_client
except Exception:
raise SystemError
def print_stack_info(self):
'''
List resources from the given stack
Args:
None
Returns:
A dictionary filled resources or None if things went sideways
'''
try:
rest_api_id = None
deployment_found = False
response = self._cf_client.describe_stack_resources(
StackName=self._stack_name
)
print('\nThe following resources were created:')
for resource in response['StackResources']:
if resource['ResourceType'] == 'AWS::ApiGateway::RestApi':
rest_api_id = resource['PhysicalResourceId']
elif resource['ResourceType'] == 'AWS::ApiGateway::Deployment':
deployment_found = True
print('\t{}\t{}\t{}'.format(
resource['ResourceType'],
resource['LogicalResourceId'],
resource['PhysicalResourceId']
)
)
if rest_api_id and deployment_found:
url = 'https://{}.execute-api.{}.amazonaws.com/{}'.format(
rest_api_id,
self._region,
self._stage
)
print('\nThe deployed service can be found at this URL:')
print('\t{}\n'.format(url))
return response
except Exception as wtf:
print(wtf)
return None
if __name__ == '__main__':
    # Bug fix: StackTool requires five arguments; the cf_client argument was
    # missing, so this demo raised TypeError on launch. A real client would
    # be e.g. boto3.client('cloudformation'); None keeps the demo
    # dependency-free (print_stack_info will report the resulting error).
    stack_tool = StackTool('lambda-mars-dev', 'dev', None, 'us-east-2', None)
    stack_tool.print_stack_info()
/ignition_api-8.1.31.post1.tar.gz/ignition_api-8.1.31.post1/src/system/user.py | from __future__ import print_function
__all__ = [  # Public API of this module, kept in alphabetical order.
    "addCompositeSchedule",
    "addHoliday",
    "addRole",
    "addSchedule",
    "addUser",
    "createScheduleAdjustment",
    "editHoliday",
    "editRole",
    "editSchedule",
    "editUser",
    "getHoliday",
    "getHolidayNames",
    "getHolidays",
    "getNewUser",
    "getRoles",
    "getSchedule",
    "getScheduleNames",
    "getScheduledUsers",
    "getSchedules",
    "getUser",
    "getUserSources",
    "getUsers",
    "isUserScheduled",
    "removeHoliday",
    "removeRole",
    "removeSchedule",
    "removeUser",
]
from typing import List, Optional, Union
from com.inductiveautomation.ignition.common.messages import UIResponse
from com.inductiveautomation.ignition.common.user import PyUser, UserSourceMeta
from com.inductiveautomation.ignition.common.user.schedule import (
AbstractScheduleModel,
BasicScheduleModel,
HolidayModel,
ScheduleAdjustment,
)
from dev.thecesrom.helper.types import AnyStr
from java.util import Date, Locale
def addCompositeSchedule(name, scheduleOne, scheduleTwo, description=None):
    # type: (AnyStr, AnyStr, AnyStr, Optional[AnyStr]) -> UIResponse
    """Combine two schedules into a new composite schedule.

    Args:
        name: Name of the new composite schedule.
        scheduleOne: First schedule to combine.
        scheduleTwo: Second schedule to combine.
        description: Optional description for the combined schedule.

    Returns:
        A UIResponse with warnings, errors, and info about the add.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(name, scheduleOne, scheduleTwo, description)
    return UIResponse(Locale.ENGLISH)
def addHoliday(holiday):
    # type: (HolidayModel) -> UIResponse
    """Add a holiday.

    Args:
        holiday: The HolidayModel to add.

    Returns:
        A UIResponse with warnings, errors, and info about the add.
    """
    # Mock implementation: echo the argument and return an empty response.
    print(holiday)
    return UIResponse(Locale.ENGLISH)
def addRole(userSource, role):
    # type: (AnyStr, AnyStr) -> UIResponse
    """Add a role to the specified user source.

    When altering the Gateway System User Source, the Allow User Admin
    setting must be enabled.

    Args:
        userSource: User source to add a role to; blank uses the default.
        role: Role to add; must be non-blank and must not already exist.

    Returns:
        A UIResponse with warnings, errors, and info about the add.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(userSource, role)
    return UIResponse(Locale.ENGLISH)
def addSchedule(schedule):
    # type: (AbstractScheduleModel) -> UIResponse
    """Add a schedule.

    Args:
        schedule: Schedule to add; any AbstractScheduleModel subclass
            (e.g. BasicScheduleModel or CompositeScheduleModel).

    Returns:
        A UIResponse with warnings, errors, and info about the add.
    """
    # Mock implementation: echo the argument and return an empty response.
    print(schedule)
    return UIResponse(Locale.ENGLISH)
def addUser(userSource, user):
    # type: (AnyStr, PyUser) -> UIResponse
    """Add a new user to a user source.

    Used in combination with getNewUser to create a new user.

    Args:
        userSource: User source to add the user to; an empty string uses
            the project's default user source (when called from a project).
        user: The User object to add (see also PyUser).

    Returns:
        A UIResponse with warnings, errors, and info about the add attempt.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(userSource, user)
    return UIResponse(Locale.ENGLISH)
def createScheduleAdjustment(startDate, endDate, isAvailable, note):
    # type: (Date, Date, bool, AnyStr) -> ScheduleAdjustment
    """Create a schedule adjustment.

    Args:
        startDate: Start of the adjustment window.
        endDate: End of the adjustment window.
        isAvailable: True if the user is available during the window.
        note: Free-form note about the adjustment.

    Returns:
        A ScheduleAdjustment that can be added to a user.
    """
    return ScheduleAdjustment(startDate, endDate, isAvailable, note)
def editHoliday(holidayName, holiday):
    # type: (AnyStr, HolidayModel) -> UIResponse
    """Edit an existing holiday.

    Args:
        holidayName: Name of the holiday to edit (case-sensitive).
        holiday: The edited holiday, as a HolidayModel object.

    Returns:
        A UIResponse with warnings, errors, and info about the edit.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(holidayName, holiday)
    return UIResponse(Locale.ENGLISH)
def editRole(userSource, oldName, newName):
    # type: (AnyStr, AnyStr, AnyStr) -> UIResponse
    """Rename a role in the specified user source.

    When altering the Gateway System User Source, the Allow User Admin
    setting must be enabled.

    Args:
        userSource: User source containing the role; blank uses the default.
        oldName: Existing role to rename; must be non-blank and exist.
        newName: New role name; must be non-blank.

    Returns:
        A UIResponse with warnings, errors, and info about the edit.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(userSource, oldName, newName)
    return UIResponse(Locale.ENGLISH)
def editSchedule(scheduleName, schedule):
    # type: (AnyStr, AbstractScheduleModel) -> UIResponse
    """Edit an existing schedule.

    Args:
        scheduleName: Name of the schedule to edit (case-sensitive).
        schedule: Replacement schedule; any AbstractScheduleModel subclass
            (e.g. BasicScheduleModel or CompositeScheduleModel).

    Returns:
        A UIResponse with warnings, errors, and info about the edit.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(scheduleName, schedule)
    return UIResponse(Locale.ENGLISH)
def editUser(userSource, user):
    # type: (AnyStr, PyUser) -> UIResponse
    """Replace a user's data in a user source with the given user object.

    Args:
        userSource: User source containing the user; blank uses the default.
        user: The user to update.

    Returns:
        A UIResponse with warnings, errors, and information about the edit
        attempt.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(userSource, user)
    return UIResponse(Locale.ENGLISH)
def getHoliday(holidayName):
    # type: (AnyStr) -> Optional[HolidayModel]
    """Return a specific holiday.

    Args:
        holidayName: Name of the holiday to return (case-sensitive).

    Returns:
        The holiday as a HolidayModel, or None if not found.
    """
    # Mock implementation: echo the name and return a stub holiday.
    print(holidayName)
    return HolidayModel(holidayName, Date(), True)
def getHolidayNames():
    # type: () -> List[AnyStr]
    """Return the names of all defined holidays.

    Returns:
        A list of holiday names; empty if no holidays are defined.
    """
    # Mock implementation: return a fixed sample of holiday names.
    return ["Cinco de mayo", "Labor Day", "Groundhog Day"]
def getHolidays():
    # type: () -> List[HolidayModel]
    """Return all available holidays.

    Returns:
        A list of holidays, as HolidayModel objects.
    """
    # Mock implementation: return a single stub holiday.
    return [HolidayModel("Cinco de mayo", Date(), True)]
def getNewUser(userSource, username):
    # type: (AnyStr, AnyStr) -> PyUser
    """Create a new user object (not yet persisted).

    The user is not added to the user source until addUser is called.

    Args:
        userSource: User source in which the user will be created.
        username: Username for the new user; existence/validity is not
            checked here.

    Returns:
        The new user object.
    """
    # Mock implementation: echo the arguments and return an empty user.
    print(userSource, username)
    return PyUser()
def getRoles(userSource):
    # type: (AnyStr) -> List[AnyStr]
    """Return all roles configured in a specific user source.

    Args:
        userSource: The user source to fetch the roles for.

    Returns:
        A list of role name strings.
    """
    # Mock implementation: echo the source and return fixed sample roles.
    print(userSource)
    return ["Administrator", "Designer", "Developer"]
def getSchedule(scheduleName):
    # type: (AnyStr) -> Optional[AbstractScheduleModel]
    """Return a specific schedule.

    Args:
        scheduleName: Name of the schedule to return (case-sensitive).

    Returns:
        The schedule (BasicSchedule, CompositeSchedule, or a module-
        registered type). When not found, Vision Clients and the Designer
        get None, while Gateway-scoped callers (e.g. Perspective) get an
        IllegalArgumentException.
    """
    # Mock implementation: echo the name and return a stub schedule.
    print(scheduleName)
    return BasicScheduleModel()
def getScheduleNames():
    # type: () -> List[AnyStr]
    """Return the names of all available schedules.

    Returns:
        A list of schedule name strings.
    """
    # Mock implementation: return a fixed sample of schedule names.
    return ["A", "Always", "B", "C", "Example", "MyComposite", "MySchedule"]
def getScheduledUsers(userSource, date=None):
    # type: (AnyStr, Optional[Union[Date, int]]) -> List[PyUser]
    """Return the users that are scheduled on.

    If no users are scheduled, an empty list is returned.

    Args:
        userSource: Name of the user source to check for scheduled users.
        date: The date to check schedules for; may be a Java Date or Unix
            time in ms. If omitted, the current date and time are used.

    Returns:
        All Users scheduled for the given date, taking schedule
        adjustments into account.
    """
    if date is None:
        # Bug fix: the previous default ``date=Date()`` was evaluated once
        # at import time, so "omitted" silently meant the module-load
        # timestamp instead of the documented current date and time.
        date = Date()
    # Mock implementation: echo the arguments and return a stub user list.
    print(userSource, date)
    return [PyUser()]
def getSchedules():
    # type: () -> List[AbstractScheduleModel]
    """Return all available schedule models.

    Each entry exposes the schedule's configuration (e.g. times for each
    day of the week).

    Returns:
        A list of schedule models (BasicScheduleModel,
        CompositeScheduleModel, or module-registered types).
    """
    # Mock implementation: return a single stub schedule.
    return [BasicScheduleModel()]
def getUser(userSource, username):
    # type: (AnyStr, AnyStr) -> PyUser
    """Look up a specific user in a user source by username.

    The full User object is returned except for the user's password.

    Args:
        userSource: User source to search; blank uses the Vision Client's
            default user source.
        username: The username to search for.

    Returns:
        The user.
    """
    # Mock implementation: echo the arguments and return an empty user.
    print(userSource, username)
    return PyUser()
def getUserSources():
    # type: () -> List[UserSourceMeta]
    """Return all user source profiles configured in the Gateway.

    Each object carries "name", "description", and "type" properties.

    Returns:
        All user source profiles, in ascending order by name.
    """
    # Mock implementation: return the stock "default" internal profile.
    return [
        UserSourceMeta(
            "default",
            "This user source profile was automatically created during a clean startup",
            "INTERNAL",
        )
    ]
def getUsers(userSource):
    # type: (AnyStr) -> List[PyUser]
    """Return the users in a specific user source.

    Each returned User contains all of the user's information except the
    password.

    Args:
        userSource: Name of the user source to list users from.

    Returns:
        A list of User objects.
    """
    # Mock implementation: echo the source and return a stub user list.
    print(userSource)
    return [PyUser()]
def isUserScheduled(user, date=None):
    # type: (PyUser, Optional[Union[Date, int]]) -> bool
    """Check whether a user is scheduled at a given date/time.

    Args:
        user: The User object to check on the schedule.
        date: The date to check schedules for; may be a Java Date or Unix
            time in ms. If omitted, the current date and time are used.

    Returns:
        True if the user is scheduled for the specified date, else False.
    """
    # Mock implementation: echo the arguments and report "scheduled".
    print(user, date)
    return True
def removeHoliday(holidayName):
    # type: (AnyStr) -> UIResponse
    """Delete a holiday.

    Args:
        holidayName: Name of the holiday to delete (case-sensitive).

    Returns:
        A UIResponse with warnings, errors, and info about the deletion.
    """
    # Mock implementation: echo the argument and return an empty response.
    print(holidayName)
    return UIResponse(Locale.ENGLISH)
def removeRole(userSource, role):
    # type: (AnyStr, AnyStr) -> UIResponse
    """Remove a role from the specified user source.

    When altering the Gateway System User Source, the Allow User Admin
    setting must be enabled.

    Args:
        userSource: User source containing the role; blank uses the default.
        role: Role to remove; must exist.

    Returns:
        A UIResponse with warnings, errors, and info about the deletion.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(userSource, role)
    return UIResponse(Locale.ENGLISH)
def removeSchedule(scheduleName):
    # type: (AnyStr) -> UIResponse
    """Delete a schedule.

    Schedules used inside Composite Schedules cannot be deleted until they
    are removed from the Composite Schedule.

    Args:
        scheduleName: Name of the schedule to delete (case-sensitive).

    Returns:
        A UIResponse with warnings, errors, and info about the deletion.
    """
    # Mock implementation: echo the argument and return an empty response.
    print(scheduleName)
    return UIResponse(Locale.ENGLISH)
def removeUser(userSource, username):
    # type: (AnyStr, AnyStr) -> UIResponse
    """Remove a user from a user source by username.

    Args:
        userSource: User source containing the user; blank uses the default.
        username: Username of the user to remove.

    Returns:
        A UIResponse with warnings, errors, and information about the
        removal attempt.
    """
    # Mock implementation: echo the arguments and return an empty response.
    print(userSource, username)
    return UIResponse(Locale.ENGLISH)
/edc-protocol-incident-0.1.29.tar.gz/edc-protocol-incident-0.1.29/edc_protocol_incident/admin/protocol_deviation_violation_admin.py | from django.contrib import admin
from django_audit_fields.admin import audit_fieldset_tuple
from edc_model_admin.dashboard import ModelAdminSubjectDashboardMixin
from edc_model_admin.history import SimpleHistoryAdmin
from ..admin_site import edc_protocol_incident_admin
from ..forms import ProtocolDeviationViolationForm
from ..models import ProtocolDeviationViolation
@admin.register(ProtocolDeviationViolation, site=edc_protocol_incident_admin)
class ProtocolDeviationViolationAdmin(ModelAdminSubjectDashboardMixin, SimpleHistoryAdmin):
    """Django admin for protocol deviation/violation reports.

    The change form is grouped into incident summary, violation details,
    actions taken, report status, and audit sections.
    """

    form = ProtocolDeviationViolationForm

    # Change-form layout; tuple order controls on-screen field order.
    fieldsets = (
        (
            None,
            {
                "fields": (
                    "subject_identifier",
                    "report_datetime",
                    "short_description",
                    "report_type",
                )
            },
        ),
        (
            "Details of protocol violation",
            {
                "fields": (
                    "safety_impact",
                    "safety_impact_details",
                    "study_outcomes_impact",
                    "study_outcomes_impact_details",
                    "violation_datetime",
                    "violation",
                    "violation_other",
                    "violation_description",
                    "violation_reason",
                )
            },
        ),
        (
            "Actions taken",
            {
                "fields": (
                    "corrective_action_datetime",
                    "corrective_action",
                    "preventative_action_datetime",
                    "preventative_action",
                    "action_required",
                )
            },
        ),
        (
            "Report status",
            {
                "fields": (
                    "report_status",
                    "report_closed_datetime",
                )
            },
        ),
        audit_fieldset_tuple,
    )

    # Render these choice fields as vertical radio buttons instead of selects.
    radio_fields = {
        "action_required": admin.VERTICAL,
        "report_status": admin.VERTICAL,
        "report_type": admin.VERTICAL,
        "safety_impact": admin.VERTICAL,
        "study_outcomes_impact": admin.VERTICAL,
    }

    # Changelist sidebar filters.
    list_filter = (
        "report_type",
        "safety_impact",
        "study_outcomes_impact",
        "report_status",
    )

    # Changelist columns ("dashboard" comes from ModelAdminSubjectDashboardMixin).
    list_display = (
        "subject_identifier",
        "dashboard",
        "report_type",
        "safety_impact",
        "study_outcomes_impact",
        "report_status",
    )

    # Changelist search box matches on the subject identifier.
    search_fields = ("subject_identifier",)