/bambu-tools-3.3.4.tar.gz/bambu-tools-3.3.4/bambu/dataportability/simplexmlwriter.py
##
# Tools to write XML files, without having to deal with encoding
# issues, well-formedness, etc.
# <p>
# The current version does not provide built-in support for
# namespaces. To create files using namespaces, you have to provide
# "xmlns" attributes and explicitly add prefixes to tags and
# attributes.
#
# <h3>Patterns</h3>
#
# The following example generates a small XHTML document.
# <pre>
#
# from elementtree.SimpleXMLWriter import XMLWriter
# import sys
#
# w = XMLWriter(sys.stdout)
#
# html = w.start("html")
#
# w.start("head")
# w.element("title", "my document")
# w.element("meta", name="generator", value="my application 1.0")
# w.end()
#
# w.start("body")
# w.element("h1", "this is a heading")
# w.element("p", "this is a paragraph")
#
# w.start("p")
# w.data("this is ")
# w.element("b", "bold")
# w.data(" and ")
# w.element("i", "italic")
# w.data(".")
# w.end("p")
#
# w.close(html)
# </pre>
##
import re, sys, string
try:
unicode("")
except NameError:
def encode(s, encoding):
# 1.5.2: application must use the right encoding
return s
_escape = re.compile(r"[&<>\"\x80-\xff]+") # 1.5.2
else:
def encode(s, encoding):
return s.encode(encoding)
_escape = re.compile(eval(r'u"[&<>\"\u0080-\uffff]+"'))
def encode_entity(text, pattern=_escape):
# map reserved and non-ascii characters to numerical entities
def escape_entities(m):
out = []
for char in m.group():
out.append("&#%d;" % ord(char))
return string.join(out, "")
return encode(pattern.sub(escape_entities, text), "ascii")
del _escape
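# Illustrative behaviour of encode_entity (a doctest-style comment, not part
# of the original module; assumes a Python 2 interpreter, since this code
# predates Python 3):
#
#   >>> encode_entity(u"caf\xe9 <b>")
#   'caf&#233; &#60;b&#62;'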
#
# the following functions assume an ascii-compatible encoding
# (or "utf-16")
def escape_cdata(s, encoding=None, replace=string.replace):
    s = replace(s, "&", "&amp;")
    s = replace(s, "<", "&lt;")
    s = replace(s, ">", "&gt;")
    if encoding:
        try:
            return encode(s, encoding)
        except UnicodeError:
            return encode_entity(s)
    return s
def escape_attrib(s, encoding=None, replace=string.replace):
    s = replace(s, "&", "&amp;")
    s = replace(s, "'", "&apos;")
    s = replace(s, "\"", "&quot;")
    s = replace(s, "<", "&lt;")
    s = replace(s, ">", "&gt;")
    if encoding:
        try:
            return encode(s, encoding)
        except UnicodeError:
            return encode_entity(s)
    return s
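# Quick sanity checks for the escape helpers above (doctest-style comments,
# not part of the original module):
#
#   >>> escape_cdata("a < b & c")
#   'a &lt; b &amp; c'
#   >>> escape_attrib('say "hi"')
#   'say &quot;hi&quot;'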
##
# XML writer class.
#
# @param file A file or file-like object. This object must implement
# a <b>write</b> method that takes an 8-bit string.
# @param encoding Optional encoding.
class SimpleXMLWriter:
def __init__(self, file, encoding="us-ascii"):
if not hasattr(file, "write"):
file = open(file, "w")
self.__write = file.write
if hasattr(file, "flush"):
self.flush = file.flush
self.__open = 0 # true if start tag is open
self.__tags = []
self.__data = []
self.__encoding = encoding
def __flush(self):
# flush internal buffers
if self.__open:
self.__write(">")
self.__open = 0
if self.__data:
data = string.join(self.__data, "")
self.__write(escape_cdata(data, self.__encoding))
self.__data = []
##
# Writes an XML declaration.
def declaration(self):
encoding = self.__encoding
if encoding == "us-ascii" or encoding == "utf-8":
self.__write("<?xml version='1.0'?>\n")
else:
self.__write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
##
# Opens a new element. Attributes can be given as keyword
# arguments, or as a string/string dictionary. You can pass in
# 8-bit strings or Unicode strings; the former are assumed to use
# the encoding passed to the constructor. The method returns an
# opaque identifier that can be passed to the <b>close</b> method,
# to close all open elements up to and including this one.
#
# @param tag Element tag.
# @param attrib Attribute dictionary. Alternatively, attributes
# can be given as keyword arguments.
# @return An element identifier.
def start(self, tag, attrib={}, **extra):
self.__flush()
tag = escape_cdata(tag, self.__encoding)
self.__data = []
self.__tags.append(tag)
self.__write("<%s" % tag)
if attrib or extra:
attrib = attrib.copy()
attrib.update(extra)
attrib = attrib.items()
attrib.sort()
for k, v in attrib:
k = escape_cdata(k, self.__encoding)
v = escape_attrib(v, self.__encoding)
self.__write(" %s=\"%s\"" % (k, v))
self.__open = 1
return len(self.__tags)-1
##
# Adds a comment to the output stream.
#
# @param comment Comment text, as an 8-bit string or Unicode string.
def comment(self, comment):
self.__flush()
self.__write("<!-- %s -->\n" % escape_cdata(comment, self.__encoding))
##
# Adds character data to the output stream.
#
# @param text Character data, as an 8-bit string or Unicode string.
def data(self, text):
self.__data.append(text)
##
# Adds unparsed character data to the output stream.
#
# @param text Character data, as an 8-bit string or Unicode string.
def cdata(self, text):
try:
text = encode(text, self.__encoding)
except UnicodeError:
text = encode_entity(text)
self.__flush()
self.__write('<![CDATA[%s]]>' % text)
##
# Closes the current element (opened by the most recent call to
# <b>start</b>).
#
# @param tag Element tag. If given, the tag must match the start
# tag. If omitted, the current element is closed.
def end(self, tag=None):
if tag:
assert self.__tags, "unbalanced end(%s)" % tag
assert escape_cdata(tag, self.__encoding) == self.__tags[-1],\
"expected end(%s), got %s" % (self.__tags[-1], tag)
else:
assert self.__tags, "unbalanced end()"
tag = self.__tags.pop()
if self.__data:
self.__flush()
elif self.__open:
self.__open = 0
self.__write(" />")
return
self.__write("</%s>" % tag)
##
# Closes open elements, up to (and including) the element identified
# by the given identifier.
#
# @param id Element identifier, as returned by the <b>start</b> method.
def close(self, id):
while len(self.__tags) > id:
self.end()
##
# Adds an entire element. This is the same as calling <b>start</b>,
# <b>data</b>, and <b>end</b> in sequence. The <b>text</b> argument
# can be omitted.
def element(self, tag, text=None, attrib={}, **extra):
apply(self.start, (tag, attrib), extra)
if text:
self.data(text)
self.end()
##
# Flushes the output stream.
def flush(self):
        pass # replaced by the constructor

/sports2d-0.2.5-py3-none-any.whl/Sports2D/Utilities/common.py

## INIT
import sys
import numpy as np
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from PyQt5.QtWidgets import QMainWindow, QApplication, QWidget, QTabWidget, QVBoxLayout
from scipy import interpolate
## AUTHORSHIP INFORMATION
__author__ = "David Pagnon"
__copyright__ = "Copyright 2023, Sports2D"
__credits__ = ["David Pagnon"]
__license__ = "BSD 3-Clause License"
__version__ = "0.1"
__maintainer__ = "David Pagnon"
__email__ = "[email protected]"
__status__ = "Development"
## CLASSES
class plotWindow():
'''
Display several figures in tabs
Taken from https://github.com/superjax/plotWindow/blob/master/plotWindow.py
USAGE:
pw = plotWindow()
f = plt.figure()
plt.plot(x1, y1)
pw.addPlot("1", f)
f = plt.figure()
plt.plot(x2, y2)
pw.addPlot("2", f)
'''
def __init__(self, parent=None):
self.app = QApplication(sys.argv)
self.MainWindow = QMainWindow()
self.MainWindow.__init__()
self.MainWindow.setWindowTitle("Multitabs figure")
self.canvases = []
self.figure_handles = []
self.toolbar_handles = []
self.tab_handles = []
self.current_window = -1
self.tabs = QTabWidget()
self.MainWindow.setCentralWidget(self.tabs)
self.MainWindow.resize(1280, 720)
self.MainWindow.show()
def addPlot(self, title, figure):
new_tab = QWidget()
layout = QVBoxLayout()
new_tab.setLayout(layout)
figure.subplots_adjust(left=0.1, right=0.99, bottom=0.1, top=0.91, wspace=0.2, hspace=0.2)
new_canvas = FigureCanvas(figure)
new_toolbar = NavigationToolbar(new_canvas, new_tab)
layout.addWidget(new_canvas)
layout.addWidget(new_toolbar)
self.tabs.addTab(new_tab, title)
self.toolbar_handles.append(new_toolbar)
self.canvases.append(new_canvas)
self.figure_handles.append(figure)
self.tab_handles.append(new_tab)
def show(self):
self.app.exec_()
## FUNCTIONS
def interpolate_zeros_nans(col, *args):
'''
Interpolate missing points (of value zero),
unless more than N contiguous values are missing.
INPUTS:
- col: pandas column of coordinates
- args[0] = N: max number of contiguous bad values, above which they won't be interpolated
- args[1] = kind: 'linear', 'slinear', 'quadratic', 'cubic'. Default: 'cubic'
OUTPUT:
- col_interp: interpolated pandas column
'''
if len(args)==2:
N, kind = args
if len(args)==1:
N = np.inf
kind = args[0]
if not args:
N = np.inf
    # Interpolate zeros and nans
    mask = ~(np.isnan(col) | col.eq(0)) # true where the value is valid (neither nan nor zero)
idx_good = np.where(mask)[0]
if len(idx_good)>5:
if 'kind' not in locals(): # 'linear', 'slinear', 'quadratic', 'cubic'
f_interp = interpolate.interp1d(idx_good, col[idx_good], kind="linear", fill_value='extrapolate', bounds_error=False)
else:
f_interp = interpolate.interp1d(idx_good, col[idx_good], kind=kind, fill_value='extrapolate', bounds_error=False)
        col_interp = np.where(mask, col, f_interp(col.index)) # replace bad values (mask False) with interpolated ones
        # Reintroduce nans if length of bad sequence > N
        idx_notgood = np.where(~mask)[0]
        gaps = np.where(np.diff(idx_notgood) > 1)[0] + 1 # split points where the bad-value indices are not contiguous
sequences = np.split(idx_notgood, gaps)
if sequences[0].size>0:
for seq in sequences:
                if len(seq) > N: # sequences of bad values longer than N are reset to nan instead of being kept interpolated
col_interp[seq] = np.nan
else:
col_interp = col.copy()
    return col_interp
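# Illustrative usage of interpolate_zeros_nans (a sketch; the values below are
# made up and mimic keypoint coordinates with dropped frames):
#
#   import numpy as np
#   import pandas as pd
#   col = pd.Series([1.0, 2.0, 0.0, 0.0, 5.0, 6.0, np.nan, 8.0, 9.0, 10.0])
#   col_interp = interpolate_zeros_nans(col, 3, 'linear')
#   # -> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10: gaps of up to 3 frames are filled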

/jiver-0.1.28.tar.gz/jiver-0.1.28/extra/jiver-completion.sh

function _mycomplete_()
{
local cmd="${1##*/}"
local word=${COMP_WORDS[COMP_CWORD]}
local line=${COMP_LINE}
local values=''
case "$line" in
*build-and-run*)
if [ "${line: -1}" == '-' ]
then
values='--skip-tests'
else
local elements=()
local value=''
local x=`pwd`;
while [ "$x" != "/" ] ;
do
for i in $(find "$x" -maxdepth 1 -name pom.xml)
do
local modules=`cat $i | grep '<module>' | sed 's/<module>//' | sed 's/<.*//' | sed 's/ //g' | sed 's/ //g'`
elements=(${elements[@]} $modules)
if [ `cat $i | grep plugin.dirs | wc -l` -ne "0" ]
then
local plugin_dirs=`cat $i | sed 's/<project.*>/<project>/' | xmllint --xpath "//plugin.dirs/text()" - | sed 's/^[ ]*//' | sed '/^$/d' | sed 's/,$//' | sed 's/.*\///'`
elements=(${elements[@]} $plugin_dirs)
fi
done
x=`dirname "$x"`;
done
local sorted=$(printf '%s\n' "${elements[@]}"|sort)
values=$sorted
fi
;;
*build-and-run-all*)
values=''
;;
*core-checkout-url*)
if [ "${line: -1}" == '-' ]
then
values='--depth-1'
else
values=''
fi
;;
*core-checkout\ *)
if [ "${line: -1}" == '-' ]
then
values='--depth-1'
else
local version_numbers=`cat /usr/local/jiver/git-checkout.txt | awk '{print $1}' | sort `
values=$version_numbers
fi
;;
*create*)
values='project plugin'
;;
*database*)
values='connect backup restore-latest create-project-databases drop-project-databases'
;;
*diffmerge*)
values=''
;;
*move-theme-to-top-level*)
values=''
;;
*run-tabs*)
values='--no-search-service --target-log'
;;
*overlay*)
values=''
;;
*setup-false*)
values=''
;;
*soy-escape*)
values=''
;;
*upgrade-analyzer*)
values=''
;;
*upgrade-project*)
local upgrade_versions=`cat /usr/local/jiver/project-upgrade.txt | sed 's/-.*//' | sort `
values=$upgrade_versions
;;
*vpn*)
values='all split my-current-gateway'
;;
*)
values='build-and-run core-checkout core-checkout-url create database diffmerge move-theme-to-top-level overlay run-tabs upgrade-analyzer upgrade-project vpn'
esac
COMPREPLY=($(compgen -W "$values" -- "${word}"))
}
complete -F _mycomplete_ jiver

/cryptoxlib-aio-5.3.0.tar.gz/cryptoxlib-aio-5.3.0/cryptoxlib/CryptoXLibClient.py

import asyncio
import aiohttp
import ssl
import logging
import datetime
import json
import enum
import time
from abc import ABC, abstractmethod
from multidict import CIMultiDictProxy
from typing import List, Optional, Dict
from cryptoxlib.version_conversions import async_create_task
from cryptoxlib.Timer import Timer
from cryptoxlib.exceptions import CryptoXLibException
from cryptoxlib.WebsocketMgr import Subscription, WebsocketMgr
LOG = logging.getLogger(__name__)
class RestCallType(enum.Enum):
GET = "GET"
POST = "POST"
DELETE = "DELETE"
PUT = "PUT"
class SubscriptionSet(object):
SUBSCRIPTION_SET_ID_SEQ = 0
def __init__(self, subscriptions: List[Subscription]):
self.subscription_set_id = SubscriptionSet.SUBSCRIPTION_SET_ID_SEQ
SubscriptionSet.SUBSCRIPTION_SET_ID_SEQ += 1
self.subscriptions: List[Subscription] = subscriptions
self.websocket_mgr: Optional[WebsocketMgr] = None
def find_subscription(self, subscription: Subscription) -> Optional[Subscription]:
for s in self.subscriptions:
if s.internal_subscription_id == subscription.internal_subscription_id:
return s
return None
class CryptoXLibClient(ABC):
def __init__(self, api_trace_log: bool = False, ssl_context: ssl.SSLContext = None) -> None:
self.api_trace_log = api_trace_log
self.rest_session = None
self.subscription_sets: Dict[int, SubscriptionSet] = {}
if ssl_context is not None:
self.ssl_context = ssl_context
else:
self.ssl_context = ssl.create_default_context()
@abstractmethod
def _get_rest_api_uri(self) -> str:
pass
@abstractmethod
def _sign_payload(self, rest_call_type: RestCallType, resource: str, data: dict = None, params: dict = None, headers: dict = None) -> None:
pass
@abstractmethod
def _preprocess_rest_response(self, status_code: int, headers: 'CIMultiDictProxy[str]', body: Optional[dict]) -> None:
pass
@abstractmethod
def _get_websocket_mgr(self, subscriptions: List[Subscription], startup_delay_ms: int = 0,
ssl_context = None) -> WebsocketMgr:
pass
async def close(self) -> None:
session = self._get_rest_session()
if session is not None:
await session.close()
async def _create_get(self, resource: str, params: dict = None, headers: dict = None, signed: bool = False,
api_variable_path: str = None) -> dict:
return await self._create_rest_call(RestCallType.GET, resource, None, params, headers, signed, api_variable_path)
async def _create_post(self, resource: str, data: dict = None, params: dict = None, headers: dict = None, signed: bool = False,
api_variable_path: str = None) -> dict:
return await self._create_rest_call(RestCallType.POST, resource, data, params, headers, signed, api_variable_path)
async def _create_delete(self, resource: str, data:dict = None, params: dict = None, headers: dict = None, signed: bool = False,
api_variable_path: str = None) -> dict:
return await self._create_rest_call(RestCallType.DELETE, resource, data, params, headers, signed, api_variable_path)
async def _create_put(self, resource: str, data: dict = None, params: dict = None, headers: dict = None, signed: bool = False,
api_variable_path: str = None) -> dict:
return await self._create_rest_call(RestCallType.PUT, resource, data, params, headers, signed, api_variable_path)
async def _create_rest_call(self, rest_call_type: RestCallType, resource: str, data: dict = None, params: dict = None, headers: dict = None, signed: bool = False,
api_variable_path: str = None) -> dict:
with Timer('RestCall'):
# ensure headers & params are always valid objects
if headers is None:
headers = {}
if params is None:
params = {}
# add signature into the parameters
if signed:
self._sign_payload(rest_call_type, resource, data, params, headers)
resource_uri = self._get_rest_api_uri()
if api_variable_path is not None:
resource_uri += api_variable_path
resource_uri += resource
if rest_call_type == RestCallType.GET:
rest_call = self._get_rest_session().get(resource_uri, json = data, params = params, headers = headers, ssl = self.ssl_context)
elif rest_call_type == RestCallType.POST:
rest_call = self._get_rest_session().post(resource_uri, json = data, params = params, headers = headers, ssl = self.ssl_context)
elif rest_call_type == RestCallType.DELETE:
rest_call = self._get_rest_session().delete(resource_uri, json = data, params = params, headers = headers, ssl = self.ssl_context)
elif rest_call_type == RestCallType.PUT:
rest_call = self._get_rest_session().put(resource_uri, json = data, params = params, headers = headers, ssl = self.ssl_context)
else:
raise Exception(f"Unsupported REST call type {rest_call_type}.")
LOG.debug(f"> rest type [{rest_call_type.name}], uri [{resource_uri}], params [{params}], headers [{headers}], data [{data}]")
async with rest_call as response:
status_code = response.status
headers = response.headers
body = await response.text()
LOG.debug(f"<: status [{status_code}], response [{body}]")
if len(body) > 0:
try:
body = json.loads(body)
except json.JSONDecodeError:
body = {
"raw": body
}
self._preprocess_rest_response(status_code, headers, body)
return {
"status_code": status_code,
"headers": headers,
"response": body
}
def _get_rest_session(self) -> aiohttp.ClientSession:
if self.rest_session is not None:
return self.rest_session
if self.api_trace_log:
trace_config = aiohttp.TraceConfig()
trace_config.on_request_start.append(CryptoXLibClient._on_request_start)
trace_config.on_request_end.append(CryptoXLibClient._on_request_end)
trace_configs = [trace_config]
else:
trace_configs = None
self.rest_session = aiohttp.ClientSession(trace_configs=trace_configs)
return self.rest_session
@staticmethod
def _clean_request_params(params: dict) -> dict:
clean_params = {}
for key, value in params.items():
if value is not None:
clean_params[key] = str(value)
return clean_params
@staticmethod
async def _on_request_start(session, trace_config_ctx, params) -> None:
LOG.debug(f"> Context: {trace_config_ctx}")
LOG.debug(f"> Params: {params}")
@staticmethod
async def _on_request_end(session, trace_config_ctx, params) -> None:
LOG.debug(f"< Context: {trace_config_ctx}")
LOG.debug(f"< Params: {params}")
@staticmethod
def _get_current_timestamp_ms() -> int:
return int(datetime.datetime.now(tz = datetime.timezone.utc).timestamp() * 1000)
@staticmethod
def _get_unix_timestamp_ns() -> int:
        return time.time_ns()  # time.time_ns() already returns nanoseconds; no scaling needed
def compose_subscriptions(self, subscriptions: List[Subscription]) -> int:
subscription_set = SubscriptionSet(subscriptions = subscriptions)
self.subscription_sets[subscription_set.subscription_set_id] = subscription_set
return subscription_set.subscription_set_id
async def add_subscriptions(self, subscription_set_id: int, subscriptions: List[Subscription]) -> None:
await self.subscription_sets[subscription_set_id].websocket_mgr.subscribe(subscriptions)
async def unsubscribe_subscriptions(self, subscriptions: List[Subscription]) -> None:
for subscription in subscriptions:
subscription_found = False
for id, subscription_set in self.subscription_sets.items():
if subscription_set.find_subscription(subscription) is not None:
subscription_found = True
await subscription_set.websocket_mgr.unsubscribe(subscriptions)
if not subscription_found:
raise CryptoXLibException(f"No active subscription {subscription.subscription_id} found.")
async def unsubscribe_subscription_set(self, subscription_set_id: int) -> None:
return await self.unsubscribe_subscriptions(self.subscription_sets[subscription_set_id].subscriptions)
async def unsubscribe_all(self) -> None:
for id, _ in self.subscription_sets.items():
await self.unsubscribe_subscription_set(id)
async def start_websockets(self, websocket_start_time_interval_ms: int = 0) -> None:
if len(self.subscription_sets) < 1:
raise CryptoXLibException("ERROR: There are no subscriptions to be started.")
tasks = []
startup_delay_ms = 0
for id, subscription_set in self.subscription_sets.items():
subscription_set.websocket_mgr = self._get_websocket_mgr(subscription_set.subscriptions, startup_delay_ms, self.ssl_context)
tasks.append(async_create_task(
subscription_set.websocket_mgr.run())
)
startup_delay_ms += websocket_start_time_interval_ms
done, pending = await asyncio.wait(tasks, return_when = asyncio.FIRST_EXCEPTION)
for task in done:
try:
task.result()
except Exception as e:
LOG.error(f"Unrecoverable exception occurred while processing messages: {e}")
LOG.info(f"Remaining websocket managers scheduled for shutdown.")
await self.shutdown_websockets()
if len(pending) > 0:
await asyncio.wait(pending, return_when = asyncio.ALL_COMPLETED)
LOG.info("All websocket managers shut down.")
raise
async def shutdown_websockets(self):
for id, subscription_set in self.subscription_sets.items():
            await subscription_set.websocket_mgr.shutdown()
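# Minimal illustrative subclass (a sketch, not shipped with the library; the
# endpoint URL and the no-op bodies are assumptions that only demonstrate the
# abstract surface a concrete exchange client must implement):
#
#   class MyExchangeClient(CryptoXLibClient):
#       def _get_rest_api_uri(self) -> str:
#           return "https://api.example.com/"  # hypothetical endpoint
#       def _sign_payload(self, rest_call_type, resource, data = None, params = None, headers = None) -> None:
#           pass  # a real client would add signature params/headers here
#       def _preprocess_rest_response(self, status_code, headers, body) -> None:
#           pass  # a real client would raise on error status codes here
#       def _get_websocket_mgr(self, subscriptions, startup_delay_ms = 0, ssl_context = None) -> WebsocketMgr:
#           raise NotImplementedError
#
#   Typical flow once subscriptions exist:
#       client = MyExchangeClient()
#       set_id = client.compose_subscriptions(subscriptions)
#       await client.start_websockets()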

/bb2_cyanide_api-0.0.6-py3-none-any.whl/bb2_cyanide_api/tournament.py

from .match import Match
from collections import Counter
class Tournament:
def __init__(self, *args:Match):
self.matches = args
def leaderboard(self):
coaches = {}
for match in self.matches:
if match.is_concede():
continue
for coach in [match.coach1(), match.coach2()]:
if not coach['coachname'] in coaches:
coaches[coach['coachname']] = Counter({'name': coach['coachname']})
winner = match.winner()
coach1_name = match.coach1()['coachname']
coach2_name = match.coach2()['coachname']
coaches[coach1_name]['matches'] += 1
coaches[coach2_name]['matches'] += 1
for stat in ["inflictedtouchdowns", "inflictedtackles", "inflictedcasualties",
'inflictedinjuries', 'inflictedko', 'inflicteddead', 'inflictedmetersrunning',
'inflictedpasses', 'inflictedcatches', 'inflictedinterceptions',
'sustainedexpulsions', 'sustainedcasualties', 'sustainedko',
'sustainedinjuries', 'sustaineddead', 'inflictedmeterspassing']:
coaches[coach1_name][stat] += match.team1()[stat]
coaches[coach2_name][stat] += match.team2()[stat]
coaches[coach1_name]['opponentinflictedinjuries'] += match.team2()['inflictedinjuries']
coaches[coach2_name]['opponentinflictedinjuries'] += match.team1()['inflictedinjuries']
coaches[coach1_name]['opponentinflictedko'] += match.team2()['inflictedko']
coaches[coach2_name]['opponentinflictedko'] += match.team1()['inflictedko']
coaches[coach1_name]['opponentinflictedcasualties'] += match.team2()['inflictedcasualties']
coaches[coach2_name]['opponentinflictedcasualties'] += match.team1()['inflictedcasualties']
coaches[coach1_name]['opponentinflicteddead'] += match.team2()['inflicteddead']
coaches[coach2_name]['opponentinflicteddead'] += match.team1()['inflicteddead']
coaches[coach1_name]['sustainedtouchdowns'] += match.team2()['inflictedtouchdowns']
coaches[coach2_name]['sustainedtouchdowns'] += match.team1()['inflictedtouchdowns']
coaches[coach1_name]['inflictedpushouts'] += sum(
[player['stats']['inflictedpushouts'] for player in match.team1()['roster']]
)
coaches[coach2_name]['inflictedpushouts'] += sum(
[player['stats']['inflictedpushouts'] for player in match.team2()['roster']]
)
coaches[coach1_name]['sustainedtackles'] += sum(
[player['stats']['sustainedtackles'] for player in match.team1()['roster']]
)
coaches[coach2_name]['sustainedtackles'] += sum(
[player['stats']['sustainedtackles'] for player in match.team2()['roster']]
)
if winner:
if winner == match.coach1():
coaches[coach1_name]['wins'] += 1
coaches[coach1_name]['points'] += 3
coaches[coach2_name]['losses'] += 1
else:
coaches[coach2_name]['wins'] += 1
coaches[coach2_name]['points'] += 3
coaches[coach1_name]['losses'] += 1
else:
coaches[coach1_name]['draws'] += 1
coaches[coach1_name]['points'] += 1
coaches[coach2_name]['draws'] += 1
coaches[coach2_name]['points'] += 1
        return [v for k, v in sorted(coaches.items(), key=lambda item: item[1]['points'], reverse=True)]
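# Illustrative usage (a sketch; Match objects wrap Cyanide API match records
# and their construction is defined in bb2_cyanide_api.match):
#
#   t = Tournament(match1, match2, match3)
#   for row in t.leaderboard():
#       print(row['name'], row['points'], row['wins'], row['draws'], row['losses'])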

/portapy-0.0.1.post2-py3-none-any.whl/portapy-js/node_modules/tslint/lib/rules/maxFileLineCountRule.js

"use strict";
/**
* @license
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var Lint = require("../index");
var Rule = /** @class */ (function (_super) {
tslib_1.__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
/* tslint:enable:object-literal-sort-keys */
Rule.FAILURE_STRING = function (lineCount, lineLimit) {
return ("This file has " + lineCount + " lines, which exceeds the maximum of " + lineLimit + " lines allowed. " +
"Consider breaking this file up into smaller parts");
};
Rule.prototype.isEnabled = function () {
return _super.prototype.isEnabled.call(this) && this.ruleArguments[0] > 0;
};
Rule.prototype.apply = function (sourceFile) {
var lineLimit = this.ruleArguments[0];
var lineCount = sourceFile.getLineStarts().length;
if (lineCount <= lineLimit) {
return [];
}
var len = sourceFile.text.length;
return [
new Lint.RuleFailure(sourceFile, len - 1, len, Rule.FAILURE_STRING(lineCount, lineLimit), this.ruleName),
];
};
/* tslint:disable:object-literal-sort-keys */
Rule.metadata = {
ruleName: "max-file-line-count",
description: "Requires files to remain under a certain number of lines",
rationale: Lint.Utils.dedent(templateObject_1 || (templateObject_1 = tslib_1.__makeTemplateObject(["\n Limiting the number of lines allowed in a file allows files to remain small,\n single purpose, and maintainable."], ["\n Limiting the number of lines allowed in a file allows files to remain small,\n single purpose, and maintainable."]))),
optionsDescription: "An integer indicating the maximum number of lines.",
options: {
type: "number",
minimum: "1",
},
optionExamples: [[true, 300]],
type: "maintainability",
typescriptOnly: false,
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var templateObject_1;

/PPPF-0.1.0.tar.gz/PPPF-0.1.0/pppf_clusters/cluster.py

from uuid import uuid4
from pppf_lib import is_hypothetical
class Cluster:
"""
A cluster of sequences has an ID that should be unique, an exemplar sequence, and a set of members.
For convenience, the exemplar is also guaranteed to be one of the members so that you don't need to search through both
the members and the examplar.
The ID can also be the exemplar name too!
:ivar id: A unique ID. If not provided we will calculate one
:ivar exmmplar: The exemplar sequence
:ivar members: a list or set (or settable object) of the members of the cluster
:ivar longest_id: the id of the longest protein or None if not set
:ivar longest_len: the length of the longest protein or None if not set
:ivar shortest_id: the id of the shortest protein or None if not set
:ivar shortest_len: the length of the shortest protein or None if not set
:ivar average_size: the average size of the members of the set or None if not set
:ivar number_of_members: the number of members in the cluster
:ivar number_of_functions: the number of unique functions in the cluster
:ivar functions: a dict of functions and their frequency
:ivar function: the most abundant function
    :ivar only_hypothetical: True if the set only has proteins whose functions are hypothetical.
"""
def __init__(self, id, exemplar, members):
"""
Initiate a cluster of sequences
:param id: the unique ID. Can be None to autogenerate one
:param exemplar: the exemplar sequence
:param members: a list or set of the members of the sequence
"""
if id:
self.id = id
else:
self.id = str(uuid4())
self.exemplar = exemplar
self.members = set(members)
self.members.add(exemplar)
self.longest_id = None
self.longest_len = None
self.shortest_id = None
self.shortest_len = None
self.average_size = None
self.number_of_members = len(self.members)
self.functions = {}
self.function = None
self.number_of_functions = 0
self.only_hypothetical = None
def is_hypothetical(self):
"""
Is the function associated with this class hypothetical?
:return: boolean: True if all the functions are hypothetical. False if not
"""
if self.only_hypothetical:
return self.only_hypothetical
for f in self.functions:
if not is_hypothetical(f):
self.only_hypothetical = False
return False
self.only_hypothetical = True
        return True
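# Illustrative usage (a sketch; the protein IDs and function names are made up):
#
#   c = Cluster(None, "prot_1", ["prot_2", "prot_3"])  # id is auto-generated via uuid4
#   c.functions = {"hypothetical protein": 3}
#   print(c.number_of_members)   # 3: the exemplar is added to the members
#   print(c.is_hypothetical())   # True, assuming pppf_lib flags that function as hypothetical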

/aws-amplify.cdk.exported-backend-0.0.6.tar.gz/aws-amplify.cdk.exported-backend-0.0.6/src/aws_amplify/cdk/exported_backend/__init__.py

import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from ._jsii import *
import aws_cdk
import aws_cdk.aws_apigateway
import aws_cdk.aws_appsync
import aws_cdk.aws_cognito
import aws_cdk.aws_lambda
import aws_cdk.aws_s3_deployment
import aws_cdk.cloudformation_include
import constructs
class APIGraphQLIncludedNestedStack(
metaclass=jsii.JSIIMeta,
jsii_type="@aws-amplify/cdk-exported-backend.APIGraphQLIncludedNestedStack",
):
def __init__(
self,
*,
included_template: aws_cdk.cloudformation_include.CfnInclude,
stack: aws_cdk.NestedStack,
) -> None:
'''
:param included_template: The CfnInclude that represents the template, which can be used to access Resources and other template elements.
:param stack: The NestedStack object which represents the scope of the template.
'''
included_stack = aws_cdk.cloudformation_include.IncludedNestedStack(
included_template=included_template, stack=stack
)
jsii.create(self.__class__, self, [included_stack])
@jsii.member(jsii_name="appSyncAPIKey")
def app_sync_api_key(self) -> aws_cdk.aws_appsync.CfnApiKey:
'''
        :return: Appsync Api Key {CfnApiKey} of the api stack
:throws: {CfnResourceNotFoundError} if not found
'''
return typing.cast(aws_cdk.aws_appsync.CfnApiKey, jsii.invoke(self, "appSyncAPIKey", []))
@jsii.member(jsii_name="getResourceConstruct")
def get_resource_construct(self, logical_id: builtins.str) -> typing.Any:
'''
:param logical_id: -
'''
if __debug__:
def stub(logical_id: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument logical_id", value=logical_id, expected_type=type_hints["logical_id"])
return typing.cast(typing.Any, jsii.invoke(self, "getResourceConstruct", [logical_id]))
@jsii.member(jsii_name="graphQLAPI")
def graph_qlapi(self) -> aws_cdk.aws_appsync.CfnGraphQLApi:
'''
        :return: the GraphQL API {CfnGraphQLApi} of the api stack
:throws: {CfnResourceNotFoundError} if not found
'''
return typing.cast(aws_cdk.aws_appsync.CfnGraphQLApi, jsii.invoke(self, "graphQLAPI", []))
@jsii.member(jsii_name="graphQLSchema")
def graph_ql_schema(self) -> aws_cdk.aws_appsync.CfnGraphQLSchema:
'''
        :return: the GraphQL schema {CfnGraphQLSchema} of the api stack
:throws: {CfnResourceNotFoundError} if not found
'''
return typing.cast(aws_cdk.aws_appsync.CfnGraphQLSchema, jsii.invoke(self, "graphQLSchema", []))
@jsii.member(jsii_name="modelNestedStack")
def model_nested_stack(
self,
table_name: builtins.str,
) -> aws_cdk.cloudformation_include.IncludedNestedStack:
'''
:param table_name: is the model name in your Graph QL API.
        :return: the nested stack for the given model
'''
if __debug__:
def stub(table_name: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument table_name", value=table_name, expected_type=type_hints["table_name"])
return typing.cast(aws_cdk.cloudformation_include.IncludedNestedStack, jsii.invoke(self, "modelNestedStack", [table_name]))
@builtins.property
@jsii.member(jsii_name="includedTemplate")
def included_template(self) -> aws_cdk.cloudformation_include.CfnInclude:
return typing.cast(aws_cdk.cloudformation_include.CfnInclude, jsii.get(self, "includedTemplate"))
@included_template.setter
def included_template(
self,
value: aws_cdk.cloudformation_include.CfnInclude,
) -> None:
if __debug__:
def stub(value: aws_cdk.cloudformation_include.CfnInclude) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includedTemplate", value)
@builtins.property
@jsii.member(jsii_name="stack")
def stack(self) -> aws_cdk.NestedStack:
return typing.cast(aws_cdk.NestedStack, jsii.get(self, "stack"))
@stack.setter
def stack(self, value: aws_cdk.NestedStack) -> None:
if __debug__:
def stub(value: aws_cdk.NestedStack) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "stack", value)
class APIRestIncludedStack(
metaclass=jsii.JSIIMeta,
jsii_type="@aws-amplify/cdk-exported-backend.APIRestIncludedStack",
):
def __init__(
self,
included_stack: typing.Union[aws_cdk.cloudformation_include.IncludedNestedStack, typing.Dict[str, typing.Any]],
resource_name: builtins.str,
) -> None:
'''
:param included_stack: -
:param resource_name: -
'''
if __debug__:
def stub(
included_stack: typing.Union[aws_cdk.cloudformation_include.IncludedNestedStack, typing.Dict[str, typing.Any]],
resource_name: builtins.str,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument included_stack", value=included_stack, expected_type=type_hints["included_stack"])
check_type(argname="argument resource_name", value=resource_name, expected_type=type_hints["resource_name"])
jsii.create(self.__class__, self, [included_stack, resource_name])
@jsii.member(jsii_name="apiDeployment")
def api_deployment(self) -> aws_cdk.aws_apigateway.CfnDeployment:
'''Gets the Deployment of the Rest API.'''
return typing.cast(aws_cdk.aws_apigateway.CfnDeployment, jsii.invoke(self, "apiDeployment", []))
@jsii.member(jsii_name="getResourceConstruct")
def get_resource_construct(self, logical_id: builtins.str) -> typing.Any:
'''
:param logical_id: -
'''
if __debug__:
def stub(logical_id: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument logical_id", value=logical_id, expected_type=type_hints["logical_id"])
return typing.cast(typing.Any, jsii.invoke(self, "getResourceConstruct", [logical_id]))
@jsii.member(jsii_name="restAPI")
def rest_api(self) -> aws_cdk.aws_apigateway.CfnRestApi:
'''Gets the RestApi of the API stack.'''
return typing.cast(aws_cdk.aws_apigateway.CfnRestApi, jsii.invoke(self, "restAPI", []))
@builtins.property
@jsii.member(jsii_name="includedTemplate")
def included_template(self) -> aws_cdk.cloudformation_include.CfnInclude:
return typing.cast(aws_cdk.cloudformation_include.CfnInclude, jsii.get(self, "includedTemplate"))
@included_template.setter
def included_template(
self,
value: aws_cdk.cloudformation_include.CfnInclude,
) -> None:
if __debug__:
def stub(value: aws_cdk.cloudformation_include.CfnInclude) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includedTemplate", value)
@builtins.property
@jsii.member(jsii_name="resourceName")
def resource_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "resourceName"))
@resource_name.setter
def resource_name(self, value: builtins.str) -> None:
if __debug__:
def stub(value: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "resourceName", value)
@builtins.property
@jsii.member(jsii_name="stack")
def stack(self) -> aws_cdk.NestedStack:
return typing.cast(aws_cdk.NestedStack, jsii.get(self, "stack"))
@stack.setter
def stack(self, value: aws_cdk.NestedStack) -> None:
if __debug__:
def stub(value: aws_cdk.NestedStack) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "stack", value)
class AmplifyExportedBackend(
constructs.Construct,
metaclass=jsii.JSIIMeta,
jsii_type="@aws-amplify/cdk-exported-backend.AmplifyExportedBackend",
):
'''- Used to include the backend generated by running ``amplify export --out <path>`` into the cdk app.
:see:
'''
def __init__(
self,
scope: constructs.Construct,
id: builtins.str,
*,
amplify_environment: builtins.str,
path: builtins.str,
analytics_reporting: typing.Optional[builtins.bool] = None,
description: typing.Optional[builtins.str] = None,
env: typing.Optional[typing.Union[aws_cdk.Environment, typing.Dict[str, typing.Any]]] = None,
stack_name: typing.Optional[builtins.str] = None,
synthesizer: typing.Optional[aws_cdk.IStackSynthesizer] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
termination_protection: typing.Optional[builtins.bool] = None,
) -> None:
'''
:param scope: The parent construct of this template.
:param id: The ID of this construct.
        :param amplify_environment: The Amplify CLI environment to deploy to. The Amplify backend requires a stage to deploy. Default: is 'dev'
:param path: The path to the exported folder that contains the artifacts for the Amplify CLI backend ex: ./amplify-synth-out/.
:param analytics_reporting: Include runtime versioning information in this Stack. Default: ``analyticsReporting`` setting of containing ``App``, or value of 'aws:cdk:version-reporting' context key
:param description: A description of the stack. Default: - No description.
:param env: The AWS environment (account/region) where this stack will be deployed. Set the ``region``/``account`` fields of ``env`` to either a concrete value to select the indicated environment (recommended for production stacks), or to the values of environment variables ``CDK_DEFAULT_REGION``/``CDK_DEFAULT_ACCOUNT`` to let the target environment depend on the AWS credentials/configuration that the CDK CLI is executed under (recommended for development stacks). If the ``Stack`` is instantiated inside a ``Stage``, any undefined ``region``/``account`` fields from ``env`` will default to the same field on the encompassing ``Stage``, if configured there. If either ``region`` or ``account`` are not set nor inherited from ``Stage``, the Stack will be considered "*environment-agnostic*"". Environment-agnostic stacks can be deployed to any environment but may not be able to take advantage of all features of the CDK. For example, they will not be able to use environmental context lookups such as ``ec2.Vpc.fromLookup`` and will not automatically translate Service Principals to the right format based on the environment's AWS partition, and other such enhancements. Default: - The environment of the containing ``Stage`` if available, otherwise create the stack will be environment-agnostic.
:param stack_name: Name to deploy the stack with. Default: - Derived from construct path.
:param synthesizer: Synthesis method to use while deploying this stack. Default: - ``DefaultStackSynthesizer`` if the ``@aws-cdk/core:newStyleStackSynthesis`` feature flag is set, ``LegacyStackSynthesizer`` otherwise.
:param tags: Stack tags that will be applied to all the taggable resources and the stack itself. Default: {}
:param termination_protection: Whether to enable termination protection for this stack. Default: false
'''
if __debug__:
def stub(
scope: constructs.Construct,
id: builtins.str,
*,
amplify_environment: builtins.str,
path: builtins.str,
analytics_reporting: typing.Optional[builtins.bool] = None,
description: typing.Optional[builtins.str] = None,
env: typing.Optional[typing.Union[aws_cdk.Environment, typing.Dict[str, typing.Any]]] = None,
stack_name: typing.Optional[builtins.str] = None,
synthesizer: typing.Optional[aws_cdk.IStackSynthesizer] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
termination_protection: typing.Optional[builtins.bool] = None,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
props = AmplifyExportedBackendProps(
amplify_environment=amplify_environment,
path=path,
analytics_reporting=analytics_reporting,
description=description,
env=env,
stack_name=stack_name,
synthesizer=synthesizer,
tags=tags,
termination_protection=termination_protection,
)
jsii.create(self.__class__, self, [scope, id, props])
@jsii.member(jsii_name="apiRestNestedStack")
def api_rest_nested_stack(
self,
resource_name: builtins.str,
) -> APIRestIncludedStack:
'''Use this to get rest api stack from the backend.
:param resource_name: -
        :return: the nested stack of type Rest API
:throws: {AmplifyCategoryNotFoundError} if the API Rest stack doesn't exist
'''
if __debug__:
def stub(resource_name: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument resource_name", value=resource_name, expected_type=type_hints["resource_name"])
return typing.cast(APIRestIncludedStack, jsii.invoke(self, "apiRestNestedStack", [resource_name]))
@jsii.member(jsii_name="authNestedStack")
def auth_nested_stack(self) -> "AuthIncludedNestedStack":
'''Method to get the auth stack.
:return: the nested stack of type {IAuthIncludeNestedStack}
:function: true
:method: true
:throws: {AmplifyCategoryNotFoundError} if the auth stack doesn't exist
'''
return typing.cast("AuthIncludedNestedStack", jsii.invoke(self, "authNestedStack", []))
@jsii.member(jsii_name="filterCategory")
def _filter_category(
self,
category: builtins.str,
service: typing.Optional[builtins.str] = None,
resource_name: typing.Optional[builtins.str] = None,
) -> typing.List["CategoryStackMapping"]:
'''
:param category: -
:param service: -
:param resource_name: -
'''
if __debug__:
def stub(
category: builtins.str,
service: typing.Optional[builtins.str] = None,
resource_name: typing.Optional[builtins.str] = None,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument category", value=category, expected_type=type_hints["category"])
check_type(argname="argument service", value=service, expected_type=type_hints["service"])
check_type(argname="argument resource_name", value=resource_name, expected_type=type_hints["resource_name"])
return typing.cast(typing.List["CategoryStackMapping"], jsii.invoke(self, "filterCategory", [category, service, resource_name]))
@jsii.member(jsii_name="findResourceForNestedStack")
def _find_resource_for_nested_stack(
self,
category: builtins.str,
service: builtins.str,
resource_name: typing.Optional[builtins.str] = None,
) -> "CategoryStackMapping":
'''
:param category: -
:param service: -
:param resource_name: -
'''
if __debug__:
def stub(
category: builtins.str,
service: builtins.str,
resource_name: typing.Optional[builtins.str] = None,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument category", value=category, expected_type=type_hints["category"])
check_type(argname="argument service", value=service, expected_type=type_hints["service"])
check_type(argname="argument resource_name", value=resource_name, expected_type=type_hints["resource_name"])
return typing.cast("CategoryStackMapping", jsii.invoke(self, "findResourceForNestedStack", [category, service, resource_name]))
@jsii.member(jsii_name="getExportedDataFromFile")
def _get_exported_data_from_file(self, file_name: builtins.str) -> typing.Any:
'''
:param file_name: -
'''
if __debug__:
def stub(file_name: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument file_name", value=file_name, expected_type=type_hints["file_name"])
return typing.cast(typing.Any, jsii.invoke(self, "getExportedDataFromFile", [file_name]))
@jsii.member(jsii_name="graphqlNestedStacks")
def graphql_nested_stacks(self) -> APIGraphQLIncludedNestedStack:
'''Use this to get the api graphql stack from the backend.
:return: the nested stack of type {IAPIGraphQLIncludeNestedStack}
:throws: {AmplifyCategoryNotFoundError} if the API graphql stack doesn't exist
'''
return typing.cast(APIGraphQLIncludedNestedStack, jsii.invoke(self, "graphqlNestedStacks", []))
@jsii.member(jsii_name="lambdaFunctionNestedStackByName")
def lambda_function_nested_stack_by_name(
self,
function_name: builtins.str,
) -> "LambdaFunctionIncludedNestedStack":
'''Use this to get a specific lambda function from the backend.
:param function_name: the function name to get from the nested stack.
:throws: {AmplifyCategoryNotFoundError} if the lambda function stack doesn't exist
'''
if __debug__:
def stub(function_name: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument function_name", value=function_name, expected_type=type_hints["function_name"])
return typing.cast("LambdaFunctionIncludedNestedStack", jsii.invoke(self, "lambdaFunctionNestedStackByName", [function_name]))
@jsii.member(jsii_name="lambdaFunctionNestedStacks")
def lambda_function_nested_stacks(
self,
) -> typing.List["LambdaFunctionIncludedNestedStack"]:
'''Use this to get all the lambda functions from the backend.
:throws: {AmplifyCategoryNotFoundError} if the no Lambda Function stacks are found
'''
return typing.cast(typing.List["LambdaFunctionIncludedNestedStack"], jsii.invoke(self, "lambdaFunctionNestedStacks", []))
@jsii.member(jsii_name="nestedStackByCategortService")
def nested_stack_by_categort_service(
self,
category: builtins.str,
service: builtins.str,
) -> typing.List[aws_cdk.cloudformation_include.IncludedNestedStack]:
'''
:param category: -
:param service: -
'''
if __debug__:
def stub(category: builtins.str, service: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument category", value=category, expected_type=type_hints["category"])
check_type(argname="argument service", value=service, expected_type=type_hints["service"])
return typing.cast(typing.List[aws_cdk.cloudformation_include.IncludedNestedStack], jsii.invoke(self, "nestedStackByCategortService", [category, service]))
@jsii.member(jsii_name="nestedStacksByCategory")
def nested_stacks_by_category(
self,
category: builtins.str,
resource_name: typing.Optional[builtins.str] = None,
) -> typing.List[aws_cdk.cloudformation_include.IncludedNestedStack]:
'''Returns the stacks defined in the backend.
:param category: Categories defined in Amplify CLI like function, api, auth etc.
:param resource_name: -
:default: is undefined
'''
if __debug__:
def stub(
category: builtins.str,
resource_name: typing.Optional[builtins.str] = None,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument category", value=category, expected_type=type_hints["category"])
check_type(argname="argument resource_name", value=resource_name, expected_type=type_hints["resource_name"])
return typing.cast(typing.List[aws_cdk.cloudformation_include.IncludedNestedStack], jsii.invoke(self, "nestedStacksByCategory", [category, resource_name]))
@jsii.member(jsii_name="transformTemplateFile")
def _transform_template_file(
self,
cfn_include_props: typing.Union[aws_cdk.cloudformation_include.CfnIncludeProps, typing.Dict[str, typing.Any]],
export_path: builtins.str,
) -> aws_cdk.cloudformation_include.CfnIncludeProps:
'''
:param cfn_include_props: -
:param export_path: -
'''
if __debug__:
def stub(
cfn_include_props: typing.Union[aws_cdk.cloudformation_include.CfnIncludeProps, typing.Dict[str, typing.Any]],
export_path: builtins.str,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument cfn_include_props", value=cfn_include_props, expected_type=type_hints["cfn_include_props"])
check_type(argname="argument export_path", value=export_path, expected_type=type_hints["export_path"])
return typing.cast(aws_cdk.cloudformation_include.CfnIncludeProps, jsii.invoke(self, "transformTemplateFile", [cfn_include_props, export_path]))
@builtins.property
@jsii.member(jsii_name="categoryStackMappings")
def _category_stack_mappings(self) -> typing.List["CategoryStackMapping"]:
return typing.cast(typing.List["CategoryStackMapping"], jsii.get(self, "categoryStackMappings"))
@_category_stack_mappings.setter
def _category_stack_mappings(
self,
value: typing.List["CategoryStackMapping"],
) -> None:
if __debug__:
def stub(value: typing.List["CategoryStackMapping"]) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "categoryStackMappings", value)
@builtins.property
@jsii.member(jsii_name="cfnInclude")
def cfn_include(self) -> aws_cdk.cloudformation_include.CfnInclude:
'''cfnInclude of the Amplify backend.'''
return typing.cast(aws_cdk.cloudformation_include.CfnInclude, jsii.get(self, "cfnInclude"))
@cfn_include.setter
def cfn_include(self, value: aws_cdk.cloudformation_include.CfnInclude) -> None:
if __debug__:
def stub(value: aws_cdk.cloudformation_include.CfnInclude) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "cfnInclude", value)
@builtins.property
@jsii.member(jsii_name="exportBackendManifest")
def _export_backend_manifest(self) -> "ExportManifest":
return typing.cast("ExportManifest", jsii.get(self, "exportBackendManifest"))
@_export_backend_manifest.setter
def _export_backend_manifest(self, value: "ExportManifest") -> None:
if __debug__:
def stub(value: "ExportManifest") -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "exportBackendManifest", value)
@builtins.property
@jsii.member(jsii_name="exportPath")
def _export_path(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "exportPath"))
@_export_path.setter
def _export_path(self, value: builtins.str) -> None:
if __debug__:
def stub(value: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "exportPath", value)
@builtins.property
@jsii.member(jsii_name="exportTags")
def _export_tags(self) -> typing.List["ExportTag"]:
return typing.cast(typing.List["ExportTag"], jsii.get(self, "exportTags"))
@_export_tags.setter
def _export_tags(self, value: typing.List["ExportTag"]) -> None:
if __debug__:
def stub(value: typing.List["ExportTag"]) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "exportTags", value)
@builtins.property
@jsii.member(jsii_name="rootStack")
def root_stack(self) -> aws_cdk.Stack:
'''The root stack created.'''
return typing.cast(aws_cdk.Stack, jsii.get(self, "rootStack"))
@root_stack.setter
def root_stack(self, value: aws_cdk.Stack) -> None:
if __debug__:
def stub(value: aws_cdk.Stack) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "rootStack", value)
@builtins.property
@jsii.member(jsii_name="auxiliaryDeployment")
def _auxiliary_deployment(
self,
) -> typing.Optional[aws_cdk.aws_s3_deployment.BucketDeployment]:
return typing.cast(typing.Optional[aws_cdk.aws_s3_deployment.BucketDeployment], jsii.get(self, "auxiliaryDeployment"))
@_auxiliary_deployment.setter
def _auxiliary_deployment(
self,
value: typing.Optional[aws_cdk.aws_s3_deployment.BucketDeployment],
) -> None:
if __debug__:
def stub(
value: typing.Optional[aws_cdk.aws_s3_deployment.BucketDeployment],
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "auxiliaryDeployment", value)
@builtins.property
@jsii.member(jsii_name="env")
def _env(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "env"))
@_env.setter
def _env(self, value: typing.Optional[builtins.str]) -> None:
if __debug__:
def stub(value: typing.Optional[builtins.str]) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "env", value)
@jsii.data_type(
jsii_type="@aws-amplify/cdk-exported-backend.AmplifyExportedBackendProps",
jsii_struct_bases=[aws_cdk.StackProps],
name_mapping={
"analytics_reporting": "analyticsReporting",
"description": "description",
"env": "env",
"stack_name": "stackName",
"synthesizer": "synthesizer",
"tags": "tags",
"termination_protection": "terminationProtection",
"amplify_environment": "amplifyEnvironment",
"path": "path",
},
)
class AmplifyExportedBackendProps(aws_cdk.StackProps):
def __init__(
self,
*,
analytics_reporting: typing.Optional[builtins.bool] = None,
description: typing.Optional[builtins.str] = None,
env: typing.Optional[typing.Union[aws_cdk.Environment, typing.Dict[str, typing.Any]]] = None,
stack_name: typing.Optional[builtins.str] = None,
synthesizer: typing.Optional[aws_cdk.IStackSynthesizer] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
termination_protection: typing.Optional[builtins.bool] = None,
amplify_environment: builtins.str,
path: builtins.str,
) -> None:
'''
:param analytics_reporting: Include runtime versioning information in this Stack. Default: ``analyticsReporting`` setting of containing ``App``, or value of 'aws:cdk:version-reporting' context key
:param description: A description of the stack. Default: - No description.
:param env: The AWS environment (account/region) where this stack will be deployed. Set the ``region``/``account`` fields of ``env`` to either a concrete value to select the indicated environment (recommended for production stacks), or to the values of environment variables ``CDK_DEFAULT_REGION``/``CDK_DEFAULT_ACCOUNT`` to let the target environment depend on the AWS credentials/configuration that the CDK CLI is executed under (recommended for development stacks). If the ``Stack`` is instantiated inside a ``Stage``, any undefined ``region``/``account`` fields from ``env`` will default to the same field on the encompassing ``Stage``, if configured there. If either ``region`` or ``account`` are not set nor inherited from ``Stage``, the Stack will be considered "*environment-agnostic*"". Environment-agnostic stacks can be deployed to any environment but may not be able to take advantage of all features of the CDK. For example, they will not be able to use environmental context lookups such as ``ec2.Vpc.fromLookup`` and will not automatically translate Service Principals to the right format based on the environment's AWS partition, and other such enhancements. Default: - The environment of the containing ``Stage`` if available, otherwise create the stack will be environment-agnostic.
:param stack_name: Name to deploy the stack with. Default: - Derived from construct path.
:param synthesizer: Synthesis method to use while deploying this stack. Default: - ``DefaultStackSynthesizer`` if the ``@aws-cdk/core:newStyleStackSynthesis`` feature flag is set, ``LegacyStackSynthesizer`` otherwise.
:param tags: Stack tags that will be applied to all the taggable resources and the stack itself. Default: {}
:param termination_protection: Whether to enable termination protection for this stack. Default: false
        :param amplify_environment: The Amplify CLI environment to deploy to. The Amplify backend requires a stage to deploy. Default: is 'dev'
:param path: The path to the exported folder that contains the artifacts for the Amplify CLI backend ex: ./amplify-synth-out/.
'''
if isinstance(env, dict):
env = aws_cdk.Environment(**env)
if __debug__:
def stub(
*,
analytics_reporting: typing.Optional[builtins.bool] = None,
description: typing.Optional[builtins.str] = None,
env: typing.Optional[typing.Union[aws_cdk.Environment, typing.Dict[str, typing.Any]]] = None,
stack_name: typing.Optional[builtins.str] = None,
synthesizer: typing.Optional[aws_cdk.IStackSynthesizer] = None,
tags: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
termination_protection: typing.Optional[builtins.bool] = None,
amplify_environment: builtins.str,
path: builtins.str,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument analytics_reporting", value=analytics_reporting, expected_type=type_hints["analytics_reporting"])
check_type(argname="argument description", value=description, expected_type=type_hints["description"])
check_type(argname="argument env", value=env, expected_type=type_hints["env"])
check_type(argname="argument stack_name", value=stack_name, expected_type=type_hints["stack_name"])
check_type(argname="argument synthesizer", value=synthesizer, expected_type=type_hints["synthesizer"])
check_type(argname="argument tags", value=tags, expected_type=type_hints["tags"])
check_type(argname="argument termination_protection", value=termination_protection, expected_type=type_hints["termination_protection"])
check_type(argname="argument amplify_environment", value=amplify_environment, expected_type=type_hints["amplify_environment"])
check_type(argname="argument path", value=path, expected_type=type_hints["path"])
self._values: typing.Dict[str, typing.Any] = {
"amplify_environment": amplify_environment,
"path": path,
}
if analytics_reporting is not None:
self._values["analytics_reporting"] = analytics_reporting
if description is not None:
self._values["description"] = description
if env is not None:
self._values["env"] = env
if stack_name is not None:
self._values["stack_name"] = stack_name
if synthesizer is not None:
self._values["synthesizer"] = synthesizer
if tags is not None:
self._values["tags"] = tags
if termination_protection is not None:
self._values["termination_protection"] = termination_protection
@builtins.property
def analytics_reporting(self) -> typing.Optional[builtins.bool]:
'''Include runtime versioning information in this Stack.
:default:
``analyticsReporting`` setting of containing ``App``, or value of
'aws:cdk:version-reporting' context key
'''
result = self._values.get("analytics_reporting")
return typing.cast(typing.Optional[builtins.bool], result)
@builtins.property
def description(self) -> typing.Optional[builtins.str]:
'''A description of the stack.
:default: - No description.
'''
result = self._values.get("description")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def env(self) -> typing.Optional[aws_cdk.Environment]:
'''The AWS environment (account/region) where this stack will be deployed.
Set the ``region``/``account`` fields of ``env`` to either a concrete value to
select the indicated environment (recommended for production stacks), or to
the values of environment variables
``CDK_DEFAULT_REGION``/``CDK_DEFAULT_ACCOUNT`` to let the target environment
depend on the AWS credentials/configuration that the CDK CLI is executed
under (recommended for development stacks).
If the ``Stack`` is instantiated inside a ``Stage``, any undefined
``region``/``account`` fields from ``env`` will default to the same field on the
encompassing ``Stage``, if configured there.
If either ``region`` or ``account`` are not set nor inherited from ``Stage``, the
Stack will be considered "*environment-agnostic*". Environment-agnostic
stacks can be deployed to any environment but may not be able to take
advantage of all features of the CDK. For example, they will not be able to
use environmental context lookups such as ``ec2.Vpc.fromLookup`` and will not
automatically translate Service Principals to the right format based on the
environment's AWS partition, and other such enhancements.
:default:
- The environment of the containing ``Stage`` if available,
otherwise the stack will be environment-agnostic.
Example::
// Use a concrete account and region to deploy this stack to:
// `.account` and `.region` will simply return these values.
new Stack(app, 'Stack1', {
env: {
account: '123456789012',
region: 'us-east-1'
},
});
// Use the CLI's current credentials to determine the target environment:
// `.account` and `.region` will reflect the account+region the CLI
// is configured to use (based on the user CLI credentials)
new Stack(app, 'Stack2', {
env: {
account: process.env.CDK_DEFAULT_ACCOUNT,
region: process.env.CDK_DEFAULT_REGION
},
});
// Define multiple stacks in a Stage associated with an environment
const myStage = new Stage(app, 'MyStage', {
env: {
account: '123456789012',
region: 'us-east-1'
}
});
// both of these stacks will use the stage's account/region:
// `.account` and `.region` will resolve to the concrete values as above
new MyStack(myStage, 'Stack1');
new YourStack(myStage, 'Stack2');
// Define an environment-agnostic stack:
// `.account` and `.region` will resolve to `{ "Ref": "AWS::AccountId" }` and `{ "Ref": "AWS::Region" }` respectively.
// which will only resolve to actual values during CloudFormation deployment.
new MyStack(app, 'Stack1');
'''
result = self._values.get("env")
return typing.cast(typing.Optional[aws_cdk.Environment], result)
@builtins.property
def stack_name(self) -> typing.Optional[builtins.str]:
'''Name to deploy the stack with.
:default: - Derived from construct path.
'''
result = self._values.get("stack_name")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def synthesizer(self) -> typing.Optional[aws_cdk.IStackSynthesizer]:
'''Synthesis method to use while deploying this stack.
:default:
- ``DefaultStackSynthesizer`` if the ``@aws-cdk/core:newStyleStackSynthesis`` feature flag
is set, ``LegacyStackSynthesizer`` otherwise.
'''
result = self._values.get("synthesizer")
return typing.cast(typing.Optional[aws_cdk.IStackSynthesizer], result)
@builtins.property
def tags(self) -> typing.Optional[typing.Mapping[builtins.str, builtins.str]]:
'''Stack tags that will be applied to all the taggable resources and the stack itself.
:default: {}
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.Mapping[builtins.str, builtins.str]], result)
@builtins.property
def termination_protection(self) -> typing.Optional[builtins.bool]:
'''Whether to enable termination protection for this stack.
:default: false
'''
result = self._values.get("termination_protection")
return typing.cast(typing.Optional[builtins.bool], result)
@builtins.property
def amplify_environment(self) -> builtins.str:
'''The Amplify CLI environment to deploy to. The Amplify backend requires a stage to deploy.
:default: 'dev'
'''
result = self._values.get("amplify_environment")
assert result is not None, "Required property 'amplify_environment' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def path(self) -> builtins.str:
'''The path to the exported folder that contains the artifacts for the Amplify CLI backend, e.g. ``./amplify-synth-out/``.'''
result = self._values.get("path")
assert result is not None, "Required property 'path' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AmplifyExportedBackendProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
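# Illustrative sketch (not part of the generated bindings): assembling these
# props by hand. Assumes an exported Amplify backend at "./amplify-synth-out/"
# and the "dev" environment; both values here are placeholders.
def _example_amplify_exported_backend_props() -> AmplifyExportedBackendProps:
    # Only "amplify_environment" and "path" are required; the remaining
    # fields are standard aws_cdk stack props.
    return AmplifyExportedBackendProps(
        amplify_environment="dev",
        path="./amplify-synth-out/",
        stack_name="my-amplify-backend",
        termination_protection=False,
    )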
class AuthIncludedNestedStack(
metaclass=jsii.JSIIMeta,
jsii_type="@aws-amplify/cdk-exported-backend.AuthIncludedNestedStack",
):
def __init__(
self,
*,
included_template: aws_cdk.cloudformation_include.CfnInclude,
stack: aws_cdk.NestedStack,
) -> None:
'''
:param included_template: The CfnInclude that represents the template, which can be used to access Resources and other template elements.
:param stack: The NestedStack object which represents the scope of the template.
'''
included_stack = aws_cdk.cloudformation_include.IncludedNestedStack(
included_template=included_template, stack=stack
)
jsii.create(self.__class__, self, [included_stack])
@jsii.member(jsii_name="getResourceConstruct")
def get_resource_construct(self, logical_id: builtins.str) -> typing.Any:
'''
:param logical_id: -
'''
if __debug__:
def stub(logical_id: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument logical_id", value=logical_id, expected_type=type_hints["logical_id"])
return typing.cast(typing.Any, jsii.invoke(self, "getResourceConstruct", [logical_id]))
@jsii.member(jsii_name="hostedUiProviderCredentials")
def hosted_ui_provider_credentials(
self,
credentials: typing.Sequence[typing.Union["ProviderCredential", typing.Dict[str, typing.Any]]],
) -> None:
'''
:param credentials: -
'''
if __debug__:
def stub(
credentials: typing.Sequence[typing.Union["ProviderCredential", typing.Dict[str, typing.Any]]],
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument credentials", value=credentials, expected_type=type_hints["credentials"])
return typing.cast(None, jsii.invoke(self, "hostedUiProviderCredentials", [credentials]))
@jsii.member(jsii_name="identityPool")
def identity_pool(self) -> aws_cdk.aws_cognito.CfnIdentityPool:
'''
:return: Cognito IdentityPool {CfnIdentityPool} of the auth stack
'''
return typing.cast(aws_cdk.aws_cognito.CfnIdentityPool, jsii.invoke(self, "identityPool", []))
@jsii.member(jsii_name="userPool")
def user_pool(self) -> aws_cdk.aws_cognito.CfnUserPool:
'''
:return: Cognito UserPool {CfnUserPool} of the auth stack
'''
return typing.cast(aws_cdk.aws_cognito.CfnUserPool, jsii.invoke(self, "userPool", []))
@builtins.property
@jsii.member(jsii_name="includedTemplate")
def included_template(self) -> aws_cdk.cloudformation_include.CfnInclude:
return typing.cast(aws_cdk.cloudformation_include.CfnInclude, jsii.get(self, "includedTemplate"))
@included_template.setter
def included_template(
self,
value: aws_cdk.cloudformation_include.CfnInclude,
) -> None:
if __debug__:
def stub(value: aws_cdk.cloudformation_include.CfnInclude) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includedTemplate", value)
@builtins.property
@jsii.member(jsii_name="stack")
def stack(self) -> aws_cdk.NestedStack:
return typing.cast(aws_cdk.NestedStack, jsii.get(self, "stack"))
@stack.setter
def stack(self, value: aws_cdk.NestedStack) -> None:
if __debug__:
def stub(value: aws_cdk.NestedStack) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "stack", value)
@jsii.data_type(
jsii_type="@aws-amplify/cdk-exported-backend.CategoryStackMapping",
jsii_struct_bases=[],
name_mapping={
"category": "category",
"resource_name": "resourceName",
"service": "service",
},
)
class CategoryStackMapping:
def __init__(
self,
*,
category: builtins.str,
resource_name: builtins.str,
service: builtins.str,
) -> None:
'''
:param category:
:param resource_name:
:param service:
'''
if __debug__:
def stub(
*,
category: builtins.str,
resource_name: builtins.str,
service: builtins.str,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument category", value=category, expected_type=type_hints["category"])
check_type(argname="argument resource_name", value=resource_name, expected_type=type_hints["resource_name"])
check_type(argname="argument service", value=service, expected_type=type_hints["service"])
self._values: typing.Dict[str, typing.Any] = {
"category": category,
"resource_name": resource_name,
"service": service,
}
@builtins.property
def category(self) -> builtins.str:
result = self._values.get("category")
assert result is not None, "Required property 'category' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def resource_name(self) -> builtins.str:
result = self._values.get("resource_name")
assert result is not None, "Required property 'resource_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def service(self) -> builtins.str:
result = self._values.get("service")
assert result is not None, "Required property 'service' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CategoryStackMapping(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-amplify/cdk-exported-backend.ExportManifest",
jsii_struct_bases=[],
name_mapping={"props": "props", "stack_name": "stackName"},
)
class ExportManifest:
def __init__(
self,
*,
props: typing.Union[aws_cdk.cloudformation_include.CfnIncludeProps, typing.Dict[str, typing.Any]],
stack_name: builtins.str,
) -> None:
'''
:param props:
:param stack_name:
'''
if isinstance(props, dict):
props = aws_cdk.cloudformation_include.CfnIncludeProps(**props)
if __debug__:
def stub(
*,
props: typing.Union[aws_cdk.cloudformation_include.CfnIncludeProps, typing.Dict[str, typing.Any]],
stack_name: builtins.str,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument props", value=props, expected_type=type_hints["props"])
check_type(argname="argument stack_name", value=stack_name, expected_type=type_hints["stack_name"])
self._values: typing.Dict[str, typing.Any] = {
"props": props,
"stack_name": stack_name,
}
@builtins.property
def props(self) -> aws_cdk.cloudformation_include.CfnIncludeProps:
result = self._values.get("props")
assert result is not None, "Required property 'props' is missing"
return typing.cast(aws_cdk.cloudformation_include.CfnIncludeProps, result)
@builtins.property
def stack_name(self) -> builtins.str:
result = self._values.get("stack_name")
assert result is not None, "Required property 'stack_name' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ExportManifest(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@aws-amplify/cdk-exported-backend.ExportTag",
jsii_struct_bases=[],
name_mapping={"key": "key", "value": "value"},
)
class ExportTag:
def __init__(self, *, key: builtins.str, value: builtins.str) -> None:
'''
:param key:
:param value:
'''
if __debug__:
def stub(*, key: builtins.str, value: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument key", value=key, expected_type=type_hints["key"])
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
self._values: typing.Dict[str, typing.Any] = {
"key": key,
"value": value,
}
@builtins.property
def key(self) -> builtins.str:
result = self._values.get("key")
assert result is not None, "Required property 'key' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def value(self) -> builtins.str:
result = self._values.get("value")
assert result is not None, "Required property 'value' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ExportTag(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class LambdaFunctionIncludedNestedStack(
metaclass=jsii.JSIIMeta,
jsii_type="@aws-amplify/cdk-exported-backend.LambdaFunctionIncludedNestedStack",
):
def __init__(
self,
*,
included_template: aws_cdk.cloudformation_include.CfnInclude,
stack: aws_cdk.NestedStack,
) -> None:
'''
:param included_template: The CfnInclude that represents the template, which can be used to access Resources and other template elements.
:param stack: The NestedStack object which represents the scope of the template.
'''
included_stack = aws_cdk.cloudformation_include.IncludedNestedStack(
included_template=included_template, stack=stack
)
jsii.create(self.__class__, self, [included_stack])
@jsii.member(jsii_name="getResourceConstruct")
def get_resource_construct(self, logical_id: builtins.str) -> typing.Any:
'''
:param logical_id: -
'''
if __debug__:
def stub(logical_id: builtins.str) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument logical_id", value=logical_id, expected_type=type_hints["logical_id"])
return typing.cast(typing.Any, jsii.invoke(self, "getResourceConstruct", [logical_id]))
@jsii.member(jsii_name="lambdaFunction")
def lambda_function(self) -> aws_cdk.aws_lambda.CfnFunction:
return typing.cast(aws_cdk.aws_lambda.CfnFunction, jsii.invoke(self, "lambdaFunction", []))
@builtins.property
@jsii.member(jsii_name="includedTemplate")
def included_template(self) -> aws_cdk.cloudformation_include.CfnInclude:
return typing.cast(aws_cdk.cloudformation_include.CfnInclude, jsii.get(self, "includedTemplate"))
@included_template.setter
def included_template(
self,
value: aws_cdk.cloudformation_include.CfnInclude,
) -> None:
if __debug__:
def stub(value: aws_cdk.cloudformation_include.CfnInclude) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includedTemplate", value)
@builtins.property
@jsii.member(jsii_name="stack")
def stack(self) -> aws_cdk.NestedStack:
return typing.cast(aws_cdk.NestedStack, jsii.get(self, "stack"))
@stack.setter
def stack(self, value: aws_cdk.NestedStack) -> None:
if __debug__:
def stub(value: aws_cdk.NestedStack) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "stack", value)
@jsii.data_type(
jsii_type="@aws-amplify/cdk-exported-backend.ProviderCredential",
jsii_struct_bases=[],
name_mapping={
"client_id": "clientId",
"client_secret": "clientSecret",
"provider_name": "providerName",
},
)
class ProviderCredential:
def __init__(
self,
*,
client_id: builtins.str,
client_secret: builtins.str,
provider_name: builtins.str,
) -> None:
'''
:param client_id:
:param client_secret:
:param provider_name:
'''
if __debug__:
def stub(
*,
client_id: builtins.str,
client_secret: builtins.str,
provider_name: builtins.str,
) -> None:
...
type_hints = typing.get_type_hints(stub)
check_type(argname="argument client_id", value=client_id, expected_type=type_hints["client_id"])
check_type(argname="argument client_secret", value=client_secret, expected_type=type_hints["client_secret"])
check_type(argname="argument provider_name", value=provider_name, expected_type=type_hints["provider_name"])
self._values: typing.Dict[str, typing.Any] = {
"client_id": client_id,
"client_secret": client_secret,
"provider_name": provider_name,
}
@builtins.property
def client_id(self) -> builtins.str:
result = self._values.get("client_id")
assert result is not None, "Required property 'client_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def client_secret(self) -> builtins.str:
result = self._values.get("client_secret")
assert result is not None, "Required property 'client_secret' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def provider_name(self) -> builtins.str:
result = self._values.get("provider_name")
assert result is not None, "Required property 'provider_name' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ProviderCredential(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
__all__ = [
"APIGraphQLIncludedNestedStack",
"APIRestIncludedStack",
"AmplifyExportedBackend",
"AmplifyExportedBackendProps",
"AuthIncludedNestedStack",
"CategoryStackMapping",
"ExportManifest",
"ExportTag",
"LambdaFunctionIncludedNestedStack",
"ProviderCredential",
]
publication.publish()
/antchain_ak_d55db67c8e5a4e799ff51ac9e5bcede3-1.0.0-py3-none-any.whl/antchain_sdk_ak_d55db67c8e5a4e799ff51ac9e5bcede3/models.py
from Tea.model import TeaModel
from typing import List
class Config(TeaModel):
"""
Model for initing client
"""
def __init__(
self,
access_key_id: str = None,
access_key_secret: str = None,
security_token: str = None,
protocol: str = None,
read_timeout: int = None,
connect_timeout: int = None,
http_proxy: str = None,
https_proxy: str = None,
endpoint: str = None,
no_proxy: str = None,
max_idle_conns: int = None,
user_agent: str = None,
socks_5proxy: str = None,
socks_5net_work: str = None,
max_idle_time_millis: int = None,
keep_alive_duration_millis: int = None,
max_requests: int = None,
max_requests_per_host: int = None,
):
# accesskey id
self.access_key_id = access_key_id
# accesskey secret
self.access_key_secret = access_key_secret
# security token
self.security_token = security_token
# http protocol
self.protocol = protocol
# read timeout
self.read_timeout = read_timeout
# connect timeout
self.connect_timeout = connect_timeout
# http proxy
self.http_proxy = http_proxy
# https proxy
self.https_proxy = https_proxy
# endpoint
self.endpoint = endpoint
# proxy white list
self.no_proxy = no_proxy
# max idle conns
self.max_idle_conns = max_idle_conns
# user agent
self.user_agent = user_agent
# socks5 proxy
self.socks_5proxy = socks_5proxy
# socks5 network
self.socks_5net_work = socks_5net_work
# max idle time of a persistent (keep-alive) connection, in milliseconds
self.max_idle_time_millis = max_idle_time_millis
# max lifetime of a persistent (keep-alive) connection, in milliseconds
self.keep_alive_duration_millis = keep_alive_duration_millis
# max number of requests (overall cap on persistent connections)
self.max_requests = max_requests
# max number of requests per target host (per-hostname cap on persistent connections)
self.max_requests_per_host = max_requests_per_host
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.access_key_id is not None:
result['accessKeyId'] = self.access_key_id
if self.access_key_secret is not None:
result['accessKeySecret'] = self.access_key_secret
if self.security_token is not None:
result['securityToken'] = self.security_token
if self.protocol is not None:
result['protocol'] = self.protocol
if self.read_timeout is not None:
result['readTimeout'] = self.read_timeout
if self.connect_timeout is not None:
result['connectTimeout'] = self.connect_timeout
if self.http_proxy is not None:
result['httpProxy'] = self.http_proxy
if self.https_proxy is not None:
result['httpsProxy'] = self.https_proxy
if self.endpoint is not None:
result['endpoint'] = self.endpoint
if self.no_proxy is not None:
result['noProxy'] = self.no_proxy
if self.max_idle_conns is not None:
result['maxIdleConns'] = self.max_idle_conns
if self.user_agent is not None:
result['userAgent'] = self.user_agent
if self.socks_5proxy is not None:
result['socks5Proxy'] = self.socks_5proxy
if self.socks_5net_work is not None:
result['socks5NetWork'] = self.socks_5net_work
if self.max_idle_time_millis is not None:
result['maxIdleTimeMillis'] = self.max_idle_time_millis
if self.keep_alive_duration_millis is not None:
result['keepAliveDurationMillis'] = self.keep_alive_duration_millis
if self.max_requests is not None:
result['maxRequests'] = self.max_requests
if self.max_requests_per_host is not None:
result['maxRequestsPerHost'] = self.max_requests_per_host
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('accessKeyId') is not None:
self.access_key_id = m.get('accessKeyId')
if m.get('accessKeySecret') is not None:
self.access_key_secret = m.get('accessKeySecret')
if m.get('securityToken') is not None:
self.security_token = m.get('securityToken')
if m.get('protocol') is not None:
self.protocol = m.get('protocol')
if m.get('readTimeout') is not None:
self.read_timeout = m.get('readTimeout')
if m.get('connectTimeout') is not None:
self.connect_timeout = m.get('connectTimeout')
if m.get('httpProxy') is not None:
self.http_proxy = m.get('httpProxy')
if m.get('httpsProxy') is not None:
self.https_proxy = m.get('httpsProxy')
if m.get('endpoint') is not None:
self.endpoint = m.get('endpoint')
if m.get('noProxy') is not None:
self.no_proxy = m.get('noProxy')
if m.get('maxIdleConns') is not None:
self.max_idle_conns = m.get('maxIdleConns')
if m.get('userAgent') is not None:
self.user_agent = m.get('userAgent')
if m.get('socks5Proxy') is not None:
self.socks_5proxy = m.get('socks5Proxy')
if m.get('socks5NetWork') is not None:
self.socks_5net_work = m.get('socks5NetWork')
if m.get('maxIdleTimeMillis') is not None:
self.max_idle_time_millis = m.get('maxIdleTimeMillis')
if m.get('keepAliveDurationMillis') is not None:
self.keep_alive_duration_millis = m.get('keepAliveDurationMillis')
if m.get('maxRequests') is not None:
self.max_requests = m.get('maxRequests')
if m.get('maxRequestsPerHost') is not None:
self.max_requests_per_host = m.get('maxRequestsPerHost')
return self
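# Illustrative sketch (not part of the generated SDK): building a Config and
# round-tripping it through to_map()/from_map(). Credential values and the
# endpoint are placeholders.
def _example_config() -> Config:
    config = Config()
    config.access_key_id = 'my-access-key-id'          # placeholder
    config.access_key_secret = 'my-access-key-secret'  # placeholder
    config.endpoint = 'example.endpoint.antgroup.com'  # placeholder
    # to_map() emits camelCase keys ('accessKeyId', ...); from_map() reads
    # the same keys back into snake_case attributes.
    clone = Config().from_map(config.to_map())
    assert clone.access_key_id == config.access_key_id
    return clone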
class ALiYunPagination(TeaModel):
def __init__(
self,
page_size: int = None,
page_number: int = None,
total_count: int = None,
):
# page size
self.page_size = page_size
# page number
self.page_number = page_number
# total count
self.total_count = total_count
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.page_size is not None:
result['page_size'] = self.page_size
if self.page_number is not None:
result['page_number'] = self.page_number
if self.total_count is not None:
result['total_count'] = self.total_count
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('page_size') is not None:
self.page_size = m.get('page_size')
if m.get('page_number') is not None:
self.page_number = m.get('page_number')
if m.get('total_count') is not None:
self.total_count = m.get('total_count')
return self
class ALiYunContractProject(TeaModel):
def __init__(
self,
consortium_id: str = None,
create_time: int = None,
project_description: str = None,
project_id: str = None,
project_name: str = None,
project_version: str = None,
update_time: int = None,
):
# consortium_id
self.consortium_id = consortium_id
# create_time
self.create_time = create_time
# project_description
self.project_description = project_description
# project_id
self.project_id = project_id
# project_name
self.project_name = project_name
# project_version
self.project_version = project_version
# update_time
self.update_time = update_time
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.create_time is not None:
result['create_time'] = self.create_time
if self.project_description is not None:
result['project_description'] = self.project_description
if self.project_id is not None:
result['project_id'] = self.project_id
if self.project_name is not None:
result['project_name'] = self.project_name
if self.project_version is not None:
result['project_version'] = self.project_version
if self.update_time is not None:
result['update_time'] = self.update_time
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('create_time') is not None:
self.create_time = m.get('create_time')
if m.get('project_description') is not None:
self.project_description = m.get('project_description')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('project_name') is not None:
self.project_name = m.get('project_name')
if m.get('project_version') is not None:
self.project_version = m.get('project_version')
if m.get('update_time') is not None:
self.update_time = m.get('update_time')
return self
class ALiYunContractProjectDuplicate(TeaModel):
def __init__(
self,
consortium_id: str = None,
description: str = None,
gmt_create: int = None,
gmt_modified: int = None,
id: str = None,
name: str = None,
version: str = None,
):
# consortium_id
self.consortium_id = consortium_id
# description
self.description = description
# gmt_create
self.gmt_create = gmt_create
# gmt_modified
self.gmt_modified = gmt_modified
# id
self.id = id
# name
self.name = name
# version
self.version = version
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.description is not None:
result['description'] = self.description
if self.gmt_create is not None:
result['gmt_create'] = self.gmt_create
if self.gmt_modified is not None:
result['gmt_modified'] = self.gmt_modified
if self.id is not None:
result['id'] = self.id
if self.name is not None:
result['name'] = self.name
if self.version is not None:
result['version'] = self.version
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('description') is not None:
self.description = m.get('description')
if m.get('gmt_create') is not None:
self.gmt_create = m.get('gmt_create')
if m.get('gmt_modified') is not None:
self.gmt_modified = m.get('gmt_modified')
if m.get('id') is not None:
self.id = m.get('id')
if m.get('name') is not None:
self.name = m.get('name')
if m.get('version') is not None:
self.version = m.get('version')
return self
class ALiYunContractProjects(TeaModel):
def __init__(
self,
contract_projects: List[ALiYunContractProject] = None,
pagination: ALiYunPagination = None,
):
# contract_projects
self.contract_projects = contract_projects
# pagination
self.pagination = pagination
def validate(self):
if self.contract_projects:
for k in self.contract_projects:
if k:
k.validate()
if self.pagination:
self.pagination.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
result['contract_projects'] = []
if self.contract_projects is not None:
for k in self.contract_projects:
result['contract_projects'].append(k.to_map() if k else None)
if self.pagination is not None:
result['pagination'] = self.pagination.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
self.contract_projects = []
if m.get('contract_projects') is not None:
for k in m.get('contract_projects'):
temp_model = ALiYunContractProject()
self.contract_projects.append(temp_model.from_map(k))
if m.get('pagination') is not None:
temp_model = ALiYunPagination()
self.pagination = temp_model.from_map(m['pagination'])
return self
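# Illustrative sketch (not part of the generated SDK): from_map() above
# rebuilds the nested child models (ALiYunContractProject, ALiYunPagination)
# from plain dicts, so a decoded JSON payload hydrates back into typed models.
def _example_contract_projects_from_map() -> ALiYunContractProjects:
    payload = {
        'contract_projects': [
            {'project_id': 'p-123', 'project_name': 'demo', 'consortium_id': 'c-1'},
        ],
        'pagination': {'page_size': 10, 'page_number': 1, 'total_count': 1},
    }
    projects = ALiYunContractProjects().from_map(payload)
    # Children are typed models again, not dicts.
    assert projects.pagination.total_count == 1
    assert projects.contract_projects[0].project_id == 'p-123'
    return projects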
class CloneBaasChainContractProjectRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
consortium_id: str = None,
project_description: str = None,
project_id: str = None,
project_name: str = None,
project_version: str = None,
region_id: str = None,
):
# authorization token for OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# consortium_id
self.consortium_id = consortium_id
# project_description
self.project_description = project_description
# project_id
self.project_id = project_id
# project_name
self.project_name = project_name
# project_version
self.project_version = project_version
# region_id
self.region_id = region_id
def validate(self):
self.validate_required(self.consortium_id, 'consortium_id')
self.validate_required(self.project_description, 'project_description')
self.validate_required(self.project_id, 'project_id')
self.validate_required(self.project_name, 'project_name')
self.validate_required(self.project_version, 'project_version')
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.project_description is not None:
result['project_description'] = self.project_description
if self.project_id is not None:
result['project_id'] = self.project_id
if self.project_name is not None:
result['project_name'] = self.project_name
if self.project_version is not None:
result['project_version'] = self.project_version
if self.region_id is not None:
result['region_id'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('project_description') is not None:
self.project_description = m.get('project_description')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('project_name') is not None:
self.project_name = m.get('project_name')
if m.get('project_version') is not None:
self.project_version = m.get('project_version')
if m.get('region_id') is not None:
self.region_id = m.get('region_id')
return self
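# Illustrative sketch (not part of the generated SDK): validate() above
# enforces the required fields via validate_required(), which is inherited
# from TeaModel and raises when a required field is left unset. All values
# below are placeholders.
def _example_clone_request_validation() -> None:
    request = CloneBaasChainContractProjectRequest(
        consortium_id='c-1',
        project_id='p-123',
        project_name='cloned-project',
        project_version='1.0.0',
        project_description='clone of p-123',
    )
    request.validate()  # passes: every required field is set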
class CloneBaasChainContractProjectResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
result: ALiYunContractProject = None,
):
# unique request ID, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally indicates a successful call
self.result_code = result_code
# text description of the error, if any
self.result_msg = result_msg
# result
self.result = result
def validate(self):
if self.result:
self.result.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.result is not None:
result['result'] = self.result.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('result') is not None:
temp_model = ALiYunContractProject()
self.result = temp_model.from_map(m['result'])
return self
class CreateBaasChainContractProjectRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
consortium_id: str = None,
project_description: str = None,
project_name: str = None,
project_version: str = None,
region_id: str = None,
):
# authorization token for OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# consortium_id
self.consortium_id = consortium_id
# project_description
self.project_description = project_description
# project_name
self.project_name = project_name
# project_version
self.project_version = project_version
# region_id
self.region_id = region_id
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.project_description is not None:
result['project_description'] = self.project_description
if self.project_name is not None:
result['project_name'] = self.project_name
if self.project_version is not None:
result['project_version'] = self.project_version
if self.region_id is not None:
result['region_id'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('project_description') is not None:
self.project_description = m.get('project_description')
if m.get('project_name') is not None:
self.project_name = m.get('project_name')
if m.get('project_version') is not None:
self.project_version = m.get('project_version')
if m.get('region_id') is not None:
self.region_id = m.get('region_id')
return self
class CreateBaasChainContractProjectResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
result: ALiYunContractProject = None,
):
# unique request ID, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally indicates a successful call
self.result_code = result_code
# text description of the error, if any
self.result_msg = result_msg
# result
self.result = result
def validate(self):
if self.result:
self.result.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.result is not None:
result['result'] = self.result.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('result') is not None:
temp_model = ALiYunContractProject()
self.result = temp_model.from_map(m['result'])
return self
class DeleteBaasChainContractProjectRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
consortium_id: str = None,
project_id: str = None,
region_id: str = None,
):
# authorization token for OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# consortium_id
self.consortium_id = consortium_id
# project_id
self.project_id = project_id
# region_id
self.region_id = region_id
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.project_id is not None:
result['project_id'] = self.project_id
if self.region_id is not None:
result['region_id'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('region_id') is not None:
self.region_id = m.get('region_id')
return self
class DeleteBaasChainContractProjectResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
result: str = None,
):
# unique request ID, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally indicates a successful call
self.result_code = result_code
# text description of the error, if any
self.result_msg = result_msg
# result
self.result = result
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.result is not None:
result['result'] = self.result
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('result') is not None:
self.result = m.get('result')
return self
class QueryBaasChainContractProjectRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
consortium_id: str = None,
page_number: int = None,
page_size: int = None,
region_id: str = None,
):
# authorization token for OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# consortium_id
self.consortium_id = consortium_id
# page_number
self.page_number = page_number
# page_size
self.page_size = page_size
# region_id
self.region_id = region_id
def validate(self):
self.validate_required(self.consortium_id, 'consortium_id')
self.validate_required(self.page_number, 'page_number')
self.validate_required(self.page_size, 'page_size')
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.page_number is not None:
result['page_number'] = self.page_number
if self.page_size is not None:
result['page_size'] = self.page_size
if self.region_id is not None:
result['region_id'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('page_number') is not None:
self.page_number = m.get('page_number')
if m.get('page_size') is not None:
self.page_size = m.get('page_size')
if m.get('region_id') is not None:
self.region_id = m.get('region_id')
return self
class QueryBaasChainContractProjectResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
result: ALiYunContractProjects = None,
):
# unique request ID, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally indicates a successful call
self.result_code = result_code
# text description of the error, if any
self.result_msg = result_msg
# result
self.result = result
def validate(self):
if self.result:
self.result.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.result is not None:
result['result'] = self.result.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('result') is not None:
temp_model = ALiYunContractProjects()
self.result = temp_model.from_map(m['result'])
return self
class RetryBaasChainContractProjectRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
consortium_id: str = None,
description: str = None,
name: str = None,
project_id: str = None,
project_version: str = None,
region_id: str = None,
):
# authorization token for OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# consortium_id
self.consortium_id = consortium_id
# description
self.description = description
# name
self.name = name
# project_id
self.project_id = project_id
# project_version
self.project_version = project_version
# region_id
self.region_id = region_id
def validate(self):
self.validate_required(self.consortium_id, 'consortium_id')
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.description is not None:
result['description'] = self.description
if self.name is not None:
result['name'] = self.name
if self.project_id is not None:
result['project_id'] = self.project_id
if self.project_version is not None:
result['project_version'] = self.project_version
if self.region_id is not None:
result['region_id'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('description') is not None:
self.description = m.get('description')
if m.get('name') is not None:
self.name = m.get('name')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('project_version') is not None:
self.project_version = m.get('project_version')
if m.get('region_id') is not None:
self.region_id = m.get('region_id')
return self
class RetryBaasChainContractProjectResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
result: ALiYunContractProjectDuplicate = None,
):
# unique request ID, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally indicates a successful call
self.result_code = result_code
# text description of the error, if any
self.result_msg = result_msg
# result
self.result = result
def validate(self):
if self.result:
self.result.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.result is not None:
result['result'] = self.result.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('result') is not None:
temp_model = ALiYunContractProjectDuplicate()
self.result = temp_model.from_map(m['result'])
return self
class UpdateBaasChainContractProjectRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
consortium_id: str = None,
project_description: str = None,
project_id: str = None,
project_name: str = None,
project_version: str = None,
region_id: str = None,
):
# authorization token for OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# consortium_id
self.consortium_id = consortium_id
# project_description
self.project_description = project_description
# project_id
self.project_id = project_id
# project_name
self.project_name = project_name
# project_version
self.project_version = project_version
# region_id
self.region_id = region_id
def validate(self):
self.validate_required(self.consortium_id, 'consortium_id')
self.validate_required(self.project_id, 'project_id')
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.consortium_id is not None:
result['consortium_id'] = self.consortium_id
if self.project_description is not None:
result['project_description'] = self.project_description
if self.project_id is not None:
result['project_id'] = self.project_id
if self.project_name is not None:
result['project_name'] = self.project_name
if self.project_version is not None:
result['project_version'] = self.project_version
if self.region_id is not None:
result['region_id'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('consortium_id') is not None:
self.consortium_id = m.get('consortium_id')
if m.get('project_description') is not None:
self.project_description = m.get('project_description')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('project_name') is not None:
self.project_name = m.get('project_name')
if m.get('project_version') is not None:
self.project_version = m.get('project_version')
if m.get('region_id') is not None:
self.region_id = m.get('region_id')
return self
class UpdateBaasChainContractProjectResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
result: str = None,
):
# unique request ID, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally indicates a successful call
self.result_code = result_code
# text description of the error, if any
self.result_msg = result_msg
# result
self.result = result
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.result is not None:
result['result'] = self.result
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('result') is not None:
self.result = m.get('result')
return self
/bpy_nibbler-0.1.tar.gz/bpy_nibbler-0.1/bpy_lambda/2.78/scripts/addons_contrib/archipack/archipack_gl.py
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# ----------------------------------------------------------
# Author: Stephen Leger (s-leger)
#
# ----------------------------------------------------------
import bgl
import blf
import bpy
from math import sin, cos, atan2, pi
from mathutils import Vector, Matrix
from bpy_extras import view3d_utils, object_utils
# ------------------------------------------------------------------
# Define Gl Handle types
# ------------------------------------------------------------------
class DefaultColorScheme:
"""
Font sizes and basic colour scheme
defaults used when values are not found in addon prefs
Colors are FloatVectorProperty of size 4 and type COLOR_GAMMA
"""
feedback_size_main = 16
feedback_size_title = 14
feedback_size_shortcut = 11
feedback_colour_main = (0.95, 0.95, 0.95, 1.0)
feedback_colour_key = (0.67, 0.67, 0.67, 1.0)
feedback_colour_shortcut = (0.51, 0.51, 0.51, 1.0)
feedback_shortcut_area = (0, 0.4, 0.6, 0.2)
feedback_title_area = (0, 0.4, 0.6, 0.5)
"""
# Addon prefs template
feedback_size_main = IntProperty(
name="Main",
description="Main title font size (pixels)",
min=2,
default=16
)
feedback_size_title = IntProperty(
name="Title",
description="Tool name font size (pixels)",
min=2,
default=14
)
feedback_size_shortcut = IntProperty(
name="Shortcut",
description="Shortcuts font size (pixels)",
min=2,
default=11
)
feedback_shortcut_area = FloatVectorProperty(
name="Background Shortcut",
description="Shortcut area background color",
subtype='COLOR_GAMMA',
default=(0, 0.4, 0.6, 0.2),
size=4,
min=0, max=1
)
feedback_title_area = FloatVectorProperty(
name="Background Main",
description="Title area background color",
subtype='COLOR_GAMMA',
default=(0, 0.4, 0.6, 0.5),
size=4,
min=0, max=1
)
feedback_colour_main = FloatVectorProperty(
name="Font Main",
description="Title color",
subtype='COLOR_GAMMA',
default=(0.95, 0.95, 0.95, 1.0),
size=4,
min=0, max=1
)
feedback_colour_key = FloatVectorProperty(
name="Font Shortcut key",
description="KEY label color",
subtype='COLOR_GAMMA',
default=(0.67, 0.67, 0.67, 1.0),
size=4,
min=0, max=1
)
feedback_colour_shortcut = FloatVectorProperty(
name="Font Shortcut hint",
description="Shortcuts text color",
subtype='COLOR_GAMMA',
default=(0.51, 0.51, 0.51, 1.0),
size=4,
min=0, max=1
)
def draw(self, context):
layout = self.layout
box = layout.box()
row = box.row()
split = row.split(percentage=0.5)
col = split.column()
col.label(text="Colors:")
row = col.row(align=True)
row.prop(self, "feedback_title_area")
row = col.row(align=True)
row.prop(self, "feedback_shortcut_area")
row = col.row(align=True)
row.prop(self, "feedback_colour_main")
row = col.row(align=True)
row.prop(self, "feedback_colour_key")
row = col.row(align=True)
row.prop(self, "feedback_colour_shortcut")
col = split.column()
col.label(text="Font size:")
col.prop(self, "feedback_size_main")
col.prop(self, "feedback_size_title")
col.prop(self, "feedback_size_shortcut")
"""
# @TODO:
# 1 Make a clear separation of 2d (pixel position) and 3d (world position)
# ways to set gl coords
# 2 Unify methods to set points - currently set_pts, set_pos ...
# 3 Put all Gl part in a sub module as it may be used by other devs
# as gl toolkit abstraction for screen feedback
# 4 Implement cursor badges (np_station sample)
# 5 Define a clear color scheme so it is easy to customize
# 6 Allow different arguments for each class,
# e.g. for a line: p0 and p1, or p0 and vector (p1 - p0),
# raising exceptions when incomplete
# 7 Use correct words, normal is not really a normal
# but a perpendicular
# May be hard code more shapes ?
# Fine-tuned text styles with shadows and surrounding boxes / backgrounds
# Extending tests to hdr screens, ultra wide ones and so on
# Circular handle, handle styling (only border, filling ...)
# Keep point 3 in mind while doing this, to keep it simple and easy to use
# Take inspiration from others' feedback systems, talk to other devs
# and find who actually works on bgl's future for the 2.8 release
class Gl():
"""
handle 3d -> 2d gl drawing
d : dimensions
3 to convert pos from 3d
2 to keep pos as 2d absolute screen position
"""
def __init__(self,
d=3,
colour=(0.0, 0.0, 0.0, 1.0)):
# dimension of input coords: 3=world coords, 2=pixel screen coords
self.d = d
self.pos_2d = Vector((0, 0))
self.colour_inactive = colour
@property
def colour(self):
return self.colour_inactive
def position_2d_from_coord(self, context, coord, render=False):
""" coord given in local input coordsys
"""
if self.d == 2:
return coord
if render:
return self.get_render_location(context, coord)
region = context.region
rv3d = context.region_data
loc = view3d_utils.location_3d_to_region_2d(region, rv3d, coord, self.pos_2d)
return loc
def get_render_location(self, context, coord):
scene = context.scene
co_2d = object_utils.world_to_camera_view(scene, scene.camera, coord)
# Get pixel coords
render_scale = scene.render.resolution_percentage / 100
render_size = (int(scene.render.resolution_x * render_scale),
int(scene.render.resolution_y * render_scale))
return [round(co_2d.x * render_size[0]), round(co_2d.y * render_size[1])]
def _end(self):
bgl.glEnd()
bgl.glPopAttrib()
bgl.glLineWidth(1)
bgl.glDisable(bgl.GL_BLEND)
bgl.glColor4f(0.0, 0.0, 0.0, 1.0)
class GlText(Gl):
def __init__(self,
d=3,
label="",
value=None,
precision=2,
unit_mode='AUTO',
unit_type='SIZE',
dimension=1,
angle=0,
font_size=12,
colour=(1, 1, 1, 1),
z_axis=Vector((0, 0, 1))):
"""
d: [2|3] coords type: 2 for coords in screen pixels, 3 for 3d world location
label : string label
value : float value (will add unit according following settings)
precision : integer rounding for values
dimension : [1 - 3] nth dimension of unit (single, square, cubic)
unit_mode : ['AUTO','METER','CENTIMETER','MILIMETER','FEET','INCH','RADIANS','DEGREES']
unit type used to postfix values
AUTO uses the scene units setup
unit_type : ['SIZE','ANGLE']
unit type to add to value
angle : angle to rotate text
"""
self.z_axis = z_axis
# text, added as prefix to value
self.label = label
# value with unit related
self.value = value
self.precision = precision
self.dimension = dimension
self.unit_type = unit_type
self.unit_mode = unit_mode
self.font_size = font_size
self.angle = angle
Gl.__init__(self, d)
self.colour_inactive = colour
# store text with units
self._text = ""
def text_size(self, context):
"""
overall on-screen size in pixels
"""
dpi, font_id = context.user_preferences.system.dpi, 0
if self.angle != 0:
blf.enable(font_id, blf.ROTATION)
blf.rotation(font_id, self.angle)
blf.aspect(font_id, 1.0)
blf.size(font_id, self.font_size, dpi)
x, y = blf.dimensions(font_id, self.text)
if self.angle != 0:
blf.disable(font_id, blf.ROTATION)
return Vector((x, y))
@property
def pts(self):
return [self.pos_3d]
@property
def text(self):
s = self.label + self._text
return s.strip()
def add_units(self, context):
if self.value is None:
return ""
if self.unit_type == 'ANGLE':
scale = 1
else:
scale = context.scene.unit_settings.scale_length
val = self.value * scale
mode = self.unit_mode
if mode == 'AUTO':
if self.unit_type == 'ANGLE':
mode = context.scene.unit_settings.system_rotation
else:
if context.scene.unit_settings.system == "IMPERIAL":
if round(val * (3.2808399 ** self.dimension), 2) >= 1.0:
mode = 'FEET'
else:
mode = 'INCH'
elif context.scene.unit_settings.system == "METRIC":
if round(val, 2) >= 1.0:
mode = 'METER'
else:
if round(val, 2) >= 0.01:
mode = 'CENTIMETER'
else:
mode = 'MILIMETER'
# convert values
if mode == 'METER':
unit = "m"
elif mode == 'CENTIMETER':
val *= (100 ** self.dimension)
unit = "cm"
elif mode == 'MILIMETER':
val *= (1000 ** self.dimension)
unit = 'mm'
elif mode == 'INCH':
val *= (39.3700787 ** self.dimension)
unit = "in"
elif mode == 'FEET':
val *= (3.2808399 ** self.dimension)
unit = "ft"
elif mode == 'RADIANS':
unit = ""
elif mode == 'DEGREES':
val = self.value / pi * 180
unit = "°"
else:
unit = ""
if self.dimension == 2:
unit += "\u00b2" # Superscript two
elif self.dimension == 3:
unit += "\u00b3" # Superscript three
fmt = "%1." + str(self.precision) + "f " + unit
return fmt % val
def set_pos(self, context, value, pos_3d, direction, angle=0, normal=Vector((0, 0, 1))):
self.up_axis = direction.normalized()
self.c_axis = self.up_axis.cross(normal)
self.pos_3d = pos_3d
self.value = value
self.angle = angle
self._text = self.add_units(context)
def draw(self, context, render=False):
self.render = render
x, y = self.position_2d_from_coord(context, self.pts[0], render)
# dirty fast assignment
dpi, font_id = context.user_preferences.system.dpi, 0
bgl.glColor4f(*self.colour)
if self.angle != 0:
blf.enable(font_id, blf.ROTATION)
blf.rotation(font_id, self.angle)
blf.size(font_id, self.font_size, dpi)
blf.position(font_id, x, y, 0)
blf.draw(font_id, self.text)
if self.angle != 0:
blf.disable(font_id, blf.ROTATION)
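# Hedged sketch (not part of the original add-on): the METRIC branch of
# GlText.add_units() shown standalone, without a Blender context. Values
# below 1 m are promoted to cm, then mm, e.g. 0.5 -> "50.00 cm".
def _format_metric_length_example(value, dimension=1, precision=2):
    if round(value, 2) >= 1.0:
        val, unit = value, "m"
    elif round(value, 2) >= 0.01:
        val, unit = value * (100 ** dimension), "cm"
    else:
        val, unit = value * (1000 ** dimension), "mm"
    fmt = "%1." + str(precision) + "f " + unit
    return fmt % val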
class GlBaseLine(Gl):
def __init__(self,
d=3,
width=1,
style=bgl.GL_LINE,
closed=False):
Gl.__init__(self, d)
# default line width
self.width = width
# default line style
self.style = style
# allow closed lines
        self.closed = closed
def draw(self, context, render=False):
"""
render flag when rendering
"""
bgl.glPushAttrib(bgl.GL_ENABLE_BIT)
if self.style == bgl.GL_LINE_STIPPLE:
bgl.glLineStipple(1, 0x9999)
bgl.glEnable(self.style)
bgl.glEnable(bgl.GL_BLEND)
if render:
# enable anti-alias on lines
bgl.glEnable(bgl.GL_LINE_SMOOTH)
bgl.glColor4f(*self.colour)
bgl.glLineWidth(self.width)
if self.closed:
bgl.glBegin(bgl.GL_LINE_LOOP)
else:
bgl.glBegin(bgl.GL_LINE_STRIP)
for pt in self.pts:
x, y = self.position_2d_from_coord(context, pt, render)
bgl.glVertex2f(x, y)
self._end()
class GlLine(GlBaseLine):
"""
2d/3d Line
"""
def __init__(self, d=3, p=None, v=None, p0=None, p1=None, z_axis=None):
"""
d=3 use 3d coords, d=2 use 2d pixels coords
Init by either
p: Vector or tuple origin
v: Vector or tuple size and direction
or
        p0: Vector or tuple first point location
        p1: Vector or tuple second point location
        Will convert any into 3d Vector
        both forms optional
"""
if p is not None and v is not None:
self.p = Vector(p)
self.v = Vector(v)
elif p0 is not None and p1 is not None:
self.p = Vector(p0)
self.v = Vector(p1) - self.p
else:
self.p = Vector((0, 0, 0))
self.v = Vector((0, 0, 0))
if z_axis is not None:
self.z_axis = z_axis
else:
self.z_axis = Vector((0, 0, 1))
GlBaseLine.__init__(self, d)
@property
def p0(self):
return self.p
@property
def p1(self):
return self.p + self.v
@p0.setter
def p0(self, p0):
"""
Note: setting p0
move p0 only
"""
p1 = self.p1
self.p = Vector(p0)
self.v = p1 - p0
@p1.setter
def p1(self, p1):
"""
Note: setting p1
move p1 only
"""
self.v = Vector(p1) - self.p
@property
def length(self):
return self.v.length
@property
def angle(self):
return atan2(self.v.y, self.v.x)
@property
def cross(self):
"""
Vector perpendicular on plane defined by z_axis
lie on the right side
p1
|--x
p0
"""
return self.v.cross(self.z_axis)
def normal(self, t=0):
"""
        Line perpendicular to this one in the plane defined by z_axis
        lies on the right side
p1
|--x
p0
"""
n = GlLine()
n.p = self.lerp(t)
n.v = self.cross
return n
def sized_normal(self, t, size):
"""
        GlLine perpendicular to this one in the plane defined by z_axis, of given size
        positioned at parameter t on the current line
        lies on the right side
p1
|--x
p0
"""
n = GlLine()
n.p = self.lerp(t)
n.v = size * self.cross.normalized()
return n
def lerp(self, t):
"""
        Interpolate along the segment
        t parameter [0, 1] where 0 is the start and 1 the end of the segment
"""
return self.p + self.v * t
def offset(self, offset):
"""
offset > 0 on the right part
"""
self.p += offset * self.cross.normalized()
def point_sur_segment(self, pt):
""" point_sur_segment (2d)
point: Vector 3d
t: param t de l'intersection sur le segment courant
d: distance laterale perpendiculaire positif a droite
"""
dp = (pt - self.p).to_2d()
v2d = self.v.to_2d()
dl = v2d.length
d = (self.v.x * dp.y - self.v.y * dp.x) / dl
t = (v2d * dp) / (dl * dl)
return t > 0 and t < 1, d, t
@property
def pts(self):
return [self.p0, self.p1]
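# Hedged sketch (not in the original source): composing GlLine's parametric
# helpers. Assumes mathutils.Vector is imported at module top, as used above.
def _gl_line_example():
    line = GlLine(p0=(0, 0, 0), p1=(4, 0, 0))
    mid = line.lerp(0.5)              # Vector((2, 0, 0)), segment midpoint
    n = line.sized_normal(0.5, 1.0)   # unit-length perpendicular at midpoint
    return mid, n.p0, n.p1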
class GlCircle(GlBaseLine):
def __init__(self,
d=3,
radius=0,
center=Vector((0, 0, 0)),
z_axis=Vector((0, 0, 1))):
self.r = radius
self.c = center
z = z_axis
if z.z < 1:
x = z.cross(Vector((0, 0, 1)))
y = x.cross(z)
else:
x = Vector((1, 0, 0))
y = Vector((0, 1, 0))
self.rM = Matrix([
Vector((x.x, y.x, z.x)),
Vector((x.y, y.y, z.y)),
Vector((x.z, y.z, z.z))
])
self.z_axis = z
self.a0 = 0
self.da = 2 * pi
GlBaseLine.__init__(self, d)
def lerp(self, t):
"""
        Interpolate along the circle: t in [0, 1] maps to angle a0 .. a0 + da
"""
a = self.a0 + t * self.da
return self.c + self.rM * Vector((self.r * cos(a), self.r * sin(a), 0))
@property
def pts(self):
n_pts = max(1, int(round(abs(self.da) / pi * 30, 0)))
t_step = 1 / n_pts
return [self.lerp(i * t_step) for i in range(n_pts + 1)]
class GlArc(GlCircle):
def __init__(self,
d=3,
radius=0,
center=Vector((0, 0, 0)),
z_axis=Vector((0, 0, 1)),
a0=0,
da=0):
"""
a0 and da arguments are in radians
a0 = 0 on the x+ axis side
a0 = pi on the x- axis side
        da > 0 CCW counter-clockwise
da < 0 CW clockwise
"""
GlCircle.__init__(self, d, radius, center, z_axis)
self.da = da
self.a0 = a0
@property
def length(self):
return self.r * abs(self.da)
def normal(self, t=0):
"""
perpendicular line always on the right side
"""
n = GlLine(d=self.d, z_axis=self.z_axis)
n.p = self.lerp(t)
if self.da < 0:
n.v = self.c - n.p
else:
n.v = n.p - self.c
return n
def sized_normal(self, t, size):
n = GlLine(d=self.d, z_axis=self.z_axis)
n.p = self.lerp(t)
if self.da < 0:
n.v = size * (self.c - n.p).normalized()
else:
n.v = size * (n.p - self.c).normalized()
return n
    def tangeant(self, t, length):
        """
        GlLine tangent to the arc at parameter t, of given length
        (method name keeps the original spelling for compatibility)
        """
a = self.a0 + t * self.da
ca = cos(a)
sa = sin(a)
n = GlLine(d=self.d, z_axis=self.z_axis)
n.p = self.c + self.rM * Vector((self.r * ca, self.r * sa, 0))
n.v = self.rM * Vector((length * sa, -length * ca, 0))
if self.da > 0:
n.v = -n.v
return n
def offset(self, offset):
"""
offset > 0 on the right part
"""
if self.da > 0:
radius = self.r + offset
else:
radius = self.r - offset
return GlArc(d=self.d,
radius=radius,
center=self.c,
a0=self.a0,
da=self.da,
z_axis=self.z_axis)
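# Hedged sketch (not in the original source): GlArc.offset() returns a new
# concentric arc. With da > 0 (CCW) the right side lies outward, so a
# positive offset grows the radius. Assumes pi from math, as used above.
def _gl_arc_offset_example():
    arc = GlArc(radius=1.0, a0=0, da=pi / 2)
    outer = arc.offset(0.25)
    return arc.length, outer.r        # (pi / 2, 1.25)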
class GlPolygon(Gl):
def __init__(self,
colour=(0.0, 0.0, 0.0, 1.0),
d=3):
self.pts_3d = []
Gl.__init__(self, d, colour)
def set_pos(self, pts_3d):
self.pts_3d = pts_3d
@property
def pts(self):
return self.pts_3d
def draw(self, context, render=False):
"""
render flag when rendering
"""
self.render = render
bgl.glPushAttrib(bgl.GL_ENABLE_BIT)
bgl.glEnable(bgl.GL_BLEND)
if render:
# enable anti-alias on polygons
bgl.glEnable(bgl.GL_POLYGON_SMOOTH)
bgl.glColor4f(*self.colour)
bgl.glBegin(bgl.GL_POLYGON)
for pt in self.pts:
x, y = self.position_2d_from_coord(context, pt, render)
bgl.glVertex2f(x, y)
self._end()
class GlRect(GlPolygon):
def __init__(self,
colour=(0.0, 0.0, 0.0, 1.0),
d=2):
GlPolygon.__init__(self, colour, d)
def draw(self, context, render=False):
self.render = render
bgl.glPushAttrib(bgl.GL_ENABLE_BIT)
bgl.glEnable(bgl.GL_BLEND)
if render:
# enable anti-alias on polygons
bgl.glEnable(bgl.GL_POLYGON_SMOOTH)
bgl.glColor4f(*self.colour)
p0 = self.pts[0]
p1 = self.pts[1]
bgl.glRectf(p0.x, p0.y, p1.x, p1.y)
self._end()
class GlImage(Gl):
def __init__(self,
d=2,
image=None):
self.image = image
self.colour_inactive = (1, 1, 1, 1)
Gl.__init__(self, d)
self.pts_2d = [Vector((0, 0)), Vector((10, 10))]
def set_pos(self, pts):
self.pts_2d = pts
@property
def pts(self):
return self.pts_2d
def draw(self, context, render=False):
if self.image is None:
return
bgl.glPushAttrib(bgl.GL_ENABLE_BIT)
p0 = self.pts[0]
p1 = self.pts[1]
bgl.glEnable(bgl.GL_BLEND)
bgl.glColor4f(*self.colour)
bgl.glRectf(p0.x, p0.y, p1.x, p1.y)
self.image.gl_load()
bgl.glEnable(bgl.GL_BLEND)
bgl.glBindTexture(bgl.GL_TEXTURE_2D, self.image.bindcode[0])
bgl.glTexParameteri(bgl.GL_TEXTURE_2D, bgl.GL_TEXTURE_MIN_FILTER, bgl.GL_NEAREST)
bgl.glTexParameteri(bgl.GL_TEXTURE_2D, bgl.GL_TEXTURE_MAG_FILTER, bgl.GL_NEAREST)
bgl.glEnable(bgl.GL_TEXTURE_2D)
bgl.glBlendFunc(bgl.GL_SRC_ALPHA, bgl.GL_ONE_MINUS_SRC_ALPHA)
# bgl.glColor4f(1, 1, 1, 1)
bgl.glBegin(bgl.GL_QUADS)
bgl.glTexCoord2d(0, 0)
bgl.glVertex2d(p0.x, p0.y)
bgl.glTexCoord2d(0, 1)
bgl.glVertex2d(p0.x, p1.y)
bgl.glTexCoord2d(1, 1)
bgl.glVertex2d(p1.x, p1.y)
bgl.glTexCoord2d(1, 0)
bgl.glVertex2d(p1.x, p0.y)
bgl.glEnd()
self.image.gl_free()
bgl.glDisable(bgl.GL_TEXTURE_2D)
class GlPolyline(GlBaseLine):
def __init__(self, colour, d=3):
self.pts_3d = []
GlBaseLine.__init__(self, d)
self.colour_inactive = colour
def set_pos(self, pts_3d):
self.pts_3d = pts_3d
# self.pts_3d.append(pts_3d[0])
@property
def pts(self):
return self.pts_3d
class GlHandle(GlPolygon):
def __init__(self, sensor_size, size, draggable=False, selectable=False, d=3):
"""
sensor_size : 2d size in pixels of sensor area
size : 3d size of handle
"""
GlPolygon.__init__(self, d=d)
self.colour_active = (1.0, 0.0, 0.0, 1.0)
self.colour_hover = (1.0, 1.0, 0.0, 1.0)
self.colour_normal = (1.0, 1.0, 1.0, 1.0)
self.colour_selected = (0.0, 0.0, 0.7, 1.0)
self.size = size
self.sensor_width = sensor_size
self.sensor_height = sensor_size
self.pos_3d = Vector((0, 0, 0))
self.up_axis = Vector((0, 0, 0))
self.c_axis = Vector((0, 0, 0))
self.hover = False
self.active = False
self.draggable = draggable
self.selectable = selectable
self.selected = False
def set_pos(self, context, pos_3d, direction, normal=Vector((0, 0, 1))):
self.up_axis = direction.normalized()
self.c_axis = self.up_axis.cross(normal)
self.pos_3d = pos_3d
self.pos_2d = self.position_2d_from_coord(context, self.sensor_center)
def check_hover(self, pos_2d):
if self.draggable:
dp = pos_2d - self.pos_2d
self.hover = abs(dp.x) < self.sensor_width and abs(dp.y) < self.sensor_height
@property
def sensor_center(self):
pts = self.pts
n = len(pts)
x, y, z = 0, 0, 0
for pt in pts:
x += pt.x
y += pt.y
z += pt.z
return Vector((x / n, y / n, z / n))
@property
def pts(self):
raise NotImplementedError
@property
def colour(self):
if self.render:
return self.colour_inactive
elif self.draggable:
if self.active:
return self.colour_active
elif self.hover:
return self.colour_hover
elif self.selected:
return self.colour_selected
return self.colour_normal
else:
return self.colour_inactive
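# Hedged sketch (not in the original source): the rectangular hit test that
# GlHandle.check_hover() applies, extracted for clarity. A 2d point hovers
# when it lies within the sensor half-extents around the handle position.
def _hover_example(handle_pos_2d, mouse_pos_2d, sensor_w, sensor_h):
    dp = mouse_pos_2d - handle_pos_2d
    return abs(dp.x) < sensor_w and abs(dp.y) < sensor_h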
class SquareHandle(GlHandle):
def __init__(self, sensor_size, size, draggable=False, selectable=False):
GlHandle.__init__(self, sensor_size, size, draggable, selectable)
@property
def pts(self):
n = self.up_axis
c = self.c_axis
if self.selected or self.hover or self.active:
scale = 1
else:
scale = 0.5
x = n * self.size * scale
y = c * self.size * scale
return [self.pos_3d - x - y, self.pos_3d + x - y, self.pos_3d + x + y, self.pos_3d - x + y]
class TriHandle(GlHandle):
def __init__(self, sensor_size, size, draggable=False, selectable=False):
GlHandle.__init__(self, sensor_size, size, draggable, selectable)
@property
def pts(self):
n = self.up_axis
c = self.c_axis
        # scaling would move the sensitive area, so disable it for the tri handle
        # a dedicated sensor_center override could fix this
# if self.selected or self.hover or self.active:
scale = 1
# else:
# scale = 0.5
x = n * self.size * 4 * scale
y = c * self.size * scale
return [self.pos_3d - x + y, self.pos_3d - x - y, self.pos_3d]
class EditableText(GlText, GlHandle):
def __init__(self, sensor_size, size, draggable=False, selectable=False):
GlHandle.__init__(self, sensor_size, size, draggable, selectable)
GlText.__init__(self, colour=(0, 0, 0, 1))
def set_pos(self, context, value, pos_3d, direction, normal=Vector((0, 0, 1))):
self.up_axis = direction.normalized()
self.c_axis = self.up_axis.cross(normal)
self.pos_3d = pos_3d
self.value = value
self._text = self.add_units(context)
x, y = self.text_size(context)
self.pos_2d = self.position_2d_from_coord(context, pos_3d)
self.pos_2d.x += 0.5 * x
self.sensor_width, self.sensor_height = 0.5 * x, y
@property
def sensor_center(self):
return self.pos_3d
class ThumbHandle(GlHandle):
def __init__(self, size_2d, label, image=None, draggable=False, selectable=False, d=2):
GlHandle.__init__(self, size_2d, size_2d, draggable, selectable, d)
self.image = GlImage(image=image)
self.label = GlText(d=2, label=label.replace("_", " ").capitalize())
self.frame = GlPolyline((1, 1, 1, 1), d=2)
self.frame.closed = True
self.size_2d = size_2d
self.sensor_width = 0.5 * size_2d.x
self.sensor_height = 0.5 * size_2d.y
self.colour_normal = (0.715, 0.905, 1, 0.9)
self.colour_hover = (1, 1, 1, 1)
def set_pos(self, context, pos_2d):
"""
        pos_2d is the center of the thumbnail
"""
self.pos_2d = pos_2d
ts = self.label.text_size(context)
self.label.pos_3d = pos_2d + Vector((-0.5 * ts.x, ts.y - 0.5 * self.size_2d.y))
p0, p1 = self.pts
self.image.set_pos(self.pts)
self.frame.set_pos([p0, Vector((p1.x, p0.y)), p1, Vector((p0.x, p1.y))])
@property
def pts(self):
s = 0.5 * self.size_2d
return [self.pos_2d - s, self.pos_2d + s]
@property
def sensor_center(self):
return self.pos_2d + 0.5 * self.size_2d
def draw(self, context, render=False):
self.render = render
self.image.colour_inactive = self.colour
GlHandle.draw(self, context, render=False)
self.image.draw(context, render=False)
self.label.draw(context, render=False)
self.frame.draw(context, render=False)
class Screen():
def __init__(self, margin):
self.margin = margin
def size(self, context):
system = context.user_preferences.system
w = context.region.width
h = context.region.height
y_min = self.margin
y_max = h - self.margin
x_min = self.margin
x_max = w - self.margin
if (system.use_region_overlap and
system.window_draw_method in {'TRIPLE_BUFFER', 'AUTOMATIC'}):
area = context.area
for r in area.regions:
if r.type == 'TOOLS':
x_min += r.width
elif r.type == 'UI':
x_max -= r.width
return x_min, x_max, y_min, y_max
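# Hedged usage sketch (not in the original source): FeedbackPanel below uses
# Screen to compute the drawable area, with tool/UI side panels subtracted
# when region overlap is enabled:
#   screen = Screen(margin=50)
#   x_min, x_max, y_min, y_max = screen.size(context)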
class FeedbackPanel():
"""
    Feedback panel
inspired by np_station
"""
def __init__(self, title='Archipack'):
prefs = self.get_prefs(bpy.context)
self.main_title = GlText(d=2,
label=title + " : ",
font_size=prefs.feedback_size_main,
colour=prefs.feedback_colour_main
)
self.title = GlText(d=2,
font_size=prefs.feedback_size_title,
colour=prefs.feedback_colour_main
)
self.spacing = Vector((
0.5 * prefs.feedback_size_shortcut,
0.5 * prefs.feedback_size_shortcut))
self.margin = 50
self.explanation = GlText(d=2,
font_size=prefs.feedback_size_shortcut,
colour=prefs.feedback_colour_main
)
self.shortcut_area = GlPolygon(colour=prefs.feedback_shortcut_area, d=2)
self.title_area = GlPolygon(colour=prefs.feedback_title_area, d=2)
self.shortcuts = []
self.on = False
self.show_title = True
self.show_main_title = True
        # read only: when enabled, holds the top left coord of the info box after draw()
self.top = Vector((0, 0))
self.screen = Screen(self.margin)
def disable(self):
self.on = False
def enable(self):
self.on = True
def get_prefs(self, context):
global __name__
try:
# retrieve addon name from imports
addon_name = __name__.split('.')[0]
prefs = context.user_preferences.addons[addon_name].preferences
        except Exception:
            # fall back to defaults when the addon preferences are unavailable
            prefs = DefaultColorScheme
return prefs
def instructions(self, context, title, explanation, shortcuts):
"""
position from bottom to top
"""
prefs = self.get_prefs(context)
self.explanation.label = explanation
self.title.label = title
self.shortcuts = []
for key, label in shortcuts:
key = GlText(d=2, label=key,
font_size=prefs.feedback_size_shortcut,
colour=prefs.feedback_colour_key)
label = GlText(d=2, label=' : ' + label,
font_size=prefs.feedback_size_shortcut,
colour=prefs.feedback_colour_shortcut)
ks = key.text_size(context)
ls = label.text_size(context)
self.shortcuts.append([key, ks, label, ls])
def draw(self, context, render=False):
if self.on:
"""
draw from bottom to top
so we are able to always fit needs
"""
x_min, x_max, y_min, y_max = self.screen.size(context)
available_w = x_max - x_min - 2 * self.spacing.x
main_title_size = self.main_title.text_size(context) + Vector((5, 0))
# h = context.region.height
# 0,0 = bottom left
pos = Vector((x_min + self.spacing.x, y_min))
shortcuts = []
# sort by lines
lines = []
line = []
space = 0
sum_txt = 0
for key, ks, label, ls in self.shortcuts:
space += ks.x + ls.x + self.spacing.x
if pos.x + space > available_w:
txt_spacing = (available_w - sum_txt) / (max(1, len(line) - 1))
sum_txt = 0
space = ks.x + ls.x + self.spacing.x
lines.append((txt_spacing, line))
line = []
sum_txt += ks.x + ls.x
line.append([key, ks, label, ls])
if len(line) > 0:
txt_spacing = (available_w - sum_txt) / (max(1, len(line) - 1))
lines.append((txt_spacing, line))
# reverse lines to draw from bottom to top
lines = list(reversed(lines))
for spacing, line in lines:
pos.y += self.spacing.y
pos.x = x_min + self.spacing.x
for key, ks, label, ls in line:
key.pos_3d = pos.copy()
pos.x += ks.x
label.pos_3d = pos.copy()
pos.x += ls.x + spacing
shortcuts.extend([key, label])
pos.y += ks.y + self.spacing.y
n_shortcuts = len(shortcuts)
# shortcut area
self.shortcut_area.pts_3d = [
(x_min, self.margin),
(x_max, self.margin),
(x_max, pos.y),
(x_min, pos.y)
]
# small space between shortcut area and main title bar
if n_shortcuts > 0:
pos.y += 0.5 * self.spacing.y
self.title_area.pts_3d = [
(x_min, pos.y),
(x_max, pos.y),
(x_max, pos.y + main_title_size.y + 2 * self.spacing.y),
(x_min, pos.y + main_title_size.y + 2 * self.spacing.y)
]
pos.y += self.spacing.y
title_size = self.title.text_size(context)
# check for space available:
# if explanation + title + main_title are too big
# 1 remove main title
# 2 remove title
explanation_size = self.explanation.text_size(context)
self.show_title = True
self.show_main_title = True
if title_size.x + explanation_size.x > available_w:
# keep only explanation
self.show_title = False
self.show_main_title = False
elif main_title_size.x + title_size.x + explanation_size.x > available_w:
# keep title + explanation
self.show_main_title = False
self.title.pos_3d = (x_min + self.spacing.x, pos.y)
else:
self.title.pos_3d = (x_min + self.spacing.x + main_title_size.x, pos.y)
self.explanation.pos_3d = (x_max - self.spacing.x - explanation_size.x, pos.y)
self.main_title.pos_3d = (x_min + self.spacing.x, pos.y)
self.shortcut_area.draw(context)
self.title_area.draw(context)
if self.show_title:
self.title.draw(context)
if self.show_main_title:
self.main_title.draw(context)
self.explanation.draw(context)
for s in shortcuts:
s.draw(context)
self.top = Vector((x_min, pos.y + main_title_size.y + self.spacing.y))
class GlCursorFence():
"""
Cursor crossing Fence
"""
def __init__(self, width=1, colour=(1.0, 1.0, 1.0, 0.5), style=2852):
self.line_x = GlLine(d=2)
self.line_x.style = style
self.line_x.width = width
self.line_x.colour_inactive = colour
self.line_y = GlLine(d=2)
self.line_y.style = style
self.line_y.width = width
self.line_y.colour_inactive = colour
self.on = True
def set_location(self, context, location):
w = context.region.width
h = context.region.height
x, y = location
self.line_x.p = Vector((0, y))
self.line_x.v = Vector((w, 0))
self.line_y.p = Vector((x, 0))
self.line_y.v = Vector((0, h))
def enable(self):
self.on = True
def disable(self):
self.on = False
def draw(self, context, render=False):
if self.on:
self.line_x.draw(context)
self.line_y.draw(context)
class GlCursorArea():
def __init__(self,
width=1,
bordercolour=(1.0, 1.0, 1.0, 0.5),
areacolour=(0.5, 0.5, 0.5, 0.08),
style=2852):
self.border = GlPolyline(bordercolour, d=2)
self.border.style = style
self.border.width = width
self.border.closed = True
self.area = GlPolygon(areacolour, d=2)
self.min = Vector((0, 0))
self.max = Vector((0, 0))
self.on = False
def in_area(self, pt):
return (self.min.x <= pt.x and self.max.x >= pt.x and
self.min.y <= pt.y and self.max.y >= pt.y)
def set_location(self, context, p0, p1):
x0, y0 = p0
x1, y1 = p1
if x0 > x1:
x1, x0 = x0, x1
if y0 > y1:
y1, y0 = y0, y1
self.min = Vector((x0, y0))
self.max = Vector((x1, y1))
pos = [
Vector((x0, y0)),
Vector((x0, y1)),
Vector((x1, y1)),
Vector((x1, y0))]
self.area.set_pos(pos)
self.border.set_pos(pos)
def enable(self):
self.on = True
def disable(self):
self.on = False
def draw(self, context, render=False):
if self.on:
self.area.draw(context)
            self.border.draw(context)
/octarine-easypy-0.0.2.tar.gz/octarine-easypy-0.0.2/easypy/tables.py
from io import StringIO
from easypy.collections import defaultlist
from easypy.colors import colored, uncolorize
from easypy.humanize import compact
class Column():
def __init__(self, name, title=None, max_width=None, align=None, header_align=None, padding=None, drop_if_empty=False):
self.name = name
self.max_width = max_width
self.align = align
self.header_align = header_align
self.padding = padding
self.overflow = 'ellipsis'
self.title = title or name
self.drop_if_empty = drop_if_empty
class Table():
def __init__(self, *columns, data=None, max_col_width=None, align='left', header_align='center', padding=1):
"""
:type columns: list of Column
:type data: list of Bunch
"""
self.data = data or []
self.columns = []
self.max_col_width = max_col_width
self.align = align
self.header_align = header_align
self.padding = padding
for column in columns:
self.add_column(column)
_ALIGN_MAP = dict(left='<', right='>', center='^')
def add_column(self, column: Column):
self.columns.append(column)
def add_row(self, **row):
self.data.append(row)
def render(self):
rendered = defaultlist(list)
columns = []
def _get_value(data, value):
ret = data.get(value)
if ret is None:
ret = ''
return ret
for column in self.columns:
rows = [_get_value(data, column.name) for data in self.data]
if not any(filter(lambda i: i != '', rows)) and column.drop_if_empty:
continue
columns.append(column)
if column.max_width is None:
column.max_width = self.max_col_width
if column.align is None:
column.align = self.align
if column.header_align is None:
column.header_align = self.header_align
if column.padding is None:
column.padding = self.padding
raw_data = [column.title] + rows
colored_data = [colored(str(data)) for data in raw_data]
uncolored_data = [uncolorize(data) for data in colored_data]
max_width = column.max_width or max(len(data) for data in uncolored_data)
for i, data in enumerate(colored_data):
align = column.header_align if i == 0 else column.align
coloring_spacing = len(colored_data[i]) - len(uncolored_data[i])
spacing = max_width + coloring_spacing
format_string = "{{data:{align}{spacing}}}".format(align=self._ALIGN_MAP[align], spacing=spacing)
rendered[i].append(format_string.format(data=data))
output = StringIO()
for r_i, row in enumerate(rendered):
r_parts = []
for col_i, col in enumerate(row):
column = columns[col_i]
padding = column.padding * " "
if column.max_width and r_i > 0:
col = compact(col, column.max_width, suffix_length=column.max_width//10)
r_parts.append("{padding}{col}{padding}".format(col=col, padding=padding))
output.write("|".join(r_parts))
output.write("\n")
if r_i == 0:
output.seek(0)
first_row = output.read()
output.write(len(uncolorize(first_row)) * '-' + "\n")
output.seek(0)
return output.read()
def _test():
table = Table(Column("first", "GREEN<<First>>"))
table.add_column(Column("second", align='right'))
table.add_row(first='1', second='BLUE<<longer>> second MAGENTA<<column>>')
table.add_row(first='longer first column', second='2')
    print(table.render())
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/compute/v20171201/virtual_machine_scale_set.py
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['VirtualMachineScaleSet']
class VirtualMachineScaleSet(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
identity: Optional[pulumi.Input[pulumi.InputType['VirtualMachineScaleSetIdentityArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
overprovision: Optional[pulumi.Input[bool]] = None,
plan: Optional[pulumi.Input[pulumi.InputType['PlanArgs']]] = None,
platform_fault_domain_count: Optional[pulumi.Input[int]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
single_placement_group: Optional[pulumi.Input[bool]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['SkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
upgrade_policy: Optional[pulumi.Input[pulumi.InputType['UpgradePolicyArgs']]] = None,
virtual_machine_profile: Optional[pulumi.Input[pulumi.InputType['VirtualMachineScaleSetVMProfileArgs']]] = None,
vm_scale_set_name: Optional[pulumi.Input[str]] = None,
zone_balance: Optional[pulumi.Input[bool]] = None,
zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Describes a Virtual Machine Scale Set.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['VirtualMachineScaleSetIdentityArgs']] identity: The identity of the virtual machine scale set, if configured.
:param pulumi.Input[str] location: Resource location
:param pulumi.Input[bool] overprovision: Specifies whether the Virtual Machine Scale Set should be overprovisioned.
:param pulumi.Input[pulumi.InputType['PlanArgs']] plan: Specifies information about the marketplace image used to create the virtual machine. This element is only used for marketplace images. Before you can use a marketplace image from an API, you must enable the image for programmatic use. In the Azure portal, find the marketplace image that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter any required information and then click **Save**.
:param pulumi.Input[int] platform_fault_domain_count: Fault Domain count for each placement group.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[bool] single_placement_group: When true this limits the scale set to a single placement group, of max size 100 virtual machines.
:param pulumi.Input[pulumi.InputType['SkuArgs']] sku: The virtual machine scale set sku.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
:param pulumi.Input[pulumi.InputType['UpgradePolicyArgs']] upgrade_policy: The upgrade policy.
:param pulumi.Input[pulumi.InputType['VirtualMachineScaleSetVMProfileArgs']] virtual_machine_profile: The virtual machine profile.
:param pulumi.Input[str] vm_scale_set_name: The name of the VM scale set to create or update.
:param pulumi.Input[bool] zone_balance: Whether to force strictly even Virtual Machine distribution cross x-zones in case there is zone outage.
:param pulumi.Input[Sequence[pulumi.Input[str]]] zones: The virtual machine scale set zones. NOTE: Availability zones can only be set when you create the scale set.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['identity'] = identity
__props__['location'] = location
__props__['overprovision'] = overprovision
__props__['plan'] = plan
__props__['platform_fault_domain_count'] = platform_fault_domain_count
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['single_placement_group'] = single_placement_group
__props__['sku'] = sku
__props__['tags'] = tags
__props__['upgrade_policy'] = upgrade_policy
__props__['virtual_machine_profile'] = virtual_machine_profile
if vm_scale_set_name is None and not opts.urn:
raise TypeError("Missing required property 'vm_scale_set_name'")
__props__['vm_scale_set_name'] = vm_scale_set_name
__props__['zone_balance'] = zone_balance
__props__['zones'] = zones
__props__['name'] = None
__props__['provisioning_state'] = None
__props__['type'] = None
__props__['unique_id'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/latest:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20150615:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20160330:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20160430preview:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20170330:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20180401:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20180601:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20181001:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20190301:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20190701:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20191201:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20200601:VirtualMachineScaleSet"), pulumi.Alias(type_="azure-nextgen:compute/v20201201:VirtualMachineScaleSet")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(VirtualMachineScaleSet, __self__).__init__(
'azure-nextgen:compute/v20171201:VirtualMachineScaleSet',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualMachineScaleSet':
"""
Get an existing VirtualMachineScaleSet resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return VirtualMachineScaleSet(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def identity(self) -> pulumi.Output[Optional['outputs.VirtualMachineScaleSetIdentityResponse']]:
"""
The identity of the virtual machine scale set, if configured.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def overprovision(self) -> pulumi.Output[Optional[bool]]:
"""
Specifies whether the Virtual Machine Scale Set should be overprovisioned.
"""
return pulumi.get(self, "overprovision")
@property
@pulumi.getter
def plan(self) -> pulumi.Output[Optional['outputs.PlanResponse']]:
"""
Specifies information about the marketplace image used to create the virtual machine. This element is only used for marketplace images. Before you can use a marketplace image from an API, you must enable the image for programmatic use. In the Azure portal, find the marketplace image that you want to use and then click **Want to deploy programmatically, Get Started ->**. Enter any required information and then click **Save**.
"""
return pulumi.get(self, "plan")
@property
@pulumi.getter(name="platformFaultDomainCount")
def platform_fault_domain_count(self) -> pulumi.Output[Optional[int]]:
"""
Fault Domain count for each placement group.
"""
return pulumi.get(self, "platform_fault_domain_count")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state, which only appears in the response.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="singlePlacementGroup")
def single_placement_group(self) -> pulumi.Output[Optional[bool]]:
"""
When true this limits the scale set to a single placement group, of max size 100 virtual machines.
"""
return pulumi.get(self, "single_placement_group")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[Optional['outputs.SkuResponse']]:
"""
The virtual machine scale set sku.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="uniqueId")
def unique_id(self) -> pulumi.Output[str]:
"""
Specifies the ID which uniquely identifies a Virtual Machine Scale Set.
"""
return pulumi.get(self, "unique_id")
@property
@pulumi.getter(name="upgradePolicy")
def upgrade_policy(self) -> pulumi.Output[Optional['outputs.UpgradePolicyResponse']]:
"""
The upgrade policy.
"""
return pulumi.get(self, "upgrade_policy")
@property
@pulumi.getter(name="virtualMachineProfile")
def virtual_machine_profile(self) -> pulumi.Output[Optional['outputs.VirtualMachineScaleSetVMProfileResponse']]:
"""
The virtual machine profile.
"""
return pulumi.get(self, "virtual_machine_profile")
@property
@pulumi.getter(name="zoneBalance")
def zone_balance(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to force strictly even Virtual Machine distribution cross x-zones in case there is zone outage.
"""
return pulumi.get(self, "zone_balance")
@property
@pulumi.getter
def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The virtual machine scale set zones. NOTE: Availability zones can only be set when you create the scale set.
"""
return pulumi.get(self, "zones")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
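# Hedged usage sketch (not part of the generated SDK); resource names, sku
# and upgrade mode below are illustrative placeholders:
#   scale_set = VirtualMachineScaleSet(
#       "example-vmss",
#       resource_group_name="example-rg",
#       vm_scale_set_name="example-vmss",
#       location="westus2",
#       sku=SkuArgs(name="Standard_DS1_v2", tier="Standard", capacity=2),
#       upgrade_policy=UpgradePolicyArgs(mode="Manual"),
#   )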
/django_gift_card_crawler-1.0.0-py3-none-any.whl/gift_card_crawler/static/gift_card_crawler/js/gift_card_crawler.js
function download(filename, text) {
var current_date = new Date();
var current_time = current_date.getTime();
var element = document.createElement('a');
element.setAttribute('href', 'data:text/csv;charset=utf-8,' + encodeURIComponent(text));
element.setAttribute('download', current_time + '_' + filename);
element.style.display = 'none';
document.body.appendChild(element);
element.click();
document.body.removeChild(element);
}
function ShowLoading(disable_input=false) {
if (disable_input) {
var file_file = document.querySelector('#formFile');
file_file.setAttribute('disabled', 'disabled');
}
var submit_btn = document.querySelector('#CrawlBtn');
submit_btn.setAttribute('disabled', 'disabled');
submit_btn.value = "Crawling...";
}
function RemoveLoading() {
var submit_btn = document.querySelector('#CrawlBtn');
var file_file = document.querySelector('#formFile');
file_file.removeAttribute('disabled');
submit_btn.removeAttribute('disabled');
submit_btn.value = "Start Crawling";
}
async function fetchCards(api_url) {
const response = await fetch(api_url);
var data = await response.json();
if (data.cards_datatable !== null) {
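        // `interval` is assumed to be the polling timer id created by the
        // caller via setInterval (defined elsewhere in this script).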
clearInterval(interval);
var changeList = document.getElementById("changelist");
changeList.innerHTML = data.cards_datatable;
RemoveLoading();
var ext_btn = document.getElementById("Extract");
if (ext_btn) {
ext_btn.addEventListener("click", function () {
var cards_info = document.querySelectorAll('tr.card_info');
var gift_cards = "";
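            // Wrapping each value as ="..." keeps spreadsheet apps from
            // stripping leading zeros or reformatting long card numbers.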
cards_info.forEach(function (card_info) {
gift_cards += '="' + card_info.querySelector('.card_number').textContent + '"';
gift_cards += ',="' + card_info.querySelector('.serial_number').textContent + '"';
gift_cards += "\n";
});
var filename = "cards.csv";
download(filename, gift_cards);
}, false);
}
}
}
/sagemath-polyhedra-9.5b6.tar.gz/sage/categories/lambda_bracket_algebras_with_basis.py
from sage.categories.category_with_axiom import CategoryWithAxiom_over_base_ring
from sage.categories.graded_modules import GradedModulesCategory
class LambdaBracketAlgebrasWithBasis(CategoryWithAxiom_over_base_ring):
"""
The category of Lambda bracket algebras with basis.
EXAMPLES::
sage: LieConformalAlgebras(QQbar).WithBasis()
Category of Lie conformal algebras with basis over Algebraic Field
"""
class ElementMethods:
def index(self):
"""
The index of this basis element.
EXAMPLES::
sage: V = lie_conformal_algebras.NeveuSchwarz(QQ)
sage: V.inject_variables()
Defining L, G, C
sage: G.T(3).index()
('G', 3)
sage: v = V.an_element(); v
L + G + C
sage: v.index()
Traceback (most recent call last):
...
ValueError: index can only be computed for monomials, got L + G + C
"""
if self.is_zero():
return None
if not self.is_monomial():
            raise ValueError("index can only be computed for "
                             "monomials, got {}".format(self))
return next(iter(self.monomial_coefficients()))
class FinitelyGeneratedAsLambdaBracketAlgebra(CategoryWithAxiom_over_base_ring):
"""
The category of finitely generated lambda bracket algebras with
basis.
EXAMPLES::
sage: C = LieConformalAlgebras(QQbar)
sage: C.WithBasis().FinitelyGenerated()
Category of finitely generated Lie conformal algebras with basis over Algebraic Field
sage: C.WithBasis().FinitelyGenerated() is C.FinitelyGenerated().WithBasis()
True
"""
class Graded(GradedModulesCategory):
"""
The category of H-graded finitely generated lambda bracket
algebras with basis.
EXAMPLES::
sage: LieConformalAlgebras(QQbar).WithBasis().FinitelyGenerated().Graded()
Category of H-graded finitely generated Lie conformal algebras with basis over Algebraic Field
"""
class ParentMethods:
def degree_on_basis(self, m):
r"""
Return the degree of the basis element indexed by ``m``
in ``self``.
EXAMPLES::
sage: V = lie_conformal_algebras.Virasoro(QQ)
sage: V.degree_on_basis(('L',2))
4
"""
if m[0] in self._central_elements:
return 0
                return self._weights[self._index_to_pos[m[0]]] + m[1]
/wig-ng-0.2.tar.gz/wig/helpers/wps.py
import struct
class InvalidWPSInformationElement(Exception):
"""Invalid WPS Information Element Exception."""
pass
class WPSElements(object):
"""Contains all WPS data elements constants."""
ID_AP_CHANNEL = 0x1001
ID_ASSOCIATION_STATE = 0x1002
ID_AUTHENTICATION_TYPE = 0x1003
ID_AUTHENTICATION_TYPE_FLAGS = 0x1004
ID_AUTHENTICATOR = 0x1005
ID_CONFIG_METHODS = 0x1008
ID_CONFIGURATION_ERROR = 0x1009
ID_CONFIRMATION_URL4 = 0x100A
ID_CONFIRMATION_URL6 = 0x100B
ID_CONNECTION_TYPE = 0x100C
ID_CONNECTION_TYPE_FLAGS = 0x100D
ID_CREDENTIAL = 0x100E
ID_DEVICE_NAME = 0x1011
ID_DEVICE_PASSWORD_ID = 0x1012
ID_E_HASH1 = 0x1014
ID_E_HASH2 = 0x1015
ID_E_SNONCE1 = 0x1016
ID_E_SNONCE2 = 0x1017
ID_ENCRYPTED_SETTINGS = 0x1018
ID_ENCRYPTED_TYPE = 0x100F
ID_ENCRYPTED_TYPE_FLAGS = 0x1010
ID_ENROLLEE_NONCE = 0x101A
ID_FEATURE_ID = 0x101B
ID_IDENTITY = 0x101C
ID_IDENTITY_PROOF = 0x101D
ID_KEY_WRAP_AUTHENTICATOR = 0x101E
ID_KEY_IDENTIFIER = 0x101F
ID_MAC_ADDRESS = 0x1020
ID_MANUFACTURER = 0x1021
ID_MESSAGE_TYPE = 0x1022
ID_MODEL_NAME = 0x1023
ID_MODEL_NUMBER = 0x1024
ID_NETWORK_INDEX = 0x1026
ID_NETWORK_KEY = 0x1027
ID_NETWORK_KEY_INDEX = 0x1028
ID_NEW_DEVICE_NAME = 0x1029
ID_NEW_PASSWORD = 0x102A
ID_OOB_DEVICE_PASSWORD = 0x102C
ID_OS_VERSION = 0x102D
ID_POWER_LEVEL = 0x102F
ID_PSK_CURRENT = 0x1030
ID_PSK_MAX = 0x1031
ID_PUBLIC_KEY = 0x1032
ID_RADIO_ENABLED = 0x1033
ID_REBOOT = 0x1034
ID_REGISTRAR_CURRENT = 0x1035
ID_REGISTRAR_ESTABLISHED = 0x1036
ID_REGISTRAR_LIST = 0x1037
ID_REGISTRAR_MAX = 0x1038
ID_REGISTRAR_NONCE = 0x1039
ID_REQUEST_TYPE = 0x103A
ID_RESPONSE_TYPE = 0x103B
ID_RF_BANDS = 0x103C
ID_R_HASH1 = 0x103D
ID_R_HASH2 = 0x103E
ID_R_SNONCE1 = 0x103F
ID_R_SNONCE2 = 0x1040
ID_SELECT_REGISTRAR = 0x1041
ID_SERIAL_NUMBER = 0x1042
ID_WIFI_PROTECTED_SETUP_STATE = 0x1044
ID_SSID = 0x1045
ID_TOTAL_NETWORKS = 0x1046
ID_UUID_E = 0x1047
ID_UUID_R = 0x1048
ID_VENDOR_EXTENSION = 0x1049
ID_VERSION = 0x104A
ID_X509_CERTIFICATE_REQUEST = 0x104B
ID_X509_CERTIFICATE = 0x104C
ID_EAP_IDENTITY = 0x104D
ID_MESSAGE_COUNTER = 0x104E
ID_PUBLIC_KEY_HASH = 0x104F
ID_REKEY_KEY = 0x1050
ID_KEY_LIFETIME = 0x1051
ID_PERMITED_CONFIG_METHODS = 0x1052
ID_SELECTED_REGISTRAR_CONFIG_METHODS = 0x1053
ID_PRIMARY_DEVICE_TYPE = 0x1054
ID_SECONDARY_DEVICE_TYPE_LIST = 0x1055
ID_PORTABLE_DEVICE = 0x1056
ID_AP_SETUP_LOCKED = 0x1057
ID_APPLICATION_EXTENSION = 0x1058
ID_EAP_TYPE = 0x1059
ID_INITIALIZATION_VECTOR = 0x1060
ID_PROVIDED_AUTOMATICALLY = 0x1061
ID_8021X_ENABLED = 0x1062
ID_APP_SESSION_KEY = 0x1063
ID_WEP_TRANSMIT_KEY = 0x1064
ID_CONFIG_METHODS_SIZE = 2
ID_VERSION_SIZE = 1
ID_WIFI_PROTECTED_SETUP_STATE_SIZE = 1
ID_UUID_SIZE = 16
ID_PRIMARY_DEVICE_TYPE_SIZE = 8
@staticmethod
def get_element_key(value):
"""Returns string based on the value parameter."""
for wps_item in WPSElements.__dict__.items():
k, v = wps_item
if v == value:
return k.replace("_", " ").lower()[len("ID_"):]
return None
class WPSConfigurationMethods(object):
CONFIG_METHOD_USB = 0x0001
CONFIG_METHOD_ETHERNET = 0x0002
CONFIG_METHOD_LABEL = 0x0004
CONFIG_METHOD_DISPLAY = 0x0008
CONFIG_METHOD_EXTERNAL_NFC_TOKEN = 0x0010
CONFIG_METHOD_INTEGRATED_NFC_TOKEN = 0x0020
    CONFIG_METHOD_NFC_INTERFACE = 0x0040
CONFIG_METHOD_PUSH_BUTTON = 0x0080
CONFIG_METHOD_KEYPAD = 0x0100
@staticmethod
def get_element_key(value):
"""Returns string based on the value parameter."""
        for config_method_item in WPSConfigurationMethods.__dict__.items():
            k, v = config_method_item
if v == value:
return k.replace("_", " ").lower()[len("CONFIG_METHOD_"):]
return None
class WPSInformationElement(object):
"""TODO"""
TLV_ID_LENGTH = 2
TLV_SIZE_LENGTH = 2
WPS_IE_SIZE_LENGTH = 1
VENDOR_SPECIFIC_IE_ID = b"\xdd" # Vendor Specific ID
WPS_OUI = b"\x00\x50\xf2" # Microsoft OUI (WiFi Alliance)
WPS_OUI_TYPE = b"\x04" # WPS type
FIXED_DATA_LENGTH = len(VENDOR_SPECIFIC_IE_ID) + WPS_IE_SIZE_LENGTH + len(WPS_OUI) + len(WPS_OUI_TYPE)
def __init__(self, buff):
self.buffer = buff
self.buffer_length = len(buff)
self.__elements__ = dict()
self.__do_basic_verification__()
self.__process_buffer__()
def get_elements(self):
"""Returns a dictionary with the WPS information."""
return self.__elements__.items()
def __do_basic_verification__(self):
"""
Verify if the buffer has the minimal length necessary, the correct OUI and OUI type.
"""
idx = 0
if self.buffer_length <= self.FIXED_DATA_LENGTH:
raise InvalidWPSInformationElement("Invalid buffer length.")
if not struct.pack("B", self.buffer[idx]) == self.VENDOR_SPECIFIC_IE_ID:
raise InvalidWPSInformationElement("Invalid WPS information element id.")
idx += len(self.VENDOR_SPECIFIC_IE_ID) + self.WPS_IE_SIZE_LENGTH
if not self.buffer[idx:self.FIXED_DATA_LENGTH] == self.WPS_OUI + self.WPS_OUI_TYPE:
raise InvalidWPSInformationElement("Invalid WPS information element id.")
@staticmethod
def get_config_methods_string(data):
"""Returns a string with the WPS configuration methods based on the data parameter."""
config_methods_list = list()
config_method_value = struct.unpack("!H", data)[0]
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_USB:
config_methods_list.append("USB")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_ETHERNET:
config_methods_list.append("Ethernet")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_LABEL:
config_methods_list.append("Label")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_DISPLAY:
config_methods_list.append("Display")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_EXTERNAL_NFC_TOKEN:
config_methods_list.append("External NFC Token")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_INTEGRATED_NFC_TOKEN:
config_methods_list.append("Integrated NFC Token")
        if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_NFC_INTERFACE:
config_methods_list.append("NFC Interface")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_PUSH_BUTTON:
config_methods_list.append("Push Button")
if config_method_value & WPSConfigurationMethods.CONFIG_METHOD_KEYPAD:
config_methods_list.append("Keypad")
return bytes(", ".join(config_methods_list), 'ascii')
def get_version_string(self, data):
"""Returns a string with the WPS version based on the data parameter."""
value = "%02X" % struct.unpack("B", data)[0]
return bytes("%s.%s" % (value[0], value[1]), 'ascii')
def get_setup_state_string(self, data):
"""Returns a string with the WPS version based on the data parameter."""
value = struct.unpack("B", data)[0]
if value == 1:
return b"Not-Configured"
elif value == 2:
return b"Configured"
else:
return b"Invalid Value"
def get_uuid_string(self, data):
"""Returns a string with the WPS UUID based on the data parameter."""
uuid = str()
for char in data:
uuid += "%02X" % char
return bytes(uuid, 'ascii')
@staticmethod
def get_primary_device_type_string(data):
"""Returns a string with the WPS primary device type based on the data parameter."""
primary_device_type = bytes()
category = struct.unpack("!H", data[:2])[0]
# subcategory = struct.unpack("!H", data[6:8])[0]
if category == 1:
primary_device_type = b"Computer"
elif category == 2:
primary_device_type = b"Input Device"
elif category == 3:
primary_device_type = b"Printers, Scanners, Faxes and Copiers"
elif category == 4:
primary_device_type = b"Camera"
elif category == 5:
primary_device_type = b"Storage"
elif category == 6:
primary_device_type = b"Network Infrastructure"
elif category == 7:
primary_device_type = b"Displays"
elif category == 8:
primary_device_type = b"Multimedia Devices"
elif category == 9:
primary_device_type = b"Gaming Devices"
elif category == 10:
primary_device_type = b"Telephone"
return primary_device_type
def __process_buffer__(self):
"""
Process data buffer, walkthrough all elements to verify the buffer boundaries and populate the __elements__
attribute.
"""
index = 0
buff = self.buffer[self.FIXED_DATA_LENGTH:]
while index < len(buff):
if not len(buff[index:]) > self.TLV_ID_LENGTH + self.TLV_SIZE_LENGTH:
raise InvalidWPSInformationElement("TLV invalid data.")
tlv_id = struct.unpack("!H", buff[index:index + self.TLV_ID_LENGTH])[0]
index += self.TLV_ID_LENGTH
tlv_size = struct.unpack("!H", buff[index:index + self.TLV_SIZE_LENGTH])[0]
index += self.TLV_SIZE_LENGTH
tlv_name = WPSElements.get_element_key(tlv_id)
tlv_data = buff[index:index + tlv_size]
if tlv_name:
if tlv_id == WPSElements.ID_CONFIG_METHODS and tlv_size == WPSElements.ID_CONFIG_METHODS_SIZE:
self.__elements__[tlv_name] = self.get_config_methods_string(tlv_data)
elif tlv_id == WPSElements.ID_VERSION and tlv_size == WPSElements.ID_VERSION_SIZE:
self.__elements__[tlv_name] = self.get_version_string(tlv_data)
elif tlv_id == WPSElements.ID_WIFI_PROTECTED_SETUP_STATE and \
tlv_size == WPSElements.ID_WIFI_PROTECTED_SETUP_STATE_SIZE:
self.__elements__[tlv_name] = self.get_setup_state_string(tlv_data)
elif (tlv_id == WPSElements.ID_UUID_E or tlv_id == WPSElements.ID_UUID_R) and \
tlv_size == WPSElements.ID_UUID_SIZE:
self.__elements__[tlv_name] = self.get_uuid_string(tlv_data)
elif tlv_id == WPSElements.ID_PRIMARY_DEVICE_TYPE and \
tlv_size == WPSElements.ID_PRIMARY_DEVICE_TYPE_SIZE:
self.__elements__[tlv_name] = self.get_primary_device_type_string(tlv_data)
else:
self.__elements__[tlv_name] = tlv_data
            index += tlv_size
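# Hedged usage sketch (not part of the original module): parsing a WPS IE
# taken from a captured beacon frame; `buff` is assumed to hold the raw
# information element bytes, starting at the 0xDD tag.
#   ie = WPSInformationElement(buff)
#   for name, value in ie.get_elements():
#       print(name, value)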
/highcharts_core-1.3.7.tar.gz/highcharts_core-1.3.7/docs/api/_other_convenience_methods.rst
.. method:: copy(self, other, overwrite = True, **kwargs)
:noindex:
Copy the properties from ``self`` to ``other``.
:param other: The target instance to which the properties of this instance should
be copied.
:type other: :class:`HighchartsMeta`
:param overwrite: if ``True``, properties in ``other`` that are already set will
be overwritten by their counterparts in ``self``. Defaults to ``True``.
:type overwrite: :class:`bool <python:bool>`
  :param kwargs: Additional keyword arguments. Some special descendants of
:class:`HighchartsMeta` may have special implementations of this method which
rely on additional keyword arguments.
:returns: A mutated version of ``other`` with new property values
:raises HighchartsValueError: if ``other`` is not the same class as (or subclass of)
``self``
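  .. code-block:: python

    # Hedged usage sketch: ``source`` and ``target`` are assumed to be
    # instances of the same HighchartsMeta subclass; ``copy`` returns the
    # mutated ``target``.
    updated = source.copy(target, overwrite = True)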
/tfmc_machine_parts_pp-0.2.1.tar.gz/tfmc_machine_parts_pp-0.2.1/tfmc/machine_parts_pp.py
__version__ = '0.2.1'
def get_parts_csv_file_name():
'''
Returns the absolute path to the csv file containing the machine parts
    and the part-of relations.
'''
return _get_local_fname('parts.csv')
def get_dissimilarities_csv_file_name():
'''
Returns the absolute path to the csv file containing the machine part
dissimilarities.
'''
return _get_local_fname('dissimilarities.csv')
def get_all_machine_parts():
'''
Returns all machine parts in a dict with items on the form
(parent_id, (child_id_1,...,child_id_n)).
That is, for every vertex id, the dict maps to the list of child vertices.
All identifiers are integers.
'''
ccs = get_machine_part_connected_components()
full = dict(ccs[0])
for cc in ccs[1:]:
full.update(cc)
return full
def get_all_dissimilarities():
'''
    Returns a symmetric numpy array of dissimilarity coefficients between machine parts.
All dissimilarities are in the range [0,1].
'''
import numpy as np
fname = _get_local_fname('dissimilarities.csv')
N = len(get_all_machine_parts())
dists = np.zeros((N,N), dtype=float)
with open(fname, 'r') as inf:
for line in inf.readlines():
a,b,d = line.split(',')
a,b,d = [int(a),int(b),float(d)]
dists[a,b] = d
dists[b,a] = d
return dists
def get_machine_part_connected_components(spec=None):
'''
    Returns all machine parts in a list of dicts, where each dict has items on the form
(parent_id, (child_id_1,...,child_id_n)).
That is, for every vertex id, the dict maps to the list of child vertices.
All identifiers are integers.
Each dict constitutes a connected component of machine parts, and the union of the
dicts is equivalent to the graph returned by get_all_machine_parts().
If no spec is given, then all connected components are returned.
If spec is an iterable of integers in the range {0,..,7}, then the specified
connected components are returned.
If spec is an integer in the range {0,..,7}, then this particular connected component
is returned.
'''
import json
with open(_get_local_fname('machine_parts.json'), 'r') as inf:
data = json.load(inf)
ccs = [_complete_dict(_int_dict(cc)) for cc in data['ccs']]
if spec is None:
return ccs
try:
return [ccs[i] for i in spec]
except TypeError as e:
# Not iterable --- hoping for an integer.
return ccs[spec]
def planted_partition(base,n,mu,var):
'''
Generates a planted partition based on the cc spec with multiplicity n+1.
base - base graph to multiplicate, obtained from
get_machine_part_connected_components(...) or random_induced_subgraph(...)
    n - number of copies to make
    mu - mean of the applied dissimilarity noise
    var - variance of the applied dissimilarity noise
Returns X,D,PP where
X - a dict-graph of all the vertices
D - A numpy dissimilarity array
PP - A list of lists, each list a collection of copy-equivalent elements
'''
C0 = _complete_dict(base)
X,D = _gen_rebased_dissim_space(C0)
XX,DD,PP,_ = _gen_planted_partition(X,D,n,mu,var)
return XX,DD,PP
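# Hedged usage sketch (not part of the original API): building a small
# planted-partition benchmark from one connected component.
#   cc = get_machine_part_connected_components(0)
#   X, D, PP = planted_partition(cc, n=2, mu=0.1, var=0.05)
#   # each planted cluster holds n + 1 copy-equivalent elements
#   assert all(len(p) == 3 for p in PP)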
class MinDegNotSatisfiableError(Exception):
'''
Exception thrown by random_induced_subgraph if the minimum degree
requirement cannot be satisfied.
'''
def __init__(self,minDeg,attempts):
self.minDeg = minDeg
self.attempts = attempts
Exception.__init__(self,'minDeg >= %1.3f not achieved in %d attempts.' % \
(minDeg,attempts))
def random_induced_subgraph(ccs,m,minDeg=0,attempts=1000):
'''
Produces a graph by making a draw of m random vertices from ccs,
and generating the induced subgraph based on the transitive closure of ccs.
If minDeg is specified, successive draws will be done until the average in/out
degree of the induced subgraph is minDeg. If no such graph is found after
'attempts' attempts, the method exits with a
fmcti.machine_parts_pp.MinDegNotSatisfiableError
ccs - the graph to sample from
m - the size of the induced subgraph vertex set
minDeg - minimal acceptable in-out degree [defaults to zero]
attempts - the maximum number of attempts to do before giving up [defaults to 1000]
Returns a graph of m vertices of minimum in/out degree minDeg.
'''
import numpy as np
ccs = _transitive_closure(ccs)
res = _draw_subgraph(ccs,m)
cnt = 1
while np.mean([len(res[x]) for x in res]) < minDeg:
if cnt >= attempts:
raise MinDegNotSatisfiableError(minDeg,attempts)
cnt += 1
res = _draw_subgraph(ccs,m)
return res
def clustering_to_cluster_index_mapping(clusters):
'''
Produces a mapping from all indices in the base space
to the cluster index of the clusters.
clusters - list of clusters
Returns a dict with elements ( x , index of cluster containing x )
'''
mapping = {}
for clstId in range(len(clusters)):
for x in clusters[clstId]:
mapping[x] = clstId
return mapping
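# Hedged sketch: behaviour of clustering_to_cluster_index_mapping(), e.g.
#   clustering_to_cluster_index_mapping([[0, 2], [1]]) == {0: 0, 2: 0, 1: 1}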
def _draw_subgraph(ccs,m):
'''
Generates a random induced subgraph of ccs of m vertices.
ccs - complete graph
m - size of induced subgraph
'''
import numpy as np
import numpy.random as npr
assert len(ccs) >= m
ids = npr.permutation(list(ccs))[:m]
return {i:[x for x in ccs[i] if x in ids] for i in ids}
def _transitive_closure(graph):
'''
Modifies the passed graph by adding relations to achieve
the transitive closure of graph.
graph - The dict graph to compyte the transitive closure of
Returns the passed graph object
'''
visited = set()
X = list(graph)
for x in graph:
graph[x] = _descendants_of(graph,x,X,visited)
return graph
def _descendants_of(graph, x, X, visited):
'''
Part of the _transitive_closure method.
'''
if x in visited:
return graph[x]
visited.add(x)
all_decs = set(graph[x])
for y in list(graph[x]):
decs = _descendants_of(graph,y,X,visited)
all_decs.update(decs)
graph[x] = sorted(all_decs)
return graph[x]
def _int_dict(parts):
'''
Returns a dict with keys converted to ints.
'''
return {int(x):parts[x] for x in parts}
def _complete_dict(parts):
'''
Ensures that the key set matches the full set of vertices in the graph.
Note: the function modifies the passed dict.
'''
    # Only need to possibly add child identifiers
ids = set.union(*[set(y) for _,y in parts.items()])
for i in ids:
if not i in parts:
parts[i] = []
return parts
def _get_local_fname(short_name):
'''
Produces an installation independent path to the file short_name, assuming the
file is containeed in this module.
'''
import os.path
import sys
module_filename = sys.modules[__name__].__file__
module_dirname = os.path.dirname(module_filename)
return os.path.join(module_dirname, short_name)
def _gen_rebased_dissim_space(parts):
'''
Based on the passed parts, the method rebases the part indices and produces
a dissimilarity matrix with corresponding dissimilarities.
Returns (parts,dissim-matrix)
'''
import numpy as np
rename,inv_rename = _rebase_map(parts)
dissims = get_all_dissimilarities()
N = len(parts)
res_parts = {rename(x) : [rename(y) for y in parts[x]] for x in parts}
res_diss = np.zeros((N,N), dtype=float)
for i in np.arange(N):
ii = inv_rename(i)
for j in np.arange(i+1,N):
jj = inv_rename(j)
res_diss[i,j] = dissims[ii,jj]
res_diss[j,i] = dissims[jj,ii]
return (res_parts, res_diss)
def _rebase_map(parts):
'''
    Use this method to obtain a renaming of part type identifiers so that
    they make up a contiguous sequence of integers starting at 0.
    Note that the rename function is determined stochastically, so the
_rebase_map function is not deterministic.
parts - one single dict of parts
Returns two functions (rename,inv_rename) that map int -> int.
rename is the rename function, and inv_rename is the inverse of rename.
'''
import numpy.random as npr
ids = list(npr.permutation(list(parts)))
rename = lambda x : ids.index(x)
inv_rename = lambda y : ids[y]
return (rename,inv_rename)
def _gen_planted_partition(parts,dissims,M,mu,var,no_rename=False):
'''
Generates a planted partition over the indices in parts by
providing M additional copies of parts.
parts - rebased part set
dissims - rebased dissimilarities
M - the required number of additional copies of parts
mu - the mean of the Gaussian noise added to dissimilarities
var - the variance of the Gaussian noise added to dissimilarities
no_rename - If True, then the planted partitions will be on the form
[0,n,2n],[1,n+1,2n+1],... where n is the number of elements
in parts. This is mainly for testing and debugging purposes.
Returns (all_parts, all_dissims,PP,(rename, inv_name))
where all_parts is a dict defining all elements and relations in the
proliferated dataset,
all_dissims specifies all dissimilarities, PP is the list of
planted partitions (that is, a list of lists, where each nested list
is a set of copy-paste vertex ids), and (rename,inv_name) are the rename
and inverse rename functions used to re-label the vertices.
Notice that the returned problem set is (stochastically) re-based prior to
returning, to avoid trivial correlation rules for the elements in the planted
partition.
'''
import numpy as np
n = len(parts)
N = n*(M+1)
new_parts = dict(parts)
for i in range(M):
offset = (i+1)*n
offset_parts = {(x+offset):[y+offset for y in parts[x]] for x in parts}
new_parts.update(offset_parts)
assert len(new_parts) == N
# We construct the dissimilarities block-wise.
# Diagonal blocks are dissims, and off-diagonal
# blocks are perturbed dissims. Remember that the blocks array
# must be transpose-symmetric (symmetric elements must be the transpose
# of each other)
blocks = [[None]*(M+1) for _ in np.arange(M+1)]
for i in np.arange(M+1):
blocks[i][i] = dissims
for j in np.arange(i+1,M+1):
blocks[i][j] = _perturbe(dissims,mu,var)
blocks[j][i] = blocks[i][j].T
new_dissims = np.block(blocks)
# Planted partitions are now sequential
PP = [np.arange(i, n*(M+1), n) for i in np.arange(n)]
# Sanity check...
assert np.sum([len(p) for p in PP]) == N
# And now rebase all to re-name vertices
rename,inv_name = _rebase_map(new_parts)
if no_rename:
rename = lambda x : x
inv_name = rename
ren_parts = {rename(x):[rename(y) for y in new_parts[x]] for x in new_parts}
ren_dissims = np.zeros_like(new_dissims)
for i in np.arange(N):
for j in np.arange(i+1,N):
ren_dissims[i,j] = new_dissims[inv_name(i),inv_name(j)]
ren_dissims[j,i] = ren_dissims[i,j]
ren_pp = [[rename(x) for x in p] for p in PP]
return ren_parts, ren_dissims, ren_pp, (rename, inv_name)
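# Illustrative sketch (not part of the original module), using hypothetical
# toy data: two elements plus M=1 extra copy gives N=4 vertices.
#
#   parts = {0: [1], 1: []}
#   dissims = np.array([[0.0, 0.3], [0.3, 0.0]])
#   all_parts, all_diss, pp, _ = _gen_planted_partition(
#       parts, dissims, M=1, mu=0.0, var=0.01, no_rename=True)
#   # with no_rename=True the planted partitions are [[0, 2], [1, 3]]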
def _perturbe(dists,mu,var):
'''
Perturbs the dissimilarities with Gaussian noise (note that var is passed to
numpy.random.normal as the scale parameter, i.e. the standard deviation),
using rejection sampling until all perturbed values are in the range [0,1].
'''
import numpy as np
from numpy.random import normal as N
assert np.all((0 <= dists) & (dists <= 1))
result = np.array(dists, dtype=float)
ok = np.full(dists.shape, False, dtype=bool)
while not np.all(ok):
result[~ok] = dists[~ok] + N(mu,var,size=np.sum(~ok))
ok = (0.0 <= result) & (result <= 1.0)
return result | PypiClean |
/DjangoDjangoAppCenter-0.0.11-py3-none-any.whl/DjangoAppCenter/simpleui/static/admin/simpleui-x/elementui/utils/dom.js | 'use strict';
exports.__esModule = true;
exports.isInContainer = exports.getScrollContainer = exports.isScroll = exports.getStyle = exports.once = exports.off = exports.on = undefined;
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {
return typeof obj;
} : function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
}; /* istanbul ignore next */
exports.hasClass = hasClass;
exports.addClass = addClass;
exports.removeClass = removeClass;
exports.setStyle = setStyle;
var _vue = require('vue');
var _vue2 = _interopRequireDefault(_vue);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {default: obj};
}
var isServer = _vue2.default.prototype.$isServer;
var SPECIAL_CHARS_REGEXP = /([\:\-\_]+(.))/g;
var MOZ_HACK_REGEXP = /^moz([A-Z])/;
var ieVersion = isServer ? 0 : Number(document.documentMode);
/* istanbul ignore next */
var trim = function trim(string) {
return (string || '').replace(/^[\s\uFEFF]+|[\s\uFEFF]+$/g, '');
};
/* istanbul ignore next */
var camelCase = function camelCase(name) {
return name.replace(SPECIAL_CHARS_REGEXP, function (_, separator, letter, offset) {
return offset ? letter.toUpperCase() : letter;
}).replace(MOZ_HACK_REGEXP, 'Moz$1');
};
/* istanbul ignore next */
var on = exports.on = function () {
if (!isServer && document.addEventListener) {
return function (element, event, handler) {
if (element && event && handler) {
element.addEventListener(event, handler, false);
}
};
} else {
return function (element, event, handler) {
if (element && event && handler) {
element.attachEvent('on' + event, handler);
}
};
}
}();
/* istanbul ignore next */
var off = exports.off = function () {
if (!isServer && document.removeEventListener) {
return function (element, event, handler) {
if (element && event) {
element.removeEventListener(event, handler, false);
}
};
} else {
return function (element, event, handler) {
if (element && event) {
element.detachEvent('on' + event, handler);
}
};
}
}();
/* istanbul ignore next */
var once = exports.once = function once(el, event, fn) {
var listener = function listener() {
if (fn) {
fn.apply(this, arguments);
}
off(el, event, listener);
};
on(el, event, listener);
};
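/* Illustrative usage (not part of the original module); assumes a browser
   environment where `el` is an existing DOM element:
     once(el, 'click', function () {
       // runs at most once, then unbinds itself via off()
     });
*/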
/* istanbul ignore next */
function hasClass(el, cls) {
if (!el || !cls) return false;
if (cls.indexOf(' ') !== -1) throw new Error('className should not contain space.');
if (el.classList) {
return el.classList.contains(cls);
} else {
return (' ' + el.className + ' ').indexOf(' ' + cls + ' ') > -1;
}
}
/* istanbul ignore next */
function addClass(el, cls) {
if (!el) return;
var curClass = el.className;
var classes = (cls || '').split(' ');
for (var i = 0, j = classes.length; i < j; i++) {
var clsName = classes[i];
if (!clsName) continue;
if (el.classList) {
el.classList.add(clsName);
} else if (!hasClass(el, clsName)) {
curClass += ' ' + clsName;
}
}
if (!el.classList) {
el.className = curClass;
}
}
/* istanbul ignore next */
function removeClass(el, cls) {
if (!el || !cls) return;
var classes = cls.split(' ');
var curClass = ' ' + el.className + ' ';
for (var i = 0, j = classes.length; i < j; i++) {
var clsName = classes[i];
if (!clsName) continue;
if (el.classList) {
el.classList.remove(clsName);
} else if (hasClass(el, clsName)) {
curClass = curClass.replace(' ' + clsName + ' ', ' ');
}
}
if (!el.classList) {
el.className = trim(curClass);
}
}
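/* Illustrative usage (not part of the original module); `el` is assumed to be
   an existing DOM element:
     addClass(el, 'is-active highlighted');  // space-separated names allowed
     removeClass(el, 'highlighted');
     hasClass(el, 'is-active');              // -> true
*/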
/* istanbul ignore next */
var getStyle = exports.getStyle = ieVersion < 9 ? function (element, styleName) {
if (isServer) return;
if (!element || !styleName) return null;
styleName = camelCase(styleName);
if (styleName === 'float') {
styleName = 'styleFloat';
}
try {
switch (styleName) {
case 'opacity':
try {
return element.filters.item('alpha').opacity / 100;
} catch (e) {
return 1.0;
}
default:
// parentheses added: prefer the inline style, fall back to currentStyle
return element.style[styleName] || (element.currentStyle ? element.currentStyle[styleName] : null);
}
} catch (e) {
return element.style[styleName];
}
} : function (element, styleName) {
if (isServer) return;
if (!element || !styleName) return null;
styleName = camelCase(styleName);
if (styleName === 'float') {
styleName = 'cssFloat';
}
try {
var computed = document.defaultView.getComputedStyle(element, '');
// parentheses added: prefer the inline style, fall back to the computed style
return element.style[styleName] || (computed ? computed[styleName] : null);
} catch (e) {
return element.style[styleName];
}
};
/* istanbul ignore next */
function setStyle(element, styleName, value) {
if (!element || !styleName) return;
if ((typeof styleName === 'undefined' ? 'undefined' : _typeof(styleName)) === 'object') {
for (var prop in styleName) {
if (styleName.hasOwnProperty(prop)) {
setStyle(element, prop, styleName[prop]);
}
}
} else {
styleName = camelCase(styleName);
if (styleName === 'opacity' && ieVersion < 9) {
element.style.filter = isNaN(value) ? '' : 'alpha(opacity=' + value * 100 + ')';
} else {
element.style[styleName] = value;
}
}
}
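/* Illustrative usage (not part of the original module): setStyle accepts
   either a single property or an object of properties (applied recursively):
     setStyle(el, 'opacity', 0.5);
     setStyle(el, {width: '100px', cssFloat: 'left'});
*/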
var isScroll = exports.isScroll = function isScroll(el, vertical) {
if (isServer) return;
// '&&' (not '||'): with '||' the condition is always true
var determinedDirection = vertical !== null && vertical !== undefined;
var overflow = determinedDirection ? vertical ? getStyle(el, 'overflow-y') : getStyle(el, 'overflow-x') : getStyle(el, 'overflow');
return overflow.match(/(scroll|auto)/);
};
var getScrollContainer = exports.getScrollContainer = function getScrollContainer(el, vertical) {
if (isServer) return;
var parent = el;
while (parent) {
if ([window, document, document.documentElement].includes(parent)) {
return window;
}
if (isScroll(parent, vertical)) {
return parent;
}
parent = parent.parentNode;
}
return parent;
};
var isInContainer = exports.isInContainer = function isInContainer(el, container) {
if (isServer || !el || !container) return false;
var elRect = el.getBoundingClientRect();
var containerRect = void 0;
if ([window, document, document.documentElement, null, undefined].includes(container)) {
containerRect = {
top: 0,
right: window.innerWidth,
bottom: window.innerHeight,
left: 0
};
} else {
containerRect = container.getBoundingClientRect();
}
return elRect.top < containerRect.bottom && elRect.bottom > containerRect.top && elRect.right > containerRect.left && elRect.left < containerRect.right;
}; | PypiClean |
/mitmproxy_lin_customization-5.2.2.1.tar.gz/mitmproxy_lin_customization-5.2.2.1/mitmproxy/platform/windows.py | import contextlib
import ctypes
import ctypes.wintypes
import io
import json
import os
import re
import socket
import socketserver
import threading
import time
import typing
import click
import collections
import collections.abc
import pydivert
import pydivert.consts
REDIRECT_API_HOST = "127.0.0.1"
REDIRECT_API_PORT = 8085
##########################
# Resolver
def read(rfile: io.BufferedReader) -> typing.Any:
x = rfile.readline().strip()
return json.loads(x)
def write(data, wfile: io.BufferedWriter) -> None:
wfile.write(json.dumps(data).encode() + b"\n")
wfile.flush()
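# Illustrative note (not part of the original module): the wire format is one
# JSON document per line, e.g. write(("127.0.0.1", 52812), wfile) emits
# '["127.0.0.1", 52812]\n', which read() on the other side parses back.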
class Resolver:
sock: socket.socket
lock: threading.RLock
def __init__(self):
self.sock = None
self.lock = threading.RLock()
def setup(self):
with self.lock:
TransparentProxy.setup()
self._connect()
def _connect(self):
if self.sock:
self.sock.close()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((REDIRECT_API_HOST, REDIRECT_API_PORT))
self.wfile = self.sock.makefile('wb')
self.rfile = self.sock.makefile('rb')
write(os.getpid(), self.wfile)
def original_addr(self, csock: socket.socket):
ip, port = csock.getpeername()[:2]
ip = re.sub(r"^::ffff:(?=\d+\.\d+\.\d+\.\d+$)", "", ip)
ip = ip.split("%", 1)[0]
with self.lock:
try:
write((ip, port), self.wfile)
addr = read(self.rfile)
if addr is None:
raise RuntimeError("Cannot resolve original destination.")
return tuple(addr)
except (EOFError, socket.error):
self._connect()
return self.original_addr(csock)
class APIRequestHandler(socketserver.StreamRequestHandler):
"""
TransparentProxy API: Returns the JSON-encoded server address, port tuple
for each received JSON-encoded client address, port tuple.
"""
def handle(self):
proxifier: TransparentProxy = self.server.proxifier
try:
pid: int = read(self.rfile)
with proxifier.exempt(pid):
while True:
client = tuple(read(self.rfile))
try:
server = proxifier.client_server_map[client]
except KeyError:
server = None
write(server, self.wfile)
except (EOFError, socket.error):
pass
class APIServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
def __init__(self, proxifier, *args, **kwargs):
super().__init__(*args, **kwargs)
self.proxifier = proxifier
self.daemon_threads = True
##########################
# Windows API
# from Windows' error.h
ERROR_INSUFFICIENT_BUFFER = 0x7A
IN6_ADDR = ctypes.c_ubyte * 16
IN4_ADDR = ctypes.c_ubyte * 4
#
# IPv6
#
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa366896(v=vs.85).aspx
class MIB_TCP6ROW_OWNER_PID(ctypes.Structure):
_fields_ = [
('ucLocalAddr', IN6_ADDR),
('dwLocalScopeId', ctypes.wintypes.DWORD),
('dwLocalPort', ctypes.wintypes.DWORD),
('ucRemoteAddr', IN6_ADDR),
('dwRemoteScopeId', ctypes.wintypes.DWORD),
('dwRemotePort', ctypes.wintypes.DWORD),
('dwState', ctypes.wintypes.DWORD),
('dwOwningPid', ctypes.wintypes.DWORD),
]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa366905(v=vs.85).aspx
def MIB_TCP6TABLE_OWNER_PID(size):
class _MIB_TCP6TABLE_OWNER_PID(ctypes.Structure):
_fields_ = [
('dwNumEntries', ctypes.wintypes.DWORD),
('table', MIB_TCP6ROW_OWNER_PID * size)
]
return _MIB_TCP6TABLE_OWNER_PID()
#
# IPv4
#
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa366913(v=vs.85).aspx
class MIB_TCPROW_OWNER_PID(ctypes.Structure):
_fields_ = [
('dwState', ctypes.wintypes.DWORD),
('ucLocalAddr', IN4_ADDR),
('dwLocalPort', ctypes.wintypes.DWORD),
('ucRemoteAddr', IN4_ADDR),
('dwRemotePort', ctypes.wintypes.DWORD),
('dwOwningPid', ctypes.wintypes.DWORD),
]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa366921(v=vs.85).aspx
def MIB_TCPTABLE_OWNER_PID(size):
class _MIB_TCPTABLE_OWNER_PID(ctypes.Structure):
_fields_ = [
('dwNumEntries', ctypes.wintypes.DWORD),
('table', MIB_TCPROW_OWNER_PID * size)
]
return _MIB_TCPTABLE_OWNER_PID()
TCP_TABLE_OWNER_PID_CONNECTIONS = 4
class TcpConnectionTable(collections.abc.Mapping):
DEFAULT_TABLE_SIZE = 4096
def __init__(self):
self._tcp = MIB_TCPTABLE_OWNER_PID(self.DEFAULT_TABLE_SIZE)
self._tcp_size = ctypes.wintypes.DWORD(self.DEFAULT_TABLE_SIZE)
self._tcp6 = MIB_TCP6TABLE_OWNER_PID(self.DEFAULT_TABLE_SIZE)
self._tcp6_size = ctypes.wintypes.DWORD(self.DEFAULT_TABLE_SIZE)
self._map = {}
def __getitem__(self, item):
return self._map[item]
def __iter__(self):
return self._map.__iter__()
def __len__(self):
return self._map.__len__()
def refresh(self):
self._map = {}
self._refresh_ipv4()
self._refresh_ipv6()
def _refresh_ipv4(self):
ret = ctypes.windll.iphlpapi.GetExtendedTcpTable(
ctypes.byref(self._tcp),
ctypes.byref(self._tcp_size),
False,
socket.AF_INET,
TCP_TABLE_OWNER_PID_CONNECTIONS,
0
)
if ret == 0:
for row in self._tcp.table[:self._tcp.dwNumEntries]:
local_ip = socket.inet_ntop(socket.AF_INET, bytes(row.ucLocalAddr))
local_port = socket.htons(row.dwLocalPort)
self._map[(local_ip, local_port)] = row.dwOwningPid
elif ret == ERROR_INSUFFICIENT_BUFFER:
self._tcp = MIB_TCPTABLE_OWNER_PID(self._tcp_size.value)
# no need to update size, that's already done.
self._refresh_ipv4()
else:
raise RuntimeError("[IPv4] Unknown GetExtendedTcpTable return code: %s" % ret)
def _refresh_ipv6(self):
ret = ctypes.windll.iphlpapi.GetExtendedTcpTable(
ctypes.byref(self._tcp6),
ctypes.byref(self._tcp6_size),
False,
socket.AF_INET6,
TCP_TABLE_OWNER_PID_CONNECTIONS,
0
)
if ret == 0:
for row in self._tcp6.table[:self._tcp6.dwNumEntries]:
local_ip = socket.inet_ntop(socket.AF_INET6, bytes(row.ucLocalAddr))
local_port = socket.htons(row.dwLocalPort)
self._map[(local_ip, local_port)] = row.dwOwningPid
elif ret == ERROR_INSUFFICIENT_BUFFER:
self._tcp6 = MIB_TCP6TABLE_OWNER_PID(self._tcp6_size.value)
# no need to update size, that's already done.
self._refresh_ipv6()
else:
raise RuntimeError("[IPv6] Unknown GetExtendedTcpTable return code: %s" % ret)
def get_local_ip() -> typing.Optional[str]:
# Auto-Detect local IP. This is required as re-injecting to 127.0.0.1 does not work.
# https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
except OSError:
return None
finally:
s.close()
def get_local_ip6(reachable: str) -> typing.Optional[str]:
# The same goes for IPv6, with the added difficulty that .connect() fails if
# the target network is not reachable.
s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
try:
s.connect((reachable, 80))
return s.getsockname()[0]
except OSError:
return None
finally:
s.close()
class Redirect(threading.Thread):
daemon = True
windivert: pydivert.WinDivert
def __init__(
self,
handle: typing.Callable[[pydivert.Packet], None],
filter: str,
layer: pydivert.Layer = pydivert.Layer.NETWORK,
flags: pydivert.Flag = 0
) -> None:
self.handle = handle
self.windivert = pydivert.WinDivert(filter, layer, flags=flags)
super().__init__()
def start(self):
self.windivert.open()
super().start()
def run(self):
while True:
try:
packet = self.windivert.recv()
except WindowsError as e:
if e.winerror == 995:
return
else:
raise
else:
self.handle(packet)
def shutdown(self):
self.windivert.close()
def recv(self) -> typing.Optional[pydivert.Packet]:
"""
Convenience function that receives a packet from the WinDivert handle and handles error codes.
If the process has been shut down, None is returned.
"""
try:
return self.windivert.recv()
except WindowsError as e:
if e.winerror == 995:
return None
else:
raise
class RedirectLocal(Redirect):
trusted_pids: typing.Set[int]
def __init__(
self,
redirect_request: typing.Callable[[pydivert.Packet], None],
filter: str
) -> None:
self.tcp_connections = TcpConnectionTable()
self.trusted_pids = set()
self.redirect_request = redirect_request
super().__init__(self.handle, filter)
def handle(self, packet):
client = (packet.src_addr, packet.src_port)
if client not in self.tcp_connections:
self.tcp_connections.refresh()
# If this fails, we most likely have a connection from an external client.
# In this case, we always want to proxy the request.
pid = self.tcp_connections.get(client, None)
if pid not in self.trusted_pids:
self.redirect_request(packet)
else:
# It's not really clear why we need to recalculate the checksum here,
# but this was identified as necessary in https://github.com/mitmproxy/mitmproxy/pull/3174.
self.windivert.send(packet, recalculate_checksum=True)
TConnection = typing.Tuple[str, int]
class ClientServerMap:
"""A thread-safe LRU dict."""
connection_cache_size: typing.ClassVar[int] = 65536
def __init__(self):
self._lock = threading.Lock()
self._map = collections.OrderedDict()
def __getitem__(self, item: TConnection) -> TConnection:
with self._lock:
return self._map[item]
def __setitem__(self, key: TConnection, value: TConnection) -> None:
with self._lock:
self._map[key] = value
self._map.move_to_end(key)
while len(self._map) > self.connection_cache_size:
self._map.popitem(False)
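# Illustrative usage (not part of the original module): behaves like a bounded
# thread-safe dict keyed by (address, port) pairs, evicting the least recently
# set entries once connection_cache_size is exceeded.
#
#   m = ClientServerMap()
#   m[("10.0.0.5", 51000)] = ("93.184.216.34", 443)
#   m[("10.0.0.5", 51000)]   # -> ("93.184.216.34", 443)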
class TransparentProxy:
"""
Transparent Windows Proxy for mitmproxy based on WinDivert/PyDivert. This module can be used to
redirect both traffic that is forwarded by the host and traffic originating from the host itself.
Requires elevated (admin) privileges. Can be started separately by manually running the file.
How it works:
(1) First, we intercept all packets that match our filter.
We both consider traffic that is forwarded by the OS (WinDivert's NETWORK_FORWARD layer) as well
as traffic sent from the local machine (WinDivert's NETWORK layer). In the case of traffic from
the local machine, we need to exempt packets sent from the proxy to not create a redirect loop.
To accomplish this, we use Windows' GetExtendedTcpTable syscall and determine the source
application's PID.
For each intercepted packet, we
1. Store the source -> destination mapping (address and port)
2. Remove the packet from the network (by not reinjecting it).
3. Re-inject the packet into the local network stack, but with the destination address
changed to the proxy.
(2) Next, the proxy receives the forwarded packet, but does not know the real destination yet
(which we overwrote with the proxy's address). On Linux, we would now call
getsockopt(SO_ORIGINAL_DST). We now access the redirect module's API (see APIRequestHandler),
submit the source information and get the actual destination back (which we stored in 1.1).
(3) The proxy now establishes the upstream connection as usual.
(4) Finally, the proxy sends the response back to the client. To make it work, we need to change
the packet's source address back to the original destination (using the mapping from 1.1),
to which the client believes it is talking to.
Limitations:
- We assume that ephemeral TCP ports are not re-used for multiple connections at the same time.
The proxy will fail if an application connects to example.com and example.org from
192.168.0.42:4242 simultaneously. This could be mitigated by introducing unique "meta-addresses"
which mitmproxy sees, but this would remove the correct client info from mitmproxy.
"""
local: typing.Optional[RedirectLocal] = None
# really weird linting error here.
forward: typing.Optional[Redirect] = None # noqa
response: Redirect
icmp: Redirect
proxy_port: int
filter: str
client_server_map: ClientServerMap
def __init__(
self,
local: bool = True,
forward: bool = True,
proxy_port: int = 8080,
filter: typing.Optional[str] = "tcp.DstPort == 80 or tcp.DstPort == 443",
) -> None:
self.proxy_port = proxy_port
self.filter = (
filter
or
f"tcp.DstPort != {proxy_port} and tcp.DstPort != {REDIRECT_API_PORT} and tcp.DstPort < 49152"
)
self.ipv4_address = get_local_ip()
self.ipv6_address = get_local_ip6("2001:4860:4860::8888")
# print(f"IPv4: {self.ipv4_address}, IPv6: {self.ipv6_address}")
self.client_server_map = ClientServerMap()
self.api = APIServer(self, (REDIRECT_API_HOST, REDIRECT_API_PORT), APIRequestHandler)
self.api_thread = threading.Thread(target=self.api.serve_forever)
self.api_thread.daemon = True
if forward:
self.forward = Redirect(
self.redirect_request,
self.filter,
pydivert.Layer.NETWORK_FORWARD
)
if local:
self.local = RedirectLocal(
self.redirect_request,
self.filter
)
# The proxy server responds to the client. To the client,
# this response should look like it has been sent by the real target
self.response = Redirect(
self.redirect_response,
f"outbound and tcp.SrcPort == {proxy_port}",
)
# Block all ICMP requests (which are sent on Windows by default).
# If we don't do this, our proxy machine may send an ICMP redirect to the client,
# which instructs the client to directly connect to the real gateway
# if they are on the same network.
self.icmp = Redirect(
lambda _: None,
"icmp",
flags=pydivert.Flag.DROP
)
@classmethod
def setup(cls):
# TODO: Make sure that server can be killed cleanly. That's a bit difficult as we don't have access to
# controller.should_exit when this is called.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_unavailable = s.connect_ex((REDIRECT_API_HOST, REDIRECT_API_PORT))
if server_unavailable:
proxifier = TransparentProxy()
proxifier.start()
def start(self):
self.api_thread.start()
self.icmp.start()
self.response.start()
if self.forward:
self.forward.start()
if self.local:
self.local.start()
def shutdown(self):
if self.local:
self.local.shutdown()
if self.forward:
self.forward.shutdown()
self.response.shutdown()
self.icmp.shutdown()
self.api.shutdown()
def redirect_request(self, packet: pydivert.Packet):
# print(" * Redirect client -> server to proxy")
# print(f"{packet.src_addr}:{packet.src_port} -> {packet.dst_addr}:{packet.dst_port}")
client = (packet.src_addr, packet.src_port)
self.client_server_map[client] = (packet.dst_addr, packet.dst_port)
# We do need to inject to an external IP here, 127.0.0.1 does not work.
if packet.address_family == socket.AF_INET:
assert self.ipv4_address
packet.dst_addr = self.ipv4_address
elif packet.address_family == socket.AF_INET6:
if not self.ipv6_address:
self.ipv6_address = get_local_ip6(packet.src_addr)
assert self.ipv6_address
packet.dst_addr = self.ipv6_address
else:
raise RuntimeError("Unknown address family")
packet.dst_port = self.proxy_port
packet.direction = pydivert.consts.Direction.INBOUND
# We need a handle on the NETWORK layer. the local handle is not guaranteed to exist,
# so we use the response handle.
self.response.windivert.send(packet)
def redirect_response(self, packet: pydivert.Packet):
"""
If the proxy responds to the client, let the client believe the target server sent the
packets.
"""
# print(" * Adjust proxy -> client")
client = (packet.dst_addr, packet.dst_port)
try:
packet.src_addr, packet.src_port = self.client_server_map[client]
except KeyError:
print(f"Warning: Previously unseen connection from proxy to {client}")
else:
packet.recalculate_checksums()
self.response.windivert.send(packet, recalculate_checksum=False)
@contextlib.contextmanager
def exempt(self, pid: int):
if self.local:
self.local.trusted_pids.add(pid)
try:
yield
finally:
if self.local:
self.local.trusted_pids.remove(pid)
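# Illustrative usage (not part of the original module): temporarily trust a
# PID so that its own outbound connections are not redirected back to the
# proxy (this is what APIRequestHandler does for each connected client).
#
#   with proxifier.exempt(pid):
#       ...  # connections opened by `pid` bypass the local redirect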
@click.group()
def cli():
pass
@cli.command()
@click.option("--local/--no-local", default=True,
help="Redirect the host's own traffic.")
@click.option("--forward/--no-forward", default=True,
help="Redirect traffic that's forwarded by the host.")
@click.option("--filter", type=str, metavar="WINDIVERT_FILTER",
help="Custom WinDivert interception rule.")
@click.option("-p", "--proxy-port", type=int, metavar="8080", default=8080,
help="The port mitmproxy is listening on.")
def redirect(**options):
"""Redirect flows to mitmproxy."""
proxy = TransparentProxy(**options)
proxy.start()
print(f" * Redirection active.")
print(f" Filter: {proxy.request_filter}")
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
print(" * Shutting down...")
proxy.shutdown()
print(" * Shut down.")
@cli.command()
def connections():
"""List all TCP connections and the associated PIDs."""
connections = TcpConnectionTable()
connections.refresh()
for (ip, port), pid in connections.items():
print(f"{ip}:{port} -> {pid}")
if __name__ == "__main__":
cli() | PypiClean |
/secscanner2junit-0.1.12.tar.gz/secscanner2junit-0.1.12/README.md | # SecScanner2JUnit
[![PyPI version](https://badge.fury.io/py/secscanner2junit.svg)](https://badge.fury.io/py/secscanner2junit)
[![Downloads](https://pepy.tech/badge/secscanner2junit)](https://pepy.tech/project/secscanner2junit)
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/angrymeir/SecScanner2JUnit)
GitLab offers [security scanning and visualization](https://docs.gitlab.com/ee/user/application_security/) directly via and on their platform.
One nice feature is direct insights on merge requests. However, this feature is only available with the Ultimate tier. To also use this feature on the free tier, one can build around it by taking the security tool output, converting it to the JUnit format, and uploading it as a JUnit report.
To summarize, this tool is for you if:
- You use GitLab's free tier
- You use GitLab's [security templates](https://docs.gitlab.com/ee/user/application_security/)
- You want to easily access security tool output in merge requests
If you are on GitLab's Ultimate tier, just use their tooling! No need to mess up your `.gitlab-ci.yml` file. :smile:
## Which scanning types are supported?
All scanning types available under the free tier:
- Secret Scanning
- Static Application Security Testing (SAST)
- Container Scanning
- Infrastructure as Code Scanning
## How to use?
Procedure:
1. Overwrite the existing job so that the report can be used by future jobs.
2. Convert the report
3. Upload the converted report as a JUnit report
**Example for Secret Scanning**
This example can be used as is.
```yaml
stages:
- test
- convert
include:
  - template: Security/Secret-Detection.gitlab-ci.yml
secret_detection:
artifacts:
paths:
- gl-secret-detection-report.json
when: always
secret_convert:
stage: convert
dependencies:
- secret_detection
script:
- pip3 install SecScanner2JUnit
- ss2ju secrets gl-secret-detection-report.json gl-secret-detection-report.xml
artifacts:
reports:
junit: gl-secret-detection-report.xml
```
**Example for SAST**
Since GitLab decides dynamically which scanners to use depending on project languages, it makes sense to first perform a test run that only includes the template. This way one can see which jobs are executed and then overwrite them.
```yaml
stages:
- test
- convert
include:
  - template: Security/SAST.gitlab-ci.yml
semgrep-sast:
after_script:
- cp gl-sast-report.json gl-sast-semgrep-report.json
artifacts:
paths:
- gl-sast-semgrep-report.json
when: always
brakeman-sast:
after_script:
- cp gl-sast-report.json gl-sast-brakeman-report.json
artifacts:
paths:
- gl-sast-brakeman-report.json
when: always
semgrep-sast-convert:
stage: convert
dependencies:
- semgrep-sast
script:
- pip3 install SecScanner2JUnit
- ss2ju sast gl-sast-semgrep-report.json gl-sast-semgrep-report.xml
artifacts:
reports:
junit: gl-sast-semgrep-report.xml
brakeman-sast-convert:
stage: convert
dependencies:
- brakeman-sast
script:
- pip3 install SecScanner2JUnit
- ss2ju sast gl-sast-brakeman-report.json gl-sast-brakeman-report.xml
artifacts:
reports:
junit: gl-sast-brakeman-report.xml
```
**Example for Container Scanning**
```yml
include:
  - template: Jobs/Build.gitlab-ci.yml #Build and push the container image
  - template: Security/Container-Scanning.gitlab-ci.yml #Scan the built image
container_scanning:
artifacts:
paths:
- gl-container-scanning-report.json
when: always
container_scanning-convert:
stage: convert
dependencies:
- container_scanning
script:
- pip3 install SecScanner2JUnit
- ss2ju container_scanning gl-container-scanning-report.json gl-container-scanning-report.xml
artifacts:
reports:
junit: gl-container-scanning-report.xml
```
**Suppression**
You can provide a file with suppressions, which allows you to ignore some vulnerabilities.
Create a file `ss2ju-config.yml`, e.g. in the `.gitlab` directory, which includes:
```yml
sast:
suppressions:
- type: "cwe"
value: "2555"
- type: "find_sec_bugs_type"
value: "SPRING_ENDPOINT"
```
Now you can modify the execution command as follows:
```bash
- ss2ju sast gl-sast-semgrep-report.json gl-sast-semgrep-report.xml .gitlab/ss2ju-config.yml
```
## Future Plans
- [ ] Implement IaC Scanning
| PypiClean |
/dsin100daysv36-6.0.1.tar.gz/dsin100daysv36-6.0.1/notebook/static/components/MathJax/extensions/a11y/mathmaps/es/symbols/math_symbols.js | [{"locale":"es"},{"key":"0021","mappings":{"default":{"default":"factorial"}},"category":"Po"},{"key":"0022","mappings":{"default":{"default":"comillas"}},"category":"Po"},{"key":"0023","mappings":{"default":{"default":"almuhadilla","defaultMP":"signo de número"}},"category":"Po"},{"key":"0024","mappings":{"default":{"default":"dólar"}},"category":"Sc"},{"key":"0025","mappings":{"default":{"default":"porciento"}},"category":"Po"},{"key":"0026","mappings":{"default":{"default":"ampersand"}},"category":"Po"},{"key":"0027","mappings":{"default":{"default":"prima","defaultMP":"apóstrofo"}},"category":"Po"},{"key":"002A","mappings":{"default":{"default":"por"}},"category":"Po"},{"key":"002B","mappings":{"default":{"default":"más"}},"category":"Sm"},{"key":"002C","mappings":{"default":{"default":"coma"}},"category":"Po"},{"key":"002D","mappings":{"default":{"default":"menos"}},"category":"Pd"},{"key":"002E","mappings":{"default":{"default":"punto"}},"category":"Po"},{"key":"002F","mappings":{"default":{"default":"barra oblicua"}},"category":"Po"},{"key":"003A","mappings":{"default":{"default":"dos puntos"}},"category":"Po"},{"key":"003B","mappings":{"default":{"default":"punto y coma"}},"category":"Po"},{"key":"003C","mappings":{"default":{"default":"menor que"}},"category":"Sm"},{"key":"003D","mappings":{"default":{"default":"igual"}},"category":"Sm"},{"key":"003E","mappings":{"default":{"default":"mayor que"}},"category":"Sm"},{"key":"003F","mappings":{"default":{"default":"interrogación derecho","defaultMP":"cierra interrogación"}},"category":"Po"},{"key":"0040","mappings":{"default":{"default":"arroba"}},"category":"Po"},{"key":"005C","mappings":{"default":{"default":"barra inversa"}},"category":"Po"},{"key":"005E","mappings":{"default":{"default":"circunflejo"}},"category":"Sk"},{"key":"005F","mappings":{"default":{"default":"subrayado"}},"category":"Pc"},{"key":"0060","mappings":{"default":{"default":"acento grave"}},"category":"Sk"},{"key":"007C","mappings":{"default":{"default":"barra vertical"}},"category":"Sm"},{"key":"007E","mappings":{"default":{"default":"tilde"}},"category":"Sm"},{"key":"00A1","mappings":{"default":{"default":"admiración izquierdo","defaultMP":"abre admiración"}},"category":"Po"},{"key":"00A2","mappings":{"default":{"default":"centavo"}},"category":"Sc"},{"key":"00A3","mappings":{"default":{"default":"libra esterlina"}},"category":"Sc"},{"key":"00A4","mappings":{"default":{"default":"moneda"}},"category":"Sc"},{"key":"00A5","mappings":{"default":{"default":"yen"}},"category":"Sc"},{"key":"00A6","mappings":{"default":{"default":"barra vertical partida"}},"category":"So"},{"key":"00A7","mappings":{"default":{"default":"sección"}},"category":"Po"},{"key":"00A8","mappings":{"default":{"default":"diéresis"}},"category":"Sk"},{"key":"00A9","mappings":{"default":{"default":"copyright"}},"category":"So"},{"key":"00AA","mappings":{"default":{"default":"ordinal femenino"}},"category":"Lo"},{"key":"00AB","mappings":{"default":{"default":"comillas angulares izquierda","defaultMP":"abre comillas 
angulares"}},"category":"Pi"},{"key":"00AC","mappings":{"default":{"default":"negación"}},"category":"Sm"},{"key":"00AE","mappings":{"default":{"default":"registrado"}},"category":"So"},{"key":"00AF","mappings":{"default":{"default":"barra"}},"category":"Sk"},{"key":"00B0","mappings":{"default":{"default":"grado"}},"category":"So"},{"key":"00B1","mappings":{"default":{"default":"más menos"}},"category":"Sm"},{"key":"00B4","mappings":{"default":{"default":"acento agudo"}},"category":"Sk"},{"key":"00B5","mappings":{"default":{"default":"micro"}},"category":"Ll"},{"key":"00B6","mappings":{"default":{"default":"párrafo"}},"category":"Po"},{"key":"00B7","mappings":{"default":{"default":"punto medio"}},"category":"Po"},{"key":"00B8","mappings":{"default":{"default":"cedilla"}},"category":"Sk"},{"key":"00BA","mappings":{"default":{"default":"ordinal masculino"}},"category":"Lo"},{"key":"00BB","mappings":{"default":{"default":"comillas angulares derecha","defaultMP":"cierra comillas angulares"}},"category":"Pf"},{"key":"00BF","mappings":{"default":{"default":"interrogación izquierdo","defaultMP":"abre interrogación"}},"category":"Po"},{"key":"00D7","mappings":{"default":{"default":"por"}},"category":"Sm"},{"key":"00F7","mappings":{"default":{"default":"dividido"}},"category":"Sm"},{"key":"02D8","mappings":{"default":{"default":"breve"}},"category":"Sk"},{"key":"02D9","mappings":{"default":{"default":"punto en superescrito"}},"category":"Sk"},{"key":"02DA","mappings":{"default":{"default":"anillo en superescrito"}},"category":"Sk"},{"key":"02DB","mappings":{"default":{"default":"ogonek"}},"category":"Sk"},{"key":"02DC","mappings":{"default":{"default":"tilde"}},"category":"Sk"},{"key":"02DD","mappings":{"default":{"default":"doble acento agudo"}},"category":"Sk"},{"key":"2010","mappings":{"default":{"default":"menos"}},"category":"Pd"},{"key":"2013","mappings":{"default":{"default":"en raya"}},"category":"Pd"},{"key":"2014","mappings":{"default":{"default":"em raya"}},"category":"Pd"},{"key":"2015","mappings":{"default":{"default":"barra horizontal"}},"category":"Pd"},{"key":"2016","mappings":{"default":{"default":"doble barra vertical"}},"category":"Po"},{"key":"2018","mappings":{"default":{"default":"comilla izquierda"}},"category":"Pi"},{"key":"2019","mappings":{"default":{"default":"comilla derecha"}},"category":"Pf"},{"key":"201A","mappings":{"default":{"default":"comilla bajo 9"}},"category":"Ps"},{"key":"201C","mappings":{"default":{"default":"doble comilla"}},"category":"Pi"},{"key":"201D","mappings":{"default":{"default":"doble comilla derecha"}},"category":"Pf"},{"key":"201E","mappings":{"default":{"default":"doble comilla bajo 9"}},"category":"Ps"},{"key":"2020","mappings":{"default":{"default":"daga"}},"category":"Po"},{"key":"2021","mappings":{"default":{"default":"doble daga"}},"category":"Po"},{"key":"2022","mappings":{"default":{"default":"bala"}},"category":"Po"},{"key":"2025","mappings":{"default":{"default":"dos puntos líder"}},"category":"Po"},{"key":"2026","mappings":{"default":{"default":"puntos suspensivos"}},"category":"Po"},{"key":"2030","mappings":{"default":{"default":"tanto por millar"}},"category":"Po"},{"key":"2031","mappings":{"default":{"default":"tanto por diez mil"}},"category":"Po"},{"key":"2032","mappings":{"default":{"default":"prima"}},"category":"Po"},{"key":"2033","mappings":{"default":{"default":"segunda"}},"category":"Po"},{"key":"2034","mappings":{"default":{"default":"tercera"}},"category":"Po"},{"key":"2035","mappings":{"default":{"default":"acento 
grave"}},"category":"Po"},{"key":"2036","mappings":{"default":{"default":"doble acento grave"}},"category":"Po"},{"key":"2039","mappings":{"default":{"default":"comillas izquierda","defaultMP":"abre comillas"}},"category":"Pi"},{"key":"203A","mappings":{"default":{"default":"comillas derecha","defaultMP":"cierra comillas"}},"category":"Pf"},{"key":"203E","mappings":{"default":{"default":"barra superior"}},"category":"Po"},{"key":"2040","mappings":{"default":{"default":"ligadura"}},"category":"Pc"},{"key":"2041","mappings":{"default":{"default":"punto de inserción"}},"category":"Po"},{"key":"2043","mappings":{"default":{"default":"guión bala"}},"category":"Po"},{"key":"2044","mappings":{"default":{"default":"barra de fracción"}},"category":"Sm"},{"key":"204F","mappings":{"default":{"default":"punto y coma invertido"}},"category":"Po"},{"key":"2050","mappings":{"default":{"default":"cierre superior"}},"category":"Po"},{"key":"2057","mappings":{"default":{"default":"cuarta"}},"category":"Po"},{"key":"2200","mappings":{"default":{"default":"para todo"}},"category":"Sm"},{"key":"2201","mappings":{"default":{"default":"complementario"}},"category":"Sm"},{"key":"2202","mappings":{"default":{"default":"derivada parcial"}},"category":"Sm"},{"key":"2203","mappings":{"default":{"default":"existe"}},"category":"Sm"},{"key":"2204","mappings":{"default":{"default":"no existe"}},"category":"Sm"},{"key":"2205","mappings":{"default":{"default":"conjunto vacío"}},"category":"Sm"},{"key":"2206","mappings":{"default":{"default":"incremento"}},"category":"Sm"},{"key":"2207","mappings":{"default":{"default":"gradiente"}},"category":"Sm"},{"key":"2208","mappings":{"default":{"default":"perteneciente a"}},"category":"Sm"},{"key":"2209","mappings":{"default":{"default":"no perteneciente a"}},"category":"Sm"},{"key":"220A","mappings":{"default":{"default":"perteneciente a"}},"category":"Sm"},{"key":"220B","mappings":{"default":{"default":"comprende a"}},"category":"Sm"},{"key":"220C","mappings":{"default":{"default":"no comprende a"}},"category":"Sm"},{"key":"220D","mappings":{"default":{"default":"comprende a"}},"category":"Sm"},{"key":"220E","mappings":{"default":{"default":"fin de demostración"}},"category":"Sm"},{"key":"220F","mappings":{"default":{"default":"producto"}},"category":"Sm"},{"key":"2210","mappings":{"default":{"default":"coproducto"}},"category":"Sm"},{"key":"2211","mappings":{"default":{"default":"sumatorio"}},"category":"Sm"},{"key":"2212","mappings":{"default":{"default":"menos"}},"category":"Sm"},{"key":"2213","mappings":{"default":{"default":"menos más"}},"category":"Sm"},{"key":"2214","mappings":{"default":{"default":"punto más"}},"category":"Sm"},{"key":"2215","mappings":{"default":{"default":"barra de división"}},"category":"Sm"},{"key":"2216","mappings":{"default":{"default":"menos"}},"category":"Sm"},{"key":"2217","mappings":{"default":{"default":"asterisco"}},"category":"Sm"},{"key":"2218","mappings":{"default":{"default":"composición"}},"category":"Sm"},{"key":"2219","mappings":{"default":{"default":"bala"}},"category":"Sm"},{"key":"221A","mappings":{"default":{"default":"raíz"}},"category":"Sm"},{"key":"221B","mappings":{"default":{"default":"raíz cúbica"}},"category":"Sm"},{"key":"221C","mappings":{"default":{"default":"raíz cuarta"}},"category":"Sm"},{"key":"221D","mappings":{"default":{"default":"proporcional a"}},"category":"Sm"},{"key":"221E","mappings":{"default":{"default":"infinito"}},"category":"Sm"},{"key":"221F","mappings":{"default":{"default":"ángulo 
recto"}},"category":"Sm"},{"key":"2220","mappings":{"default":{"default":"ángulo"}},"category":"Sm"},{"key":"2221","mappings":{"default":{"default":"ángulo medido"}},"category":"Sm"},{"key":"2222","mappings":{"default":{"default":"ángulo esférico"}},"category":"Sm"},{"key":"2223","mappings":{"default":{"default":"divide a"}},"category":"Sm"},{"key":"2224","mappings":{"default":{"default":"no divide a"}},"category":"Sm"},{"key":"2225","mappings":{"default":{"default":"paralela a"}},"category":"Sm"},{"key":"2226","mappings":{"default":{"default":"no paralela a"}},"category":"Sm"},{"key":"2227","mappings":{"default":{"default":"y"}},"category":"Sm"},{"key":"2228","mappings":{"default":{"default":"o"}},"category":"Sm"},{"key":"2229","mappings":{"default":{"default":"intersección"}},"category":"Sm"},{"key":"222A","mappings":{"default":{"default":"unión"}},"category":"Sm"},{"key":"222B","mappings":{"default":{"default":"integral"}},"category":"Sm"},{"key":"222C","mappings":{"default":{"default":"integral doble"}},"category":"Sm"},{"key":"222D","mappings":{"default":{"default":"integral triple"}},"category":"Sm"},{"key":"222E","mappings":{"default":{"default":"integral de contorno"}},"category":"Sm"},{"key":"222F","mappings":{"default":{"default":"integral de superficie"}},"category":"Sm"},{"key":"2230","mappings":{"default":{"default":"integral de volumen"}},"category":"Sm"},{"key":"2231","mappings":{"default":{"default":"integral de contorno en sentido horario"}},"category":"Sm"},{"key":"2232","mappings":{"default":{"default":"integral de contorno en sentido horario"}},"category":"Sm"},{"key":"2233","mappings":{"default":{"default":"integral de contorno en sentido antihorario"}},"category":"Sm"},{"key":"2234","mappings":{"default":{"default":"por lo tanto"}},"category":"Sm"},{"key":"2235","mappings":{"default":{"default":"porque"}},"category":"Sm"},{"key":"2236","mappings":{"default":{"default":"razón"}},"category":"Sm"},{"key":"2237","mappings":{"default":{"default":"proporción"}},"category":"Sm"},{"key":"2238","mappings":{"default":{"default":"punto menos"}},"category":"Sm"},{"key":"2239","mappings":{"default":{"default":"exceso"}},"category":"Sm"},{"key":"223A","mappings":{"default":{"default":"proporción geométrica"}},"category":"Sm"},{"key":"223B","mappings":{"default":{"default":"homotecia"}},"category":"Sm"},{"key":"223C","mappings":{"default":{"default":"tilde"}},"category":"Sm"},{"key":"223D","mappings":{"default":{"default":"tilde invertido"}},"category":"Sm"},{"key":"223E","mappings":{"default":{"default":"s horizontal invertida"}},"category":"Sm"},{"key":"223F","mappings":{"default":{"default":"sinusoide"}},"category":"Sm"},{"key":"2240","mappings":{"default":{"default":"producto de guirnalda"}},"category":"Sm"},{"key":"2241","mappings":{"default":{"default":"no tilde"}},"category":"Sm"},{"key":"2242","mappings":{"default":{"default":"menos tilde"}},"category":"Sm"},{"key":"2243","mappings":{"default":{"default":"asintóticamente igual a"}},"category":"Sm"},{"key":"2244","mappings":{"default":{"default":"no asintóticamente igual a"}},"category":"Sm"},{"key":"2245","mappings":{"default":{"default":"aproximadamente igual a"}},"category":"Sm"},{"key":"2246","mappings":{"default":{"default":"aproximado pero no igual a"}},"category":"Sm"},{"key":"2247","mappings":{"default":{"default":"no aproximado a"}},"category":"Sm"},{"key":"2248","mappings":{"default":{"default":"aproximado","defaultMP":"casi igual a"}},"category":"Sm"},{"key":"2249","mappings":{"default":{"default":"no es casi igual 
a"}},"category":"Sm"},{"key":"224A","mappings":{"default":{"default":"igual o casi igual a"}},"category":"Sm"},{"key":"224B","mappings":{"default":{"default":"triple tilde"}},"category":"Sm"},{"key":"224C","mappings":{"default":{"default":"todo igual a"}},"category":"Sm"},{"key":"224D","mappings":{"default":{"default":"equivalente a"}},"category":"Sm"},{"key":"224E","mappings":{"default":{"default":"geométricamente equivalente a"}},"category":"Sm"},{"key":"224F","mappings":{"default":{"default":"diferencia entre"}},"category":"Sm"},{"key":"2250","mappings":{"default":{"default":"se acerca al límite"}},"category":"Sm"},{"key":"2251","mappings":{"default":{"default":"geométricamente igual a"}},"category":"Sm"},{"key":"2252","mappings":{"default":{"default":"aproximadamente igual a o imagen de"}},"category":"Sm"},{"key":"2253","mappings":{"default":{"default":"imagen de o aproximadamente igual a"}},"category":"Sm"},{"key":"2254","mappings":{"default":{"default":"dos puntos igual"}},"category":"Sm"},{"key":"2255","mappings":{"default":{"default":"igual dos puntos"}},"category":"Sm"},{"key":"2256","mappings":{"default":{"default":"igual incluyendo anillo"}},"category":"Sm"},{"key":"2257","mappings":{"default":{"default":"igual anillo a"}},"category":"Sm"},{"key":"2258","mappings":{"default":{"default":"corresponde a"}},"category":"Sm"},{"key":"2259","mappings":{"default":{"default":"estima a"}},"category":"Sm"},{"key":"225A","mappings":{"default":{"default":"equiangular con"}},"category":"Sm"},{"key":"225B","mappings":{"default":{"default":"igual con estrella"}},"category":"Sm"},{"key":"225C","mappings":{"default":{"default":"igual con delta"}},"category":"Sm"},{"key":"225D","mappings":{"default":{"default":"igual por definición a"}},"category":"Sm"},{"key":"225E","mappings":{"default":{"default":"medido por"}},"category":"Sm"},{"key":"225F","mappings":{"default":{"default":"igual con interrogante"}},"category":"Sm"},{"key":"2260","mappings":{"default":{"default":"no es igual a","defaultMP":"distinto de"}},"category":"Sm"},{"key":"2261","mappings":{"default":{"default":"idéntico a"}},"category":"Sm"},{"key":"2262","mappings":{"default":{"default":"no es idéntico a"}},"category":"Sm"},{"key":"2263","mappings":{"default":{"default":"estrictamente equivalente a"}},"category":"Sm"},{"key":"2264","mappings":{"default":{"default":"menor o igual que"}},"category":"Sm"},{"key":"2265","mappings":{"default":{"default":"mayor o igual que"}},"category":"Sm"},{"key":"2266","mappings":{"default":{"default":"menor o igual que"}},"category":"Sm"},{"key":"2267","mappings":{"default":{"default":"mayor o igual que"}},"category":"Sm"},{"key":"2268","mappings":{"default":{"default":"estrictamente menor que"}},"category":"Sm"},{"key":"2269","mappings":{"default":{"default":"estrictamente mayor que"}},"category":"Sm"},{"key":"226A","mappings":{"default":{"default":"mucho menor que"}},"category":"Sm"},{"key":"226B","mappings":{"default":{"default":"mucho mayor que"}},"category":"Sm"},{"key":"226C","mappings":{"default":{"default":"entre"}},"category":"Sm"},{"key":"226D","mappings":{"default":{"default":"no equivalente a"}},"category":"Sm"},{"key":"226E","mappings":{"default":{"default":"no menor que"}},"category":"Sm"},{"key":"226F","mappings":{"default":{"default":"no mayor que"}},"category":"Sm"},{"key":"2270","mappings":{"default":{"default":"no menor ni igual a"}},"category":"Sm"},{"key":"2271","mappings":{"default":{"default":"no mayor ni igual 
a"}},"category":"Sm"},{"key":"2272","mappings":{"default":{"default":"menor o equivalente a"}},"category":"Sm"},{"key":"2273","mappings":{"default":{"default":"mayor o equivalente a"}},"category":"Sm"},{"key":"2274","mappings":{"default":{"default":"no menor ni equivalente a"}},"category":"Sm"},{"key":"2275","mappings":{"default":{"default":"no mayor ni equivalente a"}},"category":"Sm"},{"key":"2276","mappings":{"default":{"default":"menor o mayor que"}},"category":"Sm"},{"key":"2277","mappings":{"default":{"default":"mayor o menor que"}},"category":"Sm"},{"key":"2278","mappings":{"default":{"default":"no menor ni mayor que"}},"category":"Sm"},{"key":"2279","mappings":{"default":{"default":"no mayor ni menor que"}},"category":"Sm"},{"key":"227A","mappings":{"default":{"default":"precede a"}},"category":"Sm"},{"key":"227B","mappings":{"default":{"default":"sigue a"}},"category":"Sm"},{"key":"227C","mappings":{"default":{"default":"precede o es igual a"}},"category":"Sm"},{"key":"227D","mappings":{"default":{"default":"sigue o es igual a"}},"category":"Sm"},{"key":"227E","mappings":{"default":{"default":"precede o es equivalente a"}},"category":"Sm"},{"key":"227F","mappings":{"default":{"default":"sigue o es equivalente a"}},"category":"Sm"},{"key":"2280","mappings":{"default":{"default":"no precede a"}},"category":"Sm"},{"key":"2281","mappings":{"default":{"default":"no sigue a"}},"category":"Sm"},{"key":"2282","mappings":{"default":{"default":"incluido en"}},"category":"Sm"},{"key":"2283","mappings":{"default":{"default":"contiene a"}},"category":"Sm"},{"key":"2284","mappings":{"default":{"default":"no incluido en"}},"category":"Sm"},{"key":"2285","mappings":{"default":{"default":"no contiene a"}},"category":"Sm"},{"key":"2286","mappings":{"default":{"default":"incluido o igual a"}},"category":"Sm"},{"key":"2287","mappings":{"default":{"default":"contiene o es igual a"}},"category":"Sm"},{"key":"2288","mappings":{"default":{"default":"no incluido ni igual a"}},"category":"Sm"},{"key":"2289","mappings":{"default":{"default":"no contiene ni es igual a"}},"category":"Sm"},{"key":"228A","mappings":{"default":{"default":"incluido estrictamente en"}},"category":"Sm"},{"key":"228B","mappings":{"default":{"default":"contiene estrictamente a"}},"category":"Sm"},{"key":"228C","mappings":{"default":{"default":"familia de conjuntos"}},"category":"Sm"},{"key":"228D","mappings":{"default":{"default":"producto de familia de conjuntos"}},"category":"Sm"},{"key":"228E","mappings":{"default":{"default":"unión de familia de conjuntos"}},"category":"Sm"},{"key":"228F","mappings":{"default":{"default":"imagen cuadrada de"}},"category":"Sm"},{"key":"2290","mappings":{"default":{"default":"original cuadrado de"}},"category":"Sm"},{"key":"2291","mappings":{"default":{"default":"imagen cuadrada de o igual a"}},"category":"Sm"},{"key":"2292","mappings":{"default":{"default":"original cuadrado de o igual a"}},"category":"Sm"},{"key":"2293","mappings":{"default":{"default":"intersección cuadrada"}},"category":"Sm"},{"key":"2294","mappings":{"default":{"default":"unión cuadrada"}},"category":"Sm"},{"key":"2295","mappings":{"default":{"default":"más en círculo"}},"category":"Sm"},{"key":"2296","mappings":{"default":{"default":"menos en círculo"}},"category":"Sm"},{"key":"2297","mappings":{"default":{"default":"por en círculo"}},"category":"Sm"},{"key":"2298","mappings":{"default":{"default":"barra en círculo"}},"category":"Sm"},{"key":"2299","mappings":{"default":{"default":"punto en 
círculo"}},"category":"Sm"},{"key":"229A","mappings":{"default":{"default":"anillo en círculo"}},"category":"Sm"},{"key":"229B","mappings":{"default":{"default":"asterisco en círculo"}},"category":"Sm"},{"key":"229C","mappings":{"default":{"default":"igual en círculo"}},"category":"Sm"},{"key":"229D","mappings":{"default":{"default":"menos en círculo"}},"category":"Sm"},{"key":"229E","mappings":{"default":{"default":"más en cuadrado"}},"category":"Sm"},{"key":"229F","mappings":{"default":{"default":"menos en cuadrado"}},"category":"Sm"},{"key":"22A0","mappings":{"default":{"default":"veces en cuadrado"}},"category":"Sm"},{"key":"22A1","mappings":{"default":{"default":"punto en cuadrado"}},"category":"Sm"},{"key":"22A2","mappings":{"default":{"default":"t horizontal hacia la izquierda"}},"category":"Sm"},{"key":"22A3","mappings":{"default":{"default":"t horizontal hacia la derecha"}},"category":"Sm"},{"key":"22A4","mappings":{"default":{"default":"perpendicular"}},"category":"Sm"},{"key":"22A5","mappings":{"default":{"default":"perpendicular invertida"}},"category":"Sm"},{"key":"22A6","mappings":{"default":{"default":"afirmación"}},"category":"Sm"},{"key":"22A7","mappings":{"default":{"default":"modela"}},"category":"Sm"},{"key":"22A8","mappings":{"default":{"default":"verdadero"}},"category":"Sm"},{"key":"22A9","mappings":{"default":{"default":"obliga"}},"category":"Sm"},{"key":"22AA","mappings":{"default":{"default":"triple barra vertical torniquete derecho"}},"category":"Sm"},{"key":"22AB","mappings":{"default":{"default":"doble barra vertical doble torniquete derecho"}},"category":"Sm"},{"key":"22AC","mappings":{"default":{"default":"no prueba"}},"category":"Sm"},{"key":"22AD","mappings":{"default":{"default":"falso"}},"category":"Sm"},{"key":"22AE","mappings":{"default":{"default":"no obliga"}},"category":"Sm"},{"key":"22AF","mappings":{"default":{"default":"doble barra vertical doble torniquete derecho negada"}},"category":"Sm"},{"key":"22B0","mappings":{"default":{"default":"precede respecto de"}},"category":"Sm"},{"key":"22B1","mappings":{"default":{"default":"sigue respecto de"}},"category":"Sm"},{"key":"22B2","mappings":{"default":{"default":"subgrupo normal de"}},"category":"Sm"},{"key":"22B3","mappings":{"default":{"default":"contiene como subgrupo normal a"}},"category":"Sm"},{"key":"22B4","mappings":{"default":{"default":"subgrupo normal o es igual a"}},"category":"Sm"},{"key":"22B5","mappings":{"default":{"default":"contiene como subgrupo normal o es igual a"}},"category":"Sm"},{"key":"22B6","mappings":{"default":{"default":"original de"}},"category":"Sm"},{"key":"22B7","mappings":{"default":{"default":"imagen de"}},"category":"Sm"},{"key":"22B8","mappings":{"default":{"default":"multifunción"}},"category":"Sm"},{"key":"22B9","mappings":{"default":{"default":"matriz hermitiana conjugada"}},"category":"Sm"},{"key":"22BA","mappings":{"default":{"default":"interpola"}},"category":"Sm"},{"key":"22BB","mappings":{"default":{"default":"o excluyente"}},"category":"Sm"},{"key":"22BC","mappings":{"default":{"default":"no y"}},"category":"Sm"},{"key":"22BD","mappings":{"default":{"default":"no o"}},"category":"Sm"},{"key":"22BF","mappings":{"default":{"default":"triángulo 
rectángulo"}},"category":"Sm"},{"key":"22C0","mappings":{"default":{"default":"y"}},"category":"Sm"},{"key":"22C1","mappings":{"default":{"default":"o"}},"category":"Sm"},{"key":"22C2","mappings":{"default":{"default":"intersección"}},"category":"Sm"},{"key":"22C3","mappings":{"default":{"default":"unión"}},"category":"Sm"},{"key":"22C4","mappings":{"default":{"default":"operador diamante"}},"category":"Sm"},{"key":"22C5","mappings":{"default":{"default":"punto"}},"category":"Sm"},{"key":"22C6","mappings":{"default":{"default":"estrella"}},"category":"Sm"},{"key":"22C7","mappings":{"default":{"default":"dividido por"}},"category":"Sm"},{"key":"22C8","mappings":{"default":{"default":"moño"}},"category":"Sm"},{"key":"22C9","mappings":{"default":{"default":"factor normal izquierdo producto semidirecto"}},"category":"Sm"},{"key":"22CA","mappings":{"default":{"default":"factor normal derecho producto semidirecto"}},"category":"Sm"},{"key":"22CB","mappings":{"default":{"default":"producto semidirecto izquierdo"}},"category":"Sm"},{"key":"22CC","mappings":{"default":{"default":"producto semidirecto derecho"}},"category":"Sm"},{"key":"22CD","mappings":{"default":{"default":"tilde invertida igual a"}},"category":"Sm"},{"key":"22CE","mappings":{"default":{"default":"o rizada"}},"category":"Sm"},{"key":"22CF","mappings":{"default":{"default":"y rizada"}},"category":"Sm"},{"key":"22D0","mappings":{"default":{"default":"doble incluido"}},"category":"Sm"},{"key":"22D1","mappings":{"default":{"default":"doble contiene"}},"category":"Sm"},{"key":"22D2","mappings":{"default":{"default":"doble intersección"}},"category":"Sm"},{"key":"22D3","mappings":{"default":{"default":"doble unión"}},"category":"Sm"},{"key":"22D4","mappings":{"default":{"default":"bieldo"}},"category":"Sm"},{"key":"22D5","mappings":{"default":{"default":"paralela o igual a"}},"category":"Sm"},{"key":"22D6","mappings":{"default":{"default":"menor que con punto"}},"category":"Sm"},{"key":"22D7","mappings":{"default":{"default":"mayor que con punto"}},"category":"Sm"},{"key":"22D8","mappings":{"default":{"default":"mucho menor que"}},"category":"Sm"},{"key":"22D9","mappings":{"default":{"default":"mucho mayor que"}},"category":"Sm"},{"key":"22DA","mappings":{"default":{"default":"menor igual o mayor que"}},"category":"Sm"},{"key":"22DB","mappings":{"default":{"default":"mayor igual o menor que"}},"category":"Sm"},{"key":"22DC","mappings":{"default":{"default":"menor o igual que"}},"category":"Sm"},{"key":"22DD","mappings":{"default":{"default":"mayor o igual que"}},"category":"Sm"},{"key":"22DE","mappings":{"default":{"default":"precede o es igual a"}},"category":"Sm"},{"key":"22DF","mappings":{"default":{"default":"sigue o es igual a"}},"category":"Sm"},{"key":"22E0","mappings":{"default":{"default":"no precede ni es igual a"}},"category":"Sm"},{"key":"22E1","mappings":{"default":{"default":"no sigue ni es igual a"}},"category":"Sm"},{"key":"22E2","mappings":{"default":{"default":"no es imagen cuadrada ni igual a"}},"category":"Sm"},{"key":"22E3","mappings":{"default":{"default":"no es original cuadrada ni igual a"}},"category":"Sm"},{"key":"22E4","mappings":{"default":{"default":"imagen cuadrada estricta"}},"category":"Sm"},{"key":"22E5","mappings":{"default":{"default":"original cuadrada estricta"}},"category":"Sm"},{"key":"22E6","mappings":{"default":{"default":"menor que no equivalente a"}},"category":"Sm"},{"key":"22E7","mappings":{"default":{"default":"mayor que no equivalente 
a"}},"category":"Sm"},{"key":"22E8","mappings":{"default":{"default":"precede no equivalente a"}},"category":"Sm"},{"key":"22E9","mappings":{"default":{"default":"sigue no equivalente a"}},"category":"Sm"},{"key":"22EA","mappings":{"default":{"default":"no subgrupo normal de"}},"category":"Sm"},{"key":"22EB","mappings":{"default":{"default":"no contiene como subgrupo normal a"}},"category":"Sm"},{"key":"22EC","mappings":{"default":{"default":"no es subgrupo normal ni igual a"}},"category":"Sm"},{"key":"22ED","mappings":{"default":{"default":"no contiene como subgrupo normal ni es igual a"}},"category":"Sm"},{"key":"22EE","mappings":{"default":{"default":"puntos suspensivos verticales"}},"category":"Sm"},{"key":"22EF","mappings":{"default":{"default":"puntos suspensivos altos"}},"category":"Sm"},{"key":"22F0","mappings":{"default":{"default":"puntos suspensivos diagonales subiendo"}},"category":"Sm"},{"key":"22F1","mappings":{"default":{"default":"puntos suspensivos diagonales bajando"}},"category":"Sm"},{"key":"22F2","mappings":{"default":{"default":"pertenece con trazo horizontal"}},"category":"Sm"},{"key":"22F3","mappings":{"default":{"default":"pertenece con barra vertical al fin de raya horizontal"}},"category":"Sm"},{"key":"22F4","mappings":{"default":{"default":"pertenece en pequeño con trazo vertical"}},"category":"Sm"},{"key":"22F5","mappings":{"default":{"default":"pertenece con punto arriba"}},"category":"Sm"},{"key":"22F6","mappings":{"default":{"default":"pertenece con barra arriba"}},"category":"Sm"},{"key":"22F7","mappings":{"default":{"default":"perteneciente subrayado"}},"category":"Sm"},{"key":"22F8","mappings":{"default":{"default":"perteneciente subrayado"}},"category":"Sm"},{"key":"22F9","mappings":{"default":{"default":"pertenece con dos trazos horizontales"}},"category":"Sm"},{"key":"22FA","mappings":{"default":{"default":"contiene o es igual a"}},"category":"Sm"},{"key":"22FB","mappings":{"default":{"default":"contiene con marca"}},"category":"Sm"},{"key":"22FC","mappings":{"default":{"default":"contiene con marca"}},"category":"Sm"},{"key":"22FD","mappings":{"default":{"default":"contiene con barra"}},"category":"Sm"},{"key":"22FE","mappings":{"default":{"default":"perteneciente con barra"}},"category":"Sm"},{"key":"22FF","mappings":{"default":{"default":"z anotación asociación de bolsa"}},"category":"Sm"},{"key":"2300","mappings":{"default":{"default":"diámetro"}},"category":"So"},{"key":"2302","mappings":{"default":{"default":"casa"}},"category":"So"},{"key":"2305","mappings":{"default":{"default":"en proyectividad con"}},"category":"So"},{"key":"2306","mappings":{"default":{"default":"en perspectividad con"}},"category":"So"},{"key":"2310","mappings":{"default":{"default":"no invertido"}},"category":"So"},{"key":"2312","mappings":{"default":{"default":"arco"}},"category":"So"},{"key":"2313","mappings":{"default":{"default":"segmento"}},"category":"So"},{"key":"27C1","mappings":{"default":{"default":"triángulo encerrando triángulo"}},"category":"Sm"},{"key":"27C2","mappings":{"default":{"default":"perpendicular"}},"category":"Sm"},{"key":"27C3","mappings":{"default":{"default":"incluido en"}},"category":"Sm"},{"key":"27C4","mappings":{"default":{"default":"contiene a"}},"category":"Sm"},{"key":"27C7","mappings":{"default":{"default":"punto en círculo"}},"category":"Sm"},{"key":"27C8","mappings":{"default":{"default":"atrás tajo antes de subconjunto"}},"category":"Sm"},{"key":"27C9","mappings":{"default":{"default":"sobreconjunto antes de 
tajo"}},"category":"Sm"},{"key":"27CA","mappings":{"default":{"default":"barra vertical con golpe horizontal"}},"category":"Sm"},{"key":"27CC","mappings":{"default":{"default":"división larga"}},"category":"Sm"},{"key":"27D1","mappings":{"default":{"default":"y con punto interior"}},"category":"Sm"},{"key":"27D2","mappings":{"default":{"default":"elemento de apertura hacia arriba"}},"category":"Sm"},{"key":"27D3","mappings":{"default":{"default":"ángulo inferior derecho con punto"}},"category":"Sm"},{"key":"27D4","mappings":{"default":{"default":"ángulo inferior izquierdo con punto"}},"category":"Sm"},{"key":"27D5","mappings":{"default":{"default":"izquierdo exterior une"}},"category":"Sm"},{"key":"27D6","mappings":{"default":{"default":"derecho exterior une"}},"category":"Sm"},{"key":"27D7","mappings":{"default":{"default":"lleno exterior une"}},"category":"Sm"},{"key":"27D8","mappings":{"default":{"default":"perpendicular inversa"}},"category":"Sm"},{"key":"27D9","mappings":{"default":{"default":"perpendicular"}},"category":"Sm"},{"key":"27DA","mappings":{"default":{"default":"izquierdo y derecho doble torniquete"}},"category":"Sm"},{"key":"27DB","mappings":{"default":{"default":"doble T horizontal"}},"category":"Sm"},{"key":"27DC","mappings":{"default":{"default":"multifunción por la izquierda"}},"category":"Sm"},{"key":"27DD","mappings":{"default":{"default":"T horizontal derecha larga"}},"category":"Sm"},{"key":"27DE","mappings":{"default":{"default":"T horizontal izquierda larga"}},"category":"Sm"},{"key":"27DF","mappings":{"default":{"default":"perpendicular inversa con círculo arriba"}},"category":"Sm"},{"key":"27E0","mappings":{"default":{"default":"pastilla dividida por regla horizontal"}},"category":"Sm"},{"key":"27E1","mappings":{"default":{"default":"diamante cóncavo vacío"}},"category":"Sm"},{"key":"27E2","mappings":{"default":{"default":"diamante cóncavo con tictac hacia izquierda"}},"category":"Sm"},{"key":"27E3","mappings":{"default":{"default":"diamante cóncavo con tictac hacia derecha"}},"category":"Sm"},{"key":"27E4","mappings":{"default":{"default":"cuadrado con tictac hacia izquierda"}},"category":"Sm"},{"key":"27E5","mappings":{"default":{"default":"cuadrado con tictac hacia derecha"}},"category":"Sm"},{"key":"299A","mappings":{"default":{"default":"zigzag en vertical"}},"category":"Sm"},{"key":"29B0","mappings":{"default":{"default":"conjunto vacío invertido"}},"category":"Sm"},{"key":"29B1","mappings":{"default":{"default":"conjunto vacío con barra"}},"category":"Sm"},{"key":"29B2","mappings":{"default":{"default":"conjunto vacío con círculo"}},"category":"Sm"},{"key":"29B5","mappings":{"default":{"default":"círculo con barra"}},"category":"Sm"},{"key":"29B6","mappings":{"default":{"default":"barra en círculo"}},"category":"Sm"},{"key":"29B7","mappings":{"default":{"default":"paralelas en círculo"}},"category":"Sm"},{"key":"29B9","mappings":{"default":{"default":"perpendicular en círculo"}},"category":"Sm"},{"key":"29BB","mappings":{"default":{"default":"x superimpresa en círculo"}},"category":"Sm"},{"key":"29BC","mappings":{"default":{"default":"circled anticlockwise rotated division"}},"category":"Sm"},{"key":"29BE","mappings":{"default":{"default":"bullet vacía en círculo"}},"category":"Sm"},{"key":"29BF","mappings":{"default":{"default":"bullet en círculo"}},"category":"Sm"},{"key":"29C0","mappings":{"default":{"default":"menor que en círculo"}},"category":"Sm"},{"key":"29C1","mappings":{"default":{"default":"mayor que en 
círculo"}},"category":"Sm"},{"key":"29C2","mappings":{"default":{"default":"círculo con círculo a la derecha"}},"category":"Sm"},{"key":"29C3","mappings":{"default":{"default":"circle with two horizontal strokes to the right"}},"category":"Sm"},{"key":"29C4","mappings":{"default":{"default":"cuadrado con diagonal ascendente"}},"category":"Sm"},{"key":"29C5","mappings":{"default":{"default":"cuadrado con diagonal descendente"}},"category":"Sm"},{"key":"29C9","mappings":{"default":{"default":"dos cuadrados unidos"}},"category":"Sm"},{"key":"29CD","mappings":{"default":{"default":"triangle with serifs at bottom"}},"category":"Sm"},{"key":"29CE","mappings":{"default":{"default":"triángulo hacia la derecha sobre triángulo hacia la izquierda"}},"category":"Sm"},{"key":"29CF","mappings":{"default":{"default":"subgrupo normal o igual a"}},"category":"Sm"},{"key":"29D0","mappings":{"default":{"default":"contiene como subgrupo normal o es igual a"}},"category":"Sm"},{"key":"29DC","mappings":{"default":{"default":"infinito incompleto"}},"category":"Sm"},{"key":"29DE","mappings":{"default":{"default":"no infinito"}},"category":"Sm"},{"key":"29E3","mappings":{"default":{"default":"igual tachado doble "}},"category":"Sm"},{"key":"29E4","mappings":{"default":{"default":"igual con tilde, con doble tachado"}},"category":"Sm"},{"key":"29E5","mappings":{"default":{"default":"idéntico a con doble tachado"}},"category":"Sm"},{"key":"29EB","mappings":{"default":{"default":"rombo relleno"}},"category":"Sm"},{"key":"29F4","mappings":{"default":{"default":"rule delayed"}},"category":"Sm"},{"key":"29F6","mappings":{"default":{"default":"solidus with overbar"}},"category":"Sm"},{"key":"2A0C","mappings":{"default":{"default":"operador integral cuádruple"}},"category":"Sm"},{"key":"2A0D","mappings":{"default":{"default":"finite part integral"}},"category":"Sm"},{"key":"2A10","mappings":{"default":{"default":"circulation function"}},"category":"Sm"},{"key":"2A11","mappings":{"default":{"default":"integral en sentido antihorario"}},"category":"Sm"},{"key":"2A12","mappings":{"default":{"default":"integral lineal para rectángulo en polo"}},"category":"Sm"},{"key":"2A13","mappings":{"default":{"default":"integral lineal para semicírculo en polo"}},"category":"Sm"},{"key":"2A14","mappings":{"default":{"default":"integral lineal excluyendo el polo"}},"category":"Sm"},{"key":"2A15","mappings":{"default":{"default":"integral en torno de un punto operador"}},"category":"Sm"},{"key":"2A16","mappings":{"default":{"default":"operador integral para cuaterniones"}},"category":"Sm"},{"key":"2A22","mappings":{"default":{"default":"más con círculo"}},"category":"Sm"},{"key":"2A23","mappings":{"default":{"default":"más con ángulo"}},"category":"Sm"},{"key":"2A24","mappings":{"default":{"default":"tilde con más suscrito"}},"category":"Sm"},{"key":"2A25","mappings":{"default":{"default":"más con punto inferior"}},"category":"Sm"},{"key":"2A26","mappings":{"default":{"default":"tilde con más"}},"category":"Sm"},{"key":"2A27","mappings":{"default":{"default":"más con 2 suscrito"}},"category":"Sm"},{"key":"2A29","mappings":{"default":{"default":"menos con coma"}},"category":"Sm"},{"key":"2A2A","mappings":{"default":{"default":"menos con punto inferior"}},"category":"Sm"},{"key":"2A2D","mappings":{"default":{"default":"más en semicírculo izquierdo"}},"category":"Sm"},{"key":"2A2E","mappings":{"default":{"default":"más en semicírculo derecho"}},"category":"Sm"},{"key":"2A2F","mappings":{"default":{"default":"producto 
vectorial"}},"category":"Sm"},{"key":"2A30","mappings":{"default":{"default":"por con punto"}},"category":"Sm"},{"key":"2A31","mappings":{"default":{"default":"por con subrayado"}},"category":"Sm"},{"key":"2A33","mappings":{"default":{"default":"smash product"}},"category":"Sm"},{"key":"2A34","mappings":{"default":{"default":"por en semicírculo izquierdo"}},"category":"Sm"},{"key":"2A35","mappings":{"default":{"default":"por en semicírculo derecho"}},"category":"Sm"},{"key":"2A36","mappings":{"default":{"default":"por en círculo con ángulo"}},"category":"Sm"},{"key":"2A37","mappings":{"default":{"default":"por en anillo"}},"category":"Sm"},{"key":"2A38","mappings":{"default":{"default":"división en círculo"}},"category":"Sm"},{"key":"2A39","mappings":{"default":{"default":"más en triángulo"}},"category":"Sm"},{"key":"2A3A","mappings":{"default":{"default":"menos en triángulo"}},"category":"Sm"},{"key":"2A3B","mappings":{"default":{"default":"por en triángulo"}},"category":"Sm"},{"key":"2A3C","mappings":{"default":{"default":"producto interior"}},"category":"Sm"},{"key":"2A3F","mappings":{"default":{"default":"coproducto"}},"category":"Sm"},{"key":"2A40","mappings":{"default":{"default":"intersección con punto"}},"category":"Sm"},{"key":"2A42","mappings":{"default":{"default":"unión con barra"}},"category":"Sm"},{"key":"2A43","mappings":{"default":{"default":"intersección con barra"}},"category":"Sm"},{"key":"2A44","mappings":{"default":{"default":"intersección con and"}},"category":"Sm"},{"key":"2A45","mappings":{"default":{"default":"unión con or"}},"category":"Sm"},{"key":"2A46","mappings":{"default":{"default":"unión sobre intersección"}},"category":"Sm"},{"key":"2A47","mappings":{"default":{"default":"intersección sobre unión"}},"category":"Sm"},{"key":"2A48","mappings":{"default":{"default":"unión sobre barra, sobre intersección"}},"category":"Sm"},{"key":"2A49","mappings":{"default":{"default":"intersección sobre barra, sobre unión "}},"category":"Sm"},{"key":"2A4A","mappings":{"default":{"default":"unión y unión"}},"category":"Sm"},{"key":"2A4B","mappings":{"default":{"default":"intersección e intersección"}},"category":"Sm"},{"key":"2A4C","mappings":{"default":{"default":"unión cerrada"}},"category":"Sm"},{"key":"2A4D","mappings":{"default":{"default":"intersección cerrada"}},"category":"Sm"},{"key":"2A50","mappings":{"default":{"default":"unión cerrada con por"}},"category":"Sm"},{"key":"2A53","mappings":{"default":{"default":"y"}},"category":"Sm"},{"key":"2A54","mappings":{"default":{"default":"o"}},"category":"Sm"},{"key":"2A55","mappings":{"default":{"default":"two intersecting logical and"}},"category":"Sm"},{"key":"2A56","mappings":{"default":{"default":"two intersecting logical or"}},"category":"Sm"},{"key":"2A57","mappings":{"default":{"default":"sloping large or"}},"category":"Sm"},{"key":"2A58","mappings":{"default":{"default":"sloping large and"}},"category":"Sm"},{"key":"2A5A","mappings":{"default":{"default":"logical and with middle stem"}},"category":"Sm"},{"key":"2A5B","mappings":{"default":{"default":"logical or with middle stem"}},"category":"Sm"},{"key":"2A5C","mappings":{"default":{"default":"logical and with horizontal dash"}},"category":"Sm"},{"key":"2A5D","mappings":{"default":{"default":"logical or with horizontal dash"}},"category":"Sm"},{"key":"2A5F","mappings":{"default":{"default":"logical and with underbar"}},"category":"Sm"},{"key":"2A66","mappings":{"default":{"default":"igual con punto 
suscrito"}},"category":"Sm"},{"key":"2A6A","mappings":{"default":{"default":"tilde con punto"}},"category":"Sm"},{"key":"2A6D","mappings":{"default":{"default":"congruente con punto"}},"category":"Sm"},{"key":"2A6F","mappings":{"default":{"default":"casi igual con ángulo"}},"category":"Sm"},{"key":"2A71","mappings":{"default":{"default":"igual con más suscrito"}},"category":"Sm"},{"key":"2A72","mappings":{"default":{"default":"igual con más"}},"category":"Sm"},{"key":"2A73","mappings":{"default":{"default":"igual con operador tilde"}},"category":"Sm"},{"key":"2A74","mappings":{"default":{"default":"igual con dos puntos"}},"category":"Sm"},{"key":"2A75","mappings":{"default":{"default":"igual doble"}},"category":"Sm"},{"key":"2A77","mappings":{"default":{"default":"igual entre cuatro puntos"}},"category":"Sm"},{"key":"2A78","mappings":{"default":{"default":"equivalente con cuatro puntos"}},"category":"Sm"},{"key":"2A79","mappings":{"default":{"default":"menor que con círculo interior"}},"category":"Sm"},{"key":"2A7A","mappings":{"default":{"default":"mayor que con círculo interior"}},"category":"Sm"},{"key":"2A7B","mappings":{"default":{"default":"menor que con interrogante"}},"category":"Sm"},{"key":"2A7C","mappings":{"default":{"default":"mayor que con interrogante"}},"category":"Sm"},{"key":"2A7D","mappings":{"default":{"default":"estrictamente menor que"}},"category":"Sm"},{"key":"2A7E","mappings":{"default":{"default":"estrictamente mayor que"}},"category":"Sm"},{"key":"2A7F","mappings":{"default":{"default":"estrictamente menor que con punto interior"}},"category":"Sm"},{"key":"2A80","mappings":{"default":{"default":"estrictamente mayor que con punto interior"}},"category":"Sm"},{"key":"2A81","mappings":{"default":{"default":"estrictamente menor que con punto"}},"category":"Sm"},{"key":"2A82","mappings":{"default":{"default":"estrictamente mayor que con punto"}},"category":"Sm"},{"key":"2A83","mappings":{"default":{"default":"estrictamente menor que con punto"}},"category":"Sm"},{"key":"2A84","mappings":{"default":{"default":"estrictamente mayor que con punto"}},"category":"Sm"},{"key":"2A89","mappings":{"default":{"default":"menor que, pero no aproximado a"}},"category":"Sm"},{"key":"2A8A","mappings":{"default":{"default":"mayor que, pero no aproximado a"}},"category":"Sm"},{"key":"2A8D","mappings":{"default":{"default":"menor, similar o igual a"}},"category":"Sm"},{"key":"2A8E","mappings":{"default":{"default":"mayor, similar o igual a"}},"category":"Sm"},{"key":"2A8F","mappings":{"default":{"default":"menor, similar o mayor que"}},"category":"Sm"},{"key":"2A90","mappings":{"default":{"default":"mayor, similar o menor que"}},"category":"Sm"},{"key":"2A91","mappings":{"default":{"default":"menor, mayor o igual a"}},"category":"Sm"},{"key":"2A92","mappings":{"default":{"default":"mayor, menor o igual a"}},"category":"Sm"},{"key":"2A93","mappings":{"default":{"default":"estrictamente menor o mayor que"}},"category":"Sm"},{"key":"2A94","mappings":{"default":{"default":"estrictamente mayor o menor que"}},"category":"Sm"},{"key":"2A97","mappings":{"default":{"default":"estrictamente menor que con punto interior"}},"category":"Sm"},{"key":"2A98","mappings":{"default":{"default":"estrictamente mayor que con punto interior"}},"category":"Sm"},{"key":"2A99","mappings":{"default":{"default":"menor o igual a"}},"category":"Sm"},{"key":"2A9A","mappings":{"default":{"default":"mayor o igual a"}},"category":"Sm"},{"key":"2A9D","mappings":{"default":{"default":"equivalente o menor 
que"}},"category":"Sm"},{"key":"2A9E","mappings":{"default":{"default":"mayor o similar a"}},"category":"Sm"},{"key":"2A9F","mappings":{"default":{"default":"similar, menor o igual a"}},"category":"Sm"},{"key":"2AA0","mappings":{"default":{"default":"similar, mayor o igual a"}},"category":"Sm"},{"key":"2AA1","mappings":{"default":{"default":"mucho menor que"}},"category":"Sm"},{"key":"2AA2","mappings":{"default":{"default":"mucho mayor que"}},"category":"Sm"},{"key":"2AA4","mappings":{"default":{"default":"menor que, mayor que"}},"category":"Sm"},{"key":"2AA5","mappings":{"default":{"default":"mayor o menor que"}},"category":"Sm"},{"key":"2AA6","mappings":{"default":{"default":"menor que cerrado con curva"}},"category":"Sm"},{"key":"2AA7","mappings":{"default":{"default":"mayor que cerrado con curva"}},"category":"Sm"},{"key":"2AA8","mappings":{"default":{"default":"estrictamente menor que cerrado con curva"}},"category":"Sm"},{"key":"2AA9","mappings":{"default":{"default":"estrictamente mayor que cerrado con curva"}},"category":"Sm"},{"key":"2AAA","mappings":{"default":{"default":"más reducido que"}},"category":"Sm"},{"key":"2AAB","mappings":{"default":{"default":"más amplio que"}},"category":"Sm"},{"key":"2AAC","mappings":{"default":{"default":"más reducido o igual a"}},"category":"Sm"},{"key":"2AAD","mappings":{"default":{"default":"más amplio o igual a"}},"category":"Sm"},{"key":"2AAE","mappings":{"default":{"default":"diferencia entre"}},"category":"Sm"},{"key":"2AAF","mappings":{"default":{"default":"precede o es igual a"}},"category":"Sm"},{"key":"2AB5","mappings":{"default":{"default":"precede estrictamente a"}},"category":"Sm"},{"key":"2AB6","mappings":{"default":{"default":"sigue estrictamente a"}},"category":"Sm"},{"key":"2ABB","mappings":{"default":{"default":"precede mucho antes"}},"category":"Sm"},{"key":"2ABC","mappings":{"default":{"default":"sigue mucho después"}},"category":"Sm"},{"key":"2ABD","mappings":{"default":{"default":"subrelación de"}},"category":"Sm"},{"key":"2ABE","mappings":{"default":{"default":"contiene como subrelación a"}},"category":"Sm"},{"key":"2ABF","mappings":{"default":{"default":"incluido con más suscrito"}},"category":"Sm"},{"key":"2AC0","mappings":{"default":{"default":"contiene con más suscrito"}},"category":"Sm"},{"key":"2AC1","mappings":{"default":{"default":"incluido con por suscrito"}},"category":"Sm"},{"key":"2AC2","mappings":{"default":{"default":"contiene con por suscrito"}},"category":"Sm"},{"key":"2AC3","mappings":{"default":{"default":"incluido con punto o es igual a"}},"category":"Sm"},{"key":"2AC4","mappings":{"default":{"default":"contiene con punto o es igual a"}},"category":"Sm"},{"key":"2AC7","mappings":{"default":{"default":"incluido aproximadamente en"}},"category":"Sm"},{"key":"2AC8","mappings":{"default":{"default":"contiene aproximadamente a"}},"category":"Sm"},{"key":"2ACF","mappings":{"default":{"default":"incluido con cierre en"}},"category":"Sm"},{"key":"2AD0","mappings":{"default":{"default":"contiene al cierre del subconjunto"}},"category":"Sm"},{"key":"2AD1","mappings":{"default":{"default":"incluido el cierre o es igual a"}},"category":"Sm"},{"key":"2AD2","mappings":{"default":{"default":"contiene al cierre del subconjunto o es igual a"}},"category":"Sm"},{"key":"2AD3","mappings":{"default":{"default":"está incluido o contiene a"}},"category":"Sm"},{"key":"2AD4","mappings":{"default":{"default":"contiene o está incluido en"}},"category":"Sm"},{"key":"2AD5","mappings":{"default":{"default":"incluido doblemente 
en"}},"category":"Sm"},{"key":"2AD6","mappings":{"default":{"default":"contiene doblemente a"}},"category":"Sm"},{"key":"2AD7","mappings":{"default":{"default":"contiene o está incluido en"}},"category":"Sm"},{"key":"2AD8","mappings":{"default":{"default":"contiene o está incluido en"}},"category":"Sm"},{"key":"2AD9","mappings":{"default":{"default":"perteneciente dirigido hacia abajo"}},"category":"Sm"},{"key":"2ADA","mappings":{"default":{"default":"pitchfork with tee top"}},"category":"Sm"},{"key":"2ADB","mappings":{"default":{"default":"transversal intersection"}},"category":"Sm"},{"key":"2AE4","mappings":{"default":{"default":"double left turnstile vertical bar"}},"category":"Sm"},{"key":"2AE6","mappings":{"default":{"default":"long dash from left member of double vertical"}},"category":"Sm"},{"key":"2AE7","mappings":{"default":{"default":"short down tack with overbar"}},"category":"Sm"},{"key":"2AE8","mappings":{"default":{"default":"barra con perpendicular"}},"category":"Sm"},{"key":"2AE9","mappings":{"default":{"default":"short up tack above short down tack"}},"category":"Sm"},{"key":"2AEB","mappings":{"default":{"default":"double up tack"}},"category":"Sm"},{"key":"2AEC","mappings":{"default":{"default":"double stroke not sign"}},"category":"Sm"},{"key":"2AED","mappings":{"default":{"default":"reversed double stroke not sign"}},"category":"Sm"},{"key":"2AEE","mappings":{"default":{"default":"does not divide with reversed negation slash"}},"category":"Sm"},{"key":"2AEF","mappings":{"default":{"default":"barra vertical con círculo"}},"category":"Sm"},{"key":"2AF0","mappings":{"default":{"default":"barra con círculo inferior"}},"category":"Sm"},{"key":"2AF1","mappings":{"default":{"default":"down tack with circle below"}},"category":"Sm"},{"key":"2AF2","mappings":{"default":{"default":"paralela con marca"}},"category":"Sm"},{"key":"2AF3","mappings":{"default":{"default":"paralela con tilde"}},"category":"Sm"},{"key":"FE68","mappings":{"default":{"default":"división entera"}},"category":"Po"},{"key":"FF5C","mappings":{"default":{"default":"barra vertical"}},"category":"Sm"}] | PypiClean |
/libSDT4-0.0.0-py3-none-any.whl/lib_py_parse/helper/eval_service.py | import lib_py_parse.helper.eval_helper as eval_helper
import lib_py_parse.utils.exceptions as exceptions
def resolve_func_type_from_arguments(arguments):
T1 = [
T2.strip()
for T2 in arguments.split('->')
]
if len(T1) != 2:
error_msgs = [
'invalid fxn statement',
'fxn statement must be formatted as:',
'fxn ({arguments}) -> {return_type}:',
f'input: {arguments}',
]
exceptions.raise_exception_ue(error_msgs)
fparam_ids, arg_types_typon = eval_helper.read_fxn_params(T1[0])
return_type_typon = T1[1]
return fparam_ids, arg_types_typon, return_type_typon
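# Illustrative sketch (assuming read_fxn_params returns parallel lists of
# parameter names and typon types): for
#   arguments = '(x: int, y: str) -> bool'
# the call would yield (['x', 'y'], ['int', 'str'], 'bool').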
def resolve_args_only_func_type(arguments):
T1 = [
T2.strip()
for T2 in arguments.split('->')
]
if len(T1) != 1:
error_msgs = [
'invalid fxn argument declaration',
'arguments must be formatted as:',
'({arg_1}: {type_1}, ..., {arg_i}: {type_i}, ..., {arg_n}: {type_n})',
f'input: {arguments}',
]
exceptions.raise_exception_ue(error_msgs)
fparam_ids, arg_types_typon = eval_helper.read_fxn_params(T1[0])
return fparam_ids, arg_types_typon
def resolve_func_type_from_arguments_msa(arguments, module_L0_skeleton, current_module):
fparam_ids, arg_types_typon, return_type_typon = resolve_func_type_from_arguments(arguments)
arg_types_typon_msa = [
eval_helper.fmt_type_module_space_aware_module_L0_skeleton(arg_type_typon, current_module, module_L0_skeleton)
for arg_type_typon in arg_types_typon
]
return_type_typon_msa = eval_helper.fmt_type_module_space_aware_module_L0_skeleton(return_type_typon, current_module, module_L0_skeleton)
return fparam_ids, arg_types_typon_msa, return_type_typon_msa
def resolve_args_only_func_type_msa(arguments, module_L0_skeleton, current_module):
fparam_ids, arg_types_typon = resolve_args_only_func_type(arguments)
arg_types_typon_msa = [
eval_helper.fmt_type_module_space_aware_module_L0_skeleton(arg_type_typon, current_module, module_L0_skeleton)
for arg_type_typon in arg_types_typon
]
return fparam_ids, arg_types_typon_msa
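# The *_msa variants are the module-space-aware versions: each typon type is
# rewritten relative to current_module via
# fmt_type_module_space_aware_module_L0_skeleton, presumably so that
# cross-module type names resolve to their fully qualified form.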
def is_valid_var_name(var_name):
for i in range(0, len(var_name)):
if not var_name[i].isalnum() and var_name[i] != '_':
return False
return True | PypiClean |
/jupyterlab_remote_contents-0.1.1.tar.gz/jupyterlab_remote_contents-0.1.1/node_modules/@rjsf/core/lib/components/widgets/CheckboxesWidget.js | import React from "react";
import PropTypes from "prop-types";
function selectValue(value, selected, all) {
var at = all.indexOf(value);
var updated = selected.slice(0, at).concat(value, selected.slice(at)); // As inserting values at predefined index positions doesn't work with empty
// arrays, we need to reorder the updated selection to match the initial order
return updated.sort(function (a, b) {
    return all.indexOf(a) - all.indexOf(b); // comparator must return a number, not a boolean
});
}
function deselectValue(value, selected) {
return selected.filter(function (v) {
return v !== value;
});
}
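// Worked example: with all = [1, 2, 3] and selected = [3, 1],
// selectValue(2, [3, 1], all) inserts 2 and re-sorts by position in `all`,
// giving [1, 2, 3]; deselectValue(2, [1, 2, 3]) simply filters it back out.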
function CheckboxesWidget(props) {
var id = props.id,
disabled = props.disabled,
options = props.options,
value = props.value,
autofocus = props.autofocus,
readonly = props.readonly,
_onChange = props.onChange;
var enumOptions = options.enumOptions,
enumDisabled = options.enumDisabled,
inline = options.inline;
return React.createElement("div", {
className: "checkboxes",
id: id
}, enumOptions.map(function (option, index) {
var checked = value.indexOf(option.value) !== -1;
var itemDisabled = enumDisabled && enumDisabled.indexOf(option.value) != -1;
var disabledCls = disabled || itemDisabled || readonly ? "disabled" : "";
var checkbox = React.createElement("span", null, React.createElement("input", {
type: "checkbox",
id: "".concat(id, "_").concat(index),
checked: checked,
disabled: disabled || itemDisabled || readonly,
autoFocus: autofocus && index === 0,
onChange: function onChange(event) {
var all = enumOptions.map(function (_ref) {
var value = _ref.value;
return value;
});
if (event.target.checked) {
_onChange(selectValue(option.value, value, all));
} else {
_onChange(deselectValue(option.value, value));
}
}
}), React.createElement("span", null, option.label));
return inline ? React.createElement("label", {
key: index,
className: "checkbox-inline ".concat(disabledCls)
}, checkbox) : React.createElement("div", {
key: index,
className: "checkbox ".concat(disabledCls)
}, React.createElement("label", null, checkbox));
}));
}
CheckboxesWidget.defaultProps = {
autofocus: false,
options: {
inline: false
}
};
if (process.env.NODE_ENV !== "production") {
CheckboxesWidget.propTypes = {
schema: PropTypes.object.isRequired,
id: PropTypes.string.isRequired,
options: PropTypes.shape({
enumOptions: PropTypes.array,
inline: PropTypes.bool
}).isRequired,
value: PropTypes.any,
required: PropTypes.bool,
readonly: PropTypes.bool,
disabled: PropTypes.bool,
multiple: PropTypes.bool,
autofocus: PropTypes.bool,
onChange: PropTypes.func
};
}
export default CheckboxesWidget; | PypiClean
/metadata_extraction_vnpt_media-0.0.1.tar.gz/metadata_extraction_vnpt_media-0.0.1/classify_image/efficientnet/__init__.py |
import functools
from .__version__ import __version__
_KERAS_BACKEND = None
_KERAS_LAYERS = None
_KERAS_MODELS = None
_KERAS_UTILS = None
def get_submodules_from_kwargs(kwargs):
backend = kwargs.get('backend', _KERAS_BACKEND)
layers = kwargs.get('layers', _KERAS_LAYERS)
models = kwargs.get('models', _KERAS_MODELS)
utils = kwargs.get('utils', _KERAS_UTILS)
for key in kwargs.keys():
if key not in ['backend', 'layers', 'models', 'utils']:
            raise TypeError('Invalid keyword argument: %s' % key)
return backend, layers, models, utils
def inject_keras_modules(func):
import keras
@functools.wraps(func)
def wrapper(*args, **kwargs):
kwargs['backend'] = keras.backend
kwargs['layers'] = keras.layers
kwargs['models'] = keras.models
kwargs['utils'] = keras.utils
return func(*args, **kwargs)
return wrapper
def inject_tfkeras_modules(func):
import tensorflow.keras as tfkeras
@functools.wraps(func)
def wrapper(*args, **kwargs):
kwargs['backend'] = tfkeras.backend
kwargs['layers'] = tfkeras.layers
kwargs['models'] = tfkeras.models
kwargs['utils'] = tfkeras.utils
return func(*args, **kwargs)
return wrapper
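# Usage sketch (build_model is hypothetical, not part of this package):
#
#     @inject_tfkeras_modules
#     def build_model(**kwargs):
#         backend, layers, models, utils = get_submodules_from_kwargs(kwargs)
#         return models.Sequential([layers.Dense(1)])
#
#     model = build_model()  # tf.keras submodules are injected by the wrapper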
def init_keras_custom_objects():
import keras
from . import model
custom_objects = {
'swish': inject_keras_modules(model.get_swish)(),
'FixedDropout': inject_keras_modules(model.get_dropout)()
}
try:
keras.utils.generic_utils.get_custom_objects().update(custom_objects)
except AttributeError:
keras.utils.get_custom_objects().update(custom_objects)
def init_tfkeras_custom_objects():
import tensorflow.keras as tfkeras
from . import model
custom_objects = {
'swish': inject_tfkeras_modules(model.get_swish)(),
'FixedDropout': inject_tfkeras_modules(model.get_dropout)()
}
tfkeras.utils.get_custom_objects().update(custom_objects) | PypiClean |
/OceanMonkey-1.0.0.tar.gz/OceanMonkey-1.0.0/README.rst | .. image:: https://raw.githubusercontent.com/chipscoco/OceanMonkey/main/artwork/logo.jpg
Overview
========
OceanMonkey is a High-Level Distributed Web Crawling and Web Scraping framework, used to
crawl websites and extract structured data from their pages. It can be used for
a wide range of purposes, from data mining to monitoring and automated testing.
OceanMonkey was brought to life in 2021, while its author was teaching Python web scraping, and is maintained by chenzhengqiang (WeChat: Pretty-Style, blog: http://www.chipscoco.com).
Requirements
============
* Python 3.5+
* Works on Linux, Windows, macOS, BSD
Install
=======
The quick way::

    pip install oceanmonkey

Quick start
=============
First, execute **monkeys startproject** on the command line to create an OceanMonkey project::

    monkeys startproject BeBe

Then write your crawling logic in ``gibbons.py`` under the ``monkeys`` directory, and your storing logic in ``orangutans.py``.
Finally, once your coding work is done, execute the **monkeys run** command from the project directory::

    cd BeBe
    monkeys run
| PypiClean |
/dvh-analytics-0.5.11.tar.gz/dvh-analytics-0.5.11/dvh/modules/settings/sql_config.py | from __future__ import print_function
import time
from bokeh.models.widgets import Button, TextInput, Div, PasswordInput
from bokeh.layouts import row, column
from ..tools.io.preferences.sql import write_sql_connection_settings, validate_sql_connection, load_sql_settings,\
is_sql_connection_defined
from ..tools.io.database.sql_connector import DVH_SQL
class SqlConfig:
def __init__(self):
self.config = {}
self.load(update_widgets=False)
self.input_types = ['host', 'port', 'dbname', 'user', 'password']
self.input_widget = {key: TextInput for key in self.input_types}
self.input_widget['password'] = PasswordInput
div_sql = Div(text="<b>SQL Settings</b>")
title = {'host': 'Host',
'port': 'Port',
'dbname': 'Database Name',
'user': 'User (Leave blank for OS authentication)',
'password': 'Password (Leave blank for OS authentication)'}
self.input = {key: self.input_widget[key](value=self.config[key], title=title[key], width=300) for key in
self.input_types}
for key in self.input_types:
self.input[key].on_change('value', self.save_needed)
self.check_tables_button = Button(label='Check Tables', button_type='primary', width=100)
self.check_tables_button.on_click(self.check_tables)
self.create_tables_button = Button(label='Create Tables', button_type='primary', width=100)
self.create_tables_button.on_click(self.create_tables)
self.clear_tables_button = Button(label='Clear Tables', button_type='primary', width=100)
self.clear_tables_button.on_click(self.clear_tables)
self.reload_button = Button(label='Reload', button_type='primary', width=100)
self.reload_button.on_click(self.load)
self.save_button = Button(label='Save', button_type='default', width=100)
self.save_button.on_click(self.save)
self.echo_button = Button(label="Echo", button_type='primary', width=100)
self.echo_button.on_click(self.echo)
self.layout = column(div_sql,
row(self.input['host'], self.input['port']),
row(self.input['user'], self.input['password']),
self.input['dbname'],
row(self.reload_button, self.echo_button, self.save_button),
row(self.check_tables_button, self.create_tables_button, self.clear_tables_button))
def load(self, update_widgets=True):
self.config = load_sql_settings()
if not is_sql_connection_defined():
write_sql_connection_settings(self.config)
if update_widgets:
for key in self.input_types:
self.input[key].value = self.config[key]
self.save_button.button_type = 'default'
self.save_button.label = 'Save'
def save(self):
self.update_config()
write_sql_connection_settings(self.config)
self.load()
self.save_button.button_type = 'default'
self.save_button.label = 'Save'
def update_config(self):
for key in self.input_types:
self.config[key] = self.input[key].value
def echo(self):
self.update_config()
initial_button_type = self.echo_button.button_type
initial_label = self.echo_button.label
if validate_sql_connection(config=self.config, verbose=False):
self.echo_button.button_type = 'success'
self.echo_button.label = 'Success'
else:
self.echo_button.button_type = 'warning'
self.echo_button.label = 'Fail'
time.sleep(1.5)
self.echo_button.button_type = initial_button_type
self.echo_button.label = initial_label
def check_tables(self):
initial_label = self.check_tables_button.label
initial_button_type = self.check_tables_button.button_type
try:
table_result = {table: DVH_SQL().check_table_exists(table) for table in ['dvhs', 'plans', 'beams', 'rxs']}
if all(table_result.values()):
self.check_tables_button.button_type = 'success'
self.check_tables_button.label = 'Success'
else:
self.check_tables_button.button_type = 'warning'
self.check_tables_button.label = 'Fail'
        except Exception:
self.check_tables_button.button_type = 'warning'
self.check_tables_button.label = 'No Connection'
time.sleep(1.5)
self.check_tables_button.button_type = initial_button_type
self.check_tables_button.label = initial_label
def create_tables(self):
initial_label = self.create_tables_button.label
initial_button_type = self.create_tables_button.button_type
if initial_label == 'Cancel':
self.create_tables_button.button_type = 'primary'
self.create_tables_button.label = 'Create Tables'
self.clear_tables_button.button_type = 'primary'
self.clear_tables_button.label = 'Clear Tables'
else:
try:
DVH_SQL().initialize_database()
            except Exception:
self.create_tables_button.button_type = 'warning'
self.create_tables_button.label = 'No Connection'
time.sleep(1.5)
self.create_tables_button.button_type = initial_button_type
self.create_tables_button.label = initial_label
def clear_tables(self):
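        # Two-step confirmation: the first click arms the button ('danger',
        # "Are you sure?") and turns the create button into a Cancel escape
        # hatch; only a second click actually reinitializes the database.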
if self.clear_tables_button.button_type == 'danger':
try:
DVH_SQL().reinitialize_database()
            except Exception:
self.clear_tables_button.button_type = 'warning'
self.clear_tables_button.label = 'No Connection'
time.sleep(1.5)
self.clear_tables_button.button_type = 'primary'
self.clear_tables_button.label = 'Clear Tables'
self.create_tables_button.button_type = 'primary'
self.create_tables_button.label = 'Create Tables'
self.clear_tables_button.button_type = 'primary'
self.clear_tables_button.label = 'Clear Tables'
elif self.clear_tables_button.button_type == 'primary':
self.clear_tables_button.button_type = 'danger'
self.clear_tables_button.label = 'Are you sure?'
self.create_tables_button.button_type = 'success'
self.create_tables_button.label = 'Cancel'
def save_needed(self, attr, old, new):
self.save_button.label = 'Save Needed'
self.save_button.button_type = 'warning' | PypiClean |
/ndn-bootstrap-0.1.post2.tar.gz/ndn-bootstrap-0.1.post2/bootstrap/ndncert/challenge_encoder.py | from typing import List
from ndn.encoding import parse_tl_num
from .protocol_v3 import *
from .cert_state import *
from ..crypto_tools import *
def get_parameter_keys(selected_challenge: str, status: int, packet_format: str) -> List[str]:
_keys = {
'possession': {
STATUS_BEFORE_CHALLENGE: {
'request': ['issued-cert']
},
STATUS_CHALLENGE: {
'request': ['proof'],
'response': ['nonce']
},
STATUS_SUCCESS: {
'response': []
},
STATUS_FAILURE: {
'response': []
},
}
}
return _keys[selected_challenge][status][packet_format]
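# e.g. get_parameter_keys('possession', STATUS_CHALLENGE, 'request') == ['proof'],
# while the matching 'response' carries ['nonce'].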
class ChallengeEncoder(object):
def __init__(self, id):
self.cert_state = CertState()
self.cert_state.id = id
@staticmethod
def _probe_parameter_list(buf, offset) -> int:
while offset < len(buf):
tlv_type, tlv_type_size = parse_tl_num(buf[offset:])
tlv_length, tlv_length_size = parse_tl_num(buf[offset + tlv_type_size:])
if tlv_type == TLV_PARAMETER_KEY_TYPE:
return offset
else:
offset += tlv_type_size + tlv_length_size + tlv_length
return offset
@staticmethod
def _parse_parameter_list(buf, offset) -> List[Parameter]:
parameters = []
while offset < len(buf):
tlv_type, tlv_type_size = parse_tl_num(buf[offset:])
offset += tlv_type_size
tlv_length, tlv_length_size = parse_tl_num(buf[offset:])
offset += tlv_length_size
tlv_value = buf[offset : offset + tlv_length]
if tlv_type == TLV_PARAMETER_KEY_TYPE:
parameter = Parameter()
parameter.key = tlv_value
parameters.append(parameter)
elif tlv_type == TLV_PARAMETER_VALUE_TYPE:
                if parameters[-1].value is None:
parameters[-1].value = tlv_value
offset += tlv_length
return parameters
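    # Expected TLV layout (a sketch; types are the TLV_PARAMETER_* constants):
    #   [KEY][len]["nonce"] [VALUE][len][bytes] [KEY][len]["proof"] ...
    # Each KEY token starts a new Parameter; the first VALUE that follows is
    # attached to it, and unknown TLV types are skipped over.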
def prepare_challenge_request(self, parameter_keys: List[str]) -> bytearray:
request = ChallengeRequest()
request.selected_challenge = self.cert_state.selected_challenge
# encode parameters
parameter_buf = []
for parameter_key in parameter_keys:
parameter_buf += self.cert_state.encode_parameter(parameter_key)
        # concatenation
return request.encode() + bytearray(parameter_buf)
def parse_challenge_request(self, plaintext: bytes):
        offset = self._probe_parameter_list(plaintext, offset=0)
request = ChallengeRequest.parse(plaintext[:offset])
self.cert_state.selected_challenge = request.selected_challenge
# parsing parameters
parameters = self._parse_parameter_list(plaintext, offset)
for parameter in parameters:
self.cert_state.parameters.append(parameter)
def prepare_challenge_response(self, parameter_keys: List[str]) -> bytearray:
response = ChallengeResponse()
response.status = self.cert_state.status
response.challenge_status = self.cert_state.challenge_status
response.remaining_time = self.cert_state.remaining_time
response.remaining_tries = self.cert_state.remaining_tries
if self.cert_state.issued_cert_name is not None:
response.issued_cert_name = self.cert_state.issued_cert_name
response.forwarding_hint = self.cert_state.forwarding_hint
# encode parameters
parameter_buf = []
for parameter_key in parameter_keys:
parameter_buf += self.cert_state.encode_parameter(parameter_key)
        # concatenation
return response.encode() + bytearray(parameter_buf)
def parse_challenge_response(self, plaintext: bytes):
        offset = self._probe_parameter_list(plaintext, offset=0)
response = ChallengeResponse.parse(plaintext[:offset])
self.cert_state.status = response.status
self.cert_state.challenge_status = response.challenge_status
self.cert_state.remaining_time = response.remaining_time
self.cert_state.remaining_tries = response.remaining_tries
if response.issued_cert_name is not None:
self.cert_state.issued_cert_name = response.issued_cert_name
if response.forwarding_hint is not None:
self.cert_state.forwarding_hint = response.forwarding_hint
parameters = self._parse_parameter_list(plaintext, offset)
for parameter in parameters:
self.cert_state.parameters.append(parameter) | PypiClean |
/pytorch_transformers-1.2.0-py3-none-any.whl/pytorch_transformers/modeling_distilbert.py | from __future__ import absolute_import, division, print_function, unicode_literals
import json
import logging
import math
import copy
import sys
from io import open
import itertools
import numpy as np
import torch
import torch.nn as nn
from pytorch_transformers.modeling_utils import PretrainedConfig, PreTrainedModel, add_start_docstrings, prune_linear_layer
logger = logging.getLogger(__name__)
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP = {
'distilbert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/distilbert-base-uncased-pytorch_model.bin",
'distilbert-base-uncased-distilled-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/distilbert-base-uncased-distilled-squad-pytorch_model.bin"
}
DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
'distilbert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/distilbert-base-uncased-config.json",
'distilbert-base-uncased-distilled-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/distilbert-base-uncased-distilled-squad-config.json"
}
class DistilBertConfig(PretrainedConfig):
pretrained_config_archive_map = DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP
def __init__(self,
vocab_size_or_config_json_file=30522,
max_position_embeddings=512,
sinusoidal_pos_embds=True,
n_layers=6,
n_heads=12,
dim=768,
hidden_dim=4*768,
dropout=0.1,
attention_dropout=0.1,
activation='gelu',
initializer_range=0.02,
tie_weights_=True,
qa_dropout=0.1,
seq_classif_dropout=0.2,
**kwargs):
super(DistilBertConfig, self).__init__(**kwargs)
if isinstance(vocab_size_or_config_json_file, str) or (sys.version_info[0] == 2
and isinstance(vocab_size_or_config_json_file, unicode)):
with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader:
json_config = json.loads(reader.read())
for key, value in json_config.items():
self.__dict__[key] = value
elif isinstance(vocab_size_or_config_json_file, int):
self.vocab_size = vocab_size_or_config_json_file
self.max_position_embeddings = max_position_embeddings
self.sinusoidal_pos_embds = sinusoidal_pos_embds
self.n_layers = n_layers
self.n_heads = n_heads
self.dim = dim
self.hidden_dim = hidden_dim
self.dropout = dropout
self.attention_dropout = attention_dropout
self.activation = activation
self.initializer_range = initializer_range
self.tie_weights_ = tie_weights_
self.qa_dropout = qa_dropout
self.seq_classif_dropout = seq_classif_dropout
else:
raise ValueError("First argument must be either a vocabulary size (int)"
" or the path to a pretrained model config file (str)")
@property
def hidden_size(self):
return self.dim
@property
def num_attention_heads(self):
return self.n_heads
@property
def num_hidden_layers(self):
return self.n_layers
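# e.g. DistilBertConfig() yields the default 6-layer, 12-head, 768-dim
# configuration, while DistilBertConfig('distilbert_config.json') instead reads
# every field from that JSON file (the path name here is illustrative).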
### UTILS AND BUILDING BLOCKS OF THE ARCHITECTURE ###
def gelu(x):
return 0.5 * x * (1.0 + torch.erf(x / math.sqrt(2.0)))
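# This is the exact, erf-based GELU: gelu(x) = x * Phi(x), with Phi the
# standard normal CDF, so gelu(0) = 0 and gelu(x) -> x for large positive x.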
def create_sinusoidal_embeddings(n_pos, dim, out):
position_enc = np.array([
[pos / np.power(10000, 2 * (j // 2) / dim) for j in range(dim)]
for pos in range(n_pos)
])
out[:, 0::2] = torch.FloatTensor(np.sin(position_enc[:, 0::2]))
out[:, 1::2] = torch.FloatTensor(np.cos(position_enc[:, 1::2]))
out.detach_()
out.requires_grad = False
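# These are the fixed encodings from "Attention Is All You Need":
#   PE(pos, 2j)   = sin(pos / 10000^(2j / dim))
#   PE(pos, 2j+1) = cos(pos / 10000^(2j / dim))
# e.g. create_sinusoidal_embeddings(512, 768, emb.weight) fills the (512, 768)
# weight matrix in place and freezes it (requires_grad=False).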
class Embeddings(nn.Module):
def __init__(self,
config):
super(Embeddings, self).__init__()
self.word_embeddings = nn.Embedding(config.vocab_size, config.dim, padding_idx=0)
self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.dim)
if config.sinusoidal_pos_embds:
create_sinusoidal_embeddings(n_pos=config.max_position_embeddings,
dim=config.dim,
out=self.position_embeddings.weight)
self.LayerNorm = nn.LayerNorm(config.dim, eps=1e-12)
self.dropout = nn.Dropout(config.dropout)
def forward(self, input_ids):
"""
Parameters
----------
input_ids: torch.tensor(bs, max_seq_length)
The token ids to embed.
Outputs
-------
embeddings: torch.tensor(bs, max_seq_length, dim)
The embedded tokens (plus position embeddings, no token_type embeddings)
"""
seq_length = input_ids.size(1)
position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device) # (max_seq_length)
position_ids = position_ids.unsqueeze(0).expand_as(input_ids) # (bs, max_seq_length)
word_embeddings = self.word_embeddings(input_ids) # (bs, max_seq_length, dim)
position_embeddings = self.position_embeddings(position_ids) # (bs, max_seq_length, dim)
embeddings = word_embeddings + position_embeddings # (bs, max_seq_length, dim)
embeddings = self.LayerNorm(embeddings) # (bs, max_seq_length, dim)
embeddings = self.dropout(embeddings) # (bs, max_seq_length, dim)
return embeddings
class MultiHeadSelfAttention(nn.Module):
def __init__(self, config):
super(MultiHeadSelfAttention, self).__init__()
self.n_heads = config.n_heads
self.dim = config.dim
self.dropout = nn.Dropout(p=config.attention_dropout)
self.output_attentions = config.output_attentions
assert self.dim % self.n_heads == 0
self.q_lin = nn.Linear(in_features=config.dim, out_features=config.dim)
self.k_lin = nn.Linear(in_features=config.dim, out_features=config.dim)
self.v_lin = nn.Linear(in_features=config.dim, out_features=config.dim)
self.out_lin = nn.Linear(in_features=config.dim, out_features=config.dim)
self.pruned_heads = set()
def prune_heads(self, heads):
attention_head_size = self.dim // self.n_heads
if len(heads) == 0:
return
mask = torch.ones(self.n_heads, attention_head_size)
heads = set(heads) - self.pruned_heads
for head in heads:
head -= sum(1 if h < head else 0 for h in self.pruned_heads)
mask[head] = 0
mask = mask.view(-1).contiguous().eq(1)
index = torch.arange(len(mask))[mask].long()
# Prune linear layers
self.q_lin = prune_linear_layer(self.q_lin, index)
self.k_lin = prune_linear_layer(self.k_lin, index)
self.v_lin = prune_linear_layer(self.v_lin, index)
self.out_lin = prune_linear_layer(self.out_lin, index, dim=1)
# Update hyper params
self.n_heads = self.n_heads - len(heads)
self.dim = attention_head_size * self.n_heads
self.pruned_heads = self.pruned_heads.union(heads)
def forward(self, query, key, value, mask, head_mask = None):
"""
Parameters
----------
query: torch.tensor(bs, seq_length, dim)
key: torch.tensor(bs, seq_length, dim)
value: torch.tensor(bs, seq_length, dim)
mask: torch.tensor(bs, seq_length)
Outputs
-------
weights: torch.tensor(bs, n_heads, seq_length, seq_length)
Attention weights
context: torch.tensor(bs, seq_length, dim)
Contextualized layer. Optional: only if `output_attentions=True`
"""
bs, q_length, dim = query.size()
k_length = key.size(1)
# assert dim == self.dim, 'Dimensions do not match: %s input vs %s configured' % (dim, self.dim)
# assert key.size() == value.size()
dim_per_head = self.dim // self.n_heads
assert 2 <= mask.dim() <= 3
causal = (mask.dim() == 3)
mask_reshp = (bs, 1, 1, k_length)
def shape(x):
""" separate heads """
return x.view(bs, -1, self.n_heads, dim_per_head).transpose(1, 2)
def unshape(x):
""" group heads """
return x.transpose(1, 2).contiguous().view(bs, -1, self.n_heads * dim_per_head)
q = shape(self.q_lin(query)) # (bs, n_heads, q_length, dim_per_head)
k = shape(self.k_lin(key)) # (bs, n_heads, k_length, dim_per_head)
v = shape(self.v_lin(value)) # (bs, n_heads, k_length, dim_per_head)
q = q / math.sqrt(dim_per_head) # (bs, n_heads, q_length, dim_per_head)
scores = torch.matmul(q, k.transpose(2,3)) # (bs, n_heads, q_length, k_length)
mask = (mask==0).view(mask_reshp).expand_as(scores) # (bs, n_heads, q_length, k_length)
scores.masked_fill_(mask, -float('inf')) # (bs, n_heads, q_length, k_length)
weights = nn.Softmax(dim=-1)(scores) # (bs, n_heads, q_length, k_length)
weights = self.dropout(weights) # (bs, n_heads, q_length, k_length)
# Mask heads if we want to
if head_mask is not None:
weights = weights * head_mask
context = torch.matmul(weights, v) # (bs, n_heads, q_length, dim_per_head)
context = unshape(context) # (bs, q_length, dim)
context = self.out_lin(context) # (bs, q_length, dim)
if self.output_attentions:
return (context, weights)
else:
return (context,)
class FFN(nn.Module):
def __init__(self, config):
super(FFN, self).__init__()
self.dropout = nn.Dropout(p=config.dropout)
self.lin1 = nn.Linear(in_features=config.dim, out_features=config.hidden_dim)
self.lin2 = nn.Linear(in_features=config.hidden_dim, out_features=config.dim)
assert config.activation in ['relu', 'gelu'], "activation ({}) must be in ['relu', 'gelu']".format(config.activation)
self.activation = gelu if config.activation == 'gelu' else nn.ReLU()
def forward(self, input):
x = self.lin1(input)
x = self.activation(x)
x = self.lin2(x)
x = self.dropout(x)
return x
class TransformerBlock(nn.Module):
def __init__(self, config):
super(TransformerBlock, self).__init__()
self.n_heads = config.n_heads
self.dim = config.dim
self.hidden_dim = config.hidden_dim
self.dropout = nn.Dropout(p=config.dropout)
self.activation = config.activation
self.output_attentions = config.output_attentions
assert config.dim % config.n_heads == 0
self.attention = MultiHeadSelfAttention(config)
self.sa_layer_norm = nn.LayerNorm(normalized_shape=config.dim, eps=1e-12)
self.ffn = FFN(config)
self.output_layer_norm = nn.LayerNorm(normalized_shape=config.dim, eps=1e-12)
def forward(self, x, attn_mask=None, head_mask=None):
"""
Parameters
----------
x: torch.tensor(bs, seq_length, dim)
attn_mask: torch.tensor(bs, seq_length)
Outputs
-------
sa_weights: torch.tensor(bs, n_heads, seq_length, seq_length)
The attention weights
ffn_output: torch.tensor(bs, seq_length, dim)
The output of the transformer block contextualization.
"""
# Self-Attention
sa_output = self.attention(query=x, key=x, value=x, mask=attn_mask, head_mask=head_mask)
if self.output_attentions:
sa_output, sa_weights = sa_output # (bs, seq_length, dim), (bs, n_heads, seq_length, seq_length)
else: # To handle these `output_attention` or `output_hidden_states` cases returning tuples
assert type(sa_output) == tuple
sa_output = sa_output[0]
sa_output = self.sa_layer_norm(sa_output + x) # (bs, seq_length, dim)
# Feed Forward Network
ffn_output = self.ffn(sa_output) # (bs, seq_length, dim)
ffn_output = self.output_layer_norm(ffn_output + sa_output) # (bs, seq_length, dim)
output = (ffn_output,)
if self.output_attentions:
output = (sa_weights,) + output
return output
class Transformer(nn.Module):
def __init__(self, config):
super(Transformer, self).__init__()
self.n_layers = config.n_layers
self.output_attentions = config.output_attentions
self.output_hidden_states = config.output_hidden_states
layer = TransformerBlock(config)
self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.n_layers)])
def forward(self, x, attn_mask=None, head_mask=None):
"""
Parameters
----------
x: torch.tensor(bs, seq_length, dim)
Input sequence embedded.
attn_mask: torch.tensor(bs, seq_length)
Attention mask on the sequence.
Outputs
-------
hidden_state: torch.tensor(bs, seq_length, dim)
Sequence of hiddens states in the last (top) layer
all_hidden_states: Tuple[torch.tensor(bs, seq_length, dim)]
Tuple of length n_layers with the hidden states from each layer.
Optional: only if output_hidden_states=True
all_attentions: Tuple[torch.tensor(bs, n_heads, seq_length, seq_length)]
Tuple of length n_layers with the attention weights from each layer
Optional: only if output_attentions=True
"""
all_hidden_states = ()
all_attentions = ()
hidden_state = x
for i, layer_module in enumerate(self.layer):
if self.output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_state,)
layer_outputs = layer_module(x=hidden_state,
attn_mask=attn_mask,
head_mask=head_mask[i])
hidden_state = layer_outputs[-1]
if self.output_attentions:
assert len(layer_outputs) == 2
attentions = layer_outputs[0]
all_attentions = all_attentions + (attentions,)
else:
assert len(layer_outputs) == 1
# Add last layer
if self.output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_state,)
outputs = (hidden_state,)
if self.output_hidden_states:
outputs = outputs + (all_hidden_states,)
if self.output_attentions:
outputs = outputs + (all_attentions,)
return outputs # last-layer hidden state, (all hidden states), (all attentions)
### INTERFACE FOR ENCODER AND TASK SPECIFIC MODEL ###
class DistilBertPreTrainedModel(PreTrainedModel):
""" An abstract class to handle weights initialization and
a simple interface for downloading and loading pretrained models.
"""
config_class = DistilBertConfig
pretrained_model_archive_map = DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP
load_tf_weights = None
base_model_prefix = "distilbert"
def __init__(self, *inputs, **kwargs):
super(DistilBertPreTrainedModel, self).__init__(*inputs, **kwargs)
def _init_weights(self, module):
""" Initialize the weights.
"""
if isinstance(module, nn.Embedding):
if module.weight.requires_grad:
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if isinstance(module, nn.Linear):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
elif isinstance(module, nn.LayerNorm):
module.bias.data.zero_()
module.weight.data.fill_(1.0)
if isinstance(module, nn.Linear) and module.bias is not None:
module.bias.data.zero_()
DISTILBERT_START_DOCSTRING = r"""
DistilBERT is a small, fast, cheap and light Transformer model
    trained by distilling Bert base. It has 40% fewer parameters than
    `bert-base-uncased` and runs 60% faster while preserving over 95% of
    Bert's performance as measured on the GLUE language understanding benchmark.
Here are the differences between the interface of Bert and DistilBert:
- DistilBert doesn't have `token_type_ids`, you don't need to indicate which token belongs to which segment. Just separate your segments with the separation token `tokenizer.sep_token` (or `[SEP]`)
    - DistilBert doesn't have options to select the input positions (`position_ids` input). This could be added if necessary though, just let us know if you need this option.
For more information on DistilBERT, please refer to our
`detailed blog post`_
.. _`detailed blog post`:
https://medium.com/huggingface/distilbert-8cf3380435b5
Parameters:
config (:class:`~pytorch_transformers.DistilBertConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the configuration.
Check out the :meth:`~pytorch_transformers.PreTrainedModel.from_pretrained` method to load the model weights.
"""
DISTILBERT_INPUTS_DOCSTRING = r"""
Inputs:
**input_ids** ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
Indices of input sequence tokens in the vocabulary.
The input sequences should start with `[CLS]` and end with `[SEP]` tokens.
For now, ONLY BertTokenizer(`bert-base-uncased`) is supported and you should use this tokenizer when using DistilBERT.
**attention_mask**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
Mask to avoid performing attention on padding token indices.
Mask values selected in ``[0, 1]``:
``1`` for tokens that are NOT MASKED, ``0`` for MASKED tokens.
**head_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(num_heads,)`` or ``(num_layers, num_heads)``:
Mask to nullify selected heads of the self-attention modules.
Mask values selected in ``[0, 1]``:
``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**.
"""
@add_start_docstrings("The bare DistilBERT encoder/transformer outputing raw hidden-states without any specific head on top.",
DISTILBERT_START_DOCSTRING, DISTILBERT_INPUTS_DOCSTRING)
class DistilBertModel(DistilBertPreTrainedModel):
r"""
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**last_hidden_state**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, hidden_size)``
Sequence of hidden-states at the output of the last layer of the model.
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
tokenizer = DistilBertTokenizer.from_pretrained('distilbert-base-uncased')
model = DistilBertModel.from_pretrained('distilbert-base-uncased')
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1
outputs = model(input_ids)
last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple
"""
def __init__(self, config):
super(DistilBertModel, self).__init__(config)
self.embeddings = Embeddings(config) # Embeddings
self.transformer = Transformer(config) # Encoder
self.init_weights()
def _resize_token_embeddings(self, new_num_tokens):
old_embeddings = self.embeddings.word_embeddings
new_embeddings = self._get_resized_embeddings(old_embeddings, new_num_tokens)
self.embeddings.word_embeddings = new_embeddings
return self.embeddings.word_embeddings
def _prune_heads(self, heads_to_prune):
""" Prunes heads of the model.
heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
See base class PreTrainedModel
"""
for layer, heads in heads_to_prune.items():
self.transformer.layer[layer].attention.prune_heads(heads)
def forward(self,
input_ids, attention_mask=None, head_mask=None):
if attention_mask is None:
attention_mask = torch.ones_like(input_ids) # (bs, seq_length)
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
# and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
if head_mask is not None:
if head_mask.dim() == 1:
head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
head_mask = head_mask.expand(self.config.num_hidden_layers, -1, -1, -1, -1)
elif head_mask.dim() == 2:
head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) # We can specify head_mask for each layer
            head_mask = head_mask.to(dtype=next(self.parameters()).dtype)  # switch to float if needed + fp16 compatibility
else:
head_mask = [None] * self.config.num_hidden_layers
embedding_output = self.embeddings(input_ids) # (bs, seq_length, dim)
tfmr_output = self.transformer(x=embedding_output,
attn_mask=attention_mask,
head_mask=head_mask)
hidden_state = tfmr_output[0]
output = (hidden_state, ) + tfmr_output[1:]
return output # last-layer hidden-state, (all hidden_states), (all attentions)
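def _example_head_mask():
    # Hedged sketch (illustrative only, requires downloadable pretrained
    # weights): zero out the first attention head in every layer. A 1D mask
    # of shape (n_heads,) is broadcast across layers by the conversion in
    # `forward` above. The input ids are toy BERT-uncased ids.
    model = DistilBertModel.from_pretrained('distilbert-base-uncased')
    input_ids = torch.tensor([[101, 7592, 102]])  # [CLS] hello [SEP], batch size 1
    head_mask = torch.ones(model.config.n_heads)
    head_mask[0] = 0.0
    return model(input_ids, head_mask=head_mask)[0]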
@add_start_docstrings("""DistilBert Model with a `masked language modeling` head on top. """,
DISTILBERT_START_DOCSTRING, DISTILBERT_INPUTS_DOCSTRING)
class DistilBertForMaskedLM(DistilBertPreTrainedModel):
r"""
**masked_lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
Labels for computing the masked language modeling loss.
Indices should be in ``[-1, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring)
Tokens with indices set to ``-1`` are ignored (masked), the loss is only computed for the tokens with labels
in ``[0, ..., config.vocab_size]``
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**loss**: (`optional`, returned when ``masked_lm_labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
Masked language modeling loss.
**prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
tokenizer = DistilBertTokenizer.from_pretrained('distilbert-base-uncased')
model = DistilBertForMaskedLM.from_pretrained('distilbert-base-uncased')
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1
outputs = model(input_ids, masked_lm_labels=input_ids)
loss, prediction_scores = outputs[:2]
"""
def __init__(self, config):
super(DistilBertForMaskedLM, self).__init__(config)
self.output_attentions = config.output_attentions
self.output_hidden_states = config.output_hidden_states
self.distilbert = DistilBertModel(config)
self.vocab_transform = nn.Linear(config.dim, config.dim)
self.vocab_layer_norm = nn.LayerNorm(config.dim, eps=1e-12)
self.vocab_projector = nn.Linear(config.dim, config.vocab_size)
self.init_weights()
self.tie_weights()
self.mlm_loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
def tie_weights(self):
""" Make sure we are sharing the input and output embeddings.
Export to TorchScript can't handle parameter sharing so we are cloning them instead.
"""
self._tie_or_clone_weights(self.vocab_projector,
self.distilbert.embeddings.word_embeddings)
def forward(self, input_ids, attention_mask=None, masked_lm_labels=None, head_mask=None):
dlbrt_output = self.distilbert(input_ids=input_ids,
attention_mask=attention_mask,
head_mask=head_mask)
hidden_states = dlbrt_output[0] # (bs, seq_length, dim)
prediction_logits = self.vocab_transform(hidden_states) # (bs, seq_length, dim)
prediction_logits = gelu(prediction_logits) # (bs, seq_length, dim)
prediction_logits = self.vocab_layer_norm(prediction_logits) # (bs, seq_length, dim)
prediction_logits = self.vocab_projector(prediction_logits) # (bs, seq_length, vocab_size)
outputs = (prediction_logits, ) + dlbrt_output[1:]
if masked_lm_labels is not None:
mlm_loss = self.mlm_loss_fct(prediction_logits.view(-1, prediction_logits.size(-1)),
masked_lm_labels.view(-1))
outputs = (mlm_loss,) + outputs
return outputs # (mlm_loss), prediction_logits, (all hidden_states), (all attentions)
@add_start_docstrings("""DistilBert Model transformer with a sequence classification/regression head on top (a linear layer on top of
the pooled output) e.g. for GLUE tasks. """,
DISTILBERT_START_DOCSTRING, DISTILBERT_INPUTS_DOCSTRING)
class DistilBertForSequenceClassification(DistilBertPreTrainedModel):
r"""
**labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
Labels for computing the sequence classification/regression loss.
Indices should be in ``[0, ..., config.num_labels - 1]``.
If ``config.num_labels == 1`` a regression loss is computed (Mean-Square loss),
If ``config.num_labels > 1`` a classification loss is computed (Cross-Entropy).
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
Classification (or regression if config.num_labels==1) loss.
**logits**: ``torch.FloatTensor`` of shape ``(batch_size, config.num_labels)``
Classification (or regression if config.num_labels==1) scores (before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
tokenizer = DistilBertTokenizer.from_pretrained('distilbert-base-uncased')
model = DistilBertForSequenceClassification.from_pretrained('distilbert-base-uncased')
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1
labels = torch.tensor([1]).unsqueeze(0) # Batch size 1
outputs = model(input_ids, labels=labels)
loss, logits = outputs[:2]
"""
def __init__(self, config):
super(DistilBertForSequenceClassification, self).__init__(config)
self.num_labels = config.num_labels
self.distilbert = DistilBertModel(config)
self.pre_classifier = nn.Linear(config.dim, config.dim)
self.classifier = nn.Linear(config.dim, config.num_labels)
self.dropout = nn.Dropout(config.seq_classif_dropout)
self.init_weights()
def forward(self, input_ids, attention_mask=None, labels=None, head_mask=None):
distilbert_output = self.distilbert(input_ids=input_ids,
attention_mask=attention_mask,
head_mask=head_mask)
hidden_state = distilbert_output[0] # (bs, seq_len, dim)
pooled_output = hidden_state[:, 0] # (bs, dim)
pooled_output = self.pre_classifier(pooled_output) # (bs, dim)
pooled_output = nn.ReLU()(pooled_output) # (bs, dim)
pooled_output = self.dropout(pooled_output) # (bs, dim)
logits = self.classifier(pooled_output) # (bs, dim)
outputs = (logits,) + distilbert_output[1:]
if labels is not None:
if self.num_labels == 1:
loss_fct = nn.MSELoss()
loss = loss_fct(logits.view(-1), labels.view(-1))
else:
loss_fct = nn.CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
outputs = (loss,) + outputs
return outputs # (loss), logits, (hidden_states), (attentions)
@add_start_docstrings("""DistilBert Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of
the hidden-states output to compute `span start logits` and `span end logits`). """,
DISTILBERT_START_DOCSTRING, DISTILBERT_INPUTS_DOCSTRING)
class DistilBertForQuestionAnswering(DistilBertPreTrainedModel):
r"""
**start_positions**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
Labels for position (index) of the start of the labelled span for computing the token classification loss.
Positions are clamped to the length of the sequence (`sequence_length`).
Position outside of the sequence are not taken into account for computing the loss.
**end_positions**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
Labels for position (index) of the end of the labelled span for computing the token classification loss.
Positions are clamped to the length of the sequence (`sequence_length`).
Position outside of the sequence are not taken into account for computing the loss.
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
Total span extraction loss is the sum of a Cross-Entropy for the start and end positions.
**start_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length,)``
Span-start scores (before SoftMax).
**end_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length,)``
Span-end scores (before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
tokenizer = DistilBertTokenizer.from_pretrained('distilbert-base-uncased')
model = DistilBertForQuestionAnswering.from_pretrained('distilbert-base-uncased')
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1
start_positions = torch.tensor([1])
end_positions = torch.tensor([3])
outputs = model(input_ids, start_positions=start_positions, end_positions=end_positions)
        loss, start_scores, end_scores = outputs[:3]
"""
def __init__(self, config):
super(DistilBertForQuestionAnswering, self).__init__(config)
self.distilbert = DistilBertModel(config)
self.qa_outputs = nn.Linear(config.dim, config.num_labels)
assert config.num_labels == 2
self.dropout = nn.Dropout(config.qa_dropout)
self.init_weights()
def forward(self, input_ids, attention_mask=None, start_positions=None, end_positions=None, head_mask=None):
distilbert_output = self.distilbert(input_ids=input_ids,
attention_mask=attention_mask,
head_mask=head_mask)
hidden_states = distilbert_output[0] # (bs, max_query_len, dim)
hidden_states = self.dropout(hidden_states) # (bs, max_query_len, dim)
logits = self.qa_outputs(hidden_states) # (bs, max_query_len, 2)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1) # (bs, max_query_len)
end_logits = end_logits.squeeze(-1) # (bs, max_query_len)
outputs = (start_logits, end_logits,) + distilbert_output[1:]
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, splitting adds a dimension; squeeze it
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
# sometimes the start/end positions are outside our model inputs, we ignore these terms
ignored_index = start_logits.size(1)
start_positions.clamp_(0, ignored_index)
end_positions.clamp_(0, ignored_index)
loss_fct = nn.CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
outputs = (total_loss,) + outputs
return outputs # (loss), start_logits, end_logits, (hidden_states), (attentions) | PypiClean |
/loo-v2020-2020.2.tar.gz/loo-v2020-2020.2/loopy/frontend/fortran/expression.py | from __future__ import division, with_statement
__copyright__ = "Copyright (C) 2013 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from pymbolic.parser import Parser as ExpressionParserBase
from loopy.frontend.fortran.diagnostic import TranslationError
from six.moves import intern
import numpy as np
import pytools.lex
import re
_less_than = intern("less_than")
_greater_than = intern("greater_than")
_less_equal = intern("less_equal")
_greater_equal = intern("greater_equal")
_equal = intern("equal")
_not_equal = intern("not_equal")
_not = intern("not")
_and = intern("and")
_or = intern("or")
# {{{ expression parser
class FortranExpressionParser(ExpressionParserBase):
lex_table = [
(_less_than, pytools.lex.RE(r"\.lt\.", re.I)),
(_greater_than, pytools.lex.RE(r"\.gt\.", re.I)),
(_less_equal, pytools.lex.RE(r"\.le\.", re.I)),
(_greater_equal, pytools.lex.RE(r"\.ge\.", re.I)),
(_equal, pytools.lex.RE(r"\.eq\.", re.I)),
(_not_equal, pytools.lex.RE(r"\.ne\.", re.I)),
(_not, pytools.lex.RE(r"\.not\.", re.I)),
(_and, pytools.lex.RE(r"\.and\.", re.I)),
(_or, pytools.lex.RE(r"\.or\.", re.I)),
] + ExpressionParserBase.lex_table
def __init__(self, tree_walker):
self.tree_walker = tree_walker
_PREC_FUNC_ARGS = 1
def parse_terminal(self, pstate):
scope = self.tree_walker.scope_stack[-1]
from pymbolic.primitives import Subscript, Call, Variable
from pymbolic.parser import (
_identifier, _openpar, _closepar, _float)
next_tag = pstate.next_tag()
if next_tag is _float:
value = pstate.next_str_and_advance().lower()
if "d" in value:
dtype = np.float64
else:
dtype = np.float32
value = value.replace("d", "e")
if value.startswith("."):
value = "0"+value
elif value.startswith("-."):
value = "-0"+value[1:]
return dtype(float(value))
elif next_tag is _identifier:
name = pstate.next_str_and_advance()
if pstate.is_at_end() or pstate.next_tag() is not _openpar:
# not a subscript
scope.use_name(name)
return Variable(name)
left_exp = Variable(name)
pstate.advance()
pstate.expect_not_end()
if scope.is_known(name):
cls = Subscript
else:
cls = Call
            if pstate.next_tag() is _closepar:
pstate.advance()
left_exp = cls(left_exp, ())
else:
args = self.parse_expression(pstate, self._PREC_FUNC_ARGS)
if not isinstance(args, tuple):
args = (args,)
left_exp = cls(left_exp, args)
pstate.expect(_closepar)
pstate.advance()
return left_exp
else:
return ExpressionParserBase.parse_terminal(
self, pstate)
COMP_MAP = {
_less_than: "<",
_less_equal: "<=",
_greater_than: ">",
_greater_equal: ">=",
_equal: "==",
_not_equal: "!=",
}
def parse_prefix(self, pstate, min_precedence=0):
from pymbolic.parser import _PREC_UNARY
import pymbolic.primitives as primitives
pstate.expect_not_end()
if pstate.is_next(_not):
pstate.advance()
return primitives.LogicalNot(
self.parse_expression(pstate, _PREC_UNARY))
else:
return ExpressionParserBase.parse_prefix(self, pstate)
def parse_postfix(self, pstate, min_precedence, left_exp):
from pymbolic.parser import (
_PREC_CALL, _PREC_COMPARISON, _openpar,
_PREC_LOGICAL_OR, _PREC_LOGICAL_AND)
from pymbolic.primitives import (
Comparison, LogicalAnd, LogicalOr)
next_tag = pstate.next_tag()
if next_tag is _openpar and _PREC_CALL > min_precedence:
raise TranslationError("parenthesis operator only works on names")
elif next_tag in self.COMP_MAP and _PREC_COMPARISON > min_precedence:
pstate.advance()
left_exp = Comparison(
left_exp,
self.COMP_MAP[next_tag],
self.parse_expression(pstate, _PREC_COMPARISON))
did_something = True
elif next_tag is _and and _PREC_LOGICAL_AND > min_precedence:
pstate.advance()
left_exp = LogicalAnd((left_exp,
self.parse_expression(pstate, _PREC_LOGICAL_AND)))
did_something = True
elif next_tag is _or and _PREC_LOGICAL_OR > min_precedence:
pstate.advance()
left_exp = LogicalOr((left_exp,
self.parse_expression(pstate, _PREC_LOGICAL_OR)))
did_something = True
else:
left_exp, did_something = ExpressionParserBase.parse_postfix(
self, pstate, min_precedence, left_exp)
if isinstance(left_exp, tuple) and min_precedence < self._PREC_FUNC_ARGS:
# this must be a complex literal
if len(left_exp) != 2:
raise TranslationError("complex literals must have "
"two entries")
            r, i = left_exp
            # compare the promoted scalar *type*, not a zero-valued scalar
            dtype = type(r.dtype.type(0) + i.dtype.type(0))
if dtype == np.float32:
dtype = np.complex64
else:
dtype = np.complex128
left_exp = dtype(float(r) + float(i)*1j)
return left_exp, did_something
# }}}
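def _demo_parse():
    # Hedged usage sketch: parse a Fortran expression with a minimal stub in
    # place of the real tree walker (the walker only needs a `scope_stack`
    # here; `_StubScope` and `_StubWalker` are made up for illustration).
    class _StubScope:
        def use_name(self, name):
            pass
        def is_known(self, name):
            return False
    class _StubWalker:
        scope_stack = [_StubScope()]
    parser = FortranExpressionParser(_StubWalker())
    # returns a pymbolic tree: a LogicalAnd of two Comparisons
    return parser("x .lt. 1.5d0 .and. n .ge. 2")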
# vim: foldmethod=marker | PypiClean |
/django-cruds-padminlte-0.0.13a5.tar.gz/django-cruds-padminlte-0.0.13a5/cruds_adminlte/static/ckeditor/plugins/specialchar/dialogs/lang/eo.js | /*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("specialchar","eo",{euro:"Eŭrosigno",lsquo:"Supra 6-citilo",rsquo:"Supra 9-citilo",ldquo:"Supra 66-citilo",rdquo:"Supra 99-citilo",ndash:"Streketo",mdash:"Substreko",iexcl:"Renversita krisigno",cent:"Cendosigno",pound:"Pundosigno",curren:"Monersigno",yen:"Enosigno",brvbar:"Rompita vertikala streko",sect:"Kurba paragrafo",uml:"Tremao",copy:"Kopirajtosigno",ordf:"Adjektiva numerfinaĵo",laquo:"Duobla malplio-citilo",not:"Negohoko",reg:"Registrita marko",macr:"Superstreko",deg:"Gradosigno",
sup2:"Supra indico 2",sup3:"Supra indico 3",acute:"Dekstra korno",micro:"Mikrosigno",para:"Rekta paragrafo",middot:"Meza punkto",cedil:"Zoeto",sup1:"Supra indico 1",ordm:"Substantiva numerfinaĵo",raquo:"Duobla plio-citilo",frac14:"Kvaronosigno",frac12:"Duonosigno",frac34:"Trikvaronosigno",iquest:"renversita demandosigno",Agrave:"Latina ĉeflitero A kun liva korno",Aacute:"Latina ĉeflitero A kun dekstra korno",Acirc:"Latina ĉeflitero A kun ĉapelo",Atilde:"Latina ĉeflitero A kun tildo",Auml:"Latina ĉeflitero A kun tremao",
Aring:"Latina ĉeflitero A kun superringo",AElig:"Latina ĉeflitera ligaturo Æ",Ccedil:"Latina ĉeflitero C kun zoeto",Egrave:"Latina ĉeflitero E kun liva korno",Eacute:"Latina ĉeflitero E kun dekstra korno",Ecirc:"Latina ĉeflitero E kun ĉapelo",Euml:"Latina ĉeflitero E kun tremao",Igrave:"Latina ĉeflitero I kun liva korno",Iacute:"Latina ĉeflitero I kun dekstra korno",Icirc:"Latina ĉeflitero I kun ĉapelo",Iuml:"Latina ĉeflitero I kun tremao",ETH:"Latina ĉeflitero islanda edo",Ntilde:"Latina ĉeflitero N kun tildo",
Ograve:"Latina ĉeflitero O kun liva korno",Oacute:"Latina ĉeflitero O kun dekstra korno",Ocirc:"Latina ĉeflitero O kun ĉapelo",Otilde:"Latina ĉeflitero O kun tildo",Ouml:"Latina ĉeflitero O kun tremao",times:"Multipliko",Oslash:"Latina ĉeflitero O trastrekita",Ugrave:"Latina ĉeflitero U kun liva korno",Uacute:"Latina ĉeflitero U kun dekstra korno",Ucirc:"Latina ĉeflitero U kun ĉapelo",Uuml:"Latina ĉeflitero U kun tremao",Yacute:"Latina ĉeflitero Y kun dekstra korno",THORN:"Latina ĉeflitero islanda dorno",
szlig:"Latina etlitero germana sozo (akra s)",agrave:"Latina etlitero a kun liva korno",aacute:"Latina etlitero a kun dekstra korno",acirc:"Latina etlitero a kun ĉapelo",atilde:"Latina etlitero a kun tildo",auml:"Latina etlitero a kun tremao",aring:"Latina etlitero a kun superringo",aelig:"Latina etlitera ligaturo æ",ccedil:"Latina etlitero c kun zoeto",egrave:"Latina etlitero e kun liva korno",eacute:"Latina etlitero e kun dekstra korno",ecirc:"Latina etlitero e kun ĉapelo",euml:"Latina etlitero e kun tremao",
igrave:"Latina etlitero i kun liva korno",iacute:"Latina etlitero i kun dekstra korno",icirc:"Latina etlitero i kun ĉapelo",iuml:"Latina etlitero i kun tremao",eth:"Latina etlitero islanda edo",ntilde:"Latina etlitero n kun tildo",ograve:"Latina etlitero o kun liva korno",oacute:"Latina etlitero o kun dekstra korno",ocirc:"Latina etlitero o kun ĉapelo",otilde:"Latina etlitero o kun tildo",ouml:"Latina etlitero o kun tremao",divide:"Dividosigno",oslash:"Latina etlitero o trastrekita",ugrave:"Latina etlitero u kun liva korno",
uacute:"Latina etlitero u kun dekstra korno",ucirc:"Latina etlitero u kun ĉapelo",uuml:"Latina etlitero u kun tremao",yacute:"Latina etlitero y kun dekstra korno",thorn:"Latina etlitero islanda dorno",yuml:"Latina etlitero y kun tremao",OElig:"Latina ĉeflitera ligaturo Œ",oelig:"Latina etlitera ligaturo œ",372:"Latina ĉeflitero W kun ĉapelo",374:"Latina ĉeflitero Y kun ĉapelo",373:"Latina etlitero w kun ĉapelo",375:"Latina etlitero y kun ĉapelo",sbquo:"Suba 9-citilo",8219:"Supra renversita 9-citilo",
bdquo:"Suba 99-citilo",hellip:"Tripunkto",trade:"Varmarka signo",9658:"Nigra sago dekstren",bull:"Bulmarko",rarr:"Sago dekstren",rArr:"Duobla sago dekstren",hArr:"Duobla sago maldekstren",diams:"Nigra kvadrato",asymp:"Preskaŭ egala"}); | PypiClean |
/edg-0.0.1.tar.gz/edg-0.0.1/electronics_lib/JlcInductor.py | from typing import *
import re
from electronics_abstract_parts import *
from .JlcPart import DescriptionParser, JlcTableSelector
class JlcInductor(TableInductor, SmdStandardPackageSelector, JlcTableSelector):
PACKAGE_FOOTPRINT_MAP = {
'0402': 'Inductor_SMD:L_0402_1005Metric',
'0603': 'Inductor_SMD:L_0603_1608Metric',
'0805': 'Inductor_SMD:L_0805_2012Metric',
'1206': 'Inductor_SMD:L_1206_3216Metric',
'1210': 'Inductor_SMD:L_1210_3225Metric',
'1812': 'Inductor_SMD:L_1812_4532Metric',
'L0402': 'Inductor_SMD:L_0402_1005Metric',
'L0603': 'Inductor_SMD:L_0603_1608Metric',
'L0805': 'Inductor_SMD:L_0805_2012Metric',
'L1206': 'Inductor_SMD:L_1206_3216Metric',
'L1210': 'Inductor_SMD:L_1210_3225Metric',
'L1812': 'Inductor_SMD:L_1812_4532Metric',
}
# a secondary parsing method if the package parser fails
PART_FOOTPRINT_PARSERS: List[DescriptionParser] = [
(re.compile("^NR(20|24|30|40|50|60|80).*$"),
lambda match: {
PartsTableFootprint.KICAD_FOOTPRINT: f'Inductor_SMD:L_Taiyo-Yuden_NR-{match.group(1)}xx'
}),
(re.compile("^SRR1015-.*$"),
lambda match: {
PartsTableFootprint.KICAD_FOOTPRINT: f'Inductor_SMD:L_Bourns-SRR1005'
}),
(re.compile("^SRR1210A?-.*$"),
lambda match: {
PartsTableFootprint.KICAD_FOOTPRINT: f'Inductor_SMD:L_Bourns_SRR1210A'
}),
(re.compile("^SRR1260A?-.*$"),
lambda match: {
PartsTableFootprint.KICAD_FOOTPRINT: f'Inductor_SMD:L_Bourns_SRR1260'
}),
        # KiCad does not have a stock 1008 footprint
]
DESCRIPTION_PARSERS: List[DescriptionParser] = [
(re.compile("(\S+A) (\S+H) (±\S+%) (\S+Ω) .* Inductors.*"),
lambda match: {
TableInductor.INDUCTANCE: Range.from_tolerance(PartParserUtil.parse_value(match.group(2), 'H'),
PartParserUtil.parse_tolerance(match.group(3))),
TableInductor.FREQUENCY_RATING: Range.all(), # ignored, checked elsewhere
TableInductor.CURRENT_RATING: Range.zero_to_upper(PartParserUtil.parse_value(match.group(1), 'A')),
TableInductor.DC_RESISTANCE: Range.zero_to_upper(PartParserUtil.parse_value(match.group(4), 'Ω')),
}),
(re.compile("(\S+A) (\S+H) ±(\S+H) (\S+Ω) .* Inductors.*"),
lambda match: {
TableInductor.INDUCTANCE: Range.from_abs_tolerance(PartParserUtil.parse_value(match.group(2), 'H'),
PartParserUtil.parse_value(match.group(3), 'H')),
TableInductor.FREQUENCY_RATING: Range.all(), # ignored, checked elsewhere
TableInductor.CURRENT_RATING: Range.zero_to_upper(PartParserUtil.parse_value(match.group(1), 'A')),
TableInductor.DC_RESISTANCE: Range.zero_to_upper(PartParserUtil.parse_value(match.group(4), 'Ω')),
}),
]
@init_in_parent
def __init__(self, *args, ignore_frequency: BoolLike = False, **kwargs):
super().__init__(*args, **kwargs)
self.require(ignore_frequency | (self.frequency == Range.zero_to_upper(0)),
"JLC inductors do not have frequency data, frequency spec must be ignored")
@classmethod
def _make_table(cls) -> PartsTable:
def parse_row(row: PartsTableRow) -> Optional[Dict[PartsTableColumn, Any]]:
if row['Second Category'] not in ('Inductors (SMD)', 'Power Inductors'):
return None
new_cols = {}
footprint = cls.PACKAGE_FOOTPRINT_MAP.get(row[cls._PACKAGE_HEADER])
if footprint is not None:
new_cols[cls.KICAD_FOOTPRINT] = footprint
else:
footprint_cols = cls.parse_full_description(row[cls.PART_NUMBER_COL], cls.PART_FOOTPRINT_PARSERS)
if footprint_cols is not None:
new_cols.update(footprint_cols)
else:
return None
desc_cols = cls.parse_full_description(row[cls.DESCRIPTION_COL], cls.DESCRIPTION_PARSERS)
if desc_cols is not None:
new_cols.update(desc_cols)
else:
return None
new_cols.update(cls._parse_jlcpcb_common(row))
return new_cols
return cls._jlc_table().map_new_columns(parse_row) | PypiClean |
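def _demo_description_parse():
    # Hedged sketch for JlcInductor above: what the first DESCRIPTION_PARSERS
    # regex extracts from a JLC-style description string (the description
    # text here is made up).
    import re
    pattern = re.compile(r"(\S+A) (\S+H) (±\S+%) (\S+Ω) .* Inductors.*")
    match = pattern.match("1.2A 10uH ±20% 96mΩ SMD,4x4x2mm Power Inductors ROHS")
    return match.groups()  # ('1.2A', '10uH', '±20%', '96mΩ')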
/retro_data_structures-0.23.0-py3-none-any.whl/retro_data_structures/properties/corruption/objects/CoverPoint.py | import dataclasses
import struct
import typing
from retro_data_structures.game_check import Game
from retro_data_structures.properties.base_property import BaseObjectType
from retro_data_structures.properties.corruption.archetypes.EditorProperties import EditorProperties
@dataclasses.dataclass()
class CoverPoint(BaseObjectType):
editor_properties: EditorProperties = dataclasses.field(default_factory=EditorProperties)
unknown_0x969de5ff: int = dataclasses.field(default=1)
should_crouch: bool = dataclasses.field(default=True)
horizontal_safe_angle: float = dataclasses.field(default=180.0)
vertical_safe_angle: float = dataclasses.field(default=90.0)
minimum_safe_range: float = dataclasses.field(default=5.0)
maximum_safe_range: float = dataclasses.field(default=500.0)
lock_time: float = dataclasses.field(default=10.0)
unknown_0x25717ace: bool = dataclasses.field(default=True)
unknown_0xc83e3228: int = dataclasses.field(default=0)
@classmethod
def game(cls) -> Game:
return Game.CORRUPTION
def get_name(self) -> typing.Optional[str]:
return self.editor_properties.name
def set_name(self, name: str) -> None:
self.editor_properties.name = name
@classmethod
def object_type(cls) -> str:
return 'COVR'
@classmethod
def from_stream(cls, data: typing.BinaryIO, size: typing.Optional[int] = None, default_override: typing.Optional[dict] = None):
struct_id, size, property_count = struct.unpack(">LHH", data.read(8))
assert struct_id == 0xFFFFFFFF
root_size_start = data.tell() - 2
present_fields = default_override or {}
for _ in range(property_count):
property_id, property_size = struct.unpack(">LH", data.read(6))
start = data.tell()
try:
property_name, decoder = _property_decoder[property_id]
present_fields[property_name] = decoder(data, property_size)
except KeyError:
raise RuntimeError(f"Unknown property: 0x{property_id:08x}")
assert data.tell() - start == property_size
assert data.tell() - root_size_start == size
return cls(**present_fields)
def to_stream(self, data: typing.BinaryIO, default_override: typing.Optional[dict] = None):
default_override = default_override or {}
data.write(b'\xff\xff\xff\xff') # struct object id
root_size_offset = data.tell()
data.write(b'\x00\x00') # placeholder for root struct size
data.write(b'\x00\n') # 10 properties
data.write(b'%ZE\x80') # 0x255a4580
before = data.tell()
data.write(b'\x00\x00') # size placeholder
self.editor_properties.to_stream(data)
after = data.tell()
data.seek(before)
data.write(struct.pack(">H", after - before - 2))
data.seek(after)
data.write(b'\x96\x9d\xe5\xff') # 0x969de5ff
data.write(b'\x00\x04') # size
data.write(struct.pack('>l', self.unknown_0x969de5ff))
data.write(b'\x80\x01\xc3\xbe') # 0x8001c3be
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.should_crouch))
data.write(b'FwI\x85') # 0x46774985
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.horizontal_safe_angle))
data.write(b'\xd9\xd7\xaf\xa6') # 0xd9d7afa6
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.vertical_safe_angle))
data.write(b'\xeb\xca?\xca') # 0xebca3fca
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.minimum_safe_range))
data.write(b'\xf8\x8f\xd9a') # 0xf88fd961
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.maximum_safe_range))
data.write(b'0\x8e\xdcD') # 0x308edc44
data.write(b'\x00\x04') # size
data.write(struct.pack('>f', self.lock_time))
data.write(b'%qz\xce') # 0x25717ace
data.write(b'\x00\x01') # size
data.write(struct.pack('>?', self.unknown_0x25717ace))
data.write(b'\xc8>2(') # 0xc83e3228
data.write(b'\x00\x04') # size
data.write(struct.pack('>l', self.unknown_0xc83e3228))
struct_end_offset = data.tell()
data.seek(root_size_offset)
data.write(struct.pack(">H", struct_end_offset - root_size_offset - 2))
data.seek(struct_end_offset)
@classmethod
def from_json(cls, data: dict):
return cls(
editor_properties=EditorProperties.from_json(data['editor_properties']),
unknown_0x969de5ff=data['unknown_0x969de5ff'],
should_crouch=data['should_crouch'],
horizontal_safe_angle=data['horizontal_safe_angle'],
vertical_safe_angle=data['vertical_safe_angle'],
minimum_safe_range=data['minimum_safe_range'],
maximum_safe_range=data['maximum_safe_range'],
lock_time=data['lock_time'],
unknown_0x25717ace=data['unknown_0x25717ace'],
unknown_0xc83e3228=data['unknown_0xc83e3228'],
)
def to_json(self) -> dict:
return {
'editor_properties': self.editor_properties.to_json(),
'unknown_0x969de5ff': self.unknown_0x969de5ff,
'should_crouch': self.should_crouch,
'horizontal_safe_angle': self.horizontal_safe_angle,
'vertical_safe_angle': self.vertical_safe_angle,
'minimum_safe_range': self.minimum_safe_range,
'maximum_safe_range': self.maximum_safe_range,
'lock_time': self.lock_time,
'unknown_0x25717ace': self.unknown_0x25717ace,
'unknown_0xc83e3228': self.unknown_0xc83e3228,
}
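def _demo_roundtrip():
    # Hedged sketch: serialize a default CoverPoint to bytes and parse it
    # back (assumes EditorProperties round-trips with its defaults).
    import io
    point = CoverPoint(lock_time=12.5)
    buffer = io.BytesIO()
    point.to_stream(buffer)
    buffer.seek(0)
    parsed = CoverPoint.from_stream(buffer)
    assert parsed.lock_time == 12.5
    return parsed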
def _decode_editor_properties(data: typing.BinaryIO, property_size: int):
return EditorProperties.from_stream(data, property_size)
def _decode_unknown_0x969de5ff(data: typing.BinaryIO, property_size: int):
return struct.unpack('>l', data.read(4))[0]
def _decode_should_crouch(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
def _decode_horizontal_safe_angle(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_vertical_safe_angle(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_minimum_safe_range(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_maximum_safe_range(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_lock_time(data: typing.BinaryIO, property_size: int):
return struct.unpack('>f', data.read(4))[0]
def _decode_unknown_0x25717ace(data: typing.BinaryIO, property_size: int):
return struct.unpack('>?', data.read(1))[0]
def _decode_unknown_0xc83e3228(data: typing.BinaryIO, property_size: int):
return struct.unpack('>l', data.read(4))[0]
_property_decoder: typing.Dict[int, typing.Tuple[str, typing.Callable[[typing.BinaryIO, int], typing.Any]]] = {
0x255a4580: ('editor_properties', _decode_editor_properties),
0x969de5ff: ('unknown_0x969de5ff', _decode_unknown_0x969de5ff),
0x8001c3be: ('should_crouch', _decode_should_crouch),
0x46774985: ('horizontal_safe_angle', _decode_horizontal_safe_angle),
0xd9d7afa6: ('vertical_safe_angle', _decode_vertical_safe_angle),
0xebca3fca: ('minimum_safe_range', _decode_minimum_safe_range),
0xf88fd961: ('maximum_safe_range', _decode_maximum_safe_range),
0x308edc44: ('lock_time', _decode_lock_time),
0x25717ace: ('unknown_0x25717ace', _decode_unknown_0x25717ace),
0xc83e3228: ('unknown_0xc83e3228', _decode_unknown_0xc83e3228),
} | PypiClean |
/RsCMPX_WcdmaMeas-4.0.185.tar.gz/RsCMPX_WcdmaMeas-4.0.185/RsCMPX_WcdmaMeas/Implementations/WcdmaMeas/Prach/State/All.py | from typing import List
from .....Internal.Core import Core
from .....Internal.CommandsGroup import CommandsGroup
from .....Internal import Conversions
from .....Internal.Types import DataType
from .....Internal.ArgSingleList import ArgSingleList
from .....Internal.ArgSingle import ArgSingle
from ..... import enums
# noinspection PyPep8Naming,PyAttributeOutsideInit,SpellCheckingInspection
class AllCls:
"""All commands group definition. 1 total commands, 0 Subgroups, 1 group commands"""
def __init__(self, core: Core, parent):
self._core = core
self._cmd_group = CommandsGroup("all", core, parent)
# noinspection PyTypeChecker
def fetch(self, timeout: float = None, target_main_state: enums.TargetMainState = None, target_sync_state: enums.TargetSyncState = None) -> List[enums.ResourceState]:
"""SCPI: FETCh:WCDMa:MEASurement<instance>:PRACh:STATe:ALL \n
Snippet: value: List[enums.ResourceState] = driver.wcdmaMeas.prach.state.all.fetch(timeout = 1.0, target_main_state = enums.TargetMainState.OFF, target_sync_state = enums.TargetSyncState.ADJusted) \n
Queries the main measurement state and the measurement substates. Without query parameters, the states are returned
immediately. With query parameters, the states are returned when the <TargetMainState> and the <TargetSyncState> are
reached or when the <Timeout> expires. \n
:param timeout: No help available
:param target_main_state: Target MainState for the query Default is RUN.
:param target_sync_state: Target SyncState for the query Default is ADJ.
:return: state: No help available"""
param = ArgSingleList().compose_cmd_string(ArgSingle('timeout', timeout, DataType.Float, None, is_optional=True), ArgSingle('target_main_state', target_main_state, DataType.Enum, enums.TargetMainState, is_optional=True), ArgSingle('target_sync_state', target_sync_state, DataType.Enum, enums.TargetSyncState, is_optional=True))
response = self._core.io.query_str(f'FETCh:WCDMa:MEASurement<Instance>:PRACh:STATe:ALL? {param}'.rstrip())
return Conversions.str_to_list_enum(response, enums.ResourceState) | PypiClean |
/django_core_helpers-0.1.3-py3-none-any.whl/core_helpers/db/fields/core.py | import uuid
from django.core.exceptions import ValidationError
from django.db.models import BooleanField, DateTimeField
from django.db.models import UUIDField as _UUIDField
from django.utils.translation import gettext_lazy as _
class AutoCreatedField(DateTimeField):
"""
AutoCreatedField
By default, sets auto_now_add=True
"""
def __init__(self, *args, **kwargs):
kwargs.setdefault("auto_now_add", True)
super().__init__(*args, **kwargs)
class AutoModifiedField(DateTimeField):
"""
AutoModifiedField
By default, auto_now=True
Sets value to now every time the object is saved.
"""
def __init__(self, *args, **kwargs):
kwargs.setdefault("auto_now", True)
super().__init__(*args, **kwargs)
def pre_save(self, model_instance, add):
if not getattr(model_instance, "update_modified", True):
return getattr(model_instance, self.attname)
return super().pre_save(model_instance, add)
class UUIDFieldMixin:
"""UUIDFieldMixin sets the default value."""
DEFAULT_MAX_LENGTH = 36
def __init__(self, verbose_name=None, primary_key=True, version=4, editable=False, *args, **kwargs):
kwargs.setdefault("max_length", self.DEFAULT_MAX_LENGTH)
kwargs.setdefault("primary_key", primary_key)
kwargs.setdefault("editable", editable)
kwargs.setdefault("default", self._get_default(version))
super().__init__(verbose_name=verbose_name, *args, **kwargs)
@classmethod
def _get_default(cls, version):
if version == 2:
raise ValidationError(_("UUID version 2 is not supported."))
if version < 1 or version > 5:
raise ValidationError(_("UUID version is not valid."))
if version == 1:
return uuid.uuid1
elif version == 3:
return uuid.uuid3
elif version == 4:
return uuid.uuid4
elif version == 5:
return uuid.uuid5
class UUIDField(UUIDFieldMixin, _UUIDField):
"""UUID field to use as primary key of model.
By default, sets primary_key=True, version=4, editable=False
"""
class AutoActiveField(BooleanField):
"""
AutoActiveField
By default, sets default=True
"""
def __init__(self, verbose_name=_("Is active?"), default=True, *args, **kwargs):
kwargs.setdefault("default", default)
super().__init__(verbose_name, *args, **kwargs) | PypiClean |
/strfseconds-0.0.0b1-py3-none-any.whl/strfseconds.py | def strfseconds(seconds, formatstring='%h2:%m2:%s2', ndecimal=3):
"""Convert seconds to units of time.
:param float or int seconds: The seconds to format
:param str formatstring: The formatstring containing the format specifiers.
Time units are:
%w for weeks
%d for days
%h for hours
%m for minutes
%s for seconds
%l for milliseconds (0.001 second)
%f for microseconds (0.000001 second)
%o for the unchanged seconds value
:param int ndecimal: The number of decimals applied to the smallest unit
:return: str: The formatted formatstring with all format specifiers replaced
:rtype: str
:raises TypeError: If seconds is not int, float or long
:raises ValueError: If seconds is less than zero
:raises TypeError: If formatstring is not a string
:raises TypeError: If ndecimal is not an integer
:raises ValueError: If ndecimal is smaller than 0
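    Example (hours unpadded, minutes and seconds zero-padded to two digits):
        >>> strfseconds(3661, formatstring='%h:%m2:%s2', ndecimal=0)
        '1:01:01'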
"""
# Add al available time units
units =[
{ # Weeks - secs = 60 * 60 * 24 * 7
'name': 'w',
'secs': 604800
},
{ # Days - secs = 60 * 60 * 24
'name': 'd',
'secs': 86400
},
{ # Hours - secs = 60 * 60
'name': 'h',
'secs': 3600
},
{ # Minutes
'name': 'm',
'secs': 60
},
{ # Seconds
'name': 's',
'secs': 1
},
{ # Milliseconds
'name': 'l',
'secs': .001
},
{ # Microseconds
'name': 'f',
'secs': .000001
}]
# Input validation plus initialization
# seconds must be an integer or a float. If not raise type error.
# (same behaviour as datetime.datetime.fromtimestamp).
if type(seconds) not in (int, float):
raise TypeError(
"a float is required (got type %s)" % type(seconds).__name__)
if seconds < 0:
raise ValueError(
"seconds must be greater than or equal to zero")
# Add max value for seconds like datetime.timedelta does;
# 86399999999999 is 2739726 years
# timedelta.max = timedelta(days=999999999, hours=23, minutes=59,
# seconds=59, microseconds=999999)
if seconds > 86399999999999.999999:
raise OverflowError(
"seconds=%d; must be less than or equal to 86399999999999.999999" % (seconds))
# formatstring must be of type string.
if type(formatstring) != str:
raise TypeError(
"formatstring must be of type string (got type %s)" % type(formatstring).__name__)
# ndecimal must be an integer greater than or equal to 0.
if type(ndecimal) != int:
raise TypeError(
"ndecimal must be of type integer (got type %s)" % type(ndecimal).__name__)
if ndecimal < 0:
raise ValueError(
"ndecimal must be greater than or equal to 0 (got %s)" % ndecimal)
# Initializtion is done
    # `seconds` holds the running remainder and is consumed by the
    # `divmod` calculations below.
    # Performance upgrade: the separate seconds_remaining variable was removed.
# Replace %o by the value passed for seconds
formatstring = formatstring.replace('%o', str(seconds))
# Determine the smallest timeunit in the formatstring;
# when no smallest (any) timeunit found return the formatstring.
# (same behaviour as 'datetime.strftime').
smallest_unit_in_formatstring = None
for unit in units:
if formatstring.find( '%' + unit['name'] ) != -1:
smallest_unit_in_formatstring = unit['name']
if smallest_unit_in_formatstring is None:
return formatstring
# For every available time unit ...
for unit in units:
# ... check if unit is defined in the formatstring, if not
# continue to the next unit.
if formatstring.find('%' + unit['name']) == -1:
continue
        # Execute divmod on seconds for this unit's size: divide the
# number of seconds by this unit's number of seconds.
# The unit_size is assigned the quotient, seconds is assigned
# the remainder.
unit_size, seconds = divmod(seconds, unit['secs'])
# If this unit is the smallest unit add the remaining seconds
# as a decimal of this unit seconds size.
if smallest_unit_in_formatstring == unit['name']:
unit_size += 1 / unit['secs'] * seconds
        # Disable rounding completely; it would be bad design to round
        # only when rounding is safe (does not increment the unit size)
        # and skip it when it is not.
        # Decimals will be processed by the format call with a high value
        # for '.f', after which they will be truncated.
# # Determine if unit_size can be formatted safely with format() and
# # that it will not roundup base unit size.
# prevent_rounding = False
# if round( 1 / unit['secs'] * seconds, ndecimal) == 1:
# prevent_rounding = True
# #else:
# # unit_size = round(unit_size, ndecimal)
# Calculations are done. Convert unit_size to string and format
# appearance.
# Determine leading zeroes ( '0%s' ).
#
# (Do not use None for unit_rjust_length as it's value is
# concatenated to this units name in the replace call.)
unit_rjust_length = ''
i = formatstring.find('%' + unit['name'] )
if len(formatstring) > (i + 2) and formatstring[i+2] in ( '1', '2', '3', '4', '5', '6', '7', '8', '9'):
unit_rjust_length = formatstring[i+2]
if smallest_unit_in_formatstring != unit['name'] :
# Apply format() for all non-smallest units. These units
# do not have decimals.
# format string '0{unit_rjust_length}.0f'.
# - Leading zeroes: Set '0%s.' to '0{unit_rjust_length}.'
# - Decimals: Set '.%sf' to '.0f'
unit_size = format(unit_size, '0%s.0f' %
str(unit_rjust_length or 0))
else:
# Apply format() for smallest unit.
# Prevent rounding, use format() with a value for '.%f'
# being 12 times larger than {ndecimal} after that chop
# off the decimals using substring.
#
# Leading zeroes: Set '0%s.' to {ndecimal} + 12 +
# {unit_rjust_length} + 1 (the decimal sign)
# Decimals: Set '%sf' to a value relative to {ndecimal};
# add 12 decimals.
unit_size = format(unit_size, '0%s.%sf' % (
str((ndecimal + 12) + int(unit_rjust_length or 0) + 1),
str(ndecimal + 12)
))
if ndecimal == 0:
# No decimals; chop off string including and after the
# decimal sign
unit_size = unit_size[0:unit_size.find('.')]
else:
# Decimals; chop off string after decimal sign plus
# {ndecimal} plus 1 for the decimal sign itself.
unit_size = unit_size[0:unit_size.find('.') + ndecimal + 1]
# Truncate decimals manually
# Convert unit_size to string ...
# unit_size = str(unit_size)
#if ndecimal == 0:
# # No decimals; chop off values including and after the
# # decimal sign
# unit_size = unit_size[0:unit_size.find('.')]
#else:
# # Decimals; chop off values {ndecimal} after decimal sign
# # (plus 1 for the decimal sign itself).
# unit_size = unit_size[0:unit_size.find('.') + ndecimal + 1]
# # ljust zeroes; when ndecimal=3 '0.5' should display
# # as '0.500'
# unit_size = unit_size.ljust( unit_size.find('.') + ndecimal + 1, '0')
## rjust zeroes (leading zeroes)
#unit_size = unit_size.rjust( len(unit_size) - unit_size.find('.') + int(unit_rjust_length), '0')
# This is the last step for this unit's loop iteration; replace
# of the formatstring.
formatstring = formatstring.replace('%' + unit['name'] + str(unit_rjust_length), unit_size)
return formatstring | PypiClean |
/django-vpos-0.1.4.tar.gz/django-vpos-0.1.4/vpos/configs.py | from django.conf import settings
from django.utils.translation import gettext_lazy as _
from vpos.exceptions import VposConfigurationError as Err
DEFAULTS: dict = {
# required
'POS_ID': None,
'TOKEN': None,
'URL': None,
'MODE': 'production',
# optionals
# fees expect a tuple in this order (percent, min amount, max amount, plus amount)
# ex: (0.7, None, 5000, 0)
# ex: (15.3, 500.54, None, 68)
'BANK_FEE': None,
'VPOS_FEE': None,
'VPOS_SUPERVISOR_CARD': None,
'VPOS_BASE_URL': 'https://vpos.ao/api/v1',
'VPOS_TEST_SUPERVISOR_CARD': '9610123456123412341234123456789012345'}
VPOS_STATUS_REASON: dict = {
# client
'3000': _('Refused by client'),
# Processor
'2010': _('Request was refused by the processor'),
'2009': _('Parent transaction is too old to be refunded'),
'2008': _('Invalid merchant email'),
'2007': _('Invalid or Inactive supervisor card'),
'2006': _('Insufficient funds in POS available for refund'),
'2005': _('POS is closed and unable to accept transactions'),
'2004': _('Request timed-out and was refused by the processor'),
'2003': _('Card or network daily limit exceeded'),
'2002': _('Refused by the card issuer'),
'2001': _("Insufficient funds in client's account"),
'2000': _('Generic processor error'),
# Gateway
'1003': _('Parent transaction ID of refund request is not an accepted Payment'),
'1002': _('Gateway is not authorized to execute transactions on the specified POS'),
'1001': _('Request timed-out and will not be processed'),
'1000': _('Generic gateway error')}
class VposSettings:
__defaults: dict
    def __init__(self, defaults: dict = None, user_settings: dict = None) -> None:
        self.__user_settings = user_settings
        self.__defaults = defaults or DEFAULTS
    @property
    def settings(self) -> dict:
        # explicit user settings win; otherwise read the django settings lazily
        if self.__user_settings is None:
            return getattr(settings, 'VPOS', {})
        return self.__user_settings
def validate(self):
"""configurations validation"""
for attr in dir(self):
if 'validate_' in attr:
getattr(self, attr)()
def validate_mode(self):
modes = ('production', 'sandbox')
if self.MODE not in modes:
raise Err(
"Invalid MODE: '%s'. Must be "
"production or sandbox" % self.MODE)
    def validate_token(self):
        if not self.TOKEN:
            raise Err('TOKEN is required')
    def validate_url(self):
        if not self.URL:
            raise Err('URL is required for callback transaction confirmation')
    def validate_vpos_base_url(self):
        if not self.VPOS_BASE_URL:
            raise Err('VPOS_BASE_URL is required')
    def validate_vpos_supervisor_card(self):
        if not self.VPOS_SUPERVISOR_CARD:
            raise Err('VPOS_SUPERVISOR_CARD is required')
def validate_vpos_fee(self):
        if self.VPOS_FEE is not None and not isinstance(self.VPOS_FEE, tuple):
            raise Err('VPOS_FEE, if set, must be a tuple in this order: (percent, min amount, max amount, plus amount)')
def validate_bank_fee(self):
        if self.BANK_FEE is not None and not isinstance(self.BANK_FEE, tuple):
            raise Err('BANK_FEE, if set, must be a tuple in this order: (percent, min amount, max amount, plus amount)')
def validate_pos_id(self):
if not self.POS_ID:
raise Err('POS_ID is required')
def get_supervisor_card(self) -> str:
if self.MODE == 'sandbox':
return self.VPOS_TEST_SUPERVISOR_CARD
return self.VPOS_SUPERVISOR_CARD
def __getattr__(self, attr: str):
if attr not in self.__defaults:
raise AttributeError("Invalid VPOS setting: '%s'" % attr)
try:
            value = self.settings[attr]
except KeyError:
value = self.__defaults[attr]
setattr(self, attr, value)
return value
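def _demo_settings():
    # Hedged sketch: configure explicitly instead of reading django settings
    # (all values below are placeholders, not real credentials).
    vpos = VposSettings(user_settings={
        'POS_ID': 1234,
        'TOKEN': '<api-token>',
        'URL': 'https://example.com/vpos/callback/',
        'VPOS_SUPERVISOR_CARD': '<supervisor-card-number>'})
    vpos.validate()
    return vpos.get_supervisor_card()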
conf = VposSettings() | PypiClean |
/apache-superset_qwerty-2.0.11.tar.gz/apache-superset_qwerty-2.0.11/superset-frontend/plugins/plugin-chart-table/README.md | <!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
## @superset-ui/plugin-chart-table
[![Version](https://img.shields.io/npm/v/@superset-ui/plugin-chart-table.svg?style=flat-square)](https://www.npmjs.com/package/@superset-ui/plugin-chart-table)
[![David (path)](https://img.shields.io/david/apache-superset/superset-ui-plugins.svg?path=packages%2Fsuperset-ui-plugin-chart-table&style=flat-square)](https://david-dm.org/apache-superset/superset-ui-plugins?path=plugins/superset-ui-plugin-chart-table)
This plugin provides Table chart for Superset.
### Usage
Configure `key`, which can be any `string`, and register the plugin. This `key` will be used to
look up this chart throughout the app.
```js
import TableChartPlugin from '@superset-ui/plugin-chart-table';
new TableChartPlugin().configure({ key: 'table' }).register();
```
Then use it via `SuperChart`. See
[storybook](https://apache-superset.github.io/superset-ui-plugins/?selectedKind=plugin-chart-table)
for more details.
```js
<SuperChart
chartType="table"
width={600}
height={600}
formData={...}
queriesData={[{
data: {...},
}]}
/>
```
| PypiClean |
/parameters-validation-1.2.0.tar.gz/parameters-validation-1.2.0/parameters_validation/builtin_validations.py | from numbers import Number
from typing import Sized
from parameters_validation.parameter_validation_decorator import parameter_validation
@parameter_validation
def strongly_typed(param: object, arg_name: str, arg_type: type):
"""
Validation to reject null, empty or blank strings.
>>> from parameters_validation import validate_parameters
...
... @validate_parameters
... def foo(bar: strongly_typed(str)):
... print(bar)
...
... foo("") # valid: parameter is a string
... foo(None) # invalid: NoneType does not inherit from string
... foo(1) # invalid: integer does not inherit from string
:param param: the parameter's value being validated
:param arg_name: the argument name for this parameter (provided by the :meth:`parameter_validation` decorator)
:param arg_type: the argument type for this parameter (provided by the :meth:`parameter_validation` decorator)
:return: None
    :raises TypeError: invalid parameter, i.e. :param param: has a type that doesn't inherit from the expected :param arg_type:
"""
validation_error = None
arg = _build_arg(arg_name, arg_type)
try:
if not isinstance(param, arg_type):
validation_error = TypeError("`{arg}` must be of type `{arg_type}`".format(arg=arg, arg_type=arg_type))
    except TypeError:  # isinstance raises TypeError when arg_type is not a type; TODO: fail at function definition time
        raise RuntimeError("`strongly_typed` validation must receive the type to enforce")
if validation_error:
raise validation_error
@parameter_validation
def non_blank(string: str, arg_name: str, arg_type: type = str):
"""
Validation to reject null, empty or blank strings.
>>> from parameters_validation import validate_parameters
...
... @validate_parameters
... def foo(bar: non_blank(str)):
... print(bar)
...
... foo(".") # valid: string is not null, empty nor blank
... foo(None) # invalid: string is null
... foo("") # invalid: string is empty
... foo(" ") # invalid: string is blank (i.e., contains just whitespaces)
:param string: the parameter's value being validated
:param arg_name: the argument name for this parameter (provided by the :meth:`parameter_validation` decorator)
:param arg_type: the argument type for this parameter (provided by the :meth:`parameter_validation` decorator)
:return: None
:raises ValueError: invalid parameter, i.e. :param string: is either of type `NoneType`, empty (no length) or blank (contains just whitespaces)
:raises RuntimeError: unable to validate parameter (possibly :param string: is of an unexpected type)
"""
validation_error = None
arg = _build_arg(arg_name, arg_type)
try:
if not bool(string and string.strip()):
validation_error = ValueError(
"Parameter `{arg}` cannot be blank nor empty".format(arg=arg))
except Exception as e:
        validation_error = RuntimeError(
            "Unable to validate parameter `{arg}`: {error_name}: {error}".format(arg=arg, error_name=e.__class__.__name__, error=e), e)
if validation_error:
raise validation_error
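def _demo_combined_validations():
    # Hedged sketch: validations can be nested on a single parameter. Assumes
    # the package's `validate_parameters` decorator, as in the docstrings
    # above; `no_whitespaces` is defined further down in this module and is
    # resolved at call time.
    from parameters_validation import validate_parameters
    @validate_parameters
    def create_user(name: non_blank(no_whitespaces(str))):
        return name
    return create_user("alice")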
@parameter_validation
def non_null(obj: object, arg_name: str, arg_type: type = object):
"""
Validation to reject null objects.
>>> from parameters_validation import validate_parameters
...
... @validate_parameters
... def foo(bar: non_null(str)):
... print(bar)
...
... foo("") # valid: object is not null
... foo(False) # valid: object is not null
... foo(None) # invalid: object is null
:param obj: the parameter's value being validated
:param arg_name: the argument name for this parameter (provided by the :meth:`parameter_validation` decorator)
:param arg_type: the argument type for this parameter (provided by the :meth:`parameter_validation` decorator)
:return: None
:raises ValueError: invalid parameter, i.e. :param obj: is of type `NoneType`
"""
arg = _build_arg(arg_name, arg_type)
if obj is None:
raise ValueError("Parameter `{arg}` cannot not be None".format(arg=arg))
@parameter_validation
def non_empty(obj: Sized, arg_name: str, arg_type: type = object):
"""
Validation to reject empty objects.
>>> from parameters_validation import validate_parameters
...
... @validate_parameters
... def foo(bar: non_empty(str)):
... print(bar)
...
... foo(".") # valid: object is not empty
... foo([None, None]) # valid: object is not empty
... foo("") # invalid: object is empty
... foo({}) # invalid: object is empty
:param obj: the parameter's value being validated
:param arg_name: the argument name for this parameter (provided by the :meth:`parameter_validation` decorator)
:param arg_type: the argument type for this parameter (provided by the :meth:`parameter_validation` decorator)
:return: None
:raises ValueError: invalid parameter, i.e. :param obj: has size zero (no length)
:raises RuntimeError: unable to validate parameter (possibly the parameter is of an unexpected type)
"""
validation_error = None
arg = _build_arg(arg_name, arg_type)
try:
if len(obj) == 0:
validation_error = ValueError("Parameter `{arg}` cannot be empty".format(arg=arg))
except Exception as e:
validation_error = RuntimeError(
"Unable to validate parameter `{arg}`: {error_name}{error}".format(arg=arg, error_name=e.__class__.__name__, error=e), e)
if validation_error:
raise validation_error
@parameter_validation
def no_whitespaces(string: str, arg_name: str, arg_type: type = str):
"""
Validation to reject strings with whitespaces.
>>> from parameters_validation import validate_parameters
...
... @validate_parameters
... def foo(bar: no_whitespaces(str)):
... print(bar)
...
... foo("sao_paulo") # valid: string does not contain whitespaces
... foo("sao paulo") # invalid: string does contain whitespaces
... foo("") # valid: string does not contain whitespaces
... foo(" ") # invalid: string does contain whitespaces
:param string: the parameter's value being validated
:param arg_name: the argument name for this parameter (provided by the :meth:`parameter_validation` decorator)
:param arg_type: the argument type for this parameter (provided by the :meth:`parameter_validation` decorator)
:return: None
:raises ValueError: invalid parameter, i.e. :param string: contains one or more whitespaces
:raises RuntimeError: unable to validate parameter (possibly :param string: is of an unexpected type)
"""
validation_error = None
arg = _build_arg(arg_name, arg_type)
try:
if " " in string:
validation_error = ValueError(
"Parameter `{arg}` cannot contain whitespaces".format(arg=arg))
except Exception as e:
validation_error = RuntimeError(
"Unable to validate parameter `{arg}`: {error_name}{error}".format(arg=arg, error_name=e.__class__.__name__, error=e), e)
if validation_error:
raise validation_error
@parameter_validation
def non_negative(number: Number, arg_name: str, arg_type: type = Number):
"""
Validation to reject negative numbers.
>>> from parameters_validation import validate_parameters
...
... @validate_parameters
... def foo(bar: non_negative(float)):
... print(bar)
...
... foo(0.0) # valid: number is non-negative
... foo(-0.1) # invalid: number is negative
:param number: the parameter's value being validated
:param arg_name: the argument name for this parameter (provided by the :meth:`parameter_validation` decorator)
:param arg_type: the argument type for this parameter (provided by the :meth:`parameter_validation` decorator)
:return: None
    :raises ValueError: invalid parameter, i.e. :param number: is negative
:raises RuntimeError: unable to validate parameter (possibly :param number: is of an unexpected type)
"""
validation_error = None
arg = _build_arg(arg_name, arg_type)
try:
if number < 0:
validation_error = ValueError(
"Parameter `{arg}` cannot be negative".format(arg=arg))
except Exception as e:
validation_error = RuntimeError(
"Unable to validate parameter `{arg}`: {error_name}{error}".format(arg=arg, error_name=e.__class__.__name__, error=e), e)
if validation_error:
raise validation_error
def _build_arg(arg_name, arg_type):
arg = arg_name
if arg_type is not None:
try:
arg += " <{t}>".format(t=arg_type.__name__)
except AttributeError:
arg += " <{t}>".format(t=arg_type._name)
return arg | PypiClean |
/nautobot_dns_records-0.2.0-py3-none-any.whl/nautobot_dns_records/migrations/0003_srvrecord.py |
import django.core.serializers.json
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import nautobot.extras.models.mixins
import nautobot.extras.models.statuses
import nautobot_dns_records.validators
import taggit.managers
import uuid
class Migration(migrations.Migration):
dependencies = [
("extras", "0054_scheduledjob_kwargs_request_user_change"),
("dcim", "0019_device_redundancy_group_data_migration"),
("nautobot_dns_records", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="SrvRecord",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True
),
),
("created", models.DateField(auto_now_add=True, null=True)),
("last_updated", models.DateTimeField(auto_now=True, null=True)),
(
"_custom_field_data",
models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder),
),
(
"label",
models.CharField(max_length=255, validators=[nautobot_dns_records.validators.validate_dns_name]),
),
(
"ttl",
models.IntegerField(
validators=[
django.core.validators.MinValueValidator(1),
django.core.validators.MaxValueValidator(604800),
]
),
),
(
"priority",
models.IntegerField(
validators=[
django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(65535),
]
),
),
(
"weight",
models.IntegerField(
default=0,
validators=[
django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(65535),
],
),
),
(
"port",
models.IntegerField(
validators=[
django.core.validators.MinValueValidator(0),
django.core.validators.MaxValueValidator(65535),
]
),
),
(
"target",
models.CharField(max_length=255, validators=[nautobot_dns_records.validators.validate_dns_name]),
),
(
"device",
models.ForeignKey(
blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="dcim.device"
),
),
(
"status",
nautobot.extras.models.statuses.StatusField(
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="nautobot_dns_records_srvrecord_related",
to="extras.status",
),
),
("tags", taggit.managers.TaggableManager(through="extras.TaggedItem", to="extras.Tag")),
],
options={
"abstract": False,
},
bases=(
models.Model,
nautobot.extras.models.mixins.DynamicGroupMixin,
nautobot.extras.models.mixins.NotesMixin,
),
),
] | PypiClean |
/django-skylark-0.4.6.tar.gz/django-skylark-0.4.6/src/skylark/templates/chirp/media/dojox/image/Badge.js | if(!dojo._hasResource["dojox.image.Badge"]){ //_hasResource checks added by build. Do not use _hasResource directly in your code.
dojo._hasResource["dojox.image.Badge"] = true;
dojo.provide("dojox.image.Badge");
dojo.experimental("dojox.image.Badge");
dojo.require("dijit._Widget");
dojo.require("dijit._Templated");
dojo.require("dojo.fx.easing");
dojo.declare("dojox.image.Badge", [dijit._Widget, dijit._Templated], {
// summary: A simple grid of Images that loops through thumbnails
//
baseClass: "dojoxBadge",
templateString:'<div class="dojoxBadge" dojoAttachPoint="containerNode"></div>',
// children: String
// A CSS3 Selector that determines the node to become a child
children: "div.dojoxBadgeImage",
// rows: Integer
// Number of Rows to display
rows: 4,
// cols: Integer
// Number of Columns to display
cols: 5,
// cellSize: Integer
// Size in PX of each thumbnail
cellSize: 50,
// cellMargin: Integer
// Size in PX to adjust for cell margins
cellMargin: 1,
// delay: Integer
// Time (in ms) to show the image before sizing down again
delay: 2000,
// threads: Integer
	// how many cycles will be going "simultaneously" (>2 not recommended)
threads: 1,
// easing: Function|String
// An easing function to use when showing the node (does not apply to shrinking)
easing: "dojo.fx.easing.backOut",
startup: function(){
if(this._started){ return; }
if(dojo.isString(this.easing)){
this.easing = dojo.getObject(this.easing);
}
this.inherited(arguments);
this._init();
},
_init: function(){
// summary: Setup and layout the images
var _row = 0,
_w = this.cellSize;
dojo.style(this.domNode, {
width: _w * this.cols + "px",
height: _w * this.rows + "px"
});
this._nl = dojo.query(this.children, this.containerNode)
.forEach(function(n, _idx){
var _col = _idx % this.cols,
t = _row * _w,
l = _col * _w,
m = this.cellMargin * 2;
dojo.style(n, {
top: t + "px",
left: l + "px",
width: _w - m + "px",
height: _w - m + "px"
});
if(_col == this.cols - 1){ _row++; }
dojo.addClass(n, this.baseClass + "Image");
}, this)
;
var l = this._nl.length;
while(this.threads--){
var s = Math.floor(Math.random() * l);
setTimeout(dojo.hitch(this, "_enbiggen", {
target: this._nl[s]
}), this.delay * this.threads);
}
},
_getCell: function(/* DomNode */ n){
// summary: Return information about the position for a given node
var _pos = this._nl.indexOf(n);
if(_pos >= 0){
var _col = _pos % this.cols;
var _row = Math.floor(_pos / this.cols);
return { x: _col, y: _row, n: this._nl[_pos], io: _pos };
}else{
return undefined;
}
},
_getImage: function(){
// summary: Returns the next image in the list, or the first one if not available
return "url('')";
},
_enbiggen: function(/* Event|DomNode */ e){
// summary: Show the passed node in the picker
var _pos = this._getCell(e.target || e);
if (_pos){
// we have a node, and know where it is
var m = this.cellMargin,
_cc = (this.cellSize * 2) - (m * 2),
props = {
height: _cc,
width: _cc
}
;
var _tehDecider = function(){
// if we have room, we'll want to decide which direction to go
// let "teh decider" decide.
return Math.round(Math.random());
};
if(_pos.x == this.cols - 1 || (_pos.x > 0 && _tehDecider() )){
// we have to go left, at right edge (or we want to and not on left edge)
props.left = this.cellSize * (_pos.x - m);
}
if(_pos.y == this.rows - 1 || (_pos.y > 0 && _tehDecider() )){
// we have to go up, at bottom edge (or we want to and not at top)
props.top = this.cellSize * (_pos.y - m);
}
var bc = this.baseClass;
dojo.addClass(_pos.n, bc + "Top");
dojo.addClass(_pos.n, bc + "Seen");
dojo.animateProperty({ node: _pos.n, properties: props,
onEnd: dojo.hitch(this, "_loadUnder", _pos, props),
easing: this.easing
}).play();
}
},
_loadUnder: function(info, props){
// summary: figure out which three images are being covered, and
// determine if they need loaded or not
var idx = info.io;
var nodes = [];
var isLeft = (props.left >= 0);
var isUp = (props.top >= 0);
var c = this.cols,
// the three node index's we're allegedly over:
e = idx + (isLeft ? -1 : 1),
f = idx + (isUp ? -c : c),
// don't ask:
g = (isUp ? (isLeft ? e - c : f + 1) : (isLeft ? f - 1 : e + c)),
bc = this.baseClass;
dojo.forEach([e, f, g], function(x){
var n = this._nl[x];
if(n){
if(dojo.hasClass(n, bc + "Seen")){
// change the background image out?
dojo.removeClass(n, bc + "Seen");
}
}
},this);
setTimeout(dojo.hitch(this, "_disenbiggen", info, props), this.delay * 1.25);
},
_disenbiggen: function(info, props){
// summary: Hide the passed node (info.n), passing along properties
// received.
if(props.top >= 0){
props.top += this.cellSize;
}
if(props.left >= 0){
props.left += this.cellSize;
}
var _cc = this.cellSize - (this.cellMargin * 2);
dojo.animateProperty({
node: info.n,
properties: dojo.mixin(props, {
width:_cc,
height:_cc
}),
onEnd: dojo.hitch(this, "_cycle", info, props)
}).play(5);
},
_cycle: function(info, props){
// summary: Select an un-viewed image from the list, and show it
var bc = this.baseClass;
dojo.removeClass(info.n, bc + "Top");
var ns = this._nl.filter(function(n){
return !dojo.hasClass(n, bc + "Seen")
});
var c = ns[Math.floor(Math.random() * ns.length)];
setTimeout(dojo.hitch(this,"_enbiggen", { target: c }), this.delay / 2)
}
});
} | PypiClean |
/pulumi_google_native-0.31.2a1689827148.tar.gz/pulumi_google_native-0.31.2a1689827148/pulumi_google_native/integrations/v1alpha/get_certificate.py |
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetCertificateResult',
'AwaitableGetCertificateResult',
'get_certificate',
'get_certificate_output',
]
@pulumi.output_type
class GetCertificateResult:
def __init__(__self__, certificate_status=None, credential_id=None, description=None, display_name=None, name=None, raw_certificate=None, requestor_id=None, valid_end_time=None, valid_start_time=None):
if certificate_status and not isinstance(certificate_status, str):
raise TypeError("Expected argument 'certificate_status' to be a str")
pulumi.set(__self__, "certificate_status", certificate_status)
if credential_id and not isinstance(credential_id, str):
raise TypeError("Expected argument 'credential_id' to be a str")
pulumi.set(__self__, "credential_id", credential_id)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if display_name and not isinstance(display_name, str):
raise TypeError("Expected argument 'display_name' to be a str")
pulumi.set(__self__, "display_name", display_name)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if raw_certificate and not isinstance(raw_certificate, dict):
raise TypeError("Expected argument 'raw_certificate' to be a dict")
pulumi.set(__self__, "raw_certificate", raw_certificate)
if requestor_id and not isinstance(requestor_id, str):
raise TypeError("Expected argument 'requestor_id' to be a str")
pulumi.set(__self__, "requestor_id", requestor_id)
if valid_end_time and not isinstance(valid_end_time, str):
raise TypeError("Expected argument 'valid_end_time' to be a str")
pulumi.set(__self__, "valid_end_time", valid_end_time)
if valid_start_time and not isinstance(valid_start_time, str):
raise TypeError("Expected argument 'valid_start_time' to be a str")
pulumi.set(__self__, "valid_start_time", valid_start_time)
@property
@pulumi.getter(name="certificateStatus")
def certificate_status(self) -> str:
"""
Status of the certificate
"""
return pulumi.get(self, "certificate_status")
@property
@pulumi.getter(name="credentialId")
def credential_id(self) -> str:
"""
Immutable. Credential id that will be used to register with trawler INTERNAL_ONLY
"""
return pulumi.get(self, "credential_id")
@property
@pulumi.getter
def description(self) -> str:
"""
Description of the certificate
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
"""
Name of the certificate
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def name(self) -> str:
"""
Auto generated primary key
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="rawCertificate")
def raw_certificate(self) -> 'outputs.GoogleCloudIntegrationsV1alphaClientCertificateResponse':
"""
Input only. Raw client certificate which would be registered with trawler
"""
return pulumi.get(self, "raw_certificate")
@property
@pulumi.getter(name="requestorId")
def requestor_id(self) -> str:
"""
Immutable. Requestor ID to be used to register certificate with trawler
"""
return pulumi.get(self, "requestor_id")
@property
@pulumi.getter(name="validEndTime")
def valid_end_time(self) -> str:
"""
The timestamp after which certificate will expire
"""
return pulumi.get(self, "valid_end_time")
@property
@pulumi.getter(name="validStartTime")
def valid_start_time(self) -> str:
"""
The timestamp after which certificate will be valid
"""
return pulumi.get(self, "valid_start_time")
class AwaitableGetCertificateResult(GetCertificateResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetCertificateResult(
certificate_status=self.certificate_status,
credential_id=self.credential_id,
description=self.description,
display_name=self.display_name,
name=self.name,
raw_certificate=self.raw_certificate,
requestor_id=self.requestor_id,
valid_end_time=self.valid_end_time,
valid_start_time=self.valid_start_time)
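# Usage sketch (illustrative, not part of the generated module; all argument
# values below are hypothetical):
#
#     cert = get_certificate_output(
#         certificate_id="my-cert-id",
#         location="us-central1",
#         product_id="my-product",
#         project="my-project",
#     )
#     pulumi.export("certificateStatus",
#                   cert.apply(lambda r: r.certificate_status))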
def get_certificate(certificate_id: Optional[str] = None,
location: Optional[str] = None,
product_id: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCertificateResult:
"""
    Get a certificate in the specified project.
"""
__args__ = dict()
__args__['certificateId'] = certificate_id
__args__['location'] = location
__args__['productId'] = product_id
__args__['project'] = project
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('google-native:integrations/v1alpha:getCertificate', __args__, opts=opts, typ=GetCertificateResult).value
return AwaitableGetCertificateResult(
certificate_status=pulumi.get(__ret__, 'certificate_status'),
credential_id=pulumi.get(__ret__, 'credential_id'),
description=pulumi.get(__ret__, 'description'),
display_name=pulumi.get(__ret__, 'display_name'),
name=pulumi.get(__ret__, 'name'),
raw_certificate=pulumi.get(__ret__, 'raw_certificate'),
requestor_id=pulumi.get(__ret__, 'requestor_id'),
valid_end_time=pulumi.get(__ret__, 'valid_end_time'),
valid_start_time=pulumi.get(__ret__, 'valid_start_time'))
@_utilities.lift_output_func(get_certificate)
def get_certificate_output(certificate_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
product_id: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCertificateResult]:
"""
    Get a certificate in the specified project.
"""
... | PypiClean |
/autogluon.tabular-0.7.0b20230217-py3-none-any.whl/autogluon/tabular/models/catboost/catboost_utils.py | import logging
from autogluon.core.constants import BINARY, MULTICLASS, REGRESSION, SOFTCLASS
logger = logging.getLogger(__name__)
# TODO: Add weight support?
# TODO: Can these be optimized? What computational cost do they have compared to the default catboost versions?
class CustomMetric:
def __init__(self, metric, is_higher_better, needs_pred_proba):
self.metric = metric
self.is_higher_better = is_higher_better
self.needs_pred_proba = needs_pred_proba
@staticmethod
def get_final_error(error, weight):
return error
def is_max_optimal(self):
return self.is_higher_better
def evaluate(self, approxes, target, weight):
raise NotImplementedError
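# Illustrative sketch (not part of the original module): a concrete zero-one
# error metric built on CustomMetric. The (error_sum, weight_sum) return shape
# and the layout of `approxes` follow CatBoost's custom-metric protocol and are
# assumptions here, not guaranteed by this module.
class _ExampleZeroOneError(CustomMetric):
    def __init__(self):
        super().__init__(metric=None, is_higher_better=False, needs_pred_proba=False)

    def evaluate(self, approxes, target, weight):
        # approxes holds one array of raw scores for a binary task
        preds = [1.0 if score > 0 else 0.0 for score in approxes[0]]
        errors = sum(1.0 for p, t in zip(preds, target) if p != t)
        return errors, float(len(target))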
def get_catboost_metric_from_ag_metric(metric, problem_type):
if problem_type == SOFTCLASS:
from .catboost_softclass_utils import SoftclassCustomMetric
if metric.name != 'soft_log_loss':
logger.warning("Setting metric=soft_log_loss, the only metric supported for softclass problem_type")
return SoftclassCustomMetric(metric=None, is_higher_better=True, needs_pred_proba=True)
elif problem_type == BINARY:
metric_map = dict(
log_loss='Logloss',
accuracy='Accuracy',
roc_auc='AUC',
f1='Logloss', # f1 uses Logloss because f1 in CatBoost is not reliable (causes errors between versions)
f1_macro='Logloss',
f1_micro='Logloss',
f1_weighted='Logloss',
balanced_accuracy='BalancedAccuracy',
recall='Recall',
recall_macro='Recall',
recall_micro='Recall',
recall_weighted='Recall',
precision='Precision',
precision_macro='Precision',
precision_micro='Precision',
precision_weighted='Precision',
)
metric_class = metric_map.get(metric.name, 'Logloss')
elif problem_type == MULTICLASS:
metric_map = dict(
log_loss='MultiClass',
accuracy='Accuracy',
)
metric_class = metric_map.get(metric.name, 'MultiClass')
elif problem_type == REGRESSION:
metric_map = dict(
mean_squared_error='RMSE',
root_mean_squared_error='RMSE',
mean_absolute_error='MAE',
median_absolute_error='MedianAbsoluteError',
r2='R2',
)
metric_class = metric_map.get(metric.name, 'RMSE')
else:
raise AssertionError(f'CatBoost does not support {problem_type} problem type.')
return metric_class | PypiClean |
/phenotrex-0.6.0.tar.gz/phenotrex-0.6.0/docs/usage.rst | ==============
Usage Tutorial
==============
The following tutorial illustrates training, evaluation, inference and model introspection with phenotrex
using the phenotrex command line interface. For further information on flags and parameters used
in this tutorial, please consult the relevant CLI documentation available via
``phenotrex <command> --help``.
To illustrate phenotrex's capabilities, a small dataset of genome assemblies will be used.
To download all required data, run:
.. code-block:: console
$ curl -OL http://fileshare.csb.univie.ac.at/phenotrex/tutorial_data.tar
$ tar -xf tutorial_data.tar
$ cd tutorial_data
To then install phenotrex, including its capability for extracting features from FASTA files,
in a new virtual environment:
.. code-block:: console
$ python3 -m venv phenotrex-env
$ source ./phenotrex-env/bin/activate
$ pip install phenotrex[fasta]
Creation of Phenotrex Input Features
------------------------------------
Phenotrex operates on presence/absence patterns of `eggNOG`_ cluster IDs in the passed genome.
If a DNA FASTA file is passed to phenotrex,
`Prodigal`_ is first used to find protein sequences - this step is skipped if a protein FASTA file
is passed instead. To then find eggNOG cluster IDs from protein sequences, `deepnog`_ is used.
Input files to feature creation may thus be DNA or protein multi-FASTA files, which may optionally
be gzipped.
Feature creation is computationally demanding. For this reason, direct input of individual
FASTA files to phenotrex is only implemented for prediction.
For model training, evaluation (and for batch prediction), tabular files must be created from
input FASTA files representing the genotype of all input files, encoded as eggNOG cluster IDs. This
allows reuse of the created features for all training and evaluation purposes.
To create a tabular genotype file suitable for use in phenotrex training:
.. code-block:: console
$ phenotrex compute-genotype \
--out T3SS.train_eval.genotype \
--threads 4 \
train_eval/genomes/*.fna.gz
After some time, this will create a new tab-separated values (TSV) file ``T3SS.train_eval.genotype``
in the current directory, of the following shape:
.. code-block::
#feature_type:eggNOG5-tax-2
GCA_003096415.1.fna.gz COG0656 COG0661 COG3161 COG0358 ...
GCF_000006765.1.fna.gz COG0593 COG3255 COG1195 COG0187 ...
GCF_000006905.1.fna.gz COG2202 COG0169 COG0237 COG0847 ...
...
Here, all lines starting with ``#`` denote a metadata field read by phenotrex - for example the
type of features, in this case eggNOG clusters for the NCBI taxon ID 2 (Bacteria) from eggNOG version 5.
In each following line, the first field denotes the identifier of the input genome (the file name),
followed by all features found in the genome, each separated by tabs.
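Because the genotype file is plain TSV, it is easy to inspect programmatically. Below is a
minimal parsing sketch; the helper is illustrative only and not part of the phenotrex API:

.. code-block:: python

    def read_genotype(path):
        """Parse a phenotrex genotype TSV into {identifier: set of eggNOG IDs}."""
        genomes = {}
        with open(path) as handle:
            for line in handle:
                if line.startswith("#"):  # metadata fields such as the feature type
                    continue
                identifier, *features = line.rstrip("\n").split("\t")
                genomes[identifier] = set(features)
        return genomes

    genotype = read_genotype("T3SS.train_eval.genotype")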
.. note::
Feature creation by phenotrex scales reasonably well with the number of threads
supplied to the ``compute-genotype`` command (``--threads``). However, for large sets of genomes
and when compute cluster resources are available to the user, it may be more expedient to compute
subsets of genomes in parallel on different machines, and concatenate them afterwards.
Training of Phenotrex Classifiers
---------------------------------
To train a phenotrex classifier, two tabular input files are required: The ``genotype`` file
(created from FASTA files in the last section), containing representations of the input genomes; and
the ``phenotype`` file, containing true phenotypic trait values for each input genome on which to train and
evaluate the model. For this tutorial, we provide a phenotype file containing information on Type 3
secretion system (T3SS) presence in each of the input genomes.
The tabular phenotype file required for training and model evaluation has the following shape:
.. code-block:: console
Identifier T3SS
GCF_000012905.2.fna.gz NO
GCF_000195735.1.fna.gz NO
GCF_000060345.1.fna.gz NO
GCF_000959505.1.fna.gz YES
GCF_000220235.1.fna.gz NO
GCF_000190695.1.fna.gz NO
GCF_000007605.1.fna.gz YES
GCF_000195995.1.fna.gz YES
GCF_000015365.1.fna.gz NO
GCF_000173115.1.fna.gz NO
GCF_000173095.1.fna.gz NO
GCA_003096415.1.fna.gz NO
...
The first column of the file contains identifiers (file names) mapping to those in the genotype file,
and the second column contains true phenotypic trait values. During training, the model will store the header of
column 2 as the name of the trait.
Phenotrex implements model training using two different machine learning algorithms:
`XGBoost`_ (XGB) and `Support Vector Machine`_ (SVM). For each algorithm, a number of hyperparameters
are settable for training and evaluation. Please consult the output of
``phenotrex train xgb --help`` and ``phenotrex train svm --help``, as well as the relevant documentation of the
underlying implementations. When no hyperparameters are selected, reasonable
(but possibly suboptimal) defaults are used.
To train an XGB classifier with the previously created genotype and the given phenotype file:
.. code-block:: console
$ phenotrex train xgb \
--genotype T3SS.train_eval.genotype \
--phenotype train_eval/T3SS.train_eval.phenotype \
--weights \
--out T3SS.pkl
This will create a new model artifact ``T3SS.pkl`` in the current directory, and a
tabular file ``T3SS.pkl.rank`` representing the relative impact of input features on prediction
output as learned by the model.
Performance Estimation of Phenotrex Classifiers
-----------------------------------------------
The default way for phenotrex to estimate model performance (other than applying the trained model
to a held back test set) is `nested cross-validation`_ (CV).
This allows the estimation of predictive performance for a given set of training data and hyperparameters.
To estimate performance of the model trained in the previous section, we perform a 10x/5x nested
cross-validation like so:
.. code-block:: console
$ phenotrex cv xgb \
--genotype T3SS.train_eval.genotype \
--phenotype train_eval/T3SS.train_eval.phenotype \
--out T3SS.misclassifications.tsv \
--folds 5 \
--replicates 10 \
--threads 4
After training, predictive performance metrics averaged over outer CV folds will be printed
to stderr, and a new tabular file ``T3SS.misclassifications.tsv`` will be created. This file
contains the identifiers, phenotypic trait labels and fractions of misclassifications of the sample over
outer CV folds.
.. note::
The above command does not accept a trained model artifact.
Since cross-validation is performed by training several models on subsets of the given data, a final
model is not warranted here.
In general, training of the final classifier with ``phenotrex train {xgb,svm}``
    should be performed only when satisfied with the performance of the selected hyperparameters as given
by cross-validation.
Performance Estimation for Metagenomic Phenotrex Classifiers
------------------------------------------------------------
For phenotrex models intended to be applied to metagenome assembled genomes, it is useful to estimate the impact
of missing and/or contaminating genomic features on the model output. In phenotrex, this is achieved
by randomly resampling the features of validation genomes to simulate incompleteness and contamination
(see `Feldbauer et al. 2015`_). For example, to estimate performance of a model on 80% complete and
5% contaminated genomic bins, nested cross-validation is performed where from each validation sample
20% of eggNOG cluster features are randomly removed. To simulate 5% contamination conservatively, a requisite
number of eggNOG clusters, drawn randomly only from genomes of the opposite label, is added to the genome.
This is performed at regular intervals of completeness and contamination, resulting in a JSON file
detailing the estimated predictive performance at each step.
By default, a grid of 5% increments of completeness and contamination is evaluated.
To perform cross-validation under consideration of contamination and completeness (CCCV), perform:
.. code-block:: console
$ phenotrex cccv xgb \
--genotype T3SS.train_eval.genotype \
--phenotype train_eval/T3SS.train_eval.phenotype \
--out T3SS.cccv.json \
--folds 5 \
--replicates 10 \
--threads 4 \
--verb
The above command results in a file ``T3SS.cccv.json`` being created, containing performance
metrics at each step of the completeness/contamination grid.
.. note::
The default binary classification performance metric used by phenotrex is Balanced Accuracy (bACC),
which is the arithmetic mean of Sensitivity and Specificity of prediction:
.. math::
bACC = \frac{1}{2} (\frac{TP}{TP + FN} + \frac{TN}{TN + FP})
This metric avoids inflating performance estimates on imbalanced datasets, and ranges from 0.5
(performance is indistinguishable from random) to 1.0 (perfect performance).
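As a quick worked example of the formula (the confusion-matrix counts below are made up):

.. code-block:: python

    def balanced_accuracy(tp, fn, tn, fp):
        """Arithmetic mean of sensitivity and specificity."""
        sensitivity = tp / (tp + fn)
        specificity = tn / (tn + fp)
        return 0.5 * (sensitivity + specificity)

    balanced_accuracy(tp=40, fn=10, tn=130, fp=20)  # 0.5 * (0.8 + 0.8667) ≈ 0.83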
Users are encouraged to determine the contamination and completeness levels of input metagenomic bins
(e.g. using `CheckM`_), and critically examine the validity of predictions made by the classifier using
the estimated performance at the closest point in the completeness/contamination grid.
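As a sketch of that lookup (the JSON layout assumed below, completeness keys mapping to
contamination keys mapping to metrics, should be verified against your own output file):

.. code-block:: python

    import json

    def closest_grid_estimate(path, completeness, contamination):
        """Return the metrics stored for the nearest grid point."""
        with open(path) as handle:
            grid = json.load(handle)
        comp = min(grid, key=lambda c: abs(float(c) - completeness))
        cont = min(grid[comp], key=lambda c: abs(float(c) - contamination))
        return grid[comp][cont]

    closest_grid_estimate("T3SS.cccv.json", completeness=0.8, contamination=0.05)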
Predicting Phenotypes with Phenotrex
------------------------------------
For prediction, the threshold confidence of the classifier can be specified - all predictions
with confidence below this threshold are then masked with 'N/A'.
.. note::
The reported ``Confidence`` of the classifier is the model's internal confidence in its prediction
given its input data. If the input genome is significantly incomplete or contaminated, this measure
may be misleading, as the genome could be missing vital information required for correct classification
by the model. For such cases, the external confidence measure for the given completeness/contamination level as
computed by ``phenotrex cccv {xgb,svm}`` should be considered as well.
Prediction of phenotypic traits with a pre-computed genotype file derived from genomes in the
``test/genomes`` directory (see section `Creation of Phenotrex Input Features`_):
.. code-block:: console
$ phenotrex predict \
--genotype test/T3SS.test.genotype \
--classifier T3SS.pkl \
--min_proba 0.6 \
--verb > T3SS.test_predictions.tsv
The ``predict`` command outputs prediction results directly to stdout. When redirecting stdout to
a file, this results in a 3-column TSV file of the following shape:
.. code-block::
# Trait: T3SS
Identifier Trait present Confidence
GCF_000006645.1.fna.gz YES 0.8604
GCF_000006665.1.fna.gz YES 0.8675
GCF_000006825.1.fna.gz NO 0.6617
GCF_000007165.1.fna.gz YES 0.6771
GCF_000007205.1.fna.gz YES 0.8261
GCF_000007445.1.fna.gz YES 0.8183
...
Lines starting with ``#`` represent metadata, in this case the trait name saved in the used model artifact.
Explanation of Phenotrex Predictions
------------------------------------
In addition to providing predicted trait labels and confidence measures,
phenotrex can provide additional explanations of its decision process.
This can help debug faulty hyperparameter configurations and help identify errors in the training data.
Model explanation is done by gauging the importance of input features identified in genomes at training and prediction time.
Feature Importance at Training Time
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The relative impact of features learned by phenotrex models is output at training time when the flag
``--weights`` is added to the ``phenotrex train {xgb,svm}`` command. The meaning of the importance
differs depending on the selected ML algorithm: when using XGB, the measure represents the overall
importance of that feature in the decision process of the model (irrespective of the final prediction),
when using SVM, the measure correlates with the probability of calling YES (positive
values) or NO (negative values) for the trait in question.
Feature Importance at Prediction Time
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A second, and arguably more useful type of explanation can be computed at prediction time.
For each predicted genome, a list of features is created which, either by presence or absence,
contributed most to the prediction output for that genome.
Feature importance is represented by `SHAP`_ (SHapley Additive exPlanations) values.
The sum of SHAP values of all features considered by the model is directly related to the probability
of calling YES for the trait and genome in question.
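For intuition, here is a minimal sketch of how such additive attributions map to a
probability, assuming the usual log-odds link used for tree-based binary classifiers
(the exact link and base value used by phenotrex are assumptions here):

.. code-block:: python

    import math

    def shap_to_probability(base_value, shap_values):
        """Logistic transform of the base value plus summed SHAP contributions."""
        log_odds = base_value + sum(shap_values)
        return 1.0 / (1.0 + math.exp(-log_odds))

    # three contributions taken from the per-sample table shown further below
    shap_to_probability(0.2, [-0.46379, -0.19678, 0.16128])  # ≈ 0.43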
.. note::
Feature explanation at prediction time is implemented by the `shap package`_, which efficiently
computes the required explanations for XGB models with de facto zero overhead.
For SVM models however, this calculation can be extremely costly. We thus suggest that for
use cases where model explainability is important, XGB should be preferred over SVM.
To create feature explanations at prediction time:
.. code-block:: console
$ phenotrex predict \
--genotype test/T3SS.test.genotype \
--classifier T3SS.pkl \
--min_proba 0.6 \
--out_explain_summary T3SS.expl_summary.tsv \
--out_explain_per_sample T3SS.expl_per_sample.tsv \
--n_max_explained_features 10 \
--verb > T3SS.test_predictions.tsv
In addition to the original output file containing predictions, two additional files have been created:
- ``T3SS.expl_per_sample.tsv``
This file contains for each predicted genome, the features which had the highest impact on the
model output, as well as the sign of that impact.
.. code-block:: console
rank Sample Feature Feature Presence SHAP Value (class=YES) Feature Annotation
...
0 GCF_000006825.1.fna.gz COG4789 0.0 -0.46379 Type iii secretion
1 GCF_000006825.1.fna.gz COG1025 0.0 -0.19678 Belongs to the peptidase M16 family
2 GCF_000006825.1.fna.gz COG0814 1.0 0.16128 amino acid
3 GCF_000006825.1.fna.gz COG1330 1.0 0.15993 A helicase nuclease that prepares dsDNA breaks (DSB)...
4 GCF_000006825.1.fna.gz COG1459 1.0 0.14634 type II secretion system
5 GCF_000006825.1.fna.gz COG1450 0.0 -0.14371 Type ii and iii secretion system protein
...
For example, for the genome ``GCF_000006825.1.fna.gz``, we see that the absence of
``COG4789`` is the single most impactful contribution to the prediction output towards the
(correct) prediction ``NO``. We can immediately identify another secretory system component
absent from the genome (``COG1450``) which contributes to this prediction output. However, as
the used model was trained on a small toy dataset, the presence of ``COG0814``
with the somewhat unhelpful annotation "amino acid" and other features make significant
contributions towards flipping the prediction to ``YES``, leading ultimately to a correct
output but with a low confidence of 0.66.
- ``T3SS.expl_summary.tsv``
This file contains the overall highest impact features, averaged over all SHAP contributions in
all predicted genomes. For each feature, the average SHAP value change upon presence or absence
of the feature is given, as well as the number of samples in which the feature was present and
absent.
.. code-block:: console
Feature Mean SHAP If Present Mean SHAP If Absent N(present) N(absent) Feature Annotation
COG4789 0.69559 -0.48636 29 162 Type iii secretion
COG1025 0.26914 -0.17944 46 145 Belongs to the peptidase M16 family
COG1330 0.10883 -0.12163 72 119 A helicase nuclease that prepares dsDNA breaks (DSB)...
COG1929 0.22469 -0.08981 37 154 Belongs to the glycerate kinase type-1 family
COG0833 0.20413 -0.08887 38 153 amino acid
COG0814 0.13396 -0.07835 60 131 amino acid
COG3835 0.18331 -0.05811 38 153 regulator
COG1459 0.11474 -0.05503 73 118 type II secretion system
COG1450 0.03356 -0.10312 107 84 Type ii and iii secretion system protein
.. _eggNOG: http://eggnog5.embl.de/
.. _Prodigal: https://github.com/hyattpd/Prodigal
.. _deepnog: https://github.com/univieCUBE/deepnog
.. _XGBoost: https://xgboost.readthedocs.io/en/release_1.3.0/
.. _Support Vector Machine: https://scikit-learn.org/stable/modules/generated/sklearn.svm.LinearSVC.html
.. _nested cross-validation: https://scikit-learn.org/stable/auto_examples/model_selection/plot_nested_cross_validation_iris.html
.. _Feldbauer et al. 2015: https://bmcbioinformatics.biomedcentral.com/articles/10.1186/1471-2105-16-S14-S1
.. _CheckM: https://github.com/Ecogenomics/CheckM/
.. _SHAP: https://doi.org/10.1038/s42256-019-0138-9
.. _shap package: https://github.com/slundberg/shap
| PypiClean |
/open-aea-web3-6.0.1.tar.gz/open-aea-web3-6.0.1/web3/tools/benchmark/main.py | import argparse
import asyncio
from collections import (
defaultdict,
)
import logging
import sys
import timeit
from typing import (
Any,
Callable,
Dict,
Union,
)
from eth_typing import (
ChecksumAddress,
)
from web3 import (
AsyncHTTPProvider,
AsyncWeb3,
HTTPProvider,
Web3,
)
from web3.middleware import (
async_buffered_gas_estimate_middleware,
async_gas_price_strategy_middleware,
buffered_gas_estimate_middleware,
gas_price_strategy_middleware,
)
from web3.tools.benchmark.node import (
GethBenchmarkFixture,
)
from web3.tools.benchmark.reporting import (
print_entry,
print_footer,
print_header,
)
from web3.tools.benchmark.utils import (
wait_for_aiohttp,
wait_for_http,
)
from web3.types import (
Wei,
)
KEYFILE_PW = "web3py-test"
parser = argparse.ArgumentParser()
parser.add_argument(
"--num-calls",
type=int,
default=10,
help="The number of RPC calls to make",
)
# TODO - layers to test:
# contract.functions.method(...).call()
# w3.eth.call(...)
# HTTPProvider.make_request(...)
def build_web3_http(endpoint_uri: str) -> Web3:
wait_for_http(endpoint_uri)
_w3 = Web3(
HTTPProvider(endpoint_uri),
middlewares=[gas_price_strategy_middleware, buffered_gas_estimate_middleware],
)
return _w3
async def build_async_w3_http(endpoint_uri: str) -> AsyncWeb3:
await wait_for_aiohttp(endpoint_uri)
_w3 = AsyncWeb3(
AsyncHTTPProvider(endpoint_uri),
middlewares=[
async_gas_price_strategy_middleware,
async_buffered_gas_estimate_middleware,
],
)
return _w3
def sync_benchmark(func: Callable[..., Any], n: int) -> Union[float, str]:
try:
starttime = timeit.default_timer()
for _ in range(n):
func()
endtime = timeit.default_timer()
execution_time = endtime - starttime
return execution_time
except Exception:
return "N/A"
async def async_benchmark(func: Callable[..., Any], n: int) -> Union[float, str]:
try:
starttime = timeit.default_timer()
for result in asyncio.as_completed([func() for _ in range(n)]):
await result
execution_time = timeit.default_timer() - starttime
return execution_time
except Exception:
return "N/A"
def unlocked_account(w3: Web3) -> ChecksumAddress:
w3.geth.personal.unlock_account(w3.eth.coinbase, KEYFILE_PW)
return w3.eth.coinbase
async def async_unlocked_account(async_w3: AsyncWeb3) -> ChecksumAddress:
coinbase = await async_w3.eth.coinbase
await async_w3.geth.personal.unlock_account(coinbase, KEYFILE_PW)
return coinbase
def main(logger: logging.Logger, num_calls: int) -> None:
fixture = GethBenchmarkFixture()
for built_fixture in fixture.build():
for _ in built_fixture:
w3_http = build_web3_http(fixture.endpoint_uri)
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
async_w3_http = loop.run_until_complete(
build_async_w3_http(fixture.endpoint_uri)
)
async_unlocked_acct = loop.run_until_complete(
async_unlocked_account(async_w3_http)
)
methods = [
{
"name": "eth_gasPrice",
"params": {},
"exec": lambda: w3_http.eth.gas_price,
"async_exec": lambda: async_w3_http.eth.gas_price,
},
{
"name": "eth_sendTransaction",
"params": {},
"exec": lambda: w3_http.eth.send_transaction(
{
"to": "0xd3CdA913deB6f67967B99D67aCDFa1712C293601",
"from": unlocked_account(w3_http),
"value": Wei(12345),
}
),
"async_exec": lambda: async_w3_http.eth.send_transaction(
{
"to": "0xd3CdA913deB6f67967B99D67aCDFa1712C293601",
"from": async_unlocked_acct,
"value": Wei(12345),
}
),
},
{
"name": "eth_blockNumber",
"params": {},
"exec": lambda: w3_http.eth.block_number,
"async_exec": lambda: async_w3_http.eth.block_number,
},
{
"name": "eth_getBlock",
"params": {},
"exec": lambda: w3_http.eth.get_block(1),
"async_exec": lambda: async_w3_http.eth.get_block(1),
},
]
def benchmark(method: Dict[str, Any]) -> None:
outcomes: Dict[str, Union[str, float]] = defaultdict(lambda: "N/A")
outcomes["name"] = method["name"]
outcomes["HTTPProvider"] = sync_benchmark(
method["exec"],
num_calls,
)
outcomes["AsyncHTTPProvider"] = loop.run_until_complete(
async_benchmark(method["async_exec"], num_calls)
)
print_entry(logger, outcomes)
print_header(logger, num_calls)
for method in methods:
benchmark(method)
print_footer(logger)
if __name__ == "__main__":
args = parser.parse_args()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler(sys.stdout))
main(logger, args.num_calls) | PypiClean |
/oslo.policy-4.2.1-py3-none-any.whl/oslo_policy/opts.py |
import copy
from oslo_config import cfg
from oslo_policy._i18n import _
__all__ = [
'list_opts',
'set_defaults',
]
_option_group = 'oslo_policy'
_options = [
cfg.BoolOpt('enforce_scope',
default=False,
help=_('This option controls whether or not to enforce scope '
'when evaluating policies. If ``True``, the scope of '
'the token used in the request is compared to the '
'``scope_types`` of the policy being enforced. If the '
'scopes do not match, an ``InvalidScope`` exception '
'will be raised. If ``False``, a message will be '
'logged informing operators that policies are being '
'invoked with mismatching scope.')),
cfg.BoolOpt('enforce_new_defaults',
default=False,
help=_('This option controls whether or not to use old '
'deprecated defaults when evaluating policies. If '
'``True``, the old deprecated defaults are not going '
'to be evaluated. This means if any existing token is '
'allowed for old defaults but is disallowed for new '
'defaults, it will be disallowed. It is encouraged to '
'enable this flag along with the ``enforce_scope`` '
'flag so that you can get the benefits of new defaults '
'and ``scope_type`` together. If ``False``, the '
'deprecated policy check string is logically OR\'d '
'with the new policy check string, allowing for a '
'graceful upgrade experience between releases with '
'new policies, which is the default behavior.')),
cfg.StrOpt('policy_file',
default='policy.json',
help=_('The relative or absolute path of a file that maps '
'roles to permissions for a given service. Relative '
'paths must be specified in relation to the '
'configuration file setting this option.'),
deprecated_group='DEFAULT'),
cfg.StrOpt('policy_default_rule',
default='default',
help=_('Default rule. Enforced when a requested rule is not '
'found.'),
deprecated_group='DEFAULT'),
cfg.MultiStrOpt('policy_dirs',
default=['policy.d'],
help=_('Directories where policy configuration files are '
'stored. They can be relative to any directory '
'in the search path defined by the config_dir '
'option, or absolute paths. The file defined by '
'policy_file must exist for these directories to '
'be searched. Missing or empty directories are '
'ignored.'),
deprecated_group='DEFAULT'),
cfg.StrOpt('remote_content_type',
choices=('application/x-www-form-urlencoded',
'application/json'),
default='application/x-www-form-urlencoded',
help=_("Content Type to send and receive data for "
"REST based policy check")),
cfg.BoolOpt('remote_ssl_verify_server_crt',
help=_("server identity verification for REST based "
"policy check"),
default=False),
cfg.StrOpt('remote_ssl_ca_crt_file',
help=_("Absolute path to ca cert file for REST based "
"policy check")),
cfg.StrOpt('remote_ssl_client_crt_file',
help=_("Absolute path to client cert for REST based "
"policy check")),
cfg.StrOpt('remote_ssl_client_key_file',
help=_("Absolute path client key file REST based "
"policy check")),
]
def list_opts():
"""Return a list of oslo.config options available in the library.
The returned list includes all oslo.config options which may be registered
at runtime by the library.
Each element of the list is a tuple. The first element is the name of the
group under which the list of elements in the second element will be
registered. A group name of None corresponds to the [DEFAULT] group in
config files.
    This function is also discoverable via the 'oslo.policy' entry point
under the 'oslo.config.opts' namespace.
The purpose of this is to allow tools like the Oslo sample config file
generator to discover the options exposed to users by this library.
:returns: a list of (group_name, opts) tuples
"""
return [(_option_group, copy.deepcopy(_options))]
def _register(conf):
"""Register the policy options.
We do this in a few places, so use a function to ensure it is done
consistently.
"""
conf.register_opts(_options, group=_option_group)
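# Usage sketch (illustrative, not part of the original module):
#
#     from oslo_config import cfg
#     from oslo_policy import opts
#
#     CONF = cfg.CONF
#     opts.set_defaults(CONF, policy_file='policy.yaml', enforce_scope=True)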
def set_defaults(conf, policy_file=None, **kwargs):
"""Set defaults for configuration variables.
Overrides default options values.
:param conf: Configuration object, managed by the caller.
:type conf: oslo.config.cfg.ConfigOpts
:param policy_file: The base filename for the file that
defines policies.
:type policy_file: unicode
:param kwargs: Any other configuration variable and their new
default value.
"""
_register(conf)
if policy_file is not None:
cfg.set_defaults(_options, policy_file=policy_file)
if kwargs:
cfg.set_defaults(_options, **kwargs) | PypiClean |
/ctreport_selenium-1.1.4-py3-none-any.whl/ctreport_selenium/ctreport_html/testdetail/details.py | from ctreport_selenium.ctreport_html.properties import status, priority, severity
from ctreport_selenium.utility_classes import Status, Severity
def screenshot_section(test):
c = ''''''
for log in test._logs:
if log["type"] == "screenshot":
c += '''<a id="{}">
<i class="fas fa-image pl-2 pointer" style="font-size:30px; color:#aaa;" onclick="createimagemodal('{}','{}')"></i>
</a>
'''.format(log["id"], log["path"], log["path"])
elif log["type"] == "error":
if log["screenshot"] is not None:
c += '''
<a id="{}">
<i class="fas fa-image pl-2 pointer" style="font-size:30px; color:#cb3434;" onclick="createimagemodal('{}','{}')"></i>
</a>
'''.format(log["id"], log["screenshot"], log["screenshot"])
elif log["type"] == "verify" or log["type"] == "assert":
if log["screenshot"] is not None:
c += '''
<a id="{}" >
<i class="fas fa-image pl-2 pointer" style="font-size:30px; color:#cb3434;" onclick="createimagemodal('{}','{}')"></i>
</a>
'''.format(log["id"], log["screenshot"], log["screenshot"])
return c
def table_content(logs):
c = ''''''
for log in logs:
if log["type"] == "log":
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%;"><i class="{}" style="{}"></i></td>
<td style="width: 10%;">Log</td>
<td style="width: 70%;">{}</td>
<td style="width: 10%;"><span class="extrasmall">{}</span></td>
</tr>
'''.format(status[Status.PASS][0], status[Status.PASS][1], log["message"], log["start-time"])
elif log["type"] == "screenshot":
type_ = '''
<a href="#{}" data-toggle="popover" data-trigger="hover" data-content="{}" data-original-title="" title="">Screenshot</a>
'''.format(log["id"],log["path"])
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%;"><i class="{}" style="{}"></i></td>
<td style="width: 10%;">
{}
</td>
<td style="width: 70%;">{}
</td>
<td style="width: 10%;"><span class="extrasmall">{}</span></td>
</tr>
'''.format(status[Status.PASS][0], status[Status.PASS][1], type_, log["message"],
log["start-time"])
elif log["type"] == "error":
type_ = '''
<a href="#{}" data-toggle="popover" data-trigger="hover" data-content="{}" data-original-title="" title="">Error</a>
'''
screenshot_path = log["screenshot"]
err = ''''''
if log["error"] is not None:
err= '''
<span class="extrasmall">{}</span>
'''.format(log["error"] )
if screenshot_path is None:
type_ = "Error"
else:
                type_ = type_.format(log["id"], screenshot_path)
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%;"><i class="{}" style="{}"></i></td>
<td style="width: 10%;">
{}
</td>
<td style="width: 70%;">{}
<br>
{}
</td>
<td style="width: 10%;"><span class="extrasmall">{}</span></td>
</tr>
'''.format(status[Status.FAIL][0], status[Status.FAIL][1], type_, log["message"], err,
log["start-time"])
elif log["type"] == "broken":
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%;"><i class="{}" style="{}"></i></td>
<td style="width: 10%;">Broken</td>
<td style="width: 70%; color:#F7464A;">{}</td>
<td style="width: 10%; "><span class="extrasmall">{}</span></td>
</tr>
'''.format(status[Status.BROKEN][0], status[Status.BROKEN][1], log["error"][:500], log["start-time"])
elif log["type"] == "skipped":
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%;"><i class="{}" style="{}"></i></td>
<td style="width: 10%;">Skipped</td>
<td style="width: 70%; color:#1E90FF;">{}</td>
<td style="width: 10%; "></td>
</tr>
'''.format(status[Status.SKIP][0], status[Status.SKIP][1], log["message"], log["start-time"])
elif log["type"] == "verify":
message_ = ''''''
if log["message"] is not None:
message_ = '''
<span>{}</span>
<br/>
'''.format(log["message"])
type_ = '''
<a href="#{}" data-toggle="popover" data-trigger="hover" data-content="{}" data-original-title="" title="" style="text-decoration:none">{}</a>
'''
screenshot_path = log["screenshot"]
if screenshot_path is None:
type_ = "Verification"
else:
type_ = type_.format(log["id"], screenshot_path, "Verification")
if log["data-type"] is not "others":
e_a_content = '''
<td style="width: 70%">
'''+message_ + '''
<i class="{} pointer" onclick="expandFooter('info')" style="{}"></i>
Expected:
<i class="fas fa-ellipsis-h pointer" onclick="createmodal('{}')"></i>
Actual:
<i class="fas fa-ellipsis-h pointer" onclick="createmodal('{}')"></i>
<br/>
'''.format(severity[log["severity"]][0], severity[log["severity"]][1], log["id"], log["id"])+ '''
</td>
'''
else:
e_a_content = '''
<td style="width: 70%">
'''+message_ + '''
<i class="{} pointer" onclick="expandFooter('info')" style="{}"></i>
Expected: {} Actual: {}
<br>
'''.format(severity[log["severity"]][0], severity[log["severity"]][1], log["expected"],
log["actual"])+ '''
</td>
'''
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%"><i class="{}" style="{}"></i></td>
<td style="width: 10%">{}</td>
{}
<td style="width: 10%"><span class="extrasmall">{}</span></td>
</tr>
'''.format(status[log["status"]][0], status[log["status"]][1], type_, e_a_content, log["start-time"])
elif log["type"] == "assert":
message_ = ''''''
if log["message"] is not None:
message_ = '''
<span>{}</span>
<br/>
'''.format(log["message"])
type_ = '''
<a href="#{}" data-toggle="popover" data-trigger="hover" data-content="{}" data-original-title="" title="" style="text-decoration:none">{}</a>
'''
screenshot_path = log["screenshot"]
if screenshot_path is None:
type_ = "Assertion"
else:
type_ = type_.format(log["id"], screenshot_path, "Assertion")
if log["data-type"] is not "others":
e_a_content = '''
<td style="width: 70%">
'''+message_ + '''
<i class="{} pointer" onclick="expandFooter('info')" style="{}"></i>
Expected:
<i class="fas fa-ellipsis-h pointer" onclick="createmodal('{}')"></i>
Actual:
<i class="fas fa-ellipsis-h pointer" onclick="createmodal('{}')"></i>
'''.format(severity[Severity.BLOCKER][0], severity[Severity.BLOCKER][1], log["id"], log["id"]) + '''
</td>
'''
else:
e_a_content = '''
<td style="width: 70%">
'''+message_ + '''
<i class="{} pointer" onclick="expandFooter('info')" style="{}"></i>
Expected: {} Actual: {}
<br>
'''.format(severity[Severity.BLOCKER][0], severity[Severity.BLOCKER][1], log["expected"],
log["actual"]) + '''
</td>
'''
c += '''
<tr class="border-bottom">
<td class="text-sm-center" style="width: 10%"><i class="{}" style="{}"></i></td>
<td style="width: 10%">{}</td>
{}
<td style="width: 10%"><span class="extrasmall">{}</span></td>
</tr>
'''.format(status[log["status"]][0], status[log["status"]][1], type_, e_a_content, log["start-time"])
return c
def section(tests):
index = 0
c = ''''''
for test in tests:
section_head = '''
<li class="list-group-item font-weight-bold test-details-header" >
<span>{} {}</span>
<i id="expand" class="fas fa-caret-square-down pointer" style=" font-size:x-large;" data-toggle="collapse" data-target="#moredetails{}" onclick='expandFunction("{}")'></i>
</li>
'''.format(test._id, test._name, index, test._id)
more_details = '''
<li id="moredetails{}" class=" more-details list-group-item small panel-collapse collapse">
<div class="row">
<div class="col-5">
<div class="row">
<div class="col-3">Status</div>
<div class="col-9">
<i class="{}" style="{} font-size: 13px;"></i>
{}</div>
</div>
<div class="row">
<div class="col-3">Priority</div>
<div class="col-9">
<i class="{}" style="{} font-size: 13px;"></i>
{}</div>
</div>
</div>
<div class="col-7">
<div class="row">
<div class="col-3">Start-time</div>
<div class="col-9">{}</div>
</div>
<div class="row">
<div class="col-3">End-time</div>
<div class="col-9">{}</div>
</div>
<div class="row">
<div class="col-3">Duration</div>
<div class="col-9">{}(H:MM:SS)</div>
</div>
</div>
</div>
<div>
<span>Description</span>
<p>{}</p>
</div>
</li>
'''.format(index, status[test._result][0], status[test._result][1], test._result.capitalize(),
priority[test._priority][0],
priority[test._priority][1], test._priority.capitalize(), test._start_time, test._end_time,
test._duration, test._description)
test_steps = '''
<li class="list-group-item">
<table class="table medium table-borderless table-hover test-details-content">
<thead class="small border-bottom">
<tr>
<th class="text-sm-center" >STATUS</th>
<th>TYPE</th>
<th>DETAILS</th>
<th></th>
</tr>
</thead>
<tbody class="small">
''' + table_content(test._logs) + '''
</tbody>
</table>
''' + screenshot_section(test) + '''
</li>
'''.format()
c += '''
<div id="''' + test._id + '''" class="filterDiv1 ''' + test._result + '''" style="padding-bottom: 20px;">
<ul class="list-group">
''' + section_head + '''
''' + more_details + '''
''' + test_steps + '''
</ul>
</div>
'''
index += 1
return c
def content(tests):
c = '''
<div class="col-sm-12 col-lg-7">
<div id="search2">
''' + section(tests) + '''
</div>
</div>
'''
return c | PypiClean |
/accelbyte_py_sdk-0.48.0.tar.gz/accelbyte_py_sdk-0.48.0/accelbyte_py_sdk/api/platform/models/catalog_change_statistics.py |
# template file: ags_py_codegen
# AccelByte Gaming Services Platform Service (4.34.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
class CatalogChangeStatistics(Model):
"""Catalog change statistics (CatalogChangeStatistics)
Properties:
count: (count) REQUIRED int
selected_count: (selectedCount) REQUIRED int
"""
# region fields
count: int # REQUIRED
selected_count: int # REQUIRED
# endregion fields
# region with_x methods
def with_count(self, value: int) -> CatalogChangeStatistics:
self.count = value
return self
def with_selected_count(self, value: int) -> CatalogChangeStatistics:
self.selected_count = value
return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "count"):
result["count"] = int(self.count)
elif include_empty:
result["count"] = 0
if hasattr(self, "selected_count"):
result["selectedCount"] = int(self.selected_count)
elif include_empty:
result["selectedCount"] = 0
return result
# endregion to methods
# region static methods
@classmethod
def create(
cls, count: int, selected_count: int, **kwargs
) -> CatalogChangeStatistics:
instance = cls()
instance.count = count
instance.selected_count = selected_count
return instance
@classmethod
def create_from_dict(
cls, dict_: dict, include_empty: bool = False
) -> CatalogChangeStatistics:
instance = cls()
if not dict_:
return instance
if "count" in dict_ and dict_["count"] is not None:
instance.count = int(dict_["count"])
elif include_empty:
instance.count = 0
if "selectedCount" in dict_ and dict_["selectedCount"] is not None:
instance.selected_count = int(dict_["selectedCount"])
elif include_empty:
instance.selected_count = 0
return instance
@classmethod
def create_many_from_dict(
cls, dict_: dict, include_empty: bool = False
) -> Dict[str, CatalogChangeStatistics]:
return (
            {k: cls.create_from_dict(v, include_empty=include_empty) for k, v in dict_.items()}
if dict_
else {}
)
@classmethod
def create_many_from_list(
cls, list_: list, include_empty: bool = False
) -> List[CatalogChangeStatistics]:
return (
[cls.create_from_dict(i, include_empty=include_empty) for i in list_]
if list_
else []
)
@classmethod
def create_from_any(
cls, any_: any, include_empty: bool = False, many: bool = False
) -> Union[
CatalogChangeStatistics,
List[CatalogChangeStatistics],
Dict[Any, CatalogChangeStatistics],
]:
if many:
if isinstance(any_, dict):
return cls.create_many_from_dict(any_, include_empty=include_empty)
elif isinstance(any_, list):
return cls.create_many_from_list(any_, include_empty=include_empty)
else:
raise ValueError()
else:
return cls.create_from_dict(any_, include_empty=include_empty)
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"count": "count",
"selectedCount": "selected_count",
}
@staticmethod
def get_required_map() -> Dict[str, bool]:
return {
"count": True,
"selectedCount": True,
}
# endregion static methods | PypiClean |
/facenet_khairulimam-0.0.2-py3-none-any.whl/res-facenet/models.py | import torch
import torch.nn as nn
from torchvision.models import resnet50
try:
from torch.hub import load_state_dict_from_url
except ImportError:
from torch.utils.model_zoo import load_url as load_state_dict_from_url
model_urls = dict(
acc_920='https://github.com/khrlimam/facenet/releases/download/acc-0.920/model920-6be7e3e9.pth',
acc_921='https://github.com/khrlimam/facenet/releases/download/acc-0.92135/model921-af60fb4f.pth'
)
def load_state(arch, progress=True):
state = load_state_dict_from_url(model_urls.get(arch), progress=progress)
return state
def model_920(pretrained=True, progress=True):
model = FaceNetModel()
if pretrained:
state = load_state('acc_920', progress)
model.load_state_dict(state['state_dict'])
return model
def model_921(pretrained=True, progress=True):
model = FaceNetModel()
if pretrained:
state = load_state('acc_921', progress)
model.load_state_dict(state['state_dict'])
return model
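# Example usage (a minimal sketch; the 224x224 input size matches the
# 100352-unit (2048 * 7 * 7) fully-connected layer defined below):
#
#   model = model_920(pretrained=True)
#   model.eval()
#   with torch.no_grad():
#       emb = model(torch.randn(1, 3, 224, 224))  # -> (1, 128) scaled embedding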
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size(0), -1)
class FaceNetModel(nn.Module):
def __init__(self, pretrained=False):
super(FaceNetModel, self).__init__()
self.model = resnet50(pretrained)
embedding_size = 128
num_classes = 500
self.cnn = nn.Sequential(
self.model.conv1,
self.model.bn1,
self.model.relu,
self.model.maxpool,
self.model.layer1,
self.model.layer2,
self.model.layer3,
self.model.layer4)
# modify fc layer based on https://arxiv.org/abs/1703.07737
self.model.fc = nn.Sequential(
Flatten(),
# nn.Linear(100352, 1024),
# nn.BatchNorm1d(1024),
# nn.ReLU(),
nn.Linear(100352, embedding_size))
self.model.classifier = nn.Linear(embedding_size, num_classes)
def l2_norm(self, input):
input_size = input.size()
buffer = torch.pow(input, 2)
normp = torch.sum(buffer, 1).add_(1e-10)
norm = torch.sqrt(normp)
_output = torch.div(input, norm.view(-1, 1).expand_as(input))
output = _output.view(input_size)
return output
def freeze_all(self):
for param in self.model.parameters():
param.requires_grad = False
def unfreeze_all(self):
for param in self.model.parameters():
param.requires_grad = True
def freeze_fc(self):
for param in self.model.fc.parameters():
param.requires_grad = False
def unfreeze_fc(self):
for param in self.model.fc.parameters():
param.requires_grad = True
def freeze_only(self, freeze):
for name, child in self.model.named_children():
if name in freeze:
for param in child.parameters():
param.requires_grad = False
else:
for param in child.parameters():
param.requires_grad = True
def unfreeze_only(self, unfreeze):
for name, child in self.model.named_children():
if name in unfreeze:
for param in child.parameters():
param.requires_grad = True
else:
for param in child.parameters():
param.requires_grad = False
# returns face embedding(embedding_size)
def forward(self, x):
x = self.cnn(x)
x = self.model.fc(x)
features = self.l2_norm(x)
# Multiply by alpha = 10 as suggested in https://arxiv.org/pdf/1703.09507.pdf
alpha = 10
features = features * alpha
return features
def forward_classifier(self, x):
features = self.forward(x)
res = self.model.classifier(features)
return res | PypiClean |
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/groups/item/transitive_members/item/graph_application/graph_application_request_builder.py | from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ......models import application
from ......models.o_data_errors import o_data_error
class GraphApplicationRequestBuilder():
"""
Casts the previous resource to application.
"""
def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
"""
Instantiates a new GraphApplicationRequestBuilder and sets the default values.
Args:
pathParameters: The raw url or the Url template parameters for the request.
requestAdapter: The request adapter to use to execute the requests.
"""
if path_parameters is None:
raise Exception("path_parameters cannot be undefined")
if request_adapter is None:
raise Exception("request_adapter cannot be undefined")
# Url template to use to build the URL for the current request builder
self.url_template: str = "{+baseurl}/groups/{group%2Did}/transitiveMembers/{directoryObject%2Did}/graph.application{?%24select,%24expand}"
url_tpl_params = get_path_parameters(path_parameters)
self.path_parameters = url_tpl_params
self.request_adapter = request_adapter
async def get(self,request_configuration: Optional[GraphApplicationRequestBuilderGetRequestConfiguration] = None) -> Optional[application.Application]:
"""
Get the item of type microsoft.graph.directoryObject as microsoft.graph.application
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: Optional[application.Application]
"""
request_info = self.to_get_request_information(
request_configuration
)
from ......models.o_data_errors import o_data_error
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
from ......models import application
return await self.request_adapter.send_async(request_info, application.Application, error_mapping)
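    # Example usage (a minimal sketch; the path-parameter keys mirror the url
    # template above, and request_adapter / the ids are illustrative):
    #
    #   builder = GraphApplicationRequestBuilder(request_adapter, {
    #       "group%2Did": group_id,
    #       "directoryObject%2Did": directory_object_id,
    #   })
    #   app = await builder.get()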
def to_get_request_information(self,request_configuration: Optional[GraphApplicationRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
"""
Get the item of type microsoft.graph.directoryObject as microsoft.graph.application
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: RequestInformation
"""
request_info = RequestInformation()
request_info.url_template = self.url_template
request_info.path_parameters = self.path_parameters
request_info.http_method = Method.GET
request_info.headers["Accept"] = ["application/json"]
if request_configuration:
request_info.add_request_headers(request_configuration.headers)
request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
request_info.add_request_options(request_configuration.options)
return request_info
@dataclass
class GraphApplicationRequestBuilderGetQueryParameters():
"""
Get the item of type microsoft.graph.directoryObject as microsoft.graph.application
"""
def get_query_parameter(self,original_name: Optional[str] = None) -> str:
"""
Maps the query parameters names to their encoded names for the URI template parsing.
Args:
originalName: The original query parameter name in the class.
Returns: str
"""
if original_name is None:
raise Exception("original_name cannot be undefined")
if original_name == "expand":
return "%24expand"
if original_name == "select":
return "%24select"
return original_name
# Expand related entities
expand: Optional[List[str]] = None
# Select properties to be returned
select: Optional[List[str]] = None
@dataclass
class GraphApplicationRequestBuilderGetRequestConfiguration():
"""
Configuration for the request such as headers, query parameters, and middleware options.
"""
# Request headers
headers: Optional[Dict[str, Union[str, List[str]]]] = None
# Request options
options: Optional[List[RequestOption]] = None
# Request query parameters
query_parameters: Optional[GraphApplicationRequestBuilder.GraphApplicationRequestBuilderGetQueryParameters] = None | PypiClean |
/cnos_connector-0.1.9.tar.gz/cnos_connector-0.1.9/cnosdb_connector/client.py | from base64 import b64encode
import json
import logging
from urllib.request import urlopen, Request
from urllib.error import HTTPError as UrlLibHTTPError
from .error import HTTPError, DatabaseError
error_msgs = {
400: "parameter error",
401: "authorization error",
404: "api not found",
500: "internal error",
503: "system resources is not sufficient. It is may be caused by a huge query."
}
class Client:
def __init__(self, url: str = "http://localhost:8902/",
tenant: str = "cnosdb",
database: str = "public",
user: str = "root",
password: str = ""):
self._url = url
self._tenant = tenant
self._database = database
self._user = user
self._password = password
def sql(self, q: str) -> dict:
_url = self._url + f"api/v1/sql?db={self._database}&tenant={self._tenant}&pretty=true"
request = self.build_request(url=_url, data=q)
try:
response = urlopen(request)
except UrlLibHTTPError as e:
logging.error(f"Invalid syntax or Object not exists, get ERROR from database : {e}")
return {}
else:
self._check_status(response)
resp = response.read().decode('utf-8')
if resp == "":
return {}
resp = json.loads(resp)
return resp
    def line_protocol(self, lines: list):
_url = self._url + f"api/v1/write?db={self._database}&pretty=true"
_data = ""
for line in lines:
_data += line + "\n"
request = self.build_request(url=_url, data=_data)
response = urlopen(request)
self._check_status(response)
def set_database(self, database_name):
self._database = database_name
def set_user(self, user, password):
self._user = user
self._password = password
def set_url(self, url):
self._url = url
def _check_status(self, response):
status = response.status
if status != 200:
msg = error_msgs.get(status)
raise HTTPError(status, msg)
def build_request(self, url, data):
_basic = basic_auth(self._user, self._password)
request = Request(url=url, data=data.encode("UTF-8"))
request.add_header("Authorization", "Basic " + _basic)
request.add_header("Accept", "application/json")
return request
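# Example usage (a minimal sketch; the URL, credentials and the "test" table
# are illustrative):
#
#   client = Client(url="http://localhost:8902/", user="root", password="")
#   client.line_protocol(["test,host=server01 value=0.5"])
#   print(client.sql("SELECT * FROM test"))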
def basic_auth(name, pwd):
return b64encode((name + ":" + pwd).encode(encoding="UTF-8")).decode(encoding="UTF-8") | PypiClean |
/formification-1.2.0-py3-none-any.whl/formulaic/static/admin/formulaic/ember-formulaic/node_modules/normalize-path/README.md | # normalize-path [![NPM version](https://img.shields.io/npm/v/normalize-path.svg?style=flat)](https://www.npmjs.com/package/normalize-path) [![NPM monthly downloads](https://img.shields.io/npm/dm/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![NPM total downloads](https://img.shields.io/npm/dt/normalize-path.svg?style=flat)](https://npmjs.org/package/normalize-path) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/normalize-path.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/normalize-path)
> Normalize file path slashes to be unix-like forward slashes. Also condenses repeated slashes to a single slash and removes any trailing slashes unless disabled.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save normalize-path
```
## Usage
```js
var normalize = require('normalize-path');
normalize('\\foo\\bar\\baz\\');
//=> '/foo/bar/baz'
normalize('./foo/bar/baz/');
//=> './foo/bar/baz'
```
Pass `false` as the last argument to **keep** trailing slashes:
```js
normalize('./foo/bar/baz/', false);
//=> './foo/bar/baz/'
normalize('foo\\bar\\baz\\', false);
//=> 'foo/bar/baz/'
```
## About
### Related projects
* [contains-path](https://www.npmjs.com/package/contains-path): Return true if a file path contains the given path. | [homepage](https://github.com/jonschlinkert/contains-path "Return true if a file path contains the given path.")
* [ends-with](https://www.npmjs.com/package/ends-with): Returns `true` if the given `string` or `array` ends with `suffix` using strict equality for… [more](https://github.com/jonschlinkert/ends-with) | [homepage](https://github.com/jonschlinkert/ends-with "Returns `true` if the given `string` or `array` ends with `suffix` using strict equality for comparisons.")
* [is-absolute](https://www.npmjs.com/package/is-absolute): Polyfill for node.js `path.isAbsolute`. Returns true if a file path is absolute. | [homepage](https://github.com/jonschlinkert/is-absolute "Polyfill for node.js `path.isAbsolute`. Returns true if a file path is absolute.")
* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative "Returns `true` if the path appears to be relative.")
* [parse-filepath](https://www.npmjs.com/package/parse-filepath): Polyfill for node.js `path.parse`, parses a filepath into an object. | [homepage](https://github.com/jonschlinkert/parse-filepath "Polyfill for node.js `path.parse`, parses a filepath into an object.")
* [path-ends-with](https://www.npmjs.com/package/path-ends-with): Return `true` if a file path ends with the given string/suffix. | [homepage](https://github.com/jonschlinkert/path-ends-with "Return `true` if a file path ends with the given string/suffix.")
* [path-segments](https://www.npmjs.com/package/path-segments): Get n specific segments of a file path, e.g. first 2, last 3, etc. | [homepage](https://github.com/jonschlinkert/path-segments "Get n specific segments of a file path, e.g. first 2, last 3, etc.")
* [rewrite-ext](https://www.npmjs.com/package/rewrite-ext): Automatically re-write the destination extension of a filepath based on the source extension. e.g… [more](https://github.com/jonschlinkert/rewrite-ext) | [homepage](https://github.com/jonschlinkert/rewrite-ext "Automatically re-write the destination extension of a filepath based on the source extension. e.g `.coffee` => `.js`. This will only rename the ext, no other path parts are modified.")
* [unixify](https://www.npmjs.com/package/unixify): Convert Windows file paths to unix paths. | [homepage](https://github.com/jonschlinkert/unixify "Convert Windows file paths to unix paths.")
### Contributing
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 31 | [jonschlinkert](https://github.com/jonschlinkert) |
| 1 | [phated](https://github.com/phated) |
### Building docs
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
### Running tests
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
### Author
**Jon Schlinkert**
* [github/jonschlinkert](https://github.com/jonschlinkert)
* [twitter/jonschlinkert](https://twitter.com/jonschlinkert)
### License
Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.4.3, on March 29, 2017._ | PypiClean |
/vioneta_agro_frontend-20230809.1-py3-none-any.whl/hass_frontend/frontend_es5/17184-dXEh9TiKARE.js | "use strict";(self.webpackChunkvioneta_agro_frontend=self.webpackChunkvioneta_agro_frontend||[]).push([[17184],{32594:function(e,t,i){i.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},57793:function(e,t,i){var n,a=i(88962),s=i(33368),r=i(71650),o=i(82390),d=i(69205),l=i(70906),c=i(91808),u=i(68144),h=i(79932),v=i(44634);i(52039),(0,c.Z)([(0,h.Mo)("ha-battery-icon")],(function(e,t){var i=function(t){(0,d.Z)(n,t);var i=(0,l.Z)(n);function n(){var t;(0,r.Z)(this,n);for(var a=arguments.length,s=new Array(a),d=0;d<a;d++)s[d]=arguments[d];return t=i.call.apply(i,[this].concat(s)),e((0,o.Z)(t)),t}return(0,s.Z)(n)}(t);return{F:i,d:[{kind:"field",decorators:[(0,h.Cb)()],key:"batteryStateObj",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"batteryChargingStateObj",value:void 0},{kind:"method",key:"render",value:function(){return(0,u.dy)(n||(n=(0,a.Z)([' <ha-svg-icon .path="','"></ha-svg-icon> '])),(0,v.$)(this.batteryStateObj,this.batteryChargingStateObj))}}]}}),u.oi)},49400:function(e,t,i){var n,a,s,r,o,d=i(88962),l=i(33368),c=i(71650),u=i(82390),h=i(69205),v=i(70906),p=i(91808),f=i(68144),y=i(79932),b=i(82526),m=["entity_id","assumed_state","attribution","custom_ui_more_info","custom_ui_state_card","device_class","editable","emulated_hue_name","emulated_hue","entity_picture","event_types","friendly_name","haaska_hidden","haaska_name","icon","initial_state","last_reset","restored","state_class","supported_features","unit_of_measurement"],_=i(11654);i(46583),(0,p.Z)([(0,y.Mo)("ha-attributes")],(function(e,t){var i=function(t){(0,h.Z)(n,t);var i=(0,v.Z)(n);function n(){var t;(0,c.Z)(this,n);for(var a=arguments.length,s=new Array(a),r=0;r<a;r++)s[r]=arguments[r];return t=i.call.apply(i,[this].concat(s)),e((0,u.Z)(t)),t}return(0,l.Z)(n)}(t);return{F:i,d:[{kind:"field",decorators:[(0,y.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,y.Cb)()],key:"stateObj",value:void 0},{kind:"field",decorators:[(0,y.Cb)({attribute:"extra-filters"})],key:"extraFilters",value:void 0},{kind:"field",decorators:[(0,y.SB)()],key:"_expanded",value:function(){return!1}},{kind:"method",key:"render",value:function(){var e=this;if(!this.stateObj)return f.Ld;var t=this.computeDisplayAttributes(m.concat(this.extraFilters?this.extraFilters.split(","):[]));return 0===t.length?f.Ld:(0,f.dy)(n||(n=(0,d.Z)([' <ha-expansion-panel .header="','" outlined @expanded-will-change="','"> <div class="attribute-container"> '," </div> </ha-expansion-panel> "," "])),this.hass.localize("ui.components.attributes.expansion_header"),this.expandedChanged,this._expanded?(0,f.dy)(a||(a=(0,d.Z)([" "," "])),t.map((function(t){return(0,f.dy)(s||(s=(0,d.Z)([' <div class="data-entry"> <div class="key"> ',' </div> <div class="value"> '," </div> </div> "])),(0,b.S)(e.hass.localize,e.stateObj,e.hass.entities,t),(0,b.a)(e.hass.localize,e.stateObj,e.hass.locale,e.hass.config,e.hass.entities,t))}))):"",this.stateObj.attributes.attribution?(0,f.dy)(r||(r=(0,d.Z)([' <div class="attribution"> '," </div> "])),this.stateObj.attributes.attribution):"")}},{kind:"get",static:!0,key:"styles",value:function(){return[_.Qx,(0,f.iv)(o||(o=(0,d.Z)([".attribute-container{margin-bottom:8px;direction:ltr}.data-entry{display:flex;flex-direction:row;justify-content:space-between}.data-entry 
.value{max-width:60%;overflow-wrap:break-word;text-align:right}.key{flex-grow:1}.attribution{color:var(--secondary-text-color);text-align:center;margin-top:16px}pre{font-family:inherit;font-size:inherit;margin:0px;overflow-wrap:break-word;white-space:pre-line}hr{border-color:var(--divider-color);border-bottom:none;margin:16px 0}"])))]}},{kind:"method",key:"computeDisplayAttributes",value:function(e){return this.stateObj?Object.keys(this.stateObj.attributes).filter((function(t){return-1===e.indexOf(t)})):[]}},{kind:"method",key:"expandedChanged",value:function(e){this._expanded=e.detail.expanded}}]}}),f.oi)},46583:function(e,t,i){var n,a,s,r,o,d=i(99312),l=i(81043),c=i(88962),u=i(33368),h=i(71650),v=i(82390),p=i(69205),f=i(70906),y=i(91808),b=i(34541),m=i(47838),_=i(68144),k=i(79932),x=i(83448),g=i(47181),C=i(96151),Z=(i(52039),"M7.41,8.58L12,13.17L16.59,8.58L18,10L12,16L6,10L7.41,8.58Z");(0,y.Z)([(0,k.Mo)("ha-expansion-panel")],(function(e,t){var i,y=function(t){(0,p.Z)(n,t);var i=(0,f.Z)(n);function n(){var t;(0,h.Z)(this,n);for(var a=arguments.length,s=new Array(a),r=0;r<a;r++)s[r]=arguments[r];return t=i.call.apply(i,[this].concat(s)),e((0,v.Z)(t)),t}return(0,u.Z)(n)}(t);return{F:y,d:[{kind:"field",decorators:[(0,k.Cb)({type:Boolean,reflect:!0})],key:"expanded",value:function(){return!1}},{kind:"field",decorators:[(0,k.Cb)({type:Boolean,reflect:!0})],key:"outlined",value:function(){return!1}},{kind:"field",decorators:[(0,k.Cb)({type:Boolean,reflect:!0})],key:"leftChevron",value:function(){return!1}},{kind:"field",decorators:[(0,k.Cb)()],key:"header",value:void 0},{kind:"field",decorators:[(0,k.Cb)()],key:"secondary",value:void 0},{kind:"field",decorators:[(0,k.SB)()],key:"_showContent",value:function(){return this.expanded}},{kind:"field",decorators:[(0,k.IO)(".container")],key:"_container",value:void 0},{kind:"method",key:"render",value:function(){return(0,_.dy)(n||(n=(0,c.Z)([' <div class="top ','"> <div id="summary" @click="','" @keydown="','" @focus="','" @blur="','" role="button" tabindex="0" aria-expanded="','" aria-controls="sect1"> ',' <slot name="header"> <div class="header"> ',' <slot class="secondary" name="secondary">',"</slot> </div> </slot> ",' </div> <slot name="icons"></slot> </div> <div class="container ','" @transitionend="','" role="region" aria-labelledby="summary" aria-hidden="','" tabindex="-1"> '," </div> "])),(0,x.$)({expanded:this.expanded}),this._toggleContainer,this._toggleContainer,this._focusChanged,this._focusChanged,this.expanded,this.leftChevron?(0,_.dy)(a||(a=(0,c.Z)([' <ha-svg-icon .path="','" class="summary-icon ','"></ha-svg-icon> '])),Z,(0,x.$)({expanded:this.expanded})):"",this.header,this.secondary,this.leftChevron?"":(0,_.dy)(s||(s=(0,c.Z)([' <ha-svg-icon .path="','" class="summary-icon ','"></ha-svg-icon> '])),Z,(0,x.$)({expanded:this.expanded})),(0,x.$)({expanded:this.expanded}),this._handleTransitionEnd,!this.expanded,this._showContent?(0,_.dy)(r||(r=(0,c.Z)(["<slot></slot>"]))):"")}},{kind:"method",key:"willUpdate",value:function(e){var t=this;(0,b.Z)((0,m.Z)(y.prototype),"willUpdate",this).call(this,e),e.has("expanded")&&this.expanded&&(this._showContent=this.expanded,setTimeout((function(){t.expanded&&(t._container.style.overflow="initial")}),300))}},{kind:"method",key:"_handleTransitionEnd",value:function(){this._container.style.removeProperty("height"),this._container.style.overflow=this.expanded?"initial":"hidden",this._showContent=this.expanded}},{kind:"method",key:"_toggleContainer",value:(i=(0,l.Z)((0,d.Z)().mark((function e(t){var 
i,n,a=this;return(0,d.Z)().wrap((function(e){for(;;)switch(e.prev=e.next){case 0:if(!t.defaultPrevented){e.next=2;break}return e.abrupt("return");case 2:if("keydown"!==t.type||"Enter"===t.key||" "===t.key){e.next=4;break}return e.abrupt("return");case 4:if(t.preventDefault(),i=!this.expanded,(0,g.B)(this,"expanded-will-change",{expanded:i}),this._container.style.overflow="hidden",!i){e.next=12;break}return this._showContent=!0,e.next=12,(0,C.y)();case 12:n=this._container.scrollHeight,this._container.style.height="".concat(n,"px"),i||setTimeout((function(){a._container.style.height="0px"}),0),this.expanded=i,(0,g.B)(this,"expanded-changed",{expanded:this.expanded});case 17:case"end":return e.stop()}}),e,this)}))),function(e){return i.apply(this,arguments)})},{kind:"method",key:"_focusChanged",value:function(e){this.shadowRoot.querySelector(".top").classList.toggle("focused","focus"===e.type)}},{kind:"get",static:!0,key:"styles",value:function(){return(0,_.iv)(o||(o=(0,c.Z)([":host{display:block}.top{display:flex;align-items:center;border-radius:var(--ha-card-border-radius,12px)}.top.expanded{border-bottom-left-radius:0px;border-bottom-right-radius:0px}.top.focused{background:var(--input-fill-color)}:host([outlined]){box-shadow:none;border-width:1px;border-style:solid;border-color:var(--ha-card-border-color,var(--divider-color,#e0e0e0));border-radius:var(--ha-card-border-radius,12px)}.summary-icon{margin-left:8px}:host([leftchevron]) .summary-icon{margin-left:0;margin-right:8px}#summary{flex:1;display:flex;padding:var(--expansion-panel-summary-padding,0 8px);min-height:48px;align-items:center;cursor:pointer;overflow:hidden;font-weight:500;outline:0}.summary-icon{transition:transform 150ms cubic-bezier(.4,0,.2,1);direction:var(--direction)}.summary-icon.expanded{transform:rotate(180deg)}.header,::slotted([slot=header]){flex:1}.container{padding:var(--expansion-panel-content-padding,0 8px);overflow:hidden;transition:height .3s cubic-bezier(.4,0,.2,1);height:0px}.container.expanded{height:auto}.secondary{display:block;color:var(--secondary-text-color);font-size:12px}"])))}}]}}),_.oi)},86630:function(e,t,i){var n,a,s=i(99312),r=i(81043),o=i(88962),d=i(33368),l=i(71650),c=i(82390),u=i(69205),h=i(70906),v=i(91808),p=i(34541),f=i(47838),y=i(49412),b=i(3762),m=i(68144),_=i(79932),k=i(38346),x=i(96151);(0,v.Z)([(0,_.Mo)("ha-select")],(function(e,t){var i=function(t){(0,u.Z)(n,t);var i=(0,h.Z)(n);function n(){var t;(0,l.Z)(this,n);for(var a=arguments.length,s=new Array(a),r=0;r<a;r++)s[r]=arguments[r];return t=i.call.apply(i,[this].concat(s)),e((0,c.Z)(t)),t}return(0,d.Z)(n)}(t);return{F:i,d:[{kind:"field",decorators:[(0,_.Cb)({type:Boolean})],key:"icon",value:void 0},{kind:"method",key:"renderLeadingIcon",value:function(){return this.icon?(0,m.dy)(n||(n=(0,o.Z)(['<span class="mdc-select__icon"><slot name="icon"></slot></span>']))):m.Ld}},{kind:"method",key:"connectedCallback",value:function(){(0,p.Z)((0,f.Z)(i.prototype),"connectedCallback",this).call(this),window.addEventListener("translations-updated",this._translationsUpdated)}},{kind:"method",key:"disconnectedCallback",value:function(){(0,p.Z)((0,f.Z)(i.prototype),"disconnectedCallback",this).call(this),window.removeEventListener("translations-updated",this._translationsUpdated)}},{kind:"field",key:"_translationsUpdated",value:function(){var e=this;return(0,k.D)((0,r.Z)((0,s.Z)().mark((function t(){return(0,s.Z)().wrap((function(t){for(;;)switch(t.prev=t.next){case 0:return t.next=2,(0,x.y)();case 2:e.layoutOptions();case 3:case"end":return 
t.stop()}}),t)}))),500)}},{kind:"field",static:!0,key:"styles",value:function(){return[b.W,(0,m.iv)(a||(a=(0,o.Z)([".mdc-select:not(.mdc-select--disabled) .mdc-select__icon{color:var(--secondary-text-color)}.mdc-select__anchor{width:var(--ha-select-min-width,200px)}.mdc-select--filled .mdc-floating-label{inset-inline-start:12px;inset-inline-end:initial;direction:var(--direction)}.mdc-select--filled.mdc-select--with-leading-icon .mdc-floating-label{inset-inline-start:48px;inset-inline-end:initial;direction:var(--direction)}.mdc-select .mdc-select__anchor{padding-inline-start:12px;padding-inline-end:0px;direction:var(--direction)}.mdc-select__anchor .mdc-floating-label--float-above{transform-origin:var(--float-start)}"])))]}}]}}),y.K)},17184:function(e,t,i){i.r(t);var n,a,s,r,o,d,l,c,u,h=i(88962),v=i(33368),p=i(71650),f=i(82390),y=i(69205),b=i(70906),m=i(91808),_=(i(44577),i(68144)),k=i(79932),x=i(14516),g=i(32594),C=i(82526),Z=i(41826),w=i(22311),V=i(40095),A=i(68307),H=(i(57793),i(49400),i(81312),i(10983),i(86630),i(56007)),O=i(74186),j=i(2939),S=[{translationKey:"start",icon:"M8,5.14V19.14L19,12.14L8,5.14Z",serviceName:"start",isVisible:function(e){return(0,V.e)(e,j.Cv.START)}},{translationKey:"pause",icon:"M14,19H18V5H14M6,19H10V5H6V19Z",serviceName:"pause",isVisible:function(e){return(0,V.e)(e,j.Cv.PAUSE)&&((0,V.e)(e,j.Cv.STATE)||(0,V.e)(e,j.Cv.START))}},{translationKey:"start_pause",icon:"M3,5V19L11,12M13,19H16V5H13M18,5V19H21V5",serviceName:"start_pause",isVisible:function(e){return!(0,V.e)(e,j.Cv.STATE)&&!(0,V.e)(e,j.Cv.START)&&(0,V.e)(e,j.Cv.PAUSE)}},{translationKey:"stop",icon:"M18,18H6V6H18V18Z",serviceName:"stop",isVisible:function(e){return(0,V.e)(e,j.Cv.STOP)}},{translationKey:"clean_spot",icon:"M22.08,11.04H20.08V4H13.05V2H11.04V4H4V11.04H2V13.05H4V20.08H11.04V22.08H13.05V20.08H20.08V13.05H22.08V11.04M18.07,18.07H13.05V16.06H11.04V18.07H6V13.05H8.03V11.04H6V6H11.04V8.03H13.05V6H18.07V11.04H16.06V13.05H18.07V18.07M13.05,12.05A1,1 0 0,1 12.05,13.05C11.5,13.05 11.04,12.6 11.04,12.05C11.04,11.5 11.5,11.04 12.05,11.04C12.6,11.04 13.05,11.5 13.05,12.05Z",serviceName:"clean_spot",isVisible:function(e){return(0,V.e)(e,j.Cv.CLEAN_SPOT)}},{translationKey:"locate",icon:"M12,11.5A2.5,2.5 0 0,1 9.5,9A2.5,2.5 0 0,1 12,6.5A2.5,2.5 0 0,1 14.5,9A2.5,2.5 0 0,1 12,11.5M12,2A7,7 0 0,0 5,9C5,14.25 12,22 12,22C12,22 19,14.25 19,9A7,7 0 0,0 12,2Z",serviceName:"locate",isVisible:function(e){return(0,V.e)(e,j.Cv.LOCATE)}},{translationKey:"return_home",icon:"M12,3L2,12H5V20H19V12H22L12,3M12,7.7C14.1,7.7 15.8,9.4 15.8,11.5C15.8,14.5 12,18 12,18C12,18 8.2,14.5 8.2,11.5C8.2,9.4 9.9,7.7 12,7.7M12,10A1.5,1.5 0 0,0 10.5,11.5A1.5,1.5 0 0,0 12,13A1.5,1.5 0 0,0 13.5,11.5A1.5,1.5 0 0,0 12,10Z",serviceName:"return_to_base",isVisible:function(e){return(0,V.e)(e,j.Cv.RETURN_HOME)}}];(0,m.Z)([(0,k.Mo)("more-info-vacuum")],(function(e,t){var i=function(t){(0,y.Z)(n,t);var i=(0,b.Z)(n);function n(){var t;(0,p.Z)(this,n);for(var a=arguments.length,s=new Array(a),r=0;r<a;r++)s[r]=arguments[r];return t=i.call.apply(i,[this].concat(s)),e((0,f.Z)(t)),t}return(0,v.Z)(n)}(t);return{F:i,d:[{kind:"field",decorators:[(0,k.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,k.Cb)()],key:"stateObj",value:void 0},{kind:"method",key:"render",value:function(){var e=this;if(!this.hass||!this.stateObj)return _.Ld;var t=this.stateObj;return(0,_.dy)(n||(n=(0,h.Z)([" "," "," ",' <ha-attributes .hass="','" .stateObj="','" .extraFilters="','"></ha-attributes> '])),t.state!==H.nZ?(0,_.dy)(a||(a=(0,h.Z)([' <div 
class="flex-horizontal"> <div> <span class="status-subtitle">',": </span> <span> <strong> "," </strong> </span> </div> "," </div>"])),this.hass.localize("ui.dialogs.more_info_control.vacuum.status"),(0,V.e)(t,j.Cv.STATUS)&&t.attributes.status?(0,C.a)(this.hass.localize,t,this.hass.locale,this.hass.config,this.hass.entities,"status"):(0,Z.D1)(this.hass.localize,t,this.hass.locale,this.hass.config,this.hass.entities),this._renderBattery()):"",S.some((function(e){return e.isVisible(t)}))?(0,_.dy)(s||(s=(0,h.Z)([' <div> <p></p> <div class="status-subtitle"> ',' </div> <div class="flex-horizontal"> '," </div> </div> "])),this.hass.localize("ui.dialogs.more_info_control.vacuum.commands"),S.filter((function(e){return e.isVisible(t)})).map((function(i){return(0,_.dy)(r||(r=(0,h.Z)([' <div> <ha-icon-button .path="','" .entry="','" @click="','" .label="','" .disabled="','"></ha-icon-button> </div> '])),i.icon,i,e.callService,e.hass.localize("ui.dialogs.more_info_control.vacuum.".concat(i.translationKey)),t.state===H.nZ)}))):"",(0,V.e)(t,j.Cv.FAN_SPEED)?(0,_.dy)(o||(o=(0,h.Z)([' <div> <div class="flex-horizontal"> <ha-select .label="','" .disabled="','" .value="','" @selected="','" fixedMenuPosition naturalMenuWidth @closed="','"> ',' </ha-select> <div style="justify-content:center;align-self:center;padding-top:1.3em"> <span> <ha-svg-icon .path="','"></ha-svg-icon> '," </span> </div> </div> <p></p> </div> "])),this.hass.localize("ui.dialogs.more_info_control.vacuum.fan_speed"),t.state===H.nZ,t.attributes.fan_speed,this.handleFanSpeedChanged,g.U,t.attributes.fan_speed_list.map((function(i){return(0,_.dy)(d||(d=(0,h.Z)([' <mwc-list-item .value="','"> '," </mwc-list-item> "])),i,(0,C.a)(e.hass.localize,t,e.hass.locale,e.hass.config,e.hass.entities,"fan_speed",i))})),"M12,11A1,1 0 0,0 11,12A1,1 0 0,0 12,13A1,1 0 0,0 13,12A1,1 0 0,0 12,11M12.5,2C17,2 17.11,5.57 14.75,6.75C13.76,7.24 13.32,8.29 13.13,9.22C13.61,9.42 14.03,9.73 14.35,10.13C18.05,8.13 22.03,8.92 22.03,12.5C22.03,17 18.46,17.1 17.28,14.73C16.78,13.74 15.72,13.3 14.79,13.11C14.59,13.59 14.28,14 13.88,14.34C15.87,18.03 15.08,22 11.5,22C7,22 6.91,18.42 9.27,17.24C10.25,16.75 10.69,15.71 10.89,14.79C10.4,14.59 9.97,14.27 9.65,13.87C5.96,15.85 2,15.07 2,11.5C2,7 5.56,6.89 6.74,9.26C7.24,10.25 8.29,10.68 9.22,10.87C9.41,10.39 9.73,9.97 10.14,9.65C8.15,5.96 8.94,2 12.5,2Z",(0,C.a)(this.hass.localize,t,this.hass.locale,this.hass.config,this.hass.entities,"fan_speed")):"",this.hass,this.stateObj,"fan_speed,fan_speed_list,status,battery_level,battery_icon")}},{kind:"field",key:"_deviceEntities",value:function(){return(0,x.Z)((function(e,t){return Object.values(t).filter((function(t){return t.device_id===e}))}))}},{kind:"method",key:"_renderBattery",value:function(){var e,t=this.stateObj,i=null===(e=this.hass.entities[t.entity_id])||void 0===e?void 0:e.device_id,n=i?this._deviceEntities(i,this.hass.entities):[],a=(0,O.eD)(this.hass,n),s=a?this.hass.states[a.entity_id]:void 0,r=s&&"binary_sensor"===(0,w.N)(s);if(s&&(r||!isNaN(s.state))){var o=(0,O.wX)(this.hass,n),d=o?this.hass.states[null==o?void 0:o.entity_id]:void 0;return(0,_.dy)(l||(l=(0,h.Z)([" <div> <span> ",' <ha-battery-icon .hass="','" .batteryStateObj="','" .batteryChargingStateObj="','"></ha-battery-icon> </span> </div> '])),r?"":"".concat(Number(s.state).toFixed()).concat((0,A.K)(this.hass.locale),"%"),this.hass,s,d)}return(0,V.e)(t,j.Cv.BATTERY)&&t.attributes.battery_level?(0,_.dy)(c||(c=(0,h.Z)([" <div> <span> ","",'% <ha-icon .icon="','"></ha-icon> </span> </div> 
'])),t.attributes.battery_level.toFixed(),(0,A.K)(this.hass.locale),t.attributes.battery_icon):_.Ld}},{kind:"method",key:"callService",value:function(e){var t=e.target.entry;this.hass.callService("vacuum",t.serviceName,{entity_id:this.stateObj.entity_id})}},{kind:"method",key:"handleFanSpeedChanged",value:function(e){var t=this.stateObj.attributes.fan_speed,i=e.target.value;i&&t!==i&&this.hass.callService("vacuum","set_fan_speed",{entity_id:this.stateObj.entity_id,fan_speed:i})}},{kind:"get",static:!0,key:"styles",value:function(){return(0,_.iv)(u||(u=(0,h.Z)([":host{line-height:1.5}.status-subtitle{color:var(--secondary-text-color)}.flex-horizontal{display:flex;flex-direction:row;justify-content:space-between}"])))}}]}}),_.oi)}}]);
//# sourceMappingURL=17184-dXEh9TiKARE.js.map | PypiClean |
/qcache-0.9.3.tar.gz/qcache-0.9.3/CHANGELOG.rst | Changelog
=========
0.9.3 (2019-01-05)
------------------
* Update dependencies on lz4 and tornado
* Allow float type hinting
0.9.2 (-)
------------------
* Never released
0.9.1 (2017-11-15)
------------------
* Downgrade to Pandas 0.20.3 in an attempt to fix #14.
0.9.0 (2017-11-14)
------------------
* Numexpr filter engine is not available anymore, only Pandas. Numexpr is no longer a requirement of qcache.
NB! Major backwards incompatibility
* Fix #12, like now ignores NaN
* Fix #13, only empty string is considered as NaN when reading CSV
* Fix #8, integer standins remain integers
* Upgrade Pandas to 0.21.0 and Numpy to 0.13.1
0.8.1 (2017-04-06)
------------------
* Bump Pandas to 0.19.2
0.8.0 (2017-01-08)
------------------
* Support client cert verification
0.7.2 (2016-12-18)
------------------
* Fix #10 & #11, minor statistics improvements
0.7.1 (2016-11-30)
------------------
* Fix #9, df overwritten by mistake
0.7.0 (2016-11-09)
------------------
* Compression using LZ4 or GZIP in requests and responses (#3)
* Sub queries in "in" filter (#7)
* Enum type based on Pandas category type (#6)
* Support for stand in columns in queries (#5)
* Additional metrics/statistics for complete request duration for stores and queries
* Update size estimates to do deep inspection of objects contained in dataframe. This should
be more accurate than the previous shallow inspection.
* Update Pandas to 0.19.1
* Update Tornado to 4.4.2
0.6.1 (2016-09-18)
------------------
* Fix packaging, the new qcache.qframe package was missing from the submitted package.
0.6.0 (2016-09-18)
------------------
* New filter engine based on Pandas rather than Numexpr. This enables new types of filters in the where
clause (see below). By default the old engine is still used but the new one can be enabled either
by default on server startup or on a per-query basis by setting the new 'X-QCache-filter-engine' header
to 'pandas'.
* New bitwise filters in the 'pandas' filter engine, 'all_bits' and 'any_bits'.
* New string filters, 'like' and 'ilike' which corresponds roughly to LIKE in SQL with the addition
of regex support. 'like' is case sensitive while 'ilike' is case insensitive.
0.5.0 (2016-06-19)
------------------
* New header when uploading data, 'X-QCache-stand-in-columns', that let you specify default values
for columns that may not be present in the uploaded data.
0.4.2 (2016-06-04)
------------------
* Additional statistics to measure for how long data remains in the cache before it's evicted.
* Bump dependency versions of Pandas, Numexpr and Tornado.
0.4.1 (2016-01-31)
------------------
* Provide the duration for which statistics were collected and statistics buffer size
0.4.0 (2016-01-24)
------------------
* Sub query support with new 'from' clause
* Column aliasing + support for calculated columns
* Error message improvements
0.3.0 (2015-12-23)
------------------
* Accept conjunctions and disjunctions with only one clause
* Accept POST queries, good for large queries
* Improved performance for "in" queries, up to 30x faster for large lists
0.2.1 (2015-12-15)
------------------
* More efficient cache size tracking
* Check against unknown query clauses
0.2.0 (2015-12-06)
------------------
* Report the unsliced result length as part of the result, nice for pagination for example
* Use connection pooling
* SSL and basic auth support
0.1.0 (2015-10-25)
------------------
* First release that actually does something sensible.
0.0.1 (2015-10-15)
------------------
* First release on PyPI.
| PypiClean |
/python-ucam-webauth-0.9.2.tar.gz/python-ucam-webauth-0.9.2/docs/source/security.rst | Security
========
.. _checking-response-values:
Checking response values
------------------------
You *must* check the `url`, `issue`, `auth` and `sso` attributes of the
response:
* check that `url` matches the current URL being requested / is what you
expect.
Not checking `url` will allow another evil website administrator to replay
responses produced by Raven log-ins to her website to yours, thereby
impersonating someone else.
(Using params as a token (below) doesn't help, since the attacker can
obtain a matching `(cookie, params)` pair from you first, and then ask
the victim to authenticate with `params` set to that value.)
Some frameworks, notably Werkzeug, deduce the current hostname from
the `Host` or `X-Forwarded-Host` headers (with the latter taking
precedence).
.. seealso::
`werkzeug#609 <https://github.com/mitsuhiko/werkzeug/issues/609>`_ and
`issue 5 <https://github.com/danielrichman/python-ucam-webauth/issues/5>`_
This technique may be used to whitelist domains in Flask::
class R(flask.Request):
trusted_hosts = {'www.danielrichman.co.uk'}
app.request_class = R
Alternatively, you could sanitise `Host` and `X-Forwarded-Host` in your
web-server.
If you might have query parameters in your `url`, you need to take care to
handle negative respones from the WLS. See :ref:`cancel_url`.
* check `issue` is within an acceptable range of *now*
  ... lest someone replay an old response to log in again (a minimal sketch of this check follows this list)
* check `auth` and `sso` match `iact` and `aauth`
see :meth:`ucam_webauth.Response.check_iact_aauth`
Not checking `iact`/`aauth` will allow those restrictions to be bypassed
by crafting a custom request to the WLS.
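A minimal sketch of the ``issue`` freshness check mentioned above, assuming
``response.issue`` is a naive UTC ``datetime`` (verify this against your
version); the 30-second window is an arbitrary choice, not a protocol
requirement, and ``abort`` stands in for however your framework rejects a
request::

    from datetime import datetime, timedelta

    now = datetime.utcnow()
    if not now - timedelta(seconds=30) <= response.issue <= now + timedelta(seconds=5):
        abort(403)  # stale (or future-dated) WLS response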
Using params as a token
-----------------------
You might like to set a random nonce in the Request's `params`, save
a hashed (with secret salt) or signed copy in a cookie, and check that they
match in the `Response`.
This is *not* a substitute for any of the checks above, but does make the
`WLS-Response` values in your web server access logs useless.
:class:`ucam_webauth.flask_glue.AuthDecorator` does this.
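A minimal sketch of the pattern (``signer``, ``set_cookie``, ``get_cookie``
and ``abort`` are illustrative stand-ins, not part of this library)::

    import os
    from itsdangerous import URLSafeSerializer, BadSignature

    signer = URLSafeSerializer(SECRET_KEY)

    # before redirecting to the WLS
    nonce = os.urandom(16).hex()
    request = ucam_webauth.Request(url, params=nonce)
    set_cookie("wls_nonce", signer.dumps(nonce))

    # when handling the WLS-Response
    try:
        expected = signer.loads(get_cookie("wls_nonce"))
    except BadSignature:
        abort(403)
    if response.params != expected:
        abort(403)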
Signing keys
------------
The keys used by Raven to sign responses are included with
`python-ucam-webauth`. I took care in retrieving them, however you should trust
neither me nor the method by which you installed this package.
*You should check that the copies of the certificates you have are
correct / match the files at the links below* (and audit the code you've
just installed, I guess).
* ``pubkey2`` from `<https://raven.cam.ac.uk/project/keys/>`_
* ``pubkey901`` from `<https://raven.cam.ac.uk/project/keys/demo_server/>`_
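A low-tech way to make that comparison, assuming you have downloaded fresh
copies of the key files from the links above into the current directory::

    import hashlib

    for name in ("pubkey2", "pubkey901"):
        with open(name, "rb") as f:
            print(name, hashlib.sha256(f.read()).hexdigest())
    # the digests must match those of the copies shipped with this package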
| PypiClean |
/scoper-1.1.0.tar.gz/scoper-1.1.0/scoper.py |
import json
import re
import sys
from urllib.parse import urlparse
class tcol:
'''tcol: contains terminal color codes; always close a colorized string by appending "tcol.RESET"'''
GREEN = '\033[92m'
RED = '\033[91m'
RESET = '\033[0m'
class ScoperCore:
def __init__(self, config="", url="", urls=[]):
'''__init__: provide a Burp configuration and a single URL or list of URLs to check'''
self.config = config
if type(self.config).__name__ == "str": # if provided a path to the burp config
with open(self.config, "r") as cnf:
self.conf = json.load(cnf)
cnf.close()
elif type(self.config).__name__ == "dict": # if directly provided the burp config as a dict
self.conf = self.config
self.url = url # provided string
self.urls = urls # provided list
def processUrl(self, conf, clude, url):
'''processUrl(): checks provided URL against the "include" and "exclude" sections of the Burp configuration file; returns the same URL if it meets the criteria, otherwise it returns an empty string'''
url = url.strip()
urlp = urlparse(url)
u = urlp.netloc # domain portion of URL
        if urlp.port:
            u = u.split(":")[0]
            pt = str(urlp.port) # port is checked as a regex string in the Burp config
        elif url.startswith("http://"):
            pt = "80"
        else:
            pt = "443" # default to the HTTPS port when no explicit port or scheme is given
p = urlp.path # path portion of URL
s = urlp.scheme # http/https portion of URL
if len(p) == 0:
p = "/"
if len(s) == 0:
s = "https" # default to HTTPS
if clude in conf["target"]["scope"]:
for x in conf["target"]["scope"][clude]:
#
#
# track a pass/fail score; unanimous assessment required for result
score = []
if "protocol" in x:
if x["protocol"] != s:
#print("FAILED PROTOCOL CHECK:", clude, s, "requires:", x["protocol"], "\t\t\t\turl:", url)
#return("")
score.append("fail")
else:
score.append("pass")
if "host" in x:
if not re.match(x["host"], u, re.DOTALL):
#print("FAILED HOST CHECK:", clude, u, "requires:", x["host"], "\turl:", url)
#return("")
score.append("fail")
else:
score.append("pass")
if "file" in x: # burp file = url path
if not re.match(x["file"], p, re.DOTALL):
#print("FAILED FILE CHECK:", clude, p, "requires:", x["file"], "\t\t\turl:", url)
#return("")
score.append("fail")
else:
score.append("pass")
if "port" in x:
if not re.match(x["port"], pt, re.DOTALL):
#print("FAILED PORT CHECK:", clude, pt, "requires:", x["port"], "\t\t\t\t\turl:", url)
#return("")
score.append("fail")
else:
score.append("pass")
#
#
# check the score
#
# include pass = is included
# include fail = is not included (so "not" this condition / all passes = hit)
# exclude fail = is included (so "not" this condition / all passes = hit)
# exclude pass = is not included
#
if "fail" not in score:
return(url)
else:
pass
return("") # pending all checks and no results, fail closed
def __repr__(self):
'''__repr__(): returns the result'''
return(self.output)
class ScoperSingle(ScoperCore):
def __init__(self, config="", url="", urls=[]):
'''__init__: initialize variables to check a single URLs'''
super().__init__(config, url, urls) # inherit from ScoperCore
self.output = {}
self.check() # populates self.output
self.json = json.dumps(self.output)
self.color() # sets self.color
def check(self):
'''check(): compares the input URL to the include/exclude logic, and determines if the URL is in scope or not'''
i = self.processUrl(self.conf, "include", self.url)
if len(i) > 0:
e = self.processUrl(self.conf, "exclude", i)
if i != e:
self.output = {'scope':'inside', 'url':i}
else:
self.output = {'scope':'outside', 'url':i}
else:
self.output = {'scope':'outside', 'url':self.url} # default to outside as a precaution
def color(self):
'''color(): sets self.color as a colorized string representation of self.output'''
if "scope" in self.output:
if self.output["scope"] == "inside":
self.color = str(tcol.GREEN+"scope-inside"+tcol.RESET+"\t"+self.output["url"])
if self.output["scope"] == "outside":
self.color = str(tcol.RED+"scope-outside"+tcol.RESET+"\t"+self.output["url"])
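# Example usage of ScoperSingle (a minimal sketch; the config path and URL
# are illustrative):
#
#   single = ScoperSingle(config="burp.json", url="https://example.com/login")
#   print(single.output)  # {'scope': 'inside', 'url': 'https://example.com/login'}
#   print(single.json)
#   print(single.color)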
class ScoperList(ScoperCore):
def __init__(self, config="", url="", urls=[]):
'''__init__: initialize variables to check a list of URLs'''
super().__init__(config, url, urls) # inherit from ScoperCore
self.output = {'scope':{'inside':[], 'outside':[]}}
self.check() # populates self.output
self.json = json.dumps(self.output)
self.color = self.color()
def check(self):
'''check(): compares the input URL to the include/exclude logic, and determines if the URL is in scope or not'''
for url in self.urls:
i = self.processUrl(self.conf, "include", url)
if len(i) > 0:
e = self.processUrl(self.conf, "exclude", i)
if i != e:
self.output['scope']['inside'].append(i)
else:
self.output['scope']['outside'].append(i)
else:
self.output['scope']['outside'].append(url) # default to outside as a precaution
def color(self):
'''color(): sets self.color as a colorized string representation of self.output'''
cc = ""
for i in self.output['scope']['inside']:
cc += str(tcol.GREEN+"scope-inside"+tcol.RESET+"\t"+i+"\n")
for i in self.output['scope']['outside']:
cc += str(tcol.RED+"scope-outside"+tcol.RESET+"\t"+i+"\n")
return(cc)
def output_generator(self):
'''output_generator(): this generator function produces output similar to ScoperSingle, ex. {'scope':'inside', 'url':'xyz'} for sake of consistency'''
t = []
for i in self.output['scope']['inside']:
t.append({'scope':'inside', 'url':i})
for i in self.output['scope']['outside']:
t.append({'scope':'outside', 'url':i})
for x in t:
yield(x)
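    # Example usage of ScoperList (a minimal sketch; the config path and URLs
    # are illustrative):
    #
    #   sl = ScoperList(config="burp.json", urls=["https://a.example/x", "https://b.example/y"])
    #   for hit in sl.output_generator():
    #       print(hit)  # {'scope': 'inside' | 'outside', 'url': ...}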
def json_generator(self):
'''json_generator(): generator function uses output_generator() to dump JSON strings like ScoperSingle, ex. {"scope":"inside", "url":"xyz"} for sake of consistency'''
for i in self.output_generator():
yield(json.dumps(i)) | PypiClean |
/CAMELS_library-0.3.tar.gz/CAMELS_library-0.3/scripts/autoencoder/architecture.py | import torch
import torch.nn as nn
def weights_init(m):
"""custom weights initialization
"""
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
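# Example usage (a minimal sketch): apply the custom initializer recursively
# to a freshly constructed network
#
#   net = autoencoder_64a(BN_dim=500, hidden=64)
#   net.apply(weights_init)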
####################################################################################
####################################################################################
class autoencoder_64a(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64a, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx32x32
self.C1 = nn.Conv2d(1, hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx32x32 ----------> output: 2*hiddenx16x16
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 --------> output: 4*hiddenx8x8
self.C3 = nn.Conv2d(2*hidden, 4*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B3 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ----------> output: 8*hiddenx4x4
self.C4 = nn.Conv2d(4*hidden, 8*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B4 = nn.BatchNorm2d(8*hidden)
        # input: 8*hiddenx4x4 ----------> output: BN_dimx1x1
self.C5 = nn.Conv2d(8*hidden, BN_dim, kernel_size=6, stride=1, padding=1,
bias=True)
self.B5 = nn.BatchNorm2d(BN_dim)
##### bottleneck #####
self.FC1 = nn.Linear(500, BN_dim) #from C5 to bottleneck
self.FC2 = nn.Linear(BN_dim, 500) #from bottleneck to C6
##### second, expanding part #####
        # input: BN_dimx1x1 ------------> output: 8*hiddenx4x4
self.C6 = nn.ConvTranspose2d(BN_dim, 8*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(8*hidden)
# input: 8*hiddenx4x4 ------------> output: 4*hiddenx8x8
self.C7 = nn.ConvTranspose2d(8*hidden, 4*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ------------> output: 2*hiddenx16x16
self.C8 = nn.ConvTranspose2d(4*hidden, 2*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B8 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 ----------> output: hiddenx32x32
self.C9 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B9 = nn.BatchNorm2d(hidden)
# input: 1*hiddenx32x32 ----------> output: 1x64x64
self.C10 = nn.ConvTranspose2d(hidden, 1, kernel_size=4, stride=2,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.C1(image))
x = self.LeakyReLU(self.B2(self.C2(x)))
x = self.LeakyReLU(self.B3(self.C3(x)))
x = self.LeakyReLU(self.B4(self.C4(x)))
#x = self.LeakyReLU(self.B5(self.C5(x)))
x = self.LeakyReLU(self.C5(x))
        #inner_cnn = x.shape
#x = x.view(inner_cnn[0],-1)
#x = self.LeakyReLU(self.FC1(x))
#x = self.LeakyReLU(self.FC2(x))
#x = x.view(inner_cnn[0],inner_cnn[1],inner_cnn[2],inner_cnn[3])
#x = self.LeakyReLU(self.B6(self.C6(x)))
x = self.LeakyReLU(self.C6(x))
x = self.LeakyReLU(self.B7(self.C7(x)))
x = self.LeakyReLU(self.B8(self.C8(x)))
x = self.LeakyReLU(self.B9(self.C9(x)))
x = self.tanh(self.C10(x))
return x
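# Example usage (a minimal sketch; batch size, BN_dim and hidden width are
# arbitrary):
#
#   model = autoencoder_64a(BN_dim=500, hidden=64)
#   out = model(torch.randn(8, 1, 64, 64))  # -> (8, 1, 64, 64)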
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64c(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64c, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx32x32
self.C1 = nn.Conv2d(1, hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx32x32 ----------> output: 2*hiddenx16x16
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 --------> output: 4*hiddenx8x8
self.C3 = nn.Conv2d(2*hidden, 4*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B3 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ----------> output: 8*hiddenx4x4
self.C4 = nn.Conv2d(4*hidden, 8*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B4 = nn.BatchNorm2d(8*hidden)
        # input: 8*hiddenx4x4 ----------> output: BN_dimx1x1
self.C5 = nn.Conv2d(8*hidden, BN_dim, kernel_size=6, stride=1, padding=1,
bias=True)
self.B5 = nn.BatchNorm2d(BN_dim)
##### bottleneck #####
self.FC1 = nn.Linear(500, BN_dim) #from C5 to bottleneck
self.FC2 = nn.Linear(BN_dim, 500) #from bottleneck to C6
##### second, expanding part #####
        # input: BN_dimx1x1 ------------> output: 8*hiddenx4x4
self.C6 = nn.ConvTranspose2d(BN_dim, 8*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(8*hidden)
# input: 8*hiddenx4x4 ------------> output: 4*hiddenx8x8
self.C7 = nn.ConvTranspose2d(8*hidden, 4*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ------------> output: 2*hiddenx16x16
self.C8 = nn.ConvTranspose2d(4*hidden, 2*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B8 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 ----------> output: hiddenx32x32
self.C9 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B9 = nn.BatchNorm2d(hidden)
# input: 1*hiddenx32x32 ----------> output: 1x64x64
self.C10 = nn.ConvTranspose2d(hidden, 1, kernel_size=4, stride=2,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.C1(image))
x = self.LeakyReLU(self.C2(x))
x = self.LeakyReLU(self.C3(x))
x = self.LeakyReLU(self.C4(x))
x = self.LeakyReLU(self.C5(x))
#inner_cnn = x.shape
#x = x.view(inner_cnn[0],-1)
#x = self.LeakyReLU(self.FC1(x))
#x = self.LeakyReLU(self.FC2(x))
#x = x.view(inner_cnn[0],inner_cnn[1],inner_cnn[2],inner_cnn[3])
#x = self.LeakyReLU(self.B6(self.C6(x)))
x = self.LeakyReLU(self.C6(x))
x = self.LeakyReLU(self.C7(x))
x = self.LeakyReLU(self.C8(x))
x = self.LeakyReLU(self.C9(x))
x = self.tanh(self.C10(x))
return x
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64d(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64d, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx32x32
self.C1 = nn.Conv2d(1, hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx32x32 ----------> output: 2*hiddenx16x16
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 --------> output: 4*hiddenx8x8
self.C3 = nn.Conv2d(2*hidden, 4*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B3 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ----------> output: 8*hiddenx4x4
self.C4 = nn.Conv2d(4*hidden, 8*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B4 = nn.BatchNorm2d(8*hidden)
# input: 8*hiddenx4x4 ----------> output: 1000x1x1
self.C5 = nn.Conv2d(8*hidden, 1000, kernel_size=6, stride=1, padding=1,
bias=True)
self.B5 = nn.BatchNorm2d(BN_dim)
##### bottleneck #####
self.FC1 = nn.Linear(1000, 500)
self.FC2 = nn.Linear(500, 250)
self.FC3 = nn.Linear(250, BN_dim)
self.FC4 = nn.Linear(BN_dim, 250)
self.FC5 = nn.Linear(250, 500)
self.FC6 = nn.Linear(500, 1000)
##### second, expanding part #####
# input: 1000x1x1 ------------> output: 8*hiddenx4x4
self.C6 = nn.ConvTranspose2d(1000, 8*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(8*hidden)
# input: 8*hiddenx4x4 ------------> output: 4*hiddenx8x8
self.C7 = nn.ConvTranspose2d(8*hidden, 4*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ------------> output: 2*hiddenx16x16
self.C8 = nn.ConvTranspose2d(4*hidden, 2*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B8 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 ----------> output: hiddenx32x32
self.C9 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B9 = nn.BatchNorm2d(hidden)
# input: 1*hiddenx32x32 ----------> output: 1x64x64
self.C10 = nn.ConvTranspose2d(hidden, 1, kernel_size=4, stride=2,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.C1(image))
x = self.LeakyReLU(self.B2(self.C2(x)))
x = self.LeakyReLU(self.B3(self.C3(x)))
x = self.LeakyReLU(self.B4(self.C4(x)))
x = self.LeakyReLU(self.C5(x))
inner_cnn = x.shape
x = x.view(inner_cnn[0],-1)
x = self.LeakyReLU(self.FC1(x))
x = self.LeakyReLU(self.FC2(x))
x = self.LeakyReLU(self.FC3(x))
x = self.LeakyReLU(self.FC4(x))
x = self.LeakyReLU(self.FC5(x))
x = self.LeakyReLU(self.FC6(x))
x = x.view(inner_cnn[0],inner_cnn[1],inner_cnn[2],inner_cnn[3])
x = self.LeakyReLU(self.C6(x))
x = self.LeakyReLU(self.B7(self.C7(x)))
x = self.LeakyReLU(self.B8(self.C8(x)))
x = self.LeakyReLU(self.B9(self.C9(x)))
x = self.tanh(self.C10(x))
return x
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64e(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64e, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx16x16
self.C1 = nn.Conv2d(1, hidden, kernel_size=6, stride=4, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx16x16 ----------> output: 2*hiddenx4x4
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=6, stride=4, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx4x4 --------> output: BN_dimx1x1
self.C3 = nn.Conv2d(2*hidden, BN_dim, kernel_size=6, stride=4, padding=1,
bias=True)
        self.B3 = nn.BatchNorm2d(BN_dim)  # must match C3's BN_dim output channels
##### bottleneck #####
        self.FC1 = nn.Linear(500, BN_dim) #from encoder to bottleneck (unused in forward)
        self.FC2 = nn.Linear(BN_dim, 500) #from bottleneck to decoder (unused in forward)
##### second, expanding part #####
# input: BN_dimx1x1 ------------> output: 2*hiddenx4x4
self.C6 = nn.ConvTranspose2d(BN_dim, 2*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx4x4 ------------> output: hiddenx16x16
self.C7 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=6, stride=4,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(1*hidden)
        # input: 1*hiddenx16x16 ------------> output: 1x64x64
self.C8 = nn.ConvTranspose2d(1*hidden, 1, kernel_size=6, stride=4,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.C1(image))
x = self.LeakyReLU(self.B2(self.C2(x)))
x = self.LeakyReLU(self.C3(x))
#inner_cnn = x.shape
#x = x.view(inner_cnn[0],-1)
#x = self.LeakyReLU(self.FC1(x))
#x = self.LeakyReLU(self.FC2(x))
#x = x.view(inner_cnn[0],inner_cnn[1],inner_cnn[2],inner_cnn[3])
#x = self.LeakyReLU(self.B6(self.C6(x)))
x = self.LeakyReLU(self.C6(x))
x = self.LeakyReLU(self.B7(self.C7(x)))
x = self.tanh(self.C8(x))
return x
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64f(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64f, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx16x16
self.C1 = nn.Conv2d(1, hidden, kernel_size=6, stride=4, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx16x16 ----------> output: 2*hiddenx4x4
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=6, stride=4, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx4x4 --------> output: BN_dimx1x1
self.C3 = nn.Conv2d(2*hidden, BN_dim, kernel_size=6, stride=4, padding=1,
bias=True)
self.B3 = nn.BatchNorm2d(BN_dim)
##### bottleneck #####
        self.FC1 = nn.Linear(500, BN_dim) #from encoder to bottleneck (unused in forward)
        self.FC2 = nn.Linear(BN_dim, 500) #from bottleneck to decoder (unused in forward)
##### second, expanding part #####
# input: BN_dimx1x1 ------------> output: 2*hiddenx4x4
self.C6 = nn.ConvTranspose2d(BN_dim, 2*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx4x4 ------------> output: hiddenx16x16
self.C7 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=6, stride=4,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(1*hidden)
        # input: 1*hiddenx16x16 ------------> output: 1x64x64
self.C8 = nn.ConvTranspose2d(1*hidden, 1, kernel_size=6, stride=4,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.B1(self.C1(image)))
x = self.LeakyReLU(self.B2(self.C2(x)))
x = self.LeakyReLU(self.B3(self.C3(x)))
#inner_cnn = x.shape
#x = x.view(inner_cnn[0],-1)
#x = self.LeakyReLU(self.FC1(x))
#x = self.LeakyReLU(self.FC2(x))
#x = x.view(inner_cnn[0],inner_cnn[1],inner_cnn[2],inner_cnn[3])
#x = self.LeakyReLU(self.B6(self.C6(x)))
x = self.LeakyReLU(self.B6(self.C6(x)))
x = self.LeakyReLU(self.B7(self.C7(x)))
x = self.tanh(self.C8(x))
return x
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64g(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64g, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx16x16
self.C1 = nn.Conv2d(1, hidden, kernel_size=6, stride=4, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx16x16 ----------> output: 2*hiddenx4x4
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=6, stride=4, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx4x4 --------> output: BN_dimx1x1
self.C3 = nn.Conv2d(2*hidden, BN_dim, kernel_size=6, stride=4, padding=1,
bias=True)
self.B3 = nn.BatchNorm2d(BN_dim)
##### bottleneck #####
        self.FC1 = nn.Linear(500, BN_dim) #from encoder to bottleneck (unused in forward)
        self.FC2 = nn.Linear(BN_dim, 500) #from bottleneck to decoder (unused in forward)
##### second, expanding part #####
# input: BN_dimx1x1 ------------> output: 2*hiddenx4x4
self.C6 = nn.ConvTranspose2d(BN_dim, 2*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx4x4 ------------> output: hiddenx16x16
self.C7 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=6, stride=4,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(1*hidden)
        # input: 1*hiddenx16x16 ------------> output: 1x64x64
self.C8 = nn.ConvTranspose2d(1*hidden, 1, kernel_size=6, stride=4,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.C1(image))
x = self.LeakyReLU(self.C2(x))
x = self.LeakyReLU(self.C3(x))
x = self.LeakyReLU(self.C6(x))
x = self.LeakyReLU(self.C7(x))
x = self.tanh(self.C8(x))
return x
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64h(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64h, self).__init__()
##### first, contracting part #####
# input: 1x64x64 ---------------> output: hiddenx32x32
self.C1 = nn.Conv2d(1, hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B1 = nn.BatchNorm2d(hidden)
# input: hiddenx32x32 ----------> output: 2*hiddenx16x16
self.C2 = nn.Conv2d(hidden, 2*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B2 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 --------> output: 4*hiddenx8x8
self.C3 = nn.Conv2d(2*hidden, 4*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B3 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ----------> output: 8*hiddenx4x4
self.C4 = nn.Conv2d(4*hidden, 8*hidden, kernel_size=4, stride=2, padding=1,
bias=True)
self.B4 = nn.BatchNorm2d(8*hidden)
# input: 8*hiddenx4x4 ----------> output: 1000x1x1
self.C5 = nn.Conv2d(8*hidden, 1000, kernel_size=6, stride=1, padding=1,
bias=True)
self.B5 = nn.BatchNorm2d(1000)
##### bottleneck #####
self.FC1 = nn.Linear(1000, BN_dim) #from C5 to bottleneck
self.FC2 = nn.Linear(BN_dim, 1000) #from bottleneck to C6
##### second, expanding part #####
# input: 1000x1x1 ------------> output: 8*hiddenx4x4
self.C6 = nn.ConvTranspose2d(1000, 8*hidden, kernel_size=4, stride=1,
padding=0, bias=True)
self.B6 = nn.BatchNorm2d(8*hidden)
# input: 8*hiddenx4x4 ------------> output: 4*hiddenx8x8
self.C7 = nn.ConvTranspose2d(8*hidden, 4*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B7 = nn.BatchNorm2d(4*hidden)
# input: 4*hiddenx8x8 ------------> output: 2*hiddenx16x16
self.C8 = nn.ConvTranspose2d(4*hidden, 2*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B8 = nn.BatchNorm2d(2*hidden)
# input: 2*hiddenx16x16 ----------> output: hiddenx32x32
self.C9 = nn.ConvTranspose2d(2*hidden, 1*hidden, kernel_size=4, stride=2,
padding=1, bias=True)
self.B9 = nn.BatchNorm2d(hidden)
# input: 1*hiddenx32x32 ----------> output: 1x64x64
self.C10 = nn.ConvTranspose2d(hidden, 1, kernel_size=4, stride=2,
padding=1, bias=True)
self.dropout = nn.Dropout(p=0.5)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = self.LeakyReLU(self.C1(image))
x = self.LeakyReLU(self.B2(self.C2(x)))
x = self.LeakyReLU(self.B3(self.C3(x)))
x = self.LeakyReLU(self.B4(self.C4(x)))
x = self.LeakyReLU(self.C5(x))
dims = x.shape
x = x.view(dims[0],-1)
x = self.LeakyReLU(self.FC1(x))
x = self.LeakyReLU(self.FC2(x))
x = x.view(dims[0],dims[1],dims[2],dims[3])
x = self.LeakyReLU(self.C6(x))
x = self.LeakyReLU(self.B7(self.C7(x)))
x = self.LeakyReLU(self.B8(self.C8(x)))
x = self.LeakyReLU(self.B9(self.C9(x)))
x = self.tanh(self.C10(x))
return x
####################################################################################
####################################################################################
####################################################################################
####################################################################################
class autoencoder_64b(nn.Module):
def __init__(self, BN_dim, hidden):
super(autoencoder_64b, self).__init__()
##### first, contracting part #####
self.FC1 = nn.Linear(64*64, 1000)
self.FC2 = nn.Linear(1000, 500)
self.FC3 = nn.Linear(500, 250)
self.FC4 = nn.Linear(250, BN_dim)
##### second, expanding part #####
self.FC5 = nn.Linear(BN_dim, 250)
self.FC6 = nn.Linear(250, 500)
self.FC7 = nn.Linear(500, 1000)
self.FC8 = nn.Linear(1000, 64*64)
self.dropout = nn.Dropout(p=0.3)
self.ReLU = nn.ReLU()
self.LeakyReLU = nn.LeakyReLU(0.2)
self.tanh = nn.Tanh()
"""
for m in self.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 1)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight)
"""
def forward(self, image):
x = image.view(image.size(0),-1)
x = self.LeakyReLU(self.FC1(x))
x = self.LeakyReLU(self.FC2(x))
x = self.LeakyReLU(self.FC3(x))
x = self.LeakyReLU(self.FC4(x))
x = self.LeakyReLU(self.FC5(x))
x = self.LeakyReLU(self.FC6(x))
x = self.LeakyReLU(self.FC7(x))
x = self.tanh(self.FC8(x))
x = x.view(image.size(0),image.size(1),image.size(2),image.size(3))
return x
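
# Usage sketch (illustrative; not part of the original source). This fully
# connected variant flattens each 1x64x64 map, squeezes it through a
# BN_dim-unit bottleneck, and reshapes the reconstruction to the input shape.
# Note that the `hidden` argument is accepted but unused by this class.
#
#   model = autoencoder_64b(BN_dim=100, hidden=64)
#   out = model(torch.randn(8, 1, 64, 64))  # -> (8, 1, 64, 64), values in (-1, 1)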
####################################################################################
####################################################################################
# ConvTranspose2d(channels_in, channels_out, kernel, stride, padding)
class Generator_64(nn.Module):
def __init__(self, Z_DIM, G_HIDDEN):
super(Generator_64, self).__init__()
self.main = nn.Sequential(
# 1st layer (input: 100x1x1 ----> output: 512x4x4)
nn.ConvTranspose2d(Z_DIM, G_HIDDEN * 8, 4, 1, 0, bias=False),
nn.BatchNorm2d(G_HIDDEN * 8),
nn.ReLU(True),
# 2nd layer (input: 512x4x4 ----> output: 256x8x8)
nn.ConvTranspose2d(G_HIDDEN * 8, G_HIDDEN * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(G_HIDDEN * 4),
nn.ReLU(True),
# 3rd layer (input: 256x8x8 ----> output: 128x16x16)
nn.ConvTranspose2d(G_HIDDEN * 4, G_HIDDEN * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(G_HIDDEN * 2),
nn.ReLU(True),
# 4th layer (input: 128x16x16 ----> output: 64x32x32)
nn.ConvTranspose2d(G_HIDDEN * 2, G_HIDDEN, 4, 2, 1, bias=False),
nn.BatchNorm2d(G_HIDDEN),
nn.ReLU(True),
# output layer (input: 64x32x32 ----> 1x64x64)
nn.ConvTranspose2d(G_HIDDEN, 1, 4, 2, 1, bias=False),
nn.Tanh()
)
def forward(self, input):
return self.main(input)
#Conv2d(channels_in, channels_out, kernel, stride, padding)
class Discriminator_64(nn.Module):
def __init__(self, D_HIDDEN):
super(Discriminator_64, self).__init__()
self.main = nn.Sequential(
# 1st layer (input: 1x64x64 ----> output: 64x32x32)
nn.Conv2d(1, D_HIDDEN, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# 2nd layer (input: 64x32x32 ----> output: 128x16x16)
nn.Conv2d(D_HIDDEN, D_HIDDEN * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(D_HIDDEN * 2),
nn.LeakyReLU(0.2, inplace=True),
# 3rd layer (input: 128x16x16 ----> output: 256x8x8)
nn.Conv2d(D_HIDDEN * 2, D_HIDDEN * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(D_HIDDEN * 4),
nn.LeakyReLU(0.2, inplace=True),
# 4th layer (input: 256x8x8 ----> output: 512x4x4)
nn.Conv2d(D_HIDDEN * 4, D_HIDDEN * 8, 4, 2, 1, bias=False),
nn.BatchNorm2d(D_HIDDEN * 8),
nn.LeakyReLU(0.2, inplace=True),
# output layer (input: 512x4x4 ----> output: 1x1x1)
nn.Conv2d(D_HIDDEN * 8, 1, 4, 1, 0, bias=False),
nn.Sigmoid() #Returns a number from 0 to 1: probability
)
def forward(self, input):
return self.main(input).view(-1, 1).squeeze(1)
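
# Usage sketch (illustrative; not part of the original source): the 64x64
# DCGAN pair maps Z_DIM-dimensional noise vectors to 1x64x64 images, and
# images to a per-sample real/fake probability.
#
#   netG = Generator_64(Z_DIM=100, G_HIDDEN=64)
#   netD = Discriminator_64(D_HIDDEN=64)
#   fake = netG(torch.randn(16, 100, 1, 1))  # -> (16, 1, 64, 64)
#   prob = netD(fake)                        # -> (16,) probabilities in [0, 1]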
# ConvTranspose2d(channels_in, channels_out, kernel, stride, padding)
class Generator_128(nn.Module):
def __init__(self, Z_DIM, G_HIDDEN):
super(Generator_128, self).__init__()
self.main = nn.Sequential(
# 1st layer (input: 100x1x1 ----> output: 512x8x8)
nn.ConvTranspose2d(Z_DIM, G_HIDDEN * 8, 8, 1, 0, bias=False),
nn.BatchNorm2d(G_HIDDEN * 8),
nn.ReLU(True),
# 2nd layer (input: 512x8x8 ----> output: 256x16x16)
nn.ConvTranspose2d(G_HIDDEN * 8, G_HIDDEN * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(G_HIDDEN * 4),
nn.ReLU(True),
# 3rd layer (input: 256x16x16 ----> output: 128x32x32)
nn.ConvTranspose2d(G_HIDDEN * 4, G_HIDDEN * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(G_HIDDEN * 2),
nn.ReLU(True),
# 4th layer (input: 128x32x32 ----> output: 64x64x64)
nn.ConvTranspose2d(G_HIDDEN * 2, G_HIDDEN, 4, 2, 1, bias=False),
nn.BatchNorm2d(G_HIDDEN),
nn.ReLU(True),
            # output layer (input: 64x64x64 ----> 1x128x128)
            # note: no Tanh on this output layer, unlike Generator_64, so outputs are unbounded
            nn.ConvTranspose2d(G_HIDDEN, 1, 4, 2, 1, bias=False),
)
def forward(self, input):
return self.main(input)
#Conv2d(channels_in, channels_out, kernel, stride, padding)
class Discriminator_128(nn.Module):
def __init__(self, D_HIDDEN):
super(Discriminator_128, self).__init__()
self.main = nn.Sequential(
# 1st layer (input: 1x128x128 ----> output: 64x32x32)
nn.Conv2d(1, D_HIDDEN, 4, 4, 0, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# 2nd layer (input: 64x32x32 ----> output: 128x16x16)
nn.Conv2d(D_HIDDEN, D_HIDDEN * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(D_HIDDEN * 2),
nn.LeakyReLU(0.2, inplace=True),
# 3rd layer (input: 128x16x16 ----> output: 256x8x8)
nn.Conv2d(D_HIDDEN * 2, D_HIDDEN * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(D_HIDDEN * 4),
nn.LeakyReLU(0.2, inplace=True),
# 4th layer (input: 256x8x8 ----> output: 512x4x4)
nn.Conv2d(D_HIDDEN * 4, D_HIDDEN * 8, 4, 2, 1, bias=False),
nn.BatchNorm2d(D_HIDDEN * 8),
nn.LeakyReLU(0.2, inplace=True),
# output layer (input: 512x4x4 ----> output: 1x1x1)
nn.Conv2d(D_HIDDEN * 8, 1, 4, 1, 0, bias=False),
nn.Sigmoid() #Returns a number from 0 to 1: probability
)
def forward(self, input):
        return self.main(input).view(-1, 1).squeeze(1)
/aimlbotkernel-1.0.4.tar.gz/aimlbotkernel-1.0.4/README.rst

AIML Chatbot kernel
===================
This is a Jupyter kernel that deploys a chatbot, implemented using the
`python-aiml`_ package. The idea was taken from the `Calysto chatbot`_ kernel.
It has been tested with Jupyter 4.x. The code works with either Python 2.7
or Python 3 (tested with Python 3.4).
Installation
------------
The installation process requires two steps:
1. Install the Python package::
pip install aimlbotkernel
2. Install the kernel into Jupyter::
jupyter aimlbotkernel install [--user] [--logdir <dir>]
The ``--user`` option will install the kernel in the current user's personal
config, while the generic command will install it as a global kernel (but
needs write permissions in the system directories).
The ``--logdir`` option specifies the default place into which the logfile will
be written (unless overridden at runtime by the ``LOGDIR`` environment
variable). If no directory is specified, the (platform-specific) default
temporary directory will be used.
Note that the Jupyter kernel installation also installs some custom CSS; its
purpose is to improve the layout of the kernel results as they are presented
in the notebook (but it also means that the rendered notebook will look
slightly different in a Jupyter deployment in which the kernel has not been
installed, or within an online viewer).
To uninstall, perform the inverse operations (in reverse order), to uninstall
the kernel from Jupyter and to remove the Python package::
jupyter aimlbotkernel remove
pip uninstall aimlbotkernel
Operation
---------
Once installed, an *AIML Chatbot* kernel will be available in the Notebook
**New** menu. Starting one such kernel will create a chatbot. The chatbot is
initially empty but can be loaded with a couple of predefined DBs (use the
``%help`` magic for initial instructions).
Notebook input is of two kinds:
* Regular text cells are considered human input and are sent to the chatbot,
which produces its corresponding output
* Cells starting with ``%`` contain "magic" commands that affect the
operation of the kernel (load AIML databases, inspecting/modifying bot
state, saving/loading state to/from disk, etc). Use the ``%help`` magic for
some instructions, and ``%lsmagics`` to show the current list of defined
magics (magics have autocompletion and contextual help).
The ``examples`` directory contains a few notebooks showing some of the
provided functionality. They can also be seen with `online Notebook viewer`_
(note that, as said above, they will look slightly different than in a running
kernel).
AIML
----
`AIML`_ is an XML-based specification to design conversational agents. Its
most famous application is ALICE, a chatbot (the DB for the free version of
ALICE is included in this kernel, as it is included in python-aiml)
The chatbot can load an AIML database (which is basically a bunch of XML
files). It can also define AIML rules on the fly, by using the ``%aiml`` magic
in a cell.
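
For instance, a cell along these lines (a hypothetical example; the category
syntax is standard AIML) would add a single rule to the running bot::

  %aiml
  <category>
    <pattern>HELLO BOT</pattern>
    <template>Hello, human!</template>
  </category>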
.. _python-aiml: https://github.com/paulovn/python-aiml
.. _Calysto chatbot: https://github.com/Calysto/calysto_chatbot
.. _AIML: http://www.alicebot.org/aiml.html
.. _online Notebook viewer: http://nbviewer.jupyter.org/github/paulovn/aiml-chatbot-kernel/blob/master/examples/
/fastvarints-0.0.1.tar.gz/fastvarints-0.0.1/_skbuild/linux-x86_64-3.9/cmake-install/README.md

scikit_build_example
==============
[![Gitter][gitter-badge]][gitter-link]
| CI | status |
|----------------------|--------|
| conda.recipe | [![Conda Actions Status][actions-conda-badge]][actions-conda-link] |
| pip builds | [![Pip Actions Status][actions-pip-badge]][actions-pip-link] |
An example project built with [pybind11](https://github.com/pybind/pybind11) and scikit-build. Python 3.6+ (see older commits for older versions of Python).
[gitter-badge]: https://badges.gitter.im/pybind/Lobby.svg
[gitter-link]: https://gitter.im/pybind/Lobby
[actions-badge]: https://github.com/pybind/scikit_build_example/workflows/Tests/badge.svg
[actions-conda-link]: https://github.com/pybind/scikit_build_example/actions?query=workflow%3AConda
[actions-conda-badge]: https://github.com/pybind/scikit_build_example/workflows/Conda/badge.svg
[actions-pip-link]: https://github.com/pybind/scikit_build_example/actions?query=workflow%3APip
[actions-pip-badge]: https://github.com/pybind/scikit_build_example/workflows/Pip/badge.svg
[actions-wheels-link]: https://github.com/pybind/scikit_build_example/actions?query=workflow%3AWheels
[actions-wheels-badge]: https://github.com/pybind/scikit_build_example/workflows/Wheels/badge.svg
Installation
------------
- clone this repository
- `pip install ./scikit_build_example`
CI Examples
-----------
There are examples for CI in `.github/workflows`. A simple way to produce
binary "wheels" for all platforms is illustrated in the "wheels.yml" file,
using [`cibuildwheel`][].
License
-------
pybind11 is provided under a BSD-style license that can be found in the LICENSE
file. By using, distributing, or contributing to this project, you agree to the
terms and conditions of this license.
Test call
---------
```python
import scikit_build_example
scikit_build_example.add(1, 2)
```
[`cibuildwheel`]: https://cibuildwheel.readthedocs.io
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AlipayEcoMycarParkingCharginginfoSyncRequest.py

import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayEcoMycarParkingCharginginfoSyncModel import AlipayEcoMycarParkingCharginginfoSyncModel
class AlipayEcoMycarParkingCharginginfoSyncRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayEcoMycarParkingCharginginfoSyncModel):
self._biz_content = value
else:
self._biz_content = AlipayEcoMycarParkingCharginginfoSyncModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.eco.mycar.parking.charginginfo.sync'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
        return multipart_params
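
# Usage sketch (an assumption based on the common Alipay SDK request pattern;
# signing and execution are handled by the SDK's client class, not shown here):
#
#   model = AlipayEcoMycarParkingCharginginfoSyncModel()
#   request = AlipayEcoMycarParkingCharginginfoSyncRequest(biz_model=model)
#   request.notify_url = 'https://example.com/notify'  # hypothetical URL
#   params = request.get_params()  # dict of request parameters ready to send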
/collective.hostout-1.0a3.tar.gz/collective.hostout-1.0a3/collective/hostout/__init__.py
import logging, os, tempfile, urllib2, urlparse
import setuptools.archive_util
import datetime
import zc.buildout
import zc.recipe.egg
from os.path import join
from os.path import dirname, abspath
import ConfigParser
from zc.buildout.buildout import Options, _recipe, _install_and_load
import pkg_resources
import sys
class Recipe:
def __init__(self, buildout, name, options):
self.egg = zc.recipe.egg.Egg(buildout, options['recipe'], options)
self.name, self.options, self.buildout = name, options, buildout
if not buildout['buildout'].get('hostout-main'):
buildout['buildout']['hostout-main'] = name
options['mainhostout'] = self.name
#get all recipes here to make sure we're the last called
self.getAllRecipes()
self.buildout_dir = self.buildout.get('buildout').get('directory')
self.download_cache = self.buildout['buildout'].get('download-cache')
self.install_from_cache = self.buildout['buildout'].get('install-from-cache')
self.options['versions'] = self.buildout['buildout'].get('versions','versions')
self.options['location'] = os.path.join(
self.buildout['buildout']['parts-directory'],
'hostout',
)
self.optionsfile = join(self.options['location'],'hostout.cfg')
self.fabfiles = []
self.subrecipes = []
self.subpackages = []
self.extends(options, [])
self.options.setdefault('dist_dir','dist')
self.options.setdefault('buildout','buildout.cfg')
self.options.setdefault('user','') #get default from .ssh/config
self.options.setdefault('hostname',name) #get default from .ssh/config
idfile = os.path.join(self.buildout_dir,"%s_key"%options.get('hostname'))
self.options.setdefault('identity-file',self.options.get('identity_file',idfile))
self.options.setdefault('buildout-user',self.options['user'])
self.options.setdefault('effective-user',self.options['buildout-user'])
self.options.setdefault('buildout-group','buildout')
self.options.setdefault('host', '')
self.options.setdefault('password','')
self.options.setdefault('post-commands', self.options.get('start_cmd',''))
self.options.setdefault('pre-commands', self.options.get('stop_cmd', ''))
self.options.setdefault('include', self.options.get('extra_config',''))
self.options.setdefault('parts','')
self.options.setdefault('versionsfile','hostoutversions.cfg')
default_path = '~%s/buildout'%self.options['user']
self.options.setdefault('path', self.options.get('remote_path','/var/lib/plone/%s'%name))
# self.extra_config = [s.strip() for s in self.options.get('extra_config','').split('\n') if s.strip()]
self.options.setdefault('buildout_location',self.buildout_dir)
self.options['arguments']="'%s',sys.argv[1:]"%self.optionsfile
self.options['scripts'] = 'hostout'
self.options['eggs'] = '\n'.join(self.options.get('eggs','').split() + ['collective.hostout'])
self.script = zc.recipe.egg.Scripts(buildout, options['recipe'], options)
version = '.'.join([str(i) for i in sys.version_info])
self.options['python-version'] = options.get('python-version', version)
def extends(self, options, seen):
extends = [e.strip() for e in options.get('extends','').split() if e.strip()]
for extension in extends:
if extension in seen:
continue
seen.append(extension)
part = self.buildout.get(extension)
if part is None:
# try interpreting extends as recipe
eopts = {}
# eopts.update(options)
eopts['recipe'] = extension
#buildout._raw[extension] = eopts #dodgy hack since buildout.__setitem__ not implemented
#part = buildout.get(extension)
#part = Options(buildout, name, eopts)
#part._initialize()
reqs, entry = _recipe(eopts)
recipe_class = _install_and_load(reqs, 'zc.buildout', entry, self.buildout)
recipe = recipe_class(self.buildout, self.name, self.options)
self.subrecipes.append(recipe)
egg = extension.split(':')[0]
self.options['eggs'] = '\n'.join(self.options.get('eggs','').split() + [egg])
continue
else:
self.extends(part, seen)
for key in part:
if key in ['fabfiles', 'pre-commands', 'post-commands']:
fabfiles = part[key].split()
self.options[key] = '\n'.join(self.options.get(key, '').split()+fabfiles)
elif key not in self.options.keys():
self.options[key] = part[key]
return seen
def install(self):
installed = []
for recipe in self.subrecipes:
installed = recipe.install()
if installed is None:
installed = []
elif isinstance(installed, basestring):
installed = [installed]
logger = logging.getLogger(self.name)
location = self.options['location']
if not os.path.exists(location):
os.mkdir(location)
config = ConfigParser.ConfigParser()
config.read(self.optionsfile)
fp = open(self.optionsfile, 'w+')
config.write(fp)
fp.close()
self.update()
if self.options.get('mainhostout') is not None:
installed = installed + self.script.install()
return installed + [self.optionsfile]
def update(self):
installed = []
for recipe in self.subrecipes:
installed = recipe.install()
if installed is None:
installed = []
elif isinstance(installed, basestring):
installed = [installed]
if self.options.get('mainhostout') is not None:
installed = installed + self.script.update()
config = ConfigParser.RawConfigParser()
config.optionxform = str
config.read(self.optionsfile)
if not config.has_section(self.name):
config.add_section(self.name)
if not config.has_section('buildout'):
config.add_section('buildout')
for name,value in self.options.items():
config.set(self.name, name, value)
config.set('buildout','location',self.buildout_dir)
if self.options.get('mainhostout') is not None:
self.writeVersions()
config.set('buildout', 'bin-directory', self.buildout.get('buildout').get('directory'))
if self.options['dist_dir']:
config.set('buildout','dist_dir', self.options['dist_dir'])
packages = [p.strip() for p in self.buildout.get('buildout').get('develop','').split()]
packages += [p.strip() for p in self.options.get('packages','').split()]
if not config.has_section('buildout'):
config.add_section('buildout')
config.set('buildout', 'packages', '\n '.join(packages))
#self.options.setdefault('develop','')
fp = open(self.optionsfile, 'w+')
config.write(fp)
fp.close()
return installed+ [self.optionsfile]
def getAllRecipes(self):
recipes = []
for part in [p.strip() for p in self.buildout['buildout']['parts'].split()]:
options = self.buildout.get(part) #HACK
if options is None or not options.get('recipe'):
continue
try:
recipe,subrecipe = options['recipe'].split(':')
except:
recipe=options['recipe']
recipes.append((part,recipe,options))
return recipes
def getVersions(self):
versions = {}
for part, recipe, options in self.getAllRecipes():
egg = zc.recipe.egg.Egg(self.buildout, recipe, options)
spec, entry = _recipe({'recipe':recipe})
req = pkg_resources.Requirement.parse(spec)
dist = pkg_resources.working_set.find(req)
if "collective.hostout" in spec:
continue #HACK
requirements, ws = egg.working_set()
for dist in [dist] + [d for d in ws]:
old_version,dep = versions.get(dist.project_name,('',[]))
if recipe not in dep:
dep.append(recipe)
if dist.version != '0.0':
versions[dist.project_name] = (dist.version,dep)
spec = ""
return versions
def writeVersions(self):
versions = self.getVersions()
f = open(self.options['versionsfile'], "w")
f.write("[%(versions)s]\n" % self.options)
for project_name,info in sorted(versions.items()):
version,deps = info
if [d for d in deps if d != project_name]:
continue
if version != '0.0':
spec='%s = %s' % (project_name,version)+'\n'
else:
spec='#%s = %s' % (project_name,version)+'\n'
f.write( spec )
for project_name,info in sorted(versions.items()):
version,deps = info
spec='\n'
deps = [d for d in deps if d != project_name]
if not deps:
continue
for dep in sorted(deps):
if project_name == dep:
continue
if versions.get(dep):
dver, ddeps = versions.get(dep)
else:
dver = 'Unknown'
spec+='# Required by %s \n' % (dep) #versions[dep][0])
if version != '0.0':
spec+='%s = %s' % (project_name,version)+'\n'
else:
spec+='#%s = %s' % (project_name,version)+'\n'
f.write(spec)
f.close()
# relpath.py
# R.Barran 30/08/2004
import os
def relpath(target, base=os.curdir):
"""
Return a relative path to the target from either the current dir or an optional base dir.
Base can be a directory specified either as absolute or relative to current dir.
"""
    if not os.path.exists(target):
        raise OSError('Target does not exist: ' + target)
    if not os.path.isdir(base):
        raise OSError('Base is not a directory or does not exist: ' + base)
base_list = (os.path.abspath(base)).split(os.sep)
target_list = (os.path.abspath(target)).split(os.sep)
# On the windows platform the target may be on a completely different drive from the base.
    if os.name in ['nt', 'dos', 'os2'] and base_list[0] != target_list[0]:
        raise OSError('Target is on a different drive to base. Target: '
                      + target_list[0].upper() + ', base: ' + base_list[0].upper())
# Starting from the filepath root, work out how much of the filepath is
# shared by base and target.
for i in range(min(len(base_list), len(target_list))):
        if base_list[i] != target_list[i]: break
else:
# If we broke out of the loop, i is pointing to the first differing path elements.
# If we didn't break out of the loop, i is pointing to identical path elements.
# Increment i so that in all cases it points to the first differing path elements.
i+=1
rel_list = [os.pardir] * (len(base_list)-i) + target_list[i:]
    return os.path.join(*rel_list)
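
# Examples (illustrative): relpath('/var/lib/plone/site', base='/var/lib')
# returns 'plone/site'; relpath('/etc', base='/var/lib') climbs with os.pardir
# and returns '../../etc' on POSIX. Both target and base must already exist.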
/ql-demo-0.4.13.91.tar.gz/ql-demo-0.4.13.91/qldemo/demo.py
## ql-demo-parse.py
## Shawn Nock, 2014
# Standard Lib
import copy
import json
import os
import re
import struct
# C-Extension wrapping Q3A Huffman Routines
import huffman
# Constants and enum maps
from qldemo.constants import *
from qldemo.data import (GameState, EntityState, PlayerState,
EntityStateNETF, PlayerStateNETF,
ServerCommand, Snapshot)
# Configuration
# Utility Functions
# Classes
class QLDemo:
gamestate=GameState()
packets=[]
snapshots=[]
scores=[]
def __init__(self, filename):
huffman.init()
huffman.open(filename)
def __iter__(self):
while True:
seq=huffman.readrawlong()
length=huffman.readrawlong()
if seq == -1 or length == -1:
break
huffman.fill(length)
ack = huffman.readlong()
cmd = huffman.readbyte()
r = None
if cmd == SVC_GAMESTATE:
r = self.parse_gamestate()
elif cmd == SVC_SERVERCOMMAND:
r = self.parse_servercommand()
elif cmd == SVC_SNAPSHOT:
r = self.parse_snapshot()
                if len(self.snapshots) and r.serverTime == self.snapshots[-1].serverTime:
                    return  # PEP 479: end the generator with return, not StopIteration
self.snapshots.append(r)
self.packets.append(r)
if r: yield r
def parse_gamestate(self):
ack=huffman.readlong()
while True:
cmd = huffman.readbyte()
if cmd == SVC_EOF:
break
elif cmd == SVC_CONFIGSTRING:
self.parse_configstring()
elif cmd == SVC_BASELINE:
self.parse_baseline()
self.gamestate.clientNum = huffman.readlong()
self.gamestate.checksumFeed = huffman.readlong()
return self.gamestate
def parse_configstring(self, data=(None, None)):
i, string = data
        if i is None:  # index 0 is a valid configstring slot, so test for None explicitly
i = huffman.readshort()
string = huffman.readbigstring()
dest=self.gamestate.configstrings
fieldname=str(i)
output=string
if CS_STRING_MAP.get(i, None):
dest=self.gamestate.config
fieldname=CS_STRING_MAP.get(i)
if string.startswith("\\"):
output={}
subfields = string.split('\\')
if not fieldname in dest:
dest[fieldname]={}
for x in range(1, len(subfields)-1, 2):
output[subfields[x]]=subfields[x+1]
if i >= CS_PLAYERS and i < CS_PLAYERS+MAX_CLIENTS:
clientNum = i-CS_PLAYERS
fieldname=int(clientNum)
subfields = string.split('\\')
output = {}
for x in range(0, len(subfields), 2):
output[subfields[x]]=subfields[x+1]
if output['t'] == TEAM_SPECTATOR:
dest=self.gamestate.spectators
else:
dest=self.gamestate.players
if i >= CS_SOUNDS and i < CS_SOUNDS+MAX_SOUNDS:
dest=self.gamestate.config
fieldname='sound'+str(i-CS_SOUNDS)
if i >= CS_LOCATIONS and i < CS_LOCATIONS+MAX_LOCATIONS:
dest=self.gamestate.config
fieldname='location{:02d}'.format(i-CS_LOCATIONS)
dest[fieldname]=output
def parse_baseline(self):
newnum = huffman.readbits(GENTITYNUM_BITS)
null_state=EntityState()
baseline = self.read_delta_entity(null_state, newnum)
## Broken for now, disabling for speed
#self.gamestate.baselines[newnum]=baseline
def read_delta_entity(self, frm, num):
## Check for server order to remove a baseline
if huffman.readbits(1) == 1:
# Don't know how we should handle this, it does mean no
# new data; skipping for now
return
## Check for 'no delta' flag
if huffman.readbits(1) == 0:
## No changes, we should make 'from' a copy of
## 'to'... skipping for now
return
last_field = huffman.readbyte()
entity = EntityState()
netf = EntityStateNETF(entity)
for i in range(0, last_field):
if huffman.readbits(1) :
if not netf.bits[i] :
if huffman.readbits(1) != 0:
if huffman.readbits(1) == 0:
netf.fields[i] = huffman.readbits(FLOAT_INT_BITS)
else :
netf.fields[i] = huffman.readfloat()
else:
if huffman.readbits(1) != 0:
netf.fields[i] = huffman.readbits(netf.bits[i])
netf.update()
return entity
def parse_servercommand(self):
seq = huffman.readlong()
string = huffman.readstring()
sc=ServerCommand(seq, string)
#if sc.cmd == "scores_duel":
# sc = self.parse_duel_scores(sc)
# self.scores.append(sc.scores)
#elif sc.cmd == "scores_ctf":
# sc = self.parse_ctf_scores(sc)
# self.scores.append(sc.scores)
#elif sc.cmd == "scores":
# sc = self.parse_old_scores(sc)
# self.scores.append(sc.scores)
if sc.cmd == 'cs' or sc.cmd == 'bcs':
self.update_configstring(sc)
return sc
def update_configstring(self, command):
ls = command.string.split(' ')
cs_num = int(ls[0])
cs = ' '.join(ls[1:]).strip('"')
self.parse_configstring((cs_num, cs))
def parse_duel_scores(self, command):
offset = 1
ls = command.string.split()
num_scores = int(ls[0])
command.scores={}
for client in range(num_scores):
client_num = ls[offset+0]
command.scores[client_num]={}
command.scores[client_num]['score'] = ls[offset+1]
command.scores[client_num]['ping'] = ls[offset+2]
command.scores[client_num]['time'] = ls[offset+3]
command.scores[client_num]['kills'] = ls[offset+4]
command.scores[client_num]['deaths'] = ls[offset+5]
command.scores[client_num]['accuracy'] = ls[offset+6]
command.scores[client_num]['best_weapon'] = ls[offset+7]
command.scores[client_num]['damage_dealt'] = ls[offset+8]
command.scores[client_num]['impressive'] = ls[offset+9]
command.scores[client_num]['excellent'] = ls[offset+10]
command.scores[client_num]['gauntlet'] = ls[offset+11]
command.scores[client_num]['perfect'] = ls[offset+12]
command.scores[client_num]['red_armor_pickups'] = ls[offset+13]
command.scores[client_num]['red_armor_pickup_time'] = ls[offset+14]
command.scores[client_num]['yellow_armor_pickups'] = ls[offset+15]
command.scores[client_num]['yellow_armor_pickup_time'] = ls[offset+16]
command.scores[client_num]['green_armor_pickups'] = ls[offset+17]
command.scores[client_num]['green_armor_pickup_time'] = ls[offset+18]
command.scores[client_num]['mega_health_pickups'] = ls[offset+19]
command.scores[client_num]['mega_healh_pickup_time'] = ls[offset+20]
offset+=21
command.scores[client_num]['weapon_stats'] = []
for i in range(WP_GAUNTLET, WP_NUM_WEAPONS-1):
weapon = {}
weapon['hit'] = ls[offset+0]
weapon['fired'] = ls[offset+1]
weapon['accuracy'] = ls[offset+2]
weapon['damage_dealt'] = ls[offset+3]
weapon['kills'] = ls[offset+4]
command.scores[client_num]['weapon_stats'].append(weapon)
offset+=5
return command
def parse_ctf_scores(self, command):
ls=command.string.split()
command.scores={}
command.scores['TEAM_RED'] = {}
command.scores['TEAM_RED']['red_armor'] = ls[0]
command.scores['TEAM_RED']['yellow_armor'] = ls[1]
command.scores['TEAM_RED']['green_armor'] = ls[2]
command.scores['TEAM_RED']['mega_health'] = ls[3]
command.scores['TEAM_RED']['quad_damage'] = ls[4]
command.scores['TEAM_RED']['battle_suit'] = ls[5]
command.scores['TEAM_RED']['regeneration'] = ls[6]
command.scores['TEAM_RED']['haste'] = ls[7]
command.scores['TEAM_RED']['invisibility'] = ls[8]
command.scores['TEAM_RED']['flag'] = ls[9]
command.scores['TEAM_RED']['medkit'] = ls[10]
command.scores['TEAM_RED']['quad_damage_time'] = ls[11]
command.scores['TEAM_RED']['battle_suit_time'] = ls[12]
command.scores['TEAM_RED']['regeneration_time'] = ls[13]
command.scores['TEAM_RED']['haste_time'] = ls[14]
command.scores['TEAM_RED']['invisibility_time'] = ls[15]
command.scores['TEAM_RED']['flag_time'] = ls[16]
command.scores['TEAM_BLUE'] = {}
command.scores['TEAM_BLUE']['red_armor'] = ls[17]
command.scores['TEAM_BLUE']['yellow_armor'] = ls[18]
command.scores['TEAM_BLUE']['green_armor'] = ls[19]
command.scores['TEAM_BLUE']['mega_health'] = ls[20]
command.scores['TEAM_BLUE']['quad_damage'] = ls[21]
command.scores['TEAM_BLUE']['battle_suit'] = ls[22]
command.scores['TEAM_BLUE']['regeneration'] = ls[23]
command.scores['TEAM_BLUE']['haste'] = ls[24]
command.scores['TEAM_BLUE']['invisibility'] = ls[25]
command.scores['TEAM_BLUE']['flag'] = ls[26]
command.scores['TEAM_BLUE']['medkit'] = ls[27]
command.scores['TEAM_BLUE']['quad_damage_time'] = ls[28]
command.scores['TEAM_BLUE']['battle_suit_time'] = ls[29]
command.scores['TEAM_BLUE']['regeneration_time'] = ls[30]
command.scores['TEAM_BLUE']['haste_time'] = ls[31]
command.scores['TEAM_BLUE']['invisibility_time'] = ls[32]
command.scores['TEAM_BLUE']['flag_time'] = ls[33]
num_scores = int(ls[34])
command.scores['TEAM_RED']['score'] = ls[35]
command.scores['TEAM_BLUE']['score'] = ls[36]
offset = 0
for client in range(num_scores):
client_num = ls[offset+37]
command.scores[client_num]={}
command.scores[client_num]['team'] = ls[offset+38]
command.scores[client_num]['premium'] = ls[offset+39]
command.scores[client_num]['score'] = ls[offset+40]
command.scores[client_num]['ping'] = ls[offset+41]
command.scores[client_num]['time'] = ls[offset+42]
command.scores[client_num]['kills'] = ls[offset+43]
command.scores[client_num]['deaths'] = ls[offset+44]
command.scores[client_num]['powerups'] = ls[offset+45]
command.scores[client_num]['accuracy'] = ls[offset+46]
command.scores[client_num]['best_weapon'] = ls[offset+47]
command.scores[client_num]['impressive'] = ls[offset+48]
command.scores[client_num]['excellent'] = ls[offset+49]
command.scores[client_num]['gauntlet'] = ls[offset+50]
command.scores[client_num]['defend'] = ls[offset+51]
command.scores[client_num]['assist'] = ls[offset+52]
command.scores[client_num]['captures'] = ls[offset+53]
command.scores[client_num]['perfect'] = ls[offset+54]
command.scores[client_num]['alive'] = ls[offset+55]
offset+=19
return command
def parse_old_scores(self, command):
ls = command.string.split()
command.scores = {}
num_scores = int(ls[0])
command.scores['TEAM_RED'] = ls[1]
command.scores['TEAM_BLUE'] = ls[2]
offset=3
for client in range(num_scores):
client_num = ls[offset+0]
command.scores[client_num]={}
command.scores[client_num]['score'] = ls[offset+1]
command.scores[client_num]['ping'] = ls[offset+2]
command.scores[client_num]['time'] = ls[offset+3]
command.scores[client_num]['powerups'] = ls[offset+4]
command.scores[client_num]['accuracy'] = ls[offset+5]
command.scores[client_num]['impressive'] = ls[offset+6]
command.scores[client_num]['excellent'] = ls[offset+7]
command.scores[client_num]['gauntlet'] = ls[offset+8]
command.scores[client_num]['defend'] = ls[offset+9]
command.scores[client_num]['assist'] = ls[offset+10]
command.scores[client_num]['perfect'] = ls[offset+11]
command.scores[client_num]['captures'] = ls[offset+12]
command.scores[client_num]['alive'] = ls[offset+13]
command.scores[client_num]['kills'] = ls[offset+10]
command.scores[client_num]['deaths'] = ls[offset+11]
command.scores[client_num]['best_weapon'] = ls[offset+12]
offset+=18
return command
def parse_snapshot(self):
new_snap = Snapshot()
new_snap.serverTime=huffman.readlong()
#delta_num = huffman.readbyte()
#new_snap.snapFlags = huffman.readbyte()
#new_snap.areamaskLen = huffman.readbyte()
#for i in range(new_snap.areamaskLen+1):
# new_snap.areamask.append(huffman.readbyte())
#ps = self.parse_playerstate()
#new_snap.playerstate=ps
return new_snap
def parse_playerstate(self):
last_field=huffman.readbyte()
player=PlayerState()
netf=PlayerStateNETF(player)
playerStateFieldsNum = len( netf.bits )
if last_field > playerStateFieldsNum :
return None
for i in range( 0, last_field) :
if huffman.readbits( 1 ) :
if netf.bits[ i ] == 0 :
if huffman.readbits( 1 ) == 0 :
netf.fields[ i ] = huffman.readbits( FLOAT_INT_BITS ) - FLOAT_INT_BIAS
else :
netf.fields[ i ] = huffman.readfloat()
else :
bits = netf.bits[ i ]
netf.fields[ i ] = huffman.readbits( bits )
netf.update()
if huffman.readbits( 1 ) :
if huffman.readbits( 1 ) :
c = huffman.readshort()
for i in range( MAX_STATS ) :
if c & ( 1 << i ) :
player.stats[ i ] = huffman.readshort()
if huffman.readbits( 1 ) :
c = huffman.readshort()
for i in range( MAX_PERSISTANT ) :
if c & ( 1 << i ) :
player.persistant[ i ] = huffman.readshort()
if huffman.readbits( 1 ) :
c = huffman.readshort()
for i in range( MAX_WEAPONS ) :
if c & ( 1 << i ) :
player.ammo[ i ] = huffman.readshort()
if huffman.readbits( 1 ) :
c = huffman.readshort()
for i in range( MAX_POWERUPS ) :
if c & ( 1 << i ) :
player.powerups[ i ] = huffman.readlong()
        return player
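
# Usage sketch (illustrative; not part of the original source):
#
#   demo = QLDemo('match.dm_90')          # hypothetical demo file name
#   for packet in demo:                   # GameState, ServerCommand, Snapshot
#       pass
#   print(json.dumps(demo.gamestate.config, indent=2))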
/xfcs-1.1.6.tar.gz/xfcs-1.1.6/docs/usage.md

## xfcs Usage:
------------------------------------------------
### Overview:
XFCS can read and export any data within FCS 3.0 and 3.1 compliant files. Default output file type is csv.
The intent of this project is to assist others in the analysis of flow cytometry data by providing access to the data in a universally usable format.
Short, cautionary notes in advance of riveting documentation:
* The FCS text section (which I refer to as metadata) contains all necessary information to interpret, scale, and transform raw values.
Extracted data sets contain only the relevant numerical values per channel.
Use the conveniently included metadata option to generate a csv file alongside the data.
* If you are generating multiple metadata files, use the `--output` option to prevent overwrite. The default naming format has no concern for existing files of the same name.
------------------------------------------------
### File input:
With no input files entered, the current directory will be searched for fcs files. Applies to both data and metadata.
1. Recursive search:
Enables recursive search of current directory.
--recursive, -r
2. List input files:
Optionally designate input file(s) instead of the default directory search.
--input, -i file1.fcs file3.fcs
------------------------------------------------
### Extract Data:
xfcs data --options
Each data set is written to its own file, named after its source FCS file and the type of set. Default output is a csv file in long (third normal form) layout with parameter names as column names.
#### Data Set Options:
Extracted data sets include all parameters relative to the specified transform. Except for raw output, time and event count data is included in all data sets. Any number of the options below can be enabled within the same command. If a file does not have parameters which apply to a requested data set, a notice will be displayed and the other applicable data sets will be extracted.
Commands can be combined using their short versions and each data set will still generate its own file, e.g. to extract raw, channel, and scale values simultaneously:
xfcs data -wcs
1. Raw:
Parameter data extracted with no scaling, transforms or bit masks applied.
--raw, -w
2. Channel:
Raw data with bit masks, if applicable. Time is normalized to start at 0 (unless `--ref-time` enabled).
Event count is normalized if it exists (unless `--ref-count` enabled) or it is automatically added.
--channel, -c
3. Scale:
Includes only parameters with a log10 or gain scale applied.
Event count and time automatically included.
--scale, -s
4. Channel Scale (xcxs):
Includes channel values for any parameter that does not have a scale value, plus all parameter scale values.
Event count and time automatically included.
--xcxs, -x
5. Fluorescence Compensated:
Parameters located in $SPILLOVER matrix and their compensated channel values. Event count and time automatically included.
--fl-comp, -f
6. Scaled Fluorescence Compensated:
Any parameter with both compensation and log10 scaling. Log scaling is applied to fluorescence compensated values. Event count and time automatically included.
--scale-fl-comp, -p
#### Time and Event Count Options:
1. Use actual event count parameter data (if it exists) instead of normalizing start to one.
--ref-count, -e
2. Use actual time parameter data (if it exists) instead of normalizing start to zero.
--ref-time, -t
#### Output Options:
- Output defaults to csv file. To use HDF5 instead:
--hdf5
- Automatically generate metadata csv file for each fcs file.
--metadata, -m
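
For example, to extract channel values to HDF5 and write the metadata csv in
the same pass:

    xfcs data -c --hdf5 -m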
------------------------------------------------
See [metadata_workflow][metawork] for step by step instructions.
------------------------------------------------
### Extract Metadata:
xfcs metadata --options
Extracts all header and text section keyword-value pairs and writes the content to a csv file. Multiple FCS files can be written to the same csv file regardless of shared keywords. Default format is wide.
#### Third normal form (long, tidy):
Outputs CSV in long format where each row is one fcs file.
--tidy, -t
#### Additional Input Option:
Limit input to the n most recent files.
--limit n, -l n
#### Output Option:
Default behavior is for all FCS files to be included within the same csv file, named based on the current directory. One of the two options below can be selected to enable either separate metadata files per FCS file or a specified filename and filepath for the default merged csv file.
- Each input FCS file generates one csv file.
--sep-files, -s
- Designate the output .csv filepath for combined, default metadata file. Also applies to a merged file which utilizes a previously generated csv file.
--output file.csv, -o file.csv
#### Keyword Metadata Option:
- Keyword text file.
Generates a user keyword text file, `FCS_USER_KW.txt`, in the current directory, containing all keywords located within the scanned FCS files. Necessary for utilizing keyword filtering and statistics.
--get-kw, -g
- Keyword Filter and Stats.
Filter text section keyword values to create custom metadata output. Remove any unwanted keywords from `FCS_USER_KW.txt` and enter its path in the command as shown below. Additional numeric keyword statistics are described at the end.
--kw-filter user_kw.txt, -k user_kw.txt
- Merge.
Append new FCS metadata to existing FCS metadata csv file. Keywords used in existing metadata file will act as a filter for new FCS files. Filename output option will work in conjunction with this command.
--append-to metadata_filepath.csv, -a metadata_filepath.csv
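
For example, to append new FCS files to an existing metadata csv and write the
merged result to a new file (file names here are illustrative):

    xfcs metadata -a old_metadata.csv -o merged_metadata.csv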
------------------------------------------------
### Metadata Numeric Keyword Mean:
Using the `FCS_USER_KW.txt` file, a numeric keyword can have a rolling mean column added to the metadata output. The default history window is the last 10 values, but it can be specified. If used in combination with the add-on module xfcsdashboard, parameter mean values will be grouped with their source for easy comparison.
Appending MEAN to any keyword will enable this feature.
Example keyword: $P25V
- enable mean column
```
$P25V_MEAN
```
- enable mean column with history of 5 last values
```
$P25V_MEAN_5
```
If you are tracking values from multiple machine configurations, the parameter id numbers are not necessarily standardized. The keyword mean values are calculated by matching the actual keyword between different fcs files.
For example: $P4N is __FS Lin__ in a specific machine configuration. But, after disabling or enabling other color channels $P4N now refers to __SS Area__. Tracking the voltage of $P4V will provide invalid results as it includes data for 2 different channels.
Luckily, a solution below!
1. Determine the channel names for current parameter id numbers.
This information can be found within a metadata file or quickly located by using the following command:
xfcs metadata --spx-names
2. Edit your keyword prefs text file and include any mean values using the specific channel attribute and name. If you are tracking voltage and want $P4V, include this line in the keyword text file:
$PxV_FSLIN_MEAN_10
By utilizing this notation, xfcs will match the parameter by name to all other files. If FS Lin is $P8 in a different configuration, it will include the correct $P8V value within the same column.
Questions and requests can be sent to: <[email protected]>
Enjoy your flow data!
[metawork]: metadata_workflow.md
/upaas-admin-0.3.1.tar.gz/upaas-admin-0.3.1/upaas_admin/apps/scheduler/forms.py

from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from upaas_admin.common.forms import CrispyMongoForm
from upaas_admin.apps.scheduler.models import ApplicationRunPlan
class ApplicationRunPlanForm(CrispyMongoForm):
submit_label = 'Start'
submit_icon_class = 'fa fa-play'
# FIXME patch crispy form-horizontal to support checkboxes
form_class = ''
label_class = ''
field_class = ''
layout = ['workers_min', 'workers_max']
class Meta:
document = ApplicationRunPlan
exclude = ('application', 'backends', 'memory_per_worker',
'max_log_size')
def clean(self):
workers_min = self.cleaned_data.get('workers_min')
workers_max = self.cleaned_data.get('workers_max')
if not self.instance.id:
apps_running = self.user.limits_usage['running_apps']
apps_limit = self.user.limits['running_apps']
if apps_limit and apps_running >= apps_limit:
raise forms.ValidationError(
_("Already running maximum allowed applications "
"({count}), can't start another one").format(
count=apps_running))
if workers_min is None or workers_max is None:
return self.cleaned_data
if workers_min > workers_max:
raise forms.ValidationError(_("Minimum workers number cannot be"
"lower than maximum"))
workers_used = self.user.limits_usage['workers']
if self.instance.id:
run_plan = ApplicationRunPlan.objects(id=self.instance.id).first()
workers_used -= run_plan.workers_max
workers_limit = self.user.limits['workers']
if workers_limit:
workers_available = max(workers_limit - workers_used, 0)
if workers_min > workers_available:
raise forms.ValidationError(_(
"Only {available} workers available, cannot set "
"{workers} as minimum ").format(
available=workers_available, workers=workers_min))
if workers_max > workers_available:
raise forms.ValidationError(_(
"Only {available} workers available, cannot set "
"{workers} as maximum ").format(
available=workers_available, workers=workers_max))
return self.cleaned_data
class EditApplicationRunPlanForm(ApplicationRunPlanForm):
submit_label = 'Save'
class Meta:
document = ApplicationRunPlan
exclude = ('application', 'backends', 'memory_per_worker',
                   'max_log_size')
/ODAMNet-1.1.0-py3-none-any.whl/odamnet/odamnet.py

# Methods
import odamnet.CTD_functions as CTD
import odamnet.WP_functions as WP
import odamnet.methods_functions as methods
# Libraries
import os
import click
from click_option_group import optgroup, RequiredMutuallyExclusiveOptionGroup
import odamnet.customClick as customClick
from alive_progress import alive_bar
import shutil as shutil
from importlib import metadata
# Script version
__version__ = metadata.version(__package__ or __name__)
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.group(context_settings=CONTEXT_SETTINGS, cls=customClick.NaturalOrderGroup)
@click.version_option(__version__)
def main():
"""
[OPTIONS] =
overlap | domino | multixrank | networkCreation | networkDownloading
Analyse the molecular relationships between chemicals and rare diseases.
Select the approach you want to perform:
odamnet [overlap | domino | multixrank] -h
To create a pathways/processes network and its bipartite network:
odamnet networkCreation -h
To download networks from NDEx:
odamnet networkDownloading -h
"""
pass
@main.command(short_help='Overlap analysis', context_settings=CONTEXT_SETTINGS)
@optgroup.group('Extract target genes list from', cls=RequiredMutuallyExclusiveOptionGroup, help='Choose how to extract target genes')
@optgroup.option('-c', '--chemicalsFile', 'chemicalsFile', type=click.File(), help='Chemicals file name')
@optgroup.option('-r', '--CTD_file', 'CTD_file', type=click.File(), help='CTD file name')
@optgroup.option('-t', '--targetGenesFile', 'targetGenesFile', type=click.File(), help='Target genes file name')
@click.option('--directAssociation', 'directAssociation', default=True, type=bool, show_default=True,
help='True: Extract target genes from chemicals \n '
'False: Extract target genes from chemicals + their child chemicals')
@click.option('--nbPub', 'nbPub', default=2, type=int, show_default=True,
help='Minimum number of publications to keep an interaction')
@click.option('--GMT', 'pathOfInterestGMT', type=click.File(), cls=customClick.RequiredIf, required_if='backgroundFile',
help='Pathways of interest file name (GMT file)\n')
@click.option('--backgroundFile', 'backgroundFile', type=click.File(), cls=customClick.RequiredIf,
required_if='pathOfInterestGMT', help='Background genes file name\n')
@click.option('-o', '--outputPath', 'outputPath', type=click.Path(), default='OutputResults', show_default=True,
help='Output folder name to save results')
def overlap(chemicalsFile, CTD_file, targetGenesFile, directAssociation, nbPub, pathOfInterestGMT, backgroundFile, outputPath):
"""
Perform an overlap analysis between genes targeted by chemicals and rare disease pathways.
"""
# Parameters
outputPath = os.path.join(outputPath, 'OutputOverlapResults')
featuresDict = {}
pathwaysOfInterestList = []
# Check if outputPath exists and create it if it does not
if not os.path.exists(outputPath):
os.makedirs(outputPath, exist_ok=True)
# Extract genes from background and pathways of interest
if pathOfInterestGMT:
# Files reading
pathOfInterestGenesDict, pathOfInterestNamesDict, pathwaysOfInterestList = WP.readGMTFile(GMTFile=pathOfInterestGMT)
backgroundGenesDict, backgroundsList = WP.readBackgroundsFile(backgroundsFile=backgroundFile)
pathwaysOfInterestList = list(zip(pathwaysOfInterestList, backgroundsList))
analysisName = 'pathOfInterest'
else:
# Request WP
with alive_bar(title='Request WikiPathways', theme='musical') as bar:
pathOfInterestGenesDict, pathOfInterestNamesDict, pathwayOfInterestList = WP.rareDiseasesWPrequest(outputPath=outputPath)
backgroundGenesDict = WP.allHumanGenesFromWP(outputPath=outputPath)
for pathway in pathwayOfInterestList:
pathwaysOfInterestList.append([pathway, list(backgroundGenesDict.keys())[0]])
analysisName = 'RDWP'
bar()
if chemicalsFile:
# Analysis from factor list
featuresDict = CTD.targetGenesExtraction(chemicalsFile=chemicalsFile, directAssociations=directAssociation,
outputPath=outputPath, nbPub=nbPub)
if targetGenesFile:
# Analysis from gene list
featuresDict['genesList'] = CTD.readFeaturesFile(featuresFile=targetGenesFile)
if CTD_file:
# Analysis from CTD file
featuresDict = CTD.readCTDFile(CTDFile=CTD_file, nbPub=nbPub, outputPath=outputPath)
# Overlap between our features list and pathways of interest
methods.overlapAnalysis(targetGenesDict=featuresDict,
pathOfInterestGenesDict=pathOfInterestGenesDict,
pathOfInterestNamesDict=pathOfInterestNamesDict,
pathwaysOfInterestList=pathwaysOfInterestList,
backgroundGenesDict=backgroundGenesDict,
outputPath=outputPath,
analysisName=analysisName)
print('Overlap analysis finished')
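# Illustrative usage (file names below are placeholders, not part of the package):
#   odamnet overlap -c chemicals.txt -o Results
#   odamnet overlap -t targetGenes.txt --GMT pathwaysOfInterest.gmt --backgroundFile background.txt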
@main.command(short_help='Active Module Identification analysis', context_settings=CONTEXT_SETTINGS)
@optgroup.group('Extract target genes list from', cls=RequiredMutuallyExclusiveOptionGroup, help='Choose how to extract target genes')
@optgroup.option('-c', '--chemicalsFile', 'chemicalsFile', type=click.File(), help='Chemicals file name')
@optgroup.option('-r', '--CTD_file', 'CTD_file', type=click.File(), help='CTD file name')
@optgroup.option('-t', '--targetGenesFile', 'targetGenesFile', type=click.File(), help='Target genes file name')
@click.option('--directAssociation', 'directAssociation', default=True, type=bool, show_default=True,
help='True: Extract target genes from chemicals \n False: Extract target genes from chemicals + their child chemicals')
@click.option('--nbPub', 'nbPub', default=2, type=int, show_default=True,
help='Minimum number of publications to keep an interaction')
@click.option('-n', '--networkFile', 'networkFileName', type=str, metavar='FILENAME', required=True, help='Network file name')
@click.option('--netUUID', 'networkUUID', type=str, help='NDEx network ID')
@click.option('--GMT', 'pathOfInterestGMT', type=click.File(), cls=customClick.RequiredIf, required_if='backgroundFile',
help='Pathways of interest file name (GMT file)\n')
@click.option('--backgroundFile', 'backgroundFile', type=click.File(), cls=customClick.RequiredIf,
required_if='pathOfInterestGMT', help='Background genes file name\n')
@click.option('-o', '--outputPath', 'outputPath', type=click.Path(), default='OutputResults', show_default=True,
help='Output folder name to save results')
def DOMINO(chemicalsFile, CTD_file, targetGenesFile, networkFileName, networkUUID, directAssociation, nbPub, pathOfInterestGMT, backgroundFile,
outputPath):
"""
Perform an active module identification analysis between genes targeted by chemicals and rare disease pathways using
DOMINO.
1. DOMINO on the network using target genes as seeds
2. Overlap analysis between identified active modules and rare disease pathways
"""
# Parameters
outputPath = os.path.join(outputPath, 'OutputDOMINOResults')
featuresDict = {}
pathwaysOfInterestList = []
# Check if outputPath exists and create it if it does not
if not os.path.exists(outputPath):
os.makedirs(outputPath, exist_ok=True)
# Extract network from NDEx website
if os.path.exists(networkFileName):
if networkUUID:
print('Network file already exists. Give only the file name (without a network UUID), or rename/remove the file.')
exit()
else:
if networkUUID:
methods.downloadNDExNetwork(networkUUID=networkUUID, outputFileName=networkFileName, simplify=False)
else:
print('Network file doesn\'t exist. Add the network UUID to request NDEx or give another network file.')
exit()
# Extract genes from background and pathways of interest
if pathOfInterestGMT:
# Files reading
pathOfInterestGenesDict, pathOfInterestNamesDict, pathwaysOfInterestList = WP.readGMTFile(GMTFile=pathOfInterestGMT)
backgroundGenesDict, backgroundsList = WP.readBackgroundsFile(backgroundsFile=backgroundFile)
pathwaysOfInterestList = list(zip(pathwaysOfInterestList, backgroundsList))
analysisName = 'pathOfInterest'
else:
# Request WP
with alive_bar(title='Request WikiPathways', theme='musical') as bar:
pathOfInterestGenesDict, pathOfInterestNamesDict, pathwayOfInterestList = WP.rareDiseasesWPrequest(outputPath=outputPath)
backgroundGenesDict = WP.allHumanGenesFromWP(outputPath=outputPath)
for pathway in pathwayOfInterestList:
pathwaysOfInterestList.append([pathway, list(backgroundGenesDict.keys())[0]])
analysisName = 'RDWP'
bar()
if chemicalsFile:
# Analysis from factor list
featuresDict = CTD.targetGenesExtraction(chemicalsFile=chemicalsFile, directAssociations=directAssociation,
outputPath=outputPath, nbPub=nbPub)
if targetGenesFile:
# Analysis from gene list
featuresDict['genesList'] = CTD.readFeaturesFile(featuresFile=targetGenesFile)
if CTD_file:
# Analysis from CTD file
featuresDict = CTD.readCTDFile(CTDFile=CTD_file, nbPub=nbPub, outputPath=outputPath)
# DOMINO analysis for each environmental factor
methods.DOMINOandOverlapAnalysis(featuresDict=featuresDict,
networkFileName=networkFileName,
pathOfInterestGenesDict=pathOfInterestGenesDict,
pathOfInterestNamesDict=pathOfInterestNamesDict,
pathwaysOfInterestList=pathwaysOfInterestList,
backgroundGenesDict=backgroundGenesDict,
outputPath=outputPath,
analysisName=analysisName)
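# Illustrative usage (file name and UUID below are placeholders):
#   odamnet domino -c chemicals.txt -n network.sif --netUUID <NDEx-network-UUID>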
@main.command(short_help='Random Walk with Restart analysis', context_settings=CONTEXT_SETTINGS)
@optgroup.group('Extract target genes list from', cls=RequiredMutuallyExclusiveOptionGroup, help='Choose how to extract target genes')
@optgroup.option('-c', '--chemicalsFile', 'chemicalsFile', type=click.File(), help='Chemicals file name')
@optgroup.option('-r', '--CTD_file', 'CTD_file', type=click.File(), help='CTD file name')
@optgroup.option('-t', '--targetGenesFile', 'targetGenesFile', type=click.File(), help='Target genes file name')
@click.option('--directAssociation', 'directAssociation', default=True, type=bool, show_default=True,
help='True: Extract target genes from chemicals \n False: Extract target genes from chemicals + their child chemicals')
@click.option('--nbPub', 'nbPub', default=2, type=int, show_default=True,
help='Minimum number of publications to keep an interaction')
@click.option('--configPath', 'configPath', type=click.Path(), required=True, help='Configurations file path name')
@click.option('--networksPath', 'networksPath', type=click.Path(), required=True, help='Network directory path')
@click.option('--seedsFile', 'seedsFileName', type=str, required=True, help='Seeds file path name', metavar='FILENAME')
@click.option('--sifFileName', 'sifFileName', type=str, required=True, help='Name of the output file network SIF', metavar='FILENAME')
@click.option('--top', 'top', type=int, default=10, show_default=True,
help='Top number of results to write into output file')
@click.option('-o', '--outputPath', 'outputPath', type=click.Path(), default='OutputResults', show_default=True,
help='Output folder name to save results')
def multiXrank(chemicalsFile, CTD_file, targetGenesFile, directAssociation, nbPub, configPath,
networksPath, seedsFileName, outputPath, sifFileName, top):
"""
Perform a Random Walk with Restart analysis using multiXrank with gene and disease multilayer networks.
"""
# Parameters
outputPath = os.path.join(outputPath, 'OutputMultiXRankResults')
featuresDict = {}
nodesList = []
# Check if outputPath exists and create it if it does not
if not os.path.exists(outputPath):
os.makedirs(outputPath, exist_ok=True)
# Seeds initiation
if chemicalsFile:
# Analysis from factor list
featuresDict = CTD.targetGenesExtraction(chemicalsFile=chemicalsFile, directAssociations=directAssociation,
outputPath=outputPath, nbPub=nbPub)
if targetGenesFile:
# Analysis from gene list
featuresDict['genesList'] = CTD.readFeaturesFile(featuresFile=targetGenesFile)
if CTD_file:
# Analysis from CTD file
featuresDict = CTD.readCTDFile(CTDFile=CTD_file, nbPub=nbPub, outputPath=outputPath)
# Extract nodes from multilayer
with alive_bar(title='Extract nodes from multilayer', theme='musical') as bar:
for root, dirs, files in os.walk(networksPath + '/multiplex'):
for filename in files:
with open(root + '/' + filename, 'r') as networkFileHandler:
for line in networkFileHandler:
nodes = line.strip().split('\t')
for n in nodes:
if n not in nodesList:
nodesList.append(n)
bar()
# Seeds missing from the network are filtered out below when building each seed list
# Run RWR
for factor in featuresDict:
print('\tRandom walk analysis for : ' + factor)
# Create output folder
analysisOutputPath = outputPath + '/RWR_' + factor
# If the folder already exists, pick a new name so it is not overwritten
n = 1
while os.path.exists(analysisOutputPath):
analysisOutputPath = outputPath + '/RWR_' + factor + '_' + str(n)
n = n + 1
# Check if analysisOutputPath exists and create it if it does not
if not os.path.exists(analysisOutputPath):
os.makedirs(analysisOutputPath, exist_ok=True)
# Output names creation
sifPathName = os.path.join(analysisOutputPath, sifFileName)
# Write gene list into seed file
seedList = []
for gene in featuresDict[factor]:
if gene in nodesList:
seedList.append(gene)
with open(seedsFileName, 'w') as seedFileHandler:
seedFileHandler.write('\n'.join(seedList))
seedFileHandler.write('\n')
print('Number of seeds : ' + str(len(seedList)))
# Run multiXrank
shutil.copyfile(seedsFileName, analysisOutputPath + '/' + os.path.basename(seedsFileName))
shutil.copyfile(configPath, analysisOutputPath + '/' + os.path.basename(configPath))
methods.RWR(configPath=configPath, networksPath=networksPath, outputPath=analysisOutputPath,
sifPathName=sifPathName, top=top)
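# Illustrative usage (paths below are placeholders):
#   odamnet multixrank -c chemicals.txt --configPath config.yml --networksPath networks --seedsFile seeds.txt --sifFileName result.sif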
@main.command('networkCreation', short_help='Network and its bipartite creation', context_settings=CONTEXT_SETTINGS)
@click.option('--networksPath', 'networksPath', type=click.Path(), required=True,
help='Output path to save the network')
@click.option('--networksName', 'networksName', type=str, default='WP_RareDiseasesNetwork.gr', show_default=True,
metavar='FILENAME', help='Network output name')
@click.option('--bipartitePath', 'bipartitePath', type=click.Path(), required=True,
help='Output path to save the bipartite')
@click.option('--bipartiteName', 'bipartiteName', type=str, default='Bipartite_WP_RareDiseases_geneSymbols.gr',
show_default=True, metavar='FILENAME', help='Bipartite output name')
@click.option('--GMT', 'pathOfInterestGMT', type=click.File(),
help='Pathways of interest in GMT like format (e.g. from WP request).')
@click.option('-o', '--outputPath', 'outputPath', type=click.Path(), default='OutputResults', show_default=True,
help='Output path name (for complementary output files)')
def createNetworkFiles(pathOfInterestGMT, networksPath, networksName, bipartitePath, bipartiteName, outputPath):
"""
Create a network (GR format) from a WikiPathways request or from pathways of interest given in a GMT file.
"""
# Parameters
outputPath = os.path.join(outputPath, 'OutputCreateNetwork')
networkFileName = networksPath + '/' + networksName
bipartiteFileName = bipartitePath + '/' + bipartiteName
# Check if outputPath exists and create it if it does not
if not os.path.exists(outputPath):
os.makedirs(outputPath, exist_ok=True)
# Check if networksPath exists and create it if it does not
if not os.path.exists(networksPath):
os.makedirs(networksPath, exist_ok=True)
# Check if bipartitePath exists and create it if it does not
if not os.path.exists(bipartitePath):
os.makedirs(bipartitePath, exist_ok=True)
# Extract pathways of interest
if pathOfInterestGMT:
# From file
pathOfInterestGenesDict, pathOfInterestNamesDict, pathwaysOfInterestList = WP.readGMTFile(GMTFile=pathOfInterestGMT)
else:
# From request
pathOfInterestGenesDict, pathOfInterestNamesDict, pathwayOfInterestList = WP.rareDiseasesWPrequest(outputPath=outputPath)
# Create network and bipartite
methods.createNetworkandBipartiteFiles(bipartiteName=bipartiteFileName,
networkName=networkFileName,
pathOfInterestGenesDict=pathOfInterestGenesDict)
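# Illustrative usage (paths below are placeholders):
#   odamnet networkCreation --networksPath networks --bipartitePath bipartites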
@main.command('networkDownloading', short_help='Download networks from NDEx', context_settings=CONTEXT_SETTINGS)
@click.option('--netUUID', 'networkUUID', type=str, help='NDEx network ID', required=True)
@click.option('--networkFile', 'networkFileName', type=str, metavar='FILENAME', required=True, help='Network file name')
@click.option('--simple', 'simple', type=bool, default=False, help='Remove interaction column and header')
def networkDownloading(networkUUID, networkFileName, simple):
"""
Download a network from NDEx using its network UUID.
Create a SIF (3 columns with header) or GR (2 columns without header) network file.
"""
# Check if the network file already exists
if os.path.exists(networkFileName):
print('\nNetwork file already exists. Rename or remove network file.')
exit()
# Extract network from NDEx website
methods.downloadNDExNetwork(networkUUID=networkUUID, outputFileName=networkFileName, simplify=simple)
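# Illustrative usage (UUID and file name below are placeholders):
#   odamnet networkDownloading --netUUID <NDEx-network-UUID> --networkFile network.sif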
if __name__ == '__main__':
main() | PypiClean |
/django-cms-fragments-0.0.5.tar.gz/django-cms-fragments-0.0.5/cms_fragments/static/acejs/theme-twilight.js | define("ace/theme/twilight",["require","exports","module","ace/lib/dom"],function(a,b,c){b.isDark=!0,b.cssClass="ace-twilight",b.cssText=".ace-twilight .ace_editor { border: 2px solid rgb(159, 159, 159);}.ace-twilight .ace_editor.ace_focus { border: 2px solid #327fbd;}.ace-twilight .ace_gutter { background: #e8e8e8; color: #333;}.ace-twilight .ace_print_margin { width: 1px; background: #e8e8e8;}.ace-twilight .ace_scroller { background-color: #141414;}.ace-twilight .ace_text-layer { cursor: text; color: #F8F8F8;}.ace-twilight .ace_cursor { border-left: 2px solid #A7A7A7;}.ace-twilight .ace_cursor.ace_overwrite { border-left: 0px; border-bottom: 1px solid #A7A7A7;} .ace-twilight .ace_marker-layer .ace_selection { background: rgba(221, 240, 255, 0.20);}.ace-twilight .ace_marker-layer .ace_step { background: rgb(198, 219, 174);}.ace-twilight .ace_marker-layer .ace_bracket { margin: -1px 0 0 -1px; border: 1px solid rgba(255, 255, 255, 0.25);}.ace-twilight .ace_marker-layer .ace_active_line { background: rgba(255, 255, 255, 0.031);}.ace-twilight .ace_marker-layer .ace_selected_word { border: 1px solid rgba(221, 240, 255, 0.20);} .ace-twilight .ace_invisible { color: rgba(255, 255, 255, 0.25);}.ace-twilight .ace_keyword { color:#CDA869;}.ace-twilight .ace_constant { color:#CF6A4C;}.ace-twilight .ace_invalid.ace_illegal { color:#F8F8F8;background-color:rgba(86, 45, 86, 0.75);}.ace-twilight .ace_invalid.ace_deprecated { text-decoration:underline;font-style:italic;color:#D2A8A1;}.ace-twilight .ace_support { color:#9B859D;}.ace-twilight .ace_fold { background-color: #AC885B; border-color: #F8F8F8;}.ace-twilight .ace_support.ace_function { color:#DAD085;}.ace-twilight .ace_string { color:#8F9D6A;}.ace-twilight .ace_string.ace_regexp { color:#E9C062;}.ace-twilight .ace_comment { font-style:italic;color:#5F5A60;}.ace-twilight .ace_variable { color:#7587A6;}.ace-twilight .ace_xml_pe { color:#494949;}.ace-twilight .ace_meta.ace_tag { color:#AC885B;}.ace-twilight .ace_entity.ace_name.ace_function { color:#AC885B;}.ace-twilight .ace_markup.ace_underline { text-decoration:underline;}.ace-twilight .ace_markup.ace_heading { color:#CF6A4C;}.ace-twilight .ace_markup.ace_list { color:#F9EE98;}";var d=a("../lib/dom");d.importCssString(b.cssText,b.cssClass)}) | PypiClean |
/django-divier-0.6.tar.gz/django-divier-0.6/django/views/generic/dates.py | import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.http import Http404
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from django.views.generic.base import View
from django.views.generic.detail import (
BaseDetailView, SingleObjectTemplateResponseMixin,
)
from django.views.generic.list import (
MultipleObjectMixin, MultipleObjectTemplateResponseMixin,
)
class YearMixin:
"""Mixin for views manipulating year-based data."""
year_format = '%Y'
year = None
def get_year_format(self):
"""
Get a year format string in strptime syntax to be used to parse the
year from url variables.
"""
return self.year_format
def get_year(self):
"""Return the year for which this view should display data."""
year = self.year
if year is None:
try:
year = self.kwargs['year']
except KeyError:
try:
year = self.request.GET['year']
except KeyError:
raise Http404(_("No year specified"))
return year
def get_next_year(self, date):
"""Get the next valid year."""
return _get_next_prev(self, date, is_previous=False, period='year')
def get_previous_year(self, date):
"""Get the previous valid year."""
return _get_next_prev(self, date, is_previous=True, period='year')
def _get_next_year(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
try:
return date.replace(year=date.year + 1, month=1, day=1)
except ValueError:
raise Http404(_("Date out of range"))
def _get_current_year(self, date):
"""Return the start date of the current interval."""
return date.replace(month=1, day=1)
class MonthMixin:
"""Mixin for views manipulating month-based data."""
month_format = '%b'
month = None
def get_month_format(self):
"""
Get a month format string in strptime syntax to be used to parse the
month from url variables.
"""
return self.month_format
def get_month(self):
"""Return the month for which this view should display data."""
month = self.month
if month is None:
try:
month = self.kwargs['month']
except KeyError:
try:
month = self.request.GET['month']
except KeyError:
raise Http404(_("No month specified"))
return month
def get_next_month(self, date):
"""Get the next valid month."""
return _get_next_prev(self, date, is_previous=False, period='month')
def get_previous_month(self, date):
"""Get the previous valid month."""
return _get_next_prev(self, date, is_previous=True, period='month')
def _get_next_month(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
if date.month == 12:
try:
return date.replace(year=date.year + 1, month=1, day=1)
except ValueError:
raise Http404(_("Date out of range"))
else:
return date.replace(month=date.month + 1, day=1)
def _get_current_month(self, date):
"""Return the start date of the previous interval."""
return date.replace(day=1)
class DayMixin:
"""Mixin for views manipulating day-based data."""
day_format = '%d'
day = None
def get_day_format(self):
"""
Get a day format string in strptime syntax to be used to parse the day
from url variables.
"""
return self.day_format
def get_day(self):
"""Return the day for which this view should display data."""
day = self.day
if day is None:
try:
day = self.kwargs['day']
except KeyError:
try:
day = self.request.GET['day']
except KeyError:
raise Http404(_("No day specified"))
return day
def get_next_day(self, date):
"""Get the next valid day."""
return _get_next_prev(self, date, is_previous=False, period='day')
def get_previous_day(self, date):
"""Get the previous valid day."""
return _get_next_prev(self, date, is_previous=True, period='day')
def _get_next_day(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
return date + datetime.timedelta(days=1)
def _get_current_day(self, date):
"""Return the start date of the current interval."""
return date
class WeekMixin:
"""Mixin for views manipulating week-based data."""
week_format = '%U'
week = None
def get_week_format(self):
"""
Get a week format string in strptime syntax to be used to parse the
week from url variables.
"""
return self.week_format
def get_week(self):
"""Return the week for which this view should display data."""
week = self.week
if week is None:
try:
week = self.kwargs['week']
except KeyError:
try:
week = self.request.GET['week']
except KeyError:
raise Http404(_("No week specified"))
return week
def get_next_week(self, date):
"""Get the next valid week."""
return _get_next_prev(self, date, is_previous=False, period='week')
def get_previous_week(self, date):
"""Get the previous valid week."""
return _get_next_prev(self, date, is_previous=True, period='week')
def _get_next_week(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
try:
return date + datetime.timedelta(days=7 - self._get_weekday(date))
except OverflowError:
raise Http404(_("Date out of range"))
def _get_current_week(self, date):
"""Return the start date of the current interval."""
return date - datetime.timedelta(self._get_weekday(date))
def _get_weekday(self, date):
"""
Return the weekday for a given date.
The first day according to the week format is 0 and the last day is 6.
"""
week_format = self.get_week_format()
if week_format == '%W': # week starts on Monday
return date.weekday()
elif week_format == '%U': # week starts on Sunday
return (date.weekday() + 1) % 7
else:
raise ValueError("unknown week format: %s" % week_format)
class DateMixin:
"""Mixin class for views manipulating date-based data."""
date_field = None
allow_future = False
def get_date_field(self):
"""Get the name of the date field to be used to filter by."""
if self.date_field is None:
raise ImproperlyConfigured("%s.date_field is required." % self.__class__.__name__)
return self.date_field
def get_allow_future(self):
"""
Return `True` if the view should be allowed to display objects from
the future.
"""
return self.allow_future
# Note: the following three methods only work in subclasses that also
# inherit SingleObjectMixin or MultipleObjectMixin.
@cached_property
def uses_datetime_field(self):
"""
Return `True` if the date field is a `DateTimeField` and `False`
if it's a `DateField`.
"""
model = self.get_queryset().model if self.model is None else self.model
field = model._meta.get_field(self.get_date_field())
return isinstance(field, models.DateTimeField)
def _make_date_lookup_arg(self, value):
"""
Convert a date into a datetime when the date field is a DateTimeField.
When time zone support is enabled, `date` is assumed to be in the
current time zone, so that displayed items are consistent with the URL.
"""
if self.uses_datetime_field:
value = datetime.datetime.combine(value, datetime.time.min)
if settings.USE_TZ:
value = timezone.make_aware(value)
return value
def _make_single_date_lookup(self, date):
"""
Get the lookup kwargs for filtering on a single date.
If the date field is a DateTimeField, we can't just filter on
date_field=date because that doesn't take the time into account.
"""
date_field = self.get_date_field()
if self.uses_datetime_field:
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(date + datetime.timedelta(days=1))
return {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
else:
# Skip self._make_date_lookup_arg, it's a no-op in this branch.
return {date_field: date}
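# Example (illustrative values): with date_field='pub_date' on a DateTimeField
# and date = 2024-07-15, _make_single_date_lookup() returns
# {'pub_date__gte': <2024-07-15 00:00>, 'pub_date__lt': <2024-07-16 00:00>},
# i.e. a half-open interval covering the whole day.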
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
"""Abstract base class for date-based views displaying a list of objects."""
allow_empty = False
date_list_period = 'year'
def get(self, request, *args, **kwargs):
self.date_list, self.object_list, extra_context = self.get_dated_items()
context = self.get_context_data(
object_list=self.object_list,
date_list=self.date_list,
**extra_context
)
return self.render_to_response(context)
def get_dated_items(self):
"""Obtain the list of dates and items."""
raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')
def get_ordering(self):
"""
Return the field or fields to use for ordering the queryset; use the
date field by default.
"""
return '-%s' % self.get_date_field() if self.ordering is None else self.ordering
def get_dated_queryset(self, **lookup):
"""
Get a queryset properly filtered according to `allow_future` and any
extra lookup kwargs.
"""
qs = self.get_queryset().filter(**lookup)
date_field = self.get_date_field()
allow_future = self.get_allow_future()
allow_empty = self.get_allow_empty()
paginate_by = self.get_paginate_by(qs)
if not allow_future:
now = timezone.now() if self.uses_datetime_field else timezone_today()
qs = qs.filter(**{'%s__lte' % date_field: now})
if not allow_empty:
# When pagination is enabled, it's better to do a cheap query
# than to load the unpaginated queryset in memory.
is_empty = not qs if paginate_by is None else not qs.exists()
if is_empty:
raise Http404(_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
})
return qs
def get_date_list_period(self):
"""
Get the aggregation period for the list of dates: 'year', 'month', or
'day'.
"""
return self.date_list_period
def get_date_list(self, queryset, date_type=None, ordering='ASC'):
"""
Get a date list by calling `queryset.dates/datetimes()`, checking
along the way for empty lists that aren't allowed.
"""
date_field = self.get_date_field()
allow_empty = self.get_allow_empty()
if date_type is None:
date_type = self.get_date_list_period()
if self.uses_datetime_field:
date_list = queryset.datetimes(date_field, date_type, ordering)
else:
date_list = queryset.dates(date_field, date_type, ordering)
if date_list is not None and not date_list and not allow_empty:
raise Http404(
_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': queryset.model._meta.verbose_name_plural,
}
)
return date_list
class BaseArchiveIndexView(BaseDateListView):
"""
Base class for archives of date-based items. Requires a response mixin.
"""
context_object_name = 'latest'
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
qs = self.get_dated_queryset()
date_list = self.get_date_list(qs, ordering='DESC')
if not date_list:
qs = qs.none()
return (date_list, qs, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
"""Top-level archive of date-based items."""
template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
"""List of objects published in a given year."""
date_list_period = 'month'
make_object_list = False
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_year(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
if not self.get_make_object_list():
# We need this to be a queryset since parent classes introspect it
# to find information about the model.
qs = qs.none()
return (date_list, qs, {
'year': date,
'next_year': self.get_next_year(date),
'previous_year': self.get_previous_year(date),
})
def get_make_object_list(self):
"""
Return `True` if this view should contain the full list of objects in
the given year.
"""
return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
"""List of objects published in a given year."""
template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
"""List of objects published in a given month."""
date_list_period = 'day'
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
month = self.get_month()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_month(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
return (date_list, qs, {
'month': date,
'next_month': self.get_next_month(date),
'previous_month': self.get_previous_month(date),
})
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
"""List of objects published in a given month."""
template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
"""List of objects published in a given week."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
week = self.get_week()
date_field = self.get_date_field()
week_format = self.get_week_format()
week_choices = {'%W': '1', '%U': '0'}
try:
week_start = week_choices[week_format]
except KeyError:
raise ValueError('Unknown week format %r. Choices are: %s' % (
week_format,
', '.join(sorted(week_choices)),
))
date = _date_from_string(year, self.get_year_format(),
week_start, '%w',
week, week_format)
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_week(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'week': date,
'next_week': self.get_next_week(date),
'previous_week': self.get_previous_week(date),
})
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
"""List of objects published in a given week."""
template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
"""List of objects published on a given day."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
return self._get_dated_items(date)
def _get_dated_items(self, date):
"""
Do the actual heavy lifting of getting the dated items; this accepts a
date object so that TodayArchiveView can be trivial.
"""
lookup_kwargs = self._make_single_date_lookup(date)
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'day': date,
'previous_day': self.get_previous_day(date),
'next_day': self.get_next_day(date),
'previous_month': self.get_previous_month(date),
'next_month': self.get_next_month(date)
})
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
"""List of objects published on a given day."""
template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
"""List of objects published today."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
return self._get_dated_items(datetime.date.today())
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
"""List of objects published today."""
template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
def get_object(self, queryset=None):
"""Get the object this request displays."""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
# Use a custom queryset if provided
qs = self.get_queryset() if queryset is None else queryset
if not self.get_allow_future() and date > datetime.date.today():
raise Http404(_(
"Future %(verbose_name_plural)s not available because "
"%(class_name)s.allow_future is False."
) % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
'class_name': self.__class__.__name__,
})
# Filter down a queryset from self.queryset using the date from the
# URL. This'll get passed as the queryset to DetailView.get_object,
# which'll handle the 404
lookup_kwargs = self._make_single_date_lookup(date)
qs = qs.filter(**lookup_kwargs)
return super().get_object(queryset=qs)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
template_name_suffix = '_detail'
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'):
"""
Get a datetime.date object given a format string and a year, month, and day
(only year is mandatory). Raise a 404 for an invalid date.
"""
format = year_format + delim + month_format + delim + day_format
datestr = str(year) + delim + str(month) + delim + str(day)
try:
return datetime.datetime.strptime(datestr, format).date()
except ValueError:
raise Http404(_('Invalid date string “%(datestr)s” given format “%(format)s”') % {
'datestr': datestr,
'format': format,
})
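# Example (illustrative values): _date_from_string('2024', '%Y', 'jul', '%b', '15', '%d')
# joins the parts into '2024__jul__15', parses it with '%Y__%b__%d' and returns
# datetime.date(2024, 7, 15); an invalid date raises Http404 instead of ValueError.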
def _get_next_prev(generic_view, date, is_previous, period):
"""
Get the next or the previous valid date. The idea is to allow links on
month/day views to never be 404s by never providing a date that'll be
invalid for the given view.
This is a bit complicated since it handles different intervals of time,
hence the coupling to generic_view.
However in essence the logic comes down to:
* If allow_empty and allow_future are both true, this is easy: just
return the naive result (just the next/previous day/week/month,
regardless of object existence.)
* If allow_empty is true, allow_future is false, and the naive result
isn't in the future, then return it; otherwise return None.
* If allow_empty is false and allow_future is true, return the next
date *that contains a valid object*, even if it's in the future. If
there are no next objects, return None.
* If allow_empty is false and allow_future is false, return the next
date that contains a valid object. If that date is in the future, or
if there are no next objects, return None.
"""
date_field = generic_view.get_date_field()
allow_empty = generic_view.get_allow_empty()
allow_future = generic_view.get_allow_future()
get_current = getattr(generic_view, '_get_current_%s' % period)
get_next = getattr(generic_view, '_get_next_%s' % period)
# Bounds of the current interval
start, end = get_current(date), get_next(date)
# If allow_empty is True, the naive result will be valid
if allow_empty:
if is_previous:
result = get_current(start - datetime.timedelta(days=1))
else:
result = end
if allow_future or result <= timezone_today():
return result
else:
return None
# Otherwise, we'll need to go to the database to look for an object
# whose date_field is at least (greater than/less than) the given
# naive result
else:
# Construct a lookup and an ordering depending on whether we're doing
# a previous date or a next date lookup.
if is_previous:
lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
ordering = '-%s' % date_field
else:
lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
ordering = date_field
# Filter out objects in the future if appropriate.
if not allow_future:
# Fortunately, to match the implementation of allow_future,
# we need __lte, which doesn't conflict with __lt above.
if generic_view.uses_datetime_field:
now = timezone.now()
else:
now = timezone_today()
lookup['%s__lte' % date_field] = now
qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
# Snag the first object from the queryset; if it doesn't exist that
# means there's no next/previous link available.
try:
result = getattr(qs[0], date_field)
except IndexError:
return None
# Convert datetimes to dates in the current time zone.
if generic_view.uses_datetime_field:
if settings.USE_TZ:
result = timezone.localtime(result)
result = result.date()
# Return the first day of the period.
return get_current(result)
def timezone_today():
"""Return the current date in the current time zone."""
if settings.USE_TZ:
return timezone.localdate()
else:
return datetime.date.today() | PypiClean |
/grug_test-0.0.7.tar.gz/grug_test-0.0.7/grug_test/__dependencies__/__sources__/informative_iterator/README.md | # What is this?
I wanted a light, clean, configurable alternative to tqdm. So I made one, complete with animations.
<img width="1724" alt="Screen Shot 2022-07-03 at 8 30 01 PM" src="https://user-images.githubusercontent.com/17692058/177068705-9efc56d3-4300-4982-a07a-6db9aa61df8d.png">
# How do I use this?
`pip install informative_iterator`
```python
from informative_iterator import ProgressBar
import time
#
# example 1
#
for progress, each_element in ProgressBar([ 1, 2, 3, "any iterable thing" ]):
time.sleep(0.002)
# example output:
# [>..................................] 0.00% | 0/1000 | started: 13:18:32 | eta: ________ | remaining: ________ |
# example output:
# [==============>....................] 42.50% | 425/1000 | started: 13:18:32 | eta: 13:18:44 | remaining: 0:07sec |
# example output:
# [================================>..] 93.10% | 931/1000 | started: 13:18:32 | eta: 13:18:44 | remaining: 0:01sec |
# example output:
# Done in 0:12sec at 13:18:44
#
# example 2
#
import random
def custom_iterable():
while True:
yield random.random()
for progress, each in ProgressBar(custom_iterable(), iterations=10000):
time.sleep(0.002)
#
# example 3
#
for progress, each in ProgressBar(10000):
time.sleep(0.002)
# index, just like using enumerate()
print('progress.index = ', progress.index)
# percent with two decimal places. ex: 99.5
print('progress.percent = ', progress.percent)
# the output of time.time() for this iteration (seconds since unix epoch)
print('progress.time = ', progress.time)
# boolean (updates dont always get printed every iteration)
print('progress.updated = ', progress.updated)
# int, doesn't change with each iteration: its the size of the iterator
print('progress.total_iterations = ', progress.total_iterations)
#
# example 4
#
# update ~30 times a second for smooth looking progress
for progress, each in ProgressBar(10000, seconds_per_print=0.03):
time.sleep(0.002)
#
# example 5
#
# have all progress bars default to trying to update update ~30 times a second
ProgressBar.configure(
seconds_per_print=0.03,
)
for progress, each in ProgressBar(10000):
time.sleep(0.002)
#
# example 6
#
ProgressBar.configure(
# all the options (these exist as arguments for ProgressBar as well)
layout=[ 'title', 'bar', 'percent', 'spacer', 'fraction', 'spacer', 'start_time', 'spacer', 'end_time', 'spacer', 'remaining_time', 'spacer', ],
spacer=" | ",
minmal=False, # False => defaults to normal layout
minimal_layout=[ 'title', 'bar', 'spacer', 'end_time', 'spacer', ],
inline=True,
disable_logging=False, # turn off all the output
progress_bar_size=35, # 35 characters
seconds_per_print=1, # print every second
percent_per_print=10, # And print every 10% of progress
)
for progress, each in ProgressBar(10000):
time.sleep(0.002)
```
| PypiClean |
/microsoft-bing-visualsearch-1.0.0.tar.gz/microsoft-bing-visualsearch-1.0.0/visual_search_client/aio/_configuration.py |
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
VERSION = "unknown"
class VisualSearchClientConfiguration(Configuration):
"""Configuration for VisualSearchClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
"""
def __init__(
self,
credential: "AsyncTokenCredential",
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
super(VisualSearchClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.credential_scopes = kwargs.pop('credential_scopes', [])
kwargs.setdefault('sdk_moniker', 'visualsearchclient/{}'.format(VERSION))
self._configure(**kwargs)
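# Illustrative usage (the credential scope below is a placeholder; pass the
# scopes required by your service, or an explicit authentication_policy):
#   from azure.identity.aio import DefaultAzureCredential
#   config = VisualSearchClientConfiguration(
#       DefaultAzureCredential(),
#       credential_scopes=["https://cognitiveservices.azure.com/.default"],
#   )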
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if not self.credential_scopes and not self.authentication_policy:
raise ValueError("You must provide either credential_scopes or authentication_policy as kwargs")
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) | PypiClean |
/robotframework_robocop-2.2.0-py3-none-any.whl/robocop/checkers/naming.py | import re
from collections import defaultdict
from pathlib import Path
from robot.api import Token
from robot.parsing.model.blocks import Keyword
from robot.parsing.model.statements import Arguments, KeywordCall
from robocop.checkers import VisitorChecker
from robocop.rules import Rule, RuleParam, RuleSeverity
from robocop.utils import (
ROBOT_VERSION,
find_robot_vars,
keyword_col,
normalize_robot_name,
normalize_robot_var_name,
pattern_type,
remove_robot_vars,
token_col,
)
rules = {
"0301": Rule(
RuleParam(
name="pattern",
default=re.compile(r"[\.\?]"),
converter=pattern_type,
desc="pattern defining characters (not) allowed in a name",
),
rule_id="0301",
name="not-allowed-char-in-name",
msg="Not allowed character '{{ character }}' found in {{ block_name }} name",
severity=RuleSeverity.WARNING,
docs="""
Reports not allowed pattern found in Test Case or Keyword names. By default it's dot (`.`). You can
configure what patterns are reported by calling::
robocop --configure not-allowed-char-in-name:pattern:regex_pattern
`regex_pattern` should define regex pattern not allowed in names. For example `[@\[]` pattern
reports any occurrence of `@[` characters.
""",
),
"0302": Rule(
RuleParam(
name="convention",
default="each_word_capitalized",
converter=str,
desc="possible values: 'each_word_capitalized' (default) or 'first_word_capitalized'",
),
RuleParam(
name="pattern",
default=re.compile(r""),
converter=pattern_type,
desc="pattern for accepted words in keyword",
),
rule_id="0302",
name="wrong-case-in-keyword-name",
msg="Keyword name '{{ keyword_name }}' does not follow case convention",
severity=RuleSeverity.WARNING,
),
"0303": Rule(
rule_id="0303",
name="keyword-name-is-reserved-word",
msg="'{{ keyword_name }}' is a reserved keyword{{ error_msg }}",
severity=RuleSeverity.ERROR,
docs="""
Do not use reserved names for keyword names. Following names are reserved:
- IF
- ELSE IF
- ELSE
- FOR
- END
- WHILE
- CONTINUE
- RETURN
- TRY
- EXCEPT
""",
),
"0305": Rule(
rule_id="0305",
name="underscore-in-keyword-name",
msg="Underscores in keyword name '{{ keyword_name }}' can be replaced with spaces",
severity=RuleSeverity.WARNING,
docs="""
Example::
# bad
keyword_with_underscores
# good
Keyword Without Underscores
""",
),
"0306": Rule(
rule_id="0306",
name="setting-name-not-in-title-case",
msg="Setting name '{{ setting_name }}' should use title or upper case",
severity=RuleSeverity.WARNING,
docs="""
Good::
*** Settings ***
Resource file.resource
*** Test Cases ***
Test
[DOCUMENTATION] Some documentation
Step
Bad::
*** Settings ***
resource file.resource
*** Test Cases ***
Test
[documentation] Some documentation
Step
""",
),
"0307": Rule(
rule_id="0307",
name="section-name-invalid",
msg="Section name should be in format '{{ section_title_case }}' or '{{ section_upper_case }}'",
severity=RuleSeverity.WARNING,
docs="""
Good::
*** SETTINGS ***
*** Keywords ***
Bad::
*** keywords ***
""",
),
"0308": Rule(
rule_id="0308",
name="not-capitalized-test-case-title",
msg="Test case '{{ test_name }}' title should start with capital letter",
severity=RuleSeverity.WARNING,
),
"0309": Rule(
rule_id="0309",
name="section-variable-not-uppercase",
msg="Section variable '{{ variable_name }}' name should be uppercase",
severity=RuleSeverity.WARNING,
),
"0310": Rule(
rule_id="0310",
name="non-local-variables-should-be-uppercase",
msg="Test, suite and global variables should be uppercase",
severity=RuleSeverity.WARNING,
),
"0311": Rule(
rule_id="0311",
name="else-not-upper-case",
msg="ELSE and ELSE IF should be upper case",
severity=RuleSeverity.ERROR,
),
"0312": Rule(
rule_id="0312",
name="keyword-name-is-empty",
msg="Keyword name should not be empty",
severity=RuleSeverity.ERROR,
),
"0313": Rule(
rule_id="0313",
name="test-case-name-is-empty",
msg="Test case name should not be empty",
severity=RuleSeverity.ERROR,
),
"0314": Rule(
rule_id="0314",
name="empty-library-alias",
msg="Library alias should not be empty",
severity=RuleSeverity.ERROR,
docs="""
Use non-empty name when using library import with alias.
Good::
*** Settings ***
Library CustomLibrary AS AnotherName
Bad::
*** Settings ***
Library CustomLibrary AS
""",
),
"0315": Rule(
rule_id="0315",
name="duplicated-library-alias",
msg="Library alias should not be the same as original name",
severity=RuleSeverity.WARNING,
docs="""
Example of rule violation::
*** Settings ***
Library CustomLibrary AS CustomLibrary # same as library name
Library CustomLibrary AS Custom Library # same as library name (spaces are ignored)
""",
),
"0316": Rule(
rule_id="0316",
name="possible-variable-overwriting",
msg="Variable '{{ variable_name }}' may overwrite similar variable inside '{{ block_name }}' {{ block_type }}. "
"Note that variables are case-insensitive, and also spaces and underscores are ignored.",
severity=RuleSeverity.INFO,
),
"0317": Rule(
rule_id="0317",
name="hyphen-in-variable-name",
msg="Use underscore in variable name '{{ variable_name }}' instead of hyphens to "
"avoid treating them like minus sign",
severity=RuleSeverity.INFO,
docs="""
Robot Framework supports evaluation of Python code inside ${ } brackets. For example::
${var2} Set Variable ${${var}-${var2}}
That's why there is possibility that hyphen in name is not recognized as part of name but as minus sign.
Better to use underscore (if it's intended)::
${var2} Set Variable ${ ${var}_${var2}}
""",
),
"0318": Rule(
rule_id="0318",
name="bdd-without-keyword-call",
msg="BDD reserved keyword '{{ keyword_name }}' not followed by any keyword{{ error_msg }}",
severity=RuleSeverity.WARNING,
docs="""
When using BDD reserved keywords (such as `GIVEN`, `WHEN`, `AND`, `BUT` or `THEN`) use them together with
name of the keyword to run.
Good::
Given Setup Is Complete
When User Log In
Then User Should See Welcome Page
Bad::
Given
When User Log In
Then User Should See Welcome Page
Since those words are used for BDD style it's also recommended not to use them within the keyword name.
""",
),
"0319": Rule(
rule_id="0319",
name="deprecated-statement",
msg="'{{ statement_name }}' is deprecated since Robot Framework version "
"{{ version }}, use '{{ alternative }}' instead",
severity=RuleSeverity.WARNING,
),
"0320": Rule(
RuleParam(
name="pattern",
default=re.compile(r"[\.\?]"),
converter=pattern_type,
desc="pattern defining characters (not) allowed in a name",
),
rule_id="0320",
name="not-allowed-char-in-filename",
msg="Not allowed character '{{ character }}' found in {{ block_name }} name",
severity=RuleSeverity.WARNING,
docs="""
Reports not allowed pattern found in Suite names. By default it's dot (`.`). You can
configure what characters are reported by calling::
robocop --configure not-allowed-char-in-filename:pattern:regex_pattern
`regex_pattern` should define regex pattern for characters not allowed in names. For example `[@\[]` pattern
reports any occurrence of `@[` characters.
""",
),
}
class InvalidCharactersInNameChecker(VisitorChecker):
"""Checker for invalid characters in suite, test case or keyword name."""
reports = (
"not-allowed-char-in-filename",
"not-allowed-char-in-name",
)
def visit_File(self, node):
source = node.source if node.source else self.source
if source:
suite_name = Path(source).stem
if "__init__" in suite_name:
suite_name = Path(source).parent.name
for iter in self.param("not-allowed-char-in-filename", "pattern").finditer(suite_name):
self.report(
"not-allowed-char-in-filename",
character=iter.group(),
block_name="suite",
node=node,
col=node.col_offset + iter.start(0) + 1,
)
super().visit_File(node)
def check_if_pattern_in_node_name(self, node, name_of_node, is_keyword=False):
""" Search if regex pattern found from node name.
Skips embedded variables from keyword name
"""
node_name = node.name
variables = find_robot_vars(node_name) if is_keyword else []
start_pos = 0
for variable in variables:
# Loop and skip variables:
# Search pattern from start_pos to variable starting position
# example: `Keyword With ${em.bedded} Two ${second.Argument} Argument`
# is split into:
# 1. `Keyword With `
# 2. ` Two `
# 3. ` Argument` - the last part is searched in the finditer loop below
tmp_node_name = node_name[start_pos:variable[0]]
match = self.param("not-allowed-char-in-name", "pattern").search(tmp_node_name)
if match:
self.report(
"not-allowed-char-in-name",
character=match.group(),
block_name=f"'{node_name}' {name_of_node}",
node=node,
col=node.col_offset + match.start(0) + 1,
)
start_pos = variable[1]
for iter in self.param("not-allowed-char-in-name", "pattern").finditer(node_name, start_pos):
self.report(
"not-allowed-char-in-name",
character=iter.group(),
block_name=f"'{node.name}' {name_of_node}",
node=node,
col=node.col_offset + iter.start(0) + 1,
)
def visit_TestCaseName(self, node): # noqa
self.check_if_pattern_in_node_name(node, "test case")
def visit_KeywordName(self, node): # noqa
self.check_if_pattern_in_node_name(node, "keyword", is_keyword=True)
def reserved_error_msg(name, reserved_type):
return (
f". It must be in uppercase ({name.upper()}) when used as a marker with {reserved_type}. "
f"Each marker should have minimum of 2 spaces as separator."
)
def uppercase_error_msg(name):
return f". It must be in uppercase ({name.upper()}) when used as a statement"
class KeywordNamingChecker(VisitorChecker):
"""Checker for keyword naming violations."""
reports = (
"wrong-case-in-keyword-name",
"keyword-name-is-reserved-word",
"underscore-in-keyword-name",
"else-not-upper-case",
"keyword-name-is-empty",
"bdd-without-keyword-call",
)
reserved_words = {
3: {
"for": reserved_error_msg("for", "'FOR' loop"),
"end": reserved_error_msg("end", "'FOR' loop"),
},
4: {
"if": uppercase_error_msg("if"),
"else if": uppercase_error_msg("else if"),
"else": uppercase_error_msg("else"),
"for": reserved_error_msg("for", "'FOR' loop"),
"end": reserved_error_msg("end", "'FOR' or 'IF'"),
},
5: {
"if": uppercase_error_msg("if"),
"else if": uppercase_error_msg("else if"),
"else": uppercase_error_msg("else"),
"for": reserved_error_msg("for", "'FOR' loop"),
"end": reserved_error_msg("end", "'FOR', 'IF' or 'TRY EXCEPT'"),
"while": uppercase_error_msg("while"),
"continue": uppercase_error_msg("continue"),
"return": uppercase_error_msg("return"),
"try": reserved_error_msg("try", "'TRY EXCEPT'"),
"except": reserved_error_msg("except", "'TRY EXCEPT'"),
"finally": reserved_error_msg("finally", "'TRY EXCEPT'"),
},
}
else_statements = {"else", "else if"}
bdd = {"given", "when", "and", "but", "then"}
def __init__(self):
self.letter_pattern = re.compile(r"[^\w()-]|_", re.UNICODE)
self.inside_if_block = False
super().__init__()
def visit_Setup(self, node): # noqa
self.check_keyword_naming(node.name, node)
visit_TestTeardown = visit_SuiteTeardown = visit_Teardown = visit_TestSetup = visit_SuiteSetup = visit_Setup
def visit_Keyword(self, node): # noqa
if not node.name:
self.report("keyword-name-is-empty", node=node)
else:
self.check_keyword_naming(node.name, node)
self.generic_visit(node)
def visit_KeywordCall(self, node): # noqa
if self.inside_if_block and node.keyword and node.keyword.lower() in self.else_statements:
self.report("else-not-upper-case", node=node, col=keyword_col(node))
self.check_keyword_naming(node.keyword, node)
def visit_If(self, node): # noqa
self.inside_if_block = True
self.generic_visit(node)
self.inside_if_block = False
def check_keyword_naming(self, keyword_name, node): # noqa
if not keyword_name or keyword_name.lstrip().startswith("#"):
return
if keyword_name == r"/": # old for loop, / are interpreted as keywords
return
if (
isinstance(node, KeywordCall)
and normalize_robot_name(keyword_name, remove_prefix="builtin.") == "runkeywordif"
):
for token in node.data_tokens:
if (token.value.lower() in self.else_statements) and not token.value.isupper():
self.report(
"keyword-name-is-reserved-word",
keyword_name=token.value,
error_msg=reserved_error_msg(token.value, "'Run Keyword If'"),
node=node,
col=token.col_offset + 1,
)
elif self.check_if_keyword_is_reserved(keyword_name, node):
return
self.check_bdd_keywords(keyword_name, node)
normalized = remove_robot_vars(keyword_name)
normalized = self.param("wrong-case-in-keyword-name", "pattern").sub("", normalized)
normalized = normalized.split(".")[-1] # remove any imports ie ExternalLib.SubLib.Log -> Log
normalized = normalized.replace("'", "") # replace ' apostrophes
if "_" in normalized:
self.report("underscore-in-keyword-name", keyword_name=keyword_name, node=node)
words = self.letter_pattern.sub(" ", normalized).split(" ")
if self.param("wrong-case-in-keyword-name", "convention") == "first_word_capitalized":
words = words[:1]
if any(word[0].islower() for word in words if word):
self.report("wrong-case-in-keyword-name", keyword_name=keyword_name, node=node)
def check_bdd_keywords(self, keyword_name, node):
if keyword_name.lower() not in self.bdd or isinstance(node, Keyword):
return
arg = node.get_token(Token.ARGUMENT)
suffix = f". Use one space between: '{keyword_name.title()} {arg.value}'" if arg else ""
col = token_col(node, Token.NAME, Token.KEYWORD)
self.report("bdd-without-keyword-call", keyword_name=keyword_name, error_msg=suffix, node=node, col=col)
def check_if_keyword_is_reserved(self, keyword_name, node):
        # if there is a typo in the syntax, it is interpreted as a keyword
reserved = self.reserved_words[ROBOT_VERSION.major]
if keyword_name.lower() not in reserved:
return False
if keyword_name.lower() in self.else_statements and self.inside_if_block:
return False # handled by else-not-upper-case
error_msg = reserved[keyword_name.lower()]
col = keyword_col(node) if isinstance(node, KeywordCall) else keyword_col(node.header)
self.report(
"keyword-name-is-reserved-word",
keyword_name=keyword_name,
error_msg=error_msg,
node=node,
col=col,
)
return True
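# Illustrative sketch of the checks above (assumes default rule parameters):
#
#     "my keyword"  ->  wrong-case-in-keyword-name (words start lowercase)
#     "My_Keyword"  ->  underscore-in-keyword-name
#     "end"         ->  keyword-name-is-reserved-word (reserved on RF 3, 4 and 5)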
class SettingsNamingChecker(VisitorChecker):
"""Checker for settings and sections naming violations."""
reports = (
"setting-name-not-in-title-case",
"section-name-invalid",
"empty-library-alias",
"duplicated-library-alias",
)
def __init__(self):
self.section_name_pattern = re.compile(r"\*\*\*\s.+\s\*\*\*")
super().__init__()
def visit_SectionHeader(self, node): # noqa
name = node.data_tokens[0].value
if not self.section_name_pattern.match(name) or not (name.istitle() or name.isupper()):
valid_name = f"*** {node.name.title()} ***"
self.report(
"section-name-invalid", section_title_case=valid_name, section_upper_case=valid_name.upper(), node=node
)
def visit_Setup(self, node): # noqa
self.check_setting_name(node.data_tokens[0].value, node)
    visit_SuiteSetup = visit_TestSetup = visit_Teardown = visit_SuiteTeardown = visit_TestTeardown = visit_Setup
    visit_ForceTags = visit_DefaultTags = visit_ResourceImport = visit_VariablesImport = visit_Setup
    visit_Documentation = visit_Tags = visit_Timeout = visit_Template = visit_Arguments = visit_Return = visit_Setup
def visit_LibraryImport(self, node): # noqa
self.check_setting_name(node.data_tokens[0].value, node)
with_name = node.get_token(Token.WITH_NAME)
if with_name is None:
for arg in node.get_tokens(Token.ARGUMENT):
if arg.value and arg.value == "WITH NAME":
self.report("empty-library-alias", node=arg, col=arg.col_offset + 1)
else:
if node.alias.replace(" ", "") == node.name.replace(" ", ""): # New Name == NewName
name_token = node.get_tokens(Token.NAME)[-1]
self.report(
"duplicated-library-alias",
node=name_token,
col=name_token.col_offset + 1,
)
def check_setting_name(self, name, node):
if not (name.istitle() or name.isupper()):
self.report("setting-name-not-in-title-case", setting_name=name, node=node)
class TestCaseNamingChecker(VisitorChecker):
"""Checker for test case naming violations."""
reports = (
"not-capitalized-test-case-title",
"test-case-name-is-empty",
)
def visit_TestCase(self, node): # noqa
if not node.name:
self.report("test-case-name-is-empty", node=node)
elif not node.name[0].isupper():
self.report("not-capitalized-test-case-title", test_name=node.name, node=node)
class VariableNamingChecker(VisitorChecker):
"""Checker for variable naming violations."""
reports = (
"section-variable-not-uppercase",
"non-local-variables-should-be-uppercase",
"hyphen-in-variable-name",
)
def __init__(self):
self.set_variable_variants = {
"settaskvariable",
"settestvariable",
"setsuitevariable",
"setglobalvariable",
}
super().__init__()
def visit_VariableSection(self, node): # noqa
for child in node.body:
if not child.data_tokens:
continue
token = child.data_tokens[0]
if token.type == Token.VARIABLE and token.value and not token.value.isupper():
self.report(
"section-variable-not-uppercase",
variable_name=token.value,
lineno=token.lineno,
col=token.col_offset + 1,
)
def visit_KeywordCall(self, node): # noqa
for token in node.get_tokens(Token.ASSIGN):
if "-" in token.value:
self.report(
"hyphen-in-variable-name",
variable_name=token.value,
lineno=token.lineno,
col=token.col_offset + 1,
)
if not node.keyword:
return
if normalize_robot_name(node.keyword, remove_prefix="builtin.") in self.set_variable_variants:
if len(node.data_tokens) < 2:
return
token = node.data_tokens[1]
if token.type == Token.ARGUMENT and not token.value.isupper():
self.report(
"non-local-variables-should-be-uppercase",
node=node,
col=token.col_offset + 1,
)
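# Illustrative sketch of the variable rules above (Robot Framework snippets,
# values hypothetical):
#
#     *** Variables ***
#     ${lowercase}    value                       # section-variable-not-uppercase
#
#     *** Test Cases ***
#     Example
#         ${my-var} =    Some Keyword             # hyphen-in-variable-name
#         Set Suite Variable    ${small}    1     # non-local-variables-should-be-uppercase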
class SimilarVariableChecker(VisitorChecker):
"""Checker for finding same variables with similar names."""
reports = ("possible-variable-overwriting",)
def __init__(self):
self.variables = defaultdict(set)
self.parent_name = ""
self.parent_type = ""
super().__init__()
def visit_Keyword(self, node): # noqa
self.variables = defaultdict(set)
self.parent_name = node.name
self.parent_type = type(node).__name__
self.visit_vars_and_find_similar(node)
self.generic_visit(node)
visit_TestCase = visit_Keyword
def visit_KeywordCall(self, node): # noqa
tokens = node.get_tokens(Token.ASSIGN)
self.find_similar_variables(tokens, node)
def visit_For(self, node): # noqa
for var in node.variables:
self.variables[normalize_robot_var_name(var)].add(var)
self.generic_visit(node)
visit_ForLoop = visit_For
def visit_vars_and_find_similar(self, node):
"""
Creates a dictionary `variables` with normalized variable name as a key
and ads a list of all detected variations of this variable in the node as a value,
then it checks if similar variable was found.
"""
for child in node.body:
# read arguments from Test Case or Keyword
if isinstance(child, Arguments):
for token in child.get_tokens(Token.ARGUMENT):
self.variables[normalize_robot_var_name(token.value)].add(token.value)
def find_similar_variables(self, tokens, node):
for token in tokens:
normalized_token = normalize_robot_var_name(token.value)
if normalized_token in self.variables and token.value not in self.variables[normalized_token]:
self.report(
"possible-variable-overwriting",
variable_name=token.value,
block_name=self.parent_name,
block_type=self.parent_type,
node=node,
lineno=token.lineno,
col=token.col_offset,
)
self.variables[normalized_token].add(token.value)
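# Illustrative sketch (assuming normalize_robot_var_name lowercases names and
# strips spaces/underscores): assigning ${my_var} in a block that earlier
# assigned ${MyVar} normalizes both to the same key, so the second assignment
# is reported as possible-variable-overwriting.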
class DeprecatedStatementChecker(VisitorChecker):
"""Checker for deprecated statements."""
reports = ("deprecated-statement",)
deprecated_keywords = {
5: {
"runkeywordunless": "IF",
"runkeywordif": "IF",
"exitforloop": "BREAK",
"exitforloopif": "IF and BREAK",
"continueforloop": "CONTINUE",
"continueforloopif": "IF and CONTINUE",
"returnfromkeyword": "RETURN",
"returnfromkeywordif": "IF and RETURN",
},
}
def visit_KeywordCall(self, node): # noqa
self.check_if_keyword_is_deprecated(node.keyword, node)
def visit_SuiteSetup(self, node): # noqa
self.check_if_keyword_is_deprecated(node.name, node)
visit_TestSetup = visit_Setup = visit_SuiteTeardown = visit_TestTeardown = visit_Teardown = visit_SuiteSetup
def visit_Return(self, node): # noqa
"""For RETURN use visit_ReturnStatement - visit_Return will most likely visit RETURN in the future"""
if ROBOT_VERSION.major < 5:
return
self.report(
"deprecated-statement",
statement_name="[Return]",
alternative="RETURN",
node=node,
col=token_col(node, Token.RETURN),
version="5.*",
)
def check_if_keyword_is_deprecated(self, keyword_name, node):
normalized_keyword_name = normalize_robot_name(keyword_name, remove_prefix="builtin.")
deprecated_statements = self.deprecated_keywords.get(ROBOT_VERSION.major, {})
if normalized_keyword_name in deprecated_statements:
alternative = deprecated_statements[normalized_keyword_name]
col = token_col(node, Token.NAME, Token.KEYWORD)
self.report(
"deprecated-statement",
statement_name=keyword_name,
alternative=alternative,
node=node,
col=col,
version=f"{ROBOT_VERSION.major}.*",
            )
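
# Illustrative sketch of the deprecation lookup above (assuming
# normalize_robot_name lowercases, strips spaces and removes an optional
# "builtin." prefix):
#
#     normalize_robot_name("BuiltIn.Run Keyword If", remove_prefix="builtin.")
#     # -> "runkeywordif", reported as deprecated with alternative "IF" on RF 5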
# File: denorm/migrations/0001_squashed_0012_alter_dirtyinstance_object_id.py
# Package: django_denorm_iplweb-1.1.1-py3-none-any.whl
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
replaces = [
("denorm", "0001_initial"),
("denorm", "0002_dirtyinstance_func_name"),
("denorm", "0003_auto_20211002_1955"),
("denorm", "0004_alter_dirtyinstance_success"),
("denorm", "0005_dirtyinstance_created_on"),
("denorm", "0006_auto_20211003_0346"),
("denorm", "0007_auto_created_on_now"),
("denorm", "0008_alter_dirtyinstance_object_id"),
("denorm", "0009_alter_dirtyinstance_object_id"),
("denorm", "0010_alter_dirtyinstance_object_id"),
("denorm", "0011_alter_dirtyinstance_object_id"),
("denorm", "0012_alter_dirtyinstance_object_id"),
]
initial = True
dependencies = [
("contenttypes", "0001_initial"),
]
operations = [
migrations.CreateModel(
name="DirtyInstance",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("object_id", models.TextField(blank=True, null=True)),
(
"content_type",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="contenttypes.contenttype",
),
),
],
),
migrations.RunSQL(
sql="\n -- Notify when record get inserted into 'django_denorm' table\n"
" CREATE OR REPLACE FUNCTION notify_django_denorm_queue()\n"
" RETURNS trigger AS $$\n DECLARE\n BEGIN\n"
" PERFORM pg_notify('django_denorm_process', '');\n RETURN NEW;\n"
" END;\n $$ LANGUAGE plpgsql;\n\n "
"CREATE TRIGGER notify_django_denorm_queue\n AFTER INSERT ON denorm_dirtyinstance\n"
" FOR EACH STATEMENT\n EXECUTE PROCEDURE notify_django_denorm_queue();\n"
" ",
reverse_sql="\n DROP TRIGGER notify_django_denorm_queue ON denorm_dirtyinstance;\n "
" DROP FUNCTION notify_django_denorm_queue;\n ",
),
migrations.AddField(
model_name="dirtyinstance",
name="processing_finished",
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name="dirtyinstance",
name="traceback",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name="dirtyinstance",
name="created_on",
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
migrations.AddField(
model_name="dirtyinstance",
name="func_name",
field=models.TextField(blank=True, db_index=True, null=True),
),
migrations.AlterField(
model_name="dirtyinstance",
name="object_id",
field=models.IntegerField(db_index=True, default=0),
preserve_default=False,
),
migrations.AddField(
model_name="dirtyinstance",
name="processing_started",
field=models.DateTimeField(blank=True, db_index=True, null=True),
),
migrations.AddField(
model_name="dirtyinstance",
name="success",
field=models.NullBooleanField(db_index=True),
),
migrations.RunSQL(
sql="ALTER TABLE denorm_dirtyinstance ALTER COLUMN created_on SET DEFAULT CURRENT_TIMESTAMP",
reverse_sql="",
),
migrations.AlterField(
model_name="dirtyinstance",
name="object_id",
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="dirtyinstance",
name="object_id",
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="dirtyinstance",
name="object_id",
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="dirtyinstance",
name="object_id",
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="dirtyinstance",
name="object_id",
field=models.IntegerField(blank=True, null=True),
),
    ]
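
# For readability, the SQL embedded in the RunSQL operation above expands to
# (whitespace reflowed, content unchanged):
#
#     -- Notify when record get inserted into 'django_denorm' table
#     CREATE OR REPLACE FUNCTION notify_django_denorm_queue()
#     RETURNS trigger AS $$
#     DECLARE
#     BEGIN
#         PERFORM pg_notify('django_denorm_process', '');
#         RETURN NEW;
#     END;
#     $$ LANGUAGE plpgsql;
#
#     CREATE TRIGGER notify_django_denorm_queue
#     AFTER INSERT ON denorm_dirtyinstance
#     FOR EACH STATEMENT
#     EXECUTE PROCEDURE notify_django_denorm_queue();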
# File: sematic/api/endpoints/request_parameters.py
# Package: sematic_ee-0.0.1-py3-none-any.whl
import json
import logging
from http import HTTPStatus
from typing import Callable, Dict, List, Literal, Optional, Tuple, Union, cast
# Third-party
import flask
import sqlalchemy
from sqlalchemy.sql.elements import BooleanClauseList, ColumnElement
logger = logging.getLogger(__name__)
# Default page size
DEFAULT_LIMIT = 20
ORDER_BY_DIRECTIONS = {
"asc": sqlalchemy.asc,
"desc": sqlalchemy.desc,
}
ColumnMapping = Dict[str, sqlalchemy.Column]
Scalar = Union[str, int, float, bool, None]
ColumnPredicate = Dict[str, Dict[str, Union[Scalar, List[Scalar]]]]
BooleanPredicate = Dict[Literal["AND", "OR"], List[ColumnPredicate]]
Filters = Union[
ColumnPredicate,
BooleanPredicate,
]
def get_request_parameters(
args: Dict[str, str],
model: type,
default_order: Literal["asc", "desc"] = "desc",
) -> Tuple[
int,
Callable,
Optional[str],
Optional[sqlalchemy.Column],
Optional[BooleanClauseList],
]:
"""
Extract, validate, and format query parameters.
Parameters
----------
args : Dict[str, str]
The request argument as returned by `flask.request.args`.
model : type
The Sqlalchemy model for which to tailor the parameters.
default_order : Literal["asc", "desc"]
The default order to return in case the arguments do not specify an explicit
order. Defaults to "desc".
Returns
-------
Tuple[
int,
Callable,
Optional[str],
Optional[sqlalchemy.Column],
        Optional[BooleanClauseList]
] : limit, order, cursor, group_by, filters
"""
logger.debug("Raw request parameters: %s; model: %s", args, model)
limit: int = int(args.get("limit", DEFAULT_LIMIT))
if not (limit == -1 or limit > 0):
raise ValueError("limit must be greater than 0 or -1")
def _none_if_empty(name: str) -> Optional[str]:
value = args.get(name)
if value is not None and len(value) == 0:
value = None
return value
cursor = _none_if_empty("cursor")
group_by, group_by_column = _none_if_empty("group_by"), None
column_mapping = _get_column_mapping(model)
if group_by is not None:
if group_by not in column_mapping:
raise ValueError(f"Unsupported group_by value {repr(group_by)}")
group_by_column = column_mapping[group_by]
filters_json: str = args.get("filters", "{}")
try:
filters: Dict = json.loads(filters_json)
except Exception as e:
raise ValueError(f"Malformed filters: {filters_json}, error: {e}")
sql_predicates = (
_get_sql_predicates(filters, column_mapping) if len(filters) > 0 else None
)
order = ORDER_BY_DIRECTIONS.get(args.get("order", default_order))
if order is None:
raise ValueError(
f"invalid value for 'order'; expected one of: "
f"{list(ORDER_BY_DIRECTIONS.keys())}; got: '{args.get('order')}'"
)
return limit, order, cursor, group_by_column, sql_predicates
def jsonify_error(error: str, status: HTTPStatus):
return flask.Response(
json.dumps(dict(error=error)),
status=status.value,
mimetype="application/json",
)
def _get_column_mapping(model: type) -> Dict[str, sqlalchemy.Column]:
"""
Create a mapping of column name to column for a SQLAlchemy model.
"""
return {column.name: column for column in model.__table__.columns} # type: ignore
def _get_sql_predicates(
filters: Filters, column_mapping: ColumnMapping
) -> BooleanClauseList:
"""
Basic support for AND and OR filter predicates.
filters are of the form:
```
{"column_name": {"operator": "value"}}
OR
{
"AND": [
{"column_name": {"operator": "value"}},
{"column_name": {"operator": "value"}}
]
}
OR
{
"OR": [
{"column_name": {"operator": "value"}},
{"column_name": {"operator": "value"}}
]
}
```
"""
operand = list(filters.keys())[0]
if operand in {"AND", "OR"}:
filters = cast(BooleanPredicate, filters)
operand = cast(Literal["AND", "OR"], operand)
operator = dict(AND=sqlalchemy.and_, OR=sqlalchemy.or_)[operand]
return operator(
*[
_extract_single_predicate(filter_, column_mapping)
for filter_ in filters[operand]
]
)
else:
filter_ = cast(ColumnPredicate, filters)
return sqlalchemy.and_(_extract_single_predicate(filter_, column_mapping))
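# Illustrative mapping from the filter grammar above to SQLAlchemy expressions
# (column names hypothetical):
#
#     {"status": {"eq": "FAILED"}}
#         -> sqlalchemy.and_(model.status == "FAILED")
#     {"OR": [{"id": {"in": [1, 2]}}, {"status": {"eq": "OK"}}]}
#         -> sqlalchemy.or_(model.id.in_([1, 2]), model.status == "OK")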
def _extract_single_predicate(
filter_: ColumnPredicate, column_mapping: ColumnMapping
) -> ColumnElement:
column_name = list(filter_.keys())[0]
try:
column = column_mapping[column_name]
except KeyError:
raise Exception(f"Unknown filter field: {column_name}")
condition = filter_[column_name]
if len(condition) == 0:
raise Exception(f"Empty filter: {filter_}")
operator = list(condition.keys())[0]
value = condition[operator]
# Will obviously need to add more, only supporting eq and in for now
if operator == "eq":
return column == value
if operator == "in":
return column.in_(value)
    raise NotImplementedError(f"Unsupported filter: {filter_}")
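
if __name__ == "__main__":
    # Minimal usage sketch: exercise the parameter parsing with a throwaway
    # SQLAlchemy model. `Run` and its columns are hypothetical.
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Run(Base):
        __tablename__ = "runs"
        id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
        status = sqlalchemy.Column(sqlalchemy.String)

    limit_, order_, cursor_, group_by_, predicates_ = get_request_parameters(
        {"limit": "5", "order": "asc", "filters": '{"status": {"eq": "FAILED"}}'},
        Run,
    )
    print(limit_, order_, cursor_, group_by_, predicates_)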
# File: fitbenchmarking/results_processing/emissions_table.py
# Package: FitBenchmarking-1.0.0.tar.gz
import os
from fitbenchmarking.results_processing.base_table import Table
class EmissionsTable(Table):
r"""
The emissions (kg CO\ :sub:`2`\ eq) results are calculated
from an average (over num_runs) using the
`codecarbon <https://mlco2.github.io/codecarbon/index.html>`_ module.
num_runs is set in :ref:`options`.
Configuration for codecarbon is set in ``.codecarbon.config``.
Please note that for tracking CPU power usage on Windows or Mac,
    ``Intel Power Gadget`` should also be installed. For more information,
see the Methodology section of the `codecarbon docs
<https://mlco2.github.io/codecarbon/methodology.html#cpu>`_.
"""
def __init__(self, results, best_results, options, group_dir, pp_locations,
table_name):
"""
Initialise the emissions table which shows the average emissions
results
:param results: Results grouped by row and category (for colouring)
:type results:
dict[str, dict[str, list[utils.fitbm_result.FittingResult]]]
:param best_results: The best results from each row/category
:type best_results:
dict[str, dict[str, utils.fitbm_result.FittingResult]],
:param options: Options used in fitting
:type options: utils.options.Options
:param group_dir: path to the directory where group results should be
stored
:type group_dir: str
:param pp_locations: tuple containing the locations of the
performance profiles (acc then runtime)
:type pp_locations: tuple(str,str)
:param table_name: Name of the table
:type table_name: str
"""
super().__init__(results, best_results, options, group_dir,
pp_locations, table_name)
self.name = 'emissions'
self.has_pp = True
self.pp_filenames = [os.path.relpath(self.pp_locations[1], group_dir)]
self.cbar_title = "Problem-Specific Cell Shading: Relative Emissions"
def get_value(self, result):
"""
Gets the main value to be reported in the tables for a given result
Note that the first value (relative emissions) will be used in the
default colour handling.
:param result: The result to generate the values for.
:type result: FittingResult
:return: The normalised emissions with respect to the smallest
emissions value and absolute emissions for the result.
:rtype: tuple(float, float)
"""
rel_value = result.norm_emissions
abs_value = result.emissions
        return rel_value, abs_value
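
# Illustrative sketch: for a result whose emissions are 2.0e-6 kg CO2-eq when
# the smallest emissions in the comparison are 1.0e-6, get_value returns
# (2.0, 2.0e-6) - the relative value drives the cell shading and the absolute
# value is what gets displayed.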
# File: pulumi_alicloud/ecs/get_auto_snapshot_policies.py
# Package: pulumi_alicloud-3.44.0a1693632188.tar.gz
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetAutoSnapshotPoliciesResult',
'AwaitableGetAutoSnapshotPoliciesResult',
'get_auto_snapshot_policies',
'get_auto_snapshot_policies_output',
]
@pulumi.output_type
class GetAutoSnapshotPoliciesResult:
"""
A collection of values returned by getAutoSnapshotPolicies.
"""
def __init__(__self__, id=None, ids=None, name_regex=None, names=None, output_file=None, policies=None, status=None, tags=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if ids and not isinstance(ids, list):
raise TypeError("Expected argument 'ids' to be a list")
pulumi.set(__self__, "ids", ids)
if name_regex and not isinstance(name_regex, str):
raise TypeError("Expected argument 'name_regex' to be a str")
pulumi.set(__self__, "name_regex", name_regex)
if names and not isinstance(names, list):
raise TypeError("Expected argument 'names' to be a list")
pulumi.set(__self__, "names", names)
if output_file and not isinstance(output_file, str):
raise TypeError("Expected argument 'output_file' to be a str")
pulumi.set(__self__, "output_file", output_file)
if policies and not isinstance(policies, list):
raise TypeError("Expected argument 'policies' to be a list")
pulumi.set(__self__, "policies", policies)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def ids(self) -> Sequence[str]:
return pulumi.get(self, "ids")
@property
@pulumi.getter(name="nameRegex")
def name_regex(self) -> Optional[str]:
return pulumi.get(self, "name_regex")
@property
@pulumi.getter
def names(self) -> Sequence[str]:
return pulumi.get(self, "names")
@property
@pulumi.getter(name="outputFile")
def output_file(self) -> Optional[str]:
return pulumi.get(self, "output_file")
@property
@pulumi.getter
def policies(self) -> Sequence['outputs.GetAutoSnapshotPoliciesPolicyResult']:
return pulumi.get(self, "policies")
@property
@pulumi.getter
def status(self) -> Optional[str]:
return pulumi.get(self, "status")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, Any]]:
return pulumi.get(self, "tags")
class AwaitableGetAutoSnapshotPoliciesResult(GetAutoSnapshotPoliciesResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAutoSnapshotPoliciesResult(
id=self.id,
ids=self.ids,
name_regex=self.name_regex,
names=self.names,
output_file=self.output_file,
policies=self.policies,
status=self.status,
tags=self.tags)
def get_auto_snapshot_policies(ids: Optional[Sequence[str]] = None,
name_regex: Optional[str] = None,
output_file: Optional[str] = None,
status: Optional[str] = None,
tags: Optional[Mapping[str, Any]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAutoSnapshotPoliciesResult:
"""
This data source provides the Ecs Auto Snapshot Policies of the current Alibaba Cloud user.
> **NOTE:** Available in v1.117.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example = alicloud.ecs.get_auto_snapshot_policies(ids=["sp-bp14e66xxxxxxxx"],
name_regex="tf-testAcc")
pulumi.export("firstEcsAutoSnapshotPolicyId", example.policies[0].id)
```
:param Sequence[str] ids: A list of Auto Snapshot Policy IDs.
:param str name_regex: A regex string to filter results by Auto Snapshot Policy name.
:param str output_file: File name where to save data source results (after running `pulumi preview`).
:param str status: The status of Auto Snapshot Policy.
:param Mapping[str, Any] tags: A mapping of tags to assign to the resource.
"""
__args__ = dict()
__args__['ids'] = ids
__args__['nameRegex'] = name_regex
__args__['outputFile'] = output_file
__args__['status'] = status
__args__['tags'] = tags
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('alicloud:ecs/getAutoSnapshotPolicies:getAutoSnapshotPolicies', __args__, opts=opts, typ=GetAutoSnapshotPoliciesResult).value
return AwaitableGetAutoSnapshotPoliciesResult(
id=pulumi.get(__ret__, 'id'),
ids=pulumi.get(__ret__, 'ids'),
name_regex=pulumi.get(__ret__, 'name_regex'),
names=pulumi.get(__ret__, 'names'),
output_file=pulumi.get(__ret__, 'output_file'),
policies=pulumi.get(__ret__, 'policies'),
status=pulumi.get(__ret__, 'status'),
tags=pulumi.get(__ret__, 'tags'))
@_utilities.lift_output_func(get_auto_snapshot_policies)
def get_auto_snapshot_policies_output(ids: Optional[pulumi.Input[Optional[Sequence[str]]]] = None,
name_regex: Optional[pulumi.Input[Optional[str]]] = None,
output_file: Optional[pulumi.Input[Optional[str]]] = None,
status: Optional[pulumi.Input[Optional[str]]] = None,
tags: Optional[pulumi.Input[Optional[Mapping[str, Any]]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAutoSnapshotPoliciesResult]:
"""
This data source provides the Ecs Auto Snapshot Policies of the current Alibaba Cloud user.
> **NOTE:** Available in v1.117.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example = alicloud.ecs.get_auto_snapshot_policies(ids=["sp-bp14e66xxxxxxxx"],
name_regex="tf-testAcc")
pulumi.export("firstEcsAutoSnapshotPolicyId", example.policies[0].id)
```
:param Sequence[str] ids: A list of Auto Snapshot Policy IDs.
:param str name_regex: A regex string to filter results by Auto Snapshot Policy name.
:param str output_file: File name where to save data source results (after running `pulumi preview`).
:param str status: The status of Auto Snapshot Policy.
:param Mapping[str, Any] tags: A mapping of tags to assign to the resource.
"""
    ...
# File: lookout/sdk/event_pb2.py
# Package: lookout_sdk-0.6.3-py3-none-any.whl
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from bblfsh.github.com.gogo.protobuf.gogoproto import gogo_pb2 as github_dot_com_dot_gogo_dot_protobuf_dot_gogoproto_dot_gogo__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='lookout/sdk/event.proto',
package='pb',
syntax='proto3',
serialized_pb=_b('\n\x17lookout/sdk/event.proto\x12\x02pb\x1a-github.com/gogo/protobuf/gogoproto/gogo.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"d\n\x0e\x43ommitRevision\x12(\n\x04\x62\x61se\x18\x01 \x01(\x0b\x32\x14.pb.ReferencePointerB\x04\xc8\xde\x1f\x00\x12(\n\x04head\x18\x02 \x01(\x0b\x32\x14.pb.ReferencePointerB\x04\xc8\xde\x1f\x00\"\xa9\x01\n\x10ReferencePointer\x12:\n\x17internal_repository_url\x18\x01 \x01(\tB\x19\xe2\xde\x1f\x15InternalRepositoryURL\x12K\n\x0ereference_name\x18\x02 \x01(\tB3\xfa\xde\x1f/gopkg.in/src-d/go-git.v4/plumbing.ReferenceName\x12\x0c\n\x04hash\x18\x03 \x01(\t\"\x94\x02\n\tPushEvent\x12\x10\n\x08provider\x18\x01 \x01(\t\x12#\n\x0binternal_id\x18\x02 \x01(\tB\x0e\xe2\xde\x1f\nInternalID\x12\x38\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\xc8\xde\x1f\x00\x90\xdf\x1f\x01\x12\x0f\n\x07\x63ommits\x18\x04 \x01(\r\x12\x18\n\x10\x64istinct_commits\x18\x05 \x01(\r\x12\x34\n\rconfiguration\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructB\x04\xc8\xde\x1f\x00\x12\x35\n\x0f\x63ommit_revision\x18\x07 \x01(\x0b\x32\x12.pb.CommitRevisionB\x08\xc8\xde\x1f\x00\xd0\xde\x1f\x01\"\xa6\x03\n\x0bReviewEvent\x12\x10\n\x08provider\x18\x01 \x01(\t\x12#\n\x0binternal_id\x18\x02 \x01(\tB\x0e\xe2\xde\x1f\nInternalID\x12\x38\n\ncreated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\xc8\xde\x1f\x00\x90\xdf\x1f\x01\x12\x38\n\nupdated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\xc8\xde\x1f\x00\x90\xdf\x1f\x01\x12\x14\n\x0cis_mergeable\x18\x05 \x01(\x08\x12*\n\x06source\x18\x08 \x01(\x0b\x32\x14.pb.ReferencePointerB\x04\xc8\xde\x1f\x00\x12\x34\n\rconfiguration\x18\n \x01(\x0b\x32\x17.google.protobuf.StructB\x04\xc8\xde\x1f\x00\x12\'\n\rrepository_id\x18\x0b \x01(\rB\x10\xe2\xde\x1f\x0cRepositoryID\x12\x0e\n\x06number\x18\x0c \x01(\r\x12\x35\n\x0f\x63ommit_revision\x18\x07 \x01(\x0b\x32\x12.pb.CommitRevisionB\x08\xc8\xde\x1f\x00\xd0\xde\x1f\x01:\x04\xe0\xa1\x1f\x00\x42\x04\xc8\xe1\x1e\x00\x62\x06proto3')
,
dependencies=[github_dot_com_dot_gogo_dot_protobuf_dot_gogoproto_dot_gogo__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
_COMMITREVISION = _descriptor.Descriptor(
name='CommitRevision',
full_name='pb.CommitRevision',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='base', full_name='pb.CommitRevision.base', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='head', full_name='pb.CommitRevision.head', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000')), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=141,
serialized_end=241,
)
_REFERENCEPOINTER = _descriptor.Descriptor(
name='ReferencePointer',
full_name='pb.ReferencePointer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='internal_repository_url', full_name='pb.ReferencePointer.internal_repository_url', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\025InternalRepositoryURL')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reference_name', full_name='pb.ReferencePointer.reference_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\372\336\037/gopkg.in/src-d/go-git.v4/plumbing.ReferenceName')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hash', full_name='pb.ReferencePointer.hash', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=244,
serialized_end=413,
)
_PUSHEVENT = _descriptor.Descriptor(
name='PushEvent',
full_name='pb.PushEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='provider', full_name='pb.PushEvent.provider', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='internal_id', full_name='pb.PushEvent.internal_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\nInternalID')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='created_at', full_name='pb.PushEvent.created_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\220\337\037\001')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commits', full_name='pb.PushEvent.commits', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='distinct_commits', full_name='pb.PushEvent.distinct_commits', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configuration', full_name='pb.PushEvent.configuration', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commit_revision', full_name='pb.PushEvent.commit_revision', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\320\336\037\001')), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=416,
serialized_end=692,
)
_REVIEWEVENT = _descriptor.Descriptor(
name='ReviewEvent',
full_name='pb.ReviewEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='provider', full_name='pb.ReviewEvent.provider', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='internal_id', full_name='pb.ReviewEvent.internal_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\nInternalID')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='created_at', full_name='pb.ReviewEvent.created_at', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\220\337\037\001')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updated_at', full_name='pb.ReviewEvent.updated_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\220\337\037\001')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_mergeable', full_name='pb.ReviewEvent.is_mergeable', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='source', full_name='pb.ReviewEvent.source', index=5,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configuration', full_name='pb.ReviewEvent.configuration', index=6,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='repository_id', full_name='pb.ReviewEvent.repository_id', index=7,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\014RepositoryID')), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='number', full_name='pb.ReviewEvent.number', index=8,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commit_revision', full_name='pb.ReviewEvent.commit_revision', index=9,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\320\336\037\001')), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\340\241\037\000')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=695,
serialized_end=1117,
)
_COMMITREVISION.fields_by_name['base'].message_type = _REFERENCEPOINTER
_COMMITREVISION.fields_by_name['head'].message_type = _REFERENCEPOINTER
_PUSHEVENT.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_PUSHEVENT.fields_by_name['configuration'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_PUSHEVENT.fields_by_name['commit_revision'].message_type = _COMMITREVISION
_REVIEWEVENT.fields_by_name['created_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_REVIEWEVENT.fields_by_name['updated_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_REVIEWEVENT.fields_by_name['source'].message_type = _REFERENCEPOINTER
_REVIEWEVENT.fields_by_name['configuration'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_REVIEWEVENT.fields_by_name['commit_revision'].message_type = _COMMITREVISION
DESCRIPTOR.message_types_by_name['CommitRevision'] = _COMMITREVISION
DESCRIPTOR.message_types_by_name['ReferencePointer'] = _REFERENCEPOINTER
DESCRIPTOR.message_types_by_name['PushEvent'] = _PUSHEVENT
DESCRIPTOR.message_types_by_name['ReviewEvent'] = _REVIEWEVENT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CommitRevision = _reflection.GeneratedProtocolMessageType('CommitRevision', (_message.Message,), dict(
DESCRIPTOR = _COMMITREVISION,
__module__ = 'lookout.sdk.event_pb2'
# @@protoc_insertion_point(class_scope:pb.CommitRevision)
))
_sym_db.RegisterMessage(CommitRevision)
ReferencePointer = _reflection.GeneratedProtocolMessageType('ReferencePointer', (_message.Message,), dict(
DESCRIPTOR = _REFERENCEPOINTER,
__module__ = 'lookout.sdk.event_pb2'
# @@protoc_insertion_point(class_scope:pb.ReferencePointer)
))
_sym_db.RegisterMessage(ReferencePointer)
PushEvent = _reflection.GeneratedProtocolMessageType('PushEvent', (_message.Message,), dict(
DESCRIPTOR = _PUSHEVENT,
__module__ = 'lookout.sdk.event_pb2'
# @@protoc_insertion_point(class_scope:pb.PushEvent)
))
_sym_db.RegisterMessage(PushEvent)
ReviewEvent = _reflection.GeneratedProtocolMessageType('ReviewEvent', (_message.Message,), dict(
DESCRIPTOR = _REVIEWEVENT,
__module__ = 'lookout.sdk.event_pb2'
# @@protoc_insertion_point(class_scope:pb.ReviewEvent)
))
_sym_db.RegisterMessage(ReviewEvent)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\310\341\036\000'))
_COMMITREVISION.fields_by_name['base'].has_options = True
_COMMITREVISION.fields_by_name['base']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000'))
_COMMITREVISION.fields_by_name['head'].has_options = True
_COMMITREVISION.fields_by_name['head']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000'))
_REFERENCEPOINTER.fields_by_name['internal_repository_url'].has_options = True
_REFERENCEPOINTER.fields_by_name['internal_repository_url']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\025InternalRepositoryURL'))
_REFERENCEPOINTER.fields_by_name['reference_name'].has_options = True
_REFERENCEPOINTER.fields_by_name['reference_name']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\372\336\037/gopkg.in/src-d/go-git.v4/plumbing.ReferenceName'))
_PUSHEVENT.fields_by_name['internal_id'].has_options = True
_PUSHEVENT.fields_by_name['internal_id']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\nInternalID'))
_PUSHEVENT.fields_by_name['created_at'].has_options = True
_PUSHEVENT.fields_by_name['created_at']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\220\337\037\001'))
_PUSHEVENT.fields_by_name['configuration'].has_options = True
_PUSHEVENT.fields_by_name['configuration']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000'))
_PUSHEVENT.fields_by_name['commit_revision'].has_options = True
_PUSHEVENT.fields_by_name['commit_revision']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\320\336\037\001'))
_REVIEWEVENT.fields_by_name['internal_id'].has_options = True
_REVIEWEVENT.fields_by_name['internal_id']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\nInternalID'))
_REVIEWEVENT.fields_by_name['created_at'].has_options = True
_REVIEWEVENT.fields_by_name['created_at']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\220\337\037\001'))
_REVIEWEVENT.fields_by_name['updated_at'].has_options = True
_REVIEWEVENT.fields_by_name['updated_at']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\220\337\037\001'))
_REVIEWEVENT.fields_by_name['source'].has_options = True
_REVIEWEVENT.fields_by_name['source']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000'))
_REVIEWEVENT.fields_by_name['configuration'].has_options = True
_REVIEWEVENT.fields_by_name['configuration']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000'))
_REVIEWEVENT.fields_by_name['repository_id'].has_options = True
_REVIEWEVENT.fields_by_name['repository_id']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\342\336\037\014RepositoryID'))
_REVIEWEVENT.fields_by_name['commit_revision'].has_options = True
_REVIEWEVENT.fields_by_name['commit_revision']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\336\037\000\320\336\037\001'))
_REVIEWEVENT.has_options = True
_REVIEWEVENT._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\340\241\037\000'))
# @@protoc_insertion_point(module_scope)
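
# Illustrative usage sketch for the generated messages above:
#
#     from lookout.sdk import event_pb2
#
#     ev = event_pb2.ReviewEvent(provider="github", internal_id="1234")
#     ev.commit_revision.head.hash = "abcdef0"
#     payload = ev.SerializeToString()
#     same = event_pb2.ReviewEvent.FromString(payload)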
# File: alipay/aop/api/domain/AlipayCommerceTransportIntelligentizeOdanalysisCreateModel.py
# Package: alipay_sdk_python-3.6.740-py3-none-any.whl
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.OdAnalysisLineInfo import OdAnalysisLineInfo
class AlipayCommerceTransportIntelligentizeOdanalysisCreateModel(object):
def __init__(self):
self._city_code = None
self._corp_id = None
self._ext_param = None
self._line_info_list = None
self._request_id = None
self._service_task_name = None
@property
def city_code(self):
return self._city_code
@city_code.setter
def city_code(self, value):
self._city_code = value
@property
def corp_id(self):
return self._corp_id
@corp_id.setter
def corp_id(self, value):
self._corp_id = value
@property
def ext_param(self):
return self._ext_param
@ext_param.setter
def ext_param(self, value):
self._ext_param = value
@property
def line_info_list(self):
return self._line_info_list
@line_info_list.setter
def line_info_list(self, value):
if isinstance(value, list):
self._line_info_list = list()
for i in value:
if isinstance(i, OdAnalysisLineInfo):
self._line_info_list.append(i)
else:
self._line_info_list.append(OdAnalysisLineInfo.from_alipay_dict(i))
@property
def request_id(self):
return self._request_id
@request_id.setter
def request_id(self, value):
self._request_id = value
@property
def service_task_name(self):
return self._service_task_name
@service_task_name.setter
def service_task_name(self, value):
self._service_task_name = value
def to_alipay_dict(self):
params = dict()
if self.city_code:
if hasattr(self.city_code, 'to_alipay_dict'):
params['city_code'] = self.city_code.to_alipay_dict()
else:
params['city_code'] = self.city_code
if self.corp_id:
if hasattr(self.corp_id, 'to_alipay_dict'):
params['corp_id'] = self.corp_id.to_alipay_dict()
else:
params['corp_id'] = self.corp_id
if self.ext_param:
if hasattr(self.ext_param, 'to_alipay_dict'):
params['ext_param'] = self.ext_param.to_alipay_dict()
else:
params['ext_param'] = self.ext_param
if self.line_info_list:
if isinstance(self.line_info_list, list):
for i in range(0, len(self.line_info_list)):
element = self.line_info_list[i]
if hasattr(element, 'to_alipay_dict'):
self.line_info_list[i] = element.to_alipay_dict()
if hasattr(self.line_info_list, 'to_alipay_dict'):
params['line_info_list'] = self.line_info_list.to_alipay_dict()
else:
params['line_info_list'] = self.line_info_list
if self.request_id:
if hasattr(self.request_id, 'to_alipay_dict'):
params['request_id'] = self.request_id.to_alipay_dict()
else:
params['request_id'] = self.request_id
if self.service_task_name:
if hasattr(self.service_task_name, 'to_alipay_dict'):
params['service_task_name'] = self.service_task_name.to_alipay_dict()
else:
params['service_task_name'] = self.service_task_name
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayCommerceTransportIntelligentizeOdanalysisCreateModel()
if 'city_code' in d:
o.city_code = d['city_code']
if 'corp_id' in d:
o.corp_id = d['corp_id']
if 'ext_param' in d:
o.ext_param = d['ext_param']
if 'line_info_list' in d:
o.line_info_list = d['line_info_list']
if 'request_id' in d:
o.request_id = d['request_id']
if 'service_task_name' in d:
o.service_task_name = d['service_task_name']
        return o
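
# Illustrative usage sketch (field values hypothetical): round-tripping the
# model through its dict form.
#
#     model = AlipayCommerceTransportIntelligentizeOdanalysisCreateModel()
#     model.city_code = "330100"
#     model.request_id = "req-001"
#     d = model.to_alipay_dict()
#     clone = AlipayCommerceTransportIntelligentizeOdanalysisCreateModel.from_alipay_dict(d)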
# File: controller/apps/firmware/plugins/usbimage.py
# Package: confine-controller-1.0.2.tar.gz
import os
from django import forms
from controller.utils.paths import get_site_root
from controller.utils.system import run
from api import serializers
from firmware.image import Image
from firmware.plugins import FirmwarePlugin
from firmware.settings import FIRMWARE_PLUGINS_USB_IMAGE
usb_image = FIRMWARE_PLUGINS_USB_IMAGE % {'site_root': get_site_root()}
class USBImagePlugin(FirmwarePlugin):
verbose_name = 'USB image'
    description = ('Optionally puts the firmware image into a confine-install USB image.\n'
                   'The base image can be downloaded from http://media.confine-project.eu/'
                   'confine-install/confine-install.img.gz and stored in %s.' % usb_image)
enabled_by_default = True
def get_form(self):
class USBImageForm(forms.Form):
usb_image = forms.BooleanField(label='USB Image', required=False,
help_text='Select this option if you want to install the node image '
'from a USB stick. This option requires a node internal hard drive.')
return USBImageForm
def get_serializer(self):
class USBImageSerializer(serializers.Serializer):
usb_image = serializers.BooleanField(required=False, default=False)
def __init__(self, node, *args, **kwargs):
super(USBImageSerializer, self).__init__(*args, **kwargs)
def process_post(self):
assert self.is_valid()
return self.data
return USBImageSerializer
def process_form_post(self, form):
return {'usb_image': form.cleaned_data['usb_image']}
def post_umount(self, image, build, *args, **kwargs):
""" Creating confine-install USB image """
if kwargs.get('usb_image', False):
install = Image(usb_image)
try:
install.prepare()
install.gunzip()
install.mount()
path = os.path.join(install.mnt, 'confine/*img.gz')
dst = run('ls %s' % path).stdout
image.gzip()
run('mv %s %s' % (image.file, dst))
install.umount()
except:
install.clean()
image.file = install.file
image.tmp = install.tmp
def update_image_name(self, image_name, **kwargs):
""" Updating confine-install USB image name """
is_usb = kwargs.get('usb_image', False)
        return 'USB-'+image_name if is_usb else image_name
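
# Illustrative sketch: update_image_name("firmware.img.gz", usb_image=True)
# returns "USB-firmware.img.gz"; with usb_image=False the name is unchanged.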
# File: alipay/aop/api/request/AlipayOpenPublicPayeeBindDeleteRequest.py
# Package: alipay-sdk-python-pycryptodome-3.3.202.tar.gz
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayOpenPublicPayeeBindDeleteModel import AlipayOpenPublicPayeeBindDeleteModel
class AlipayOpenPublicPayeeBindDeleteRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayOpenPublicPayeeBindDeleteModel):
self._biz_content = value
else:
self._biz_content = AlipayOpenPublicPayeeBindDeleteModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.open.public.payee.bind.delete'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
        return multipart_params
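
# Illustrative usage sketch (the biz_content field name is hypothetical):
#
#     req = AlipayOpenPublicPayeeBindDeleteRequest()
#     req.biz_content = {"agreement_id": "2020..."}
#     req.notify_url = "https://example.com/notify"
#     params = req.get_params()  # includes the method name and serialized biz_content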
# File: bpy2/2.79/scripts/addons/camera_turnaround.py
# Package: bpy36-1.0.0-py3-none-any.whl
bl_info = {
"name": "Turnaround Camera",
"author": "Antonio Vazquez (antonioya)",
"version": (0, 2, 5),
"blender": (2, 68, 0),
"location": "View3D > Toolshelf > Turnaround camera",
"description": "Add a camera rotation around selected object",
"wiki_url": "https://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Animation/TurnaroundCamera",
"category": "Camera"}
import bpy
from math import pi
from bpy.props import (
BoolProperty,
EnumProperty,
FloatProperty,
PointerProperty,
)
from bpy.types import (
Operator,
Panel,
PropertyGroup,
)
# ------------------------------------------------------
# Action class
# ------------------------------------------------------
class RunAction(Operator):
bl_idname = "object.rotate_around"
bl_label = "Turnaround"
bl_description = "Create camera rotation around selected object"
def execute(self, context):
# ----------------------
# Save old data
# ----------------------
scene = context.scene
turn_camera = scene.turn_camera
selectobject = context.active_object
camera = bpy.data.objects[bpy.context.scene.camera.name]
savedcursor = bpy.context.scene.cursor_location.copy() # cursor position
savedframe = scene.frame_current
if turn_camera.use_cursor is False:
bpy.ops.view3d.snap_cursor_to_selected()
# -------------------------
# Create empty and parent
# -------------------------
bpy.ops.object.empty_add(type='PLAIN_AXES')
myempty = bpy.data.objects[bpy.context.active_object.name]
myempty.location = selectobject.location
savedstate = myempty.matrix_world
myempty.parent = selectobject
myempty.name = 'MCH_Rotation_target'
myempty.matrix_world = savedstate
# -------------------------
# Parent camera to empty
# -------------------------
savedstate = camera.matrix_world
camera.parent = myempty
camera.matrix_world = savedstate
# -------------------------
# Now add revolutions
# (make empty active object)
# -------------------------
bpy.ops.object.select_all(False)
myempty.select = True
bpy.context.scene.objects.active = myempty
# save current configuration
savedinterpolation = context.user_preferences.edit.keyframe_new_interpolation_type
# change interpolation mode
context.user_preferences.edit.keyframe_new_interpolation_type = 'LINEAR'
# create first frame
myempty.rotation_euler = (0, 0, 0)
myempty.empty_draw_size = 0.1
bpy.context.scene.frame_set(scene.frame_start)
myempty.keyframe_insert(data_path='rotation_euler', frame=scene.frame_start)
# Clear the Camera Animations if the option is checked
if turn_camera.reset_cam_anim:
try:
if bpy.data.cameras[camera.name].animation_data:
bpy.data.cameras[camera.name].animation_data_clear()
except Exception as e:
print("\n[Camera Turnaround]\nWarning: {}\n".format(e))
# Dolly zoom
if turn_camera.dolly_zoom != "0":
bpy.data.cameras[camera.name].lens = turn_camera.camera_from_lens
bpy.data.cameras[camera.name].keyframe_insert('lens', frame=scene.frame_start)
# Calculate rotation XYZ
ix = -1 if turn_camera.inverse_x else 1
iy = -1 if turn_camera.inverse_y else 1
iz = -1 if turn_camera.inverse_z else 1
xrot = (pi * 2) * turn_camera.camera_revol_x * ix
yrot = (pi * 2) * turn_camera.camera_revol_y * iy
zrot = (pi * 2) * turn_camera.camera_revol_z * iz
# create middle frame
if turn_camera.back_forw is True:
myempty.rotation_euler = (xrot, yrot, zrot)
myempty.keyframe_insert(
data_path='rotation_euler',
                frame=(scene.frame_start + (scene.frame_end - scene.frame_start) / 2)  # midpoint of the frame range
)
# reverse
xrot *= -1
yrot *= -1
zrot = 0
# Dolly zoom
if turn_camera.dolly_zoom == "2":
bpy.data.cameras[camera.name].lens = turn_camera.camera_to_lens
bpy.data.cameras[camera.name].keyframe_insert(
'lens',
                    frame=(scene.frame_start + (scene.frame_end - scene.frame_start) / 2)  # midpoint of the frame range
)
# create last frame
myempty.rotation_euler = (xrot, yrot, zrot)
myempty.keyframe_insert(data_path='rotation_euler', frame=scene.frame_end)
# Dolly zoom
if turn_camera.dolly_zoom != "0":
if turn_camera.dolly_zoom == "1":
bpy.data.cameras[camera.name].lens = turn_camera.camera_to_lens # final
else:
bpy.data.cameras[camera.name].lens = turn_camera.camera_from_lens # back to init
bpy.data.cameras[camera.name].keyframe_insert(
'lens', frame=scene.frame_end
)
# Track constraint
if turn_camera.track is True:
bpy.context.scene.objects.active = camera
bpy.ops.object.constraint_add(type='TRACK_TO')
bpy.context.object.constraints[-1].track_axis = 'TRACK_NEGATIVE_Z'
bpy.context.object.constraints[-1].up_axis = 'UP_Y'
bpy.context.object.constraints[-1].target = bpy.data.objects[myempty.name]
# back previous configuration
context.user_preferences.edit.keyframe_new_interpolation_type = savedinterpolation
bpy.context.scene.cursor_location = savedcursor
# -------------------------
# Back to old selection
# -------------------------
bpy.ops.object.select_all(False)
selectobject.select = True
bpy.context.scene.objects.active = selectobject
bpy.context.scene.frame_set(savedframe)
return {'FINISHED'}
# ------------------------------------------------------
# Define Properties
# ------------------------------------------------------
class CameraTurnProps(PropertyGroup):
camera_revol_x = FloatProperty(
name='X', min=0, max=25,
default=0, precision=2,
        description='Total number of revolutions around the X axis'
)
camera_revol_y = FloatProperty(
name='Y', min=0, max=25,
default=0, precision=2,
        description='Total number of revolutions around the Y axis'
)
camera_revol_z = FloatProperty(
name='Z', min=0, max=25,
default=1, precision=2,
        description='Total number of revolutions around the Z axis'
)
inverse_x = BoolProperty(
name="-X",
description="Inverse rotation",
default=False
)
inverse_y = BoolProperty(
name="-Y",
description="Inverse rotation",
default=False
)
inverse_z = BoolProperty(
name="-Z",
description="Inverse rotation",
default=False
)
use_cursor = BoolProperty(
name="Use cursor position",
description="Use cursor position instead of object origin",
default=False
)
back_forw = BoolProperty(
name="Back and forward",
description="Create back and forward animation",
default=False
)
dolly_zoom = EnumProperty(
items=(
('0', "None", ""),
('1', "Dolly zoom", ""),
('2', "Dolly zoom B/F", "")
),
name="Lens Effects",
description="Create a camera lens movement"
)
camera_from_lens = FloatProperty(
name="From",
min=1, max=500, default=35,
precision=3,
description="Start lens value"
)
camera_to_lens = FloatProperty(
name="To",
min=1, max=500,
default=35, precision=3,
description="End lens value"
)
track = BoolProperty(
name="Create track constraint",
description="Add a track constraint to the camera",
default=False
)
reset_cam_anim = BoolProperty(
name="Clear Camera",
description="Clear previous camera animations if there are any\n"
"(For instance, previous Dolly Zoom)",
default=False
)
# ------------------------------------------------------
# UI Class
# ------------------------------------------------------
class PanelUI(Panel):
bl_label = "Turnaround Camera"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_category = "Animation"
def draw(self, context):
layout = self.layout
scene = context.scene
turn_camera = scene.turn_camera
try:
bpy.context.scene.camera.name
except AttributeError:
row = layout.row(align=False)
row.label("No defined camera for scene", icon="INFO")
return
if context.active_object is not None:
if context.active_object.type != 'CAMERA':
buf = context.active_object.name
row = layout.row(align=True)
row.operator("object.rotate_around", icon='OUTLINER_DATA_CAMERA')
box = row.box()
box.scale_y = 0.5
box.label(buf, icon='MESH_DATA')
row = layout.row(align=False)
row.prop(scene, "camera")
layout.label("Rotation:")
row = layout.row(align=True)
row.prop(scene, "frame_start")
row.prop(scene, "frame_end")
col = layout.column(align=True)
split = col.split(percentage=0.85, align=True)
split.prop(turn_camera, "camera_revol_x")
split.prop(turn_camera, "inverse_x", toggle=True)
split = col.split(percentage=0.85, align=True)
split.prop(turn_camera, "camera_revol_y")
split.prop(turn_camera, "inverse_y", toggle=True)
split = col.split(percentage=0.85, align=True)
split.prop(turn_camera, "camera_revol_z")
split.prop(turn_camera, "inverse_z", toggle=True)
col = layout.column(align=True)
col.label("Options:")
row = col.row(align=True)
row.prop(turn_camera, "back_forw", toggle=True)
row.prop(turn_camera, "reset_cam_anim", toggle=True)
col.prop(turn_camera, "track", toggle=True)
col.prop(turn_camera, "use_cursor", toggle=True)
row = layout.row()
row.prop(turn_camera, "dolly_zoom")
if turn_camera.dolly_zoom != "0":
row = layout.row(align=True)
row.prop(turn_camera, "camera_from_lens")
row.prop(turn_camera, "camera_to_lens")
else:
buf = "No valid object selected"
layout.label(buf, icon='MESH_DATA')
# ------------------------------------------------------
# Registration
# ------------------------------------------------------
def register():
bpy.utils.register_class(RunAction)
bpy.utils.register_class(PanelUI)
bpy.utils.register_class(CameraTurnProps)
bpy.types.Scene.turn_camera = PointerProperty(type=CameraTurnProps)
def unregister():
bpy.utils.unregister_class(RunAction)
bpy.utils.unregister_class(PanelUI)
bpy.utils.unregister_class(CameraTurnProps)
del bpy.types.Scene.turn_camera
if __name__ == "__main__":
    register()
/odoo13_addon_base_geoengine-13.0.1.0.3-py3-none-any.whl/odoo/addons/base_geoengine/geo_operators.py
import logging
from .fields import GeoField
UNION_MAPPING = {"|": "OR", "&": "AND"}
logger = logging.getLogger("geoengine.sql.debug")
# TODO Refactor geo_search and dry up the get_**_sql code
def _get_geo_func(model, domain):
"""Map operator to function we do not want to override __getattr__"""
current_field = model._fields[domain[0]]
if isinstance(current_field, GeoField):
current_operator = GeoOperator(current_field)
attr = "get_{}_sql".format(domain[1])
if hasattr(current_operator, attr):
return getattr(current_operator, attr)
raise ValueError("Field {} does not support {}".format(current_field, domain[1]))
def geo_search(model, domain=None, geo_domain=None, offset=0, limit=None, order=None):
"""Perform a geo search it allows direct domain:
geo_search(
domain=[('name', 'ilike', 'toto']),
geo_domain=[('the_point',
'geo_intersect',
myshaply_obj or mywkt or mygeojson)])
We can also support indirect geo_domain
    ('geom', 'geo_operator', {'res.zip.poly': ['id', 'in', [1, 2, 3]]})
The supported operators are :
* geo_greater
* geo_lesser
* geo_equal
* geo_touch
* geo_within
* geo_contains
* geo_intersect
"""
cr = model._cr
domain = domain or []
geo_domain = geo_domain or []
model.env["ir.model.access"].check(model._name, "read")
query = model._where_calc(domain, active_test=True)
model._apply_ir_rules(query, "read")
order_by = ""
if order:
order_by = model._generate_order_by(order, query) or ""
from_clause, where_clause, where_clause_params = query.get_sql()
limit_str = limit and " LIMIT %d" % limit or ""
offset_str = offset and " OFFSET %d" % offset or ""
where_clause_arr = []
if where_clause and where_clause_params:
where_clause_arr.append(where_clause)
# geosearch where clause generation
MODE = ""
UNION = "AND"
JOIN_MODE = "%s %s"
for domain in geo_domain:
if isinstance(domain, str):
if domain == "!":
MODE = "NOT"
if domain in list(UNION_MAPPING.keys()):
UNION = UNION_MAPPING[domain]
if where_clause_arr:
where_clause_arr.append(JOIN_MODE % (MODE, UNION))
        # We start computing the geo spatial SQL
if isinstance(domain, (list, tuple)):
if isinstance(domain[2], dict):
                # We are handling an indirect geo_operator like ('geom', 'geo_...',
                # {'res.zip.poly': ['id', 'in', [1, 2, 3]]})
ref_search = domain[2]
rel_where_statement = []
for key in ref_search:
i = key.rfind(".")
rel_model = key[0:i]
rel_col = key[i + 1 :]
rel_model = model.env[rel_model]
from_clause += ", {}".format(rel_model._table)
att_where_sql = ""
# we compute the attributes search on spatial rel
if ref_search[key]:
rel_query = rel_model._where_calc(
ref_search[key], active_test=True
)
rel_res = rel_query.get_sql()
att_where_sql = rel_res[1]
where_clause_params += rel_res[2]
# we compute the spatial search on spatial rel
func = _get_geo_func(model, domain)
spatial_where_sql = func(
model._table,
domain[0],
domain[2],
rel_col=rel_col,
rel_model=rel_model,
)
if att_where_sql:
rel_where_statement.append(
"({} AND {})".format(att_where_sql, spatial_where_sql)
)
else:
rel_where_statement.append("(%s)" % (spatial_where_sql))
where_clause_arr.append("AND ".join(rel_where_statement))
else:
func = _get_geo_func(model, domain)
where_sql = func(model._table, domain[0], domain[2])
where_clause_arr.append(where_sql)
if where_clause_arr:
where_statement = " WHERE %s" % (" ".join(where_clause_arr))
else:
where_statement = ""
# pylint: disable=E8103
sql = (
'SELECT "%s".id FROM ' % model._table
+ from_clause
+ where_statement
+ order_by
+ limit_str
+ offset_str
)
# logger.debug(cursor.mogrify(sql, where_clause_params))
cr.execute(sql, where_clause_params)
res = cr.fetchall()
if res:
return [x[0] for x in res]
else:
return []
class GeoOperator(object):
def __init__(self, geo_field):
self.geo_field = geo_field
def get_rel_field(self, rel_col, rel_model):
"""Retrieves the expression to use in PostGIS statement for a spatial
rel search"""
try:
rel_model._fields[rel_col]
except Exception:
raise Exception(
"Model {} has no column {}".format(rel_model._name, rel_col)
)
return "{}.{}".format(rel_model._table, rel_col)
def _get_direct_como_op_sql(
self, table, col, value, rel_col=None, rel_model=None, op=""
):
"""provide raw sql for geater and lesser operators"""
if isinstance(value, (int, float)):
if rel_col and rel_model:
raise Exception(
"Area %s does not support int compare for relation "
"search" % (op,)
)
return " ST_Area({}.{}) {} {}".format(table, col, op, value)
else:
if rel_col and rel_model is not None:
compare_to = self.get_rel_field(rel_col, rel_model)
else:
base = self.geo_field.entry_to_shape(value, same_type=False)
compare_to = base.wkt
return " ST_Area({}.{}) {} ST_Area(ST_GeomFromText('{}'))".format(
table, col, op, compare_to
)
def _get_postgis_comp_sql(
self, table, col, value, rel_col=None, rel_model=None, op=""
):
"""return raw sql for all search based on St_**(a, b) posgis operator"""
if rel_col and rel_model is not None:
compare_to = self.get_rel_field(rel_col, rel_model)
else:
base = self.geo_field.entry_to_shape(value, same_type=False)
srid = self.geo_field.srid
compare_to = "ST_GeomFromText('{}',{})".format(base.wkt, srid)
return " {}({}.{}, {})".format(op, table, col, compare_to)
def get_geo_greater_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_greater operator
(used for area comparison)
"""
return self._get_direct_como_op_sql(
table, col, value, rel_col, rel_model, op=">"
)
def get_geo_lesser_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_lesser operator
(used for area comparison)"""
return self._get_direct_como_op_sql(
table, col, value, rel_col, rel_model, op="<"
)
def get_geo_equal_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_equal operator
(used for equality comparison)
"""
if rel_col and rel_model is not None:
compare_to = self.get_rel_field(rel_col, rel_model)
else:
base = self.geo_field.entry_to_shape(value, same_type=False)
compare_to = "ST_GeomFromText('{}')".format(base.wkt)
return " {}.{} = {}".format(table, col, compare_to)
def get_geo_intersect_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_intersec operator
(used for spatial comparison)
"""
return self._get_postgis_comp_sql(
table, col, value, rel_col, rel_model, op="ST_Intersects"
)
def get_geo_touch_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_touch operator
(used for spatial comparison)
"""
return self._get_postgis_comp_sql(
table, col, value, rel_col, rel_model, op="ST_Touches"
)
def get_geo_within_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_within operator
(used for spatial comparison)
"""
return self._get_postgis_comp_sql(
table, col, value, rel_col, rel_model, op="ST_Within"
)
def get_geo_contains_sql(self, table, col, value, rel_col=None, rel_model=None):
"""Returns raw sql for geo_contains operator
(used for spatial comparison)
"""
return self._get_postgis_comp_sql(
table, col, value, rel_col, rel_model, op="ST_Contains"
) | PypiClean |
/corelink-0.0.5.tar.gz/corelink-0.0.5/src/corelink/resources/control.py
from . import variables
from .reqs import retrieve, request_func
async def auth() -> None:
    """Authenticate the user with the stored credentials and
    store the received token and IP on the variables module.
    """
request = {
"function": "auth",
"username": variables.user,
"password": variables.password
}
response = retrieve(await request_func(request), ret=True)
variables.token = response['token']
variables.user_ip = response['IP']
async def list_functions() -> list:
"""return: List of functions available to the user
"""
request = {
"function": "listFunctions",
"token": variables.token
}
return retrieve(await request_func(request), "functionList")
async def list_server_functions() -> list:
request = {
"function": "listServerFunctions",
"token": variables.token
}
return retrieve(await request_func(request), "functionList")
async def describe_function(func: str) -> dict:
request = {"function": "describeFunction",
"functionName": func,
"token": variables.token}
return retrieve(await request_func(request), "description")
async def describe_server_function() -> dict:
request = {
"function": "listServerFunctions",
"token": variables.token
}
return retrieve(await request_func(request), "description")
async def list_workspaces() -> list:
request = {
"function": "listWorkspaces",
"token": variables.token
}
return retrieve(await request_func(request), "workspaceList")
async def add_workspace(space: str):
"""Adds a workspace.
:param space: Space to add
:return: Workspace
"""
request = {
"function": "addWorkspace",
"workspace": space,
"token": variables.token
}
return retrieve(await request_func(request))
async def set_default_workspace(space):
"""Sets default workspace.
:param space: space to set
:return: Workspace
"""
request = {
"function": "setDefaultWorkspace",
"workspace": space,
"token": variables.token
}
return retrieve(await request_func(request))
async def get_default_workspace() -> str:
"""return: Default workspace
"""
request = {
"function": "getDefaultWorkspace",
"token": variables.token
}
return retrieve(await request_func(request), "workspace")
async def remove_workspace(space: str):
request = {
"function": "rmWorkspace",
"workspace": space,
"token": variables.token
}
return retrieve(await request_func(request))
async def add_user(new_username, new_password, admin_bool, first_name,
last_name, email):
request = {
"function": "addUser",
"username": new_username,
"password": new_password,
"admin": admin_bool,
"first": first_name,
"last": last_name,
"email": email,
"token": variables.token
}
return retrieve(await request_func(request))
async def change_password(new_password):
request = {
"function": "password",
"password": new_password,
"token": variables.token
}
return retrieve(await request_func(request))
async def remove_user(rm_username):
request = {
"function": "rmUser",
"password": rm_username,
"token": variables.token
}
return retrieve(await request_func(request))
async def list_users():
request = {
"function": "listUsers",
"token": variables.token
}
return retrieve(await request_func(request), "userList")
async def add_group(group):
request = {
"function": "addGroup",
"group": group,
"token": variables.token
}
return retrieve(await request_func(request))
async def add_user_group(group, user):
request = {
"function": "addUserGroup",
"group": group,
"user": user,
"token": variables.token
}
return retrieve(await request_func(request))
async def remove_user_group(group, user):
request = {
"function": "rmUserGroup",
"group": group,
"user": user,
"token": variables.token
}
return retrieve(await request_func(request))
async def change_owner(group, user):
request = {
"function": "changeOwner",
"group": group,
"user": user,
"token": variables.token
}
return retrieve(await request_func(request))
async def remove_group(group):
request = {
"function": "rmGroup",
"group": group,
"token": variables.token
}
return retrieve(await request_func(request))
async def list_groups():
request = {
"function": "listGroups",
"token": variables.token
}
return retrieve(await request_func(request))
async def list_streams(workspaces="", types=""):
request = {
"function": "listStreams",
"workspaces": workspaces,
"types": types,
"token": variables.token
}
return retrieve(await request_func(request), "senderList")
async def stream_info(stream_id):
request = {
"function": "streamInfo",
"streamID": stream_id,
"token": variables.token
}
return retrieve(await request_func(request), "info")
async def subscribe_to_stream(receiver_id, stream_id):
    if receiver_id not in variables.receiver:
raise Exception("Receiver not yet created.")
request = {
"function": "subscribe",
"receiverID": receiver_id,
"streamID": stream_id,
"token": variables.token
}
return retrieve(await request_func(request), "streamList")
async def unsubscribe_from_stream(stream_id):
    if stream_id not in variables.receiver:
raise Exception("Receiver not yet created.")
request = {
"function": "unsubscribe",
"receiverID": variables.receiver['streamID'],
"streamID": stream_id,
"token": variables.token
}
return retrieve(await request_func(request), "streamList")
async def set_config(config, context, app, username, value):
request = {
"function": "setConfig",
"config": config,
"context": context,
"app": app,
"user": username,
"value": value,
"token": variables.token
}
return retrieve(await request_func(request))
async def disconnect_streams(workspaces=None, types=None, stream_ids=None):
"""Disconnects streams of given workspaces and types, or by streamIDs
Note: if streamIDs are passed, then other params will be ignored
return: list of disconnected streams
"""
if not (workspaces or types or stream_ids):
raise ValueError
request = {
"function": "disconnect",
"token": variables.token
}
if workspaces:
request["workspaces"] = workspaces
if types:
request["types"] = types
if stream_ids:
request["streamIDs"] = stream_ids
return retrieve(await request_func(request), "streamList")
# async def expire():
# """Expires session and invalidates user token"""
# request = {
# "function": "expire",
# "token": token
# }
# return retrieve(await request_func(request))
# Sarthak said this doesn't yet work
/trytond_stock-6.8.2-py3-none-any.whl/trytond/modules/stock/inventory.py
from collections import defaultdict
from sql import Null
from trytond.i18n import gettext
from trytond.model import (
Check, Index, Model, ModelSQL, ModelView, Workflow, fields)
from trytond.model.exceptions import AccessError
from trytond.pool import Pool
from trytond.pyson import Bool, Eval, If
from trytond.tools import grouped_slice
from trytond.transaction import Transaction
from trytond.wizard import Button, StateTransition, StateView, Wizard
from .exceptions import (
InventoryCountWarning, InventoryFutureWarning, InventoryValidationError)
class Inventory(Workflow, ModelSQL, ModelView):
'Stock Inventory'
__name__ = 'stock.inventory'
_rec_name = 'number'
_states = {
'readonly': Eval('state') != 'draft',
}
number = fields.Char('Number', readonly=True,
help="The main identifier for the inventory.")
location = fields.Many2One(
'stock.location', 'Location', required=True,
domain=[('type', '=', 'storage')], states={
'readonly': (Eval('state') != 'draft') | Eval('lines', [0]),
},
help="The location inventoried.")
date = fields.Date('Date', required=True, states={
'readonly': (Eval('state') != 'draft') | Eval('lines', [0]),
},
help="The date of the stock count.")
lines = fields.One2Many(
'stock.inventory.line', 'inventory', 'Lines',
states={
'readonly': (_states['readonly'] | ~Eval('location')
| ~Eval('date')),
})
empty_quantity = fields.Selection([
(None, ""),
('keep', "Keep"),
('empty', "Empty"),
], "Empty Quantity", states=_states,
help="How lines without a quantity are handled.")
company = fields.Many2One('company.company', 'Company', required=True,
states={
'readonly': (Eval('state') != 'draft') | Eval('lines', [0]),
},
help="The company the inventory is associated with.")
state = fields.Selection([
('draft', "Draft"),
('done', "Done"),
('cancelled', "Cancelled"),
], "State", readonly=True, sort=False,
help="The current state of the inventory.")
del _states
@classmethod
def __setup__(cls):
super(Inventory, cls).__setup__()
t = cls.__table__()
cls._sql_indexes.add(
Index(t, (t.state, Index.Equality()), where=t.state == 'draft'))
cls._order.insert(0, ('date', 'DESC'))
cls._transitions |= set((
('draft', 'done'),
('draft', 'cancelled'),
))
cls._buttons.update({
'confirm': {
'invisible': Eval('state').in_(['done', 'cancelled']),
'depends': ['state'],
},
'cancel': {
'invisible': Eval('state').in_(['cancelled', 'done']),
'depends': ['state'],
},
'complete_lines': {
'readonly': Eval('state') != 'draft',
'depends': ['state'],
},
'do_count': {
'readonly': Eval('state') != 'draft',
'depends': ['state'],
},
})
@classmethod
def __register__(cls, module_name):
super(Inventory, cls).__register__(module_name)
cursor = Transaction().connection.cursor()
table = cls.__table_handler__(module_name)
sql_table = cls.__table__()
# Migration from 5.4: remove lost_found
table.not_null_action('lost_found', 'remove')
# Migration from 5.6: rename state cancel to cancelled
cursor.execute(*sql_table.update(
[sql_table.state], ['cancelled'],
where=sql_table.state == 'cancel'))
@staticmethod
def default_state():
return 'draft'
@staticmethod
def default_date():
Date = Pool().get('ir.date')
return Date.today()
@staticmethod
def default_company():
return Transaction().context.get('company')
@classmethod
def view_attributes(cls):
return super().view_attributes() + [
('/tree', 'visual', If(Eval('state') == 'cancelled', 'muted', '')),
]
@classmethod
def delete(cls, inventories):
# Cancel before delete
cls.cancel(inventories)
for inventory in inventories:
if inventory.state != 'cancelled':
raise AccessError(
gettext('stock.msg_inventory_delete_cancel',
inventory=inventory.rec_name))
super(Inventory, cls).delete(inventories)
@classmethod
@ModelView.button
@Workflow.transition('done')
def confirm(cls, inventories):
pool = Pool()
Move = pool.get('stock.move')
Date = pool.get('ir.date')
Warning = pool.get('res.user.warning')
today_cache = {}
def in_future(inventory):
if inventory.company not in today_cache:
with Transaction().set_context(company=inventory.company.id):
today_cache[inventory.company] = Date.today()
today = today_cache[inventory.company]
if inventory.date > today:
return inventory
future_inventories = sorted(filter(in_future, inventories))
if future_inventories:
names = ', '.join(i.rec_name for i in future_inventories[:5])
if len(future_inventories) > 5:
                names += '...'
warning_name = Warning.format('date_future', future_inventories)
if Warning.check(warning_name):
raise InventoryFutureWarning(warning_name,
gettext('stock.msg_inventory_date_in_the_future',
inventories=names))
moves = []
for inventory in inventories:
keys = set()
for line in inventory.lines:
key = line.unique_key
if key in keys:
raise InventoryValidationError(
gettext('stock.msg_inventory_line_unique',
line=line.rec_name,
inventory=inventory.rec_name))
keys.add(key)
move = line.get_move()
if move:
moves.append(move)
if moves:
Move.save(moves)
# Skip MoveFutureWarning as it is newly created moves
with Transaction().set_user(0):
Move.do(moves)
@classmethod
@ModelView.button
@Workflow.transition('cancelled')
def cancel(cls, inventories):
Line = Pool().get("stock.inventory.line")
Line.cancel_move([l for i in inventories for l in i.lines])
@classmethod
def create(cls, vlist):
pool = Pool()
Configuration = pool.get('stock.configuration')
config = Configuration(1)
vlist = [x.copy() for x in vlist]
default_company = cls.default_company()
for values in vlist:
if values.get('number') is None:
values['number'] = config.get_multivalue(
'inventory_sequence',
company=values.get('company', default_company)).get()
inventories = super(Inventory, cls).create(vlist)
cls.complete_lines(inventories, fill=False)
return inventories
@classmethod
def write(cls, *args):
super().write(*args)
inventories = cls.browse(set(sum(args[::2], [])))
cls.complete_lines(inventories, fill=False)
@classmethod
def copy(cls, inventories, default=None):
pool = Pool()
Date = pool.get('ir.date')
if default is None:
default = {}
else:
default = default.copy()
default.setdefault('date', Date.today())
default.setdefault('lines.moves', None)
default.setdefault('number', None)
new_inventories = super().copy(inventories, default=default)
cls.complete_lines(new_inventories, fill=False)
return new_inventories
@staticmethod
def grouping():
return ('product',)
@classmethod
@ModelView.button
def complete_lines(cls, inventories, fill=True):
'''
Complete or update the inventories
'''
pool = Pool()
Line = pool.get('stock.inventory.line')
Product = pool.get('product.product')
grouping = cls.grouping()
to_create, to_write = [], []
for inventory in inventories:
# Once done computation is wrong because include created moves
if inventory.state == 'done':
continue
# Compute product quantities
with Transaction().set_context(
company=inventory.company.id,
stock_date_end=inventory.date):
if fill:
pbl = Product.products_by_location(
[inventory.location.id],
grouping=grouping)
else:
product_ids = [l.product.id for l in inventory.lines]
pbl = defaultdict(int)
for product_ids in grouped_slice(product_ids):
pbl.update(Product.products_by_location(
[inventory.location.id],
grouping=grouping,
grouping_filter=(list(product_ids),)))
# Index some data
product2type = {}
product2consumable = {}
for product in Product.browse({line[1] for line in pbl}):
product2type[product.id] = product.type
product2consumable[product.id] = product.consumable
# Update existing lines
for line in inventory.lines:
if line.product.type != 'goods':
Line.delete([line])
continue
key = (inventory.location.id,) + line.unique_key
if key in pbl:
quantity = pbl.pop(key)
else:
quantity = 0.0
values = line.update_values4complete(quantity)
if values:
to_write.extend(([line], values))
if not fill:
continue
# Create lines if needed
for key, quantity in pbl.items():
product_id = key[grouping.index('product') + 1]
if (product2type[product_id] != 'goods'
or product2consumable[product_id]):
continue
if not quantity:
continue
values = Line.create_values4complete(inventory, quantity)
for i, fname in enumerate(grouping, 1):
values[fname] = key[i]
to_create.append(values)
if to_create:
Line.create(to_create)
if to_write:
Line.write(*to_write)
@classmethod
@ModelView.button_action('stock.wizard_inventory_count')
def do_count(cls, inventories):
cls.complete_lines(inventories)
class InventoryLine(ModelSQL, ModelView):
'Stock Inventory Line'
__name__ = 'stock.inventory.line'
_states = {
'readonly': Eval('inventory_state') != 'draft',
}
product = fields.Many2One('product.product', 'Product', required=True,
domain=[
('type', '=', 'goods'),
], states=_states)
uom = fields.Function(fields.Many2One('product.uom', 'UOM',
help="The unit in which the quantity is specified."), 'get_uom')
expected_quantity = fields.Float(
"Expected Quantity", digits='uom', required=True, readonly=True,
states={
'invisible': Eval('id', -1) < 0,
},
help="The quantity the system calculated should be in the location.")
quantity = fields.Float(
"Actual Quantity", digits='uom', states=_states,
help="The actual quantity found in the location.")
moves = fields.One2Many('stock.move', 'origin', 'Moves', readonly=True)
inventory = fields.Many2One('stock.inventory', 'Inventory', required=True,
ondelete='CASCADE',
states={
'readonly': _states['readonly'] & Bool(Eval('inventory')),
},
help="The inventory the line belongs to.")
inventory_location = fields.Function(
fields.Many2One('stock.location', "Location"),
'on_change_with_inventory_location',
searcher='search_inventory_location')
inventory_date = fields.Function(
fields.Date("Date"),
'on_change_with_inventory_date',
searcher='search_inventory_date')
inventory_state = fields.Function(
fields.Selection('get_inventory_states', "Inventory State",
depends={'inventory'}),
'on_change_with_inventory_state')
@classmethod
def __setup__(cls):
super(InventoryLine, cls).__setup__()
cls.__access__.add('inventory')
t = cls.__table__()
cls._sql_constraints += [
('check_line_qty_pos', Check(t, t.quantity >= 0),
'stock.msg_inventory_line_quantity_positive'),
]
cls._order.insert(0, ('product', 'ASC'))
@classmethod
def __register__(cls, module_name):
transaction = Transaction()
cursor = transaction.connection.cursor()
update = transaction.connection.cursor()
pool = Pool()
Move = pool.get('stock.move')
sql_table = cls.__table__()
move_table = Move.__table__()
super(InventoryLine, cls).__register__(module_name)
table = cls.__table_handler__(module_name)
# Migration from 3.0: use Move origin
if table.column_exist('move'):
cursor.execute(*sql_table.select(sql_table.id, sql_table.move,
where=sql_table.move != Null))
for line_id, move_id in cursor:
update.execute(*move_table.update(
columns=[move_table.origin],
values=['%s,%s' % (cls.__name__, line_id)],
where=move_table.id == move_id))
table.drop_column('move')
# Migration from 4.6: drop required on quantity
table.not_null_action('quantity', action='remove')
@staticmethod
def default_expected_quantity():
return 0.
@fields.depends('product')
def on_change_product(self):
if self.product:
self.uom = self.product.default_uom
@fields.depends('inventory', '_parent_inventory.location')
def on_change_with_inventory_location(self, name=None):
return self.inventory.location if self.inventory else None
@classmethod
def search_inventory_location(cls, name, clause):
nested = clause[0][len(name):]
return [('inventory.' + name + nested, *clause[1:])]
@fields.depends('inventory', '_parent_inventory.date')
def on_change_with_inventory_date(self, name=None):
if self.inventory:
return self.inventory.date
@classmethod
def search_inventory_date(cls, name, clause):
return [('inventory.date',) + tuple(clause[1:])]
@classmethod
def get_inventory_states(cls):
pool = Pool()
Inventory = pool.get('stock.inventory')
return Inventory.fields_get(['state'])['state']['selection']
@fields.depends('inventory', '_parent_inventory.state')
def on_change_with_inventory_state(self, name=None):
if self.inventory:
return self.inventory.state
return 'draft'
def get_rec_name(self, name):
return self.product.rec_name
@classmethod
def search_rec_name(cls, name, clause):
return [('product.rec_name',) + tuple(clause[1:])]
def get_uom(self, name):
return self.product.default_uom.id
@property
def unique_key(self):
key = []
for fname in self.inventory.grouping():
value = getattr(self, fname)
if isinstance(value, Model):
value = value.id
key.append(value)
return tuple(key)
@classmethod
def cancel_move(cls, lines):
Move = Pool().get('stock.move')
moves = [m for l in lines for m in l.moves if l.moves]
Move.cancel(moves)
Move.delete(moves)
def get_move(self):
'''
Return Move instance for the inventory line
'''
pool = Pool()
Move = pool.get('stock.move')
Uom = pool.get('product.uom')
qty = self.quantity
if qty is None:
if self.inventory.empty_quantity is None:
raise InventoryValidationError(
gettext('stock.msg_inventory_missing_empty_quantity',
inventory=self.inventory.rec_name))
if self.inventory.empty_quantity == 'keep':
return
else:
qty = 0.0
delta_qty = Uom.compute_qty(self.uom,
self.expected_quantity - qty,
self.uom)
if delta_qty == 0.0:
return
from_location = self.inventory.location
to_location = self.inventory.location.lost_found_used
if not to_location:
raise InventoryValidationError(
gettext('stock.msg_inventory_location_missing_lost_found',
inventory=self.inventory.rec_name,
location=self.inventory.location.rec_name))
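        # delta_qty > 0 means counted stock is missing: move it from the
        # inventoried location to lost-and-found; a negative delta is a
        # surplus, so the locations are swapped and the sign flipped below.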
if delta_qty < 0:
(from_location, to_location, delta_qty) = \
(to_location, from_location, -delta_qty)
return Move(
from_location=from_location,
to_location=to_location,
quantity=delta_qty,
product=self.product,
uom=self.uom,
company=self.inventory.company,
effective_date=self.inventory.date,
origin=self,
)
def update_values4complete(self, quantity):
'''
Return update values to complete inventory
'''
values = {}
# if nothing changed, no update
if self.expected_quantity == quantity:
return values
values['expected_quantity'] = quantity
return values
@classmethod
def create_values4complete(cls, inventory, quantity):
'''
Return create values to complete inventory
'''
return {
'inventory': inventory.id,
'expected_quantity': quantity,
}
@classmethod
def delete(cls, lines):
for line in lines:
if line.inventory_state not in {'cancelled', 'draft'}:
raise AccessError(
gettext('stock.msg_inventory_line_delete_cancel',
line=line.rec_name,
inventory=line.inventory.rec_name))
super(InventoryLine, cls).delete(lines)
class Count(Wizard):
"Stock Inventory Count"
__name__ = 'stock.inventory.count'
start_state = 'search'
search = StateView(
'stock.inventory.count.search',
'stock.inventory_count_search_view_form', [
Button("End", 'end', 'tryton-cancel'),
Button("Select", 'quantity', 'tryton-forward', default=True),
])
quantity = StateView(
'stock.inventory.count.quantity',
'stock.inventory_count_quantity_view_form', [
Button("Cancel", 'search', 'tryton-cancel'),
Button("Add", 'add', 'tryton-ok', default=True),
])
add = StateTransition()
def default_quantity(self, fields):
pool = Pool()
InventoryLine = pool.get('stock.inventory.line')
Warning = pool.get('res.user.warning')
values = {}
lines = InventoryLine.search(
self.get_line_domain(self.record), limit=1)
if not lines:
warning_name = '%s.%s.count_create' % (
self.record, self.search.search)
if Warning.check(warning_name):
raise InventoryCountWarning(warning_name,
gettext('stock.msg_inventory_count_create_line',
search=self.search.search.rec_name))
line, = InventoryLine.create([self.get_line_values(self.record)])
else:
line, = lines
values['line'] = line.id
values['product'] = line.product.id
values['uom'] = line.uom.id
if line.uom.rounding == 1:
values['quantity'] = 1.
return values
def get_line_domain(self, inventory):
pool = Pool()
Product = pool.get('product.product')
domain = [
('inventory', '=', inventory.id),
]
if isinstance(self.search.search, Product):
domain.append(('product', '=', self.search.search.id))
return domain
def get_line_values(self, inventory):
pool = Pool()
Product = pool.get('product.product')
InventoryLine = pool.get('stock.inventory.line')
values = InventoryLine.create_values4complete(inventory, 0)
if isinstance(self.search.search, Product):
values['product'] = self.search.search.id
return values
def transition_add(self):
if self.quantity.line and self.quantity.quantity:
line = self.quantity.line
if line.quantity:
line.quantity += self.quantity.quantity
else:
line.quantity = self.quantity.quantity
line.save()
return 'search'
class CountSearch(ModelView):
"Stock Inventory Count"
__name__ = 'stock.inventory.count.search'
search = fields.Reference(
"Search", [
('product.product', "Product"),
],
required=True,
domain={
'product.product': [
('type', '=', 'goods'),
('consumable', '=', False),
],
},
help="The item that's counted.")
@classmethod
def default_search(cls):
return 'product.product,-1'
class CountQuantity(ModelView):
"Stock Inventory Count"
__name__ = 'stock.inventory.count.quantity'
line = fields.Many2One(
'stock.inventory.line', "Line", readonly=True, required=True)
product = fields.Many2One('product.product', "Product", readonly=True)
uom = fields.Many2One('product.uom', "UOM", readonly=True,
help="The unit in which the quantities are specified.")
total_quantity = fields.Float(
"Total Quantity", digits='uom', readonly=True,
help="The total amount of the line counted so far.")
quantity = fields.Float(
"Quantity", digits='uom', required=True,
help="The quantity to add to the existing count.")
@fields.depends('quantity', 'line')
def on_change_quantity(self):
if self.line:
self.total_quantity = (
                (self.line.quantity or 0) + (self.quantity or 0))
/svnplus-3.19.1.tar.gz/svnplus-3.19.1/README
THIS PYTHON PACKAGE IS A hook FOR subversion AND IS NOT MEANT TO BE RUN
FROM THE COMMAND LINE UNDER NORMAL USAGE.
Very configurable, very easy to install.
Prerequisites:
python2.7
subversion
apache/httpd - optional
mod_dav_svn - optional if apache is installed
Essentially you must have a working, accessible, subversion repository
and python installed including pip.
Install in 3 steps:
1. pip install svnplus
2. install the subversion pre-commit "hook"
2.1 cd to /path/to/subversion/repo/<repo-name>/hooks
2.2 put the file named "pre-commit", included with the module, there
2.2.1 do not add a ".py" extension or subversion will not
run it.
2.3 chown apache:apache pre-commit (or the user/group running
subversion server daemon)
2.4 chmod 750 pre-commit
3. auto generate a default pre-commit.conf file (which you can then edit
as you like).
3.1 cd to /path/to/subversion/repo/<repo-name>/hooks
3.2 ./pre-commit --generate > pre-commit.conf
3.3 chown apache:apache pre-commit.conf (or the user/group running
subversion server daemon)
3.4 chmod 640 pre-commit.conf
    3.5 edit pre-commit.conf and follow the comments
SVNPlus TagProtect provides immutability (write once) protection for
the "/tags" directory of a subversion repository. This is the default
protected directory and everything is configurable.
There are two parts included with to this package, a subversion python
script named "pre-commit" and the svnplus python package. Subversion
requires that this software be invoked with the name "pre-commit",
this is not changeable.
The "pre-commit" script can, and should, be run from the command line
for configuration testing and configuration debugging.
Installation of this subversion hook is trivial, simply put "pre-commit"
into the directory named hooks found under the directory where you
have built the subversion repository. Make sure "pre-commit" is owned
and executable by the owner of the httpd process or whatever daemon is
serving subversion.
The subversion administrator - or anyone with write permission on the
subversion installation directory - can change the configuration.
################################################################################
# debug value and where svn, and svnlook, have been installed
DEBUG = 0
SVNPATH = "/usr/bin/svn"
SVNLOOK = "/usr/bin/svnlook"
# The remaining configuration variables comprise an N-Tuple
# and this set can be repeated as many times as wanted.
PROTECTED_PARENT = "/tags" # a literal path
PROTECTED_PRJDIRS = "/tags/*" # literal, glob, or blank
PRJDIR_CREATORS = "*" # or comma list, or blank
ARCHIVE_DIRECTORY = "Archive" # directory name
################################################################################
Do not configure directories with trailing slash characters; if you do,
they will simply be discarded anyway, but to avoid confusion don't add
them. The configuration of the protected project directories variable,
PROTECTED_PRJDIRS, must start with the exact same path as its associated
protected parent configuration, namely PROTECTED_PARENT. This is for
security. Also for security any instances of /../ (or the like) found
in the PROTECTED_PRJDIRS variable will be discarded.
Each PROTECTED_PARENT value must be unique and two(2) or more of them cannot
be subdirectories of each other. For example:
PROTECTED_PARENT = "/tags"
PROTECTED_PARENT = "/tags/foobar"
will not be allowed.
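
For example, a hypothetical configuration protecting two independent parent
directories (the paths and user names below are illustrative only):

    PROTECTED_PARENT = "/tags"
    PROTECTED_PRJDIRS = "/tags/*"
    PRJDIR_CREATORS = "*"
    ARCHIVE_DIRECTORY = "Archive"

    PROTECTED_PARENT = "/releases"
    PROTECTED_PRJDIRS = "/releases/*"
    PRJDIR_CREATORS = "alice,bob"
    ARCHIVE_DIRECTORY = "Archive"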
/cr-vision-0.1.0.tar.gz/cr-vision-0.1.0/src/cr/vision/core/contour.py
import numpy as np
import cv2
from . import colors as crv_colors
class Contour:
'''
Various parameters related to a contour
'''
def __init__(self, contour):
self._contour = contour
self._moments = cv2.moments(contour)
def moments(self):
'''Returns all the moments for a contour'''
return self._moments
def centroid(self):
'''Returns the centroid of a contour'''
moments = self._moments
c_x = int(moments['m10']/moments['m00'])
c_y = int(moments['m01']/moments['m00'])
return (c_x, c_y)
def area(self):
'''Returns the area of the contour'''
return cv2.contourArea(self._contour)
def perimeter(self):
'''Returns the perimeter of the contour'''
return cv2.arcLength(self._contour, True)
def approximate_polygon(self, perimeter_gap_factor=0.1):
'''Returns an approximate shape using the Douglas-Peucker algorithm'''
epsilon = perimeter_gap_factor * self.perimeter()
return cv2.approxPolyDP(self._contour, epsilon, True)
def is_convex(self):
'''Returns if the contour is convex'''
return cv2.isContourConvex(self._contour)
def convex_hull_points(self, clockwise=False):
'''Returns the points which form the convex hull of a contour
using the Sklansky's algorithm'''
return cv2.convexHull(self._contour, clockwise=clockwise)
def convex_hull_indices(self, clockwise=False):
'''Returns the indices of the points which form the convex hull of a contour
using the Sklansky's algorithm'''
return cv2.convexHull(self._contour, clockwise=clockwise, returnPoints=False)
def simple_bounding_box(self):
'''Returns the bounding box of the contour which is straight rectangle
without considering the orientation of object'''
return cv2.boundingRect(self._contour)
def best_fit_bounding_box(self):
'''Returns the rotated bounding box considering the orientation of object'''
min_rect = cv2.minAreaRect(self._contour)
box = cv2.boxPoints(min_rect)
box = np.intp(box)
return box
def best_fit_circle(self):
'''Returns the minimum enclosing circle around the contour'''
(c_x, c_y), radius = cv2.minEnclosingCircle(self._contour)
center = (c_x, c_y)
return center, radius
def best_fit_ellipse(self):
'''Returns the best fitting ellipse around the contour'''
ellipse = cv2.fitEllipse(self._contour)
return ellipse
def best_fit_line(self):
'''Returns the best fitting line around the contour'''
(v_x, v_y, x_0, y_0) = cv2.fitLine(
self._contour, cv2.DIST_L2, 0, 0.01, 0.01)
unit_vector = (v_x, v_y)
point = (x_0, y_0)
return unit_vector, point
def find_external_contours(image, method=cv2.CHAIN_APPROX_SIMPLE):
'''Finds the external outer contours in a given (grayscale) image'''
    result = cv2.findContours(image, mode=cv2.RETR_EXTERNAL,
                              method=method)
    # OpenCV 3.x returns (image, contours, hierarchy); OpenCV 4.x returns
    # (contours, hierarchy), so pick the contours element by tuple length.
    contours = result[0] if len(result) == 2 else result[1]
return [Contour(contour) for contour in contours]
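# Hypothetical usage sketch ('mask' stands for any binary/grayscale image):
#   contours = find_external_contours(mask)
#   largest = max(contours, key=lambda c: c.area())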
class Contours:
'''Helper class to work with a list of contours'''
def __init__(self, contours):
self._contours = contours
def draw_centroids(self, image, color=crv_colors.BLACK, marker_size=10, thickness=2):
'''Draws the centroids of contours on the given image'''
centroids = [contour.centroid() for contour in self._contours]
marker_type = cv2.MARKER_CROSS
for centroid in centroids:
cv2.drawMarker(image, centroid, color, marker_type,
markerSize=marker_size, thickness=thickness)
def draw_simple_bounding_boxes(self, image, color=crv_colors.BLACK, thickness=2):
'''Draws simple bounding boxes around the contours in a given image'''
bounding_boxes = [contour.simple_bounding_box()
for contour in self._contours]
for bounding_box in bounding_boxes:
left, top, width, height = bounding_box
top_left = (left, top)
bottom_right = (left + width, top + height)
cv2.rectangle(image, top_left, bottom_right,
color, thickness=thickness)
def draw_best_fit_bounding_boxes(self, image, color=crv_colors.BLACK, thickness=2):
'''Draws the best fit (rotated) bounding boxes around contours'''
bounding_boxes = [contour.best_fit_bounding_box()
for contour in self._contours]
        cv2.drawContours(image, bounding_boxes, -1, color, thickness)
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_nl-cw.js
'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"a.m.",
"p.m."
],
"DAY": [
"zondag",
"maandag",
"dinsdag",
"woensdag",
"donderdag",
"vrijdag",
"zaterdag"
],
"MONTH": [
"januari",
"februari",
"maart",
"april",
"mei",
"juni",
"juli",
"augustus",
"september",
"oktober",
"november",
"december"
],
"SHORTDAY": [
"zo",
"ma",
"di",
"wo",
"do",
"vr",
"za"
],
"SHORTMONTH": [
"jan.",
"feb.",
"mrt.",
"apr.",
"mei",
"jun.",
"jul.",
"aug.",
"sep.",
"okt.",
"nov.",
"dec."
],
"fullDate": "EEEE d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y HH:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "HH:mm:ss",
"short": "dd-MM-yy HH:mm",
"shortDate": "dd-MM-yy",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "ANG",
"DECIMAL_SEP": ",",
"GROUP_SEP": ".",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "\u00a4\u00a0",
"negSuf": "-",
"posPre": "\u00a4\u00a0",
"posSuf": ""
}
]
},
"id": "nl-cw",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
/clima-0.7.10.tar.gz/clima-0.7.10/README.md
<img src="https://raw.githubusercontent.com/d3rp/clima/master/clima.png" align="left" /> Create a command line interface with minimal setup.
[![PyPI](https://img.shields.io/pypi/v/clima)](https://pypi.org/project/clima/)
[![Python versions](https://img.shields.io/pypi/pyversions/clima)]()
[![PyPI license](https://img.shields.io/pypi/l/clima)]()
[![Build status](https://travis-ci.com/d3rp/clima.svg?branch=master)](https://travis-ci.com/d3rp/clima)
[![Dependencies](https://img.shields.io/librariesio/github/d3rp/clima)]()
# clima - command line interface with a schema
##### Table of contents
* [Briefly](#briefly)
* [Features](#features)
* [Cli definition](#cli-definition)
* [Configuration object in a spiffy](#configuration-object-in-a-spiffy)
* [Installing](#installing)
* [Usage](#usage)
* [Examples and platforms](#examples-and-platforms)
* [Testing the examples](#testing-the-examples)
* [Version printing](#version-printing)
* [Autocompletion](#autocompletion)
* [..in IDEs (wip)](#in-ides-wip)
* [..in bash](#in-bash)
* [Post init hook](#post-init-hook)
* [Cli.post_init()](#clipost_init)
* [Schema.post_init()](#schemapost_init)
* [Configuration options](#configuration-options)
* [Configuration file and environment variables](#configuration-file-and-environment-variables)
* [Type casting with configuration definition](#type-casting-with-configuration-definition)
* [Configuration file in the home directory](#configuration-file-in-the-home-directory)
* [.env file](#env-file)
* [Password unwrapping/decryption with pass](#password-unwrappingdecryption-with-pass)
* [Additional features via Fire](#additional-features-via-fire)
* [Truncated error printing](#truncated-error-printing)
* [Ways to run the script for the uninitiated](#ways-to-run-the-script-for-the-uninitiated)
* [Linking executable script to ~/.local/bin](#linking-executable-script-to-localbin)
* [Packaging a module (pip ready)](#packaging-a-module-pip-ready)
* [Building/Installing from source](#buildinginstalling-from-source)
* [Long description and background](#long-description-and-background)
* [Why another cli framework?](#why-another-cli-framework)
* [Dependencies](#dependencies)
## Briefly
### Features
Clima handles loading and parsing command
line arguments with some off-the-shelf features including:
- a global configuration object
- quick definition of defaults
- defining defaults doubles as description for help on the command line
- type handling with annotations
- definitions with configuration files
- env variables
- loading .env files
- secrets stored with [pass](https://www.passwordstore.org/)
- post_init hook
### Cli definition
Creating a cli:
1. Import all necessary parts from the package clima
1. (optional) Define configuration i.e. Schema
1. Define the command line commands i.e. Cli-class:
![example ascii](https://raw.githubusercontent.com/d3rp/clima/master/example.svg)
Example: to setup a configuration and a command line interface ready to go.
from clima import c
@c
class Cli:
def say_hi(self):
print('oh hi - whatever this is..')
The command line usage form could be as simple as:
my_tool say_hi
### Configuration object in a spiffy
    from clima import c, Schema
# Defining the settings (configuration object)
class S(Schema):
place = 'world'
@c
class Cli:
def say_hi(self):
# using configuration object 'c'
print(f'oh hi - {c.place}')
The command line usage form could be as simple as:
my_tool say_hi
my_tool say_hi --place 'other world'
See the `examples` folder and other sections for more examples. For example the folder includes [something that resembles
the example above](examples/readme_example.py).
## Installing
pip install --user clima
[toc](#table-of-contents)
## Usage
See the example file in [`examples/script_example.py`](examples/script_example.py). Here's a run down of the individual
parts in such a script (adapted from another example at [module example](examples/module_example)).
First import the required components:
from clima import c, Schema
In your code define the `Schema` subclass:
class Configuration(Schema):
a: str = 'A' # a description
x: int = 1 # x description
Here "Configuration" is an arbitrary name, no magic there. The inherited `Schema` class
defines the attributes (i.e. `a` and `x` in this example).
Note the specific formatting of the `Schema` subclass:
# attribute[: type] = default value [# Description for the --help]
a: str = 'A' # a description
`a` is the attribute which can be used in the code later as `c.a`. In this example, it has the type `str` and a default
value of `'A'`. The trailing comment is not redundant: it is parsed into the command line help. The values in square brackets `[]` are
optional.
All of these parts are parsed into the '--help' output of the cli's subcommands, for example:
./script.py foo -h
Will now produce:
Usage: script.py foo [ARGS]
Description: Args:
--a (str): a description (Default is 'A')
--x (int): x description (Default is 1)
The example in the readme can be found at `examples/readme_example.py`.
The subcommands - or commands of the script - should be defined somewhat as follows:
@c
class Cli:
def subcommand_foo(self):
"""This will be shown in --help for subcommand-foo"""
print('foo')
print(c.a)
print(c.x)
def subcommand_bar(self):
"""This will be shown in --help for subcommand-bar"""
print('bar')
The methods are parsed as subcommands and their respective doc strings will show in the
subcommands' help printout. Note the double usage of `c` - first as a decorator and later as the parsed configuration
inside the method:
...
...
print(c.a)
print(c.x)
[toc](#table-of-contents)
## Examples and platforms
Tried and used on linux, macos and windows. However, python packaging and dependency management is sometimes hairy and
your mileage may vary.
More examples in the [examples directory](examples) with printouts of the defined subcommands and helps.
### Testing the examples
The [examples](examples) can be tried out by cloning the repo and running from repo directory root (on linux and the like):
git clone https://github.com/d3rp/clima.git
cd clima
PYTHONPATH=$PWD python ./examples/readme_example.py foo -h
Running the examples that wrap a module:
PYTHONPATH=$PWD python ./examples/module_example/__main__.py -h
PYTHONPATH=$PWD python ./examples/module_example/__main__.py subcommand-foo -h
PYTHONPATH=$PWD python ./examples/module_example/__main__.py subcommand-bar
...
Output should resemble this (fire v0.1.3 prints out Args, while fire v0.2.1 doesn't, though it looks much nicer):
```
$ tester subcommand-foo -- -h
Type: method
String form: <bound method Cli.subcommand_foo of <__main__.Cli object at 0x000002995AD74BE0>>
File: C:\Users\foobar\code\py\clima\tester\__main__.py
Line: 18
Docstring: This will be shown in --help for subcommand-foo
Args:
--a (str): a description (Default is 'A')
--x (int): x description (Default is 1)
Usage: __main__.py subcommand-foo [--X ...]
```
All of the example scripts can be run by installing [poetry](https://python-poetry.org) and running the `run_examples.bash`
script:
pip install --user poetry
./run_examples.bash
[toc](#table-of-contents)
## Version printing
Version printing works via the `version` subcommand. This is intended for scripts that are packaged as command line tools
with poetry. Thus with bumping the version with poetry, clima will handle parsing the current version of your tool so
it can be queried with:
my_tool version
The actual version is parsed into `c`, so overwrite it with `post_init` (or similar) if you want control over it.
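
For instance, a minimal sketch of taking control over it in a `Cli.post_init` (assuming the parsed value is exposed as the `version` field; the version string here is made up):

    @c
    class Cli:
        @staticmethod
        def post_init(s):
            s.version = '1.2.3'  # hypothetical custom version string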
## Autocompletion
### ..in IDEs (wip)
Also, to enable autocompletion in IDEs, this hack suffices:
c: Configuration = c
Put it in the "global space" e.g. just after defining the template. See the [`examples/script_example.py`](examples/script_example.py) for a specific example.
When all is complete, the imported `c` variable should have all the bits and pieces for the configuration. It can be
used inside the Cli class as well as imported around the codebase thus encapsulating all the configurations into one
container with quick access with attributes `c.a`, `c.x`, etc...
### ..in bash
Run your script with `-- --completion` arguments:
my_tool -- --completion
This should print an autocompletion definition to include in your bash completions.
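
For example, one hedged way to wire it up in bash (the target file is illustrative; `~/.bash_completion` is sourced by many bash setups):

    my_tool -- --completion >> ~/.bash_completion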
TBD: zsh etc. completions
## Post init hook
There are two ways to define a post_init hook, depending on whether it is done in the `Schema` subclass or in the `Cli` definition.
### Cli.post_init()
On some occasions it's useful to deduce specific defaults from the given parameters, e.g. in a cross platform build allowing
only minimal cli arguments. For those cases there's a `post_init` hook.
When defining the post_init() in the Cli class, i.e.
@c
class Cli:
@staticmethod
def post_init(s):
if s.platform = 'win':
self.bin_path = 'c:/Users/foo/bar'
else:
s.bin_path = '/Users/mac/sth'
def subcommand(self):
pass
The method will have access to the cli args, but can not introduce new variables to the schema.
This is arguably the more useful of the two variations of post_inits.
Note: The signature of the `post_init()` differs depending on which of the stages it is defined in. For the time being
it is a `@staticmethod`
### Schema.post_init()
This alternative is for to use post_init-like features positioning the steps so that command line arguments can still
override things.
class SoAdvanced(Schema):
platform: str = 'win' # a description
bin_path: pathlib.Path = '' # x description
def post_init(self, *args):
if self.platform = 'win':
self.win_specific_field = 'All your files are locked by us..'
Note: This post_init() does not have access to the cli arguments, but the `Schema`'s post_init can introduce new
attributes/properties/fields/arguments to the configuration, which the Cli-class post-init can't.
Schema post init hook is run after schema initialization, but BEFORE the cli initialization.
[toc](#table-of-contents)
## Configuration options
It's tedious to have to write a long list of parameters on the command line, when most of the use cases
follow a similar pattern. To facilitate the use of configurations, there's several options to choose from.
The `c` decorator/configuration chains multiple configuration options together in order of priority
(lower number overrides higher number):
1. command line arguments
1. Environment variables
1. .env file
1. configuration file definitions
1. decrypted passwords from `~/.password-store` if gnugpg is installed
1. defaults in the subclass inheriting `Schema`
### Configuration file and environment variables
The configuration file should be named with either the postfix `.conf` or `.cfg` e.g. `foo.conf` and have an ini type formatting with
a 'Clima' section:
# foo.conf
[Clima]
x = 2
The keys are the same as what you define in the schema. You can define all, some or none of the attributes.
Same applies for the env variables.
# linux example
X=2 tester subcommand-foo
A configuration file defined this way can be located in the current working directory or - if your `Schema` defines a
`cwd` field - there. Clima
will try to use the first configuration file it finds, so that might produce some caveats.
class Conf(Schema):
cwd = ''
# Running ./script.py --cwd <folder> would automatically load the first *.conf file in <folder>
### Type casting with configuration definition
The `Schema` definition can have type annotations, which are used to cast the given arguments. For example
class C(Schema):
p: Path = '' # Path to something
Results in `c.p`'s type cast as `Path`.
### Configuration file in the home directory
You can also define the config file in the configuration class (one inheriting `Schema`) by defining the
magic field `CFG`.
For example, lets say the command `my_tool` (packaged etc) has a user configuration file at `~/.my_tool.conf`. This
can now be handled with just adding `CFG = Path.home() / '.my_tool.conf` to the Schema:
from pathlib import Path
class S(Schema):
bing = 'bang'
CFG = Path.home() / '.my_tool.conf'
Then, for example, the configuration file would be written as:
#~/.my_tool.conf
[Clima]
bing = diudiu
Running the command `my_tool` would produce the value in the configuration file, though the arguments can still be overridden.
my_tool run
# diudiu
my_tool run --bing bam
# bam
### .env file
This is handled by [dotenv](https://github.com/theskumar/python-dotenv). In short, all the defaults defined in the
`Schema` subclass can be overridden either by:

```
<field> = <value>
```

or

```
export <field> = <value>
```
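For example, a hypothetical `.env` file next to the script, overriding the `bing` default from the earlier `S` schema:

```
# .env
bing = diudiu
```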
### Password unwrapping/decryption with pass
Note: Currently this works only for gpg keys without a passphrase. It's not ideal, but it's better than plain text `.env`
files ;)
Note 2: Leading and trailing whitespace (including `\n` linefeeds) is stripped when decrypting.
[pass](https://passwordstore.org) can be used to store passwords as gpg-encrypted files under the home directory. Clima
uses the default path of `~/.password-store` and the files found within. It will then match the arguments with the
stored passwords, for example:

```console
$ tree -A ~/.password-store
/Users/me/.password-store
├── work
│   ├── ci
│   │   ├── sign_id.gpg
│   │   ├── sign_pw.gpg
...
```
And a corresponding `Schema` definition:

```python
class Conf(Schema):
    sign_id: str = ''  # signing id for the CI
    sign_pw: str = ''  # signing pw for the CI
```
Clima would accept those as cli arguments or, if omitted, traverse the `.password-store`, decrypt the
found `sign_id.gpg` and `sign_pw.gpg`, and place the values in the configuration object `c`.
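For reference, entries like the ones above can be created with `pass` itself, which prompts for the secret:

```console
pass insert work/ci/sign_id
pass insert work/ci/sign_pw
```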
[toc](#table-of-contents)
## Additional features via Fire
See the [Python Fire's Flags](https://github.com/google/python-fire/blob/master/docs/using-cli.md#python-fires-flags)
documentation for nice additional features such as:
```console
# e.g. tester.py is our cli program
tester.py subcommand-foo -- --trace
tester.py -- --interactive
tester.py -- --completion
```
## Truncated error printing
Even though I've used python for a few years professionally, I'm still not satisfied with its error printing. Clima
truncates the error lists and tries to provide a more readable version of the "first" point of failure. The whole
traceback is written to the logfile `exception_traceback.log`, so it can be examined when the truncated output is not
enough.
Note: When running the examples, the `exception_traceback.log` file will be written inside the `examples` directory.
## Ways to run the script for the uninitiated
Here's a section suggesting ways to wrap scripts using clima.
### Linking executable script to ~/.local/bin
Let's say those lines were written in a file named `script.py`. Command line usage in a terminal would then be e.g.:

```console
python script.py foo
python script.py foo --a 42
```

Adding this shebang line at the top of `script.py`:

```python
#!/usr/bin/env python
```

and changing its execution permissions (mac, linux):

```console
chmod +x script.py
```

allows for a shorter means of execution:

```console
./script.py foo
```

Now this could be linked as an ad hoc command, for example:

```console
ln -s $PWD/script.py ~/.local/bin/my_command
```
### Packaging a module (pip ready)
For a pip-installable package, one could [package this as a runnable command](https://github.com/d3rp/my_tool) -
publish it in the public or one's private pypi etc. - and then approach the convenience factor shown at first:

```console
pip install my_tool
my_command foo -h
```

Publishing with poetry is quite straightforward. First create an account on pypi.org and then:

```console
cd <project directory>
poetry build
poetry publish
```

You can use `version` to bump up versions:

```console
poetry version patch
```
## Building/Installing from source
This repo is based on [poetry](https://poetry.eustace.io).
```console
git clone https://github.com/d3rp/clima.git
cd clima
poetry install --no-dev
```

The `--no-dev` flag installs the runtime environment without the development tooling.
[toc](#table-of-contents)
## Long description and background
The subcommands are written as a class encapsulating the "business logic".
You can define a simple schema of the configuration that maps to the command line arguments.
In other words, you can use this to wrap your scripts as command line commands without resorting to bash or
maintaining argument parsing in python. It removes the need to duplicate comments just so `--help` remembers what the arguments were and what they did. Sprinkling some decorator magic on top offers the typical experience of a cli program (argument parsing and validation, --help, subcommands, ...).
The implementation builds on the premise that a simple script usually has a script-wide global configuration used throughout the user code, i.e. a context for the program that is referred to in different parts of the code. That configuration is populated from the given arguments, falling back on defaults in the code and some further complementary options. It is then made accessible via a global `c` variable that can be tossed around the code base with very little additional effort. With a small adjustment the fields can be made to autocomplete in IDEs (as attributes), which helps as the schema grows larger: typing `c.` offers the fields of your "schema" as attributes.
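A minimal sketch of that pattern - assuming clima's usual `from clima import c, Schema` entry points, with names made
up for illustration:

```python
from clima import c, Schema

class Conf(Schema):
    target: str = 'world'  # whom to greet

@c
class Cli:
    def greet(self):
        """Greet the configured target"""
        # c.target resolves cli args, env vars, config files and the default above
        print(f'hello {c.target}')

# ./script.py greet               -> hello world
# ./script.py greet --target moon -> hello moon
```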
### Why another cli framework?
Clima is not intended to cater to all the needs of a feature-complete cli framework like the ones listed below.
It's a package that helps with the boilerplate to get quick but reusable tools for your workflow.
Other options for a full-featured cli experience:
* [docopt](https://docopt.org)
* [fire](https://github.com/google/python-fire)
* [cleo](https://github.com/sdispater/cleo)
* [click](https://click.palletsprojects.com)
* [typer](https://github.com/tiangolo/typer)
## Dependencies
* [dotenv](https://github.com/theskumar/python-dotenv)
* gnupg - this is a pass-through though: if it's not installed, the feature is not in use
* fire - [python-fire](https://github.com/google/python-fire) from google does the cli wrapping. It's forked and included
in the repo - I wanted to keep the version 0.1.x formatting and help output, with a few hacks of my own
[toc](#table-of-contents)
/pyside2_mess_server-0.1.tar.gz/pyside2_mess_server-0.1/server/server_db/server_data_services.py
import datetime
from server_db.models.server_models import User, UserHistory, \
ActiveUser, LoginHistory, Contact
from server_db.data import session_factory
from pathlib import Path
from dynaconf import settings
class ServerStorage:
"""
Server-side database class.
"""
def __init__(self):
# Create the database engine
db_def_name = Path(settings.get('DATABASES.SERVER.NAME'))
session_factory.global_init(db_def_name)
session_factory.create_tables()
self.session = session_factory.create_session()
# If the active users table contains records, they must be removed:
# when a connection is established, clear the active users table
if self.session.query(ActiveUser).count() > 0:
self.session.query(ActiveUser).delete()
self.session.commit()
def get_user_by_name(self, username):
"""Возвращает объект пользователя по его имени."""
user = self.session.query(User).filter(User.name == username.lower())
return user.first() if user.count() else None
def get_id_by_name(self, username):
"""
Check that the username exists in the users table.
:param username:
:return: the record id if the user exists; otherwise the process exits with status 1
"""
user = self.session.query(User).filter(User.name == username).first()
if user:
return user.id
else:
exit(1)
def get_user_by_like(self, query):
"""Возвращает объект пользователя по его имени."""
users = self.session.query(
User.name,
User.last_login,
UserHistory.sent,
UserHistory.accepted).filter(
User.id == UserHistory.id,
User.name.ilike(f'%{query}%'))
return users.all()
def user_modify(self, username, new_username):
"""
Change a user's data (rename).
:param username:
:param new_username:
:return:
"""
self.session.query(User).filter(
User.name == username).update({'name': new_username})
self.session.commit()
def user_login(self, username, ip_address, port, key):
"""
Runs when a user logs in; records the login in the database.
:param username:
:param ip_address:
:param port:
:param key:
:return:
"""
# print(username, ip_address, port)
# Query the users table for a user with this name
# client = self.find_by_name(username)
user = self.get_user_by_name(username)
# If the user is already present in the table, update the last login time
if user:
user.last_login = datetime.datetime.now()
if user.pubkey != key:
user.pubkey = key
# Otherwise raise an exception
else:
raise ValueError('User is not registered.')
# Now create a record in the active users table for this login.
new_active_user = ActiveUser(
user.id, ip_address, port, datetime.datetime.now())
self.session.add(new_active_user)
# ...and save it to the login history
history = LoginHistory(
user.id,
datetime.datetime.now(),
ip_address,
port)
self.session.add(history)
# Commit the changes
self.session.commit()
def add_user(self, name, pass_hash):
"""
Register a user. Takes a name and a password hash
and creates a record in the statistics table.
:param name:
:param pass_hash:
:return:
"""
user_row = User(name, pass_hash)
self.session.add(user_row)
self.session.commit()
history_row = UserHistory(user_row.id)
self.session.add(history_row)
self.session.commit()
def remove_user(self, name):
"""
Remove a user from the database.
:param name:
:return:
"""
user = self.get_user_by_name(name)
self.session.query(ActiveUser).filter_by(user=user.id).delete()
self.session.query(LoginHistory).filter_by(name=user.id).delete()
self.session.query(Contact).filter_by(user=user.id).delete()
self.session.query(Contact).filter_by(contact=user.id).delete()
self.session.query(UserHistory).filter_by(user=user.id).delete()
self.session.query(User).filter_by(name=name).delete()
self.session.commit()
def get_hash(self, name):
"""
Return the password hash of the requested user.
:param name:
:return:
"""
user = self.get_user_by_name(name)
return user.pass_hash
def get_pubkey(self, name):
"""
Return the user's public key.
:param name:
:return:
"""
user = self.get_user_by_name(name)
return user.pubkey
def check_user(self, name):
"""
Check whether a user exists.
:param name:
:return:
"""
user = self.get_user_by_name(name)
return user is not None
def user_logout(self, username):
"""
Record a user's disconnection.
:param username:
:return:
"""
# Look up the departing user in the users table
user = self.get_user_by_name(username)
# Remove the user's record from the ActiveUser table
self.session.query(ActiveUser).filter_by(user=user.id).delete()
# Commit the changes
self.session.commit()
def process_message(self, sender, recipient):
"""
Record a message transfer and update the corresponding counters in the DB.
:param sender:
:param recipient:
:return:
"""
# Get the sender's and recipient's IDs
sender = self.get_id_by_name(sender)
recipient = self.get_id_by_name(recipient)
sender_row = self.session.query(
UserHistory).filter_by(user=sender).first()
sender_row.sent += 1
recipient_row = self.session.query(
UserHistory).filter_by(user=recipient).first()
recipient_row.accepted += 1
self.session.commit()
def add_contact(self, user, contact):
"""
Add a contact for a user.
:param user:
:param contact:
:return:
"""
# Get the user objects
user = self.get_user_by_name(user)
contact = self.get_user_by_name(contact)
# Make sure this is not a duplicate and that the contact can exist (the user field is trusted)
if not contact or self.session.query(Contact).filter_by(
user=user.id, contact=contact.id).count():
return
# Create the object and store it in the database
contact_row = Contact(user.id, contact.id)
self.session.add(contact_row)
self.session.commit()
def remove_contact(self, user, contact):
"""
Remove a contact from the database.
:param user:
:param contact:
:return:
"""
# Get the user objects
user = self.get_user_by_name(user)
contact = self.get_user_by_name(contact)
# Make sure the contact can exist (the user field is trusted)
if not contact:
return
# Delete the requested contact link
self.session.query(Contact).filter(
Contact.user == user.id,
Contact.contact == contact.id
).delete()
self.session.commit()
def users_list(self):
"""
Return the list of known users with their last login time.
:return:
"""
query = self.session.query(
User.name,
User.last_login,
)
# Return a list of tuples
return query.all()
# Returns the list of active users
def active_users_list(self):
"""
Return the list of active users.
Joins the tables and collects (name, address, port, login time) tuples.
:return:
"""
query = self.session.query(
User.id,
User.name,
ActiveUser.ip_address,
ActiveUser.port,
ActiveUser.login_time
).join(User)
# Return a list of tuples
return query.all()
def login_history(self, username=None):
"""
Return the login history for a given user, or for all users.
:param username:
:return:
"""
# Query the login history
query = self.session.query(User.name,
LoginHistory.ip,
LoginHistory.port,
LoginHistory.date_time
).join(User)
# If a username was specified, filter by it
if username:
query = query.filter(User.name == username)
return query.all()
def get_contacts(self, username):
"""
Return a user's contact list.
:param username:
:return:
"""
# Look up the specified user
user = self.session.query(User).filter_by(name=username).one()
# Query the user's contact list
query = self.session.query(Contact, User.name). \
filter_by(user=user.id). \
join(User, Contact.contact == User.id)
# Select only the user names and return them.
return [contact[1] for contact in query.all()]
def message_history(self):
"""
Return the number of messages sent and received per user.
:return:
"""
query = self.session.query(
User.name,
User.last_login,
UserHistory.sent,
UserHistory.accepted
).join(User)
# Return a list of tuples
return query.all()
# Debugging
if __name__ == '__main__':
test_db = ServerStorage()
# test_db.user_login('1111', '192.168.1.113', 8080)
# test_db.user_login('McG', '192.168.1.113', 8081)
# print(test_db.users_list())
# print(test_db.active_users_list())
# # test_db.user_logout('McG')
# # print(test_db.login_history('re'))
# # test_db.add_contact('test2', 'test1')
# # test_db.add_contact('test1', 'test3')
# # test_db.add_contact('test1', 'test6')
# # test_db.remove_contact('test1', 'test3')
# test_db.process_message('McG', '1111')
# print(test_db.message_history())
# print(test_db.get_user_by_like('dd'))
# print(test_db.get_user_by_name('ddd'))
test_db.user_modify('yuyuy', 'new2020')
/e-fonenana-frontend-20190305.1.tar.gz/e-fonenana-frontend-20190305.1/hass_frontend/19d81aa3bd198f084dd6.chunk.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[66],{159:function(module,__webpack_exports__,__webpack_require__){"use strict";var _compute_object_id__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(177);__webpack_exports__.a=stateObj=>stateObj.attributes.friendly_name===void 0?Object(_compute_object_id__WEBPACK_IMPORTED_MODULE_0__.a)(stateObj.entity_id).replace(/_/g," "):stateObj.attributes.friendly_name||""},163:function(module,__webpack_exports__,__webpack_require__){"use strict";__webpack_require__.d(__webpack_exports__,"a",function(){return domainIcon});var _const__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(109);const fixedIcons={alert:"hass:alert",automation:"hass:playlist-play",calendar:"hass:calendar",camera:"hass:video",climate:"hass:thermostat",configurator:"hass:settings",conversation:"hass:text-to-speech",device_tracker:"hass:account",fan:"hass:fan",group:"hass:google-circles-communities",history_graph:"hass:chart-line",homeassistant:"hass:home-assistant",homekit:"hass:home-automation",image_processing:"hass:image-filter-frames",input_boolean:"hass:drawing",input_datetime:"hass:calendar-clock",input_number:"hass:ray-vertex",input_select:"hass:format-list-bulleted",input_text:"hass:textbox",light:"hass:lightbulb",mailbox:"hass:mailbox",notify:"hass:comment-alert",person:"hass:account",plant:"hass:flower",proximity:"hass:apple-safari",remote:"hass:remote",scene:"hass:google-pages",script:"hass:file-document",sensor:"hass:eye",simple_alarm:"hass:bell",sun:"hass:white-balance-sunny",switch:"hass:flash",timer:"hass:timer",updater:"hass:cloud-upload",vacuum:"hass:robot-vacuum",water_heater:"hass:thermometer",weblink:"hass:open-in-new"};function domainIcon(domain,state){if(domain in fixedIcons){return fixedIcons[domain]}switch(domain){case"alarm_control_panel":switch(state){case"armed_home":return"hass:bell-plus";case"armed_night":return"hass:bell-sleep";case"disarmed":return"hass:bell-outline";case"triggered":return"hass:bell-ring";default:return"hass:bell";}case"binary_sensor":return state&&"off"===state?"hass:radiobox-blank":"hass:checkbox-marked-circle";case"cover":return"closed"===state?"hass:window-closed":"hass:window-open";case"lock":return state&&"unlocked"===state?"hass:lock-open":"hass:lock";case"media_player":return state&&"off"!==state&&"idle"!==state?"hass:cast-connected":"hass:cast";case"zwave":switch(state){case"dead":return"hass:emoticon-dead";case"sleeping":return"hass:sleep";case"initializing":return"hass:timer-sand";default:return"hass:z-wave";}default:console.warn("Unable to find icon for domain "+domain+" ("+state+")");return _const__WEBPACK_IMPORTED_MODULE_0__.a;}}},164:function(module,__webpack_exports__,__webpack_require__){"use strict";__webpack_require__.d(__webpack_exports__,"a",function(){return HaIcon});var _polymer_iron_icon_iron_icon__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(97);const ironIconClass=customElements.get("iron-icon");let loaded=!1;class HaIcon extends ironIconClass{constructor(...args){super(...args);this._iconsetName=void 0}listen(node,eventName,methodName){super.listen(node,eventName,methodName);if(!loaded&&"mdi"===this._iconsetName){loaded=!0;__webpack_require__.e(58).then(__webpack_require__.bind(null,205))}}}customElements.define("ha-icon",HaIcon)},166:function(module,__webpack_exports__,__webpack_require__){"use strict";__webpack_require__.d(__webpack_exports__,"a",function(){return 
computeDomain});function computeDomain(entityId){return entityId.substr(0,entityId.indexOf("."))}},177:function(module,__webpack_exports__,__webpack_require__){"use strict";__webpack_require__.d(__webpack_exports__,"a",function(){return computeObjectId});function computeObjectId(entityId){return entityId.substr(entityId.indexOf(".")+1)}},178:function(module,__webpack_exports__,__webpack_require__){"use strict";var common_const=__webpack_require__(109),compute_domain=__webpack_require__(166),domain_icon=__webpack_require__(163);function binarySensorIcon(state){const activated=state.state&&"off"===state.state;switch(state.attributes.device_class){case"battery":return activated?"hass:battery":"hass:battery-outline";case"cold":return activated?"hass:thermometer":"hass:snowflake";case"connectivity":return activated?"hass:server-network-off":"hass:server-network";case"door":return activated?"hass:door-closed":"hass:door-open";case"garage_door":return activated?"hass:garage":"hass:garage-open";case"gas":case"power":case"problem":case"safety":case"smoke":return activated?"hass:shield-check":"hass:alert";case"heat":return activated?"hass:thermometer":"hass:fire";case"light":return activated?"hass:brightness-5":"hass:brightness-7";case"lock":return activated?"hass:lock":"hass:lock-open";case"moisture":return activated?"hass:water-off":"hass:water";case"motion":return activated?"hass:walk":"hass:run";case"occupancy":return activated?"hass:home-outline":"hass:home";case"opening":return activated?"hass:square":"hass:square-outline";case"plug":return activated?"hass:power-plug-off":"hass:power-plug";case"presence":return activated?"hass:home-outline":"hass:home";case"sound":return activated?"hass:music-note-off":"hass:music-note";case"vibration":return activated?"hass:crop-portrait":"hass:vibrate";case"window":return activated?"hass:window-closed":"hass:window-open";default:return activated?"hass:radiobox-blank":"hass:checkbox-marked-circle";}}function coverIcon(state){const open="closed"!==state.state;switch(state.attributes.device_class){case"garage":return open?"hass:garage-open":"hass:garage";default:return Object(domain_icon.a)("cover",state.state);}}const fixedDeviceClassIcons={humidity:"hass:water-percent",illuminance:"hass:brightness-5",temperature:"hass:thermometer",pressure:"hass:gauge"};function sensorIcon(state){const dclass=state.attributes.device_class;if(dclass&&dclass in fixedDeviceClassIcons){return fixedDeviceClassIcons[dclass]}if("battery"===dclass){const battery=+state.state;if(isNaN(battery)){return"hass:battery-unknown"}const batteryRound=10*Math.round(battery/10);if(100<=batteryRound){return"hass:battery"}if(0>=batteryRound){return"hass:battery-alert"}return`${"hass"}:battery-${batteryRound}`}const unit=state.attributes.unit_of_measurement;if(unit===common_const.j||unit===common_const.k){return"hass:thermometer"}return Object(domain_icon.a)("sensor")}function inputDateTimeIcon(state){if(!state.attributes.has_date){return"hass:clock"}if(!state.attributes.has_time){return"hass:calendar"}return Object(domain_icon.a)("input_datetime")}__webpack_require__.d(__webpack_exports__,"a",function(){return stateIcon});const domainIcons={binary_sensor:binarySensorIcon,cover:coverIcon,sensor:sensorIcon,input_datetime:inputDateTimeIcon};function stateIcon(state){if(!state){return common_const.a}if(state.attributes.icon){return state.attributes.icon}const domain=Object(compute_domain.a)(state.entity_id);if(domain in domainIcons){return domainIcons[domain](state)}return 
Object(domain_icon.a)(domain,state.state)}},180:function(module,__webpack_exports__,__webpack_require__){"use strict";var lit_element__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(5);function _decorate(decorators,factory,superClass,mixins){var api=_getDecoratorsApi();if(mixins){for(var i=0;i<mixins.length;i++){api=mixins[i](api)}}var r=factory(function initialize(O){api.initializeInstanceElements(O,decorated.elements)},superClass),decorated=api.decorateClass(_coalesceClassElements(r.d.map(_createElementDescriptor)),decorators);api.initializeClassElements(r.F,decorated.elements);return api.runClassFinishers(r.F,decorated.finishers)}function _getDecoratorsApi(){_getDecoratorsApi=function(){return api};var api={elementsDefinitionOrder:[["method"],["field"]],initializeInstanceElements:function(O,elements){["method","field"].forEach(function(kind){elements.forEach(function(element){if(element.kind===kind&&"own"===element.placement){this.defineClassElement(O,element)}},this)},this)},initializeClassElements:function(F,elements){var proto=F.prototype;["method","field"].forEach(function(kind){elements.forEach(function(element){var placement=element.placement;if(element.kind===kind&&("static"===placement||"prototype"===placement)){var receiver="static"===placement?F:proto;this.defineClassElement(receiver,element)}},this)},this)},defineClassElement:function(receiver,element){var descriptor=element.descriptor;if("field"===element.kind){var initializer=element.initializer;descriptor={enumerable:descriptor.enumerable,writable:descriptor.writable,configurable:descriptor.configurable,value:void 0===initializer?void 0:initializer.call(receiver)}}Object.defineProperty(receiver,element.key,descriptor)},decorateClass:function(elements,decorators){var newElements=[],finishers=[],placements={static:[],prototype:[],own:[]};elements.forEach(function(element){this.addElementPlacement(element,placements)},this);elements.forEach(function(element){if(!_hasDecorators(element))return newElements.push(element);var elementFinishersExtras=this.decorateElement(element,placements);newElements.push(elementFinishersExtras.element);newElements.push.apply(newElements,elementFinishersExtras.extras);finishers.push.apply(finishers,elementFinishersExtras.finishers)},this);if(!decorators){return{elements:newElements,finishers:finishers}}var result=this.decorateConstructor(newElements,decorators);finishers.push.apply(finishers,result.finishers);result.finishers=finishers;return result},addElementPlacement:function(element,placements,silent){var keys=placements[element.placement];if(!silent&&-1!==keys.indexOf(element.key)){throw new TypeError("Duplicated element ("+element.key+")")}keys.push(element.key)},decorateElement:function(element,placements){for(var extras=[],finishers=[],decorators=element.decorators,i=decorators.length-1,keys;0<=i;i--){keys=placements[element.placement];keys.splice(keys.indexOf(element.key),1);var elementObject=this.fromElementDescriptor(element),elementFinisherExtras=this.toElementFinisherExtras((0,decorators[i])(elementObject)||elementObject);element=elementFinisherExtras.element;this.addElementPlacement(element,placements);if(elementFinisherExtras.finisher){finishers.push(elementFinisherExtras.finisher)}var newExtras=elementFinisherExtras.extras;if(newExtras){for(var j=0;j<newExtras.length;j++){this.addElementPlacement(newExtras[j],placements)}extras.push.apply(extras,newExtras)}}return{element:element,finishers:finishers,extras:extras}},decorateConstructor:function(elements,decorators){for(var 
finishers=[],i=decorators.length-1;0<=i;i--){var obj=this.fromClassDescriptor(elements),elementsAndFinisher=this.toClassDescriptor((0,decorators[i])(obj)||obj);if(elementsAndFinisher.finisher!==void 0){finishers.push(elementsAndFinisher.finisher)}if(elementsAndFinisher.elements!==void 0){elements=elementsAndFinisher.elements;for(var j=0;j<elements.length-1;j++){for(var k=j+1;k<elements.length;k++){if(elements[j].key===elements[k].key&&elements[j].placement===elements[k].placement){throw new TypeError("Duplicated element ("+elements[j].key+")")}}}}}return{elements:elements,finishers:finishers}},fromElementDescriptor:function(element){var obj={kind:element.kind,key:element.key,placement:element.placement,descriptor:element.descriptor},desc={value:"Descriptor",configurable:!0};Object.defineProperty(obj,Symbol.toStringTag,desc);if("field"===element.kind)obj.initializer=element.initializer;return obj},toElementDescriptors:function(elementObjects){if(elementObjects===void 0)return;return _toArray(elementObjects).map(function(elementObject){var element=this.toElementDescriptor(elementObject);this.disallowProperty(elementObject,"finisher","An element descriptor");this.disallowProperty(elementObject,"extras","An element descriptor");return element},this)},toElementDescriptor:function(elementObject){var kind=elementObject.kind+"";if("method"!==kind&&"field"!==kind){throw new TypeError("An element descriptor's .kind property must be either \"method\" or"+" \"field\", but a decorator created an element descriptor with"+" .kind \""+kind+"\"")}var key=_toPropertyKey(elementObject.key),placement=elementObject.placement+"";if("static"!==placement&&"prototype"!==placement&&"own"!==placement){throw new TypeError("An element descriptor's .placement property must be one of \"static\","+" \"prototype\" or \"own\", but a decorator created an element descriptor"+" with .placement \""+placement+"\"")}var descriptor=elementObject.descriptor;this.disallowProperty(elementObject,"elements","An element descriptor");var element={kind:kind,key:key,placement:placement,descriptor:Object.assign({},descriptor)};if("field"!==kind){this.disallowProperty(elementObject,"initializer","A method descriptor")}else{this.disallowProperty(descriptor,"get","The property descriptor of a field descriptor");this.disallowProperty(descriptor,"set","The property descriptor of a field descriptor");this.disallowProperty(descriptor,"value","The property descriptor of a field descriptor");element.initializer=elementObject.initializer}return element},toElementFinisherExtras:function(elementObject){var element=this.toElementDescriptor(elementObject),finisher=_optionalCallableProperty(elementObject,"finisher"),extras=this.toElementDescriptors(elementObject.extras);return{element:element,finisher:finisher,extras:extras}},fromClassDescriptor:function(elements){var obj={kind:"class",elements:elements.map(this.fromElementDescriptor,this)},desc={value:"Descriptor",configurable:!0};Object.defineProperty(obj,Symbol.toStringTag,desc);return obj},toClassDescriptor:function(obj){var kind=obj.kind+"";if("class"!==kind){throw new TypeError("A class descriptor's .kind property must be \"class\", but a decorator"+" created a class descriptor with .kind \""+kind+"\"")}this.disallowProperty(obj,"key","A class descriptor");this.disallowProperty(obj,"placement","A class descriptor");this.disallowProperty(obj,"descriptor","A class descriptor");this.disallowProperty(obj,"initializer","A class descriptor");this.disallowProperty(obj,"extras","A class descriptor");var 
finisher=_optionalCallableProperty(obj,"finisher"),elements=this.toElementDescriptors(obj.elements);return{elements:elements,finisher:finisher}},runClassFinishers:function(constructor,finishers){for(var i=0,newConstructor;i<finishers.length;i++){newConstructor=(0,finishers[i])(constructor);if(newConstructor!==void 0){if("function"!==typeof newConstructor){throw new TypeError("Finishers must return a constructor.")}constructor=newConstructor}}return constructor},disallowProperty:function(obj,name,objectType){if(obj[name]!==void 0){throw new TypeError(objectType+" can't have a ."+name+" property.")}}};return api}function _createElementDescriptor(def){var key=_toPropertyKey(def.key),descriptor;if("method"===def.kind){descriptor={value:def.value,writable:!0,configurable:!0,enumerable:!1}}else if("get"===def.kind){descriptor={get:def.value,configurable:!0,enumerable:!1}}else if("set"===def.kind){descriptor={set:def.value,configurable:!0,enumerable:!1}}else if("field"===def.kind){descriptor={configurable:!0,writable:!0,enumerable:!0}}var element={kind:"field"===def.kind?"field":"method",key:key,placement:def.static?"static":"field"===def.kind?"own":"prototype",descriptor:descriptor};if(def.decorators)element.decorators=def.decorators;if("field"===def.kind)element.initializer=def.value;return element}function _coalesceGetterSetter(element,other){if(element.descriptor.get!==void 0){other.descriptor.get=element.descriptor.get}else{other.descriptor.set=element.descriptor.set}}function _coalesceClassElements(elements){for(var newElements=[],isSameElement=function(other){return"method"===other.kind&&other.key===element.key&&other.placement===element.placement},i=0;i<elements.length;i++){var element=elements[i],other;if("method"===element.kind&&(other=newElements.find(isSameElement))){if(_isDataDescriptor(element.descriptor)||_isDataDescriptor(other.descriptor)){if(_hasDecorators(element)||_hasDecorators(other)){throw new ReferenceError("Duplicated methods ("+element.key+") can't be decorated.")}other.descriptor=element.descriptor}else{if(_hasDecorators(element)){if(_hasDecorators(other)){throw new ReferenceError("Decorators can't be placed on different accessors with for "+"the same property ("+element.key+").")}other.decorators=element.decorators}_coalesceGetterSetter(element,other)}}else{newElements.push(element)}}return newElements}function _hasDecorators(element){return element.decorators&&element.decorators.length}function _isDataDescriptor(desc){return desc!==void 0&&!(desc.value===void 0&&desc.writable===void 0)}function _optionalCallableProperty(obj,name){var value=obj[name];if(value!==void 0&&"function"!==typeof value){throw new TypeError("Expected '"+name+"' to be a function")}return value}function _toPropertyKey(arg){var key=_toPrimitive(arg,"string");return"symbol"===typeof key?key:key+""}function _toPrimitive(input,hint){if("object"!==typeof input||null===input)return input;var prim=input[Symbol.toPrimitive];if(prim!==void 0){var res=prim.call(input,hint||"default");if("object"!==typeof res)return res;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===hint?String:Number)(input)}function _toArray(arr){return _arrayWithHoles(arr)||_iterableToArray(arr)||_nonIterableRest()}function _nonIterableRest(){throw new TypeError("Invalid attempt to destructure non-iterable instance")}function _iterableToArray(iter){if(Symbol.iterator in Object(iter)||"[object Arguments]"===Object.prototype.toString.call(iter))return Array.from(iter)}function 
_arrayWithHoles(arr){if(Array.isArray(arr))return arr}let HaCard=_decorate(null,function(_initialize,_LitElement){class HaCard extends _LitElement{constructor(...args){super(...args);_initialize(this)}}return{F:HaCard,d:[{kind:"field",decorators:[Object(lit_element__WEBPACK_IMPORTED_MODULE_0__.f)()],key:"header",value:void 0},{kind:"get",static:!0,key:"styles",value:function styles(){return lit_element__WEBPACK_IMPORTED_MODULE_0__.c`
:host {
background: var(
--ha-card-background,
var(--paper-card-background-color, white)
);
border-radius: var(--ha-card-border-radius, 2px);
box-shadow: var(
--ha-card-box-shadow,
0 2px 2px 0 rgba(0, 0, 0, 0.14),
0 1px 5px 0 rgba(0, 0, 0, 0.12),
0 3px 1px -2px rgba(0, 0, 0, 0.2)
);
color: var(--primary-text-color);
display: block;
transition: all 0.3s ease-out;
}
.header:not(:empty) {
font-size: 24px;
letter-spacing: -0.012em;
line-height: 32px;
opacity: 0.87;
padding: 24px 16px 16px;
}
`}},{kind:"method",key:"render",value:function render(){return lit_element__WEBPACK_IMPORTED_MODULE_0__.e`
<div class="header">${this.header}</div>
<slot></slot>
`}}]}},lit_element__WEBPACK_IMPORTED_MODULE_0__.a);customElements.define("ha-card",HaCard)},181:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(3),_polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(20),_resources_ha_style__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(101);class HaConfigSection extends _polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_1__.a{static get template(){return _polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_0__.a`
<style include="iron-flex ha-style">
.content {
padding: 28px 20px 0;
max-width: 1040px;
margin: 0 auto;
}
.header {
@apply --paper-font-display1;
opacity: var(--dark-primary-opacity);
}
.together {
margin-top: 32px;
}
.intro {
@apply --paper-font-subhead;
width: 100%;
max-width: 400px;
margin-right: 40px;
opacity: var(--dark-primary-opacity);
}
.panel {
margin-top: -24px;
}
.panel ::slotted(*) {
margin-top: 24px;
display: block;
}
.narrow.content {
max-width: 640px;
}
.narrow .together {
margin-top: 20px;
}
.narrow .header {
@apply --paper-font-headline;
}
.narrow .intro {
font-size: 14px;
padding-bottom: 20px;
margin-right: 0;
max-width: 500px;
}
</style>
<div class$="[[computeContentClasses(isWide)]]">
<div class="header"><slot name="header"></slot></div>
<div class$="[[computeClasses(isWide)]]">
<div class="intro"><slot name="introduction"></slot></div>
<div class="panel flex-auto"><slot></slot></div>
</div>
</div>
`}static get properties(){return{hass:{type:Object},narrow:{type:Boolean},showMenu:{type:Boolean,value:!1},isWide:{type:Boolean,value:!1}}}computeContentClasses(isWide){var classes="content ";return isWide?classes:classes+"narrow"}computeClasses(isWide){var classes="together layout ";return classes+(isWide?"horizontal":"vertical narrow")}}customElements.define("ha-config-section",HaConfigSection)},189:function(module,__webpack_exports__,__webpack_require__){"use strict";var fecha__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(182);function toLocaleStringSupportsOptions(){try{new Date().toLocaleString("i")}catch(e){return"RangeError"===e.name}return!1}__webpack_exports__.a=toLocaleStringSupportsOptions()?(dateObj,locales)=>dateObj.toLocaleString(locales,{year:"numeric",month:"long",day:"numeric",hour:"numeric",minute:"2-digit"}):dateObj=>fecha__WEBPACK_IMPORTED_MODULE_0__.a.format(dateObj,"haDateTime")},211:function(module,__webpack_exports__,__webpack_require__){"use strict";var _material_mwc_button__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(73),_polymer_paper_spinner_paper_spinner__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(173),_polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(3),_polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_3__=__webpack_require__(20);class HaProgressButton extends _polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_3__.a{static get template(){return _polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_2__.a`
<style>
.container {
position: relative;
display: inline-block;
}
mwc-button {
transition: all 1s;
}
.success mwc-button {
--mdc-theme-primary: white;
background-color: var(--google-green-500);
transition: none;
}
.error mwc-button {
--mdc-theme-primary: white;
background-color: var(--google-red-500);
transition: none;
}
.progress {
@apply --layout;
@apply --layout-center-center;
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
}
</style>
<div class="container" id="container">
<mwc-button
id="button"
disabled="[[computeDisabled(disabled, progress)]]"
on-click="buttonTapped"
>
<slot></slot>
</mwc-button>
<template is="dom-if" if="[[progress]]">
<div class="progress"><paper-spinner active=""></paper-spinner></div>
</template>
</div>
`}static get properties(){return{hass:{type:Object},progress:{type:Boolean,value:!1},disabled:{type:Boolean,value:!1}}}tempClass(className){var classList=this.$.container.classList;classList.add(className);setTimeout(()=>{classList.remove(className)},1e3)}ready(){super.ready();this.addEventListener("click",ev=>this.buttonTapped(ev))}buttonTapped(ev){if(this.progress)ev.stopPropagation()}actionSuccess(){this.tempClass("success")}actionError(){this.tempClass("error")}computeDisabled(disabled,progress){return disabled||progress}}customElements.define("ha-progress-button",HaProgressButton)},221:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_iron_icon_iron_icon__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(97),_ha_icon__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(164);class HaIconNext extends _ha_icon__WEBPACK_IMPORTED_MODULE_1__.a{connectedCallback(){this.icon="ltr"===window.getComputedStyle(this).direction?"hass:chevron-right":"hass:chevron-left";super.connectedCallback()}}customElements.define("ha-icon-next",HaIconNext)},307:function(module,__webpack_exports__,__webpack_require__){"use strict";var lit_element__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(5),_ha_progress_button__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(211),_common_dom_fire_event__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(44);class HaCallApiButton extends lit_element__WEBPACK_IMPORTED_MODULE_0__.a{render(){return lit_element__WEBPACK_IMPORTED_MODULE_0__.e`
<ha-progress-button
.progress="${this.progress}"
@click="${this._buttonTapped}"
?disabled="${this.disabled}"
><slot></slot
></ha-progress-button>
`}constructor(){super();this.method="POST";this.data={};this.disabled=!1;this.progress=!1}static get properties(){return{hass:{},progress:Boolean,path:String,method:String,data:{},disabled:Boolean}}get progressButton(){return this.renderRoot.querySelector("ha-progress-button")}async _buttonTapped(){this.progress=!0;const eventData={method:this.method,path:this.path,data:this.data};try{const resp=await this.hass.callApi(this.method,this.path,this.data);this.progress=!1;this.progressButton.actionSuccess();eventData.success=!0;eventData.response=resp}catch(err){this.progress=!1;this.progressButton.actionError();eventData.success=!1;eventData.response=err}Object(_common_dom_fire_event__WEBPACK_IMPORTED_MODULE_2__.a)(this,"hass-api-called",eventData)}}customElements.define("ha-call-api-button",HaCallApiButton)},379:function(module,__webpack_exports__,__webpack_require__){"use strict";var _polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_0__=__webpack_require__(3),_polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_1__=__webpack_require__(20),_ha_icon__WEBPACK_IMPORTED_MODULE_2__=__webpack_require__(164),_common_entity_state_icon__WEBPACK_IMPORTED_MODULE_3__=__webpack_require__(178);class HaStateIcon extends _polymer_polymer_polymer_element__WEBPACK_IMPORTED_MODULE_1__.a{static get template(){return _polymer_polymer_lib_utils_html_tag__WEBPACK_IMPORTED_MODULE_0__.a`
<ha-icon icon="[[computeIcon(stateObj)]]"></ha-icon>
`}static get properties(){return{stateObj:{type:Object}}}computeIcon(stateObj){return Object(_common_entity_state_icon__WEBPACK_IMPORTED_MODULE_3__.a)(stateObj)}}customElements.define("ha-state-icon",HaStateIcon)},719:function(module,__webpack_exports__,__webpack_require__){"use strict";__webpack_require__.r(__webpack_exports__);var app_route=__webpack_require__(142),utils_async=__webpack_require__(11),debounce=__webpack_require__(19),html_tag=__webpack_require__(3),polymer_element=__webpack_require__(20),ha_config_section=__webpack_require__(181),mwc_button=__webpack_require__(73),paper_card=__webpack_require__(160),paper_item_body=__webpack_require__(165),paper_toggle_button=__webpack_require__(197),ha_call_api_button=__webpack_require__(307),hass_subpage=__webpack_require__(139),ha_style=__webpack_require__(101),lit_element=__webpack_require__(5),paper_item=__webpack_require__(127),paper_spinner=__webpack_require__(173),ha_card=__webpack_require__(180),fire_event=__webpack_require__(44);const fetchWebhooks=hass=>hass.callWS({type:"webhook/list"}),createCloudhook=(hass,webhookId)=>hass.callWS({type:"cloud/cloudhook/create",webhook_id:webhookId}),deleteCloudhook=(hass,webhookId)=>hass.callWS({type:"cloud/cloudhook/delete",webhook_id:webhookId});function _objectWithoutPropertiesLoose(source,excluded){if(null==source)return{};var target={},sourceKeys=Object.keys(source),key,i;for(i=0;i<sourceKeys.length;i++){key=sourceKeys[i];if(0<=excluded.indexOf(key))continue;target[key]=source[key]}return target}function _toPropertyKey(arg){var key=_toPrimitive(arg,"string");return"symbol"===typeof key?key:key+""}function _toPrimitive(input,hint){if("object"!==typeof input||null===input)return input;var prim=input[Symbol.toPrimitive];if(prim!==void 0){var res=prim.call(input,hint||"default");if("object"!==typeof res)return res;throw new TypeError("@@toPrimitive must return a primitive value.")}return("string"===hint?String:Number)(input)}class cloud_webhooks_CloudWebhooks extends lit_element.a{static get properties(){return{hass:{},cloudStatus:{},_cloudHooks:{},_localHooks:{},_progress:{}}}constructor(){super();this.hass=void 0;this.cloudStatus=void 0;this._cloudHooks=void 0;this._localHooks=void 0;this._progress=void 0;this._progress=[]}connectedCallback(){super.connectedCallback();this._fetchData()}render(){return lit_element.e`
${this.renderStyle()}
<ha-card header="Webhooks">
<div class="body">
Anything that is configured to be triggered by a webhook can be given
a publicly accessible URL to allow you to send data back to Home
Assistant from anywhere, without exposing your instance to the
internet.
</div>
${this._renderBody()}
<div class="footer">
<a href="https://www.nabucasa.com/config/webhooks" target="_blank">
Learn more about creating webhook-powered automations.
</a>
</div>
</ha-card>
`}updated(changedProps){super.updated(changedProps);if(changedProps.has("cloudStatus")&&this.cloudStatus){this._cloudHooks=this.cloudStatus.prefs.cloudhooks||{}}}_renderBody(){if(!this.cloudStatus||!this._localHooks||!this._cloudHooks){return lit_element.e`
<div class="body-text">Loading…</div>
`}if(0===this._localHooks.length){return lit_element.e`
<div class="body-text">
Looks like you have no webhooks yet. Get started by configuring a
<a href="/config/integrations">webhook-based integration</a> or by
creating a <a href="/config/automation/new">webhook automation</a>.
</div>
`}return this._localHooks.map(entry=>lit_element.e`
<div class="webhook" .entry="${entry}">
<paper-item-body two-line>
<div>
${entry.name}
${entry.domain===entry.name.toLowerCase()?"":` (${entry.domain})`}
</div>
<div secondary>${entry.webhook_id}</div>
</paper-item-body>
${this._progress.includes(entry.webhook_id)?lit_element.e`
<div class="progress">
<paper-spinner active></paper-spinner>
</div>
`:this._cloudHooks[entry.webhook_id]?lit_element.e`
<mwc-button @click="${this._handleManageButton}">
Manage
</mwc-button>
`:lit_element.e`
<paper-toggle-button
@click="${this._enableWebhook}"
></paper-toggle-button>
`}
</div>
`)}_showDialog(webhookId){const webhook=this._localHooks.find(ent=>ent.webhook_id===webhookId),cloudhook=this._cloudHooks[webhookId],params={webhook:webhook,cloudhook,disableHook:()=>this._disableWebhook(webhookId)};Object(fire_event.a)(this,"manage-cloud-webhook",params)}_handleManageButton(ev){const entry=ev.currentTarget.parentElement.entry;this._showDialog(entry.webhook_id)}async _enableWebhook(ev){const entry=ev.currentTarget.parentElement.entry;this._progress=[...this._progress,entry.webhook_id];let updatedWebhook;try{updatedWebhook=await createCloudhook(this.hass,entry.webhook_id)}catch(err){alert(err.message);return}finally{this._progress=this._progress.filter(wid=>wid!==entry.webhook_id)}this._cloudHooks=Object.assign({},this._cloudHooks,{[entry.webhook_id]:updatedWebhook});if(0===this._progress.length){this._showDialog(entry.webhook_id)}}async _disableWebhook(webhookId){this._progress=[...this._progress,webhookId];try{await deleteCloudhook(this.hass,webhookId)}catch(err){alert(`Failed to disable webhook: ${err.message}`);return}finally{this._progress=this._progress.filter(wid=>wid!==webhookId)}const _ref=this._cloudHooks,newHooks=_objectWithoutPropertiesLoose(_ref,[webhookId].map(_toPropertyKey));this._cloudHooks=newHooks}async _fetchData(){this._localHooks=this.hass.config.components.includes("webhook")?await fetchWebhooks(this.hass):[]}renderStyle(){return lit_element.e`
<style>
.body {
padding: 0 16px 8px;
}
.body-text {
padding: 0 16px;
}
.webhook {
display: flex;
padding: 4px 16px;
}
.progress {
margin-right: 16px;
display: flex;
flex-direction: column;
justify-content: center;
}
.footer {
padding: 16px;
}
.body-text a,
.footer a {
color: var(--primary-color);
}
</style>
`}}customElements.define("cloud-webhooks",cloud_webhooks_CloudWebhooks);var format_date_time=__webpack_require__(189),events_mixin=__webpack_require__(81),localize_mixin=__webpack_require__(107);const fetchSubscriptionInfo=hass=>hass.callWS({type:"cloud/subscription"}),updatePref=(hass,prefs)=>hass.callWS(Object.assign({type:"cloud/update_prefs"},prefs));var repeat=__webpack_require__(317),paper_tooltip=__webpack_require__(246),ha_state_icon=__webpack_require__(379),compute_state_name=__webpack_require__(159),compute_domain=__webpack_require__(166);const generateFilter=(includeDomains,includeEntities,excludeDomains,excludeEntities)=>{const includeDomainsSet=new Set(includeDomains),includeEntitiesSet=new Set(includeEntities),excludeDomainsSet=new Set(excludeDomains),excludeEntitiesSet=new Set(excludeEntities),haveInclude=0<includeDomainsSet.size||0<includeEntitiesSet.size,haveExclude=0<excludeDomainsSet.size||0<excludeEntitiesSet.size;if(!haveInclude&&!haveExclude){return()=>!0}if(haveInclude&&!haveExclude){return entityId=>includeEntitiesSet.has(entityId)||includeDomainsSet.has(Object(compute_domain.a)(entityId))}if(!haveInclude&&haveExclude){return entityId=>!excludeEntitiesSet.has(entityId)&&!excludeDomainsSet.has(Object(compute_domain.a)(entityId))}if(includeDomainsSet.size){return entityId=>includeDomainsSet.has(Object(compute_domain.a)(entityId))?!excludeEntitiesSet.has(entityId):includeEntitiesSet.has(entityId)}if(excludeDomainsSet.size){return entityId=>excludeDomainsSet.has(Object(compute_domain.a)(entityId))?includeEntitiesSet.has(entityId):!excludeEntitiesSet.has(entityId)}return entityId=>includeEntitiesSet.has(entityId)};class cloud_exposed_entities_CloudExposedEntities extends lit_element.a{constructor(...args){super(...args);this.hass=void 0;this.filter=void 0;this.supportedDomains=void 0;this._filterFunc=void 0}static get properties(){return{hass:{},filter:{},supportedDomains:{},_filterFunc:{}}}render(){if(!this._filterFunc){return lit_element.e``}const states=[];Object.keys(this.hass.states).forEach(entityId=>{if(this._filterFunc(entityId)){const stateObj=this.hass.states[entityId];states.push([Object(compute_state_name.a)(stateObj),stateObj])}});states.sort();return lit_element.e`
${this.renderStyle()}
${Object(repeat.a)(states,stateInfo=>stateInfo[1].entity_id,stateInfo=>lit_element.e`
<span>
<ha-state-icon
.stateObj="${stateInfo[1]}"
@click="${this._handleMoreInfo}"
></ha-state-icon>
<paper-tooltip position="bottom">${stateInfo[0]}</paper-tooltip>
</span>
`)}
`}updated(changedProperties){super.updated(changedProperties);if(changedProperties.has("filter")&&changedProperties.get("filter")!==this.filter){const filter=this.filter,filterFunc=generateFilter(filter.include_domains,filter.include_entities,filter.exclude_domains,filter.exclude_entities),domains=new Set(this.supportedDomains);this._filterFunc=entityId=>{const domain=entityId.split(".")[0];return domains.has(domain)&&filterFunc(entityId)}}}_handleMoreInfo(ev){Object(fire_event.a)(this,"hass-more-info",{entityId:ev.currentTarget.stateObj.entity_id})}renderStyle(){return lit_element.e`
<style>
ha-state-icon {
color: var(--primary-text-color);
cursor: pointer;
}
</style>
`}}customElements.define("cloud-exposed-entities",cloud_exposed_entities_CloudExposedEntities);class cloud_alexa_pref_CloudAlexaPref extends lit_element.a{constructor(...args){super(...args);this.hass=void 0;this.cloudStatus=void 0}static get properties(){return{hass:{},cloudStatus:{}}}render(){if(!this.cloudStatus){return lit_element.e``}const enabled=this.cloudStatus.prefs.alexa_enabled;return lit_element.e`
${this.renderStyle()}
<paper-card heading="Alexa">
<paper-toggle-button
.checked="${enabled}"
@change="${this._toggleChanged}"
></paper-toggle-button>
<div class="card-content">
With the Alexa integration for E-Fonenana Cloud you'll be able to
control all your E-Fonenana devices via any Alexa-enabled device.
<ul>
<li>
To activate, search in the Alexa app for the E-Fonenana Smart Home
skill.
</li>
<li>
<a
href="https://www.home-assistant.io/cloud/alexa/"
target="_blank"
>
Config documentation
</a>
</li>
</ul>
<em
>This integration requires an Alexa-enabled device like the Amazon
Echo.</em
>
${enabled?lit_element.e`
<p>Exposed entities:</p>
<cloud-exposed-entities
.hass="${this.hass}"
.filter="${this.cloudStatus.alexa_entities}"
.supportedDomains="${this.cloudStatus.alexa_domains}"
></cloud-exposed-entities>
`:""}
</div>
</paper-card>
`}async _toggleChanged(ev){const toggle=ev.target;try{await updatePref(this.hass,{alexa_enabled:toggle.checked});Object(fire_event.a)(this,"ha-refresh-cloud-status")}catch(err){toggle.checked=!toggle.checked}}renderStyle(){return lit_element.e`
<style>
a {
color: var(--primary-color);
}
paper-card > paper-toggle-button {
position: absolute;
right: 8px;
top: 16px;
}
</style>
`}}customElements.define("cloud-alexa-pref",cloud_alexa_pref_CloudAlexaPref);class cloud_google_pref_CloudGooglePref extends lit_element.a{constructor(...args){super(...args);this.hass=void 0;this.cloudStatus=void 0}static get properties(){return{hass:{},cloudStatus:{}}}render(){if(!this.cloudStatus){return lit_element.e``}const{google_enabled,google_allow_unlock}=this.cloudStatus.prefs;return lit_element.e`
${this.renderStyle()}
<paper-card heading="Google Assistant">
<paper-toggle-button
id="google_enabled"
.checked="${google_enabled}"
@change="${this._toggleChanged}"
></paper-toggle-button>
<div class="card-content">
With the Google Assistant integration for E-Fonenana Cloud you'll be
able to control all your E-Fonenana devices via any Google
Assistant-enabled device.
<ul>
<li>
<a
href="https://assistant.google.com/services/a/uid/00000091fd5fb875?hl=en-US"
target="_blank"
>
Activate the E-Fonenana skill for Google Assistant
</a>
</li>
<li>
<a
href="https://www.home-assistant.io/cloud/google_assistant/"
target="_blank"
>
Config documentation
</a>
</li>
</ul>
<em
>This integration requires a Google Assistant-enabled device like
the Google Home or Android phone.</em
>
${google_enabled?lit_element.e`
<div class="unlock">
<div>Allow unlocking locks</div>
<paper-toggle-button
id="google_allow_unlock"
.checked="${google_allow_unlock}"
@change="${this._toggleChanged}"
></paper-toggle-button>
</div>
<p>Exposed entities:</p>
<cloud-exposed-entities
.hass="${this.hass}"
.filter="${this.cloudStatus.google_entities}"
.supportedDomains="${this.cloudStatus.google_domains}"
></cloud-exposed-entities>
`:""}
</div>
<div class="card-actions">
<ha-call-api-button
.hass="${this.hass}"
.disabled="${!google_enabled}"
path="cloud/google_actions/sync"
>Sync devices</ha-call-api-button
>
</div>
</paper-card>
`}async _toggleChanged(ev){const toggle=ev.target;try{await updatePref(this.hass,{[toggle.id]:toggle.checked});Object(fire_event.a)(this,"ha-refresh-cloud-status")}catch(err){toggle.checked=!toggle.checked}}renderStyle(){return lit_element.e`
<style>
a {
color: var(--primary-color);
}
paper-card > paper-toggle-button {
position: absolute;
right: 8px;
top: 16px;
}
ha-call-api-button {
color: var(--primary-color);
font-weight: 500;
}
.unlock {
display: flex;
flex-direction: row;
padding-top: 16px;
}
.unlock > div {
flex: 1;
}
</style>
`}}customElements.define("cloud-google-pref",cloud_google_pref_CloudGooglePref);let registeredWebhookDialog=!1;class ha_config_cloud_account_HaConfigCloudAccount extends Object(events_mixin.a)(Object(localize_mixin.a)(polymer_element.a)){static get template(){return html_tag.a`
<style include="iron-flex ha-style">
[slot="introduction"] {
margin: -1em 0;
}
[slot="introduction"] a {
color: var(--primary-color);
}
.content {
padding-bottom: 24px;
direction: ltr;
}
paper-card {
display: block;
}
.account-row {
display: flex;
padding: 0 16px;
}
mwc-button {
align-self: center;
}
.soon {
font-style: italic;
margin-top: 24px;
text-align: center;
}
.nowrap {
white-space: nowrap;
}
.wrap {
white-space: normal;
}
.status {
text-transform: capitalize;
padding: 16px;
}
</style>
<hass-subpage header="E-Fonenana Cloud">
<div class="content">
<ha-config-section is-wide="[[isWide]]">
<span slot="header">E-Fonenana Cloud</span>
<div slot="introduction">
<p>
Thank you for being part of E-Fonenana Cloud. It's because of
people like you that we are able to make a great home automation
experience for everyone. Thank you!
</p>
</div>
<paper-card heading="Nabu Casa Account">
<div class="account-row">
<paper-item-body two-line="">
[[cloudStatus.email]]
<div secondary="" class="wrap">
[[_formatSubscription(_subscription)]]
</div>
</paper-item-body>
</div>
<div class="account-row">
<paper-item-body> Cloud connection status </paper-item-body>
<div class="status">[[cloudStatus.cloud]]</div>
</div>
<div class="card-actions">
<a href="https://account.nabucasa.com" target="_blank"
><mwc-button>Manage Account</mwc-button></a
>
<mwc-button style="float: right" on-click="handleLogout"
>Sign out</mwc-button
>
</div>
</paper-card>
</ha-config-section>
<ha-config-section is-wide="[[isWide]]">
<span slot="header">Integrations</span>
<div slot="introduction">
<p>
Integrations for E-Fonenana Cloud allow you to connect with
services in the cloud without having to expose your Home
Assistant instance publicly on the internet.
</p>
<p>
Check the website for
<a href="https://www.nabucasa.com" target="_blank"
>all available features</a
>.
</p>
</div>
<cloud-alexa-pref
hass="[[hass]]"
cloud-status="[[cloudStatus]]"
></cloud-alexa-pref>
<cloud-google-pref
hass="[[hass]]"
cloud-status="[[cloudStatus]]"
></cloud-google-pref>
<cloud-webhooks
hass="[[hass]]"
cloud-status="[[cloudStatus]]"
></cloud-webhooks>
</ha-config-section>
</div>
</hass-subpage>
`}static get properties(){return{hass:Object,isWide:Boolean,cloudStatus:Object,_subscription:{type:Object,value:null}}}ready(){super.ready();this._fetchSubscriptionInfo()}connectedCallback(){super.connectedCallback();if(!registeredWebhookDialog){registeredWebhookDialog=!0;Object(fire_event.a)(this,"register-dialog",{dialogShowEvent:"manage-cloud-webhook",dialogTag:"cloud-webhook-manage-dialog",dialogImport:()=>Promise.all([__webpack_require__.e(1),__webpack_require__.e(14)]).then(__webpack_require__.bind(null,772))})}}async _fetchSubscriptionInfo(){this._subscription=await fetchSubscriptionInfo(this.hass);if(this._subscription.provider&&this.cloudStatus&&"connected"!==this.cloudStatus.cloud){this.fire("ha-refresh-cloud-status")}}handleLogout(){this.hass.callApi("post","cloud/logout").then(()=>this.fire("ha-refresh-cloud-status"))}_formatSubscription(subInfo){if(null===subInfo){return"Fetching subscription\u2026"}let description=subInfo.human_description;if(subInfo.plan_renewal_date){description=description.replace("{periodEnd}",Object(format_date_time.a)(new Date(1e3*subInfo.plan_renewal_date),this.hass.language))}return description}}customElements.define("ha-config-cloud-account",ha_config_cloud_account_HaConfigCloudAccount);var paper_input=__webpack_require__(80),ha_progress_button=__webpack_require__(211);class ha_config_cloud_forgot_password_HaConfigCloudForgotPassword extends Object(events_mixin.a)(polymer_element.a){static get template(){return html_tag.a`
<style include="iron-flex ha-style">
.content {
padding-bottom: 24px;
direction: ltr;
}
paper-card {
display: block;
max-width: 600px;
margin: 0 auto;
margin-top: 24px;
}
h1 {
@apply --paper-font-headline;
margin: 0;
}
.error {
color: var(--google-red-500);
}
.card-actions {
display: flex;
justify-content: space-between;
align-items: center;
}
.card-actions a {
color: var(--primary-text-color);
}
[hidden] {
display: none;
}
</style>
<hass-subpage header="Forgot Password">
<div class="content">
<paper-card>
<div class="card-content">
<h1>Forgot your password?</h1>
<p>
Enter your email address and we will send you a link to reset
your password.
</p>
<div class="error" hidden$="[[!_error]]">[[_error]]</div>
<paper-input
autofocus=""
id="email"
label="E-mail"
value="{{email}}"
type="email"
on-keydown="_keyDown"
error-message="Invalid email"
></paper-input>
</div>
<div class="card-actions">
<ha-progress-button
on-click="_handleEmailPasswordReset"
progress="[[_requestInProgress]]"
>Send reset email</ha-progress-button
>
</div>
</paper-card>
</div>
</hass-subpage>
`}static get properties(){return{hass:Object,email:{type:String,notify:!0,observer:"_emailChanged"},_requestInProgress:{type:Boolean,value:!1},_error:{type:String,value:""}}}_emailChanged(){this._error="";this.$.email.invalid=!1}_keyDown(ev){if(13===ev.keyCode){this._handleEmailPasswordReset();ev.preventDefault()}}_handleEmailPasswordReset(){if(!this.email||!this.email.includes("@")){this.$.email.invalid=!0}if(this.$.email.invalid)return;this._requestInProgress=!0;this.hass.callApi("post","cloud/forgot_password",{email:this.email}).then(()=>{this._requestInProgress=!1;this.fire("cloud-done",{flashMessage:"Check your email for instructions on how to reset your password."})},err=>this.setProperties({_requestInProgress:!1,_error:err&&err.body&&err.body.message?err.body.message:"Unknown error"}))}}customElements.define("ha-config-cloud-forgot-password",ha_config_cloud_forgot_password_HaConfigCloudForgotPassword);var paper_icon_button=__webpack_require__(96),paper_ripple=__webpack_require__(100),navigate_mixin=__webpack_require__(202),ha_icon_next=__webpack_require__(221);class ha_config_cloud_login_HaConfigCloudLogin extends Object(navigate_mixin.a)(Object(events_mixin.a)(polymer_element.a)){static get template(){return html_tag.a`
<style include="iron-flex ha-style">
.content {
padding-bottom: 24px;
direction: ltr;
}
[slot="introduction"] {
margin: -1em 0;
}
[slot="introduction"] a {
color: var(--primary-color);
}
paper-card {
display: block;
}
paper-item {
cursor: pointer;
}
paper-card:last-child {
margin-top: 24px;
}
h1 {
@apply --paper-font-headline;
margin: 0;
}
.error {
color: var(--google-red-500);
}
.card-actions {
display: flex;
justify-content: space-between;
align-items: center;
}
[hidden] {
display: none;
}
.flash-msg {
padding-right: 44px;
}
.flash-msg paper-icon-button {
position: absolute;
top: 8px;
right: 8px;
color: var(--secondary-text-color);
}
</style>
<hass-subpage header="Cloud Login">
<div class="content">
<ha-config-section is-wide="[[isWide]]">
<span slot="header">E-Fonenana Cloud</span>
<div slot="introduction">
<p>
              E-Fonenana Cloud securely connects your local instance to
              cloud-only services such as Amazon Alexa and Google Assistant.
</p>
<p>
This service is run by our partner
<a href="https://www.nabucasa.com" target="_blank"
>Nabu Casa, Inc</a
>, a company founded by the founders of E-Fonenana and Hass.io.
</p>
<p>
              E-Fonenana Cloud is a subscription service with a free one-month
              trial. No payment information is necessary.
</p>
<p>
<a href="https://www.nabucasa.com" target="_blank"
>Learn more about E-Fonenana Cloud</a
>
</p>
</div>
<paper-card hidden$="[[!flashMessage]]">
<div class="card-content flash-msg">
[[flashMessage]]
<paper-icon-button icon="hass:close" on-click="_dismissFlash"
>Dismiss</paper-icon-button
>
<paper-ripple id="flashRipple" noink=""></paper-ripple>
</div>
</paper-card>
<paper-card>
<div class="card-content">
<h1>Sign In</h1>
<div class="error" hidden$="[[!_error]]">[[_error]]</div>
<paper-input
label="Email"
id="email"
type="email"
value="{{email}}"
on-keydown="_keyDown"
error-message="Invalid email"
></paper-input>
<paper-input
id="password"
label="Password"
value="{{_password}}"
type="password"
on-keydown="_keyDown"
              error-message="Password must be at least 8 characters"
></paper-input>
</div>
<div class="card-actions">
<ha-progress-button
on-click="_handleLogin"
progress="[[_requestInProgress]]"
>Sign in</ha-progress-button
>
<button
class="link"
hidden="[[_requestInProgress]]"
on-click="_handleForgotPassword"
>
forgot password?
</button>
</div>
</paper-card>
<paper-card>
<paper-item on-click="_handleRegister">
<paper-item-body two-line="">
                Start your free one-month trial
<div secondary="">No payment information necessary</div>
</paper-item-body>
<ha-icon-next></ha-icon-next>
</paper-item>
</paper-card>
</ha-config-section>
</div>
</hass-subpage>
`}static get properties(){return{hass:Object,isWide:Boolean,email:{type:String,notify:!0},_password:{type:String,value:""},_requestInProgress:{type:Boolean,value:!1},flashMessage:{type:String,notify:!0},_error:String}}static get observers(){return["_inputChanged(email, _password)"]}connectedCallback(){super.connectedCallback();if(this.flashMessage){requestAnimationFrame(()=>requestAnimationFrame(()=>this.$.flashRipple.simulatedRipple()))}}_inputChanged(){this.$.email.invalid=!1;this.$.password.invalid=!1;this._error=!1}_keyDown(ev){if(13===ev.keyCode){this._handleLogin();ev.preventDefault()}}_handleLogin(){let invalid=!1;if(!this.email||!this.email.includes("@")){this.$.email.invalid=!0;this.$.email.focus();invalid=!0}if(8>this._password.length){this.$.password.invalid=!0;if(!invalid){invalid=!0;this.$.password.focus()}}if(invalid)return;this._requestInProgress=!0;this.hass.callApi("post","cloud/login",{email:this.email,password:this._password}).then(()=>{this.fire("ha-refresh-cloud-status");this.setProperties({email:"",_password:""})},err=>{this._password="";const errCode=err&&err.body&&err.body.code;if("PasswordChangeRequired"===errCode){alert("You need to change your password before logging in.");this.navigate("/config/cloud/forgot-password");return}const props={_requestInProgress:!1,_error:err&&err.body&&err.body.message?err.body.message:"Unknown error"};if("UserNotConfirmed"===errCode){props._error="You need to confirm your email before logging in."}this.setProperties(props);this.$.email.focus()})}_handleRegister(){this.flashMessage="";this.navigate("/config/cloud/register")}_handleForgotPassword(){this.flashMessage="";this.navigate("/config/cloud/forgot-password")}_dismissFlash(){setTimeout(()=>{this.flashMessage=""},200)}}customElements.define("ha-config-cloud-login",ha_config_cloud_login_HaConfigCloudLogin);class ha_config_cloud_register_HaConfigCloudRegister extends Object(events_mixin.a)(polymer_element.a){static get template(){return html_tag.a`
<style include="iron-flex ha-style">
.content {
direction: ltr;
}
[slot=introduction] {
margin: -1em 0;
}
[slot=introduction] a {
color: var(--primary-color);
}
a {
color: var(--primary-color);
}
paper-card {
display: block;
}
paper-item {
cursor: pointer;
}
paper-card:last-child {
margin-top: 24px;
}
h1 {
@apply --paper-font-headline;
margin: 0;
}
.error {
color: var(--google-red-500);
}
.card-actions {
display: flex;
justify-content: space-between;
align-items: center;
}
[hidden] {
display: none;
}
</style>
<hass-subpage header="Register Account">
<div class="content">
<ha-config-section is-wide="[[isWide]]">
<span slot="header">Start your free trial</span>
<div slot="introduction">
<p>
              Create an account to start your free one-month trial with E-Fonenana Cloud. No payment information is necessary.
</p>
<p>
The trial will give you access to all the benefits of E-Fonenana Cloud, including:
</p>
<ul>
<li>Integration with Google Assistant</li>
<li>Integration with Amazon Alexa</li>
</ul>
<p>
This service is run by our partner <a href='https://www.nabucasa.com' target='_blank'>Nabu Casa, Inc</a>, a company founded by the founders of E-Fonenana and Hass.io.
</p>
<p>
By registering an account you agree to the following terms and conditions.
            </p>
            <ul>
              <li><a href="https://home-assistant.io/tos/" target="_blank">Terms and Conditions</a></li>
              <li><a href="https://home-assistant.io/privacy/" target="_blank">Privacy Policy</a></li>
            </ul>
</div>
<paper-card>
<div class="card-content">
<div class="header">
<h1>Create Account</h1>
<div class="error" hidden$="[[!_error]]">[[_error]]</div>
</div>
<paper-input autofocus="" id="email" label="Email address" type="email" value="{{email}}" on-keydown="_keyDown" error-message="Invalid email"></paper-input>
<paper-input id="password" label="Password" value="{{_password}}" type="password" on-keydown="_keyDown" error-message="Your password needs to be at least 8 characters"></paper-input>
</div>
<div class="card-actions">
<ha-progress-button on-click="_handleRegister" progress="[[_requestInProgress]]">Start trial</ha-progress-button>
<button class="link" hidden="[[_requestInProgress]]" on-click="_handleResendVerifyEmail">Resend confirmation email</button>
</div>
</paper-card>
</ha-config-section>
</div>
</hass-subpage>
`}static get properties(){return{hass:Object,isWide:Boolean,email:{type:String,notify:!0},_requestInProgress:{type:Boolean,value:!1},_password:{type:String,value:""},_error:{type:String,value:""}}}static get observers(){return["_inputChanged(email, _password)"]}_inputChanged(){this._error="";this.$.email.invalid=!1;this.$.password.invalid=!1}_keyDown(ev){if(13===ev.keyCode){this._handleRegister();ev.preventDefault()}}_handleRegister(){let invalid=!1;if(!this.email||!this.email.includes("@")){this.$.email.invalid=!0;this.$.email.focus();invalid=!0}if(8>this._password.length){this.$.password.invalid=!0;if(!invalid){invalid=!0;this.$.password.focus()}}if(invalid)return;this._requestInProgress=!0;this.hass.callApi("post","cloud/register",{email:this.email,password:this._password}).then(()=>this._verificationEmailSent(),err=>{this._password="";this.setProperties({_requestInProgress:!1,_error:err&&err.body&&err.body.message?err.body.message:"Unknown error"})})}_handleResendVerifyEmail(){if(!this.email){this.$.email.invalid=!0;return}this.hass.callApi("post","cloud/resend_confirm",{email:this.email}).then(()=>this._verificationEmailSent(),err=>this.setProperties({_error:err&&err.body&&err.body.message?err.body.message:"Unknown error"}))}_verificationEmailSent(){this.setProperties({_requestInProgress:!1,_password:""});this.fire("cloud-done",{flashMessage:"Account created! Check your email for instructions on how to activate your account."})}}customElements.define("ha-config-cloud-register",ha_config_cloud_register_HaConfigCloudRegister);const LOGGED_IN_URLS=["/cloud/account"],NOT_LOGGED_IN_URLS=["/cloud/login","/cloud/register","/cloud/forgot-password"];class ha_config_cloud_HaConfigCloud extends Object(navigate_mixin.a)(polymer_element.a){static get template(){return html_tag.a`
<app-route
route="[[route]]"
pattern="/cloud/:page"
data="{{_routeData}}"
tail="{{_routeTail}}"
></app-route>
<template
is="dom-if"
if='[[_equals(_routeData.page, "account")]]'
restamp=""
>
<ha-config-cloud-account
hass="[[hass]]"
cloud-status="[[cloudStatus]]"
is-wide="[[isWide]]"
></ha-config-cloud-account>
</template>
<template
is="dom-if"
if='[[_equals(_routeData.page, "login")]]'
restamp=""
>
<ha-config-cloud-login
page-name="login"
hass="[[hass]]"
is-wide="[[isWide]]"
email="{{_loginEmail}}"
flash-message="{{_flashMessage}}"
></ha-config-cloud-login>
</template>
<template
is="dom-if"
if='[[_equals(_routeData.page, "register")]]'
restamp=""
>
<ha-config-cloud-register
page-name="register"
hass="[[hass]]"
is-wide="[[isWide]]"
email="{{_loginEmail}}"
></ha-config-cloud-register>
</template>
<template
is="dom-if"
if='[[_equals(_routeData.page, "forgot-password")]]'
restamp=""
>
<ha-config-cloud-forgot-password
page-name="forgot-password"
hass="[[hass]]"
email="{{_loginEmail}}"
></ha-config-cloud-forgot-password>
</template>
`}static get properties(){return{hass:Object,isWide:Boolean,loadingAccount:{type:Boolean,value:!1},cloudStatus:{type:Object},_flashMessage:{type:String,value:""},route:Object,_routeData:Object,_routeTail:Object,_loginEmail:String}}static get observers(){return["_checkRoute(route, cloudStatus)"]}ready(){super.ready();this.addEventListener("cloud-done",ev=>{this._flashMessage=ev.detail.flashMessage;this.navigate("/config/cloud/login")})}_checkRoute(route){if(!route||"/cloud"!==route.path.substr(0,6))return;this._debouncer=debounce.a.debounce(this._debouncer,utils_async.d.after(0),()=>{if(!this.cloudStatus||!this.cloudStatus.logged_in&&!NOT_LOGGED_IN_URLS.includes(route.path)){this.navigate("/config/cloud/login",!0)}else if(this.cloudStatus.logged_in&&!LOGGED_IN_URLS.includes(route.path)){this.navigate("/config/cloud/account",!0)}})}_equals(a,b){return a===b}}customElements.define("ha-config-cloud",ha_config_cloud_HaConfigCloud)}}]);
//# sourceMappingURL=19d81aa3bd198f084dd6.chunk.js.map | PypiClean |
/lxd_client-1.0.0-py3-none-any.whl/lxd_client/models/window_size_change_request.py |
from typing import Any, Dict, List, Type, TypeVar, Union
import attr
from ..models.args import Args
from ..types import UNSET, Unset
T = TypeVar("T", bound="WindowSizeChangeRequest")
@attr.s(auto_attribs=True)
class WindowSizeChangeRequest:
"""Control (window size change)
Attributes:
command (Union[Unset, str]): Example: window-resize.
args (Union[Unset, Args]):
"""
command: Union[Unset, str] = UNSET
args: Union[Unset, Args] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
command = self.command
args: Union[Unset, Dict[str, Any]] = UNSET
if not isinstance(self.args, Unset):
args = self.args.to_dict()
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
if command is not UNSET:
field_dict["command"] = command
if args is not UNSET:
field_dict["args"] = args
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
command = d.pop("command", UNSET)
_args = d.pop("args", UNSET)
args: Union[Unset, Args]
if isinstance(_args, Unset):
args = UNSET
else:
args = Args.from_dict(_args)
window_size_change_request = cls(
command=command,
args=args,
)
window_size_change_request.additional_properties = d
return window_size_change_request
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
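# --- Editor's sketch (not part of the generated model): a serialization
# round trip through from_dict()/to_dict(). The "width"/"height" keys in the
# Args payload are assumptions for illustration only, and this helper is
# never called by the module itself.
def _example_round_trip():
    payload = {"command": "window-resize",
               "args": {"width": "80", "height": "24"}}
    req = WindowSizeChangeRequest.from_dict(payload)
    assert req.command == "window-resize"
    assert req.to_dict()["command"] == "window-resize"
    return req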
/azure_ai_ml-1.9.0-py3-none-any.whl/azure/ai/ml/_restclient/v2022_12_01_preview/operations/_code_versions_operations.py |
from typing import TYPE_CHECKING
from msrest import Serializer
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
# fmt: off
def build_list_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
order_by = kwargs.pop('order_by', None) # type: Optional[str]
top = kwargs.pop('top', None) # type: Optional[int]
skip = kwargs.pop('skip', None) # type: Optional[str]
accept = "application/json"
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
_query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if order_by is not None:
_query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str')
if top is not None:
_query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
if skip is not None:
_query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str')
# Construct headers
_header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
_header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=_url,
params=_query_parameters,
headers=_header_parameters,
**kwargs
)
def build_delete_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
version, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
accept = "application/json"
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
"version": _SERIALIZER.url("version", version, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
_query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
_header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
_header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=_url,
params=_query_parameters,
headers=_header_parameters,
**kwargs
)
def build_get_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
version, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
accept = "application/json"
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str'),
"version": _SERIALIZER.url("version", version, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
_query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
_header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
_header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=_url,
params=_query_parameters,
headers=_header_parameters,
**kwargs
)
def build_create_or_update_request(
subscription_id, # type: str
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
version, # type: str
**kwargs # type: Any
):
# type: (...) -> HttpRequest
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
content_type = kwargs.pop('content_type', None) # type: Optional[str]
accept = "application/json"
# Construct URL
_url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
"name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
"version": _SERIALIZER.url("version", version, 'str'),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
_query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
_header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
_header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
_header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=_url,
params=_query_parameters,
headers=_header_parameters,
**kwargs
)
# fmt: on
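# --- Editor's sketch (not part of the generated client): the builders above
# only assemble azure.core.rest.HttpRequest objects; nothing is sent until an
# operations class pushes them through the client pipeline. The IDs below are
# placeholders, and this helper is never called by the module itself.
def _example_build_list_request():
    request = build_list_request(
        subscription_id="00000000-0000-0000-0000-000000000000",
        resource_group_name="my-rg",
        workspace_name="my-ws",
        name="my-code-asset",
        top=5,
    )
    assert request.method == "GET"     # request is built offline
    assert "/versions" in request.url  # query params are merged into the URL
    return request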
class CodeVersionsOperations(object):
"""CodeVersionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.machinelearningservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
order_by=None, # type: Optional[str]
top=None, # type: Optional[int]
skip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.CodeVersionResourceArmPaginatedResult"]
"""List versions.
List versions.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: Name of Azure Machine Learning workspace.
:type workspace_name: str
:param name: Container name. This is case-sensitive.
:type name: str
:param order_by: Ordering of list.
:type order_by: str
:param top: Maximum number of records to return.
:type top: int
:param skip: Continuation token for pagination.
:type skip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the
result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersionResourceArmPaginatedResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
name=name,
api_version=api_version,
order_by=order_by,
top=top,
skip=skip,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
name=name,
api_version=api_version,
order_by=order_by,
top=top,
skip=skip,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore
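    # --- Editor's sketch: consuming the pager returned by list(). Client
    # construction is omitted; `ops` is an assumed CodeVersionsOperations
    # instance obtained from an authenticated service client.
    #   for code_version in ops.list(resource_group_name="my-rg",
    #                                workspace_name="my-ws",
    #                                name="my-code-asset",
    #                                top=10):
    #       print(code_version.name)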
@distributed_trace
def delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
version, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Delete version.
Delete version.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: Name of Azure Machine Learning workspace.
:type workspace_name: str
:param name: Container name. This is case-sensitive.
:type name: str
:param version: Version identifier. This is case-sensitive.
:type version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
request = build_delete_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
name=name,
version=version,
api_version=api_version,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore
@distributed_trace
def get(
self,
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
version, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.CodeVersion"
"""Get version.
Get version.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: Name of Azure Machine Learning workspace.
:type workspace_name: str
:param name: Container name. This is case-sensitive.
:type name: str
:param version: Version identifier. This is case-sensitive.
:type version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CodeVersion, or the result of cls(response)
:rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
name=name,
version=version,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CodeVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore
@distributed_trace
def create_or_update(
self,
resource_group_name, # type: str
workspace_name, # type: str
name, # type: str
version, # type: str
body, # type: "_models.CodeVersion"
**kwargs # type: Any
):
# type: (...) -> "_models.CodeVersion"
"""Create or update version.
Create or update version.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: Name of Azure Machine Learning workspace.
:type workspace_name: str
:param name: Container name. This is case-sensitive.
:type name: str
:param version: Version identifier. This is case-sensitive.
:type version: str
:param body: Version entity to create or update.
:type body: ~azure.mgmt.machinelearningservices.models.CodeVersion
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CodeVersion, or the result of cls(response)
:rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2022-12-01-preview") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(body, 'CodeVersion')
request = build_create_or_update_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
name=name,
version=version,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('CodeVersion', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('CodeVersion', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
    create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore
/boot-synth-1.2.0.tar.gz/boot-synth-1.2.0/synth/projects_master/nginx_router/frontend/react/node_modules/caniuse-lite/data/regions/KW.js | module.exports={D:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0.003642,"35":0,"36":0.010926,"37":0,"38":0.003642,"39":0,"40":0,"41":0.003642,"42":0,"43":0.021852,"44":0,"45":0.003642,"46":0,"47":0.007284,"48":0,"49":0.116544,"50":0,"51":0.003642,"52":0,"53":0,"54":0.003642,"55":0,"56":0.007284,"57":0.003642,"58":0.010926,"59":0,"60":0,"61":0.14568,"62":0.003642,"63":0.032778,"64":0.003642,"65":0.07284,"66":0.007284,"67":0.025494,"68":0.007284,"69":0.021852,"70":0.014568,"71":0.029136,"72":0.03642,"73":0.050988,"74":0.065556,"75":0.167532,"76":5.823558,"77":1.977606,"78":0.010926,"79":0.003642,"80":0},C:{"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0.003642,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0.003642,"46":0,"47":0.007284,"48":0.029136,"49":0,"50":0,"51":0,"52":0.021852,"53":0,"54":0,"55":0.003642,"56":0.007284,"57":0.003642,"58":0,"59":0,"60":0.043704,"61":0,"62":0.003642,"63":0.003642,"64":0.007284,"65":0.007284,"66":0.010926,"67":0.014568,"68":0.225804,"69":0.590004,"70":0.01821,"71":0,"3.5":0,"3.6":0},F:{"9":0,"11":0,"12":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0.003642,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0.032778,"54":0.003642,"55":0,"56":0.003642,"57":0,"58":0,"60":0.003642,"62":0.029136,"9.5-9.6":0,"10.0-10.1":0,"10.5":0,"10.6":0,"11.1":0,"11.5":0,"11.6":0,"12.1":0},E:{"4":0,"5":0,"6":0,"7":0,"8":0.043704,"9":0.003642,"10":0.01821,"11":0.043704,"12":0.12747,"13":0.149322,_:"0","3.1":0,"3.2":0,"5.1":0.043704,"6.1":0,"7.1":0,"9.1":0.032778,"10.1":0.069198,"11.1":0.120186,"12.1":0.790314,"13.1":0},G:{"8":0,"3.2":0.010238158526821,"4.0-4.1":0.0051190792634107,"4.2-4.3":0,"5.0-5.1":0.017916777421938,"6.0-6.1":0,"7.0-7.1":0.020476317053643,"8.1-8.4":0.03839309447558,"9.0-9.2":0.043512173738991,"9.3":0.31482337469976,"10.0-10.2":0.15101283827062,"10.3":0.3813714051241,"11.0-11.2":0.41464542033627,"11.3-11.4":0.74994511208967,"12.0-12.1":1.259293498799,"12.2-12.3":20.376495008006,"13.0-13.1":1.7916777421938},I:{"3":0.0006522328358209,"4":0.0036338686567164,_:"76","2.1":0,"2.2":0.0010249373134328,"2.3":0.00055905671641791,"4.1":0.0082926746268657,"4.2-4.3":0.04901063880597,"4.4":0,"4.4.3-4.4.4":0.030468591044776},P:{"4":0.36255540469974,"5.0-5.4":0.020717451697128,"6.2-6.4":0.062152355091384,"7.2-7.4":0.20717451697128,"8.2":0.16573961357702,"9.2":1.94744045953,"10.1":1.2016121984334},B:{"12":0.003642,"13":0.007284,"14":0.010926,"15":0.010926,"16":0.021852,"17":0.269508,"18":0.433398,_:"76"},K:{_:"0 10 11 12 11.1 11.5 12.1"},A:{"6":0,"7":0,"8":0.032778,"9":0,"10":0.010926,"11":0.404262,"5.5":0},N:{"10":0,"11":0.006358},J:{"7":0,"10":0},L:{"0":42.403904},R:{_:"0"},M:{"0":0.139876},O:{"0":5.78578},Q:{"1.2":0.006358},S:{"2.5":0},H:{"0":3.6477222474227}}; | PypiClean |
/localcosmos_server-0.16.1-py3-none-any.whl/localcosmos_server/template_content/static/template_content/ckeditor5-build-classic/translations/ne.js |
(function(e){const t=e["ne"]=e["ne"]||{};t.dictionary=Object.assign(t.dictionary||{},{"%0 of %1":"%0 मध्ये %1","Align cell text to the bottom":"","Align cell text to the center":"","Align cell text to the left":"","Align cell text to the middle":"","Align cell text to the right":"","Align cell text to the top":"","Align center":"केन्द्र पङ्क्तिबद्ध गर्नुहोस्","Align left":"बायाँ पङ्क्तिबद्ध गर्नुहोस्","Align right":"दायाँ पङ्क्तिबद्ध गर्नुहोस्","Align table to the left":"","Align table to the right":"",Alignment:"",Aquamarine:"अव्कवामरिन",Background:"",Black:"कालो","Block quote":"ब्लक उद्धरण",Blue:"निलो",Bold:"बोल्ड",Border:"","Break text":"","Bulleted List":"गोली चिन्ह अङ्कित सूची",Cancel:"रद्द गर्नुहोस्","Caption for image: %0":"","Caption for the image":"","Cell properties":"","Center table":"","Centered image":"केन्द्रित तस्वीर","Change image text alternative":"तस्वीर पाठ विकल्प परिवर्तन गर्नुहोस्","Choose heading":"शीर्षक छनौट गर्नुहोस्",Color:"","Color picker":"",Column:"स्तम्भ",Dashed:"","Decrease indent":"इन्डेन्ट घटाउन","Delete column":"स्तम्भ मेटाउनुहोस्","Delete row":"पङ्क्ति मेटाउनुहोस्","Dim grey":"धमिलो खैरो",Dimensions:"",Dotted:"",Double:"",Downloadable:"डाउनलोड योग्य","Dropdown toolbar":"","Edit block":"ब्लक सम्पादन गर्न","Edit link":"लिङ्क सम्पादन गर्नुहोस्","Editor block content toolbar":"","Editor contextual toolbar":"","Editor editing area: %0":"","Editor toolbar":"","Enter image caption":"तस्वीर क्याप्शन प्रविष्ट गर्नुहोस्","Enter table caption":"","Full size image":"पूर्ण आकार तस्वीर",Green:"हरियो",Grey:"खैरो",Groove:"","Header column":"हेडर स्तम्भ","Header row":"हेडर पङ्क्ति",Heading:"शीर्षक","Heading 1":"शीर्षक-एक","Heading 2":"शीर्षक २","Heading 3":"शीर्षक ३","Heading 4":"शीर्षक ४","Heading 5":"शीर्षक ५","Heading 6":"शीर्षक ६",Height:"","Horizontal text alignment toolbar":"","Image toolbar":"","image widget":"तस्वीर विजेट","In line":"","Increase indent":"इन्डेन्ट बढाउन","Insert column left":"बायाँ स्तम्भ सम्मिलित गर्न","Insert column right":"दायाँ स्तम्भ सम्मिलित गर्न","Insert image":"तस्वीर सम्मिलित गर्नुहोस्","Insert media":"मिडिया सम्मिलित गर्नुहोस्।","Insert row above":"माथि पंक्ति सम्मिलित गर्नुहोस्","Insert row below":"तल पंक्ति सम्मिलित गर्नुहोस्","Insert table":"तालिका सम्मिलित गर्नुहोस्",Inset:"",Italic:"इटालिक",Justify:"जस्टिफाइ गर्नुहोस्","Justify cell text":"","Left aligned image":"बायाँ पङ्क्ति तस्वीर","Light blue":"हल्का निलो","Light green":"हल्का हरियो","Light grey":"हल्का खैरो",Link:"लिङ्क","Link URL":"लिङ्क यूआरएल","Media URL":"मिडिया यूआरएल","media widget":"मिडिया विजेट","Merge cell down":"कक्ष तल मर्ज गर्नुहोस्","Merge cell left":"सेल बायाँ मर्ज गर्नुहोस्","Merge cell right":"दायाँ कक्ष मर्ज गर्नुहोस्","Merge cell up":"कक्ष माथि मर्ज गर्नुहोस्","Merge cells":"कक्ष मर्ज गर्नुहोस्",Next:"अर्को",None:"","Numbered List":"सूचीबद्ध सूची","Open in a new tab":"नयाँ ट्याबमा खोल्न","Open link in new tab":"नयाँ ट्याबमा लिङ्क खोल्नुहोस्","Open media in new tab":"",Orange:"सुन्तला रंग",Outset:"",Padding:"",Paragraph:"अनुच्छेद","Paste the media URL in the input.":"इनपुटमा मिडिया यूआरएल पेस्ट गर्नुहोस्।",Previous:"अघिल्लो",Purple:"बैंगनी रंग",Red:"रातो",Redo:"रिडु","Remove color":"रंग हटाउन","Restore default":"","Rich Text Editor":"धनी पाठ सम्पादक",Ridge:"","Right aligned image":"दायाँ पङ्क्तिबद्ध तस्वीर",Row:"पङ्क्ति",Save:"सुरक्षित गर्नुहोस्","Select column":"","Select row":"","Show more items":"","Side image":"साइड तस्वीर",Solid:"","Split cell horizontally":"क्षैतिज कक्ष विभाजित गर्नुहोस्","Split cell vertically":"ठाडो कक्ष विभाजित गर्नुहोस्",Style:"","Table alignment toolbar":"","Table cell text alignment":"","Table properties":"","Table toolbar":"","Text alignment":"पाठ संरेखण","Text alignment toolbar":"","Text alternative":"पाठ विकल्प",'The color is invalid. Try "#FF0000" or "rgb(255,0,0)" or "red".':"","The URL must not be empty.":"यूआरएल खाली हुनु हुँदैन।",'The value is invalid. Try "10px" or "2em" or simply "2".':"","This link has no URL":"यो लिङ्कसँग यूआरएल छैन","This media URL is not supported.":"यो मिडिया यूआरएल समर्थित छैन।","Tip: Paste the URL into the content to embed faster.":"सुझाव:छिटो इम्बेड गर्न यूआरएल सामग्रीमा पेस्ट गर्नुहोस्।","Toggle caption off":"","Toggle caption on":"",Turquoise:"त्रकोइस",Underline:"रेखांकन",Undo:"पूर्ववत",Unlink:"अनलिङ्क गर्नुहोस्","Upload failed":"अपलोड असफल भयो","Upload in progress":"अपलोड हुदैछ","Vertical text alignment toolbar":"",White:"सेतो",Width:"","Wrap text":"",Yellow:"पहेंलो"});t.getPluralForm=function(e){return e!=1}})(window.CKEDITOR_TRANSLATIONS||(window.CKEDITOR_TRANSLATIONS={}));
/steelscript.appresponse-2.0.2.tar.gz/steelscript.appresponse-2.0.2/steelscript/appresponse/core/reports.py |
import time
import logging
from collections import OrderedDict
from steelscript.appresponse.core.types import AppResponseException, \
TimeFilter, ResourceObject, Key, Value
from steelscript.appresponse.core.clips import Clip
from steelscript.appresponse.core.fs import File
from steelscript.appresponse.core.capture import Job, VIFG, MIFG
from steelscript.appresponse.core._constants import report_source_to_groups
from steelscript.common._fs import SteelScriptDir
logger = logging.getLogger(__name__)
PACKETS_REPORT_SERVICE_NAME = 'npm.probe.reports'
GENERAL_REPORT_SERVICE_NAME = 'npm.reports'
PACKETS_SOURCES_SERVICE_NAME = 'npm.probe.reports.sources'
GENERAL_SOURCES_SERVICE_NAME = 'npm.reports.sources'
class SourceProxy(object):
CLIP_PREFIX = 'clips/'
MIFG_PREFIX = 'interfaces/'
VIFG_PREFIX = 'vifgs/'
JOB_PREFIX = 'jobs/'
FILE_PREFIX = 'fs'
def __init__(self, packets_obj=None, name=None, path=None):
"""Initialize a data source for reports to run against.
:param packets_obj: Clip or File or VIFG or Job object
:param str name: Name of general report sources
:param str path: Path of the packets data source
"""
if not packets_obj and not name:
            raise AppResponseException("Either packets_obj or name is "
                                       "required to create a valid "
                                       "data source.")
if packets_obj:
if isinstance(packets_obj, Clip):
path = '{}{}'.format(self.CLIP_PREFIX, packets_obj.id)
elif isinstance(packets_obj, MIFG):
path = '{}{}'.format(self.MIFG_PREFIX, packets_obj.id)
elif isinstance(packets_obj, VIFG):
path = '{}{}'.format(self.VIFG_PREFIX, packets_obj.id)
elif isinstance(packets_obj, File):
path = '{}{}'.format(self.FILE_PREFIX, packets_obj.id)
elif isinstance(packets_obj, Job):
path = '{}{}'.format(self.JOB_PREFIX, packets_obj.id)
else:
raise AppResponseException(
                    'Can only support Job, Clip, File, VIFG or MIFG '
                    'packet sources')
self.name = 'packets'
self.path = path
else:
self.name = name
self.path = path
def __str__(self):
return "<{} name:{} path:{}>".format(self.__class__,
self.name, self.path)
def __repr__(self):
return "{}(name='{}', path='{}')".format(
self.__class__.__name__, self.name, self.path
)
def to_dict(self):
ret = {}
for k, v in vars(self).items():
if v:
ret[k] = v
return ret
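# --- Editor's sketch (not part of the original module): building proxies for
# a packets source versus a general source. `capture_job` is assumed to be a
# Job object obtained elsewhere, 'aggregates' is an assumed source name, and
# this helper is never called by the module itself.
def _example_source_proxies(capture_job):
    packets = SourceProxy(capture_job)        # name='packets', path='jobs/<id>'
    general = SourceProxy(name='aggregates')  # general source, by name only
    return packets.to_dict(), general.to_dict()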
class ReportService(object):
def __init__(self, appresponse):
self.appresponse = appresponse
self._sources = {}
@property
def sources(self):
if not self._sources:
self._load_sources()
return self._sources
def _load_sources(self):
"""Get the names and granularities of sources.
The hierarchy of the data looks like below:
{ "source1" : { "name": string,
"filters_on_metrics": boolean,
"columns": [source_column],
"granularities": [string],
}
...
}
"""
ss_dir = SteelScriptDir('AppResponse', 'files')
for svc in [PACKETS_SOURCES_SERVICE_NAME,
GENERAL_SOURCES_SERVICE_NAME]:
svc_version = self.appresponse.versions[svc]
sw_version = (self.appresponse.get_info()['sw_version']
.replace(' ', ''))
sources_filename = ('{}-sources-{}-{}.pcl'
.format(svc, svc_version, sw_version))
sources_file = ss_dir.get_data(sources_filename)
sources_file.read()
if not sources_file.data:
svcdef = self.appresponse.find_service(svc)
# sources is a list of dictionaries
sources = svcdef.bind('sources').execute('get').data['items']
# the whole set of sources for current service
all_sources = {}
for source in sources:
cols = source['columns']
source['columns'] = \
OrderedDict(sorted(zip([x['id'] for x in cols],
cols)))
source['filters_on_metrics'] = \
source['capabilities']['filters_on_metrics']
if 'granularities' not in source:
source['granularities'] = None
all_sources[source['name']] = source
if source['name'] in report_source_to_groups:
self._sources[source['name']] = source
# source_file writes the whole set of sources to disk
sources_file.data = all_sources
sources_file.write()
logger.debug("Wrote sources data into {}"
.format(sources_filename))
else:
logger.debug("Loading sources data from {}"
.format(sources_filename))
# Only load valid sources based on settings
for k, v in sources_file.data.items():
if k in report_source_to_groups:
self._sources[k] = v
return
def create_report(self, data_def_request):
"""Convenience method to create a report with a data definition request.
        :param DataDef data_def_request: a DataDef object
:return: one Report object
"""
report = Report(self.appresponse)
report.add(data_def_request)
report.run()
return report
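    # --- Editor's sketch of an end-to-end run, mirroring the published
    # SteelScript examples; the appliance object `ar` and the job name are
    # hypothetical:
    #   job = ar.capture.get_job_by_name('default_job')
    #   dd = DataDef(source=SourceProxy(job),
    #                columns=[Key('start_time'),
    #                         Value('sum_traffic.total_bytes')],
    #                granularity='60',
    #                time_range='last 10 minutes')
    #   report = ar.reports.create_report(dd)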
def create_instance(self, data_defs):
"""Create a report instance with multiple data definition requests.
:param data_defs: list of DataDef objects
:return: one ReportInstance object
"""
if not data_defs:
msg = 'No data definitions are provided.'
raise AppResponseException(msg)
if (any(dd.source.name == 'packets' for dd in data_defs)
and any(dd.source.name != 'packets' for dd in data_defs)):
# Two report instance needs to be created, one uses 'npm.reports'
# service, the other one uses 'npm.probe.reports' service
# it would create unnecessary complexity to support this case
# thus report error and let the user to create two separate
# report instances
msg = ('Both packets data source and non-packets data source are '
'being queried in this report, which is not supported. The '
'data source names include {}'
.format(', '.join(set([dd.source.name
for dd in data_defs]))))
raise AppResponseException(msg)
live = all(dd.live for dd in data_defs)
if not live and any(dd.live for dd in data_defs):
msg = ('Incompatible DataDefs for report: live and non-live '
'cannot be mixed.')
raise AppResponseException(msg)
def _create_instance(service_name, data_defs, live):
config = dict(data_defs=[dd.to_dict() for dd in data_defs],
live=live)
logger.debug("Creating instance with data definitions %s" % config)
svcdef = self.appresponse.find_service(service_name)
datarep = svcdef.bind('instances')
resp = datarep.execute('create', _data=config)
# XXX sleepwalker bug? resp is actually an `instances`
# resource with the data for a single `instance`. Doing a
# `.pull()` will fill data with /instances collection instead
# here we cast result to actual `instance` instance
report_instance = svcdef.bind('instance', id=resp.data['id'])
instance = ReportInstance(data=resp.data,
datarep=report_instance,
live=live)
return instance
if data_defs[0].source.name == 'packets':
# Create clip for for capture job sources only
# Keep the clip till the instance is completed
if data_defs[0].source.path.startswith(SourceProxy.JOB_PREFIX):
with self.appresponse.clips.create_clips(data_defs):
instance = _create_instance(PACKETS_REPORT_SERVICE_NAME,
data_defs, False)
else:
instance = _create_instance(PACKETS_REPORT_SERVICE_NAME,
data_defs, live)
else:
instance = _create_instance(GENERAL_REPORT_SERVICE_NAME,
data_defs, live)
return instance
def get_instances(self, service=None, include_system_reports=False):
"""Get all running report instances on appliance.
        Several different services can have instances running, covering
        both system processes and user-initiated reports. The primary
means of identifying the sources is through the `user_agent` field.
Examples are:
user reports:
'python-requests/2.4.3 CPython/2.7.12 Darwin/17.5.0 SteelScript/1.3.3'
'python-requests/2.4.3 CPython/2.7.12 Darwin/17.5.0'
'Pilot/11.3.1000.4175'
'curl/7.29.0'
system reports:
'Analytics v1.0'
'ReportManager (internal client)'
'webui'
By default, this method will only return `user reports` to avoid
accidentally deleting system reports.
:param service: optional service to check specifically. If None,
will return from all available report services. Valid options
include: 'npm.probe.reports' or 'npm.reports'
:param include_system_reports: Include system generated views,
including web UI reports in results.
:return: list of ReportInstance objects
"""
if service is None:
services = [PACKETS_REPORT_SERVICE_NAME,
GENERAL_REPORT_SERVICE_NAME]
else:
services = [service]
def test_user_agent(data):
system_report_agents = ('Analytics', 'ReportManager', 'webui')
if include_system_reports:
return True
for agent in system_report_agents:
if agent in data['user_agent']:
return False
return True
instances = []
for svc in services:
svcdef = self.appresponse.find_service(svc)
datarep = svcdef.bind('instances')
for item in datarep['items']:
if test_user_agent(item.data):
instance = ReportInstance(item.data,
datarep=item,
live=item.data['live'])
instances.append(instance)
return instances
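    # --- Editor's sketch: purging completed user-created instances. `ar` is
    # an assumed connected AppResponse object:
    #   for inst in ar.reports.get_instances():
    #       if not inst.live and inst.is_complete():
    #           inst.delete()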
def get_column_objects(self, source_name, columns):
"""Return Key/Value objects for given set of string names."""
coldefs = self.sources[source_name]['columns']
def iskey(coldef):
if 'grouped_by' in coldef and coldef['grouped_by'] is True:
return True
return False
cols = []
for c in columns:
obj = Key(c) if iskey(coldefs[c]) else Value(c)
cols.append(obj)
return cols
class ReportInstance(ResourceObject):
"""Main proxy interface to interact with AR11 report instance."""
resource = 'instance'
def __init__(self, data, servicedef=None, datarep=None, live=False):
super(ReportInstance, self).__init__(data, servicedef, datarep)
self.errors = []
self.live = live
self._metatime = {}
def __str__(self):
return "<{} id:{} svc:{} user_agent:{} live:{}>".format(
self.__class__.__name__, self.data['id'],
self.datarep.service.servicedef.name,
self.data['user_agent'], self.live
)
def __repr__(self):
return "{}(id='{}', svc='{}', user_agent='{}', live='{}')".format(
self.__class__.__name__, self.data['id'],
self.datarep.service.servicedef.name,
self.data['user_agent'], self.live
)
@property
def status(self):
status = self.datarep.execute('get_status').data
logger.debug("Status of report instance with id {}: {}"
.format(self.id, status))
return status
@property
def state(self):
return [s['state'] for s in self.status]
def _check_state(self, is_state):
state = self.state
if 'error' in state:
self.check_for_errors()
return all(x == is_state for x in self.state)
def is_complete(self):
"""The completed state for regular reports."""
return self._check_state('completed')
def is_collecting(self):
"""The steady state for live reports."""
return self._check_state('collecting')
def is_ready(self):
"""Return true if report is completed or collecting."""
if self.live:
return self.is_collecting()
else:
return self.is_complete()
def check_for_errors(self):
"""Raise exception if any errors found."""
# Check errors when all queries have completed
for item in self.status:
if item['state'] == 'error':
for m in item['messages']:
self.errors.append(m['text'])
logger.error("Error msg from status: {}"
.format(m['text']))
if self.errors:
err_msgs = ';\n'.join(self.errors)
raise AppResponseException(err_msgs)
def get_data(self):
"""Get data from all sources of report instance."""
return self.datarep.execute('get_data').data
def get_datadef_data(self, index=0, start_time=None, end_time=None):
"""Get instance data from specific data_defs."""
dd = self.datarep['data_defs'][index]
dd.pull()
meta_timerange = dd.data['actual_time']['time_ranges'][index]
if not self._metatime:
self._metatime[index] = meta_timerange
start_time = self._metatime[index]['start']
else:
if meta_timerange['end'] == self._metatime[index]['end']:
logger.debug('No new data for {}, skipping ...'.format(self))
return []
if start_time is None and end_time is None:
start_time = self._metatime[index]['end']
logger.debug('Using start_time of previous end time: %s'
% start_time)
self._metatime[index] = dd.data['actual_time']['time_ranges'][0]
kwargs = {'report_id': self.id}
if start_time:
kwargs['start_time'] = start_time
if end_time:
kwargs['end_time'] = end_time
data = dd.execute('get_data', **kwargs)
return data.data
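    # --- Editor's sketch: polling a live instance incrementally. Each call
    # returns only samples newer than the previous call (see the _metatime
    # bookkeeping above); `instance` and `process` are placeholders:
    #   while True:
    #       rows = instance.get_datadef_data(index=0)
    #       process(rows)
    #       time.sleep(5)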
def delete(self):
return self.datarep.execute('delete')
class DataDef(object):
"""Interface to build a data definition for uploading to a report."""
def __init__(self, source, columns, start=None, end=None, duration=None,
time_range=None, granularity=None, resolution=None,
limit=None, topbycolumns=None,
live=False, retention_time=3600):
"""Initialize a data definition request object.
:param source: Reference to a source object. If a string,
will try to convert to a SourceProxy
:param columns: list Key/Value column objects.
:param start: epoch start time in seconds.
:param end: epoch endtime in seconds.
:param duration: string duration of data def request.
:param time_range: string time range of data def request.
:param int granularity: granularity in seconds. Required.
:param int resolution: resolution in seconds. Optional
:param limit: limit to number of returned rows. Optional
:param topbycolumns: Key/Value columns to be used for topn. Optional.
:param live: boolean for whether this is a live retrieval data_def.
Setting this to true changes the behavior somewhat, see notes.
:param retention_time: int seconds for how long to store data before
overwriting buffer. Only applicable for live reports.
For defining the overall time for the report, either a
single `time_range` string may be used or a combination of
`start`/`end`/`duration`.
Further discussion on `granularity` and `resolution`:
Granularity refers to the amount of time for which the data source
computes a summary of the metrics it received. The data source
examines all data and creates summaries for 1 second, 1 minute,
        5 minutes, 15 minutes, 1 hour, 6 hours and 1 day, etc. Greater
granularity (shorter time periods) results in greater accuracy.
Lesser granularity (1 hour, 6 hours, 1 day) requires less processing
and therefore the data is returned faster. Granularity must be
        specified as a number of seconds.
        Resolution must be a multiple of the requested granularity. For
        example, if you specify a granularity of 5 minutes (300 seconds),
        the resolution can be set to 5, 10, or 15 minutes, etc. If the
        resolution is set equal to the granularity, it has no effect on
        the number of returned samples. The resolution is optional.
Notes:
Live reports can be created by setting the option `live` to `True`.
This will zero out any timefilter that may have been applied, and
will use a retention time value that determines how long to
keep the data in a rolling buffer. Retention time defaults to
        one hour (3600 seconds).
"""
if isinstance(source, SourceProxy):
self.source = source
else:
# try as a packets reference first
try:
self.source = SourceProxy(source)
except AppResponseException:
logger.debug('Assuming source name as non-packets source ...')
self.source = SourceProxy(name=source)
self.columns = columns
self.granularity = granularity
self.resolution = resolution
self.live = live
if self.live:
if self.granularity is None:
logger.info('granularity not chosen, '
'defaulting to "1" for live feed')
self.granularity = "1"
self.timefilter = None
self.retention_time = retention_time
else:
self.timefilter = TimeFilter(start=start, end=end,
duration=duration,
time_range=time_range)
self.retention_time = None
self.limit = limit
self.topbycolumns = topbycolumns or []
self._filters = []
self._data = None
# column names as returned with DataDef results
self._data_columns = None
self._instance = None
def to_dict(self):
data_def = dict()
data_def['source'] = self.source.to_dict()
data_def['group_by'] = [col.name for col in self.columns if col.key]
data_def['time'] = dict()
for k in ['start', 'end']:
v = getattr(self.timefilter, k, None)
if v:
data_def['time'][k] = str(v)
if self.retention_time:
data_def['time']['retention_time'] = str(self.retention_time)
for k in ['granularity', 'resolution']:
v = getattr(self, k)
if v:
data_def['time'][k] = str(v)
data_def['columns'] = [col.name for col in self.columns]
if self._filters:
data_def['filters'] = self._filters
if self.limit:
data_def['limit'] = int(self.limit)
topbycolumns = []
for col in self.topbycolumns:
topbycol = dict()
topbycol["direction"] = "desc"
topbycol["id"] = col.name
topbycolumns.append(topbycol)
data_def['top_by'] = topbycolumns
return data_def
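# Illustrative shape of the dict produced by to_dict() (values are
# placeholders; the keys follow the code above):
#
#   {'source': {...},               # from SourceProxy.to_dict()
#    'group_by': ['start_time'],    # names of key columns
#    'time': {'start': '1500000000', 'granularity': '60'},
#    'columns': ['start_time', 'sum_traffic.total_bytes'],
#    'top_by': []}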
def add_filter(self, filter):
"""Add one traffic filter to the data def.
:param filter: types.TrafficFilter object
"""
self._filters.append(filter.as_dict())
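# Usage sketch (hedged: assumes a TrafficFilter type exposing an
# as_dict() method, per the docstring above; the filter expression
# is a placeholder):
#
#   dd.add_filter(TrafficFilter('host 10.0.0.1'))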
@property
def data(self):
return self._data
@data.setter
def data(self, val):
"""Set the data of data def as a list of dictionaries."""
self._data = val
class Report(object):
"""Main interface to build and run a report on AppResponse."""
def __init__(self, appresponse):
"""Initialize a new report.
:param appresponse: the AppResponse object.
"""
logger.debug("Initializing Report object with appresponse '{}'"
.format(appresponse.host))
self.appresponse = appresponse
self._data_defs = []
self._instance = None
def add(self, data_def_request):
"""Add one data definition request."""
logger.debug("Adding a data_def request {}"
.format(data_def_request.to_dict()))
self._data_defs.append(data_def_request)
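# Typical workflow sketch (`appresponse` and `dd` are placeholders
# for an AppResponse handle and a DataDef built as above):
#
#   report = Report(appresponse)
#   report.add(dd)
#   report.run()                  # creates the instance, waits for data
#   records = report.get_data()   # rows for the first data def
#   report.delete()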
def _cast_number(self, result, source_name):
"""Check records and convert string to integer/float.
If the type of the column is 'number' or unit is not 'none', then check
if the column name has 'avg' in its name, if yes, then convert it to
float, otherwise to integer.
:param dict result: includes metadata for one data def request
as well as the response data for the data def request.
:param string source_name: name of the source.
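For example, a raw row ('1500000000', '12.5', 'NULL') under
columns of type 'timestamp', 'number' and 'integer' becomes
(1500000000, 12.5, None).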
"""
logger.debug("Converting string in records into integer/float")
columns = self.appresponse.reports.sources[source_name]['columns']
functions = [lambda x: x] * len(result['columns'])
for i, col in enumerate(result['columns']):
if columns[col]['type'] in ['integer', 'timestamp']:
functions[i] = (lambda x: None if x == 'NULL'
else int(x) if x.isdigit() else x)
elif columns[col]['type'] in ('number', 'duration'):
functions[i] = (lambda x: None if x == 'NULL'
else float(x) if
x.replace('.', '', 1).isdigit() else x)
# operate on each column, then zip back into list of tuples
datacols = []
for i, c in enumerate(zip(*result['data'])):
datacols.append(list(map(functions[i], c)))
records = list(zip(*datacols))
return records
def run(self):
"""Create and run a report instance with stored data definitions."""
if not self._instance:
self._instance = \
self.appresponse.reports.create_instance(self._data_defs)
while not self._instance.is_ready():
time.sleep(.5)
if not self._instance.live:
# only collect data automatically if we are a single use report
self._collect_data()
def _collect_data(self):
"""Collect all available data from all data defs."""
results = self._instance.get_data()['data_defs']
for i, res in enumerate(results):
source_name = self._data_defs[i].source.name
self._data_defs[i]._data_columns = res['columns']
if 'data' in res:
self._data_defs[i].data = self._cast_number(res,
source_name)
else:
self._data_defs[i].data = []
logger.debug("Obtained {} records for the {}th data request."
.format(len(self._data_defs[i].data), i))
def get_data(self, index=0):
"""Return data for the indexed data definition requests.
Note that for live data objects `index` cannot be None; only
explicit requests are allowed. If multiple data_defs
in a report need to collect data, query them individually.
Also, the object returned from a live query will be a
`data_def_results` object
(https://support.riverbed.com/apis/npm.probe.reports/1.0/service.html#types_data_def_results).
The data can be referenced via data['data'], while metadata
about the results, including end time and start time, can be
found at data['meta'].
:param int index: Set to None to return data from all data
definitions, defaults to returning the data from just
the first data def.
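Live usage sketch (names are placeholders)::

    result = report.get_data(index=0)
    rows = result['data']
    meta = result['meta']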
"""
if not self._instance.live:
# get the already retrieved data
if index is None:
return [dd.data for dd in self._data_defs]
return self._data_defs[index].data
else:
# need to do some special processing
# first check for existing meta data, otherwise
# do our first collection
if index is None:
msg = 'index must be a value for live reports'
raise AppResponseException(msg)
else:
resp = self._instance.get_datadef_data(index)
if not self._data_defs[index]._data_columns:
self._data_defs[index]._data_columns = resp['columns']
source_name = self._data_defs[index].source.name
if 'data' in resp:
data = self._cast_number(resp, source_name)
else:
data = None
return {'data': data, 'meta': resp['meta']}
def get_legend(self, index=0, details=False):
"""Return legend information for the data definition.
:param int index: Set to None to return data from all data definitions,
defaults to returning the data from just the first data def.
:param bool details: If True, return the complete column dict for
each column; otherwise only the short column label ids are returned.
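For example, with ``details=False`` the result is a list of short
column ids such as ['start_time', 'sum_traffic.total_bytes']; with
``details=True`` each entry is the full column dict taken from the
source's column catalog.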
"""
def get_cols(data_def):
cols = data_def._data_columns
if details:
source = data_def.source.name
columns = self.appresponse.reports.sources[source]['columns']
return [columns[c] for c in cols]
return list(cols)
if index is None:
legend = [get_cols(dd) for dd in self._data_defs]
else:
legend = get_cols(self._data_defs[index])
return legend
def get_dataframe(self, index=0):
"""Return data in pandas DataFrame format.
This will return a single DataFrame for the given index, unlike
``get_data`` and ``get_legend`` which will optionally return info for
all data defs in a report.
**Requires `pandas` library to be available in environment.**
:param int index: DataDef to process into DataFrame. Defaults to 0.
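Usage sketch::

    df = report.get_dataframe()
    print(df.head())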
"""
try:
import pandas
except ImportError as e:
logger.exception("Pandas module is required to run this function. "
"Install pandas and retry. %s" % e)
return
data = self.get_data(index)
# check if we are a live version and extract data directly
try:
data = data['data']
except TypeError:
pass
columns = self.get_legend(index)
df = pandas.DataFrame(data, columns=columns)
return df
def delete(self):
"""Delete the report from the appliance."""
self._instance.delete()
self._instance = None
self._data_defs = []
/zoi-python-sdk-1.0.1.tar.gz/zoi-python-sdk-1.0.1/zohosdk/src/com/zoho/officeintegrator/v1/create_document_parameters.py
try:
from zohosdk.src.com.zoho.exception import SDKException
from zohosdk.src.com.zoho.util import StreamWrapper, Constants
except Exception:
from ...exception import SDKException
from ...util import StreamWrapper, Constants
class CreateDocumentParameters(object):
def __init__(self):
"""Creates an instance of CreateDocumentParameters"""
self.__url = None
self.__document = None
self.__callback_settings = None
self.__document_defaults = None
self.__editor_settings = None
self.__permissions = None
self.__document_info = None
self.__user_info = None
self.__ui_options = None
self.__key_modified = dict()
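# Usage sketch (hedged: the url is a placeholder, and the
# StreamWrapper keyword argument `file_path` is assumed from the
# SDK's file-wrapping helper; consult the SDK docs for the exact
# constructor signature):
#
#   params = CreateDocumentParameters()
#   params.set_url('https://example.com/callback')
#   params.set_document(StreamWrapper(file_path='/tmp/sample.docx'))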
def get_url(self):
"""
The method to get the url
Returns:
string: A string representing the url
"""
return self.__url
def set_url(self, url):
"""
The method to set the value to url
Parameters:
url (string) : A string representing the url
"""
if url is not None and not isinstance(url, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: url EXPECTED TYPE: str', None, None)
self.__url = url
self.__key_modified['url'] = 1
def get_document(self):
"""
The method to get the document
Returns:
StreamWrapper: An instance of StreamWrapper
"""
return self.__document
def set_document(self, document):
"""
The method to set the value to document
Parameters:
document (StreamWrapper) : An instance of StreamWrapper
"""
if document is not None and not isinstance(document, StreamWrapper):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: document EXPECTED TYPE: StreamWrapper', None, None)
self.__document = document
self.__key_modified['document'] = 1
def get_callback_settings(self):
"""
The method to get the callback_settings
Returns:
CallbackSettings: An instance of CallbackSettings
"""
return self.__callback_settings
def set_callback_settings(self, callback_settings):
"""
The method to set the value to callback_settings
Parameters:
callback_settings (CallbackSettings) : An instance of CallbackSettings
"""
try:
from zohosdk.src.com.zoho.officeintegrator.v1.callback_settings import CallbackSettings
except Exception:
from .callback_settings import CallbackSettings
if callback_settings is not None and not isinstance(callback_settings, CallbackSettings):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: callback_settings EXPECTED TYPE: CallbackSettings', None, None)
self.__callback_settings = callback_settings
self.__key_modified['callback_settings'] = 1
def get_document_defaults(self):
"""
The method to get the document_defaults
Returns:
DocumentDefaults: An instance of DocumentDefaults
"""
return self.__document_defaults
def set_document_defaults(self, document_defaults):
"""
The method to set the value to document_defaults
Parameters:
document_defaults (DocumentDefaults) : An instance of DocumentDefaults
"""
try:
from zohosdk.src.com.zoho.officeintegrator.v1.document_defaults import DocumentDefaults
except Exception:
from .document_defaults import DocumentDefaults
if document_defaults is not None and not isinstance(document_defaults, DocumentDefaults):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: document_defaults EXPECTED TYPE: DocumentDefaults', None, None)
self.__document_defaults = document_defaults
self.__key_modified['document_defaults'] = 1
def get_editor_settings(self):
"""
The method to get the editor_settings
Returns:
EditorSettings: An instance of EditorSettings
"""
return self.__editor_settings
def set_editor_settings(self, editor_settings):
"""
The method to set the value to editor_settings
Parameters:
editor_settings (EditorSettings) : An instance of EditorSettings
"""
try:
from zohosdk.src.com.zoho.officeintegrator.v1.editor_settings import EditorSettings
except Exception:
from .editor_settings import EditorSettings
if editor_settings is not None and not isinstance(editor_settings, EditorSettings):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: editor_settings EXPECTED TYPE: EditorSettings', None, None)
self.__editor_settings = editor_settings
self.__key_modified['editor_settings'] = 1
def get_permissions(self):
"""
The method to get the permissions
Returns:
dict: An instance of dict
"""
return self.__permissions
def set_permissions(self, permissions):
"""
The method to set the value to permissions
Parameters:
permissions (dict) : An instance of dict
"""
if permissions is not None and not isinstance(permissions, dict):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: permissions EXPECTED TYPE: dict', None, None)
self.__permissions = permissions
self.__key_modified['permissions'] = 1
def get_document_info(self):
"""
The method to get the document_info
Returns:
DocumentInfo: An instance of DocumentInfo
"""
return self.__document_info
def set_document_info(self, document_info):
"""
The method to set the value to document_info
Parameters:
document_info (DocumentInfo) : An instance of DocumentInfo
"""
try:
from zohosdk.src.com.zoho.officeintegrator.v1.document_info import DocumentInfo
except Exception:
from .document_info import DocumentInfo
if document_info is not None and not isinstance(document_info, DocumentInfo):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: document_info EXPECTED TYPE: DocumentInfo', None, None)
self.__document_info = document_info
self.__key_modified['document_info'] = 1
def get_user_info(self):
"""
The method to get the user_info
Returns:
UserInfo: An instance of UserInfo
"""
return self.__user_info
def set_user_info(self, user_info):
"""
The method to set the value to user_info
Parameters:
user_info (UserInfo) : An instance of UserInfo
"""
try:
from zohosdk.src.com.zoho.officeintegrator.v1.user_info import UserInfo
except Exception:
from .user_info import UserInfo
if user_info is not None and not isinstance(user_info, UserInfo):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: user_info EXPECTED TYPE: UserInfo', None, None)
self.__user_info = user_info
self.__key_modified['user_info'] = 1
def get_ui_options(self):
"""
The method to get the ui_options
Returns:
UiOptions: An instance of UiOptions
"""
return self.__ui_options
def set_ui_options(self, ui_options):
"""
The method to set the value to ui_options
Parameters:
ui_options (UiOptions) : An instance of UiOptions
"""
try:
from zohosdk.src.com.zoho.officeintegrator.v1.ui_options import UiOptions
except Exception:
from .ui_options import UiOptions
if ui_options is not None and not isinstance(ui_options, UiOptions):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: ui_options EXPECTED TYPE: UiOptions', None, None)
self.__ui_options = ui_options
self.__key_modified['ui_options'] = 1
def is_key_modified(self, key):
"""
The method to check if the user has modified the given key
Parameters:
key (string) : A string representing the key
Returns:
int: An int representing the modification
"""
if key is not None and not isinstance(key, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)
if key in self.__key_modified:
return self.__key_modified.get(key)
return None
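# Sketch of the key-modified bookkeeping (per the setters above,
# each successful set_* marks its key with 1):
#
#   params = CreateDocumentParameters()
#   params.set_url('https://example.com/callback')
#   assert params.is_key_modified('url') == 1
#   assert params.is_key_modified('document') is None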
def set_key_modified(self, key, modification):
"""
The method to mark the given key as modified
Parameters:
key (string) : A string representing the key
modification (int) : An int representing the modification
"""
if key is not None and not isinstance(key, str):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None)
if modification is not None and not isinstance(modification, int):
raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None)
self.__key_modified[key] = modification
/tpds_extension_ta010_support-1.5.4-py3-none-any.whl/tpds_extension/ta010_support/frontend/assets/js/plugins.js
(function() {
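// Stub undefined console methods with no-ops so stray console.log()
// calls don't throw in browsers that lack a console (e.g. old IE).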
var method;
var noop = function () {};
var methods = [
'assert', 'clear', 'count', 'debug', 'dir', 'dirxml', 'error',
'exception', 'group', 'groupCollapsed', 'groupEnd', 'info', 'log',
'markTimeline', 'profile', 'profileEnd', 'table', 'time', 'timeEnd',
'timeline', 'timelineEnd', 'timeStamp', 'trace', 'warn'
];
var length = methods.length;
var console = (window.console = window.console || {});
while (length--) {
method = methods[length];
// Only stub undefined methods.
if (!console[method]) {
console[method] = noop;
}
}
}());
// Place any jQuery/helper plugins in here.
/*
jQuery Waypoints - v2.0.3
Copyright (c) 2011-2013 Caleb Troughton
Dual licensed under the MIT license and GPL license.
https://github.com/imakewebthings/jquery-waypoints/blob/master/licenses.txt
*/
(function(){var t=[].indexOf||function(t){for(var e=0,n=this.length;e<n;e++){if(e in this&&this[e]===t)return e}return-1},e=[].slice;(function(t,e){if(typeof define==="function"&&define.amd){return define("waypoints",["jquery"],function(n){return e(n,t)})}else{return e(t.jQuery,t)}})(this,function(n,r){var i,o,l,s,f,u,a,c,h,d,p,y,v,w,g,m;i=n(r);c=t.call(r,"ontouchstart")>=0;s={horizontal:{},vertical:{}};f=1;a={};u="waypoints-context-id";p="resize.waypoints";y="scroll.waypoints";v=1;w="waypoints-waypoint-ids";g="waypoint";m="waypoints";o=function(){function t(t){var e=this;this.$element=t;this.element=t[0];this.didResize=false;this.didScroll=false;this.id="context"+f++;this.oldScroll={x:t.scrollLeft(),y:t.scrollTop()};this.waypoints={horizontal:{},vertical:{}};t.data(u,this.id);a[this.id]=this;t.bind(y,function(){var t;if(!(e.didScroll||c)){e.didScroll=true;t=function(){e.doScroll();return e.didScroll=false};return r.setTimeout(t,n[m].settings.scrollThrottle)}});t.bind(p,function(){var t;if(!e.didResize){e.didResize=true;t=function(){n[m]("refresh");return e.didResize=false};return r.setTimeout(t,n[m].settings.resizeThrottle)}})}t.prototype.doScroll=function(){var t,e=this;t={horizontal:{newScroll:this.$element.scrollLeft(),oldScroll:this.oldScroll.x,forward:"right",backward:"left"},vertical:{newScroll:this.$element.scrollTop(),oldScroll:this.oldScroll.y,forward:"down",backward:"up"}};if(c&&(!t.vertical.oldScroll||!t.vertical.newScroll)){n[m]("refresh")}n.each(t,function(t,r){var i,o,l;l=[];o=r.newScroll>r.oldScroll;i=o?r.forward:r.backward;n.each(e.waypoints[t],function(t,e){var n,i;if(r.oldScroll<(n=e.offset)&&n<=r.newScroll){return l.push(e)}else if(r.newScroll<(i=e.offset)&&i<=r.oldScroll){return l.push(e)}});l.sort(function(t,e){return t.offset-e.offset});if(!o){l.reverse()}return n.each(l,function(t,e){if(e.options.continuous||t===l.length-1){return e.trigger([i])}})});return this.oldScroll={x:t.horizontal.newScroll,y:t.vertical.newScroll}};t.prototype.refresh=function(){var t,e,r,i=this;r=n.isWindow(this.element);e=this.$element.offset();this.doScroll();t={horizontal:{contextOffset:r?0:e.left,contextScroll:r?0:this.oldScroll.x,contextDimension:this.$element.width(),oldScroll:this.oldScroll.x,forward:"right",backward:"left",offsetProp:"left"},vertical:{contextOffset:r?0:e.top,contextScroll:r?0:this.oldScroll.y,contextDimension:r?n[m]("viewportHeight"):this.$element.height(),oldScroll:this.oldScroll.y,forward:"down",backward:"up",offsetProp:"top"}};return n.each(t,function(t,e){return n.each(i.waypoints[t],function(t,r){var i,o,l,s,f;i=r.options.offset;l=r.offset;o=n.isWindow(r.element)?0:r.$element.offset()[e.offsetProp];if(n.isFunction(i)){i=i.apply(r.element)}else if(typeof i==="string"){i=parseFloat(i);if(r.options.offset.indexOf("%")>-1){i=Math.ceil(e.contextDimension*i/100)}}r.offset=o-e.contextOffset+e.contextScroll-i;if(r.options.onlyOnScroll&&l!=null||!r.enabled){return}if(l!==null&&l<(s=e.oldScroll)&&s<=r.offset){return r.trigger([e.backward])}else if(l!==null&&l>(f=e.oldScroll)&&f>=r.offset){return r.trigger([e.forward])}else if(l===null&&e.oldScroll>=r.offset){return r.trigger([e.forward])}})})};t.prototype.checkEmpty=function(){if(n.isEmptyObject(this.waypoints.horizontal)&&n.isEmptyObject(this.waypoints.vertical)){this.$element.unbind([p,y].join(" "));return delete a[this.id]}};return t}();l=function(){function t(t,e,r){var i,o;r=n.extend({},n.fn[g].defaults,r);if(r.offset==="bottom-in-view"){r.offset=function(){var 
t;t=n[m]("viewportHeight");if(!n.isWindow(e.element)){t=e.$element.height()}return t-n(this).outerHeight()}}this.$element=t;this.element=t[0];this.axis=r.horizontal?"horizontal":"vertical";this.callback=r.handler;this.context=e;this.enabled=r.enabled;this.id="waypoints"+v++;this.offset=null;this.options=r;e.waypoints[this.axis][this.id]=this;s[this.axis][this.id]=this;i=(o=t.data(w))!=null?o:[];i.push(this.id);t.data(w,i)}t.prototype.trigger=function(t){if(!this.enabled){return}if(this.callback!=null){this.callback.apply(this.element,t)}if(this.options.triggerOnce){return this.destroy()}};t.prototype.disable=function(){return this.enabled=false};t.prototype.enable=function(){this.context.refresh();return this.enabled=true};t.prototype.destroy=function(){delete s[this.axis][this.id];delete this.context.waypoints[this.axis][this.id];return this.context.checkEmpty()};t.getWaypointsByElement=function(t){var e,r;r=n(t).data(w);if(!r){return[]}e=n.extend({},s.horizontal,s.vertical);return n.map(r,function(t){return e[t]})};return t}();d={init:function(t,e){var r;if(e==null){e={}}if((r=e.handler)==null){e.handler=t}this.each(function(){var t,r,i,s;t=n(this);i=(s=e.context)!=null?s:n.fn[g].defaults.context;if(!n.isWindow(i)){i=t.closest(i)}i=n(i);r=a[i.data(u)];if(!r){r=new o(i)}return new l(t,r,e)});n[m]("refresh");return this},disable:function(){return d._invoke(this,"disable")},enable:function(){return d._invoke(this,"enable")},destroy:function(){return d._invoke(this,"destroy")},prev:function(t,e){return d._traverse.call(this,t,e,function(t,e,n){if(e>0){return t.push(n[e-1])}})},next:function(t,e){return d._traverse.call(this,t,e,function(t,e,n){if(e<n.length-1){return t.push(n[e+1])}})},_traverse:function(t,e,i){var o,l;if(t==null){t="vertical"}if(e==null){e=r}l=h.aggregate(e);o=[];this.each(function(){var e;e=n.inArray(this,l[t]);return i(o,e,l[t])});return this.pushStack(o)},_invoke:function(t,e){t.each(function(){var t;t=l.getWaypointsByElement(this);return n.each(t,function(t,n){n[e]();return true})});return this}};n.fn[g]=function(){var t,r;r=arguments[0],t=2<=arguments.length?e.call(arguments,1):[];if(d[r]){return d[r].apply(this,t)}else if(n.isFunction(r)){return d.init.apply(this,arguments)}else if(n.isPlainObject(r)){return d.init.apply(this,[null,r])}else if(!r){return n.error("jQuery Waypoints needs a callback function or handler option.")}else{return n.error("The "+r+" method does not exist in jQuery Waypoints.")}};n.fn[g].defaults={context:r,continuous:true,enabled:true,horizontal:false,offset:0,triggerOnce:false};h={refresh:function(){return n.each(a,function(t,e){return e.refresh()})},viewportHeight:function(){var t;return(t=r.innerHeight)!=null?t:i.height()},aggregate:function(t){var e,r,i;e=s;if(t){e=(i=a[n(t).data(u)])!=null?i.waypoints:void 0}if(!e){return[]}r={horizontal:[],vertical:[]};n.each(r,function(t,i){n.each(e[t],function(t,e){return i.push(e)});i.sort(function(t,e){return t.offset-e.offset});r[t]=n.map(i,function(t){return t.element});return r[t]=n.unique(r[t])});return r},above:function(t){if(t==null){t=r}return h._filter(t,"vertical",function(t,e){return e.offset<=t.oldScroll.y})},below:function(t){if(t==null){t=r}return h._filter(t,"vertical",function(t,e){return e.offset>t.oldScroll.y})},left:function(t){if(t==null){t=r}return h._filter(t,"horizontal",function(t,e){return e.offset<=t.oldScroll.x})},right:function(t){if(t==null){t=r}return h._filter(t,"horizontal",function(t,e){return e.offset>t.oldScroll.x})},enable:function(){return 
h._invoke("enable")},disable:function(){return h._invoke("disable")},destroy:function(){return h._invoke("destroy")},extendFn:function(t,e){return d[t]=e},_invoke:function(t){var e;e=n.extend({},s.vertical,s.horizontal);return n.each(e,function(e,n){n[t]();return true})},_filter:function(t,e,r){var i,o;i=a[n(t).data(u)];if(!i){return[]}o=[];n.each(i.waypoints[e],function(t,e){if(r(i,e)){return o.push(e)}});o.sort(function(t,e){return t.offset-e.offset});return n.map(o,function(t){return t.element})}};n[m]=function(){var t,n;n=arguments[0],t=2<=arguments.length?e.call(arguments,1):[];if(h[n]){return h[n].apply(null,t)}else{return h.aggregate.call(null,n)}};n[m].settings={resizeThrottle:100,scrollThrottle:30};return i.load(function(){return n[m]("refresh")})})}).call(this);
//OWL Carousel
!function(a,b,c,d){function e(b,c){this.settings=null,this.options=a.extend({},e.Defaults,c),this.$element=a(b),this.drag=a.extend({},m),this.state=a.extend({},n),this.e=a.extend({},o),this._plugins={},this._supress={},this._current=null,this._speed=null,this._coordinates=[],this._breakpoint=null,this._width=null,this._items=[],this._clones=[],this._mergers=[],this._invalidated={},this._pipe=[],a.each(e.Plugins,a.proxy(function(a,b){this._plugins[a[0].toLowerCase()+a.slice(1)]=new b(this)},this)),a.each(e.Pipe,a.proxy(function(b,c){this._pipe.push({filter:c.filter,run:a.proxy(c.run,this)})},this)),this.setup(),this.initialize()}function f(a){if(a.touches!==d)return{x:a.touches[0].pageX,y:a.touches[0].pageY};if(a.touches===d){if(a.pageX!==d)return{x:a.pageX,y:a.pageY};if(a.pageX===d)return{x:a.clientX,y:a.clientY}}}function g(a){var b,d,e=c.createElement("div"),f=a;for(b in f)if(d=f[b],"undefined"!=typeof e.style[d])return e=null,[d,b];return[!1]}function h(){return g(["transition","WebkitTransition","MozTransition","OTransition"])[1]}function i(){return g(["transform","WebkitTransform","MozTransform","OTransform","msTransform"])[0]}function j(){return g(["perspective","webkitPerspective","MozPerspective","OPerspective","MsPerspective"])[0]}function k(){return"ontouchstart"in b||!!navigator.msMaxTouchPoints}function l(){return b.navigator.msPointerEnabled}var m,n,o;m={start:0,startX:0,startY:0,current:0,currentX:0,currentY:0,offsetX:0,offsetY:0,distance:null,startTime:0,endTime:0,updatedX:0,targetEl:null},n={isTouch:!1,isScrolling:!1,isSwiping:!1,direction:!1,inMotion:!1},o={_onDragStart:null,_onDragMove:null,_onDragEnd:null,_transitionEnd:null,_resizer:null,_responsiveCall:null,_goToLoop:null,_checkVisibile:null},e.Defaults={items:3,loop:!1,center:!1,mouseDrag:!0,touchDrag:!0,pullDrag:!0,freeDrag:!1,margin:0,stagePadding:0,merge:!1,mergeFit:!0,autoWidth:!1,startPosition:0,rtl:!1,smartSpeed:250,fluidSpeed:!1,dragEndSpeed:!1,responsive:{},responsiveRefreshRate:200,responsiveBaseElement:b,responsiveClass:!1,fallbackEasing:"swing",info:!1,nestedItemSelector:!1,itemElement:"div",stageElement:"div",themeClass:"owl-theme",baseClass:"owl-carousel",itemClass:"owl-item",centerClass:"center",activeClass:"active"},e.Width={Default:"default",Inner:"inner",Outer:"outer"},e.Plugins={},e.Pipe=[{filter:["width","items","settings"],run:function(a){a.current=this._items&&this._items[this.relative(this._current)]}},{filter:["items","settings"],run:function(){var a=this._clones,b=this.$stage.children(".cloned");(b.length!==a.length||!this.settings.loop&&a.length>0)&&(this.$stage.children(".cloned").remove(),this._clones=[])}},{filter:["items","settings"],run:function(){var a,b,c=this._clones,d=this._items,e=this.settings.loop?c.length-Math.max(2*this.settings.items,4):0;for(a=0,b=Math.abs(e/2);b>a;a++)e>0?(this.$stage.children().eq(d.length+c.length-1).remove(),c.pop(),this.$stage.children().eq(0).remove(),c.pop()):(c.push(c.length/2),this.$stage.append(d[c[c.length-1]].clone().addClass("cloned")),c.push(d.length-1-(c.length-1)/2),this.$stage.prepend(d[c[c.length-1]].clone().addClass("cloned")))}},{filter:["width","items","settings"],run:function(){var 
a,b,c,d=this.settings.rtl?1:-1,e=(this.width()/this.settings.items).toFixed(3),f=0;for(this._coordinates=[],b=0,c=this._clones.length+this._items.length;c>b;b++)a=this._mergers[this.relative(b)],a=this.settings.mergeFit&&Math.min(a,this.settings.items)||a,f+=(this.settings.autoWidth?this._items[this.relative(b)].width()+this.settings.margin:e*a)*d,this._coordinates.push(f)}},{filter:["width","items","settings"],run:function(){var b,c,d=(this.width()/this.settings.items).toFixed(3),e={width:Math.abs(this._coordinates[this._coordinates.length-1])+2*this.settings.stagePadding,"padding-left":this.settings.stagePadding||"","padding-right":this.settings.stagePadding||""};if(this.$stage.css(e),e={width:this.settings.autoWidth?"auto":d-this.settings.margin},e[this.settings.rtl?"margin-left":"margin-right"]=this.settings.margin,!this.settings.autoWidth&&a.grep(this._mergers,function(a){return a>1}).length>0)for(b=0,c=this._coordinates.length;c>b;b++)e.width=Math.abs(this._coordinates[b])-Math.abs(this._coordinates[b-1]||0)-this.settings.margin,this.$stage.children().eq(b).css(e);else this.$stage.children().css(e)}},{filter:["width","items","settings"],run:function(a){a.current&&this.reset(this.$stage.children().index(a.current))}},{filter:["position"],run:function(){this.animate(this.coordinates(this._current))}},{filter:["width","position","items","settings"],run:function(){var a,b,c,d,e=this.settings.rtl?1:-1,f=2*this.settings.stagePadding,g=this.coordinates(this.current())+f,h=g+this.width()*e,i=[];for(c=0,d=this._coordinates.length;d>c;c++)a=this._coordinates[c-1]||0,b=Math.abs(this._coordinates[c])+f*e,(this.op(a,"<=",g)&&this.op(a,">",h)||this.op(b,"<",g)&&this.op(b,">",h))&&i.push(c);this.$stage.children("."+this.settings.activeClass).removeClass(this.settings.activeClass),this.$stage.children(":eq("+i.join("), :eq(")+")").addClass(this.settings.activeClass),this.settings.center&&(this.$stage.children("."+this.settings.centerClass).removeClass(this.settings.centerClass),this.$stage.children().eq(this.current()).addClass(this.settings.centerClass))}}],e.prototype.initialize=function(){if(this.trigger("initialize"),this.$element.addClass(this.settings.baseClass).addClass(this.settings.themeClass).toggleClass("owl-rtl",this.settings.rtl),this.browserSupport(),this.settings.autoWidth&&this.state.imagesLoaded!==!0){var b,c,e;if(b=this.$element.find("img"),c=this.settings.nestedItemSelector?"."+this.settings.nestedItemSelector:d,e=this.$element.children(c).width(),b.length&&0>=e)return this.preloadAutoWidthImages(b),!1}this.$element.addClass("owl-loading"),this.$stage=a("<"+this.settings.stageElement+' class="owl-stage"/>').wrap('<div class="owl-stage-outer">'),this.$element.append(this.$stage.parent()),this.replace(this.$element.children().not(this.$stage.parent())),this._width=this.$element.width(),this.refresh(),this.$element.removeClass("owl-loading").addClass("owl-loaded"),this.eventsCall(),this.internalEvents(),this.addTriggerableEvents(),this.trigger("initialized")},e.prototype.setup=function(){var b=this.viewport(),c=this.options.responsive,d=-1,e=null;c?(a.each(c,function(a){b>=a&&a>d&&(d=Number(a))}),e=a.extend({},this.options,c[d]),delete e.responsive,e.responsiveClass&&this.$element.attr("class",function(a,b){return b.replace(/\b 
owl-responsive-\S+/g,"")}).addClass("owl-responsive-"+d)):e=a.extend({},this.options),(null===this.settings||this._breakpoint!==d)&&(this.trigger("change",{property:{name:"settings",value:e}}),this._breakpoint=d,this.settings=e,this.invalidate("settings"),this.trigger("changed",{property:{name:"settings",value:this.settings}}))},e.prototype.optionsLogic=function(){this.$element.toggleClass("owl-center",this.settings.center),this.settings.loop&&this._items.length<this.settings.items&&(this.settings.loop=!1),this.settings.autoWidth&&(this.settings.stagePadding=!1,this.settings.merge=!1)},e.prototype.prepare=function(b){var c=this.trigger("prepare",{content:b});return c.data||(c.data=a("<"+this.settings.itemElement+"/>").addClass(this.settings.itemClass).append(b)),this.trigger("prepared",{content:c.data}),c.data},e.prototype.update=function(){for(var b=0,c=this._pipe.length,d=a.proxy(function(a){return this[a]},this._invalidated),e={};c>b;)(this._invalidated.all||a.grep(this._pipe[b].filter,d).length>0)&&this._pipe[b].run(e),b++;this._invalidated={}},e.prototype.width=function(a){switch(a=a||e.Width.Default){case e.Width.Inner:case e.Width.Outer:return this._width;default:return this._width-2*this.settings.stagePadding+this.settings.margin}},e.prototype.refresh=function(){if(0===this._items.length)return!1;(new Date).getTime();this.trigger("refresh"),this.setup(),this.optionsLogic(),this.$stage.addClass("owl-refresh"),this.update(),this.$stage.removeClass("owl-refresh"),this.state.orientation=b.orientation,this.watchVisibility(),this.trigger("refreshed")},e.prototype.eventsCall=function(){this.e._onDragStart=a.proxy(function(a){this.onDragStart(a)},this),this.e._onDragMove=a.proxy(function(a){this.onDragMove(a)},this),this.e._onDragEnd=a.proxy(function(a){this.onDragEnd(a)},this),this.e._onResize=a.proxy(function(a){this.onResize(a)},this),this.e._transitionEnd=a.proxy(function(a){this.transitionEnd(a)},this),this.e._preventClick=a.proxy(function(a){this.preventClick(a)},this)},e.prototype.onThrottledResize=function(){b.clearTimeout(this.resizeTimer),this.resizeTimer=b.setTimeout(this.e._onResize,this.settings.responsiveRefreshRate)},e.prototype.onResize=function(){return this._items.length?this._width===this.$element.width()?!1:this.trigger("resize").isDefaultPrevented()?!1:(this._width=this.$element.width(),this.invalidate("width"),this.refresh(),void this.trigger("resized")):!1},e.prototype.eventsRouter=function(a){var b=a.type;"mousedown"===b||"touchstart"===b?this.onDragStart(a):"mousemove"===b||"touchmove"===b?this.onDragMove(a):"mouseup"===b||"touchend"===b?this.onDragEnd(a):"touchcancel"===b&&this.onDragEnd(a)},e.prototype.internalEvents=function(){var c=(k(),l());this.settings.mouseDrag?(this.$stage.on("mousedown",a.proxy(function(a){this.eventsRouter(a)},this)),this.$stage.on("dragstart",function(){return!1}),this.$stage.get(0).onselectstart=function(){return!1}):this.$element.addClass("owl-text-select-on"),this.settings.touchDrag&&!c&&this.$stage.on("touchstart touchcancel",a.proxy(function(a){this.eventsRouter(a)},this)),this.transitionEndVendor&&this.on(this.$stage.get(0),this.transitionEndVendor,this.e._transitionEnd,!1),this.settings.responsive!==!1&&this.on(b,"resize",a.proxy(this.onThrottledResize,this))},e.prototype.onDragStart=function(d){var e,g,h,i;if(e=d.originalEvent||d||b.event,3===e.which||this.state.isTouch)return!1;if("mousedown"===e.type&&this.$stage.addClass("owl-grab"),this.trigger("drag"),this.drag.startTime=(new 
Date).getTime(),this.speed(0),this.state.isTouch=!0,this.state.isScrolling=!1,this.state.isSwiping=!1,this.drag.distance=0,g=f(e).x,h=f(e).y,this.drag.offsetX=this.$stage.position().left,this.drag.offsetY=this.$stage.position().top,this.settings.rtl&&(this.drag.offsetX=this.$stage.position().left+this.$stage.width()-this.width()+this.settings.margin),this.state.inMotion&&this.support3d)i=this.getTransformProperty(),this.drag.offsetX=i,this.animate(i),this.state.inMotion=!0;else if(this.state.inMotion&&!this.support3d)return this.state.inMotion=!1,!1;this.drag.startX=g-this.drag.offsetX,this.drag.startY=h-this.drag.offsetY,this.drag.start=g-this.drag.startX,this.drag.targetEl=e.target||e.srcElement,this.drag.updatedX=this.drag.start,("IMG"===this.drag.targetEl.tagName||"A"===this.drag.targetEl.tagName)&&(this.drag.targetEl.draggable=!1),a(c).on("mousemove.owl.dragEvents mouseup.owl.dragEvents touchmove.owl.dragEvents touchend.owl.dragEvents",a.proxy(function(a){this.eventsRouter(a)},this))},e.prototype.onDragMove=function(a){var c,e,g,h,i,j;this.state.isTouch&&(this.state.isScrolling||(c=a.originalEvent||a||b.event,e=f(c).x,g=f(c).y,this.drag.currentX=e-this.drag.startX,this.drag.currentY=g-this.drag.startY,this.drag.distance=this.drag.currentX-this.drag.offsetX,this.drag.distance<0?this.state.direction=this.settings.rtl?"right":"left":this.drag.distance>0&&(this.state.direction=this.settings.rtl?"left":"right"),this.settings.loop?this.op(this.drag.currentX,">",this.coordinates(this.minimum()))&&"right"===this.state.direction?this.drag.currentX-=(this.settings.center&&this.coordinates(0))-this.coordinates(this._items.length):this.op(this.drag.currentX,"<",this.coordinates(this.maximum()))&&"left"===this.state.direction&&(this.drag.currentX+=(this.settings.center&&this.coordinates(0))-this.coordinates(this._items.length)):(h=this.coordinates(this.settings.rtl?this.maximum():this.minimum()),i=this.coordinates(this.settings.rtl?this.minimum():this.maximum()),j=this.settings.pullDrag?this.drag.distance/5:0,this.drag.currentX=Math.max(Math.min(this.drag.currentX,h+j),i+j)),(this.drag.distance>8||this.drag.distance<-8)&&(c.preventDefault!==d?c.preventDefault():c.returnValue=!1,this.state.isSwiping=!0),this.drag.updatedX=this.drag.currentX,(this.drag.currentY>16||this.drag.currentY<-16)&&this.state.isSwiping===!1&&(this.state.isScrolling=!0,this.drag.updatedX=this.drag.start),this.animate(this.drag.updatedX)))},e.prototype.onDragEnd=function(b){var d,e,f;if(this.state.isTouch){if("mouseup"===b.type&&this.$stage.removeClass("owl-grab"),this.trigger("dragged"),this.drag.targetEl.removeAttribute("draggable"),this.state.isTouch=!1,this.state.isScrolling=!1,this.state.isSwiping=!1,0===this.drag.distance&&this.state.inMotion!==!0)return this.state.inMotion=!1,!1;this.drag.endTime=(new 
Date).getTime(),d=this.drag.endTime-this.drag.startTime,e=Math.abs(this.drag.distance),(e>3||d>300)&&this.removeClick(this.drag.targetEl),f=this.closest(this.drag.updatedX),this.speed(this.settings.dragEndSpeed||this.settings.smartSpeed),this.current(f),this.invalidate("position"),this.update(),this.settings.pullDrag||this.drag.updatedX!==this.coordinates(f)||this.transitionEnd(),this.drag.distance=0,a(c).off(".owl.dragEvents")}},e.prototype.removeClick=function(c){this.drag.targetEl=c,a(c).on("click.preventClick",this.e._preventClick),b.setTimeout(function(){a(c).off("click.preventClick")},300)},e.prototype.preventClick=function(b){b.preventDefault?b.preventDefault():b.returnValue=!1,b.stopPropagation&&b.stopPropagation(),a(b.target).off("click.preventClick")},e.prototype.getTransformProperty=function(){var a,c;return a=b.getComputedStyle(this.$stage.get(0),null).getPropertyValue(this.vendorName+"transform"),a=a.replace(/matrix(3d)?\(|\)/g,"").split(","),c=16===a.length,c!==!0?a[4]:a[12]},e.prototype.closest=function(b){var c=-1,d=30,e=this.width(),f=this.coordinates();return this.settings.freeDrag||a.each(f,a.proxy(function(a,g){return b>g-d&&g+d>b?c=a:this.op(b,"<",g)&&this.op(b,">",f[a+1]||g-e)&&(c="left"===this.state.direction?a+1:a),-1===c},this)),this.settings.loop||(this.op(b,">",f[this.minimum()])?c=b=this.minimum():this.op(b,"<",f[this.maximum()])&&(c=b=this.maximum())),c},e.prototype.animate=function(b){this.trigger("translate"),this.state.inMotion=this.speed()>0,this.support3d?this.$stage.css({transform:"translate3d("+b+"px,0px, 0px)",transition:this.speed()/1e3+"s"}):this.state.isTouch?this.$stage.css({left:b+"px"}):this.$stage.animate({left:b},this.speed()/1e3,this.settings.fallbackEasing,a.proxy(function(){this.state.inMotion&&this.transitionEnd()},this))},e.prototype.current=function(a){if(a===d)return this._current;if(0===this._items.length)return d;if(a=this.normalize(a),this._current!==a){var b=this.trigger("change",{property:{name:"position",value:a}});b.data!==d&&(a=this.normalize(b.data)),this._current=a,this.invalidate("position"),this.trigger("changed",{property:{name:"position",value:this._current}})}return this._current},e.prototype.invalidate=function(a){this._invalidated[a]=!0},e.prototype.reset=function(a){a=this.normalize(a),a!==d&&(this._speed=0,this._current=a,this.suppress(["translate","translated"]),this.animate(this.coordinates(a)),this.release(["translate","translated"]))},e.prototype.normalize=function(b,c){var e=c?this._items.length:this._items.length+this._clones.length;return!a.isNumeric(b)||1>e?d:b=this._clones.length?(b%e+e)%e:Math.max(this.minimum(c),Math.min(this.maximum(c),b))},e.prototype.relative=function(a){return a=this.normalize(a),a-=this._clones.length/2,this.normalize(a,!0)},e.prototype.maximum=function(a){var b,c,d,e=0,f=this.settings;if(a)return this._items.length-1;if(!f.loop&&f.center)b=this._items.length-1;else if(f.loop||f.center)if(f.loop||f.center)b=this._items.length+f.items;else{if(!f.autoWidth&&!f.merge)throw"Can not detect maximum absolute position.";for(revert=f.rtl?1:-1,c=this.$stage.width()-this.$element.width();(d=this.coordinates(e))&&!(d*revert>=c);)b=++e}else b=this._items.length-f.items;return b},e.prototype.minimum=function(a){return a?0:this._clones.length/2},e.prototype.items=function(a){return a===d?this._items.slice():(a=this.normalize(a,!0),this._items[a])},e.prototype.mergers=function(a){return a===d?this._mergers.slice():(a=this.normalize(a,!0),this._mergers[a])},e.prototype.clones=function(b){var 
c=this._clones.length/2,e=c+this._items.length,f=function(a){return a%2===0?e+a/2:c-(a+1)/2};return b===d?a.map(this._clones,function(a,b){return f(b)}):a.map(this._clones,function(a,c){return a===b?f(c):null})},e.prototype.speed=function(a){return a!==d&&(this._speed=a),this._speed},e.prototype.coordinates=function(b){var c=null;return b===d?a.map(this._coordinates,a.proxy(function(a,b){return this.coordinates(b)},this)):(this.settings.center?(c=this._coordinates[b],c+=(this.width()-c+(this._coordinates[b-1]||0))/2*(this.settings.rtl?-1:1)):c=this._coordinates[b-1]||0,c)},e.prototype.duration=function(a,b,c){return Math.min(Math.max(Math.abs(b-a),1),6)*Math.abs(c||this.settings.smartSpeed)},e.prototype.to=function(c,d){if(this.settings.loop){var e=c-this.relative(this.current()),f=this.current(),g=this.current(),h=this.current()+e,i=0>g-h?!0:!1,j=this._clones.length+this._items.length;h<this.settings.items&&i===!1?(f=g+this._items.length,this.reset(f)):h>=j-this.settings.items&&i===!0&&(f=g-this._items.length,this.reset(f)),b.clearTimeout(this.e._goToLoop),this.e._goToLoop=b.setTimeout(a.proxy(function(){this.speed(this.duration(this.current(),f+e,d)),this.current(f+e),this.update()},this),30)}else this.speed(this.duration(this.current(),c,d)),this.current(c),this.update()},e.prototype.next=function(a){a=a||!1,this.to(this.relative(this.current())+1,a)},e.prototype.prev=function(a){a=a||!1,this.to(this.relative(this.current())-1,a)},e.prototype.transitionEnd=function(a){return a!==d&&(a.stopPropagation(),(a.target||a.srcElement||a.originalTarget)!==this.$stage.get(0))?!1:(this.state.inMotion=!1,void this.trigger("translated"))},e.prototype.viewport=function(){var d;if(this.options.responsiveBaseElement!==b)d=a(this.options.responsiveBaseElement).width();else if(b.innerWidth)d=b.innerWidth;else{if(!c.documentElement||!c.documentElement.clientWidth)throw"Can not detect viewport width.";d=c.documentElement.clientWidth}return d},e.prototype.replace=function(b){this.$stage.empty(),this._items=[],b&&(b=b instanceof jQuery?b:a(b)),this.settings.nestedItemSelector&&(b=b.find("."+this.settings.nestedItemSelector)),b.filter(function(){return 1===this.nodeType}).each(a.proxy(function(a,b){b=this.prepare(b),this.$stage.append(b),this._items.push(b),this._mergers.push(1*b.find("[data-merge]").andSelf("[data-merge]").attr("data-merge")||1)},this)),this.reset(a.isNumeric(this.settings.startPosition)?this.settings.startPosition:0),this.invalidate("items")},e.prototype.add=function(a,b){b=b===d?this._items.length:this.normalize(b,!0),this.trigger("add",{content:a,position:b}),0===this._items.length||b===this._items.length?(this.$stage.append(a),this._items.push(a),this._mergers.push(1*a.find("[data-merge]").andSelf("[data-merge]").attr("data-merge")||1)):(this._items[b].before(a),this._items.splice(b,0,a),this._mergers.splice(b,0,1*a.find("[data-merge]").andSelf("[data-merge]").attr("data-merge")||1)),this.invalidate("items"),this.trigger("added",{content:a,position:b})},e.prototype.remove=function(a){a=this.normalize(a,!0),a!==d&&(this.trigger("remove",{content:this._items[a],position:a}),this._items[a].remove(),this._items.splice(a,1),this._mergers.splice(a,1),this.invalidate("items"),this.trigger("removed",{content:null,position:a}))},e.prototype.addTriggerableEvents=function(){var b=a.proxy(function(b,c){return 
a.proxy(function(a){a.relatedTarget!==this&&(this.suppress([c]),b.apply(this,[].slice.call(arguments,1)),this.release([c]))},this)},this);a.each({next:this.next,prev:this.prev,to:this.to,destroy:this.destroy,refresh:this.refresh,replace:this.replace,add:this.add,remove:this.remove},a.proxy(function(a,c){this.$element.on(a+".owl.carousel",b(c,a+".owl.carousel"))},this))},e.prototype.watchVisibility=function(){function c(a){return a.offsetWidth>0&&a.offsetHeight>0}function d(){c(this.$element.get(0))&&(this.$element.removeClass("owl-hidden"),this.refresh(),b.clearInterval(this.e._checkVisibile))}c(this.$element.get(0))||(this.$element.addClass("owl-hidden"),b.clearInterval(this.e._checkVisibile),this.e._checkVisibile=b.setInterval(a.proxy(d,this),500))},e.prototype.preloadAutoWidthImages=function(b){var c,d,e,f;c=0,d=this,b.each(function(g,h){e=a(h),f=new Image,f.onload=function(){c++,e.attr("src",f.src),e.css("opacity",1),c>=b.length&&(d.state.imagesLoaded=!0,d.initialize())},f.src=e.attr("src")||e.attr("data-src")||e.attr("data-src-retina")})},e.prototype.destroy=function(){this.$element.hasClass(this.settings.themeClass)&&this.$element.removeClass(this.settings.themeClass),this.settings.responsive!==!1&&a(b).off("resize.owl.carousel"),this.transitionEndVendor&&this.off(this.$stage.get(0),this.transitionEndVendor,this.e._transitionEnd);for(var d in this._plugins)this._plugins[d].destroy();(this.settings.mouseDrag||this.settings.touchDrag)&&(this.$stage.off("mousedown touchstart touchcancel"),a(c).off(".owl.dragEvents"),this.$stage.get(0).onselectstart=function(){},this.$stage.off("dragstart",function(){return!1})),this.$element.off(".owl"),this.$stage.children(".cloned").remove(),this.e=null,this.$element.removeData("owlCarousel"),this.$stage.children().contents().unwrap(),this.$stage.children().unwrap(),this.$stage.unwrap()},e.prototype.op=function(a,b,c){var d=this.settings.rtl;switch(b){case"<":return d?a>c:c>a;case">":return d?c>a:a>c;case">=":return d?c>=a:a>=c;case"<=":return d?a>=c:c>=a}},e.prototype.on=function(a,b,c,d){a.addEventListener?a.addEventListener(b,c,d):a.attachEvent&&a.attachEvent("on"+b,c)},e.prototype.off=function(a,b,c,d){a.removeEventListener?a.removeEventListener(b,c,d):a.detachEvent&&a.detachEvent("on"+b,c)},e.prototype.trigger=function(b,c,d){var e={item:{count:this._items.length,index:this.current()}},f=a.camelCase(a.grep(["on",b,d],function(a){return a}).join("-").toLowerCase()),g=a.Event([b,"owl",d||"carousel"].join(".").toLowerCase(),a.extend({relatedTarget:this},e,c));return this._supress[b]||(a.each(this._plugins,function(a,b){b.onTrigger&&b.onTrigger(g)}),this.$element.trigger(g),this.settings&&"function"==typeof this.settings[f]&&this.settings[f].apply(this,g)),g},e.prototype.suppress=function(b){a.each(b,a.proxy(function(a,b){this._supress[b]=!0},this))},e.prototype.release=function(b){a.each(b,a.proxy(function(a,b){delete this._supress[b]},this))},e.prototype.browserSupport=function(){if(this.support3d=j(),this.support3d){this.transformVendor=i();var a=["transitionend","webkitTransitionEnd","transitionend","oTransitionEnd"];this.transitionEndVendor=a[h()],this.vendorName=this.transformVendor.replace(/Transform/i,""),this.vendorName=""!==this.vendorName?"-"+this.vendorName.toLowerCase()+"-":""}this.state.orientation=b.orientation},a.fn.owlCarousel=function(b){return this.each(function(){a(this).data("owlCarousel")||a(this).data("owlCarousel",new e(this,b))})},a.fn.owlCarousel.Constructor=e}(window.Zepto||window.jQuery,window,document),function(a,b){var 
c=function(b){this._core=b,this._loaded=[],this._handlers={"initialized.owl.carousel change.owl.carousel":a.proxy(function(b){if(b.namespace&&this._core.settings&&this._core.settings.lazyLoad&&(b.property&&"position"==b.property.name||"initialized"==b.type))for(var c=this._core.settings,d=c.center&&Math.ceil(c.items/2)||c.items,e=c.center&&-1*d||0,f=(b.property&&b.property.value||this._core.current())+e,g=this._core.clones().length,h=a.proxy(function(a,b){this.load(b)},this);e++<d;)this.load(g/2+this._core.relative(f)),g&&a.each(this._core.clones(this._core.relative(f++)),h)},this)},this._core.options=a.extend({},c.Defaults,this._core.options),this._core.$element.on(this._handlers)};c.Defaults={lazyLoad:!1},c.prototype.load=function(c){var d=this._core.$stage.children().eq(c),e=d&&d.find(".owl-lazy");!e||a.inArray(d.get(0),this._loaded)>-1||(e.each(a.proxy(function(c,d){var e,f=a(d),g=b.devicePixelRatio>1&&f.attr("data-src-retina")||f.attr("data-src");this._core.trigger("load",{element:f,url:g},"lazy"),f.is("img")?f.one("load.owl.lazy",a.proxy(function(){f.css("opacity",1),this._core.trigger("loaded",{element:f,url:g},"lazy")},this)).attr("src",g):(e=new Image,e.onload=a.proxy(function(){f.css({"background-image":"url("+g+")",opacity:"1"}),this._core.trigger("loaded",{element:f,url:g},"lazy")},this),e.src=g)},this)),this._loaded.push(d.get(0)))},c.prototype.destroy=function(){var a,b;for(a in this.handlers)this._core.$element.off(a,this.handlers[a]);for(b in Object.getOwnPropertyNames(this))"function"!=typeof this[b]&&(this[b]=null)},a.fn.owlCarousel.Constructor.Plugins.Lazy=c}(window.Zepto||window.jQuery,window,document),function(a){var b=function(c){this._core=c,this._handlers={"initialized.owl.carousel":a.proxy(function(){this._core.settings.autoHeight&&this.update()},this),"changed.owl.carousel":a.proxy(function(a){this._core.settings.autoHeight&&"position"==a.property.name&&this.update()},this),"loaded.owl.lazy":a.proxy(function(a){this._core.settings.autoHeight&&a.element.closest("."+this._core.settings.itemClass)===this._core.$stage.children().eq(this._core.current())&&this.update()},this)},this._core.options=a.extend({},b.Defaults,this._core.options),this._core.$element.on(this._handlers)};b.Defaults={autoHeight:!1,autoHeightClass:"owl-height"},b.prototype.update=function(){this._core.$stage.parent().height(this._core.$stage.children().eq(this._core.current()).height()).addClass(this._core.settings.autoHeightClass)},b.prototype.destroy=function(){var a,b;for(a in this._handlers)this._core.$element.off(a,this._handlers[a]);for(b in Object.getOwnPropertyNames(this))"function"!=typeof this[b]&&(this[b]=null)},a.fn.owlCarousel.Constructor.Plugins.AutoHeight=b}(window.Zepto||window.jQuery,window,document),function(a,b,c){var d=function(b){this._core=b,this._videos={},this._playing=null,this._fullscreen=!1,this._handlers={"resize.owl.carousel":a.proxy(function(a){this._core.settings.video&&!this.isInFullScreen()&&a.preventDefault()},this),"refresh.owl.carousel changed.owl.carousel":a.proxy(function(){this._playing&&this.stop()},this),"prepared.owl.carousel":a.proxy(function(b){var c=a(b.content).find(".owl-video");c.length&&(c.css("display","none"),this.fetch(c,a(b.content)))},this)},this._core.options=a.extend({},d.Defaults,this._core.options),this._core.$element.on(this._handlers),this._core.$element.on("click.owl.video",".owl-video-play-icon",a.proxy(function(a){this.play(a)},this))};d.Defaults={video:!1,videoHeight:!1,videoWidth:!1},d.prototype.fetch=function(a,b){var 
c=a.attr("data-vimeo-id")?"vimeo":"youtube",d=a.attr("data-vimeo-id")||a.attr("data-youtube-id"),e=a.attr("data-width")||this._core.settings.videoWidth,f=a.attr("data-height")||this._core.settings.videoHeight,g=a.attr("href");if(!g)throw new Error("Missing video URL.");if(d=g.match(/(http:|https:|)\/\/(player.|www.)?(vimeo\.com|youtu(be\.com|\.be|be\.googleapis\.com))\/(video\/|embed\/|watch\?v=|v\/)?([A-Za-z0-9._%-]*)(\&\S+)?/),d[3].indexOf("youtu")>-1)c="youtube";else{if(!(d[3].indexOf("vimeo")>-1))throw new Error("Video URL not supported.");c="vimeo"}d=d[6],this._videos[g]={type:c,id:d,width:e,height:f},b.attr("data-video",g),this.thumbnail(a,this._videos[g])},d.prototype.thumbnail=function(b,c){var d,e,f,g=c.width&&c.height?'style="width:'+c.width+"px;height:"+c.height+'px;"':"",h=b.find("img"),i="src",j="",k=this._core.settings,l=function(a){e='<div class="owl-video-play-icon"></div>',d=k.lazyLoad?'<div class="owl-video-tn '+j+'" '+i+'="'+a+'"></div>':'<div class="owl-video-tn" style="opacity:1;background-image:url('+a+')"></div>',b.after(d),b.after(e)};return b.wrap('<div class="owl-video-wrapper"'+g+"></div>"),this._core.settings.lazyLoad&&(i="data-src",j="owl-lazy"),h.length?(l(h.attr(i)),h.remove(),!1):void("youtube"===c.type?(f="http://img.youtube.com/vi/"+c.id+"/hqdefault.jpg",l(f)):"vimeo"===c.type&&a.ajax({type:"GET",url:"http://vimeo.com/api/v2/video/"+c.id+".json",jsonp:"callback",dataType:"jsonp",success:function(a){f=a[0].thumbnail_large,l(f)}}))},d.prototype.stop=function(){this._core.trigger("stop",null,"video"),this._playing.find(".owl-video-frame").remove(),this._playing.removeClass("owl-video-playing"),this._playing=null},d.prototype.play=function(b){this._core.trigger("play",null,"video"),this._playing&&this.stop();var c,d,e=a(b.target||b.srcElement),f=e.closest("."+this._core.settings.itemClass),g=this._videos[f.attr("data-video")],h=g.width||"100%",i=g.height||this._core.$stage.height();"youtube"===g.type?c='<iframe width="'+h+'" height="'+i+'" src="http://www.youtube.com/embed/'+g.id+"?autoplay=1&v="+g.id+'" frameborder="0" allowfullscreen></iframe>':"vimeo"===g.type&&(c='<iframe src="http://player.vimeo.com/video/'+g.id+'?autoplay=1" width="'+h+'" height="'+i+'" frameborder="0" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>'),f.addClass("owl-video-playing"),this._playing=f,d=a('<div style="height:'+i+"px; width:"+h+'px" class="owl-video-frame">'+c+"</div>"),e.after(d)},d.prototype.isInFullScreen=function(){var d=c.fullscreenElement||c.mozFullScreenElement||c.webkitFullscreenElement;return d&&a(d).parent().hasClass("owl-video-frame")&&(this._core.speed(0),this._fullscreen=!0),d&&this._fullscreen&&this._playing?!1:this._fullscreen?(this._fullscreen=!1,!1):this._playing&&this._core.state.orientation!==b.orientation?(this._core.state.orientation=b.orientation,!1):!0},d.prototype.destroy=function(){var a,b;this._core.$element.off("click.owl.video");for(a in this._handlers)this._core.$element.off(a,this._handlers[a]);for(b in Object.getOwnPropertyNames(this))"function"!=typeof this[b]&&(this[b]=null)},a.fn.owlCarousel.Constructor.Plugins.Video=d}(window.Zepto||window.jQuery,window,document),function(a,b,c,d){var e=function(b){this.core=b,this.core.options=a.extend({},e.Defaults,this.core.options),this.swapping=!0,this.previous=d,this.next=d,this.handlers={"change.owl.carousel":a.proxy(function(a){"position"==a.property.name&&(this.previous=this.core.current(),this.next=a.property.value)},this),"drag.owl.carousel dragged.owl.carousel 
translated.owl.carousel":a.proxy(function(a){this.swapping="translated"==a.type},this),"translate.owl.carousel":a.proxy(function(){this.swapping&&(this.core.options.animateOut||this.core.options.animateIn)&&this.swap()},this)},this.core.$element.on(this.handlers)};e.Defaults={animateOut:!1,animateIn:!1},e.prototype.swap=function(){if(1===this.core.settings.items&&this.core.support3d){this.core.speed(0);var b,c=a.proxy(this.clear,this),d=this.core.$stage.children().eq(this.previous),e=this.core.$stage.children().eq(this.next),f=this.core.settings.animateIn,g=this.core.settings.animateOut;this.core.current()!==this.previous&&(g&&(b=this.core.coordinates(this.previous)-this.core.coordinates(this.next),d.css({left:b+"px"}).addClass("animated owl-animated-out").addClass(g).one("webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend",c)),f&&e.addClass("animated owl-animated-in").addClass(f).one("webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend",c))}},e.prototype.clear=function(b){a(b.target).css({left:""}).removeClass("animated owl-animated-out owl-animated-in").removeClass(this.core.settings.animateIn).removeClass(this.core.settings.animateOut),this.core.transitionEnd()},e.prototype.destroy=function(){var a,b;for(a in this.handlers)this.core.$element.off(a,this.handlers[a]);for(b in Object.getOwnPropertyNames(this))"function"!=typeof this[b]&&(this[b]=null)},a.fn.owlCarousel.Constructor.Plugins.Animate=e}(window.Zepto||window.jQuery,window,document),function(a,b,c){var d=function(b){this.core=b,this.core.options=a.extend({},d.Defaults,this.core.options),this.handlers={"translated.owl.carousel refreshed.owl.carousel":a.proxy(function(){this.autoplay()
},this),"play.owl.autoplay":a.proxy(function(a,b,c){this.play(b,c)},this),"stop.owl.autoplay":a.proxy(function(){this.stop()},this),"mouseover.owl.autoplay":a.proxy(function(){this.core.settings.autoplayHoverPause&&this.pause()},this),"mouseleave.owl.autoplay":a.proxy(function(){this.core.settings.autoplayHoverPause&&this.autoplay()},this)},this.core.$element.on(this.handlers)};d.Defaults={autoplay:!1,autoplayTimeout:5e3,autoplayHoverPause:!1,autoplaySpeed:!1},d.prototype.autoplay=function(){this.core.settings.autoplay&&!this.core.state.videoPlay?(b.clearInterval(this.interval),this.interval=b.setInterval(a.proxy(function(){this.play()},this),this.core.settings.autoplayTimeout)):b.clearInterval(this.interval)},d.prototype.play=function(){return c.hidden===!0||this.core.state.isTouch||this.core.state.isScrolling||this.core.state.isSwiping||this.core.state.inMotion?void 0:this.core.settings.autoplay===!1?void b.clearInterval(this.interval):void this.core.next(this.core.settings.autoplaySpeed)},d.prototype.stop=function(){b.clearInterval(this.interval)},d.prototype.pause=function(){b.clearInterval(this.interval)},d.prototype.destroy=function(){var a,c;b.clearInterval(this.interval);for(a in this.handlers)this.core.$element.off(a,this.handlers[a]);for(c in Object.getOwnPropertyNames(this))"function"!=typeof this[c]&&(this[c]=null)},a.fn.owlCarousel.Constructor.Plugins.autoplay=d}(window.Zepto||window.jQuery,window,document),function(a){"use strict";var b=function(c){this._core=c,this._initialized=!1,this._pages=[],this._controls={},this._templates=[],this.$element=this._core.$element,this._overrides={next:this._core.next,prev:this._core.prev,to:this._core.to},this._handlers={"prepared.owl.carousel":a.proxy(function(b){this._core.settings.dotsData&&this._templates.push(a(b.content).find("[data-dot]").andSelf("[data-dot]").attr("data-dot"))},this),"add.owl.carousel":a.proxy(function(b){this._core.settings.dotsData&&this._templates.splice(b.position,0,a(b.content).find("[data-dot]").andSelf("[data-dot]").attr("data-dot"))},this),"remove.owl.carousel prepared.owl.carousel":a.proxy(function(a){this._core.settings.dotsData&&this._templates.splice(a.position,1)},this),"change.owl.carousel":a.proxy(function(a){if("position"==a.property.name&&!this._core.state.revert&&!this._core.settings.loop&&this._core.settings.navRewind){var b=this._core.current(),c=this._core.maximum(),d=this._core.minimum();a.data=a.property.value>c?b>=c?d:c:a.property.value<d?c:a.property.value}},this),"changed.owl.carousel":a.proxy(function(a){"position"==a.property.name&&this.draw()},this),"refreshed.owl.carousel":a.proxy(function(){this._initialized||(this.initialize(),this._initialized=!0),this._core.trigger("refresh",null,"navigation"),this.update(),this.draw(),this._core.trigger("refreshed",null,"navigation")},this)},this._core.options=a.extend({},b.Defaults,this._core.options),this.$element.on(this._handlers)};b.Defaults={nav:!1,navRewind:!0,navText:["prev","next"],navSpeed:!1,navElement:"div",navContainer:!1,navContainerClass:"owl-nav",navClass:["owl-prev","owl-next"],slideBy:1,dotClass:"owl-dot",dotsClass:"owl-dots",dots:!0,dotsEach:!1,dotData:!1,dotsSpeed:!1,dotsContainer:!1,controlsClass:"owl-controls"},b.prototype.initialize=function(){var 
b,c,d=this._core.settings;d.dotsData||(this._templates=[a("<div>").addClass(d.dotClass).append(a("<span>")).prop("outerHTML")]),d.navContainer&&d.dotsContainer||(this._controls.$container=a("<div>").addClass(d.controlsClass).appendTo(this.$element)),this._controls.$indicators=d.dotsContainer?a(d.dotsContainer):a("<div>").hide().addClass(d.dotsClass).appendTo(this._controls.$container),this._controls.$indicators.on("click","div",a.proxy(function(b){var c=a(b.target).parent().is(this._controls.$indicators)?a(b.target).index():a(b.target).parent().index();b.preventDefault(),this.to(c,d.dotsSpeed)},this)),b=d.navContainer?a(d.navContainer):a("<div>").addClass(d.navContainerClass).prependTo(this._controls.$container),this._controls.$next=a("<"+d.navElement+">"),this._controls.$previous=this._controls.$next.clone(),this._controls.$previous.addClass(d.navClass[0]).html(d.navText[0]).hide().prependTo(b).on("click",a.proxy(function(){this.prev(d.navSpeed)},this)),this._controls.$next.addClass(d.navClass[1]).html(d.navText[1]).hide().appendTo(b).on("click",a.proxy(function(){this.next(d.navSpeed)},this));for(c in this._overrides)this._core[c]=a.proxy(this[c],this)},b.prototype.destroy=function(){var a,b,c,d;for(a in this._handlers)this.$element.off(a,this._handlers[a]);for(b in this._controls)this._controls[b].remove();for(d in this.overides)this._core[d]=this._overrides[d];for(c in Object.getOwnPropertyNames(this))"function"!=typeof this[c]&&(this[c]=null)},b.prototype.update=function(){var a,b,c,d=this._core.settings,e=this._core.clones().length/2,f=e+this._core.items().length,g=d.center||d.autoWidth||d.dotData?1:d.dotsEach||d.items;if("page"!==d.slideBy&&(d.slideBy=Math.min(d.slideBy,d.items)),d.dots||"page"==d.slideBy)for(this._pages=[],a=e,b=0,c=0;f>a;a++)(b>=g||0===b)&&(this._pages.push({start:a-e,end:a-e+g-1}),b=0,++c),b+=this._core.mergers(this._core.relative(a))},b.prototype.draw=function(){var b,c,d="",e=this._core.settings,f=(this._core.$stage.children(),this._core.relative(this._core.current()));if(!e.nav||e.loop||e.navRewind||(this._controls.$previous.toggleClass("disabled",0>=f),this._controls.$next.toggleClass("disabled",f>=this._core.maximum())),this._controls.$previous.toggle(e.nav),this._controls.$next.toggle(e.nav),e.dots){if(b=this._pages.length-this._controls.$indicators.children().length,e.dotData&&0!==b){for(c=0;c<this._controls.$indicators.children().length;c++)d+=this._templates[this._core.relative(c)];this._controls.$indicators.html(d)}else b>0?(d=new Array(b+1).join(this._templates[0]),this._controls.$indicators.append(d)):0>b&&this._controls.$indicators.children().slice(b).remove();this._controls.$indicators.find(".active").removeClass("active"),this._controls.$indicators.children().eq(a.inArray(this.current(),this._pages)).addClass("active")}this._controls.$indicators.toggle(e.dots)},b.prototype.onTrigger=function(b){var c=this._core.settings;b.page={index:a.inArray(this.current(),this._pages),count:this._pages.length,size:c&&(c.center||c.autoWidth||c.dotData?1:c.dotsEach||c.items)}},b.prototype.current=function(){var b=this._core.relative(this._core.current());return a.grep(this._pages,function(a){return a.start<=b&&a.end>=b}).pop()},b.prototype.getPosition=function(b){var 
c,d,e=this._core.settings;return"page"==e.slideBy?(c=a.inArray(this.current(),this._pages),d=this._pages.length,b?++c:--c,c=this._pages[(c%d+d)%d].start):(c=this._core.relative(this._core.current()),d=this._core.items().length,b?c+=e.slideBy:c-=e.slideBy),c},b.prototype.next=function(b){a.proxy(this._overrides.to,this._core)(this.getPosition(!0),b)},b.prototype.prev=function(b){a.proxy(this._overrides.to,this._core)(this.getPosition(!1),b)},b.prototype.to=function(b,c,d){var e;d?a.proxy(this._overrides.to,this._core)(b,c):(e=this._pages.length,a.proxy(this._overrides.to,this._core)(this._pages[(b%e+e)%e].start,c))},a.fn.owlCarousel.Constructor.Plugins.Navigation=b}(window.Zepto||window.jQuery,window,document),function(a,b){"use strict";var c=function(d){this._core=d,this._hashes={},this.$element=this._core.$element,this._handlers={"initialized.owl.carousel":a.proxy(function(){"URLHash"==this._core.settings.startPosition&&a(b).trigger("hashchange.owl.navigation")},this),"prepared.owl.carousel":a.proxy(function(b){var c=a(b.content).find("[data-hash]").andSelf("[data-hash]").attr("data-hash");this._hashes[c]=b.content},this)},this._core.options=a.extend({},c.Defaults,this._core.options),this.$element.on(this._handlers),a(b).on("hashchange.owl.navigation",a.proxy(function(){var a=b.location.hash.substring(1),c=this._core.$stage.children(),d=this._hashes[a]&&c.index(this._hashes[a])||0;return a?void this._core.to(d,!1,!0):!1},this))};c.Defaults={URLhashListener:!1},c.prototype.destroy=function(){var c,d;a(b).off("hashchange.owl.navigation");for(c in this._handlers)this._core.$element.off(c,this._handlers[c]);for(d in Object.getOwnPropertyNames(this))"function"!=typeof this[d]&&(this[d]=null)},a.fn.owlCarousel.Constructor.Plugins.Hash=c}(window.Zepto||window.jQuery,window,document);
//Smooth scroll
//!function(){function e(){T.keyboardSupport&&f("keydown",a)}function t(){if(!z&&document.body){z=!0;var t=document.body,o=document.documentElement,n=window.innerHeight,a=t.scrollHeight;if(X=document.compatMode.indexOf("CSS")>=0?o:t,S=t,e(),top!=self)C=!0;else if(a>n&&(t.offsetHeight<=n||o.offsetHeight<=n)){var r=document.createElement("div");r.style.cssText="position:absolute; z-index:-10000; top:0; left:0; right:0; height:"+X.scrollHeight+"px",document.body.appendChild(r);var l,i=function(){l||(l=setTimeout(function(){H||(r.style.height="0",r.style.height=X.scrollHeight+"px",l=null)},500))};setTimeout(i,10);var c={attributes:!0,childList:!0,characterData:!1};if(x=new j(i),x.observe(t,c),X.offsetHeight<=n){var u=document.createElement("div");u.style.clear="both",t.appendChild(u)}}T.fixedBackground||H||(t.style.backgroundAttachment="scroll",o.style.backgroundAttachment="scroll")}}function o(e,t,o){if(h(t,o),1!=T.accelerationMax){var n=Date.now(),a=n-N;if(a<T.accelerationDelta){var r=(1+50/a)/2;r>1&&(r=Math.min(r,T.accelerationMax),t*=r,o*=r)}N=Date.now()}if(A.push({x:t,y:o,lastX:0>t?.99:-.99,lastY:0>o?.99:-.99,start:Date.now()}),!K){var l=e===document.body,i=function(n){for(var a=Date.now(),r=0,c=0,u=0;u<A.length;u++){var d=A[u],s=a-d.start,f=s>=T.animationTime,m=f?1:s/T.animationTime;T.pulseAlgorithm&&(m=y(m));var h=d.x*m-d.lastX>>0,p=d.y*m-d.lastY>>0;r+=h,c+=p,d.lastX+=h,d.lastY+=p,f&&(A.splice(u,1),u--)}l?window.scrollBy(r,c):(r&&(e.scrollLeft+=r),c&&(e.scrollTop+=c)),t||o||(A=[]),A.length?P(i,e,1e3/T.frameRate+1):K=!1};P(i,e,0),K=!0}}function n(e){z||t();var n=e.target,a=c(n);if(!a||e.defaultPrevented||e.ctrlKey)return!0;if(m(S,"embed")||m(n,"embed")&&/\.pdf/i.test(n.src)||m(S,"object"))return!0;var r=-e.wheelDeltaX||e.deltaX||0,i=-e.wheelDeltaY||e.deltaY||0;return B&&(e.wheelDeltaX&&w(e.wheelDeltaX,120)&&(r=-120*(e.wheelDeltaX/Math.abs(e.wheelDeltaX))),e.wheelDeltaY&&w(e.wheelDeltaY,120)&&(i=-120*(e.wheelDeltaY/Math.abs(e.wheelDeltaY)))),r||i||(i=-e.wheelDelta||0),1===e.deltaMode&&(r*=40,i*=40),!T.touchpadSupport&&p(i)?!0:(Math.abs(r)>1.2&&(r*=T.stepSize/120),Math.abs(i)>1.2&&(i*=T.stepSize/120),o(a,r,i),e.preventDefault(),void l())}function a(e){var t=e.target,n=e.ctrlKey||e.altKey||e.metaKey||e.shiftKey&&e.keyCode!==L.spacebar;document.contains(S)||(S=document.activeElement);var a=/^(textarea|select|embed|object)$/i,r=/^(button|submit|radio|checkbox|file|color|image)$/i;if(a.test(t.nodeName)||m(t,"input")&&!r.test(t.type)||m(S,"video")||b(e)||t.isContentEditable||e.defaultPrevented||n)return!0;if((m(t,"button")||m(t,"input")&&r.test(t.type))&&e.keyCode===L.spacebar)return!0;var i,u=0,d=0,s=c(S),f=s.clientHeight;switch(s==document.body&&(f=window.innerHeight),e.keyCode){case L.up:d=-T.arrowScroll;break;case L.down:d=T.arrowScroll;break;case L.spacebar:i=e.shiftKey?1:-1,d=-i*f*.9;break;case L.pageup:d=.9*-f;break;case L.pagedown:d=.9*f;break;case L.home:d=-s.scrollTop;break;case L.end:var h=s.scrollHeight-s.scrollTop-f;d=h>0?h+10:0;break;case L.left:u=-T.arrowScroll;break;case L.right:u=T.arrowScroll;break;default:return!0}o(s,u,d),e.preventDefault(),l()}function r(e){S=e.target}function l(){clearTimeout(D),D=setInterval(function(){O={}},1e3)}function i(e,t){for(var o=e.length;o--;)O[q(e[o])]=t;return t}function c(e){var t=[],o=document.body,n=X.scrollHeight;do{var a=O[q(e)];if(a)return i(t,a);if(t.push(e),n===e.scrollHeight){var r=d(X)&&d(o),l=r||s(X);if(C&&u(X)||!C&&l)return i(t,F())}else if(u(e)&&s(e))return i(t,e)}while(e=e.parentElement)}function u(e){return 
e.clientHeight+10<e.scrollHeight}function d(e){var t=getComputedStyle(e,"").getPropertyValue("overflow-y");return"hidden"!==t}function s(e){var t=getComputedStyle(e,"").getPropertyValue("overflow-y");return"scroll"===t||"auto"===t}function f(e,t){window.addEventListener(e,t,!1)}function m(e,t){return(e.nodeName||"").toLowerCase()===t.toLowerCase()}function h(e,t){e=e>0?1:-1,t=t>0?1:-1,(E.x!==e||E.y!==t)&&(E.x=e,E.y=t,A=[],N=0)}function p(e){return e?(Y.length||(Y=[e,e,e]),e=Math.abs(e),Y.push(e),Y.shift(),clearTimeout(k),k=setTimeout(function(){window.localStorage&&(localStorage.SS_deltaBuffer=Y.join(","))},1e3),!v(120)&&!v(100)):void 0}function w(e,t){return Math.floor(e/t)==e/t}function v(e){return w(Y[0],e)&&w(Y[1],e)&&w(Y[2],e)}function b(e){var t=e.target,o=!1;if(-1!=document.URL.indexOf("www.youtube.com/watch"))do if(o=t.classList&&t.classList.contains("html5-video-controls"))break;while(t=t.parentNode);return o}function g(e){var t,o,n;return e*=T.pulseScale,1>e?t=e-(1-Math.exp(-e)):(o=Math.exp(-1),e-=1,n=1-Math.exp(-e),t=o+n*(1-o)),t*T.pulseNormalize}function y(e){return e>=1?1:0>=e?0:(1==T.pulseNormalize&&(T.pulseNormalize/=g(1)),g(e))}var S,x,D,k,M={frameRate:150,animationTime:400,stepSize:120,pulseAlgorithm:!0,pulseScale:4,pulseNormalize:1,accelerationDelta:20,accelerationMax:1,keyboardSupport:!0,arrowScroll:50,touchpadSupport:!0,fixedBackground:!0,excluded:""},T=M,H=!1,C=!1,E={x:0,y:0},z=!1,X=document.documentElement,Y=[],B=/^Mac/.test(navigator.platform),L={left:37,up:38,right:39,down:40,spacebar:32,pageup:33,pagedown:34,end:35,home:36},T=M,A=[],K=!1,N=Date.now(),q=function(){var e=0;return function(t){return t.uniqueID||(t.uniqueID=e++)}}(),O={};window.localStorage&&localStorage.SS_deltaBuffer&&(Y=localStorage.SS_deltaBuffer.split(","));var R,P=function(){return window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||function(e,t,o){window.setTimeout(e,o||1e3/60)}}(),j=window.MutationObserver||window.WebKitMutationObserver||window.MozMutationObserver,F=function(){var e;return function(){if(!e){var t=document.createElement("div");t.style.cssText="height:10000px;width:1px;",document.body.appendChild(t);var o=document.body.scrollTop;document.documentElement.scrollTop;window.scrollBy(0,1),e=document.body.scrollTop!=o?document.body:document.documentElement,window.scrollBy(0,-1),document.body.removeChild(t)}return e}}();"onwheel"in document.createElement("div")?R="wheel":"onmousewheel"in document.createElement("div")&&(R="mousewheel"),R&&(f(R,n),f("mousedown",r),f("load",t))}();
//LocalScroll
/**
* Copyright (c) 2007-2014 Ariel Flesler - aflesler<a>gmail<d>com | http://flesler.blogspot.com
* Licensed under MIT
* @author Ariel Flesler
* @version 1.3.5
*/
;(function(a){if(typeof define==='function'&&define.amd){define(['jquery'],a)}else{a(jQuery)}}(function($){var g=location.href.replace(/#.*/,'');var h=$.localScroll=function(a){$('body').localScroll(a)};h.defaults={duration:1000,axis:'y',event:'click',stop:true,target:window};$.fn.localScroll=function(a){a=$.extend({},h.defaults,a);if(a.hash&&location.hash){if(a.target)window.scrollTo(0,0);scroll(0,location,a)}return a.lazy?this.on(a.event,'a,area',function(e){if(filter.call(this)){scroll(e,this,a)}}):this.find('a,area').filter(filter).bind(a.event,function(e){scroll(e,this,a)}).end().end();function filter(){return!!this.href&&!!this.hash&&this.href.replace(this.hash,'')==g&&(!a.filter||$(this).is(a.filter))}};h.hash=function(){};function scroll(e,a,b){var c=a.hash.slice(1),elem=document.getElementById(c)||document.getElementsByName(c)[0];if(!elem)return;if(e)e.preventDefault();var d=$(b.target);if(b.lock&&d.is(':animated')||b.onBefore&&b.onBefore(e,elem,d)===false)return;if(b.stop)d._scrollable().stop(true);if(b.hash){var f=elem.id===c?'id':'name',$a=$('<a> </a>').attr(f,c).css({position:'absolute',top:$(window).scrollTop(),left:$(window).scrollLeft()});elem[f]='';$('body').prepend($a);location.hash=a.hash;$a.remove();elem[f]=c}d.scrollTo(elem,b).trigger('notify.serialScroll',[elem])};return h}));
/**
* Copyright (c) 2007-2014 Ariel Flesler - aflesler<a>gmail<d>com | http://flesler.blogspot.com
* Licensed under MIT
* @author Ariel Flesler
* @version 1.4.12
*/
;(function(a){if(typeof define==='function'&&define.amd){define(['jquery'],a)}else{a(jQuery)}}(function($){var j=$.scrollTo=function(a,b,c){return $(window).scrollTo(a,b,c)};j.defaults={axis:'xy',duration:parseFloat($.fn.jquery)>=1.3?0:1,limit:true};j.window=function(a){return $(window)._scrollable()};$.fn._scrollable=function(){return this.map(function(){var a=this,isWin=!a.nodeName||$.inArray(a.nodeName.toLowerCase(),['iframe','#document','html','body'])!=-1;if(!isWin)return a;var b=(a.contentWindow||a).document||a.ownerDocument||a;return/webkit/i.test(navigator.userAgent)||b.compatMode=='BackCompat'?b.body:b.documentElement})};$.fn.scrollTo=function(f,g,h){if(typeof g=='object'){h=g;g=0}if(typeof h=='function')h={onAfter:h};if(f=='max')f=9e9;h=$.extend({},j.defaults,h);g=g||h.duration;h.queue=h.queue&&h.axis.length>1;if(h.queue)g/=2;h.offset=both(h.offset);h.over=both(h.over);return this._scrollable().each(function(){if(f==null)return;var d=this,$elem=$(d),targ=f,toff,attr={},win=$elem.is('html,body');switch(typeof targ){case'number':case'string':if(/^([+-]=?)?\d+(\.\d+)?(px|%)?$/.test(targ)){targ=both(targ);break}targ=win?$(targ):$(targ,this);if(!targ.length)return;case'object':if(targ.is||targ.style)toff=(targ=$(targ)).offset()}var e=$.isFunction(h.offset)&&h.offset(d,targ)||h.offset;$.each(h.axis.split(''),function(i,a){var b=a=='x'?'Left':'Top',pos=b.toLowerCase(),key='scroll'+b,old=d[key],max=j.max(d,a);if(toff){attr[key]=toff[pos]+(win?0:old-$elem.offset()[pos]);if(h.margin){attr[key]-=parseInt(targ.css('margin'+b))||0;attr[key]-=parseInt(targ.css('border'+b+'Width'))||0}attr[key]+=e[pos]||0;if(h.over[pos])attr[key]+=targ[a=='x'?'width':'height']()*h.over[pos]}else{var c=targ[pos];attr[key]=c.slice&&c.slice(-1)=='%'?parseFloat(c)/100*max:c}if(h.limit&&/^\d+$/.test(attr[key]))attr[key]=attr[key]<=0?0:Math.min(attr[key],max);if(!i&&h.queue){if(old!=attr[key])animate(h.onAfterFirst);delete attr[key]}});animate(h.onAfter);function animate(a){$elem.animate(attr,g,h.easing,a&&function(){a.call(this,targ,h)})}}).end()};j.max=function(a,b){var c=b=='x'?'Width':'Height',scroll='scroll'+c;if(!$(a).is('html,body'))return a[scroll]-$(a)[c.toLowerCase()]();var d='client'+c,html=a.ownerDocument.documentElement,body=a.ownerDocument.body;return Math.max(html[scroll],body[scroll])-Math.min(html[d],body[d])};function both(a){return $.isFunction(a)||typeof a=='object'?a:{top:a,left:a}};return j}));
/*!
* jquery.counterup.js 1.0
*
* Copyright 2013, Benjamin Intal http://gambit.ph @bfintal
* Released under the GPL v2 License
*
* Date: Nov 26, 2013
*/(function(e){"use strict";e.fn.counterUp=function(t){var n=e.extend({time:400,delay:10},t);return this.each(function(){var t=e(this),r=n,i=function(){var e=[],n=r.time/r.delay,i=t.text(),s=/[0-9]+,[0-9]+/.test(i);i=i.replace(/,/g,"");var o=/^[0-9]+$/.test(i),u=/^[0-9]+\.[0-9]+$/.test(i),a=u?(i.split(".")[1]||[]).length:0;for(var f=n;f>=1;f--){var l=parseInt(i/n*f);u&&(l=parseFloat(i/n*f).toFixed(a));if(s)while(/(\d+)(\d{3})/.test(l.toString()))l=l.toString().replace(/(\d+)(\d{3})/,"$1,$2");e.unshift(l)}t.data("counterup-nums",e);t.text("0");var c=function(){t.text(t.data("counterup-nums").shift());if(t.data("counterup-nums").length)setTimeout(t.data("counterup-func"),r.delay);else{delete t.data("counterup-nums");t.data("counterup-nums",null);t.data("counterup-func",null)}};t.data("counterup-func",c);setTimeout(t.data("counterup-func"),r.delay)};t.waypoint(i,{offset:"100%",triggerOnce:!0})})}})(jQuery);
/*! WOW - v1.0.2 - 2014-09-24
* Copyright (c) 2014 Matthieu Aussaguel; Licensed MIT */
(function(){var a,b,c,d,e,f=function(a,b){return function(){return a.apply(b,arguments)}},g=[].indexOf||function(a){for(var b=0,c=this.length;c>b;b++)if(b in this&&this[b]===a)return b;return-1};b=function(){function a(){}return a.prototype.extend=function(a,b){var c,d;for(c in b)d=b[c],null==a[c]&&(a[c]=d);return a},a.prototype.isMobile=function(a){return/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(a)},a.prototype.addEvent=function(a,b,c){return null!=a.addEventListener?a.addEventListener(b,c,!1):null!=a.attachEvent?a.attachEvent("on"+b,c):a[b]=c},a.prototype.removeEvent=function(a,b,c){return null!=a.removeEventListener?a.removeEventListener(b,c,!1):null!=a.detachEvent?a.detachEvent("on"+b,c):delete a[b]},a.prototype.innerHeight=function(){return"innerHeight"in window?window.innerHeight:document.documentElement.clientHeight},a}(),c=this.WeakMap||this.MozWeakMap||(c=function(){function a(){this.keys=[],this.values=[]}return a.prototype.get=function(a){var b,c,d,e,f;for(f=this.keys,b=d=0,e=f.length;e>d;b=++d)if(c=f[b],c===a)return this.values[b]},a.prototype.set=function(a,b){var c,d,e,f,g;for(g=this.keys,c=e=0,f=g.length;f>e;c=++e)if(d=g[c],d===a)return void(this.values[c]=b);return this.keys.push(a),this.values.push(b)},a}()),a=this.MutationObserver||this.WebkitMutationObserver||this.MozMutationObserver||(a=function(){function a(){"undefined"!=typeof console&&null!==console&&console.warn("MutationObserver is not supported by your browser."),"undefined"!=typeof console&&null!==console&&console.warn("WOW.js cannot detect dom mutations, please call .sync() after loading new content.")}return a.notSupported=!0,a.prototype.observe=function(){},a}()),d=this.getComputedStyle||function(a){return this.getPropertyValue=function(b){var c;return"float"===b&&(b="styleFloat"),e.test(b)&&b.replace(e,function(a,b){return b.toUpperCase()}),(null!=(c=a.currentStyle)?c[b]:void 0)||null},this},e=/(\-([a-z]){1})/g,this.WOW=function(){function e(a){null==a&&(a={}),this.scrollCallback=f(this.scrollCallback,this),this.scrollHandler=f(this.scrollHandler,this),this.start=f(this.start,this),this.scrolled=!0,this.config=this.util().extend(a,this.defaults),this.animationNameCache=new c}return e.prototype.defaults={boxClass:"wow",animateClass:"animated",offset:0,mobile:!0,live:!0},e.prototype.init=function(){var a;return this.element=window.document.documentElement,"interactive"===(a=document.readyState)||"complete"===a?this.start():this.util().addEvent(document,"DOMContentLoaded",this.start),this.finished=[]},e.prototype.start=function(){var b,c,d,e;if(this.stopped=!1,this.boxes=function(){var a,c,d,e;for(d=this.element.querySelectorAll("."+this.config.boxClass),e=[],a=0,c=d.length;c>a;a++)b=d[a],e.push(b);return e}.call(this),this.all=function(){var a,c,d,e;for(d=this.boxes,e=[],a=0,c=d.length;c>a;a++)b=d[a],e.push(b);return e}.call(this),this.boxes.length)if(this.disabled())this.resetStyle();else{for(e=this.boxes,c=0,d=e.length;d>c;c++)b=e[c],this.applyStyle(b,!0);this.util().addEvent(window,"scroll",this.scrollHandler),this.util().addEvent(window,"resize",this.scrollHandler),this.interval=setInterval(this.scrollCallback,50)}return this.config.live?new a(function(a){return function(b){var c,d,e,f,g;for(g=[],e=0,f=b.length;f>e;e++)d=b[e],g.push(function(){var a,b,e,f;for(e=d.addedNodes||[],f=[],a=0,b=e.length;b>a;a++)c=e[a],f.push(this.doSync(c));return f}.call(a));return g}}(this)).observe(document.body,{childList:!0,subtree:!0}):void 0},e.prototype.stop=function(){return 
this.stopped=!0,this.util().removeEvent(window,"scroll",this.scrollHandler),this.util().removeEvent(window,"resize",this.scrollHandler),null!=this.interval?clearInterval(this.interval):void 0},e.prototype.sync=function(){return a.notSupported?this.doSync(this.element):void 0},e.prototype.doSync=function(a){var b,c,d,e,f;if(null==a&&(a=this.element),1===a.nodeType){for(a=a.parentNode||a,e=a.querySelectorAll("."+this.config.boxClass),f=[],c=0,d=e.length;d>c;c++)b=e[c],g.call(this.all,b)<0?(this.boxes.push(b),this.all.push(b),this.stopped||this.disabled()?this.resetStyle():this.applyStyle(b,!0),f.push(this.scrolled=!0)):f.push(void 0);return f}},e.prototype.show=function(a){return this.applyStyle(a),a.className=""+a.className+" "+this.config.animateClass},e.prototype.applyStyle=function(a,b){var c,d,e;return d=a.getAttribute("data-wow-duration"),c=a.getAttribute("data-wow-delay"),e=a.getAttribute("data-wow-iteration"),this.animate(function(f){return function(){return f.customStyle(a,b,d,c,e)}}(this))},e.prototype.animate=function(){return"requestAnimationFrame"in window?function(a){return window.requestAnimationFrame(a)}:function(a){return a()}}(),e.prototype.resetStyle=function(){var a,b,c,d,e;for(d=this.boxes,e=[],b=0,c=d.length;c>b;b++)a=d[b],e.push(a.style.visibility="visible");return e},e.prototype.customStyle=function(a,b,c,d,e){return b&&this.cacheAnimationName(a),a.style.visibility=b?"hidden":"visible",c&&this.vendorSet(a.style,{animationDuration:c}),d&&this.vendorSet(a.style,{animationDelay:d}),e&&this.vendorSet(a.style,{animationIterationCount:e}),this.vendorSet(a.style,{animationName:b?"none":this.cachedAnimationName(a)}),a},e.prototype.vendors=["moz","webkit"],e.prototype.vendorSet=function(a,b){var c,d,e,f;f=[];for(c in b)d=b[c],a[""+c]=d,f.push(function(){var b,f,g,h;for(g=this.vendors,h=[],b=0,f=g.length;f>b;b++)e=g[b],h.push(a[""+e+c.charAt(0).toUpperCase()+c.substr(1)]=d);return h}.call(this));return f},e.prototype.vendorCSS=function(a,b){var c,e,f,g,h,i;for(e=d(a),c=e.getPropertyCSSValue(b),i=this.vendors,g=0,h=i.length;h>g;g++)f=i[g],c=c||e.getPropertyCSSValue("-"+f+"-"+b);return c},e.prototype.animationName=function(a){var b;try{b=this.vendorCSS(a,"animation-name").cssText}catch(c){b=d(a).getPropertyValue("animation-name")}return"none"===b?"":b},e.prototype.cacheAnimationName=function(a){return this.animationNameCache.set(a,this.animationName(a))},e.prototype.cachedAnimationName=function(a){return this.animationNameCache.get(a)},e.prototype.scrollHandler=function(){return this.scrolled=!0},e.prototype.scrollCallback=function(){var a;return!this.scrolled||(this.scrolled=!1,this.boxes=function(){var b,c,d,e;for(d=this.boxes,e=[],b=0,c=d.length;c>b;b++)a=d[b],a&&(this.isVisible(a)?this.show(a):e.push(a));return e}.call(this),this.boxes.length||this.config.live)?void 0:this.stop()},e.prototype.offsetTop=function(a){for(var b;void 0===a.offsetTop;)a=a.parentNode;for(b=a.offsetTop;a=a.offsetParent;)b+=a.offsetTop;return b},e.prototype.isVisible=function(a){var b,c,d,e,f;return c=a.getAttribute("data-wow-offset")||this.config.offset,f=window.pageYOffset,e=f+Math.min(this.element.clientHeight,this.util().innerHeight())-c,d=this.offsetTop(a),b=d+a.clientHeight,e>=d&&b>=f},e.prototype.util=function(){return null!=this._util?this._util:this._util=new b},e.prototype.disabled=function(){return!this.config.mobile&&this.util().isMobile(navigator.userAgent)},e}()}).call(this); | PypiClean |
/typegraph_std-0.0.1.tar.gz/typegraph_std-0.0.1/typegraph_std/google/groupssettings.py | from typegraph import t
from box import Box
from typegraph.importers.base.importer import Import
from typegraph.runtimes.http import HTTPRuntime
def import_groupssettings() -> Import:
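    """Build an Import for the Google Groups Settings API, exposing the generated GroupsIn/GroupsOut types and the groupsGet, groupsPatch and groupsUpdate functions over an HTTPRuntime."""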
groupssettings = HTTPRuntime("https://www.googleapis.com/")
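    # Public type names are mapped to unique internal aliases (applied via
    # .named(...)) so generated type names stay unambiguous across importers.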
renames = {
"ErrorResponse": "_groupssettings_1_ErrorResponse",
"GroupsIn": "_groupssettings_2_GroupsIn",
"GroupsOut": "_groupssettings_3_GroupsOut",
}
types = {}
types["ErrorResponse"] = t.struct(
{"code": t.integer(), "message": t.string(), "status": t.string()}
).named(renames["ErrorResponse"])
types["GroupsIn"] = t.struct(
{
"whoCanModifyMembers": t.string().optional(),
"whoCanPostAnnouncements": t.string().optional(),
"replyTo": t.string().optional(),
"archiveOnly": t.string().optional(),
"whoCanBanUsers": t.string().optional(),
"whoCanMoveTopicsIn": t.string().optional(),
"whoCanUnmarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanDiscoverGroup": t.string().optional(),
"whoCanAssignTopics": t.string().optional(),
"whoCanContactOwner": t.string().optional(),
"customFooterText": t.string().optional(),
"favoriteRepliesOnTop": t.string().optional(),
"description": t.string().optional(),
"whoCanTakeTopics": t.string().optional(),
"default_sender": t.string().optional(),
"sendMessageDenyNotification": t.string().optional(),
"whoCanDeleteTopics": t.string().optional(),
"customRolesEnabledForSettingsToBeMerged": t.string().optional(),
"customReplyTo": t.string().optional(),
"whoCanMoveTopicsOut": t.string().optional(),
"whoCanMarkFavoriteReplyOnOwnTopic": t.string().optional(),
"whoCanLockTopics": t.string().optional(),
"whoCanModifyTagsAndCategories": t.string().optional(),
"whoCanMarkDuplicate": t.string().optional(),
"whoCanMarkNoResponseNeeded": t.string().optional(),
"whoCanUnassignTopic": t.string().optional(),
"isArchived": t.string().optional(),
"whoCanMarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanAdd": t.string().optional(),
"kind": t.string().optional(),
"membersCanPostAsTheGroup": t.string().optional(),
"whoCanEnterFreeFormTags": t.string().optional(),
"whoCanModerateContent": t.string().optional(),
"whoCanApproveMembers": t.string().optional(),
"showInGroupDirectory": t.string().optional(),
"primaryLanguage": t.string().optional(),
"allowGoogleCommunication": t.string().optional(),
"allowWebPosting": t.string().optional(),
"whoCanAddReferences": t.string().optional(),
"maxMessageBytes": t.integer().optional(),
"includeCustomFooter": t.string().optional(),
"whoCanViewGroup": t.string().optional(),
"name": t.string().optional(),
"whoCanModerateMembers": t.string().optional(),
"whoCanHideAbuse": t.string().optional(),
"includeInGlobalAddressList": t.string().optional(),
"whoCanDeleteAnyPost": t.string().optional(),
"spamModerationLevel": t.string().optional(),
"whoCanMakeTopicsSticky": t.string().optional(),
"whoCanInvite": t.string().optional(),
"email": t.string().optional(),
"whoCanJoin": t.string().optional(),
"messageDisplayFont": t.string().optional(),
"messageModerationLevel": t.string().optional(),
"whoCanPostMessage": t.string().optional(),
"whoCanLeaveGroup": t.string().optional(),
"whoCanApproveMessages": t.string().optional(),
"enableCollaborativeInbox": t.string().optional(),
"whoCanViewMembership": t.string().optional(),
"whoCanAssistContent": t.string().optional(),
"allowExternalMembers": t.string().optional(),
"defaultMessageDenyNotificationText": t.string().optional(),
}
).named(renames["GroupsIn"])
types["GroupsOut"] = t.struct(
{
"whoCanModifyMembers": t.string().optional(),
"whoCanPostAnnouncements": t.string().optional(),
"replyTo": t.string().optional(),
"archiveOnly": t.string().optional(),
"whoCanBanUsers": t.string().optional(),
"whoCanMoveTopicsIn": t.string().optional(),
"whoCanUnmarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanDiscoverGroup": t.string().optional(),
"whoCanAssignTopics": t.string().optional(),
"whoCanContactOwner": t.string().optional(),
"customFooterText": t.string().optional(),
"favoriteRepliesOnTop": t.string().optional(),
"description": t.string().optional(),
"whoCanTakeTopics": t.string().optional(),
"default_sender": t.string().optional(),
"sendMessageDenyNotification": t.string().optional(),
"whoCanDeleteTopics": t.string().optional(),
"customRolesEnabledForSettingsToBeMerged": t.string().optional(),
"customReplyTo": t.string().optional(),
"whoCanMoveTopicsOut": t.string().optional(),
"whoCanMarkFavoriteReplyOnOwnTopic": t.string().optional(),
"whoCanLockTopics": t.string().optional(),
"whoCanModifyTagsAndCategories": t.string().optional(),
"whoCanMarkDuplicate": t.string().optional(),
"whoCanMarkNoResponseNeeded": t.string().optional(),
"whoCanUnassignTopic": t.string().optional(),
"isArchived": t.string().optional(),
"whoCanMarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanAdd": t.string().optional(),
"kind": t.string().optional(),
"membersCanPostAsTheGroup": t.string().optional(),
"whoCanEnterFreeFormTags": t.string().optional(),
"whoCanModerateContent": t.string().optional(),
"whoCanApproveMembers": t.string().optional(),
"showInGroupDirectory": t.string().optional(),
"primaryLanguage": t.string().optional(),
"allowGoogleCommunication": t.string().optional(),
"allowWebPosting": t.string().optional(),
"whoCanAddReferences": t.string().optional(),
"maxMessageBytes": t.integer().optional(),
"includeCustomFooter": t.string().optional(),
"whoCanViewGroup": t.string().optional(),
"name": t.string().optional(),
"whoCanModerateMembers": t.string().optional(),
"whoCanHideAbuse": t.string().optional(),
"includeInGlobalAddressList": t.string().optional(),
"whoCanDeleteAnyPost": t.string().optional(),
"spamModerationLevel": t.string().optional(),
"whoCanMakeTopicsSticky": t.string().optional(),
"whoCanInvite": t.string().optional(),
"email": t.string().optional(),
"whoCanJoin": t.string().optional(),
"messageDisplayFont": t.string().optional(),
"messageModerationLevel": t.string().optional(),
"whoCanPostMessage": t.string().optional(),
"whoCanLeaveGroup": t.string().optional(),
"whoCanApproveMessages": t.string().optional(),
"enableCollaborativeInbox": t.string().optional(),
"whoCanViewMembership": t.string().optional(),
"whoCanAssistContent": t.string().optional(),
"allowExternalMembers": t.string().optional(),
"defaultMessageDenyNotificationText": t.string().optional(),
"error": t.proxy(renames["ErrorResponse"]).optional(),
}
).named(renames["GroupsOut"])
functions = {}
functions["groupsPatch"] = groupssettings.put(
"{groupUniqueId}",
t.struct(
{
"groupUniqueId": t.string().optional(),
"whoCanModifyMembers": t.string().optional(),
"whoCanPostAnnouncements": t.string().optional(),
"replyTo": t.string().optional(),
"archiveOnly": t.string().optional(),
"whoCanBanUsers": t.string().optional(),
"whoCanMoveTopicsIn": t.string().optional(),
"whoCanUnmarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanDiscoverGroup": t.string().optional(),
"whoCanAssignTopics": t.string().optional(),
"whoCanContactOwner": t.string().optional(),
"customFooterText": t.string().optional(),
"favoriteRepliesOnTop": t.string().optional(),
"description": t.string().optional(),
"whoCanTakeTopics": t.string().optional(),
"default_sender": t.string().optional(),
"sendMessageDenyNotification": t.string().optional(),
"whoCanDeleteTopics": t.string().optional(),
"customRolesEnabledForSettingsToBeMerged": t.string().optional(),
"customReplyTo": t.string().optional(),
"whoCanMoveTopicsOut": t.string().optional(),
"whoCanMarkFavoriteReplyOnOwnTopic": t.string().optional(),
"whoCanLockTopics": t.string().optional(),
"whoCanModifyTagsAndCategories": t.string().optional(),
"whoCanMarkDuplicate": t.string().optional(),
"whoCanMarkNoResponseNeeded": t.string().optional(),
"whoCanUnassignTopic": t.string().optional(),
"isArchived": t.string().optional(),
"whoCanMarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanAdd": t.string().optional(),
"kind": t.string().optional(),
"membersCanPostAsTheGroup": t.string().optional(),
"whoCanEnterFreeFormTags": t.string().optional(),
"whoCanModerateContent": t.string().optional(),
"whoCanApproveMembers": t.string().optional(),
"showInGroupDirectory": t.string().optional(),
"primaryLanguage": t.string().optional(),
"allowGoogleCommunication": t.string().optional(),
"allowWebPosting": t.string().optional(),
"whoCanAddReferences": t.string().optional(),
"maxMessageBytes": t.integer().optional(),
"includeCustomFooter": t.string().optional(),
"whoCanViewGroup": t.string().optional(),
"name": t.string().optional(),
"whoCanModerateMembers": t.string().optional(),
"whoCanHideAbuse": t.string().optional(),
"includeInGlobalAddressList": t.string().optional(),
"whoCanDeleteAnyPost": t.string().optional(),
"spamModerationLevel": t.string().optional(),
"whoCanMakeTopicsSticky": t.string().optional(),
"whoCanInvite": t.string().optional(),
"email": t.string().optional(),
"whoCanJoin": t.string().optional(),
"messageDisplayFont": t.string().optional(),
"messageModerationLevel": t.string().optional(),
"whoCanPostMessage": t.string().optional(),
"whoCanLeaveGroup": t.string().optional(),
"whoCanApproveMessages": t.string().optional(),
"enableCollaborativeInbox": t.string().optional(),
"whoCanViewMembership": t.string().optional(),
"whoCanAssistContent": t.string().optional(),
"allowExternalMembers": t.string().optional(),
"defaultMessageDenyNotificationText": t.string().optional(),
"auth": t.string().optional(),
}
),
t.proxy(renames["GroupsOut"]),
auth_token_field="auth",
content_type="application/json",
)
functions["groupsGet"] = groupssettings.put(
"{groupUniqueId}",
t.struct(
{
"groupUniqueId": t.string().optional(),
"whoCanModifyMembers": t.string().optional(),
"whoCanPostAnnouncements": t.string().optional(),
"replyTo": t.string().optional(),
"archiveOnly": t.string().optional(),
"whoCanBanUsers": t.string().optional(),
"whoCanMoveTopicsIn": t.string().optional(),
"whoCanUnmarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanDiscoverGroup": t.string().optional(),
"whoCanAssignTopics": t.string().optional(),
"whoCanContactOwner": t.string().optional(),
"customFooterText": t.string().optional(),
"favoriteRepliesOnTop": t.string().optional(),
"description": t.string().optional(),
"whoCanTakeTopics": t.string().optional(),
"default_sender": t.string().optional(),
"sendMessageDenyNotification": t.string().optional(),
"whoCanDeleteTopics": t.string().optional(),
"customRolesEnabledForSettingsToBeMerged": t.string().optional(),
"customReplyTo": t.string().optional(),
"whoCanMoveTopicsOut": t.string().optional(),
"whoCanMarkFavoriteReplyOnOwnTopic": t.string().optional(),
"whoCanLockTopics": t.string().optional(),
"whoCanModifyTagsAndCategories": t.string().optional(),
"whoCanMarkDuplicate": t.string().optional(),
"whoCanMarkNoResponseNeeded": t.string().optional(),
"whoCanUnassignTopic": t.string().optional(),
"isArchived": t.string().optional(),
"whoCanMarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanAdd": t.string().optional(),
"kind": t.string().optional(),
"membersCanPostAsTheGroup": t.string().optional(),
"whoCanEnterFreeFormTags": t.string().optional(),
"whoCanModerateContent": t.string().optional(),
"whoCanApproveMembers": t.string().optional(),
"showInGroupDirectory": t.string().optional(),
"primaryLanguage": t.string().optional(),
"allowGoogleCommunication": t.string().optional(),
"allowWebPosting": t.string().optional(),
"whoCanAddReferences": t.string().optional(),
"maxMessageBytes": t.integer().optional(),
"includeCustomFooter": t.string().optional(),
"whoCanViewGroup": t.string().optional(),
"name": t.string().optional(),
"whoCanModerateMembers": t.string().optional(),
"whoCanHideAbuse": t.string().optional(),
"includeInGlobalAddressList": t.string().optional(),
"whoCanDeleteAnyPost": t.string().optional(),
"spamModerationLevel": t.string().optional(),
"whoCanMakeTopicsSticky": t.string().optional(),
"whoCanInvite": t.string().optional(),
"email": t.string().optional(),
"whoCanJoin": t.string().optional(),
"messageDisplayFont": t.string().optional(),
"messageModerationLevel": t.string().optional(),
"whoCanPostMessage": t.string().optional(),
"whoCanLeaveGroup": t.string().optional(),
"whoCanApproveMessages": t.string().optional(),
"enableCollaborativeInbox": t.string().optional(),
"whoCanViewMembership": t.string().optional(),
"whoCanAssistContent": t.string().optional(),
"allowExternalMembers": t.string().optional(),
"defaultMessageDenyNotificationText": t.string().optional(),
"auth": t.string().optional(),
}
),
t.proxy(renames["GroupsOut"]),
auth_token_field="auth",
content_type="application/json",
)
functions["groupsUpdate"] = groupssettings.put(
"{groupUniqueId}",
t.struct(
{
"groupUniqueId": t.string().optional(),
"whoCanModifyMembers": t.string().optional(),
"whoCanPostAnnouncements": t.string().optional(),
"replyTo": t.string().optional(),
"archiveOnly": t.string().optional(),
"whoCanBanUsers": t.string().optional(),
"whoCanMoveTopicsIn": t.string().optional(),
"whoCanUnmarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanDiscoverGroup": t.string().optional(),
"whoCanAssignTopics": t.string().optional(),
"whoCanContactOwner": t.string().optional(),
"customFooterText": t.string().optional(),
"favoriteRepliesOnTop": t.string().optional(),
"description": t.string().optional(),
"whoCanTakeTopics": t.string().optional(),
"default_sender": t.string().optional(),
"sendMessageDenyNotification": t.string().optional(),
"whoCanDeleteTopics": t.string().optional(),
"customRolesEnabledForSettingsToBeMerged": t.string().optional(),
"customReplyTo": t.string().optional(),
"whoCanMoveTopicsOut": t.string().optional(),
"whoCanMarkFavoriteReplyOnOwnTopic": t.string().optional(),
"whoCanLockTopics": t.string().optional(),
"whoCanModifyTagsAndCategories": t.string().optional(),
"whoCanMarkDuplicate": t.string().optional(),
"whoCanMarkNoResponseNeeded": t.string().optional(),
"whoCanUnassignTopic": t.string().optional(),
"isArchived": t.string().optional(),
"whoCanMarkFavoriteReplyOnAnyTopic": t.string().optional(),
"whoCanAdd": t.string().optional(),
"kind": t.string().optional(),
"membersCanPostAsTheGroup": t.string().optional(),
"whoCanEnterFreeFormTags": t.string().optional(),
"whoCanModerateContent": t.string().optional(),
"whoCanApproveMembers": t.string().optional(),
"showInGroupDirectory": t.string().optional(),
"primaryLanguage": t.string().optional(),
"allowGoogleCommunication": t.string().optional(),
"allowWebPosting": t.string().optional(),
"whoCanAddReferences": t.string().optional(),
"maxMessageBytes": t.integer().optional(),
"includeCustomFooter": t.string().optional(),
"whoCanViewGroup": t.string().optional(),
"name": t.string().optional(),
"whoCanModerateMembers": t.string().optional(),
"whoCanHideAbuse": t.string().optional(),
"includeInGlobalAddressList": t.string().optional(),
"whoCanDeleteAnyPost": t.string().optional(),
"spamModerationLevel": t.string().optional(),
"whoCanMakeTopicsSticky": t.string().optional(),
"whoCanInvite": t.string().optional(),
"email": t.string().optional(),
"whoCanJoin": t.string().optional(),
"messageDisplayFont": t.string().optional(),
"messageModerationLevel": t.string().optional(),
"whoCanPostMessage": t.string().optional(),
"whoCanLeaveGroup": t.string().optional(),
"whoCanApproveMessages": t.string().optional(),
"enableCollaborativeInbox": t.string().optional(),
"whoCanViewMembership": t.string().optional(),
"whoCanAssistContent": t.string().optional(),
"allowExternalMembers": t.string().optional(),
"defaultMessageDenyNotificationText": t.string().optional(),
"auth": t.string().optional(),
}
),
t.proxy(renames["GroupsOut"]),
auth_token_field="auth",
content_type="application/json",
)
return Import(
importer="groupssettings",
renames=renames,
types=Box(types),
functions=Box(functions),
) | PypiClean |
/jdcloud_sdk-1.6.243.tar.gz/jdcloud_sdk-1.6.243/jdcloud_sdk/services/waf/models/LbConf.py |
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class LbConf(object):
def __init__(self, protocols, lbType, sslProtocols=None, rsConfig=None, pureClient=None, sslModify=None, httpsRedirect=None, rsOnlySupportHttp=None, httpsCertUpdateStatus=None, gmHttpsCertUpdateStatus=None, gmCertSupport=None, httpStatus=None, httpVersion=None, enableKeepalive=None, suiteLevel=None, userSuiteLevel=None, enableUnderscores=None, maxBodySize=None, disableHealthCheck=None, proxyConnectTimeout=None):
"""
        :param protocols: Protocols in use, e.g. ["http","https"]
        :param sslProtocols: (Optional) SSL protocols, eg:["TLSv1","TLSv1.1","TLSv1.2","SSLv2","SSLv3","TLSv1.3"]
        :param lbType: Load-balancing algorithm, eg:"rr","ip_hash"
        :param rsConfig: (Optional) Website back-to-origin configuration
        :param pureClient: (Optional) Whether a front proxy is used: 0 = not used, 1 = used
        :param sslModify: (Optional) Whether the SSL configuration may be modified: 0 = not modifiable, 1 = modifiable
        :param httpsRedirect: (Optional) 1 = redirect, 0 = no redirect
        :param rsOnlySupportHttp: (Optional) Whether the user's server only supports HTTP back-to-origin: 1 = yes, 0 = no
        :param httpsCertUpdateStatus: (Optional) HTTPS certificate status, not a configuration item: -10 = not bound, 0 = bound
        :param gmHttpsCertUpdateStatus: (Optional) GM (SM cryptography) HTTPS certificate status, not a configuration item: -10 = not bound, 0 = bound
        :param gmCertSupport: (Optional) Whether GM (SM cryptography) certificates are supported
        :param httpStatus: (Optional) Protocol status, not a configuration item: 0 = normal, -10 = abnormal
        :param httpVersion: (Optional) HTTP version supported on the WAF side: "" defaults to HTTP/1.1, "http2" means HTTP/2
        :param enableKeepalive: (Optional) Whether back-to-origin connections use keep-alive: 0 = no
        :param suiteLevel: (Optional) Cipher-suite level: 0 = medium (default), 1 = high, 2 = low, 3 = custom
        :param userSuiteLevel: (Optional) Custom cipher suites
        :param enableUnderscores: (Optional) Whether underscores are allowed in request headers: 1 = yes, 0 = no
        :param maxBodySize: (Optional) Maximum request body size, default 300M; the unit may be G/K
        :param disableHealthCheck: (Optional) Disable passive health checks, default 0 = no
        :param proxyConnectTimeout: (Optional) Connection timeout, 3-60s
"""
self.protocols = protocols
self.sslProtocols = sslProtocols
self.lbType = lbType
self.rsConfig = rsConfig
self.pureClient = pureClient
self.sslModify = sslModify
self.httpsRedirect = httpsRedirect
self.rsOnlySupportHttp = rsOnlySupportHttp
self.httpsCertUpdateStatus = httpsCertUpdateStatus
self.gmHttpsCertUpdateStatus = gmHttpsCertUpdateStatus
self.gmCertSupport = gmCertSupport
self.httpStatus = httpStatus
self.httpVersion = httpVersion
self.enableKeepalive = enableKeepalive
self.suiteLevel = suiteLevel
self.userSuiteLevel = userSuiteLevel
self.enableUnderscores = enableUnderscores
self.maxBodySize = maxBodySize
self.disableHealthCheck = disableHealthCheck
self.proxyConnectTimeout = proxyConnectTimeout | PypiClean |
/DeepPhysX-22.6.tar.gz/DeepPhysX-22.6/src/Visualizer/VedoVisualizer.py | from typing import List, Any, Dict, Tuple, Union
from os.path import join as osPathJoin
from os import makedirs
from vedo import Plotter, Text2D, show
from DeepPhysX.Core.Visualizer.VedoObjects import VedoObjects, ObjectDescription
Viewers = Dict[int, Dict[str, Union[str, List[Any], bool, Plotter]]]
class VedoVisualizer:
"""
| Visualizer class to display VisualInstances in a 2D/3D environment.
    | VedoVisualizer uses the vedo library to display 3D models.
    | Objects are given in the init_view function.
    | Updates to these objects are achieved with the update_visualizer and update_instances functions.
"""
def __init__(self):
self.scene: Dict[int, VedoObjects] = {}
self.default_viewer_id: int = 9
self.viewers: Viewers = {self.default_viewer_id: {'title': f"Vedo_axes_{self.default_viewer_id}",
'instances': [],
'sharecam': True,
'interactive': True}}
        self.objects_rendered_in: Dict[str, Tuple[int, int]] = {}
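        # Maps 'sceneId_objectId' keys to (viewer_id, at) pairs locating each
        # object's plotter window and sub-renderer, e.g. '0_2' -> (9, 0).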
self.info = Text2D("Press 'q' to\nstart session")
# Wrong samples parameters
self.folder_path: str = ''
self.nb_saved: int = 0
self.nb_screenshots: int = 0
def init_view(self, data_dict: Dict[int, Dict[int, Dict[str, Union[Dict[str, Any], Any]]]]) -> None:
"""
| Initialize VedoVisualizer class by parsing the scenes hierarchy and creating VisualInstances.
| OBJECT DESCRIPTION DICTIONARY is usually obtained using the corresponding factory (VedoObjectFactory).
| data_dict example:
| {SCENE_1_ID: {OBJECT_1.1_ID: {CONTENT OF OBJECT_1.1 DESCRIPTION DICTIONARY},
| ...
| OBJECT_1.N_ID: {CONTENT OF OBJECT_1.N DESCRIPTION DICTIONARY}
| },
| ...
        | SCENE_M_ID: {OBJECT_M.1_ID: {CONTENT OF OBJECT_M.1 DESCRIPTION DICTIONARY},
        |             ...
        |             OBJECT_M.K_ID: {CONTENT OF OBJECT_M.K DESCRIPTION DICTIONARY}
| }
| }
:param data_dict: Dictionary describing the scene hierarchy and object parameters
:type data_dict: Dict[int, Dict[int, Dict[str, Union[Dict[str, Any], Any]]]]
"""
# For each scene/client
for scene_id in sorted(data_dict):
# Add a VedoObjects container for the scene
self.scene.update({scene_id: VedoObjects()})
scene = self.scene[scene_id]
# Create each object in current scene/client
for object_id in data_dict[scene_id]:
scene.create_object(data_dict[scene_id][object_id])
objects_dict = scene.objects_factory.objects_dict
            # Deal with all the windows and objects attached to these windows first
remaining_object_id = set(objects_dict.keys())
for window_id in scene.objects_factory.windows_id:
                # Remove the window we are dealing with from the objects we have to add to the plotter
remaining_object_id -= {window_id}
# Vedo can only handle 1 axe type per viewer, so we create as many viewers as needed
viewer_id = objects_dict[window_id]['axes']
# Existing viewer: ensure all objects share the same window parameters
if viewer_id in self.viewers:
# If at least one need a shared camera then we set True for all
self.viewers[viewer_id]['sharecam'] |= objects_dict[window_id]['sharecam']
# If one requires that the window is not interactive then it's not interactive for all
self.viewers[viewer_id]['interactive'] &= objects_dict[window_id]['interactive']
# New viewer: init parameters
else:
self.viewers[viewer_id] = {'sharecam': objects_dict[window_id]['sharecam'],
'interactive': objects_dict[window_id]['interactive'],
'instances': [],
'title': f"Vedo_axes_{objects_dict[window_id]['axes']}"}
# Add the objects to the corresponding list
for object_id in objects_dict[window_id]['objects_id']:
# Affects the object in the existing window
if -1 < objects_dict[object_id]['at'] < len(self.viewers[viewer_id]['instances']):
self.viewers[viewer_id]['instances'][objects_dict[object_id]['at']].append([scene_id,
object_id])
# Affects the object in the next non-existing window
else:
objects_dict[object_id]['at'] = len(self.viewers[viewer_id]['instances'])
self.viewers[viewer_id]['instances'].append([[scene_id, object_id]])
# Remove all the objects attached to the window from the object to deal with
remaining_object_id -= set(objects_dict[window_id]['objects_id'])
# Deals with the remaining objects that are not specified in windows
for object_id in remaining_object_id:
# Affects the object in the existing window
if -1 < objects_dict[object_id]['at'] < len(self.viewers[self.default_viewer_id]['instances']):
self.viewers[self.default_viewer_id]['instances'][objects_dict[object_id]['at']].append([scene_id,
object_id])
# Affects the object in the next non-existing window
else:
objects_dict[object_id]['at'] = len(self.viewers[self.default_viewer_id]['instances'])
self.viewers[self.default_viewer_id]['instances'].append([[scene_id, object_id]])
# Once all objects are created we create the plotter with the corresponding parameters
for viewer_id in list(self.viewers.keys()):
# If no objects created for the viewer, remove it
if len(self.viewers[viewer_id]['instances']) == 0:
del self.viewers[viewer_id]
continue
# # Create plotter
# self.viewers[viewer_id]['plotter'] = Plotter(N=len(self.viewers[viewer_id]['instances']),
# title=self.viewers[viewer_id]['title'],
# axes=viewer_id,
# sharecam=self.viewers[viewer_id]['sharecam'],
# interactive=self.viewers[viewer_id]['interactive'])
# self.viewers[viewer_id]['plotter'].add(self.info, at=0)
#
# # self.viewers[viewer_id]['instances'] is a list containing lists of instances
# # Each sublist contains all instances present in a window hence, each sublist has it own "at"
# for at, ids in enumerate(self.viewers[viewer_id]['instances']):
# for scene_id, object_in_scene_id in ids:
# # Add object instance in the actors list of plotter
# self.viewers[viewer_id]['plotter'].add(
# self.scene[scene_id].objects_instance[object_in_scene_id], at=at, render=False)
# # Register the object rendering location
# self.objects_rendered_in[f'{scene_id}_{object_in_scene_id}'] = (viewer_id, at)
#
# # Render viewer
# self.viewers[viewer_id]['plotter'].show(interactive=True)
# self.viewers[viewer_id]['plotter'].remove(self.info)
actors = []
for at, ids in enumerate(self.viewers[viewer_id]['instances']):
actors.append([])
if at == 0:
actors[-1].append(self.info)
for scene_id, object_in_scene_id in ids:
actors[-1].append(self.scene[scene_id].objects_instance[object_in_scene_id])
self.objects_rendered_in[f'{scene_id}_{object_in_scene_id}'] = (viewer_id, at)
self.viewers[viewer_id]['plotter'] = show(actors,
N=len(actors),
title=self.viewers[viewer_id]['title'],
axes=viewer_id,
sharecam=self.viewers[viewer_id]['sharecam'],
interactive=self.viewers[viewer_id]['interactive'])
self.viewers[viewer_id]['plotter'].remove(self.info)
def render(self) -> None:
"""
        | Call render on all valid plotters.
"""
# Update all objects
self.update_instances()
# Render all plotters
for viewer_id in self.viewers:
self.viewers[viewer_id]['plotter'].render()
self.viewers[viewer_id]['plotter'].allowInteraction()
def update_instances(self) -> None:
"""
        | Call update_instance on every updated object description.
"""
# Update in every scene/client
for scene_id in self.scene:
# Update every object of the current scene
for object_id in self.scene[scene_id].objects_instance:
# Get the rendering location of the object
viewer_data = self.objects_rendered_in[f'{scene_id}_{object_id}']
plotter = self.viewers[viewer_data[0]]['plotter']
at = viewer_data[1]
# _, self.viewers[viewer_data[0]]['plotter'] = self.scene[scene_id].update_instance(
# object_id, (self.viewers[viewer_data[0]]['plotter'], viewer_data[1]))
self.scene[scene_id].update_instance(object_id, (plotter, at))
def update_visualizer(self, data_dict: Dict[int, Dict[int, ObjectDescription]]) -> None:
"""
        | Call update_object on all designated objects.
:param data_dict: Dictionary describing the scene hierarchy and object parameters
:type data_dict: Dict[int, Dict[int, Dict[str, Union[Dict[str, Any], Any]]]]
"""
for scene_id in data_dict:
for object_id in data_dict[scene_id]:
self.scene[scene_id].update_object(object_id, data_dict[scene_id][object_id])
def save_sample(self, session_dir: str, viewer_id: int) -> None:
"""
        Save the sample as a .npz file.
        :param str session_dir: Directory in which to save the file
        :param int viewer_id: Id of the designated viewer
"""
if self.folder_path == "":
self.folder_path = osPathJoin(session_dir, 'dataset', 'wrong_samples')
makedirs(self.folder_path)
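            # Ship the wrong_samples helper module next to the saved files
            # (presumably so the exported samples can be replayed later).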
from DeepPhysX.Core.Utils import wrong_samples
import shutil
shutil.copy(wrong_samples.__file__, self.folder_path)
filename = osPathJoin(self.folder_path, f'wrong_sample_{self.nb_saved}.npz')
self.nb_saved += 1
self.viewers[viewer_id]['plotter'].export(filename=filename)
def save_screenshot(self, session_dir: str) -> None:
"""
| Save a screenshot of each viewer in the dataset folder_path of the session.
:param str session_dir: Directory in which to save the file
"""
# Check folder_path existence
if self.folder_path == "":
self.folder_path = osPathJoin(session_dir, 'dataset', 'samples')
makedirs(self.folder_path)
# Save a screenshot for each viewer
for viewer_id in self.viewers.keys():
filename = osPathJoin(self.folder_path, f'screenshot_{self.nb_screenshots}.png')
self.nb_screenshots += 1
self.viewers[viewer_id]['plotter'].screenshot(filename=filename) | PypiClean |
/future-0.18.3.tar.gz/future-0.18.3/src/libfuturize/fixes/fix_next_call.py | from lib2to3.pgen2 import token
from lib2to3.pygram import python_symbols as syms
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, Call, find_binding
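# Fixer that rewrites Python 2 style ``x.next()`` calls to the builtin
# ``next(x)`` (PEP 3114) and warns when a binding of the name ``next``
# may shadow the builtin.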
bind_warning = "Calls to builtin next() possibly shadowed by global binding"
class FixNextCall(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
|
power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
|
global=global_stmt< 'global' any* 'next' any* >
"""
order = "pre" # Pre-order tree traversal
def start_tree(self, tree, filename):
super(FixNextCall, self).start_tree(tree, filename)
n = find_binding('next', tree)
if n:
self.warning(n, bind_warning)
self.shadowed_next = True
else:
self.shadowed_next = False
def transform(self, node, results):
assert results
base = results.get("base")
attr = results.get("attr")
name = results.get("name")
if base:
if self.shadowed_next:
# Omit this:
# attr.replace(Name("__next__", prefix=attr.prefix))
pass
else:
base = [n.clone() for n in base]
base[0].prefix = ""
node.replace(Call(Name("next", prefix=node.prefix), base))
elif name:
# Omit this:
# n = Name("__next__", prefix=name.prefix)
# name.replace(n)
pass
elif attr:
# We don't do this transformation if we're assigning to "x.next".
# Unfortunately, it doesn't seem possible to do this in PATTERN,
# so it's being done here.
if is_assign_target(node):
head = results["head"]
if "".join([str(n) for n in head]).strip() == '__builtin__':
self.warning(node, bind_warning)
return
# Omit this:
# attr.replace(Name("__next__"))
elif "global" in results:
self.warning(node, bind_warning)
self.shadowed_next = True
### The following functions help test if node is part of an assignment
### target.
def is_assign_target(node):
assign = find_assign(node)
if assign is None:
return False
for child in assign.children:
if child.type == token.EQUAL:
return False
elif is_subtree(child, node):
return True
return False
def find_assign(node):
if node.type == syms.expr_stmt:
return node
if node.type == syms.simple_stmt or node.parent is None:
return None
return find_assign(node.parent)
def is_subtree(root, node):
if root == node:
return True
return any(is_subtree(c, node) for c in root.children) | PypiClean |
/cpg-utils-ms-1.3.5.tar.gz/cpg-utils-ms-1.3.5/README.md | # cpg-utils-ms
This is a Python library containing convenience functions that are specific to the CPG.
In order to install the library, run:
```bash
pip install cpg-utils-ms
```
To use the library, import functions like this:
```python
from cpg_utils.cloud import email_from_id_token
_email_string = email_from_id_token(id_token_jwt='TOKEN_STRING')
```
We use `bumpversion` to increment the library's semantic version. A new conda package is published automatically in the `cpg` conda channel whenever a version bump commit is merged into the `main` branch.
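For example, a patch-level bump (assuming the repository's existing `bumpversion` configuration) looks like this:
```bash
bumpversion patch
```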
## Contents
- [Methods to facilitate cloud computing](documentation/cloud.md)
- [Helper functions for Hail Batch jobs](documentation/hail_batch.md)
- [Cloning git repositories inside Hail Batch jobs](documentation/git.md)
| PypiClean |
/odoo14_addon_mrp_subcontracting_inhibit-14.0.1.1.1-py3-none-any.whl/odoo/addons/mrp_subcontracting_inhibit/readme/USAGE.rst | #. Go to *Inventory > Configuration > Routes* and edit 'Buy' rule and check 'Subcontracting inhibit'.
#. Go to *Inventory > Products > Product* and create some product called 'Subcontract product'.
#. Go to *Inventory > Products > Product* and create other product called 'Component'.
#. Go to *Manufacturing > Products > Bill of materials* and create a new one with type = "Subcontracting" related to 'Subcontract product' and set 'Component' product.
#. Go to *Inventory > Products > Product* and edit 'Subcontract product' and set Purchase tab as follows:
#. [Vendor line 1] Vendor: Azure Interior, Subcontracting inhibited: Yes, Price: 5.
#. [Vendor line 2] Vendor: Azure Interior, Subcontracting inhibited: No, Price: 10.
Purchase order flow:
#. Go to *Purchase > Orders > Requests for Quotation* and create a new order as follows:
#. Vendor: Azure Interior, Product: Subcontract product
#. The unit price of the product will be 10.
#. Click on the 'Confirm Order' button.
#. A production order will have been created.
Replenishment flow:
#. Go to *Inventory > Products > Product* and open 'Subcontract product'.
#. Click on the 'Replenish' button, select 'Buy' in the Preferred Routes field and click on the 'Confirm' button.
#. A new purchase order will have been created; go to *Purchase > Orders > Requests for Quotation* and open it.
#. The unit price of the product will be 5.
#. Click on the 'Confirm Order' button.
#. A production order will not have been created.
| PypiClean |