import requests
import re
class MyCrawler:
def __init__(self, filename):
self.filename = filename
self.headers = {
'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Mobile Safari/537.36'
}
def download(self, url):
r = requests.get(url, headers=self.headers)
return r.text
def extract(self, content, pattern):
result = re.findall(pattern, content)
return result
def save(self, items):
with open(self.filename, "w", encoding="utf-8") as f:
for item in items:
# f.write(item[0] + " " + item[1] + " " + item[2] +
# " " + item[3] + " " + item[4] + " " + item[5] + "\n")
f.write(" ".join(item) + "\n")
def crawl(self, url, pattern, headers=None):
if headers:
self.headers.update(headers)
content = self.download(url)
info = self.extract(content, pattern)
self.save(info)
# Crawl the Bilibili ranking page
# url = "https://www.bilibili.com/v/popular/rank/douga?spm_id_from=333.851.b_62696c695f7265706f72745f646f756761.39"
# b_crawler = MyCrawler("bilibili.txt")
# pattern = '<a\shref="//([^"]*?)"\starget="_blank"\sclass="title">(.*?)</a>.*?</i>[\s]+(.*?)[\s]+.*</i>[\s]+(\d+)[\s]+</span>.*</i>[\s]+(.*?)[\s]+</span>.*<div class="pts"><div>(\d+)</div>'
# b_crawler.crawl(url, pattern)
# One-shot trick: send the full set of request headers
# copy as curl, and use tools to convert code to python code
# Cookies are generally not needed when you are not logged in
# url = "https://www.douban.com/search?q=%E7%A5%9E%E7%BB%8F%E7%BD%91%E7%BB%9C"
# This suggests Douban only validates the User-Agent
# headers = {
# 'User-Agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Mobile Safari/537.36'
# }
# response = requests.get('https://www.douban.com/search?q=%E7%A5%9E%E7%BB%8F%E7%BD%91%E7%BB%9C',
# headers=headers)
# print(len(response.text))
# print("神经网络与深度学习" in response.text)
# print(response.text)
# NB. Original query string below. It seems impossible to parse and
# reproduce query strings 100% accurately so the one below is given
# in case the reproduced version is not "correct".
# response = requests.get('https://www.douban.com/search?q=^%^E7^%^A5^%^9E^%^E7^%^BB^%^8F^%^E7^%^BD^%^91^%^E7^%^BB^%^9C', headers=headers, cookies=cookies)
# ==================================================================================================
'''
The purpose of this package is to provide asynchronous variants of
the builtin `input` and `print` functions. `print` is known to be
relatively slow compared to other operations. `input` is even slower
because it has to wait for user input. While these slow IO
operations are running, code using `asyncio` should be able to
continue running.
ainput and aprint
------------------
With `aio_stdout`, the `aio_stdout.ainput` and `aio_stdout.aprint`
functions provide easy-to-use functionality with organized behaviour.
```python
import asyncio
from aio_stdout import ainput, aprint
async def countdown(n: int) -> None:
"""Count down from `n`, taking `n` seconds to run."""
for i in range(n, 0, -1):
await aprint(i)
await asyncio.sleep(1)
async def get_name() -> str:
"""Ask the user for their name."""
name = await ainput("What is your name? ")
await aprint(f"Your name is {name}.")
return name
async def main() -> None:
await asyncio.gather(countdown(15), get_name())
if __name__ == "__main__":
asyncio.run(main())
```
Example output:
```
15
What is your name? Jane
14
13
12
11
10
9
8
Your name is Jane.
7
6
5
4
3
2
1
```
Notice that while the prompt `"What is your name? "` is being waited
for, the `countdown` continues to `aprint` in the background, without
becoming blocked. The `countdown` does not, however, display its
results until the `ainput` is completed. Instead it waits for the
`ainput` to finish before flushing out all of the queued messages.
It is worth noting that with naive threading, a normal attempt to use
`print` while waiting on an `input` leads to overlapping messages.
Fixing this behavior requires a lot more work than should be needed
to use a simple `print` or `input` function, which is why this
package exists. To remedy this problem, queues are used to store
messages until they are ready to be printed.
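For illustration, here is a minimal sketch (standard library only) of the
overlapping output that naive threading produces:
```python
import threading
import time

def countdown(n: int) -> None:
    for i in range(n, 0, -1):
        print(i)  # lands in the middle of the input prompt below
        time.sleep(1)

threading.Thread(target=countdown, args=(5,)).start()
name = input("What is your name? ")
```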
IO Locks
---------
Although the asynchronization behaviors of `ainput` and `aprint` are
nice, sometimes we want to be able to synchronize our messages even
more. IO locks provide a way to group messages together, locking the
global `aio_stdout` queues until it finishes or yields access.
```python
import asyncio
from aio_stdout import IOLock, ainput, aprint
async def countdown(n: int) -> None:
"""Count down from `n`, taking `n` seconds to run."""
async with IOLock(n=5) as io_lock:
for i in range(n, 0, -1):
await io_lock.aprint(i)
await asyncio.sleep(1)
async def get_name() -> str:
"""Ask the user for their name."""
async with IOLock() as io_lock:
name = await io_lock.ainput("What is your name? ")
await io_lock.aprint(f"Your name is {name}.")
return name
async def main() -> None:
await asyncio.gather(countdown(15), get_name())
if __name__ == "__main__":
asyncio.run(main())
```
Let's try the example again now using the new locks:
```
15
14
13
12
11
What is your name? Jane
Your name is Jane.
10
9
8
7
6
5
4
3
2
1
```
Notice that this time the `countdown` does not immediately yield to
the `get_name`. Instead, it runs 5 messages before yielding control
over to `get_name`. Now, after the `ainput` finishes, it does not
yield to `countdown`. Instead, it runs its own `aprint` first. In the
meantime, `countdown` continues to run in the background and flushes
all of its buffered messages afterwards.
Flushing
---------
Since messages may be delayed, it is possible for your asynchronous
code to finish running before all messages are displayed, producing
confusing results. As such, the best recommended practice is to flush
from `main` before terminating.
```python
from aio_stdout import flush
async def main() -> None:
async with flush:
        pass  # Main code.
```
Common Gotchas
---------------
- Using `input` or `print` instead of `ainput` and `aprint` will push
a message immediately to the console, potentially conflicting with
`ainput` or `aprint`.
- Using `ainput` or `aprint` instead of `io_lock.ainput` and
`io_lock.aprint` may produce **deadlock** due to having to
wait for the lock to release. As such, the `io_lock` is equipped
with a default `timeout` limit of 10 seconds to avoid deadlock
and to explain this potential problem to users (see the sketch below).
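For example, a sketch of the delayed (non-blocking) case: a plain `aprint`
issued inside an `IOLock` block is queued globally and cannot flush until
the lock block finishes.
```python
import asyncio
from aio_stdout import IOLock, aprint, flush

async def main() -> None:
    async with flush:  # ensure the delayed message is printed before exiting
        async with IOLock() as io_lock:
            await aprint("queued globally")        # delayed until the lock releases
            await io_lock.aprint("printed first")  # goes through the lock's queue

asyncio.run(main())
```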
'''
from __future__ import annotations
from asyncio import Queue
from enum import Enum
from functools import partial
from typing import Any, ClassVar, Dict, Generic, IO, Literal, Optional, Tuple, Type, TypedDict, TypeVar, Union
import asyncio
import logging
import sys
__all__ = ["IOLock", "ainput", "aprint", "flush"]
T = TypeVar("T")
# Make `asyncio.Queue` generic for type-hinting.
if sys.version_info < (3, 9):
class Queue(Queue, Generic[T]):
"""
A queue, useful for coordinating producer and consumer coroutines.
If maxsize is less than or equal to zero, the queue size is infinite. If it
is an integer greater than 0, then "await put()" will block when the
queue reaches maxsize, until an item is removed by get().
Unlike the standard library Queue, you can reliably know this Queue's size
with qsize(), since your single-threaded asyncio application won't be
interrupted between calling qsize() and doing an operation on the Queue.
"""
__slots__ = ()
async def get(self: Queue[T], /) -> T:
"""
Remove and return an item from the queue.
If queue is empty, wait until an item is available.
"""
return await super().get()
def get_nowait(self: Queue[T], /) -> T:
"""
Remove and return an item from the queue.
Return an item if one is immediately available, else raise QueueEmpty.
"""
return super().get_nowait()
async def put(self: Queue[T], item: T, /) -> T:
"""
Put an item into the queue.
Put an item into the queue. If the queue is full, wait until a free
slot is available before adding item.
"""
return await super().put(item)
def put_nowait(self: Queue[T], item: T, /) -> T:
"""
Put an item into the queue without blocking.
If no free slot is immediately available, raise QueueFull.
"""
return super().put_nowait(item)
logger = logging.getLogger(__name__)
class PrintKwargs(TypedDict, total=False):
sep: Optional[str]
end: Optional[str]
file: IO
flush: Any
IOQueueType = Queue[Tuple[bool, Optional[asyncio.Event], Tuple[str, ...], PrintKwargs]]
class IOLock(asyncio.Lock):
"""
The `IOLock` may be used to control the order with which `ainput` and
`aprint` are scheduled.
The `IOLock` blocks new IO requests from directly entering the `IO_QUEUE`
by moving them to the `UNLOCKED_QUEUE` instead.
    Use `IOLock.ainput` and `IOLock.aprint` within its context block to
    schedule locked IO requests.
Attributes
-----------
Construct an IOLock using:
>>> io_lock = IOLock(n=..., timeout=...)
By default, `n = None` and `timeout = 10`.
n:
The number of io requests that can be queued at a time
before letting other io requests go through.
timeout:
The number of seconds the io lock can sleep before letting other
io requests go through.
See `help(IOLock.n)` or `help(IOLock.timeout)` for more information.
Example
--------
Use it as a context manager to ensure you can't have printed messages
in-between them.
>>> async with IOLock() as io_lock:
... name = await io_lock.ainput("What is your name? ")
... await io_lock.aprint(f"Your name is {name}.")
...
What is your name? (...)
Your name is (...).
WARNING
--------
Using `aprint` with `block=True` or `ainput` inside of an `io_lock`
block will cause deadlock, preventing your program from continuing.
Use `io_lock.ainput` and `io_lock.aprint` instead.
Using `aprint` with `block=False` inside of an `io_lock` block
will delay the `aprint` until the `io_lock` block is finished.
With the default `io_lock.timeout` however, such deadlocks only hold for 10 seconds.
"""
_class_is_finished: ClassVar[asyncio.Event] = asyncio.Event()
_class_queue: ClassVar[Queue[Tuple[Optional[float], IOQueueType, asyncio.Event, asyncio.Event]]] = Queue()
_i: int
_is_awake: asyncio.Event
_is_finished: asyncio.Event
_n: Optional[int]
_queue: IOQueueType
_timeout: Optional[float]
__slots__ = ("_i", "_is_awake", "_is_finished", "_n", "_queue", "_timeout")
    # Finished running IO because nothing is being run yet.
_class_is_finished.set()
def __init__(self: IOLock, /, *args: Any, n: Optional[int] = None, timeout: Optional[float] = 10, **kwargs: Any) -> None:
if n is not None and not isinstance(n, int):
raise TypeError(f"n must be an integer or None, got {x!r}")
elif timeout is not None and not isinstance(timeout, (int, float)):
raise TypeError(f"timeout must be an positive number or None, got {timeout!r}")
elif n is not None and not n > 0:
raise ValueError(f"n must be greater than 0, got {n!r}")
elif timeout is not None and not timeout > 0:
raise ValueError(f"timeout must be greater than 0, got {timeout!r}")
super().__init__(*args, **kwargs)
self._i = 0
self._is_awake = asyncio.Event()
self._is_finished = asyncio.Event()
self._n = n
self._queue = Queue()
self._timeout = float(timeout) if isinstance(timeout, int) else timeout
# The lock is not sleeping because it's not being executed.
self._is_awake.set()
        # Finished running IO because nothing is being run yet.
self._is_finished.set()
async def __aenter__(self: IOLock, /) -> IOLock:
"""Acquire the lock and return itself."""
await super().__aenter__()
return self
async def acquire(self: IOLock, /) -> Literal[True]:
"""
Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
This prevents other `ainput` or `aprint` from running.
"""
await super().acquire()
# Once the lock is acquired, add it to the queue.
self._is_finished.clear()
await type(self)._class_queue.put((self.timeout, self._queue, self._is_awake, self._is_finished))
# Restart the class executor if necessary.
if type(self)._class_is_finished.is_set():
type(self)._class_is_finished.clear()
asyncio.create_task(type(self)._execute_io())
        # The lock is sleeping because nothing is being run yet.
        self._is_awake.clear()
        return True
def release(self: IOLock, /) -> None:
"""
Release a lock.
When the lock is locked, reset it to unlocked, and return.
If any other coroutines are blocked waiting for the lock to become
unlocked, allow exactly one of them to proceed.
When invoked on an unlocked lock, a RuntimeError is raised.
There is no return value.
"""
super().release()
self._is_finished.set()
# Use a new `is_awake` event.
self._is_awake = asyncio.Event()
self._is_awake.set()
# Use a new `is_finished` event.
self._is_finished = asyncio.Event()
self._is_finished.set()
# Collect future IO in an empty queue.
if not self._queue.empty():
self._queue = Queue()
@classmethod
async def __exhaust_queue(cls: Type[IOLock], io_queue: IOQueueType, /) -> None:
"""Helper method to exhaust a queue."""
        # Exhaust the io queue while items remain.
while not io_queue.empty():
# Get the next io request.
is_print, event, args, kwargs = await io_queue.get()
# Execute the io request in `asyncio`'s default thread.
if is_print:
try:
await asyncio.get_running_loop().run_in_executor(None, partial(print, *args, **kwargs))
except Exception as e:
if event is None:
logger.exception(e)
else:
PRINT_EXCEPTIONS[event] = e
else:
try:
INPUT_RESULTS[event] = (False, await asyncio.get_running_loop().run_in_executor(None, partial(input, *args)))
except Exception as e:
INPUT_RESULTS[event] = (True, e)
# Signal the io request was completed.
if event is not None:
event.set()
io_queue.task_done()
@classmethod
async def __wait_event(cls: Type[IOLock], event: asyncio.Event, message: str, /) -> str:
"""Helper method to wait until an event occurs."""
await event.wait()
return message
@classmethod
async def _execute_io(cls: Type[IOLock], /) -> None:
"""Helper method for executing IO requests."""
while not cls._class_queue.empty():
timeout, io_queue, is_awake, is_finished = await cls._class_queue.get()
is_finished_task = asyncio.create_task(cls.__wait_event(is_finished, "finished"))
task_type = "awake"
# Wait for the queue to be finished.
while task_type == "awake":
# Otherwise the io lock is awake and the io queue should be exhausted.
await cls.__exhaust_queue(io_queue)
# Sleep once all tasks are done.
is_awake.clear()
tasks = [is_finished_task]
tasks.append(asyncio.create_task(cls.__wait_event(is_awake, "awake")))
if timeout is None:
as_completed = asyncio.as_completed(tasks)
else:
as_completed = asyncio.as_completed(tasks, timeout=timeout)
# Wait until one of the tasks is done.
for task in as_completed:
try:
task_type = await task
except asyncio.TimeoutError:
task_type = "timeout"
break
del tasks[0]
for task in tasks:
task.cancel()
for task in tasks:
try:
await task
except asyncio.CancelledError:
pass
cls._class_queue.task_done()
# Wake up if finished.
if task_type == "finished":
is_awake.set()
# Finish the remaining io requests.
await cls.__exhaust_queue(io_queue)
            # Otherwise it timed out and needs to be re-added to the queue.
else:
# Warn the user if they timed out after 10 seconds and other IO is waiting.
                if timeout is not None and timeout >= 10 and not (cls._class_queue.empty() and IO_QUEUE.empty()):
print(
"An `io_lock` timed out after 10 seconds or more.",
"This is likely due to the use of `aprint` or `ainput`",
"instead of `io_lock.aprint` or `io_lock.ainput` while",
"inside of an `io_lock` block."
)
# Insert the global queue into the class queue.
global_queue = Queue()
for _ in range(IO_QUEUE.qsize()):
global_queue.put_nowait(IO_QUEUE.get_nowait())
global_is_finished = asyncio.Event()
global_is_finished.set()
await cls._class_queue.put((None, global_queue, asyncio.Event(), global_is_finished))
await cls._class_queue.put((timeout, io_queue, is_awake, is_finished))
# Signal no io lock is executing.
cls._class_is_finished.set()
# Restart the global executor if necessary.
if IS_FINISHED.is_set():
IS_FINISHED.clear()
asyncio.create_task(_execute_io())
def _schedule_io(self: IOLock, is_print: bool, event: Optional[asyncio.Event], args: Tuple[str, ...], kwargs: Optional[PrintKwargs], /) -> None:
"""Helper method for scheduling IO requests."""
# Insert the next IO request.
self._queue.put_nowait((is_print, event, args, kwargs))
# Update the lock counter.
self._i += 1
# Refresh the lock if necessary.
        if self.n is not None and self._i >= self.n:
self._i = 0
# The current queue is finished.
self._is_finished.set()
# Use a new `is_awake` event.
self._is_awake = asyncio.Event()
self._is_awake.set()
# Use a new `is_finished` event.
self._is_finished = asyncio.Event()
# Use a new `queue`.
self._queue = Queue()
# Re-add it to the class queue.
type(self)._class_queue.put_nowait((self.timeout, self._queue, self._is_awake, self._is_finished))
# The io lock is no longer sleeping, if it was.
else:
self._is_awake.set()
async def ainput(self: IOLock, /, *args: Any) -> str:
"""Locked version of `ainput`. See `ainput` for more details."""
# Perform early type-checking on args.
if len(args) > 1:
raise TypeError(f"ainput expected at most 1 argument, got {len(args)}")
# Require the io lock to be locked.
elif not self.locked():
raise RuntimeError(f"ainput used before the lock was acquired")
# Wait for the io to finish.
is_completed = asyncio.Event()
# Schedule the `input`.
self._schedule_io(False, is_completed, (*[str(arg) for arg in args],), {})
# Wait for the `input` to finish.
await is_completed.wait()
# Collect the result.
had_exception, response = INPUT_RESULTS.pop(is_completed)
if had_exception:
raise response
else:
return response
async def aprint(self: IOLock, /, *args: Any, block: bool = False, **kwargs: Any) -> None:
"""Locked version of `aprint`. See `aprint` for more details."""
# Perform early type-checking on kwargs.
for kwarg, value in kwargs.items():
if kwarg in ("sep", "end") and value is not None and not isinstance(value, str):
raise TypeError(f"{kwarg} must be None or a string, not {type(value).__name__}")
elif kwarg == "file" and not isinstance(value, IO):
raise TypeError(f"file must be an IO instance, not {type(value).__name__}")
elif kwarg not in ("sep", "end", "file", "flush"):
raise TypeError(f"{kwarg!r} is an invalid keyword argument for aprint()")
# Require the io lock to be locked.
if not self.locked():
raise RuntimeError(f"ainput used before the lock was acquired")
# Wait for the io to finish depending on `block`.
event = asyncio.Event() if block else None
# Schedule the `print`.
self._schedule_io(True, event, (*[str(arg) for arg in args],), kwargs)
# Wait for the `print` to finish.
if block:
await event.wait()
# Wait at least once before returning so that the print can start running.
else:
await asyncio.sleep(0)
@property
def n(self: IOLock, /) -> Optional[int]:
"""
The number of io requests that can be queued at a time
before letting other io requests go through.
If `None`, then it blocks until all locked io requests go through.
"""
return self._n
@property
def timeout(self: IOLock, /) -> Optional[float]:
"""
The number of seconds the io lock can sleep before letting other
io requests go through.
If `None`, then it blocks until all locked io requests go through.
"""
return self._timeout
class Flush(Enum):
"""Use `async with flush: ...` to flush all io before exiting."""
flush = ()
async def __aenter__(self: Flush, /) -> None:
pass
async def __aexit__(self: Flush, /, *args: Any) -> None:
"""Waits until all IO is flushed."""
await IOLock._class_is_finished.wait()
await IS_FINISHED.wait()
flush: Flush = Flush.flush
INPUT_RESULTS: Dict[asyncio.Event, Union[Tuple[Literal[False], str], Tuple[Literal[True], Exception]]] = {}
IO_QUEUE: IOQueueType = Queue()
IS_FINISHED: asyncio.Event = asyncio.Event()
PRINT_EXCEPTIONS: Dict[asyncio.Event, Exception] = {}
# Finished running IO because nothing is being run yet.
IS_FINISHED.set()
async def _execute_io() -> None:
"""Helper function for executing IO requests."""
# Exhaust all of the io requests.
# Stop if an `IOLock` is currently being used.
while not IO_QUEUE.empty() and IOLock._class_is_finished.is_set():
# Get the next io request.
is_print, event, args, kwargs = await IO_QUEUE.get()
# Execute the io request in `asyncio`'s default thread.
if is_print:
try:
await asyncio.get_running_loop().run_in_executor(None, partial(print, *args, **kwargs))
except Exception as e:
if event is None:
logger.exception(e)
else:
PRINT_EXCEPTIONS[event] = e
else:
try:
INPUT_RESULTS[event] = (False, await asyncio.get_running_loop().run_in_executor(None, partial(input, *args)))
except Exception as e:
INPUT_RESULTS[event] = (True, e)
# Signal the io request was completed.
if event is not None:
event.set()
IO_QUEUE.task_done()
# Signal no io requests are being executed.
IS_FINISHED.set()
def _schedule_io(is_print: bool, event: Optional[asyncio.Event], args: Tuple[str, ...], kwargs: Optional[PrintKwargs], /) -> None:
"""Helper function for scheduling IO requests."""
# Insert the next IO request.
IO_QUEUE.put_nowait((is_print, event, args, kwargs))
# Restart the executor if necessary.
if IS_FINISHED.is_set() and IOLock._class_is_finished.is_set():
IS_FINISHED.clear()
asyncio.create_task(_execute_io())
async def ainput(*args: Any) -> str:
"""
An asynchronous version of `input`, which runs in a thread.
Blocks the current coroutine from progressing until `input` is given.
WARNING:
Using `ainput` inside of an `io_lock` block will cause deadlock,
preventing your program from continuing.
Use `io_lock.ainput` instead.
With the default `io_lock.timeout` however, such deadlocks only
hold for 10 seconds.
NOTE:
        Since `ainput` only queues a prompt to be printed eventually,
it may not print anything if the `asyncio` loop terminates first.
In order to flush out all remaining `aprint`s and `ainput`s, use
>>> async with flush:
... pass # Main code.
...
at the end of the main code to wait until all other code gets to print.
"""
# Perform early type-checking on args.
if len(args) > 1:
raise TypeError(f"ainput expected at most 1 argument, got {len(args)}")
# Wait for the io to finish.
is_completed = asyncio.Event()
# Schedule the `input`.
_schedule_io(False, is_completed, (*[str(arg) for arg in args],), {})
# Wait for the `input` to finish.
await is_completed.wait()
# Collect the result.
had_exception, response = INPUT_RESULTS.pop(is_completed)
if had_exception:
raise response
else:
return response
async def aprint(*args: Any, block: bool = False, **kwargs: Any) -> None:
"""
An asynchronous version of `print`, which runs in a thread.
    By default, `block=False`, which schedules the `print` but returns
    immediately. If `block=True`, it schedules the `print` and waits for
    it to run. For example, if an `aprint` occurs after an `ainput`, it
will wait until the `ainput` is completed to `print` the message,
but code using the `aprint` has the option to wait for this or not.
Use `block=True` only if you need the `print` to go through before
continuing, such as when printing to a file.
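    For example, to be sure a message has actually been written before the
    coroutine proceeds:

    >>> await aprint("checkpoint reached", block=True)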
WARNING:
Using `aprint` with `block=True` inside of an `io_lock` block
will cause deadlock, preventing your program from continuing.
Use `io_lock.aprint` instead.
Using `aprint` with `block=False` inside of an `io_lock` block
will delay the `aprint` until the `io_lock` block is finished.
With the default `io_lock.timeout` however, such deadlocks only
hold for 10 seconds.
NOTE:
        Since `aprint` only queues a message to be printed eventually,
it may not print anything if the `asyncio` loop terminates first.
In order to flush out all remaining `aprint`s and `ainput`s, use
>>> async with flush:
... pass # Main code.
...
at the end of the main code to wait until all other code gets to print.
"""
# Perform early type-checking on kwargs.
for kwarg, value in kwargs.items():
if kwarg in ("sep", "end") and value is not None and not isinstance(value, str):
raise TypeError(f"{kwarg} must be None or a string, not {type(value).__name__}")
elif kwarg == "file" and not isinstance(value, IO):
raise TypeError(f"file must be an IO instance, not {type(value).__name__}")
elif kwarg not in ("sep", "end", "file", "flush"):
raise TypeError(f"{kwarg!r} is an invalid keyword argument for aprint()")
# Wait for the io to finish depending on `block`.
event = asyncio.Event() if block else None
# Schedule the `print`.
_schedule_io(True, event, (*[str(arg) for arg in args],), kwargs)
# Wait for the `print` to finish.
if block:
await event.wait()
if event in PRINT_EXCEPTIONS:
raise PRINT_EXCEPTIONS.pop(event)
# Wait at least once before returning so that the print can start running.
else:
await asyncio.sleep(0)
# ==================================================================================================
'''
Banner endpoint handler (defined in swagger.yaml)
'''
from app import metrics
import os
from PIL import Image,ImageFilter
import subprocess
from dataclasses import dataclass
import logging
from connexion.lifecycle import ConnexionResponse
from connexion import NoContent
from prometheus_client import Counter
from flask import make_response
def str2bool(v):
return v.lower() in ("yes", "true", "t", "1")
@dataclass
class Font:
base_from_a: bool
font_width: int
font_height: int
rows: int
characters_per_row: int
filename: str
fonts = {
"carebear": Font(filename='fonts/carebear.jpg', base_from_a=False, font_width=26, font_height=26, characters_per_row=12, rows=5),
"cuddly": Font(filename='fonts/cuddly.jpg', base_from_a=True, font_width=32, font_height=32, characters_per_row=10, rows=5),
"knight4": Font(filename='fonts/knight4.jpg', base_from_a=False, font_width=32, font_height=25, characters_per_row=10, rows=7),
"tcb": Font(filename='fonts/tcb.jpg', base_from_a=False, font_width=32, font_height=32, characters_per_row=10, rows=6)
}
SELECTED_FONTS = Counter('font', 'font', ['name'])
@metrics.summary('generate_by_status', 'generate Request latencies by status', labels={
'code': lambda r: r.status_code
})
def generate(message: str, fontname: str, width: int) -> str:
'''
Render the banner message in a fontname with the given terminal width
'''
logger = logging.getLogger()
output = ""
if len(message) > 0:
out_folder = "./out"
banner = str.upper(message)
if fontname not in fonts:
SELECTED_FONTS.labels("error").inc()
return "Unsupported font", 400
selected_font = fonts[fontname]
# increment font selection counter
SELECTED_FONTS.labels(fontname).inc()
font_width = selected_font.font_width
font_height = selected_font.font_height
rows = selected_font.rows
characters_per_row = selected_font.characters_per_row
font = Image.open(selected_font.filename)
banner_width = len(banner) * font_width
#font.rotate(45).show()
out_image = Image.new("RGB", (banner_width, font_height))
letters={}
character=' '
if selected_font.base_from_a:
character='A'
logger.info(f"Cut", extra={"selected_font": selected_font, "banner": message})
for cursor_y in range(0, rows):
for cursor_x in range(0, characters_per_row):
coords = (cursor_x * font_width, cursor_y * font_height, (cursor_x * font_width) + font_width, (cursor_y * font_height) + font_height)
#print(character + " " + str(coords))
                #letter = font.crop(coords)
#letters[character] = letter
letters[character] = coords
character = chr(ord(character) + 1)
cursor_x = 0
for letter in banner:
coords = letters[letter]
letter_image = font.crop(coords)
#print(letter + " " + str(coords))
out_image.paste(letter_image, (cursor_x * font_width, 0))
cursor_x += 1
#out_image.show()
if not os.path.exists(out_folder):
os.makedirs(out_folder)
banner_file = os.path.join(out_folder, 'banner.jpg')
out_image.save(banner_file)
docker = False
if 'DOCKER' in os.environ:
docker = str2bool(os.environ['DOCKER'])
logger.info(f"DOCKER found in environment {docker}", extra={"docker": docker})
if width == 0:
width = banner_width
logger.info(f"Render", extra={"banner_file": banner_file, "banner": message, "width": width})
if docker:
completed = subprocess.run(["jp2a", "--width=" + str(width), "--colors", "--color-depth=24", "--fill", banner_file], capture_output=True)
else:
completed = subprocess.run(["jp2a", "--width=" + str(width), "--invert", banner_file], capture_output=True)
if completed.returncode != 0:
logger.error(f"Error running jp2a", extra={"stderr": completed.stderr})
return "Failed to process", 503
output = completed.stdout.decode("ascii")
logger.info(f"Output", extra={"length": len(output)})
# ensure that the response is not quoted
response = make_response(output, 200)
response.mimetype = "text/plain"
return response
@metrics.summary('supported_fonts_by_status', 'supported fonts Request latencies by status', labels={
'code': lambda r: r.status_code
})
def supported_fonts() -> list:
'''
Return list of available fonts
'''
    return list(fonts.keys())
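# For reference, the letter-slicing step in generate() can be exercised on its
# own. A minimal sketch, assuming a row-major sprite sheet starting at 'A'
# (the helper name is new here; 32x32 with 10 per row mirrors the "cuddly"
# font above):
def _slice_font_sheet(w: int, h: int, per_row: int, rows: int, start: str = 'A') -> dict:
    """Map each character to its (left, upper, right, lower) crop box."""
    boxes = {}
    ch = start
    for row in range(rows):
        for col in range(per_row):
            boxes[ch] = (col * w, row * h, col * w + w, row * h + h)
            ch = chr(ord(ch) + 1)
    return boxes
# Example: _slice_font_sheet(32, 32, 10, 5)['B'] == (32, 0, 64, 32)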
# ==================================================================================================
#!/usr/bin/env python
# Copyright (c) 2020 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
""" This module is responsible for the management of the sumo simulation. """
# ==================================================================================================
# -- imports ---------------------------------------------------------------------------------------
# ==================================================================================================
import collections
import enum
import logging
import carla # pylint: disable=import-error
import sumolib # pylint: disable=import-error
import traci # pylint: disable=import-error
from .constants import INVALID_ACTOR_ID
# ==================================================================================================
# -- sumo definitions ------------------------------------------------------------------------------
# ==================================================================================================
# https://sumo.dlr.de/docs/TraCI/Vehicle_Signalling.html
class SumoVehSignal(object):
"""
SumoVehSignal contains the different sumo vehicle signals.
"""
BLINKER_RIGHT = 1 << 0
BLINKER_LEFT = 1 << 1
BLINKER_EMERGENCY = 1 << 2
BRAKELIGHT = 1 << 3
FRONTLIGHT = 1 << 4
FOGLIGHT = 1 << 5
HIGHBEAM = 1 << 6
BACKDRIVE = 1 << 7
WIPER = 1 << 8
DOOR_OPEN_LEFT = 1 << 9
DOOR_OPEN_RIGHT = 1 << 10
EMERGENCY_BLUE = 1 << 11
EMERGENCY_RED = 1 << 12
EMERGENCY_YELLOW = 1 << 13
# https://sumo.dlr.de/docs/Definition_of_Vehicles,_Vehicle_Types,_and_Routes.html#abstract_vehicle_class
class SumoActorClass(enum.Enum):
"""
SumoActorClass enumerates the different sumo actor classes.
"""
IGNORING = "ignoring"
PRIVATE = "private"
EMERGENCY = "emergency"
AUTHORITY = "authority"
ARMY = "army"
VIP = "vip"
PEDESTRIAN = "pedestrian"
PASSENGER = "passenger"
HOV = "hov"
TAXI = "taxi"
BUS = "bus"
COACH = "coach"
DELIVERY = "delivery"
TRUCK = "truck"
TRAILER = "trailer"
MOTORCYCLE = "motorcycle"
MOPED = "moped"
BICYCLE = "bicycle"
EVEHICLE = "evehicle"
TRAM = "tram"
RAIL_URBAN = "rail_urban"
RAIL = "rail"
RAIL_ELECTRIC = "rail_electric"
RAIL_FAST = "rail_fast"
SHIP = "ship"
CUSTOM1 = "custom1"
CUSTOM2 = "custom2"
SumoActor = collections.namedtuple(
'SumoActor', 'type_id vclass transform signals extent color')
# ==================================================================================================
# -- sumo simulation -------------------------------------------------------------------------------
# ==================================================================================================
class SumoSimulation(object):
"""
SumoSimulation is responsible for the management of the sumo simulation.
"""
def __init__(self, args):
self.args = args
host = args.sumo_host
port = args.sumo_port
if args.sumo_gui is True:
sumo_binary = sumolib.checkBinary('sumo-gui')
else:
sumo_binary = sumolib.checkBinary('sumo')
if args.sumo_host is None or args.sumo_port is None:
logging.info('Starting new sumo server...')
if args.sumo_gui is True:
logging.info('Remember to press the play button to start the simulation')
traci.start([
sumo_binary,
"-c", args.sumo_cfg_file,
'--step-length', str(args.step_length),
'--lateral-resolution', '0.25',
'--collision.check-junctions'
])
else:
            logging.info('Connecting to sumo server. Host: %s Port: %s', host, port)
traci.init(host=host, port=port)
# Structures to keep track of the spawned and destroyed vehicles at each time step.
self.spawned_actors = set()
self.destroyed_actors = set()
# Creating a random route to be able to spawn carla actors.
traci.route.add("carla_route", [traci.edge.getIDList()[0]])
        # Variable to assign an id to newly added actors.
self._sequential_id = 0
@staticmethod
def subscribe(actor_id):
"""
Subscribe the given actor to the following variables:
* Type.
* Vehicle class.
* Color.
* Length, Width, Height.
* Position3D (i.e., x, y, z).
* Angle, Slope.
* Speed.
* Lateral speed.
* Signals.
"""
traci.vehicle.subscribe(actor_id, [
traci.constants.VAR_TYPE, traci.constants.VAR_VEHICLECLASS,
traci.constants.VAR_COLOR, traci.constants.VAR_LENGTH,
traci.constants.VAR_WIDTH, traci.constants.VAR_HEIGHT,
traci.constants.VAR_POSITION3D, traci.constants.VAR_ANGLE,
traci.constants.VAR_SLOPE, traci.constants.VAR_SPEED,
traci.constants.VAR_SPEED_LAT, traci.constants.VAR_SIGNALS
])
@staticmethod
def unsubscribe(actor_id):
"""
Unsubscribe the given actor from receiving updated information each step.
"""
traci.vehicle.unsubscribe(actor_id)
@staticmethod
def get_net_offset():
"""
Accessor for sumo net offset.
"""
offset = traci.simulation.convertGeo(0, 0)
return (-offset[0], -offset[1])
@staticmethod
def get_step_length():
"""
Accessor for sumo simulation step length.
"""
return traci.simulation.getDeltaT()
@staticmethod
def get_actor(actor_id):
"""
Accessor for sumo actor.
"""
results = traci.vehicle.getSubscriptionResults(actor_id)
type_id = results[traci.constants.VAR_TYPE]
vclass = SumoActorClass(results[traci.constants.VAR_VEHICLECLASS])
color = results[traci.constants.VAR_COLOR]
length = results[traci.constants.VAR_LENGTH]
width = results[traci.constants.VAR_WIDTH]
height = results[traci.constants.VAR_HEIGHT]
location = list(results[traci.constants.VAR_POSITION3D])
rotation = [
results[traci.constants.VAR_SLOPE],
results[traci.constants.VAR_ANGLE], 0.0
]
transform = carla.Transform(
carla.Location(location[0], location[1], location[2]),
carla.Rotation(rotation[0], rotation[1], rotation[2])
)
signals = results[traci.constants.VAR_SIGNALS]
extent = carla.Vector3D(length / 2.0, width / 2.0, height / 2.0)
return SumoActor(type_id, vclass, transform, signals, extent, color)
def spawn_actor(self, type_id, attrs=None):
"""
Spawns a new actor.
:param type_id: vtype to be spawned.
:param attrs: dictionary with additional attributes for this specific actor.
:return: actor id if the actor is successfully spawned. Otherwise, INVALID_ACTOR_ID.
"""
actor_id = 'carla' + str(self._sequential_id)
try:
traci.vehicle.add(actor_id, 'carla_route', typeID=type_id)
except traci.exceptions.TraCIException as error:
logging.error('Spawn sumo actor failed: %s', error)
return INVALID_ACTOR_ID
if attrs is not None:
if self.args.sync_vehicle_color and 'color' in attrs:
color = attrs['color'].split(',')
traci.vehicle.setColor(actor_id, color)
self._sequential_id += 1
return actor_id
@staticmethod
def destroy_actor(actor_id):
"""
Destroys the given actor.
"""
traci.vehicle.remove(actor_id)
def synchronize_vehicle(self, vehicle_id, transform, signals=None):
"""
Updates vehicle state.
:param vehicle_id: id of the actor to be updated.
:param transform: new vehicle transform (i.e., position and rotation).
:param signals: new vehicle signals.
:return: True if successfully updated. Otherwise, False.
"""
loc_x, loc_y = transform.location.x, transform.location.y
yaw = transform.rotation.yaw
traci.vehicle.moveToXY(vehicle_id, "", 0, loc_x, loc_y, angle=yaw, keepRoute=2)
if signals is not None and self.args.sync_vehicle_lights:
traci.vehicle.setSignals(vehicle_id, signals)
return True
def tick(self):
"""
Tick to sumo simulation.
"""
traci.simulationStep()
# Update data structures for the current frame.
self.spawned_actors = set(traci.simulation.getDepartedIDList())
self.destroyed_actors = set(traci.simulation.getArrivedIDList())
@staticmethod
def close():
"""
Closes traci client.
"""
traci.close()
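# A minimal driving loop, sketched under the assumption that `args` carries the
# attributes read in __init__ (the config path below is a hypothetical example):
#
#     import argparse
#     args = argparse.Namespace(
#         sumo_host=None, sumo_port=None, sumo_gui=False,
#         sumo_cfg_file='example.sumocfg', step_length=0.05)
#     sim = SumoSimulation(args)
#     try:
#         for _ in range(100):
#             sim.tick()
#     finally:
#         sim.close()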
# ==================================================================================================
import os
import httpx
CAST_SERVICE_HOST_URL = 'http://localhost:8002/api/v1/casts/'
url = os.environ.get('CAST_SERVICE_HOST_URL') or CAST_SERVICE_HOST_URL
def is_cast_present(cast_id: int):
r = httpx.get(f'{url}{cast_id}')
    return r.status_code == 200
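# Usage sketch: the cast id below is arbitrary and the cast service must be
# reachable at the configured URL.
if __name__ == '__main__':
    print(is_cast_present(1))
# ==================================================================================================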
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2013 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import atexit
import os
import logging
import re
import sys
import time
from webkitpy.common.system.crashlogs import CrashLogs
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.common.system.executive import ScriptError, Executive
from webkitpy.common.system.path import abspath_to_uri, cygpath
from webkitpy.port.apple import ApplePort
_log = logging.getLogger(__name__)
class WinPort(ApplePort):
port_name = "win"
VERSION_FALLBACK_ORDER = ["win-xp", "win-vista", "win-7sp0", "win"]
ARCHITECTURES = ['x86']
CRASH_LOG_PREFIX = "CrashLog"
POST_MORTEM_DEBUGGER_KEY = "/HKLM/SOFTWARE/Microsoft/Windows NT/CurrentVersion/AeDebug/%s"
previous_debugger_values = {}
def do_text_results_differ(self, expected_text, actual_text):
# Sanity was restored in WK2, so we don't need this hack there.
if self.get_option('webkit_test_runner'):
return ApplePort.do_text_results_differ(self, expected_text, actual_text)
# This is a hack (which dates back to ORWT).
# Windows does not have an EDITING DELEGATE, so we strip any EDITING DELEGATE
# messages to make more of the tests pass.
# It's possible more of the ports might want this and this could move down into WebKitPort.
delegate_regexp = re.compile("^EDITING DELEGATE: .*?\n", re.MULTILINE)
expected_text = delegate_regexp.sub("", expected_text)
actual_text = delegate_regexp.sub("", actual_text)
return expected_text != actual_text
def default_baseline_search_path(self):
name = self._name.replace('-wk2', '')
if name.endswith(self.FUTURE_VERSION):
fallback_names = [self.port_name]
else:
fallback_names = self.VERSION_FALLBACK_ORDER[self.VERSION_FALLBACK_ORDER.index(name):-1] + [self.port_name]
# FIXME: The AppleWin port falls back to AppleMac for some results. Eventually we'll have a shared 'apple' port.
if self.get_option('webkit_test_runner'):
fallback_names.insert(0, 'win-wk2')
fallback_names.append('mac-wk2')
# Note we do not add 'wk2' here, even though it's included in _skipped_search_paths().
# FIXME: Perhaps we should get this list from MacPort?
fallback_names.extend(['mac-lion', 'mac'])
return map(self._webkit_baseline_path, fallback_names)
def operating_system(self):
return 'win'
def show_results_html_file(self, results_filename):
self._run_script('run-safari', [abspath_to_uri(SystemHost().platform, results_filename)])
    # FIXME: webkitperl/httpd.pm installs /usr/lib/apache/libphp4.dll on cygwin automatically
# as part of running old-run-webkit-tests. That's bad design, but we may need some similar hack.
# We might use setup_environ_for_server for such a hack (or modify apache_http_server.py).
def _runtime_feature_list(self):
supported_features_command = [self._path_to_driver(), '--print-supported-features']
try:
output = self._executive.run_command(supported_features_command, error_handler=Executive.ignore_error)
except OSError, e:
_log.warn("Exception running driver: %s, %s. Driver must be built before calling WebKitPort.test_expectations()." % (supported_features_command, e))
return None
# Note: win/DumpRenderTree.cpp does not print a leading space before the features_string.
match_object = re.match("SupportedFeatures:\s*(?P<features_string>.*)\s*", output)
if not match_object:
return None
return match_object.group('features_string').split(' ')
# Note: These are based on the stock Cygwin locations for these files.
def _uses_apache(self):
return False
def _path_to_lighttpd(self):
return "/usr/sbin/lighttpd"
def _path_to_lighttpd_modules(self):
return "/usr/lib/lighttpd"
def _path_to_lighttpd_php(self):
return "/usr/bin/php-cgi"
def _driver_tempdir_for_environment(self):
return cygpath(self._driver_tempdir())
def test_search_path(self):
test_fallback_names = [path for path in self.baseline_search_path() if not path.startswith(self._webkit_baseline_path('mac'))]
return map(self._webkit_baseline_path, test_fallback_names)
def _ntsd_location(self):
possible_paths = [self._filesystem.join(os.environ['PROGRAMFILES'], "Windows Kits", "8.0", "Debuggers", "x86", "ntsd.exe"),
self._filesystem.join(os.environ['PROGRAMFILES'], "Windows Kits", "8.0", "Debuggers", "x64", "ntsd.exe"),
self._filesystem.join(os.environ['PROGRAMFILES'], "Debugging Tools for Windows (x86)", "ntsd.exe"),
self._filesystem.join(os.environ['ProgramW6432'], "Debugging Tools for Windows (x64)", "ntsd.exe"),
self._filesystem.join(os.environ['SYSTEMROOT'], "system32", "ntsd.exe")]
for path in possible_paths:
expanded_path = self._filesystem.expanduser(path)
if self._filesystem.exists(expanded_path):
_log.debug("Using ntsd located in '%s'" % path)
return expanded_path
return None
def create_debugger_command_file(self):
debugger_temp_directory = str(self._filesystem.mkdtemp())
command_file = self._filesystem.join(debugger_temp_directory, "debugger-commands.txt")
commands = ''.join(['.logopen /t "%s\\%s.txt"\n' % (cygpath(self.results_directory()), self.CRASH_LOG_PREFIX),
'.srcpath "%s"\n' % cygpath(self._webkit_finder.webkit_base()),
'!analyze -vv\n',
'~*kpn\n',
'q\n'])
self._filesystem.write_text_file(command_file, commands)
return command_file
def read_registry_string(self, key):
registry_key = self.POST_MORTEM_DEBUGGER_KEY % key
read_registry_command = ["regtool", "--wow32", "get", registry_key]
value = self._executive.run_command(read_registry_command, error_handler=Executive.ignore_error)
return value.rstrip()
def write_registry_string(self, key, value):
registry_key = self.POST_MORTEM_DEBUGGER_KEY % key
set_reg_value_command = ["regtool", "--wow32", "set", "-s", str(registry_key), str(value)]
rc = self._executive.run_command(set_reg_value_command, return_exit_code=True)
if rc == 2:
add_reg_value_command = ["regtool", "--wow32", "add", "-s", str(registry_key)]
rc = self._executive.run_command(add_reg_value_command, return_exit_code=True)
if rc == 0:
rc = self._executive.run_command(set_reg_value_command, return_exit_code=True)
if rc:
_log.warn("Error setting key: %s to value %s. Error=%ld." % (key, value, rc))
return False
# On Windows Vista/7 with UAC enabled, regtool will fail to modify the registry, but will still
# return a successful exit code. So we double-check here that the value we tried to write to the
# registry was really written.
if self.read_registry_string(key) != value:
_log.warn("Regtool reported success, but value of key %s did not change." % key)
return False
return True
def setup_crash_log_saving(self):
if '_NT_SYMBOL_PATH' not in os.environ:
_log.warning("The _NT_SYMBOL_PATH environment variable is not set. Crash logs will not be saved.")
return None
ntsd_path = self._ntsd_location()
if not ntsd_path:
_log.warning("Can't find ntsd.exe. Crash logs will not be saved.")
return None
# If we used -c (instead of -cf) we could pass the commands directly on the command line. But
# when the commands include multiple quoted paths (e.g., for .logopen and .srcpath), Windows
# fails to invoke the post-mortem debugger at all (perhaps due to a bug in Windows's command
# line parsing). So we save the commands to a file instead and tell the debugger to execute them
# using -cf.
command_file = self.create_debugger_command_file()
if not command_file:
return None
debugger_options = '"{0}" -p %ld -e %ld -g -noio -lines -cf "{1}"'.format(cygpath(ntsd_path), cygpath(command_file))
registry_settings = {'Debugger': debugger_options, 'Auto': "1"}
for key in registry_settings:
self.previous_debugger_values[key] = self.read_registry_string(key)
self.write_registry_string(key, registry_settings[key])
def restore_crash_log_saving(self):
for key in self.previous_debugger_values:
self.write_registry_string(key, self.previous_debugger_values[key])
def setup_test_run(self):
atexit.register(self.restore_crash_log_saving)
self.setup_crash_log_saving()
super(WinPort, self).setup_test_run()
def clean_up_test_run(self):
self.restore_crash_log_saving()
super(WinPort, self).clean_up_test_run()
def _get_crash_log(self, name, pid, stdout, stderr, newer_than, time_fn=None, sleep_fn=None, wait_for_log=True):
# Note that we do slow-spin here and wait, since it appears the time
# ReportCrash takes to actually write and flush the file varies when there are
# lots of simultaneous crashes going on.
# FIXME: Should most of this be moved into CrashLogs()?
time_fn = time_fn or time.time
sleep_fn = sleep_fn or time.sleep
crash_log = ''
crash_logs = CrashLogs(self.host, self.results_directory())
now = time_fn()
# FIXME: delete this after we're sure this code is working ...
_log.debug('looking for crash log for %s:%s' % (name, str(pid)))
deadline = now + 5 * int(self.get_option('child_processes', 1))
while not crash_log and now <= deadline:
# If the system_pid hasn't been determined yet, just try with the passed in pid. We'll be checking again later
system_pid = self._executive.pid_to_system_pid.get(pid)
            if system_pid is None:
break # We haven't mapped cygwin pid->win pid yet
crash_log = crash_logs.find_newest_log(name, system_pid, include_errors=True, newer_than=newer_than)
if not wait_for_log:
break
if not crash_log or not [line for line in crash_log.splitlines() if line.startswith('quit:')]:
sleep_fn(0.1)
now = time_fn()
if not crash_log:
return (stderr, None)
return (stderr, crash_log)
def look_for_new_crash_logs(self, crashed_processes, start_time):
"""Since crash logs can take a long time to be written out if the system is
under stress do a second pass at the end of the test run.
crashes: test_name -> pid, process_name tuple of crashed process
start_time: time the tests started at. We're looking for crash
logs after that time.
"""
crash_logs = {}
for (test_name, process_name, pid) in crashed_processes:
# Passing None for output. This is a second pass after the test finished so
# if the output had any logging we would have already collected it.
crash_log = self._get_crash_log(process_name, pid, None, None, start_time, wait_for_log=False)[1]
if crash_log:
crash_logs[test_name] = crash_log
return crash_logs
def find_system_pid(self, name, pid):
system_pid = int(pid)
# Windows and Cygwin PIDs are not the same. We need to find the Windows
# PID for our Cygwin process so we can match it later to any crash
# files we end up creating (which will be tagged with the Windows PID)
ps_process = self._executive.run_command(['ps', '-e'], error_handler=Executive.ignore_error)
for line in ps_process.splitlines():
tokens = line.strip().split()
try:
cpid, ppid, pgid, winpid, tty, uid, stime, process_name = tokens
if process_name.endswith(name):
self._executive.pid_to_system_pid[int(cpid)] = int(winpid)
if int(pid) == int(cpid):
system_pid = int(winpid)
break
except ValueError, e:
pass
return system_pid
# ==================================================================================================
from urllib.parse import urljoin
from scrapy import Request
from product_spider.items import RawData
from product_spider.utils.spider_mixin import BaseSpider
class AcanthusSpider(BaseSpider):
name = "acanthus"
    allowed_domains = ["acanthusresearch.com"]
start_urls = ["http://acanthusresearch.com/products/", ]
base_url = "http://www.acanthusresearch.com/"
def parse(self, response):
prd_urls = response.xpath('//ul[@class="products"]/li//div[@class="prod-detail"]//h2/a/@href').extract()
for prd_url in prd_urls:
yield Request(prd_url, callback=self.detail_parse)
next_page_url = response.xpath('//a[@class="next page-numbers"]/@href').get()
if next_page_url:
yield Request(next_page_url, callback=self.parse)
def detail_parse(self, response):
tmp_xpath = '//span[@class="spec" and contains(text(), {0!r})]/following-sibling::span//text()'
raw_mf = response.xpath(tmp_xpath.format("Molecular Formula")).extract()
en_name = response.xpath('//h1[contains(@class, "product_title")]/text()').get(default="").strip()
cas = response.xpath(tmp_xpath.format("CAS Number")).get(default="N/A").strip()
d = {
'brand': "acanthus",
'cat_no': response.xpath(tmp_xpath.format("Product Number")).get("").strip(),
'en_name': en_name,
            'prd_url': response.request.url,  # product detail URL
'cas': cas == "NA" and "N/A" or cas,
'mf': ''.join(raw_mf),
'mw': None,
'info1': response.xpath('//div[@class="tags"]/a/text()').get("").strip() or None,
'stock_info': "".join(
response.xpath('//div[@class="row"]//div[contains(@class, "stock-opt")]//text()').extract()).strip(),
'parent': response.xpath(tmp_xpath.format("Parent Drug")).get("").strip(),
'img_url': urljoin(self.base_url, response.xpath('//div[@class="row"]//img/@src').get()),
}
yield RawData(**d)
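# A minimal way to run this spider on its own, assuming the product_spider
# project is importable (project pipelines are not configured here):
if __name__ == '__main__':
    from scrapy.crawler import CrawlerProcess
    process = CrawlerProcess(settings={"LOG_LEVEL": "INFO"})
    process.crawl(AcanthusSpider)
    process.start()  # blocks until the crawl finishes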
# ==================================================================================================
import sys
import shlex
sys.path.append('..')
bamsnap_prog = "src/bamsnap.py"
from src import bamsnap
# import bamsnap
# bamsnap_prog = "bamsnap"
cmdlist = []
cmdlist.append("""
-bam ./data/test_SV1_softclipped_1.bam \
-title "Clipped read" \
-pos chr1:37775740 chr1:37775780 chr1:37775783 chr1:37775785 chr1:37775789 \
-out ./out/test_SV1-7_proc1 \
-bamplot coverage read \
-margin 100 \
-no_target_line \
-show_soft_clipped \
-read_color_by interchrom \
-zipout \
-save_image_only
""")
cmdlist.append("""
-bam ./data/test_SV1_softclipped_1.bam \
-title "Clipped read" \
-pos chr1:37775740 chr1:37775780 chr1:37775783 chr1:37775785 chr1:37775789 \
-out ./out/test_SV1-7_proc2 \
-bamplot coverage read \
-margin 100 \
-no_target_line \
-show_soft_clipped \
-read_color_by interchrom \
-zipout \
-process 2 \
-save_image_only
""")
def getopt(target_option):
    """Return the value(s) following `target_option` in sys.argv, joined by spaces."""
    flag = False
    value = ""
    for opt1 in sys.argv:
        if flag:
            if opt1[0] == '-':
                break
            else:
                value += ' ' + opt1
        if opt1 == target_option:
            flag = True
    return value.strip()
def test_run():
for cmd in cmdlist:
# cmd = cmdlist[-1]
cmd = bamsnap_prog + " " + cmd.strip()
sys.argv = shlex.split(cmd)
print(' '.join(sys.argv))
# print(cmd)
bamsnap.cli()
out = getopt('-out')
        assert bamsnap.util.is_exist(out + '.zip')
if __name__ == "__main__":
test_run()
# ==================================================================================================
#
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from riscv.EnvRISCV import EnvRISCV
from riscv.GenThreadRISCV import GenThreadRISCV
from base.Sequence import Sequence
class MainSequence(Sequence):
def generate(self, **kargs):
random_instructions = [
"ADDW##RISCV",
"SRLI#RV64I#RISCV",
"ADDI##RISCV",
"SLLI#RV64I#RISCV",
"LUI##RISCV",
]
ldstr_instructions = ["LD##RISCV", "SD##RISCV"]
addr_size = 48
alignment = 8
if self.getGlobalState("AppRegisterWidth") == 32:
random_instructions = [
"ADD##RISCV",
"SRLI#RV32I#RISCV",
"ADDI##RISCV",
"SLLI#RV32I#RISCV",
"LUI##RISCV",
]
ldstr_instructions = ["LW##RISCV", "SW##RISCV"]
addr_size = 32
alignment = 4
for _ in range(10):
for _ in range(self.random32(0, 5)):
self.genInstruction(self.choice(random_instructions))
(opt_value, opt_valid) = self.getOption("FlatMap")
rand_VA = 0
if opt_valid:
rand_VA = self.genVA(
Size=addr_size,
Align=alignment,
Type="D",
Bank="Default",
FlatMap=opt_value,
)
else:
rand_VA = self.genVA(Size=addr_size, Align=alignment, Type="D", Bank="Default")
self.notice("gen target VA={:#x}".format(rand_VA))
self.genInstruction(self.choice(ldstr_instructions), {"LSTarget": rand_VA})
MainSequenceClass = MainSequence
GenThreadClass = GenThreadRISCV
EnvClass = EnvRISCV
# ==================================================================================================
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Stubouts, mocks and fixtures for the test suite."""
import pickle
import random
import sys
import fixtures
import mock
from os_xenapi.client import session
from os_xenapi.client import XenAPI
from nova import test
from nova.virt.xenapi import fake
def stubout_session(test, cls, product_version=(5, 6, 2),
product_brand='XenServer', platform_version=(1, 9, 0),
**opt_args):
"""Stubs out methods from XenAPISession."""
test.stub_out('os_xenapi.client.session.XenAPISession._create_session',
lambda s, url: cls(url, **opt_args))
test.stub_out('os_xenapi.client.session.XenAPISession.'
'_get_product_version_and_brand',
lambda s: (product_version, product_brand))
test.stub_out('os_xenapi.client.session.XenAPISession.'
'_get_platform_version',
lambda s: platform_version)
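# Typical use inside a test case, sketched with one of the fake session
# classes defined below:
#
#     stubout_session(self, FakeSessionForVMTests)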
def _make_fake_vdi():
sr_ref = fake.get_all('SR')[0]
vdi_ref = fake.create_vdi('', sr_ref)
vdi_rec = fake.get_record('VDI', vdi_ref)
return vdi_rec['uuid']
class FakeSessionForVMTests(fake.SessionBase):
"""Stubs out a XenAPISession for VM tests."""
def host_call_plugin(self, _1, _2, plugin, method, _5):
        # Note: rstrip('.py') strips any trailing '.', 'p', or 'y' characters,
        # not just the extension, so remove the suffix explicitly.
        if plugin.endswith('.py'):
            plugin = plugin[:-3]
if plugin == 'glance' and method == 'download_vhd2':
root_uuid = _make_fake_vdi()
return pickle.dumps(dict(root=dict(uuid=root_uuid)))
else:
return (super(FakeSessionForVMTests, self).
host_call_plugin(_1, _2, plugin, method, _5))
def VM_start(self, _1, ref, _2, _3):
vm = fake.get_record('VM', ref)
if vm['power_state'] != 'Halted':
raise XenAPI.Failure(['VM_BAD_POWER_STATE', ref, 'Halted',
vm['power_state']])
vm['power_state'] = 'Running'
vm['is_a_template'] = False
vm['is_control_domain'] = False
vm['domid'] = random.randrange(1, 1 << 16)
return vm
def VM_start_on(self, _1, vm_ref, host_ref, _2, _3):
vm_rec = self.VM_start(_1, vm_ref, _2, _3)
vm_rec['resident_on'] = host_ref
def VDI_snapshot(self, session_ref, vm_ref, _1):
sr_ref = "fakesr"
return fake.create_vdi('fakelabel', sr_ref, read_only=True)
def SR_scan(self, session_ref, sr_ref):
pass
class ReplaceModule(fixtures.Fixture):
"""Replace a module with a fake module."""
def __init__(self, name, new_value):
self.name = name
self.new_value = new_value
def _restore(self, old_value):
sys.modules[self.name] = old_value
def setUp(self):
super(ReplaceModule, self).setUp()
old_value = sys.modules.get(self.name)
sys.modules[self.name] = self.new_value
self.addCleanup(self._restore, old_value)
class FakeSessionForVolumeTests(fake.SessionBase):
"""Stubs out a XenAPISession for Volume tests."""
def VDI_introduce(self, _1, uuid, _2, _3, _4, _5,
_6, _7, _8, _9, _10, _11):
valid_vdi = False
refs = fake.get_all('VDI')
for ref in refs:
rec = fake.get_record('VDI', ref)
if rec['uuid'] == uuid:
valid_vdi = True
if not valid_vdi:
raise XenAPI.Failure([['INVALID_VDI', 'session', self._session]])
class FakeSessionForVolumeFailedTests(FakeSessionForVolumeTests):
"""Stubs out a XenAPISession for Volume tests: it injects failures."""
def VDI_introduce(self, _1, uuid, _2, _3, _4, _5,
_6, _7, _8, _9, _10, _11):
# This is for testing failure
raise XenAPI.Failure([['INVALID_VDI', 'session', self._session]])
def PBD_unplug(self, _1, ref):
rec = fake.get_record('PBD', ref)
rec['currently-attached'] = False
def SR_forget(self, _1, ref):
pass
class FakeSessionForFailedMigrateTests(FakeSessionForVMTests):
def VM_assert_can_migrate(self, session, vmref, migrate_data,
live, vdi_map, vif_map, options):
raise XenAPI.Failure("XenAPI VM.assert_can_migrate failed")
def host_migrate_receive(self, session, hostref, networkref, options):
raise XenAPI.Failure("XenAPI host.migrate_receive failed")
def VM_migrate_send(self, session, vmref, migrate_data, islive, vdi_map,
vif_map, options):
raise XenAPI.Failure("XenAPI VM.migrate_send failed")
# FIXME(sirp): XenAPITestBase is deprecated, all tests should be converted
# over to use XenAPITestBaseNoDB
class XenAPITestBase(test.TestCase):
def setUp(self):
super(XenAPITestBase, self).setUp()
self.useFixture(ReplaceModule('XenAPI', fake))
fake.reset()
def stubout_get_this_vm_uuid(self):
def f(session):
vms = [rec['uuid'] for rec
in fake.get_all_records('VM').values()
if rec['is_control_domain']]
return vms[0]
self.stub_out('nova.virt.xenapi.vm_utils.get_this_vm_uuid', f)
class XenAPITestBaseNoDB(test.NoDBTestCase):
def setUp(self):
super(XenAPITestBaseNoDB, self).setUp()
self.useFixture(ReplaceModule('XenAPI', fake))
fake.reset()
@staticmethod
def get_fake_session(error=None):
fake_session = mock.MagicMock()
session.apply_session_helpers(fake_session)
if error is not None:
class FakeException(Exception):
details = [error, "a", "b", "c"]
fake_session.XenAPI.Failure = FakeException
fake_session.call_xenapi.side_effect = FakeException
return fake_session
| python |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# ----------------------------------------------------------------------------
# Port Scanner
# Copyright (c) 2015 brainelectronics.de
# Scharpf, Jonas
#
# All rights reserved.
#
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
import serial
import platform
from serial.tools import list_ports
import threading
import time
import sys
import datetime
import glob  # needed for the POSIX branch in find_port
class myThread(threading.Thread):
myThreadFlag = 0
def __init__(self, functionToCall, name="", *args):
threading.Thread.__init__(self)
self.name = name
self.functionToCall = functionToCall
self.functionArguments = args
def run(self):
        if self.name != "":
            print("Starting " + self.name)
time.sleep(1)
#newCom.print_working()
#newCom.find_port(9600, 1)
self.functionToCall(*self.functionArguments)
        if self.name != "":
            print(self.name + " done")
class Communication(object):
"""docstring for Communication"""
def __init__(self):
self.truePorts = []
def print_working(self):
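        # NOTE: relies on the module-level workingThread and newCom objects
        # created in the (currently commented-out) scanner block at the bottom
        # of this file.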
while not workingThread.myThreadFlag:
print("."),
time.sleep(1)
if workingThread.myThreadFlag:
newCom.printPorts()
def find_port(self, baud, timeout):
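        # NOTE: enumerate_serial_ports() (used on Windows) is assumed to be
        # defined elsewhere; it is not provided in this file.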
if platform.system() == 'Windows':
ports = enumerate_serial_ports()
elif platform.system() == 'Darwin':
ports = [i[0] for i in list_ports.comports()]
else:
ports = glob.glob("/dev/ttyUSB*") + glob.glob("/dev/ttyACM*") + glob.glob('/dev/ttyS*')
for port in ports:
try:
s = serial.Serial(port)
s.close()
self.truePorts.append(port)
except (OSError, serial.SerialException):
pass
#print(self.truePorts)
workingThread.myThreadFlag = 1
def printPorts(self):
print(self.truePorts)
class Connection(object):
"""docstring for Connection"""
def __init__(self):
super(Connection, self).__init__()
self.port = serial.Serial(
port="/dev/cu.wchusbserial620",
baudrate=9600,
timeout=3.0)
def sendCmd(self):
print "sendCmd", datetime.datetime.now()
self.port.write("n")
def readPort(self):
print "reading..."
doIt = True
while doIt:
            lineRead = self.port.readline().decode(errors="replace")
print (lineRead)
if len(lineRead) > 0:
print "done reading", datetime.datetime.now()
doIt = False
sys.exit()
if __name__=='__main__':
newConnection = Connection()
readThread = myThread(newConnection.readPort)
readThread.start()
time.sleep(5)
newConnection.sendCmd()
# newCom = Communication()
# # Create new threads
# workingThread = myThread(newCom.print_working)
# scanningThread = myThread(newCom.find_port, "Scan", 9600, 1)
# # Start new Threads
# workingThread.start()
# scanningThread.start()
# #print(threading.enumerate())
| python |
from unittest import TestCase
from approvaltests import approvals
class TestSubdirectories(TestCase):
def test_subdirectory(self) -> None:
approvals.verify("xxx")
| python |
import isdhic
import numpy as np
from isdhic import utils
from isdhic.core import take_time
from isdhic.model import Likelihood
from scipy import optimize
from test_params import random_pairs
class Logistic(isdhic.Logistic):
"""Logistic
Python implementation of Logistic likelihood.
"""
def log_prob(self):
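        """Return -sum_i log(1 + exp(alpha * (x_i - y_i))), i.e. the sum of
        log-sigmoids of the logistic contact restraints."""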
y, x = self.data, self.mock.get()
return - np.logaddexp(np.zeros(len(x)), self.alpha * (x-y)).sum()
def update_derivatives(self):
y, x = self.data, self.mock.get()
self.grad[...] = - self.alpha / (1 + np.exp(-self.alpha * (x-y)))
def log_prob(x, params, likelihood):
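    """Helper for the numerical gradient check below: set the coordinates,
    update the likelihood and return its log-probability."""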
params['coordinates'].set(x)
likelihood.update()
return likelihood.log_prob()
if __name__ == '__main__':
## create universe
universe = utils.create_universe(n_particles=1000, diameter=4.)
coords = isdhic.Coordinates(universe)
forces = isdhic.Forces(universe)
## create parameters
params = isdhic.Parameters()
## create contact data
n_data = 100
pairs = random_pairs(universe.n_particles, n_data)
data = np.random.random(n_data) * 10.
mock = isdhic.ModelDistances( pairs, 'contacts')
logistic = Logistic('contacts', data, mock, params=params)
logistic2 = isdhic.Logistic('contacts2', data, mock, params=params)
for param in (coords, forces, mock, logistic.steepness):
params.add(param)
mock.update(params)
with take_time('evaluating python version of logistic likelihood'):
lgp = logistic.log_prob()
    print('log_prob={0:.3e}'.format(lgp))
with take_time('evaluating cython version of logistic likelihood'):
lgp = logistic2.log_prob()
    print('log_prob={0:.3e}'.format(lgp))
with take_time('evaluating derivatives of python version'):
logistic.update_derivatives()
with take_time('evaluating derivatives of cython version'):
logistic2.update_derivatives()
forces.set(0.)
logistic.update_forces()
## numerical gradient
f = lambda x, params=params, likelihood=logistic: \
log_prob(x, params, likelihood)
x = coords.get().copy()
forces_num = optimize.approx_fprime(x, f, 1e-5)
    print('max discrepancy={0:.5e}, corr={1:.1f}'.format(
        np.fabs(forces.get()-forces_num).max(),
        np.corrcoef(forces.get(), forces_num)[0, 1]*100))
| python |
import os
import numpy as np
from PIL import Image
# import util
import cv2
import random
import torchvision.transforms as transforms
import torch
import torch.utils.data
import pyclipper
import Polygon as plg
from yacs.config import CfgNode as CN
from .bounding_box import BoxList
# from __main__ import opt
'''
def read_config_file(config_file):
f = open(config_file)
opt = CN.load_cfg(f)
return opt
cfg = read_config_file(opt.config_file)
train_data_dir = cfg.ADDRESS.DETETECTION.TRAIN_DATA_DIR
train_gt_dir = cfg.ADDRESS.DETETECTION.TRAIN_GT_DIR
'''
class ICDAR2013Dataset(torch.utils.data.Dataset):
CLASSES = (
"__background__",
"text"
)
    def __init__(self, cfg, use_difficult=False, transforms=None):
# data_dirs = [train_data_dir]
# gt_dirs = [train_gt_dir]
self.root = cfg.ADDRESS.DETETECTION.TRAIN_DATA_DIR # data_dirs
self.anno_dir = cfg.ADDRESS.DETETECTION.TRAIN_GT_DIR # gt_dirs
        self.keep_difficult = use_difficult
self.transforms = transforms
image_list = os.listdir(self.root)
self.ids = [image[:-4] for image in image_list]
self.id_to_img_map = {k: v for k, v in enumerate(self.ids)}
cls = ICDAR2013Dataset.CLASSES
self.class_to_ind = dict(zip(cls, range(len(cls))))
def __len__(self):
return len(self.ids)
def get_groundtruth(self, index):
img_id = self.ids[index]
gt_path = os.path.join(self.anno_dir, 'gt_' + img_id + '.txt')
anno = self._preprocess_annotation(gt_path)
return anno
def _preprocess_annotation(self, gt_path):
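        # Each ground-truth line is expected as
        # "x1,y1,x2,y2,x3,y3,x4,y4,transcription": eight corner coordinates
        # of the text quadrilateral followed by the transcription.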
boxes = []
gt_classes = []
difficult_boxes = []
gt_list = open(gt_path, 'r', encoding='utf-8').readlines()
for gt_ele in gt_list:
gt_ele = gt_ele.replace('\n', '').replace('\ufeff', '')
gt = gt_ele.split(',')
if len(gt) > 1:
gt_ind = np.array(gt[:8], dtype=np.float32)
gt_ind = np.array(gt_ind, dtype=np.int32)
words = gt[8]
gt_ind = gt_ind.reshape(4, 2)
xs = gt_ind[:, 0].reshape(-1)
ys = gt_ind[:, 1].reshape(-1)
xmin = np.min(xs)
xmax = np.max(xs)
ymin = np.min(ys)
ymax = np.max(ys)
boxes.append([xmin, ymin, xmax, ymax])
gt_classes.append(self.class_to_ind['text'])
difficult_boxes.append(0)
# size = target.find("size")
# im_info = tuple(map(int, (size.find("height").text, size.find("width").text)))
res = {
"boxes": torch.tensor(boxes, dtype=torch.float32),
"labels": torch.tensor(gt_classes),
"difficult": torch.tensor(difficult_boxes),
"im_info": None,
}
return res
def __getitem__(self, index):
img_id = self.ids[index]
im_path = os.path.join(self.root, img_id + '.jpg')
img = Image.open(im_path).convert("RGB")
im = cv2.imread(im_path)
'''
try:
img = Image.open(im_path).convert("RGB")
im = cv2.imread(im_path)
except Exception as e:
print(im_path)
raise
'''
anno = self.get_groundtruth(index)
anno["im_info"] = [im.shape[0], im.shape[1]]
height, width = anno["im_info"]
target = BoxList(anno["boxes"], (width, height), mode="xyxy")
target.add_field("labels", anno["labels"])
target.add_field("difficult", anno["difficult"])
target = target.clip_to_image(remove_empty=True)
if self.transforms is not None:
img, target = self.transforms(img, target)
return img, target, index
def map_class_id_to_class_name(self, class_id):
return ICDAR2013Dataset.CLASSES[class_id]
class ICDAR2015TRAIN(torch.utils.data.Dataset):
CLASSES = (
"__background__ ",
"text"
)
def __init__(self, cfg, use_difficult=False, transforms=None):
        self.root = cfg.ADDRESS.DETETECTION.TRAIN_DATA_DIR  # data_dir
self.anno_dir = cfg.ADDRESS.DETETECTION.TRAIN_GT_DIR # anno_dir
self.keep_difficult = use_difficult
self.transforms = transforms
image_list = os.listdir(self.root)
self.ids = [image[:-4] for image in image_list]
self.id_to_img_map = {k: v for k, v in enumerate(self.ids)}
cls = ICDAR2015TRAIN.CLASSES
self.class_to_ind = dict(zip(cls, range(len(cls))))
def __getitem__(self, index):
img_id = self.ids[index]
im_path = os.path.join(self.root, img_id + '.jpg')
img = Image.open(im_path).convert("RGB")
im = cv2.imread(im_path)
anno = self.get_groundtruth(index)
anno["im_info"] = [im.shape[0], im.shape[1]]
height, width = anno["im_info"]
target = BoxList(anno["boxes"], (width, height), mode="xyxy")
target.add_field("labels", anno["labels"])
target.add_field("difficult", anno["difficult"])
target = target.clip_to_image(remove_empty=True)
if self.transforms is not None:
img, target = self.transforms(img, target)
return img, target, index
def __len__(self):
return len(self.ids)
def get_groundtruth(self, index):
img_id = self.ids[index]
# anno = ET.parse(self._annopath % img_id).getroot()
gt_path = os.path.join(self.anno_dir, 'gt_' + img_id + '.txt')
anno = self._preprocess_annotation(gt_path)
return anno
def _preprocess_annotation(self, gt_path):
boxes = []
gt_classes = []
difficult_boxes = []
# TO_REMOVE = 1
gt_list = open(gt_path, 'r', encoding='utf-8').readlines()
for gt_ele in gt_list:
gt_ele = gt_ele.replace('\n', '').replace('\ufeff', '')
gt = gt_ele.split(',')
if len(gt) > 1:
gt_ind = np.array(gt[:8], dtype=np.float32)
gt_ind = np.array(gt_ind, dtype=np.int32)
words = gt[8]
gt_ind = gt_ind.reshape(4, 2)
xs = gt_ind[:, 0].reshape(-1)
ys = gt_ind[:, 1].reshape(-1)
xmin = np.min(xs)
xmax = np.max(xs)
ymin = np.min(ys)
ymax = np.max(ys)
boxes.append([xmin, ymin, xmax, ymax])
gt_classes.append(self.class_to_ind['text'])
difficult_boxes.append(0)
# size = target.find("size")
# im_info = tuple(map(int, (size.find("height").text, size.find("width").text)))
res = {
"boxes": torch.tensor(boxes, dtype=torch.float32),
"labels": torch.tensor(gt_classes),
"difficult": torch.tensor(difficult_boxes),
"im_info": None,
}
return res
def get_img_info(self, index):
img_id = self.ids[index]
im_path = os.path.join(self.root, img_id + '.jpg')
# img = Image.open(im_path).convert("RGB")
im = cv2.imread(im_path)
# anno = self.get_groundtruth(index)
# anno["im_info"] = [im.shape[0], im.shape[1]]
return {"height": im.shape[0], "width": im.shape[1]}
def map_class_id_to_class_name(self, class_id):
return ICDAR2015TRAIN.CLASSES[class_id] | python |
# TODO: set first card in the pile
# Check for illegal move on the client side itself.
from Cards import Card, cards
import random
class Game:
def __init__(self, id):
# Which player's turn is it? Initially player 1
self.turn = 0
# Are both players connected?
self.ready = False
# game ID
self.id = id
# deck
self.deck = cards
random.shuffle(self.deck)
# player 1 cards
self.p1Cards = self.deck[0:7]
# player 2 cards
self.p2Cards = self.deck[7:14]
# In UNO only the last move matters
self.lastMove = self.deck[14]
# 7 distributed to each player + 1 on top of pile
self.numCardsAssigned = 15
# Two players
self.wins = [0,0]
def getLastMove(self):
return self.lastMove
def play(self, player, move: Card):
"""
@Param: player- which player's move is this?
No error checking in this function. Implement before.
"""
if move.ability != None:
"""
In case the move has an ability, the turn is retained. No need to switch turns.
"""
if move.ability == "d2":
if player == 0:
self.p2Cards.append(self.deck[self.numCardsAssigned])
self.p2Cards.append(self.deck[self.numCardsAssigned + 1])
else:
self.p1Cards.append(self.deck[self.numCardsAssigned])
self.p1Cards.append(self.deck[self.numCardsAssigned + 1])
self.numCardsAssigned += 2
# Other abilities simply retain the turn. No need for special checking
else:
self.turn = (player + 1) % 2
try:
if player == 0:
index = self.findCard(move, player)
if index != None: del self.p1Cards[index]
else:
index = self.findCard(move, player)
if index != None: del self.p2Cards[index]
        except Exception as e:
            print("ran into error while playing move:", e)
self.lastMove = move
def connected(self):
return self.ready
def findCard(self, card: Card, player):
listOfCards = ""
if player == 0:
listOfCards = self.p1Cards
else:
listOfCards = self.p2Cards
for index in range(0, len(listOfCards)):
if listOfCards[index] == card:
return index
return None
def draw(self, player):
"""
@Param: player- which player's move is this?
No error checking in this function. Implement before.
"""
if player == 0:
self.p1Cards.append(self.deck[self.numCardsAssigned])
else:
self.p2Cards.append(self.deck[self.numCardsAssigned])
self.numCardsAssigned += 1
| python |
import sys, os
from tqdm import tqdm
import numpy as np
sys.path.append('../')
from torch.utils.data import Dataset
import pandas as pd
from hateXplain.Preprocess.dataCollect import collect_data,set_name
from sklearn.model_selection import train_test_split
from os import path
from gensim.models import KeyedVectors
import pickle
import json
class Vocab_own():
def __init__(self,dataframe, model):
self.itos={}
self.stoi={}
self.vocab={}
self.embeddings=[]
self.dataframe=dataframe
self.model=model
### load embedding given a word and unk if word not in vocab
### input: word
### output: embedding,word or embedding for unk, unk
def load_embeddings(self,word):
try:
return self.model[word],word
except KeyError:
return self.model['unk'],'unk'
### create vocab,stoi,itos,embedding_matrix
### input: **self
### output: updates class members
def create_vocab(self):
count=1
for index,row in tqdm(self.dataframe.iterrows(),total=len(self.dataframe)):
for word in row['Text']:
vector,word=self.load_embeddings(word)
try:
self.vocab[word]+=1
except KeyError:
if(word=='unk'):
print(word)
self.vocab[word]=1
self.stoi[word]=count
self.itos[count]=word
self.embeddings.append(vector)
count+=1
self.vocab['<pad>']=1
self.stoi['<pad>']=0
self.itos[0]='<pad>'
self.embeddings.append(np.zeros((300,), dtype=float))
self.embeddings=np.array(self.embeddings)
print(self.embeddings.shape)
def encodeData(dataframe,vocab,params):
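    # Turn each row into a (tokens, attention, label, post_id) tuple; with
    # bert_tokens the raw text is kept and tokenized later, otherwise words
    # are mapped to vocabulary indices (unknown words fall back to 'unk').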
tuple_new_data=[]
for index,row in tqdm(dataframe.iterrows(),total=len(dataframe)):
if(params['bert_tokens']):
tuple_new_data.append((row['Text'],row['Attention'],row['Label'], row['Post_id']))
else:
list_token_id=[]
for word in row['Text']:
try:
index=vocab.stoi[word]
except KeyError:
index=vocab.stoi['unk']
list_token_id.append(index)
tuple_new_data.append((list_token_id,row['Attention'],row['Label'], row['Post_id']))
return tuple_new_data
def createDatasetSplit(params):
filename=set_name(params)
if path.exists(filename):
##### REMOVE LATER ######
#dataset=collect_data(params)
pass
else:
dataset=collect_data(params)
if (path.exists(filename[:-7])):
with open(filename[:-7]+'/train_data.pickle', 'rb') as f:
X_train = pickle.load(f)
with open(filename[:-7]+'/val_data.pickle', 'rb') as f:
X_val = pickle.load(f)
with open(filename[:-7]+'/test_data.pickle', 'rb') as f:
X_test = pickle.load(f)
if(params['bert_tokens']==False):
with open(filename[:-7]+'/vocab_own.pickle', 'rb') as f:
vocab_own=pickle.load(f)
else:
if(params['bert_tokens']==False):
word2vecmodel1 = KeyedVectors.load("Data/word2vec.model")
vector = word2vecmodel1['easy']
assert(len(vector)==300)
dataset= pd.read_pickle(filename)
#X_train_dev, X_test= train_test_split(dataset, test_size=0.1, random_state=1,stratify=dataset['Label'])
#X_train, X_val= train_test_split(X_train_dev, test_size=0.11, random_state=1,stratify=X_train_dev['Label'])
with open('Data/post_id_divisions.json', 'r') as fp:
post_id_dict=json.load(fp)
X_train=dataset[dataset['Post_id'].isin(post_id_dict['train'])]
X_val=dataset[dataset['Post_id'].isin(post_id_dict['val'])]
X_test=dataset[dataset['Post_id'].isin(post_id_dict['test'])]
if(params['bert_tokens']):
vocab_own=None
vocab_size =0
padding_idx =0
else:
vocab_own=Vocab_own(X_train,word2vecmodel1)
vocab_own.create_vocab()
padding_idx=vocab_own.stoi['<pad>']
vocab_size=len(vocab_own.vocab)
X_train=encodeData(X_train,vocab_own,params)
X_val=encodeData(X_val,vocab_own,params)
X_test=encodeData(X_test,vocab_own,params)
print("total dataset size:", len(X_train)+len(X_val)+len(X_test))
os.mkdir(filename[:-7])
with open(filename[:-7]+'/train_data.pickle', 'wb') as f:
pickle.dump(X_train, f)
with open(filename[:-7]+'/val_data.pickle', 'wb') as f:
pickle.dump(X_val, f)
with open(filename[:-7]+'/test_data.pickle', 'wb') as f:
pickle.dump(X_test, f)
if(params['bert_tokens']==False):
with open(filename[:-7]+'/vocab_own.pickle', 'wb') as f:
pickle.dump(vocab_own, f)
if(params['bert_tokens']==False):
return X_train,X_val,X_test,vocab_own
else:
return X_train,X_val,X_test
| python |
#!/usr/bin/env python
segments = 200
r = 30000
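# Emit "x y" pairs tracing a square wave: y = r/2 on the outer quarters of
# the x range and y = -r/2 on the middle half.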
for x in range(-r // 2, r // 2, r // segments):
    if x < -r // 4 or x > r // 4:
        y = r // 2
    else:
        y = -r // 2
print(str(x) + " " + str(y)) | python |
def func(*args, **kwargs):
print(args)
print(kwargs)
idade = kwargs.get('idade')
    if idade is not None:
        print(idade)
    else:
        print('Could not find the age.')
lista = [1, 2, 3, 4, 5]
lista2 = [10, 20, 30, 40, 50]
func(*lista, *lista2, nome='Luiz', sobrenome='Miranda')
| python |
import discord
import datetime
import random
import os
import re
import sys
import time
import asyncio
import json
import hashlib
import sqlite3
import struct
from urllib.request import *
from urllib.error import *
current_time_min = lambda: int(round(time.time() / 60))
SELF_BOT_MEMBER = None
SELF_BOT_SERVER = None
db_mng = None
client = discord.Client()
shutdown_watch_running = False
running_State = True
debug_mode = False
class ServerDatabase:
    """Stores and obtains friend codes using an SQLite 3 database."""
    global debug_mode
    global current_time_min
def __init__(self):
self.recover = sys.argv
self.conn = sqlite3.connect('data/fc.sqlite')
print('Addon "{}" loaded\n'.format(self.__class__.__name__))
def __del__(self):
global running_State
self.conn.commit()
self.conn.close()
print('Addon "{}" unloaded\n'.format(self.__class__.__name__))
if (running_State and not debug_mode):
print("Unexpected interpreter exit at {}, rebooting.".format(str(datetime.datetime.now())))
os.execv(sys.executable, ['python3'] + self.recover)
# based on https://github.com/megumisonoda/SaberBot/blob/master/lib/saberbot/valid_fc.rb
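    # A friend code is a 40-bit value: the low 32 bits are the principal ID,
    # the high 8 bits a checksum (the first byte of SHA-1 over the
    # little-endian principal ID, shifted right by one bit).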
def verify_fc(self, fc):
try:
fc = int(fc.replace('-', ''))
except ValueError:
return None
if fc > 0x7FFFFFFFFF:
return None
principal_id = fc & 0xFFFFFFFF
checksum = (fc & 0xFF00000000) >> 32
return (fc if hashlib.sha1(struct.pack('<L', principal_id)).digest()[0] >> 1 == checksum else None)
def fc_to_string(self, fc):
fc = str(fc).rjust(12, '0')
return "{}-{}-{}".format(fc[0:4], fc[4:8], fc[8:12])
async def warn_set(self, memberid, value):
c = self.conn.cursor()
if(value == 0):
c.execute('DELETE FROM usr_warns WHERE userid = ?', (int(memberid),))
return
rows = c.execute("SELECT * FROM usr_warns WHERE userid = ?", (int(memberid),))
for row in rows:
c.execute("UPDATE usr_warns SET warns = ? WHERE userid = ?", (value, int(memberid)))
return
c.execute('INSERT INTO usr_warns VALUES (?,?)', (int(memberid), int(value)))
async def fact_add(self, memberid, fact):
c = self.conn.cursor()
c.execute('INSERT INTO facts VALUES (?,?)', (int(memberid), fact))
async def fact_delete(self, id):
c = self.conn.cursor()
c.execute("DELETE FROM facts WHERE rowid = ?", (id,))
async def fact_deleteuser(self, memberid):
c = self.conn.cursor()
c.execute("DELETE FROM facts WHERE userid = ?", (int(memberid),))
async def fact_userreg(self, memberid):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM facts WHERE userid = ?", (int(memberid),))
for row in rows:
return True
return False
async def fact_get(self, withid):
c = self.conn.cursor()
rows = []
if (withid == True):
rows = c.execute("SELECT rowid,* FROM facts")
else:
rows = c.execute("SELECT * FROM facts")
ret = []
for row in rows:
ret.append(row)
return ret
async def fact_get_byrow(self, row_id):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM facts WHERE rowid = ?", (row_id,))
ret = []
for row in rows:
ret.append(row)
return ret
async def fact_getuser(self, memberid):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM facts WHERE userid = ?", (int(memberid),))
for row in rows:
return row[1]
return None
async def warn_get(self, memberid):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM usr_warns WHERE userid = ?", (int(memberid),))
for row in rows:
return int(row[1])
return 0
async def warn_get_all(self):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM usr_warns")
return rows
async def schedule_add(self, messageid, dest_id, amountmin, text):
c = self.conn.cursor()
c.execute('INSERT INTO sched_msg VALUES (?,?,?,?,?)', (int(messageid), int(dest_id), current_time_min(), amountmin, text))
async def schedule_get(self):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM sched_msg")
return rows
async def schedule_del(self, messageid):
c = self.conn.cursor()
c.execute("DELETE FROM sched_msg WHERE botmsgid = ?", (int(messageid),))
async def schedule_del_confirm(self, messageid):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM sched_msg WHERE botmsgid = ?", (int(messageid),))
return_code = -1
for row in rows:
return_code = 1
c.execute("DELETE FROM sched_msg WHERE botmsgid = ?", (int(messageid),))
return return_code
async def mute_apply(self, memberid, amountmin):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM usr_mute WHERE userid = ?", (int(memberid),))
for row in rows:
c.execute("UPDATE usr_mute SET start = ?, amount = ? WHERE userid = ?", (current_time_min(), amountmin, int(memberid)))
return
c.execute('INSERT INTO usr_mute VALUES (?,?,?)', (int(memberid),current_time_min(), amountmin))
async def mute_get(self):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM usr_mute")
return rows
async def mute_remove(self, memberid):
c = self.conn.cursor()
c.execute('DELETE FROM usr_mute WHERE userid = ?', (int(memberid),))
async def bug_add(self, authorid, explain, botmessage):
c = self.conn.cursor()
c.execute('INSERT INTO bugs VALUES (?,?,?,?)', (int(authorid), explain, int(botmessage.id), 1))
async def bug_close(self, botmessageid):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM bugs WHERE botmsgid = ?", (int(botmessageid),))
for row in rows:
if(row[3] == 1):
c.execute("UPDATE bugs SET state = ? WHERE botmsgid = ?", (0, int(botmessageid)))
return row
else:
return []
return []
async def bug_count(self):
        c = self.conn.cursor()
        cursor = c.execute("SELECT COUNT(*) FROM bugs")
        (tot_t,) = cursor.fetchone()
        cursor = c.execute("SELECT COUNT(*) FROM bugs WHERE state = 0")
        (clo_t,) = cursor.fetchone()
ope_t = tot_t - clo_t
return [ope_t, clo_t]
# Based on kurisu from homebrew discord server https://github.com/ihaveamac/Kurisu
async def fcregister(self, message, fc, notify):
"""Add your friend code."""
fc = self.verify_fc(fc)
if not fc:
await client.send_message(message.channel, '{}, that\'s an invalid friend code.'.format(message.author.name))
return
if (notify.lower() == "true"):
notify = True
elif (notify.lower() == "false"):
notify = False
else:
await client.send_message(message.channel, '{}, invalid command syntax, `(notify)` must be `true` or `false`.'.format(message.author.name))
return
c = self.conn.cursor()
rows = c.execute('SELECT * FROM friend_codes WHERE userid = ?', (int(message.author.id),))
for row in rows:
# if the user already has one, this prevents adding another
await client.send_message(message.channel, "{}, please delete your current friend code with `@HyperMario fcdelete` before adding another.".format(message.author.name))
return
c.execute('INSERT INTO friend_codes VALUES (?,?,?)', (int(message.author.id), fc, notify))
if notify:
info_str = ". You will be notified whenever someone requests your code."
else:
info_str = ""
await client.send_message(message.channel, "{}, your friend code has been added to the database: `{}`{}".format(message.author.name, self.fc_to_string(fc), info_str))
self.conn.commit()
async def fcquery(self, message):
global SELF_BOT_MEMBER
global SELF_BOT_SERVER
"""Get other user's friend code. You must have one yourself in the database."""
c = self.conn.cursor()
member = None
for m in message.mentions:
if m != SELF_BOT_MEMBER:
member = m
if not member:
await client.send_message(message.channel, "{}, no user or invalid user specified.".format(message.author.name))
return
rows = c.execute('SELECT * FROM friend_codes WHERE userid = ?', (int(message.author.id),))
for row in rows:
# assuming there is only one, which there should be
rows_m = c.execute('SELECT * FROM friend_codes WHERE userid = ?', (int(member.id),))
for row_m in rows_m:
if (member.name[-1:] == "s"):
suffix = "\'"
else:
suffix = "\'s"
await client.send_message(message.channel, "{}{} friend code is `{}`".format(member.name, suffix, self.fc_to_string(row_m[1])))
try:
if (row_m[2]):
await client.send_message(member, "{} in {} server has queried your friend code! Their code is `{}`.".format(message.author.name, SELF_BOT_SERVER.name, self.fc_to_string(row[1])))
except discord.errors.Forbidden:
pass # don't fail in case user has DMs disabled for this server, or blocked the bot
return
await client.send_message(message.channel, "{}, looks like {} has no friend code registered.".format(message.author.name, member.name))
return
await client.send_message(message.channel, "{}, you need to register your own friend code with `@HyperMario fcregister` before getting others.".format(message.author.name))
async def fcdelete(self, message):
#Delete your friend code.
if (type(message) is discord.Message):
c = self.conn.cursor()
c.execute('DELETE FROM friend_codes WHERE userid = ?', (int(message.author.id),))
await client.send_message(message.channel, "{}, your friend code has been removed from database.".format(message.author.name))
self.conn.commit()
elif (type(message) is discord.Member):
c = self.conn.cursor()
c.execute('DELETE FROM friend_codes WHERE userid = ?', (int(message.id),))
self.conn.commit()
async def get_cookie(self, user):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM cookies WHERE userid = ?", (int(user),))
for row in rows:
return row[1]
return 0
async def add_cookie(self, user, amount):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM cookies WHERE userid = ?", (int(user),))
for row in rows:
calc = row[1] + amount
if (calc < 0 ):
calc = 0
c.execute("UPDATE cookies SET amount = ? WHERE userid = ?", (calc, user))
return
if (amount < 0):
amount = 0
c.execute('INSERT INTO cookies VALUES (?,?)', (int(user), amount))
return
async def set_cookie(self, user, amount):
c = self.conn.cursor()
rows = c.execute("SELECT * FROM cookies WHERE userid = ?", (int(user),))
if (amount <= 0):
amount = 0
for row in rows:
c.execute("UPDATE cookies SET amount = ? WHERE userid = ?", (amount, user))
return
c.execute('INSERT INTO cookies VALUES (?,?)', (int(user), amount))
return
async def top_ten_cookie(self):
c = self.conn.cursor()
return c.execute("SELECT * FROM cookies ORDER BY amount DESC limit 10")
async def delete_cookie(self, user):
c = self.conn.cursor()
c.execute('DELETE FROM cookies WHERE userid = ?', (int(user),))
return
def get_retry_times ():
try:
with open("data/retry.flag", "r") as f:
data = f.read()
ret = int(data)
return ret
except:
set_retry_times(0)
return 0
def set_retry_times(amount):
with open("data/retry.flag", "w") as f:
f.write(str(amount))
def is_channel(message, ch_id):
return (message.channel.id == ch_id)
def get_role(roleid):
global SELF_BOT_SERVER
roles = SELF_BOT_SERVER.roles
for rol in roles:
if(rol.id == roleid):
return rol
return None
def get_from_mention(mention):
global SELF_BOT_SERVER
global SELF_BOT_MEMBER
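    # Strip everything but digits from a mention like "<@!123456789>" to get the id.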
    memberid = re.sub(r"\D", "", mention)
return client.get_server(SERVER_ID()).get_member(memberid)
def int_to_emoji(num):
num = int(num)
    eml = NUMBER_EMOJI()
if (num == 0):
return eml[0]
retstr = ""
while (num != 0):
retstr = eml[num % 10] + retstr
num = int(num/10)
return retstr
def int_to_rps(num):
num = num % 3
if (num == 0):
return ":punch:"
elif (num == 1):
return ":hand_splayed:"
return ":v:"
async def game_numberguess(user, machine, diff, message):
global db_mng
mach1 = int_to_emoji(int(machine/10))
mach2 = int_to_emoji(machine % 10)
i = 0
game_message = await client.send_message(message.channel, "{}, you guessed: {} , I guessed: :question::question:".format(message.author.name, int_to_emoji(user)))
randsec = random.randint(1, 3)
while (i < randsec):
await asyncio.sleep(1)
i = i + 1
game_message = await client.edit_message(game_message, "{}, you guessed: {} , I guessed: {}:question:".format(message.author.name, int_to_emoji(user), mach1))
    randsec = i + random.randint(1, 3)  # wait a further 1-3 seconds before revealing the second digit
    while (i < randsec):
        await asyncio.sleep(1)
        i = i + 1
game_message = await client.edit_message(game_message, "{}, you guessed: {} , I guessed: {}{}".format(message.author.name, int_to_emoji(user), mach1, mach2))
if (user == machine):
if diff == 0:
game_message = await client.edit_message(game_message, "{}, you guessed: {} , I guessed: {}{} . **You won 10 <:yoshicookie:416533826869657600>!**".format(message.author.name, int_to_emoji(user), mach1, mach2))
await db_mng.add_cookie(message.author.id, 10)
elif diff == 1:
game_message = await client.edit_message(game_message, "{}, you guessed: {} , I guessed: {}{} . **You won 50 <:yoshicookie:416533826869657600>!**".format(message.author.name, int_to_emoji(user), mach1, mach2))
await db_mng.add_cookie(message.author.id, 50)
elif diff == 2:
game_message = await client.edit_message(game_message, "{}, you guessed: {} , I guessed: {}{} . **You won 100 <:yoshicookie:416533826869657600>!**".format(message.author.name, int_to_emoji(user), mach1, mach2))
await db_mng.add_cookie(message.author.id, 100)
else:
game_message = await client.edit_message(game_message, "{}, you guessed: {} , I guessed: {}{} . **You lost 1 <:yoshicookie:416533826869657600>.**".format(message.author.name, int_to_emoji(user), mach1, mach2))
await db_mng.add_cookie(message.author.id, -1)
return
async def game_rps(bot_ch, usr_ch, message):
##0 - rock; 1 - paper; 2 - scissors
state = 0 #0 lose; 1 match; 2 win
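    # The user wins when their choice is one step ahead of the bot's in the
    # rock -> paper -> scissors cycle, i.e. (bot % 3) == (usr - 1) % 3.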
bot_ch = bot_ch + 3
usr_ch = usr_ch + 3
winstr = "**You lost 1 <:yoshicookie:416533826869657600>.**"
if (bot_ch == usr_ch):
state = 1
winstr = "**That's a match.**"
elif (bot_ch % 3) == (usr_ch - 1) % 3:
state = 2
winstr = "**You won 2 <:yoshicookie:416533826869657600>.**"
await db_mng.add_cookie(message.author.id, 2)
else:
await db_mng.add_cookie(message.author.id, -1)
await client.send_message(message.channel, "{}, your choice: {} , my choice: {} . {}".format(message.author.name, int_to_rps(usr_ch), int_to_rps(bot_ch), winstr))
return
async def game_coin(bot_ch, usr_ch, message):
choice_str = "head"
if (usr_ch == 1):
choice_str = "tails"
bot_str = "head"
if (bot_ch % 2 == 1):
bot_str = "tails"
if (bot_ch == 145):
await client.send_message(message.channel, "{}, you guessed: **{}** , the coin landed on its **side**. **How lucky! You won 500 <:yoshicookie:416533826869657600>.**".format(message.author.name, choice_str))
await db_mng.add_cookie(message.author.id, 500)
elif(bot_ch % 2 == usr_ch):
await client.send_message(message.channel, "{}, you guessed: **{}** , the coin landed on its **{}**. **You won 1 <:yoshicookie:416533826869657600>.**".format(message.author.name, choice_str, bot_str))
await db_mng.add_cookie(message.author.id, 1)
else:
await client.send_message(message.channel, "{}, you guessed: **{}** , the coin landed on its **{}**. **You lost 1 <:yoshicookie:416533826869657600>.**".format(message.author.name, choice_str, bot_str))
await db_mng.add_cookie(message.author.id, -1)
return
def help_array():
return {
"fcregister": ">@HyperMario fcregister (friendcode) (notify)\r\nAdds your friend code to the server database. If notify is \"true\", you will be notified whenever someone queries your friend code, otherwise set it to \"false\".",
"fcquery": ">@HyperMario fcquery (user)\r\nGets the friend code from the specified user (you need to have your own friend code registered). If the specified user has the notify option enabled, your friend code will be sent to them as well.",
"fcdelete": ">@HyperMario fcdelete\r\nRemoves your friend code from the server database.",
"ping": ">@HyperMario ping\r\nPings the bot.",
"membercount": ">@HyperMario membercount\r\nDisplays the member count of the server.",
"rules": ">@HyperMario rules\r\nShows the server rules.",
"getwarn": ">@HyperMario getwarn\nSends your warning amount in a DM.",
"getmute": ">@HyperMario getmute\nSends your muted time in a DM.",
"fact": ">@HyperMario fact (factID)\nDisplays a random fact. If factID is specified, the fact with that id will be displayed. (Use listfact to get all fact IDs.)",
"addfact": ">@HyperMario addfact (fact)\nAdds a fact (only one per user). The format is the following: base;opt1, opt2, etc; opt1, opt2, etc; etc... any instance of {} will be replaced by a random choice. You must have the same amount of {} as ; otherwise it won't work properly.\n\nExamples:\n{} is number {}; Mario, Luigi, Yoshi; NUMBER:1:3\nI {} {} {}; hate, love; cheese, apples, USER; :wink:, :weary:\n\nNUMBER:X:Y -> Random number between X and Y\nUSER -> Random server member.",
"delfact": ">@HyperMario delfact\nRemoves your own fact.",
"listfact": ">@HyperMario listfact\nDisplays all facts.",
"communities": ">@HyperMario communities\nShows the main CTGP-7 communities.",
"game": ">@HyperMario game (gamemode) (options)\nPlays a game.",
"report": "!report (Explanation)\nReports a bug with the given explanation. Can only be used in #bugs_discussion.",
"bugcount": ">@HyperMario bugcount\nShows the amount of open and closed bugs."
}
def staff_help_array():
return {
"say": ">@HyperMario say (channel/user) (text)\r\nSends a message in the specified channel or a DM if it is a user.",
"edit": ">@HyperMario edit (messageid) (text)\r\nEdits the specified message. Can only edit recent bot messages in the server.",
"release": ">@HyperMario release (version) (tag)\r\nAnnounces the release of the specified version (data taken from github) in #announcements. If (tag) is 1, it will tag @everyone (only tag everyone for major releases)",
"restart": ">@HyperMario restart\r\nRestarts the bot.",
"stop": ">@HyperMario stop\r\nStops the bot, once stopped is has to be manually started again from a terminal, so no way to start it from discord.",
"mute": ">@HyperMario mute (user) (amount)\r\nMutes an user for a certain amount. The amount can be m (minutes), h (hours), d (days) and y (years). For example: 2h, 12m, 7d, etc",
"unmute": ">@HyperMario unmute (user)\r\nUnmutes a muted user.",
"warn": ">@HyperMario warn (user) [Reason]\nGives a warning to an user. Reason is optional.",
"setwarn": ">@HyperMario setwarn (user) (amount) [Reason]\nSets the warning amount of an user. Reason is optional.",
"getwarn": ">@HyperMario getwarn\nGets all the warned users.",
"getmute": ">@HyperMario getmute\nGets all the muted users.",
"delfact": ">@HyperMario delfact (id)\nDeletes specified fact.",
"change_game": ">@HyperMario change_game\nChanges the current playing game to a new random one.",
"closebug": ">@HyperMario closebug (bugID) [Reason]\nCloses the specified bug with the specified reason.",
"schedule": ">@HyperMario schedule (channel/user) (time_amount) (text)\nSchedules a message to be sent in/to the channel/user specified after time_amount has passed. (Works the same way as mute time amount).",
"cancel_schedule": ">@HyperMario cancel_schedule (scheduleid)\nCancels the specified scheduled message. The schedule id can be obtained from the id of the message sent by the bot."
}
def game_help_array():
return {
"guessanumber": ">@HyperMario game guessanumber (easy/normal/hard) (number)\nGuess a number game.\n\neasy: Guess a number between 0 and 10 (Win: +10 yoshi cookies).\nnormal: Guess a number between 0 and 50 (Win: +50 yoshi cookies).\nhard: Guess a number between 0 and 99 (Win: +100 yoshi cookies).\nLose: -1 yoshi cookies.",
"rps": ">@HyperMario game rps (rock/paper/scissors)\nRock-Paper-Scissors.\n\nWin: +2 yoshi cookies.\nMatch: nothing.\nLose: -1 yoshi cookies.",
"coin": ">@HyperMario game coin (head/tails)\nFlip a coin.\n\nWin: +1 yoshi cookies.\nLose: -1 yoshi cookies.",
"showcookie": ">@HyperMario game showcookie\nShows your amount of yoshi cookies.",
"top10": ">@HyperMario game top10\nShows the top 10 users with the highest amount of yoshi cookies."
}
def staff_game_help_array():
return {
"showcookie":">@HyperMario game showcookie (user)\nShows the amount of yoshi cookies of the specified user.",
"setcookie": ">@HyperMario game setcookie (user) (amount)\nSets the amount of yoshi cookies of the specified user."
}
#All the ids
def ch_list():
return {
"ANN": "163072540061728768",
"STAFF": "382885324575211523",
"FRIEND": "163333095725072384",
"DOORSTEP": "339476078244397056",
"BOTCHAT": "324672297812099093",
"BUGS": "315921603756163082",
"BUG_REPORTS": "426318663327547392"
}
def NUMBER_EMOJI():
return [":zero:", ":one:", ":two:", ":three:", ":four:", ":five:", ":six:", ":seven:", ":eight:", ":nine:"]
def PLAYING_GAME():
return ["CTGP-Revolution", "CTGP-Universe", "CTGP-7", "Super Smash Bros. for Wii U", "Super Mario Galaxy", "Super Mario Galaxy 2", "Mario Kart 8 Deluxe", "Super Mario Universe", "Super Smash Bros. 5"]
def MUTEROLE_ID():
return "385544890030751754"
def SERVER_ID():
return "163070769067327488"
COMMUNITIES_TEXT = "```Here are the main CTGP-7 communities:\n\nCustom Tracks: 29-1800-5228-2361\nCustom Tracks, 200cc: 52-3127-4613-8641\nNormal Tracks: 02-5770-2485-4638\nNormal Tracks, 200cc: 54-0178-4815-8814\n\nMake sure you are in 0.17.1 or greater to play in those communities.```"
async def send_rules(user, newusr):
global client
try:
with open("data/rules.txt", "r") as f:
if (newusr):
await client.send_message(user, "Welcome to the CTGP-Revolution server! :3\nHere are the rules: ``` {} ```".format(f.read()))
else:
await client.send_message(user, "Here are the rules: ``` {} ```".format(f.read()))
except:
print("Failed opening rules file.")
async def shutdown_watch():
global db_mng
global client
global shutdown_watch_running
global running_State
if (shutdown_watch_running):
return
shutdown_watch_running = True
while True:
await asyncio.sleep(5)
if os.path.isfile("data/stop.flag"):
running_State = False
os.remove("data/stop.flag")
print("Manually stopping by terminal.")
del db_mng
await client.close()
with open("data/stopped.flag", "w") as f:
f.write("dummy")
try:
sys.exit(0)
except:
pass
async def parsetime(timestr):
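    """Parse a string like "30m", "2h", "7d" or "1y" into
    [total_minutes, amount, unit_name]; returns [-1, -1, " "] on bad input."""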
try:
basenum = int(timestr[0:-1])
unit = timestr[-1:]
except:
return [-1, -1, " "]
if(unit == "m"):
return [basenum, basenum, "minutes"]
elif(unit == "h"):
return [basenum * 60, basenum, "hours"]
elif(unit == "d"):
return [basenum * 60 * 24, basenum, "days"]
elif(unit == "y"):
return [basenum * 60 * 24 * 365, basenum, "years"]
else:
return [-1, -1, " "]
async def punish(member, amount):
global client
if(amount == 2):
try:
await client.send_message(member, "**CTGP-7 server:** You have been muted for 2 hours.")
except:
pass
await mute_user(member.id, 120)
elif(amount == 3):
try:
await client.send_message(member, "**CTGP-7 server:** You have been kicked and muted 7 days, you may join again.")
except:
pass
await mute_user(member.id, 7*24*60)
try:
await client.kick(member)
except:
pass
elif(amount >= 4):
try:
await client.send_message(member, "**CTGP-7 server:** You have been banned.")
except:
pass
try:
await client.ban(member, 7)
except:
pass
async def mute_user(memberid, amount):
global db_mng
global client
global SELF_BOT_SERVER
muted_user = get_from_mention(memberid)
await db_mng.mute_apply(muted_user.id, amount)
mute_role = get_role(MUTEROLE_ID())
await client.add_roles(muted_user, mute_role)
async def unmute_user(memberid):
global db_mng
global client
global SELF_BOT_SERVER
muted_user = get_from_mention(memberid)
await db_mng.mute_remove(muted_user.id)
mute_role = get_role(MUTEROLE_ID())
try:
await client.send_message(muted_user, "**CTGP-7 server:** You have been unmuted.")
except:
pass
await client.remove_roles(muted_user, mute_role)
def checkdestvalid(dest_id):
    channel_id = re.sub(r"\D", "", dest_id)
channel_obj = client.get_channel(channel_id)
if (channel_obj != None):
return channel_obj
else:
return get_from_mention(dest_id)
async def sayfunc(dest_id, text, channel):
    channel_id = re.sub(r"\D", "", dest_id)
channel_obj = client.get_channel(channel_id)
if (channel_obj != None):
await client.send_message(channel_obj, text)
await client.send_message(channel, "Message successfully sent in {}.".format(channel_obj.name))
else:
member_obj = get_from_mention(dest_id)
if (member_obj != None):
try:
await client.send_message(member_obj, text)
await client.send_message(channel, "Message successfully sent to {}.".format(member_obj.name))
except:
await client.send_message(channel, "Can't send message to member (not in the server or blocked the bot).")
else:
await client.send_message(channel, "Invalid channel or member specified.")
async def parse_fact(s1):
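    """Expand a fact template of the form "base;opt1,opt2;...": each {} in
    base is replaced by a random pick from the matching option list, with
    NUMBER:X:Y expanding to a random integer and USER to a random member name."""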
global SELF_BOT_SERVER
s2 = re.split("[;]", s1)
base = s2[0]
del s2[0]
final = []
for rep in s2:
final.append(re.split("[,]", rep))
for f in final:
id = random.randint(0, len(f) - 1)
f[id] = f[id].strip()
f[id] = f[id].replace("==", " ")
foundNum = 0
foundUsr = 0
while (foundNum != -1 or foundUsr != -1):
foundNum = f[id].find("NUMBER")
foundUsr = f[id].find("USER")
random.seed()
if (foundNum != -1):
special = f[id][foundNum:]
special = special.split()[0]
special = re.split("[:]", special)
try:
replacement = str(random.randint(int(special[1]),int(special[2])))
except:
replacement = ""
f[id] = f[id].replace(special[0]+":"+ special[1]+":"+special[2], replacement, 1)
elif (foundUsr != -1):
memberlist = list(SELF_BOT_SERVER.members)
replacement = memberlist[random.randint(0,len(memberlist) - 1)].name
                replacement = replacement.replace("USER", "user")
f[id] = f[id].replace("USER", replacement, 1)
base = base.replace("{}", f[id], 1)
return base
async def isfact_dynamic(s1):
s2 = re.split("[;]", s1)
if (len(s2) == 1):
return False
else:
return True
async def muted_task():
global db_mng
global current_time_min
while True:
await asyncio.sleep(60)
rows = await db_mng.mute_get()
for row in rows:
timeleft = (row[1] + row[2]) - current_time_min()
if(timeleft <= 0):
await unmute_user(str(row[0]))
tobedeleted = []
rows = await db_mng.schedule_get()
for row in rows:
timeleft = (row[2] + row[3]) - current_time_min()
if(timeleft <= 0):
tobedeleted.append(row[0])
staffchan = client.get_channel(ch_list()["STAFF"])
await sayfunc(str(row[1]), row[4], staffchan)
for delitm in tobedeleted:
await db_mng.schedule_del(delitm)
async def perform_game_change():
names = PLAYING_GAME()
name = names[random.randint(0, len(names) - 1)]
await client.change_presence(game=discord.Game(name=name))
return name
async def change_game():
while True:
await perform_game_change()
await asyncio.sleep(600)
@client.event
async def on_ready():
print("\n-------------------------\n")
global db_mng
global SELF_BOT_SERVER
global SELF_BOT_MEMBER
global debug_mode
if(os.path.isfile("debug.flag")):
print("Debug mode enabled.")
debug_mode = True
SELF_BOT_SERVER = client.get_server(SERVER_ID())
SELF_BOT_MEMBER = SELF_BOT_SERVER.get_member(client.user.id)
db_mng = ServerDatabase()
asyncio.ensure_future(shutdown_watch())
asyncio.ensure_future(muted_task())
asyncio.ensure_future(change_game())
print("Bot running: {}".format(str(datetime.datetime.now())))
print('Logged in as: {} in server: {}'.format(SELF_BOT_MEMBER.name,SELF_BOT_SERVER.name))
print('------\n')
set_retry_times(0)
@client.event
async def wait_until_login():
await client.change_presence(game=discord.Game(name='something goes here'))
@client.event
async def on_member_join(member):
global SELF_BOT_SERVER
global client
global db_mng
door_chan = SELF_BOT_SERVER.get_channel(ch_list()["DOORSTEP"])
await client.send_message(door_chan, "Everybody welcome {} to the server! Make sure to check the rules I've sent to you in a direct message.\nWe are now {} members.".format(member.mention, SELF_BOT_SERVER.member_count))
await send_rules(member, True)
rows = await db_mng.mute_get()
for row in rows:
if (row[0] == int(member.id)):
timeleft = (row[1] + row[2]) - current_time_min()
if (timeleft > 0):
await mute_user(member.id, timeleft)
@client.event
async def on_member_remove(member):
global SELF_BOT_SERVER
global db_mng
global client
door_chan = SELF_BOT_SERVER.get_channel(ch_list()["DOORSTEP"])
await client.send_message(door_chan, "See ya **{}**. We are now {} members.".format(member.name, SELF_BOT_SERVER.member_count))
await db_mng.fcdelete(member)
@client.event
async def on_message(message):
global db_mng
global SELF_BOT_SERVER
global SELF_BOT_MEMBER
global COMMUNITIES_TEXT
global client
global running_State
global debug_mode
global current_time_min
if (client.user == None) or (SELF_BOT_SERVER == None) or (SELF_BOT_MEMBER == None):
print("Error, some variable is None")
return None
try:
random.seed()
bot_mtn = message.content.split()[0]
if (get_from_mention(bot_mtn) == client.user) and (message.author != client.user): #@HyperMario
try:
bot_cmd = message.content.split()[1]
if bot_cmd == 'mute':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split()
if (len(tag) != 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["mute"] + "```")
return
muted_member = get_from_mention(tag[2])
if(muted_member != None):
mutemin = await parsetime(tag[3])
if (mutemin[0] == -1):
await client.send_message(message.channel, "{}, invalid time amount.".format(message.author.name))
return
await mute_user(tag[2], mutemin[0])
await client.send_message(message.channel, "{} was muted for {} {}.".format(muted_member.name, mutemin[1], mutemin[2]))
try:
await client.send_message(muted_member, "**CTGP-7 server:** You have been muted for {} {}.".format(mutemin[1], mutemin[2]))
except:
pass
return
else:
await client.send_message(message.channel, "{}, invalid member.".format(message.author.name))
return
elif bot_cmd == 'unmute':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split()
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["unmute"] + "```")
return
muted_member = get_from_mention(tag[2])
if(muted_member != None):
await unmute_user(tag[2])
await client.send_message(message.channel, "{} was unmuted.".format(muted_member.name))
else:
await client.send_message(message.channel, "{}, invalid member.".format(message.author.name))
elif bot_cmd == 'getmute':
tag = message.content.split()
if is_channel(message, ch_list()["STAFF"]):
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["getmute"] + "```")
return
rows = await db_mng.mute_get()
retstr = "--------------------- \n"
for row in rows:
retstr += "{}: {}m\n".format(get_from_mention(str(row[0])).name, (row[1] + row[2]) - current_time_min())
retstr += "---------------------"
await client.send_message(message.channel, "Muted users:\n```{}```".format(retstr))
else:
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["getmute"] + "```")
return
await client.send_message(message.channel, "{}, I've sent your muted time in a DM".format(message.author.name))
rows = await db_mng.mute_get()
for row in rows:
if (str(row[0]) == message.author.id):
try:
await client.send_message(message.author, "**CTGP-7 server:** You are muted for {} minutes.".format((row[1] + row[2]) - current_time_min()))
except:
pass
return
try:
await client.send_message(message.author, "**CTGP-7 server:** You are not muted.")
except:
pass
elif bot_cmd == 'closebug':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None, 3)
if not (len(tag) == 4 or len(tag) == 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["closebug"] + "```")
return
try:
bug_entry = await db_mng.bug_close(tag[2])
except:
bug_entry = []
if (len(bug_entry) == 0):
await client.send_message(message.channel, "{}, invalid ID specified or bug is already closed.".format(message.author.name))
return
bug_reports = SELF_BOT_SERVER.get_channel(ch_list()["BUG_REPORTS"])
bugs = SELF_BOT_SERVER.get_channel(ch_list()["BUGS"])
bot_msg = await client.get_message(bug_reports, tag[2])
if (len(tag) == 4):
try:
await client.edit_message(bot_msg, "```State: Closed\nReason: {}\n------------------\nReported by: {}\nExplanation: {}\nID: {}```".format(tag[3], get_from_mention(str(bug_entry[0])).name, bug_entry[1], bot_msg.id))
except:
pass
await client.send_message(bugs, "{}, your bug with ID: `{}` has been closed. Reason: ```{}```".format(get_from_mention(str(bug_entry[0])).mention, bot_msg.id, tag[3]))
else:
try:
await client.edit_message(bot_msg, "```State: Closed\nReason: No reason given.\n------------------\nReported by: {}\nExplanation: {}\nID: {}```".format( get_from_mention(str(bug_entry[0])).name, bug_entry[1], bot_msg.id))
except:
pass
await client.send_message(bugs, "{}, your bug with ID: `{}` has been closed. Reason: ```No reason given.```".format(get_from_mention(str(bug_entry[0])).mention, bot_msg.id))
await client.send_message(message.channel, "{}, closed successfully.".format(message.author.name))
elif bot_cmd == "bugcount":
tag = message.content.split()
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["bugcount"] + "```")
return
count_bug = await db_mng.bug_count()
await client.send_message(message.channel, "**Bug stats:**```Open: {}\nClosed: {}\n\nTotal: {}```".format(count_bug[0], count_bug[1], count_bug[0] + count_bug[1]))
elif bot_cmd == 'communities' or bot_cmd == 'community':
tag = message.content.split(None)
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["communities"] + "```")
return
await client.send_message(message.channel, COMMUNITIES_TEXT)
elif bot_cmd == 'change_game':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None)
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["change_game"] + "```")
return
retgame = await perform_game_change()
await client.send_message(message.channel, "{}, changed current playing game to: `{}`".format(message.author.name, retgame))
elif bot_cmd == 'warn':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None, 3)
if (len(tag) < 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["warn"] + "```")
return
warn_member = get_from_mention(tag[2])
warnreason = ""
if(len(tag) == 3):
warnreason = "No reason given."
else:
warnreason = tag[3]
if(warn_member != None):
warncount = await db_mng.warn_get(warn_member.id)
warncount += 1
await db_mng.warn_set(warn_member.id, warncount)
await client.send_message(message.channel, "{} got a warning. {} warnings in total.".format(warn_member.name, warncount))
try:
                                await client.send_message(warn_member, "**CTGP-7 server:** You got a warning. Total warnings: {}.\nReason:\n```{}```".format(warncount, warnreason))
except:
pass
await punish(warn_member, warncount)
else:
await client.send_message(message.channel, "{}, invalid member.".format(message.author.name))
elif bot_cmd == 'setwarn':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None, 4)
if (len(tag) < 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["setwarn"] + "```")
return
warn_member = get_from_mention(tag[2])
warnreason = ""
try:
warncount = int(tag[3])
except:
await client.send_message(message.channel, "{}, invalid amount.".format(message.author.name))
return
if(len(tag) == 4):
warnreason = "No reason given."
else:
warnreason = tag[4]
if(warn_member != None):
await db_mng.warn_set(warn_member.id, warncount)
await client.send_message(message.channel, "Set {} warnings to {}.".format(warn_member.name, warncount))
try:
await client.send_message(warn_member, "**CTGP-7 server:** You now have {} warnings.\nReason:\n```{}```".format(warncount, warnreason))
except:
pass
await punish(warn_member, warncount)
else:
await client.send_message(message.channel, "{}, invalid member.".format(message.author.name))
elif bot_cmd == 'getwarn':
tag = message.content.split()
if is_channel(message, ch_list()["STAFF"]):
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["getwarn"] + "```")
return
rows = await db_mng.warn_get_all()
retstr = "--------------------- \n"
for row in rows:
retstr += "{}: {}\n".format(get_from_mention(str(row[0])).name, row[1])
retstr += "---------------------"
await client.send_message(message.channel, "Users with warnings:\n```{}```".format(retstr))
else:
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["getwarn"] + "```")
return
await client.send_message(message.channel, "{}, I've sent your amount of warnings in a DM".format(message.author.name))
warncount = await db_mng.warn_get(message.author.id)
try:
await client.send_message(message.author, "**CTGP-7 server:** You have {} warnings.".format(warncount))
except:
pass
elif bot_cmd == 'release':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split()
try:
d = urlopen("https://api.github.com/repos/mariohackandglitch/CTGP-7updates/releases/tags/" + tag[2])
except HTTPError as err:
await client.send_message(message.channel, "Release tag invalid. (Example: v0.14-1)\r\nError: " + str(err.code))
else:
json_data = json.loads(d.read().decode("utf-8"))
ch = client.get_channel(ch_list()["ANN"]) #announcements
try:
if tag[3] == "1":
await client.send_message(ch, "@everyone\r\n" + json_data["name"] +" (" + json_data["tag_name"] + ") has been released! Here is the changelog:\r\n```" + json_data["body"] + "```")
except IndexError:
await client.send_message(ch, json_data["name"] +" (" + json_data["tag_name"] + ") has been released! Here is the changelog:\r\n```" + json_data["body"] + "```")
elif bot_cmd == 'cancel_schedule':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split()
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["cancel_schedule"] + "```")
return
try:
retcode = await db_mng.schedule_del_confirm(int(tag[2]))
if (retcode == -1):
await client.send_message(message.channel, "{}, invalid schedule id specified.".format(message.author.name))
return
else:
await client.send_message(message.channel, "{}, the schedule was cancelled successfully.".format(message.author.name))
return
except:
await client.send_message(message.channel, "{}, invalid schedule id specified.".format(message.author.name))
return
elif bot_cmd == 'schedule':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None, 4)
if (len(tag) != 5):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["schedule"] + "```")
return
timeamount = await parsetime(tag[3])
if (timeamount[0] == -1):
await client.send_message(message.channel, "{}, invalid time specified.".format(message.author.name))
return
messagedest = checkdestvalid(tag[2])
if (messagedest == None):
await client.send_message(message.channel, "{}, invalid user or channel specified.".format(message.author.name))
return
messagesent = await client.send_message(message.channel, "{}, the message will be sent in {} {} to {}".format(message.author.name, timeamount[1], timeamount[2], messagedest.name))
await db_mng.schedule_add(messagesent.id, messagedest.id, timeamount[0], tag[4])
elif bot_cmd == 'say':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None, 3)
if (len(tag) != 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["schedule"] + "```")
return
await sayfunc(tag[2], tag[3], message.channel)
elif bot_cmd == 'edit':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split(None, 3)
if (len(tag) != 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["edit"] + "```")
return
for chan in SELF_BOT_SERVER.channels:
try:
msg = await client.get_message(chan, tag[2])
if (msg.author == client.user):
try:
old_content = msg.content
new_msg = await client.edit_message(msg, tag[3])
await client.send_message(message.channel, "**Edited successfully:**\nOld: ```{}```New:```{}```".format(old_content, new_msg.content))
return
except:
await client.send_message(message.channel, "**Couldn't edit message:** Internal error.")
return
else:
await client.send_message(message.channel, "**Couldn't edit message:** Not a bot message.")
return
except:
pass
await client.send_message(message.channel, "**Couldn't edit message:** Message not found (may be too old).")
return
elif bot_cmd == 'restart':
if is_channel(message, ch_list()["STAFF"]):
await client.send_message(message.channel, "The bot is now restarting.")
print("Manually restarting by {} ({})".format(message.author.id, message.author.name))
running_State = False
del db_mng
await client.close()
os.execv(sys.executable, ['python3'] + sys.argv)
elif bot_cmd == 'stop':
if is_channel(message, ch_list()["STAFF"]):
await client.send_message(message.channel, "The bot is now stopping, see ya.")
print("Manually stopping by {} ({})".format(message.author.id, message.author.name))
running_State = False
del db_mng
await client.close()
try:
sys.exit(0)
except:
pass
elif bot_cmd == 'ping':
tag = message.content.split()
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["ping"] + "```")
return
msg_time = message.timestamp
now_dt = datetime.datetime.utcnow()
delay_time = now_dt - msg_time
await client.send_message(message.channel, "Pong! ({}s, {}ms)".format(delay_time.seconds, delay_time.microseconds / 1000))
elif bot_cmd == 'membercount':
if not (message.channel.is_private):
await client.send_message(message.channel, "We are now {} members.".format(SELF_BOT_SERVER.member_count))
else:
await client.send_message(message.channel, "This command cannot be used here.")
elif bot_cmd == 'fcregister':
if is_channel(message, ch_list()["FRIEND"]):
tag = message.content.split()
if not (len(tag) == 3 or len(tag) == 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["fcregister"] + "```")
return
if (len(tag) == 4):
await db_mng.fcregister(message, tag[2], tag[3])
else:
await db_mng.fcregister(message, tag[2], "true")
else:
await client.send_message(message.channel, "{}, friend code related commands can only be used in {}".format(message.author.name,SELF_BOT_SERVER.get_channel(ch_list()["FRIEND"]).mention))
elif bot_cmd == 'fcquery':
if is_channel(message, ch_list()["FRIEND"]):
tag = message.content.split()
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["fcquery"] + "```")
return
await db_mng.fcquery(message)
else:
await client.send_message(message.channel, "{}, friend code related commands can only be used in {}".format(message.author.name,SELF_BOT_SERVER.get_channel(ch_list()["FRIEND"]).mention))
elif bot_cmd == 'fcdelete':
if is_channel(message, ch_list()["FRIEND"]):
tag = message.content.split()
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["fcdelete"] + "```")
return
await db_mng.fcdelete(message)
else:
await client.send_message(message.channel, "{}, friend code related commands can only be used in {}".format(message.author.name,SELF_BOT_SERVER.get_channel(ch_list()["FRIEND"]).mention))
elif bot_cmd == 'rules':
await client.send_message(message.channel, "{}, I've sent you the rules in a private message.".format(message.author.name))
await send_rules(message.author, False)
elif bot_cmd == 'fact':
tag = message.content.split()
if not (len(tag) == 2 or len(tag) == 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["fact"] + "```")
return
final_text = ""
if (len(tag) == 2):
fact_text = await db_mng.fact_get(False)
fact_id = fact_text[random.randint(0, len(fact_text) - 1)][1]
try:
final_text = await parse_fact(fact_id)
except:
print("Error parsing: " + fact_id)
raise
else:
try:
fact_text = await db_mng.fact_get_byrow(int(tag[2]))
fact_id = fact_text[0][1]
except:
await client.send_message(message.channel, "Invalid id specified.")
return
try:
final_text = await parse_fact(fact_id)
except:
print("Error parsing: " + fact_id)
raise
await client.send_message(message.channel, "```" + final_text + "```")
elif bot_cmd == 'listfact':
tag = message.content.split()
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["listfact"] + "```")
return
fact_text = await db_mng.fact_get(True)
retstr = "```\n----------\n"
if is_channel(message, ch_list()["STAFF"]):
for row in fact_text:
retstr += str(row[0]) + " - " + get_from_mention(str(row[1])).name + " - " + row[2] + "\n----------\n"
retstr += "```"
await client.send_message(message.channel, retstr)
else:
for row in fact_text:
try:
final_text = await parse_fact(row[2])
text_isdyn = "(dynamic)" if await isfact_dynamic(row[2]) else "(static)"
retstr += str(row[0]) + " - " + text_isdyn + " - " + final_text + "\n----------\n"
except:
print("Error parsing: " + fact_id)
retstr += "```"
await client.send_message(message.channel, "{}, I sent you all the facts in a DM.".format(message.author.name))
await client.send_message(message.author, retstr)
elif bot_cmd == 'delfact':
if is_channel(message, ch_list()["STAFF"]):
tag = message.content.split()
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_help_array()["delfact"] + "```")
return
try:
await db_mng.fact_delete(int(tag[2]))
except:
await client.send_message(message.channel, "{}, invalid id.".format(message.author.name))
return
await client.send_message(message.channel, "Fact {} deleted.".format(tag[2]))
else:
tag = message.content.split()
if (len(tag) != 2):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["delfact"] + "```")
return
await db_mng.fact_deleteuser(message.author.id)
await client.send_message(message.channel, "{}, your fact has been removed.".format(message.author.name))
elif bot_cmd == 'addfact':
if not is_channel(message, ch_list()["STAFF"]):
if(await db_mng.fact_userreg(message.author.id)):
await client.send_message(message.channel, "{}, you can only have one fact registered. Use `@HyperMario delfact` to delete the existing one.".format(message.author.name))
return
tag = message.content.split(None, 2)
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["addfact"] + "```")
return
try:
dummy = await parse_fact(tag[2])
except:
await client.send_message(message.channel, "{}, error parsing fact, correct usage:\r\n```".format(message.author.name) + help_array()["addfact"] + "```")
return
await db_mng.fact_add(int(message.author.id), tag[2])
await client.send_message(message.channel, "Fact added: \n```{}```".format(await parse_fact(tag[2])))
elif bot_cmd == 'help':
if is_channel(message, ch_list()["BOTCHAT"]) or is_channel(message, ch_list()["STAFF"]) or message.channel.is_private:
tag = message.content.split()
if (len(tag) > 2):
if tag[2] == "game":
if (len(tag) == 3):
help_str = "Here is the help for the specified command:\r\n```" + help_array()["game"] + "```"
help_str += "Here is a list of all the available game modes:\n\n"
for index, content in game_help_array().items():
help_str += "`" + index + "`, "
help_str = help_str[:-2]
help_str += "\n\nUse `@HyperMario help game (gamemode)` to get help of a specific command."
await client.send_message(message.channel, help_str)
if is_channel(message, ch_list()["STAFF"]):
help_str = "\nHere is a list of all the available game staff commands:\n\n"
for index, content in staff_game_help_array().items():
help_str += "`" + index + "`, "
help_str = help_str[:-2]
help_str += "\n\nUse `@HyperMario help game (gamemode)` to get help of a specific command."
await client.send_message(message.channel, help_str)
return
else:
if is_channel(message, ch_list()["STAFF"]):
if tag[3] in staff_game_help_array():
await client.send_message(message.channel, "Here is the help for the specified game mode:\r\n```" + staff_game_help_array()[tag[3]] + "```")
return
if tag[3] in game_help_array():
await client.send_message(message.channel, "Here is the help for the specified game mode:\r\n```" + game_help_array()[tag[3]] + "```")
else:
await client.send_message(message.channel, "Unknown game mode, use `@HyperMario help game` to get a list of all the available game modes.")
return
if is_channel(message, ch_list()["STAFF"]):
if tag[2] in staff_help_array():
await client.send_message(message.channel, "Here is the help for the specified command:\r\n```" + staff_help_array()[tag[2]] + "```")
return
if tag[2] in help_array():
await client.send_message(message.channel, "Here is the help for the specified command:\r\n```" + help_array()[tag[2]] + "```")
else:
await client.send_message(message.channel, "Unknown command, use `@HyperMario help` to get a list of all the available commands.")
else:
help_str = "Here is a list of all the available commands:\n\n"
for index, content in help_array().items():
help_str += "`" + index + "`, "
help_str = help_str[:-2]
help_str += "\n\nUse `@HyperMario help (command)` to get help of a specific command."
await client.send_message(message.channel, help_str)
if is_channel(message, ch_list()["STAFF"]):
help_str = "\nHere is a list of all the available staff commands:\n\n"
for index, content in staff_help_array().items():
help_str += "`" + index + "`, "
help_str = help_str[:-2]
help_str += "\n\nUse `@HyperMario help (command)` to get help of a specific command."
await client.send_message(message.channel, help_str)
else:
await client.send_message(message.channel, "`@HyperMario help` can only be used in <#324672297812099093> or DM.")
return
elif bot_cmd == "game":
if (is_channel(message, ch_list()["BOTCHAT"]) or is_channel(message, ch_list()["STAFF"])):
tag = message.content.split()
if (len(tag) < 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["game"] + "```")
return
if (tag[2] == "guessanumber"):
if (len(tag) != 5):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + game_help_array()["guessanumber"] + "```")
return
if (tag[3] == "easy"):
try:
guessed = int(tag[4])
if guessed not in range(0, 11):
raise ValueError("Number out of range.")
except:
await client.send_message(message.channel, "{}, invalid number specified. (Must be between 0 and 10)".format(message.author.name))
return
result = random.randint(0, 10)
await game_numberguess(guessed, result, 0, message)
return
elif (tag[3] == "normal"):
try:
guessed = int(tag[4])
if guessed not in range(0, 51):
raise ValueError("Number out of range.")
except:
await client.send_message(message.channel, "{}, invalid number specified. (Must be between 0 and 50)".format(message.author.name))
return
result = random.randint(0, 50)
await game_numberguess(guessed, result, 1, message)
return
elif (tag[3] == "hard"):
try:
guessed = int(tag[4])
if guessed not in range(0, 100):
raise ValueError("Number out of range.")
except:
await client.send_message(message.channel, "{}, invalid number specified. (Must be between 0 and 99)".format(message.author.name))
return
result = random.randint(0, 99)
await game_numberguess(guessed, result, 2, message)
return
else:
await client.send_message(message.channel, "{}, invalid difficulty specified. (easy/normal/hard)".format(message.author.name))
return
elif (tag[2] == "rps"):
if (len(tag) != 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + game_help_array()["rps"] + "```")
return
bot_ch = random.randint(0, 2)
usr_ch = 0
if (tag[3] == "rock" or tag[3] == "r"):
usr_ch = 0
elif (tag[3] == "paper" or tag[3] == "p"):
usr_ch = 1
elif (tag[3] == "scissors" or tag[3] == "s"):
usr_ch = 2
else:
await client.send_message(message.channel, "{}, invalid choice (rock/paper/scissors).".format(message.author.name))
return
await game_rps(bot_ch, usr_ch, message)
return
elif (tag[2] == "coin"):
if (len(tag) != 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + game_help_array()["coin"] + "```")
return
bot_ch = random.randint(1, 500)
usr_ch = 0
if (tag[3] == "head" or tag[3] == "h"):
usr_ch = 0
elif (tag[3] == "tails" or tag[3] == "t" or tag[3] == "tail"):
usr_ch = 1
else:
await client.send_message(message.channel, "{}, invalid choice (head/tails).".format(message.author.name))
return
await game_coin(bot_ch, usr_ch, message)
return
elif (tag[2] == "showcookie"):
if is_channel(message, ch_list()["STAFF"]):
if (len(tag) != 4):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_game_help_array()["showcookie"] + "```")
return
cookie_member = get_from_mention(tag[3])
if (cookie_member != None):
cookie_amount = await db_mng.get_cookie(cookie_member.id)
await client.send_message(message.channel, "{} has {} <:yoshicookie:416533826869657600> .".format(cookie_member.name, cookie_amount))
return
else:
await client.send_message(message.channel, "{}, invalid member specified.".format(message.author.name))
else:
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + game_help_array()["showcookie"] + "```")
return
cookie_amount = await db_mng.get_cookie(message.author.id)
await client.send_message(message.channel, "{}, you have {} <:yoshicookie:416533826869657600> .".format(message.author.name, cookie_amount))
return
elif (tag[2] == "top10"):
if (len(tag) != 3):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + game_help_array()["top10"] + "```")
return
rows = await db_mng.top_ten_cookie()
retstr = "Users with most <:yoshicookie:416533826869657600> .\n\n---------------------------------\n"
for row in rows:
cookie_member = get_from_mention(str(row[0]))
if cookie_member != None:
retstr += "**{}** = **{}** <:yoshicookie:416533826869657600>\n---------------------------------\n".format(cookie_member.name, row[1])
else:
await db_mng.delete_cookie(row[0])
await client.send_message(message.channel, "{}".format(retstr))
elif (tag[2] == "setcookie"):
if is_channel(message, ch_list()["STAFF"]):
if (len(tag) != 5):
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + staff_game_help_array()["setcookie"] + "```")
return
cookie_member = get_from_mention(tag[3])
try:
amount = int(tag[4])
except:
await client.send_message(message.channel, "{}, invalid amount specified.".format(message.author.name))
return
if (cookie_member != None):
await db_mng.set_cookie(cookie_member.id, amount)
await client.send_message(message.channel, "Set {} <:yoshicookie:416533826869657600> to {} .".format(cookie_member.name, amount))
return
else:
await client.send_message(message.channel, "{}, invalid user specified.".format(message.author.name))
return
else:
await client.send_message(message.channel, "{}, invalid game mode specified. Use `@HyperMario help game` to get a list of game modes.".format(message.author.name))
return
return
else:
await client.send_message(message.channel, "`@HyperMario game` can only be used in <#324672297812099093>.")
return
else:
await client.send_message(message.channel, 'Hi {}! :3\r\nTo get the list of all the available commands use `@HyperMario help`'.format(message.author.name))
except IndexError:
await client.send_message(message.channel, 'Hi {}! :3\r\nTo get the list of all the available commands use `@HyperMario help`'.format(message.author.name))
elif (message.channel.is_private and not message.author == client.user):
staff_chan = SELF_BOT_SERVER.get_channel(ch_list()["STAFF"])
await client.send_message(staff_chan, "{} sent me the following in a DM:\n```{}```".format(message.author.mention, message.content))
elif (is_channel(message, ch_list()["BUGS"]) and (message.author != client.user) and bot_mtn == "!report"):
tag = message.content.split(None, 1)
if (len(tag) > 1):
notif_msg = await client.send_message(message.channel, "{}, adding your bug report: ```{}```".format(message.author.name, tag[1]))
bug_reports = SELF_BOT_SERVER.get_channel(ch_list()["BUG_REPORTS"])
bot_msg = await client.send_message(bug_reports, "Processing...")
await client.edit_message(bot_msg, "```State: Open\n------------------\nReported by: {}\nExplanation: {}\nID: {}```".format(message.author.name, tag[1], bot_msg.id))
if (bot_msg != None):
await db_mng.bug_add(message.author.id, tag[1], bot_msg)
await client.edit_message(notif_msg, "{}, adding your bug report: ```{}```**Success**".format(message.author.name, tag[1]))
else:
await client.edit_message(notif_msg, "{}, adding your bug report: ```{}```**Fail**".format(message.author.name, tag[1]))
else:
await client.send_message(message.channel, "{}, invalid syntax, correct usage:\r\n```".format(message.author.name) + help_array()["report"] + "```")
except:
if(debug_mode):
raise
else:
pass
try:
client.run(sys.argv[1])
except:
if (running_State):
print("Got exception at {}, restarting bot in a while.".format(str(datetime.datetime.now())))
retryam = get_retry_times()
if(retryam < 30):
time.sleep(30)
elif(retryam < 180):
time.sleep(300)
else:
print("Retried too many times, exiting.")
running_State = False
del db_mng
raise
print("Retry count: {}\n".format(retryam))
set_retry_times(retryam + 1)
running_State = False
del db_mng
os.execv(sys.executable, ['python3'] + sys.argv)
else:
pass
| python |
"""
Ibutsu API
A system to store and query test results # noqa: E501
The version of the OpenAPI document: 1.13.4
Generated by: https://openapi-generator.tech
"""
import unittest
import ibutsu_client
from ibutsu_client.api.login_api import LoginApi # noqa: E501
class TestLoginApi(unittest.TestCase):
"""LoginApi unit test stubs"""
def setUp(self):
self.api = LoginApi() # noqa: E501
def tearDown(self):
pass
def test_activate(self):
"""Test case for activate
"""
pass
def test_auth(self):
"""Test case for auth
"""
pass
def test_config(self):
"""Test case for config
"""
pass
def test_login(self):
"""Test case for login
"""
pass
def test_recover(self):
"""Test case for recover
"""
pass
def test_register(self):
"""Test case for register
"""
pass
def test_reset_password(self):
"""Test case for reset_password
"""
pass
def test_support(self):
"""Test case for support
"""
pass
if __name__ == '__main__':
unittest.main()
| python |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-01 17:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('polls', '0002_auto_20161128_0904'),
]
operations = [
migrations.AddField(
model_name='bettoride',
name='success',
field=models.NullBooleanField(default=None, verbose_name='betSucess'),
),
]
| python |
from testwatch.report import Report
def report_to_tsv(report: Report) -> str:
rows: list[tuple[str, str, str]] = []
start_row = ("start", str(report.start_time), str(report.start_time))
rows.append(start_row)
for task in report.tasks:
task_row = (task.name, str(task.start_time), str(task.end_time))
rows.append(task_row)
end_row = ("end", str(report.end_time), str(report.end_time))
rows.append(end_row)
lines = map(lambda row: "\t".join(row), rows)
return "\n".join(lines)
| python |
# Copyright (c) Nuralogix. All rights reserved. Licensed under the MIT license.
# See LICENSE.txt in the project root for license information
from setuptools import setup
setup(
name='dfx-apiv2-client',
version='0.8.0',
packages=['dfx_apiv2_client'],
install_requires=[
'aiohttp[speedups]',
'dfx-apiv2-protos @ https://github.com/nuralogix/dfx-apiv2-protos-python/tarball/master',
],
setup_requires=['wheel'],
description='dfx-apiv2-client is an async client for the DeepAffex API.',
)
| python |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsFieldValidator.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '31/01/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '176c06ceefb5f555205e72b20c962740cc0ec183'
import qgis # NOQA
from qgis.PyQt.QtCore import QVariant, QLocale
from qgis.PyQt.QtGui import QValidator
from qgis.core import QgsVectorLayer
from qgis.gui import QgsFieldValidator
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
TEST_DATA_DIR = unitTestDataPath()
start_app()
class TestQgsFieldValidator(unittest.TestCase):
def setUp(self):
"""Run before each test."""
testPath = TEST_DATA_DIR + '/' + 'bug_17878.gpkg|layername=bug_17878'
self.vl = QgsVectorLayer(testPath, "test_data", "ogr")
assert self.vl.isValid()
def tearDown(self):
"""Run after each test."""
pass
def _fld_checker(self, field):
"""
Expected results from validate
QValidator::Invalid 0 The string is clearly invalid.
QValidator::Intermediate 1 The string is a plausible intermediate value.
QValidator::Acceptable 2 The string is acceptable as a final result; i.e. it is valid.
"""
DECIMAL_SEPARATOR = QLocale().decimalPoint()
OTHER_SEPARATOR = ',' if DECIMAL_SEPARATOR == '.' else '.'
validator = QgsFieldValidator(None, field, '0.0', '')
def _test(value, expected):
ret = validator.validate(value, 0)
self.assertEqual(ret[0], expected, "%s != %s" % (ret[0], expected))
if value:
self.assertEqual(validator.validate('-' + value, 0)[0], expected, '-' + value)
# Valid
_test('0.1234', QValidator.Acceptable)
# Apparently we accept comma only when the locale says so
if DECIMAL_SEPARATOR != '.':
_test('0,1234', QValidator.Acceptable)
# If precision is > 0, regexp validator is used (and it does not support sci notation)
if field.precision() == 0:
_test('12345.1234e+123', QValidator.Acceptable)
_test('12345.1234e-123', QValidator.Acceptable)
if DECIMAL_SEPARATOR != '.':
_test('12345,1234e+123', QValidator.Acceptable)
_test('12345,1234e-123', QValidator.Acceptable)
_test('', QValidator.Acceptable)
# Out of range
_test('12345.1234e+823', QValidator.Intermediate)
_test('12345.1234e-823', QValidator.Intermediate)
if DECIMAL_SEPARATOR != '.':
_test('12345,1234e+823', QValidator.Intermediate)
_test('12345,1234e-823', QValidator.Intermediate)
# Invalid
_test('12345-1234', QValidator.Invalid)
_test('onetwothree', QValidator.Invalid)
int_field = self.vl.fields()[self.vl.fields().indexFromName('int_field')]
self.assertEqual(int_field.precision(), 0) # this is what the provider reports :(
self.assertEqual(int_field.length(), 0) # not set
self.assertEqual(int_field.type(), QVariant.Int)
validator = QgsFieldValidator(None, int_field, '0', '')
# Valid
_test('0', QValidator.Acceptable)
_test('1234', QValidator.Acceptable)
_test('', QValidator.Acceptable)
# Invalid
_test('12345-1234', QValidator.Invalid)
_test('12345%s1234' % DECIMAL_SEPARATOR, QValidator.Invalid)
_test('onetwothree', QValidator.Invalid)
def test_doubleValidator(self):
"""Test the double with default (system) locale"""
field = self.vl.fields()[self.vl.fields().indexFromName('double_field')]
self.assertEqual(field.precision(), 0) # this is what the provider reports :(
self.assertEqual(field.length(), 0) # not set
self.assertEqual(field.type(), QVariant.Double)
self._fld_checker(field)
def test_doubleValidatorCommaLocale(self):
"""Test the double with german locale"""
QLocale.setDefault(QLocale(QLocale.German, QLocale.Germany))
assert QLocale().decimalPoint() == ','
field = self.vl.fields()[self.vl.fields().indexFromName('double_field')]
self._fld_checker(field)
def test_doubleValidatorDotLocale(self):
"""Test the double with english locale"""
QLocale.setDefault(QLocale(QLocale.English))
assert QLocale().decimalPoint() == '.'
field = self.vl.fields()[self.vl.fields().indexFromName('double_field')]
self._fld_checker(field)
def test_precision(self):
"""Test different precision"""
QLocale.setDefault(QLocale(QLocale.English))
assert QLocale().decimalPoint() == '.'
field = self.vl.fields()[self.vl.fields().indexFromName('double_field')]
field.setPrecision(4)
self._fld_checker(field)
if __name__ == '__main__':
unittest.main()
| python |
from flask import abort, Flask, jsonify, request
from flask_restful import Resource, Api
from translation_engine import decode, encode
app = Flask(__name__)
api = Api(app)
class Encoder(Resource):
def post(self):
if not request.json or 'message' not in request.json:
abort(400)
msg = request.json['message']
enc = encode(msg)
return jsonify({"message": enc})
class Decoder(Resource):
def post(self):
if not request.json or 'message' not in request.json:
abort(400)
msg = request.json['message']
try:
dec = decode(msg)
except ValueError as e:
return str(e), 400
return jsonify({'message': dec})
class Hello(Resource):
def get(self):
return 'Hello World!'
api.add_resource(Encoder, '/v1/encode')
api.add_resource(Decoder, '/v1/decode')
api.add_resource(Hello, '/')
if __name__ == '__main__':
app.run(threaded=True)
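# Example requests against the endpoints registered above (assuming the
# default Flask port 5000; the encode/decode semantics come from
# translation_engine):
#
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"message": "hello"}' http://localhost:5000/v1/encode
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"message": "<encoded text>"}' http://localhost:5000/v1/decode
#
# Requests without a JSON body or without a "message" key get a 400 response.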
| python |
from neural_network import neural_network
import numpy as np
from sklearn import preprocessing
from sklearn.datasets import fetch_mldata
# Retrieve MNIST data and prep valid/test set
size_training_data = 5500
size_validation_data = 500
mnist = fetch_mldata('MNIST original')
input_data = preprocessing.scale(np.c_[mnist.data])
target_class = np.concatenate(np.c_[mnist.target],axis=0).astype(int)
random_indices = np.arange(len(input_data))
np.random.shuffle(random_indices)
training_values = random_indices[0:size_training_data]
validation_values = random_indices[size_training_data:size_training_data + size_validation_data]
training_inputs = input_data[training_values]
training_outputs = target_class[training_values]
validation_inputs = input_data[validation_values]
validation_outputs = target_class[validation_values]
# Define input and output dimensions
input_dim = training_inputs.shape[1]
output_dim = 10
# TODO: develop unit testing and get comments on the current design
# to further develop the code
# TODO: learn about different optimization approaches and the use of optimizers such as Adam
# TODO: implement hyperparameter optimization
numberOfNeurons = [[output_dim],[15],[25],[10,10],[10,10,10]]
# TODO: read up on annotations in Python 3.6
# How to determine the number of iterations?
for network_arch in numberOfNeurons:
ann = neural_network()
# Gradient descent parameters, play with these and see their effects
ann.configure_classifier(input_dim, output_dim, hidden_layers=network_arch, activation_function_type='relu',
batch_size=500, epsilon=1e-4)
ann.load_data(training_inputs,training_outputs)
model = ann.train_model(num_iterations=1000)
predicted_outputs = ann.predict(validation_inputs)
error = sum((predicted_outputs-validation_outputs) != 0)
print("The error rate with " + str(network_arch) + "neurons is " + str(error*100/size_validation_data) +"%")
| python |
from django.db import models
from transactions import constant
class Transaction(models.Model):
transaction_id = models.IntegerField(unique=True)
brief_description = models.CharField(max_length=255, null=False)
description = models.CharField(max_length=255)
amount = models.FloatField(default=0.0)
transaction_type = models.IntegerField(choices=constant.TRANSACTION_TYPE, default=1)
classification = models.CharField(max_length=255, default="Utility")
date = models.DateField()
| python |
class NonGameScreen:
def __init__(self, screen):
self.screen = screen
def draw_text(self, text, font, color, cntr):
phrase = font.render(text, 0, color)
phrase_rect = phrase.get_rect(center=cntr)
self.screen.blit(phrase, phrase_rect)
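# Minimal usage sketch (hypothetical values; assumes a pygame display has
# been created elsewhere):
#
#   import pygame
#   pygame.init()
#   screen = pygame.display.set_mode((640, 480))
#   menu = NonGameScreen(screen)
#   font = pygame.font.Font(None, 48)
#   menu.draw_text("Game Over", font, (255, 255, 255), screen.get_rect().center)
#   pygame.display.flip()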
| python |
while True:
n = int(input())
if n == 0:
break
cards = []
for i in range(n):
cards.append(i + 1)
discarded_cards = []
while len(cards) > 1:
x = cards.pop(0)
y = cards.pop(0)
discarded_cards.append(x)
cards.append(y)
print("Discarded cards:", end="")
for x in discarded_cards:
print("",x,end="")
print()
print("Remaining card:", cards[0]) | python |
import random
import pytest
from app.utils import graph as m
from tests.utils.utils import random_lower_string
class TestYmirNode:
def test_create_ymir_node(self):
d = {
"id": random.randint(1000, 2000),
"name": random_lower_string(10),
"hash": random_lower_string(10),
"label": "Model",
}
node = m.YmirNode.from_dict(d)
assert node.label == "Model"
assert node.id == d["id"]
assert node.properties["name"] == d["name"]
assert node.properties["hash"] == d["hash"]
@pytest.fixture(autouse=True)
def mock_redis(mocker):
mocker.patch.object(m, "StrictRedis")
class TestGraphClient:
def test_query(self, mocker):
mock_graph = mocker.Mock()
mocker.patch.object(m, "Graph", return_value=mock_graph)
q = random_lower_string()
client = m.GraphClient(redis_uri=None)
client.user_id = 2
client.query(q)
mock_graph.query.assert_called_with(q)
def test_add_relationship(self, mocker):
mock_graph = mocker.Mock()
mocker.patch.object(m, "Graph", return_value=mock_graph)
client = m.GraphClient(redis_uri=None)
client.user_id = 2
client.add_relationship(
{"id": 1, "label": "Dataset"},
{"id": 2, "label": "Model"},
{"id": 3, "label": "Task"},
)
mock_graph.query.assert_called()
| python |
import unittest
import os
from simian.config import Configuration
class ConfigTest(unittest.TestCase):
def setUp(self):
dirname = os.path.dirname(__file__)
self.config_file_path = os.path.join(dirname, 'config/config.ini')
self.config = Configuration(self.config_file_path)
self.test_image_assets_path = os.path.join(dirname, 'assets/images')
self.test_audio_assets_path = os.path.join(dirname, 'assets/audios')
def test_constructor(self):
self.assertEqual(self.config.IMAGE_ASSETS_PATH,
self.test_image_assets_path)
self.assertEqual(self.test_audio_assets_path,
self.config.AUDIO_ASSETS_PATH)
| python |
# This file is Copyright 2019 Volatility Foundation and licensed under the Volatility Software License 1.0
# which is available at https://www.volatilityfoundation.org/license/vsl-v1.0
#
import logging
import socket
from typing import Dict, Tuple, List, Union
from volatility.framework import exceptions
from volatility.framework import objects, interfaces
from volatility.framework.objects import Array
from volatility.framework.renderers import conversion
vollog = logging.getLogger(__name__)
def inet_ntop(address_family: int, packed_ip: Union[List[int], Array]) -> str:
if address_family in [socket.AF_INET6, socket.AF_INET]:
try:
return socket.inet_ntop(address_family, bytes(packed_ip))
except AttributeError:
raise RuntimeError("This version of python does not have socket.inet_ntop, please upgrade")
raise socket.error("[Errno 97] Address family not supported by protocol")
# Python's socket.AF_INET6 is 0x1e but Microsoft defines it
# as a constant value of 0x17 in their source code. Thus we
# need Microsoft's since that's what is found in memory.
AF_INET = 2
AF_INET6 = 0x17
# String representations of INADDR_ANY and INADDR6_ANY
inaddr_any = inet_ntop(socket.AF_INET, [0] * 4)
inaddr6_any = inet_ntop(socket.AF_INET6, [0] * 16)
class _TCP_LISTENER(objects.StructType):
"""Class for objects found in TcpL pools.
This class serves as a base class for all pooled network objects.
It exposes some functions which return sanity-checked members. Substructures referred to by a
pointer may appear valid at first glance but will throw an InvalidAddressException on access.
This is not a problem when objects are validated via their `is_valid()` method, but when
scanning for semi-corrupted data this check will not be performed.
Be mindful that most of those methods return `None` when they would access invalid data.
If you want to process the raw data access the attributes directly, e.g.
via `network_object.InetAF` instead of `network_object.get_address_family()`.
"""
MIN_CREATETIME_YEAR = 1950
MAX_CREATETIME_YEAR = 2200
def __init__(self, context: interfaces.context.ContextInterface, type_name: str,
object_info: interfaces.objects.ObjectInformation, size: int,
members: Dict[str, Tuple[int, interfaces.objects.Template]]) -> None:
super().__init__(context = context,
type_name = type_name,
object_info = object_info,
size = size,
members = members)
def get_address_family(self):
try:
return self.InetAF.dereference().AddressFamily
except exceptions.InvalidAddressException:
return None
def get_owner(self):
try:
return self.member('Owner').dereference()
except exceptions.InvalidAddressException:
return None
def get_owner_pid(self):
if self.get_owner().is_valid():
if self.get_owner().has_valid_member("UniqueProcessId"):
return self.get_owner().UniqueProcessId
return None
def get_owner_procname(self):
if self.get_owner().is_valid():
if self.get_owner().has_valid_member("ImageFileName"):
return self.get_owner().ImageFileName.cast("string",
max_length = self.get_owner().ImageFileName.vol.count,
errors = "replace")
return None
def get_create_time(self):
dt_obj = conversion.wintime_to_datetime(self.CreateTime.QuadPart)
if isinstance(dt_obj, interfaces.renderers.BaseAbsentValue):
return dt_obj
# return None if the timestamp seems invalid
if not (self.MIN_CREATETIME_YEAR < dt_obj.year < self.MAX_CREATETIME_YEAR):
return None
else:
return dt_obj
def get_in_addr(self):
try:
local_addr = self.LocalAddr.dereference()
if local_addr.pData.dereference():
inaddr = local_addr.inaddr
return inaddr
else:
return None
except exceptions.InvalidAddressException:
return None
def dual_stack_sockets(self):
"""Handle Windows dual-stack sockets"""
# If this pointer is valid, the socket is bound to
# a specific IP address. Otherwise, the socket is
# listening on all IP addresses of the address family.
# Note the remote address is always INADDR_ANY or
# INADDR6_ANY for sockets. The moment a client
# connects to the listener, a TCP_ENDPOINT is created
# and that structure contains the remote address.
inaddr = self.get_in_addr()
if inaddr:
if self.get_address_family() == AF_INET:
yield "v4", inet_ntop(socket.AF_INET, inaddr.addr4), inaddr_any
elif self.get_address_family() == AF_INET6:
yield "v6", inet_ntop(socket.AF_INET6, inaddr.addr6), inaddr6_any
else:
yield "v4", inaddr_any, inaddr_any
if self.get_address_family() == AF_INET6:
yield "v6", inaddr6_any, inaddr6_any
def is_valid(self):
try:
if not self.get_address_family() in (AF_INET, AF_INET6):
return False
except exceptions.InvalidAddressException:
return False
return True
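# Hypothetical usage sketch of the pattern described in the class docstring
# above: validate a scanned object first, then read the sanity-checked
# accessors, which return None instead of raising on corrupt data.
#
#   if listener.is_valid():
#       af = listener.get_address_family()  # AF_INET / AF_INET6, or None
#       pid = listener.get_owner_pid()      # None when the owner is corrupt
#       for version, local_addr, remote_addr in listener.dual_stack_sockets():
#           ...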
class _TCP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in TcpE pools"""
def _ipv4_or_ipv6(self, inaddr):
if self.get_address_family() == AF_INET:
return inet_ntop(socket.AF_INET, inaddr.addr4)
else:
return inet_ntop(socket.AF_INET6, inaddr.addr6)
def get_local_address(self):
try:
inaddr = self.AddrInfo.dereference().Local.pData.dereference().dereference()
return self._ipv4_or_ipv6(inaddr)
except exceptions.InvalidAddressException:
return None
def get_remote_address(self):
try:
inaddr = self.AddrInfo.dereference().Remote.dereference()
return self._ipv4_or_ipv6(inaddr)
except exceptions.InvalidAddressException:
return None
def is_valid(self):
if self.State not in self.State.choices.values():
vollog.debug("invalid due to invalid tcp state {}".format(self.State))
return False
try:
if self.get_address_family() not in (AF_INET, AF_INET6):
vollog.debug("invalid due to invalid address_family {}".format(self.get_address_family()))
return False
if not self.get_local_address() and (not self.get_owner() or self.get_owner().UniqueProcessId == 0
or self.get_owner().UniqueProcessId > 65535):
vollog.debug("invalid due to invalid owner data")
return False
except exceptions.InvalidAddressException:
vollog.debug("invalid due to invalid address access")
return False
return True
class _UDP_ENDPOINT(_TCP_LISTENER):
"""Class for objects found in UdpA pools"""
class _LOCAL_ADDRESS(objects.StructType):
@property
def inaddr(self):
return self.pData.dereference().dereference()
class _LOCAL_ADDRESS_WIN10_UDP(objects.StructType):
@property
def inaddr(self):
return self.pData.dereference()
win10_x64_class_types = {
'_TCP_ENDPOINT': _TCP_ENDPOINT,
'_TCP_LISTENER': _TCP_LISTENER,
'_UDP_ENDPOINT': _UDP_ENDPOINT,
'_LOCAL_ADDRESS': _LOCAL_ADDRESS,
'_LOCAL_ADDRESS_WIN10_UDP': _LOCAL_ADDRESS_WIN10_UDP
}
class_types = {
'_TCP_ENDPOINT': _TCP_ENDPOINT,
'_TCP_LISTENER': _TCP_LISTENER,
'_UDP_ENDPOINT': _UDP_ENDPOINT,
'_LOCAL_ADDRESS': _LOCAL_ADDRESS
}
| python |
from datetime import datetime
import json
from typing import Type
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine, or_, and_, inspect, Table, MetaData, Column
from iupdatable.util.weixin.models import Article
from iupdatable import Status, Logger, File
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import re
from urllib.parse import urlparse, parse_qs
class WeiXinCrawler(object):
_wei_xin_name: str
_seed_url: str
_template_url: str
_sqlite_session: None
_db_file_url: str
_max_count: int
def __init__(self):
Logger.instance().config(log_file_full_path="WeiXinCrawler.log")
def _init_db(self, db_path):
db_path = db_path.strip()
if not File.exist(db_path):
url = "sqlite:///" + db_path
engine = create_engine(url)
session = sessionmaker(bind=engine)
self._sqlite_session = session()
Article.metadata.create_all(engine)
def start(self, wei_xin_name="", seed_url="", max_count=-1, db_path=""):
"""
Start a crawl task.
:param wei_xin_name: name of the WeChat official account
:param seed_url: seed URL
:param max_count: maximum number of pages to fetch; default -1 means fetch the whole message history
:param db_path: path of the database file used for saving results
:return:
"""
try:
Logger.instance().info("开始任务...")
if wei_xin_name == "":
Logger.instance().error("请填写微信公众号名称!")
return None
if seed_url == "":
Logger.instance().error("请填写种子链接!")
return None
if "offset=" not in seed_url:
Logger.instance().error("种子链接填写错误!")
return None
db_path = db_path.strip()
if db_path == "":
self._db_file_url = "sqlite:///微信公众号历史消息.db"
else:
if not File.exist(db_path):
Logger.instance().warning("首次使用,创建数据库文件:{0}".format(db_path))
self._init_db(db_path)
self._db_file_url = "sqlite:///" + db_path
self._template_url = re.sub("(?<=offset=)(?:[0-9]{0,3})", "{0}", seed_url)
self._seed_url = seed_url
self._max_count = max_count
self._wei_xin_name = wei_xin_name
engine = create_engine(self._db_file_url)
session = sessionmaker(bind=engine)
self._sqlite_session = session()
can_continue = True
offset = 0
while can_continue:
if offset > self._max_count != -1:
break
grab_result = self._grab_articles(offset)
if grab_result == Status.retry:
grab_result = self._grab_articles(offset)
if isinstance(grab_result, dict):
can_continue = grab_result["continue"]
if can_continue:
offset = grab_result["next_offset"]
else:
Logger.instance().info("全部抓取完毕!")
break
else:
Logger.instance().error("多次重试失败!")
break
Logger.instance().info("任务完成,已退出!")
except Exception as e:
Logger.error(repr(e), is_with_debug_info=True)
return -1
def _grab_articles(self, offset):
try:
url = self._template_url.format(offset)
headers = {
"User-Agent": "MicroMessenger"
}
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
response = requests.get(url, headers=headers, verify=False)
if response.status_code == 200:
json_result = json.loads(response.text)
if json_result["errmsg"] == "ok":
new_json = json.loads(json_result["general_msg_list"])
general_msg_list = new_json["list"]
for i, msg in enumerate(general_msg_list):
comm_msg_info = msg["comm_msg_info"]
release_time_unix = comm_msg_info["datetime"]
if "app_msg_ext_info" not in msg:
continue
app_msg_ext_info = msg["app_msg_ext_info"]
article = self._dict_to_model(app_msg_ext_info, release_time_unix)
if article:
add_result = self._add_or_update_record(article, "articleId", Article)
if add_result:
log = "{0} - {1}. {2}".format(article.releaseTime, article.index, article.title)
Logger.instance().info(log)
for j, sub_msg in enumerate(app_msg_ext_info["multi_app_msg_item_list"]):
article = self._dict_to_model(sub_msg, release_time_unix)
if article:
add_result = self._add_or_update_record(article, "articleId", Article)
if add_result:
log = "{0} - {1}. {2}".format(article.releaseTime, article.index, article.title)
Logger.instance().info(log)
if json_result["can_msg_continue"] == 1:
result = {
"continue": True,
"next_offset": int(json_result["next_offset"])
}
return result
else:
result = {
"continue": False
}
return result
else:
return Status.retry
else:
return Status.retry
except Exception as e:
Logger.error(repr(e), is_with_debug_info=True)
return Status.retry
def _add_or_update_record(self, record, compare_property_name: str, entity: Type[declarative_base]):
"""
Insert or update a single database record.
:param record: a record instance of type `entity`
:param compare_property_name: name of the attribute to compare on; note this is the attribute name on `entity`, not the database column name
:param entity: the database entity class; its base class must be declarative_base
:return: Status.added on insert, Status.existing on update, Status.failed on error
"""
try:
skip_column_list = ["id"]
query_result = self._sqlite_session.query(entity) \
.filter(getattr(entity, compare_property_name) == getattr(record, compare_property_name)).first()
if query_result:
for member in inspect(entity).attrs:
member_name = member.key
column_name = member.expression.key
if column_name in skip_column_list:
continue
setattr(query_result, member_name, getattr(record, member_name))
self._sqlite_session.commit()
return Status.existing
else:
self._sqlite_session.add(record)
self._sqlite_session.commit()
return Status.added
except Exception as e:
Logger.error(repr(e), is_with_debug_info=True)
return Status.failed
@staticmethod
def _get_url_param_value(url: str, param_name):
parsed_uri = urlparse(url)
return parse_qs(parsed_uri.query)[param_name][0]
def _dict_to_model(self, msg: dict, release_time_unix):
article = Article()
article.url = msg["content_url"]
if "mid" not in article.url:
return None
mid = int(self._get_url_param_value(article.url, "mid"))
article.index = int(self._get_url_param_value(article.url, "idx"))
article.articleId = mid * 10 + article.index
article.title = msg["title"]
article.digest = msg["digest"]
article.releaseTime_unix = release_time_unix
article.releaseTime = "{0}".format(datetime.fromtimestamp(release_time_unix))
article.delFlag = msg["del_flag"]
article.copyrightStatus = msg["copyright_stat"]
article.author = msg["author"]
article.fileId = msg["fileid"]
article.account = self._wei_xin_name
article.cover = msg["cover"]
article.sourceUrl = msg["source_url"]
return article
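# Usage sketch (the seed URL below is a placeholder; a real one comes from a
# WeChat "view history messages" request and must contain an offset= query
# parameter):
#
#   if __name__ == "__main__":
#       crawler = WeiXinCrawler()
#       crawler.start(wei_xin_name="SomeAccount",
#                     seed_url="https://mp.weixin.qq.com/mp/profile_ext?...&offset=0&...",
#                     max_count=-1,
#                     db_path="articles.db")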
| python |
# coding: utf-8
##############################################################################
# Copyright (C) 2020 Microchip Technology Inc. and its subsidiaries.
#
# Subject to your compliance with these terms, you may use Microchip software
# and any derivatives exclusively with Microchip products. It is your
# responsibility to comply with third party license terms applicable to your
# use of third party software (including open source software) that may
# accompany Microchip software.
#
# THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
# EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
# WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
# PARTICULAR PURPOSE.
#
# IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
# INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
# WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
# BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
# FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
# ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
# THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
##############################################################################
pic32mxBm83BtadbComponentIDList = ["drv_i2c", "i2c1", "sys_time", "core_timer"]
pic32mxBm83BtadbAutoConnectList = [["audio_codec_ak4954", "DRV_I2C", "drv_i2c_0", "drv_i2c"],
["drv_i2c_0", "drv_i2c_I2C_dependency", "i2c1", "I2C1_I2C"],
["sys_time", "sys_time_TMR_dependency", "core_timer", "CORE_TIMER_TMR"]]
pic32mxBm83BtadbPinConfigs = [{"pin": 67, "name": "SDA1", "type": "SDA1", "direction": "", "latch": "", "opendrain": "", "abcd": ""}, # RA15
{"pin": 66, "name": "SCL1", "type": "SCL1", "direction": "", "latch": "", "opendrain": "", "abcd": ""}, # RA14
{"pin": 91, "name": "STBYRST", "type": "GPIO", "direction": "Out", "latch": "High", "opendrain": "true", "pullup": "true", "abcd": ""}] # RA6
pic32mx_bm83_bluetooth_audio_dev_bd = bspSupportObj(pic32mxBm83BtadbPinConfigs, pic32mxBm83BtadbComponentIDList, None, pic32mxBm83BtadbAutoConnectList, None)
addBSPSupport("BSP_PIC32MX_BM83_Bluetooth_Audio_Development_Board", "PIC32MX_BM83_BTADB", pic32mx_bm83_bluetooth_audio_dev_bd)
| python |
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Script name: ShpToZip
#
# Description: A Python module to automate the conversion of .shp files to .zip
# archives.
#
# Shp_to_Zip_README file includes the following information:
# Project information - Script description - Software framework
# Version control - Executing - Contributors - Licence - References
#
# Meta information: v.02.01 | 21 OCT 2017 | deepVector (author)
#-------------------------------------------------------------------------------
# Import system module(s)
import sys
import os
import glob
import zipfile
# Folder and file management:
def ShpToZipInDir(dirOut):
# Check that the input folder exists
if not os.path.exists(dirOut):
print "ERROR: Input folder '%s' does not exist" % dirOut
return False
# If the output folder does not exist, create it
dirOut_Zip = (dirOut + '_Zip')
if not os.path.exists(dirOut_Zip):
os.makedirs(dirOut_Zip)
# Loop through .shp files in the input folder
for inShp in glob.glob(os.path.join(dirOut, "*.shp")):
# Build the .zip filename from the .shp filename
outZip = os.path.join(
dirOut_Zip, os.path.splitext(os.path.basename(inShp))[0] + ".zip")
# Convert the .shp files to .zip files
zipShp(inShp, outZip)
return True
# Zipping:
def zipShp(inShpFile, newZipFN):
# check if the input .shp exists
if not (os.path.exists(inShpFile)):
print " ERROR: '%s' does not exist" % inShpFile
return False
# if the output .zip exists, delete it
if (os.path.exists(newZipFN)):
os.remove(newZipFN)
# If the output .zip still exists, exit
if (os.path.exists(newZipFN)):
print " ERROR: Unable to delete '%s'" % newZipFN
return False
# Open zip file object
zipobj = zipfile.ZipFile(newZipFN, 'w')
# Loop through .shp components
for infile in glob.glob(inShpFile.lower().replace(".shp", ".*")):
# Skip .zip file extension
if os.path.splitext(infile)[1].lower() != ".zip":
# Zip the .shp components
zipobj.write(infile, os.path.basename(infile),
zipfile.ZIP_DEFLATED)
# Close the .zip file object
zipobj.close()
return True
# To run the script standalone, uncomment and enter the path to 'dirOut':
# if __name__ == "__main__":
# dirOut = "C:\\01\\output"
# ShpToZipInDir(dirOut)
| python |
'''
A message containing letters from A-Z is being encoded to numbers using the following mapping:
'A' -> 1
'B' -> 2
...
'Z' -> 26
Given an encoded message containing digits, determine the total number of ways to decode it.
For example,
Given encoded message "12", it could be decoded as "AB" (1 2) or "L" (12).
The number of ways decoding "12" is 2.
TODO: do it again
'''
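# Worked example of the bottom-up recurrence used below, for s = "226":
#   lookupTable[3] = 1                                    (empty suffix)
#   lookupTable[2] = 1                                    ("6")
#   lookupTable[1] = lookupTable[2] + lookupTable[3] = 2  ("2|6" and "26")
#   lookupTable[0] = lookupTable[1] + lookupTable[2] = 3  ("2|2|6", "2|26", "22|6")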
class Solution(object):
def numDecodings(self, s):
"""
:type s: str
:rtype: int
"""
if len(s)<1:
return 0
if len(s)==1 and str(s)<=26 and str(s)>=1:
return 1
result=0
i=len(s)-1
lookupTable=[0 for x in range(len(s)+1)]
# initialize the lookupTable
lookupTable[len(s)]=1
if s[i]!='0':
lookupTable[i]=1
i=i-1
while i>=0:
if s[i]!='0' and int(s[i:i+2])<=26:
lookupTable[i]=lookupTable[i+1]+lookupTable[i+2]
elif s[i]!='0' and int(s[i:i+2])>26:
lookupTable[i]=lookupTable[i+1]
i-=1
return lookupTable[0]
if __name__=="__main__":
solution = Solution()
testCase1='12'
result1=solution.numDecodings(testCase1)
print(str(result1))
testCase2='27'
result2=solution.numDecodings(testCase2)
print(str(result2))
| python |
import time
import json
import urllib.error
import urllib.request
from src.games.player import Player
import numpy as np
from src.config import *
class ReversiRandomPlayer(Player):
"""
AI that plays a uniformly random legal move
"""
def play(self, board):
legal_moves_np = self.game.get_legal_moves(1, board)  # positions where we can move
legal_moves = []
for i in range(self.game.n ** 2):
if legal_moves_np[i]:
legal_moves.append(i)
# print('legal moves: ', list(map(lambda x: (x // self.game.n, x % self.game.n), legal_moves)))
action = -1
if len(legal_moves) != 0:  # there are legal moves
action = legal_moves[np.random.randint(len(legal_moves))]
return action, # it's a tuple
class ReversiGreedyPlayer(Player):
"""
Greedy AI
"""
def __init__(self, game, description="", greedy_mode=0):
"""
greedy mode
=0 greedily maximize the number of pieces flipped by the current move
=1 greedily minimize the opponent's mobility (sadly, this turns out to be weak)
"""
super().__init__(game, description)
# greedy strategy
self.greedy_mode = greedy_mode
def play(self, board):
legal_moves_np = self.game.get_legal_moves(1, board)  # positions where we can move
legal_moves = []
for i in range(self.game.n ** 2):
if legal_moves_np[i]:
legal_moves.append(i)
action = -1
if len(legal_moves) != 0:  # there are legal moves
if self.greedy_mode == 0:
# maximize the number of pieces flipped by this move
max_greedy = -self.game.n ** 2
for i in legal_moves:
board_tmp, _ = self.game.get_next_state(1, i, board)
sum_tmp = np.sum(board_tmp)
# print((i // self.game.n, i % self.game.n), ' greedy: ', sum_tmp)
if max_greedy < sum_tmp:
max_greedy = sum_tmp
action = i
# print((action // self.game.n, action % self.game.n), ' max greedy: ', max_greedy)
else:
# minimize the opponent's mobility
max_greedy = self.game.n ** 2
for i in legal_moves:
board_tmp, _ = self.game.get_next_state(1, i, board)
# positions the opponent can move to
legal_moves_tmp = self.game.get_legal_moves(_, board_tmp)
sum_tmp = np.sum(legal_moves_tmp[:-1])
# print((i // self.game.n, i % self.game.n), ' greedy: ', sum_tmp)
if max_greedy > sum_tmp:
max_greedy = sum_tmp
action = i
# print((action // self.game.n, action % self.game.n), ' max greedy: ', max_greedy)
return action, # it's a tuple
class ReversiHumanPlayer(Player):
"""
Human player, i.e. moves are entered manually
"""
def play(self, board):
legal_moves_np = self.game.get_legal_moves(1, board)  # positions where we can move
legal_moves = []
for i in range(self.game.n ** 2):
if legal_moves_np[i]:
legal_moves.append((i // self.game.n, i % self.game.n))
self.game.display(board)
print(legal_moves)
while True:
try:
x, y = map(int, input().split())
if len(legal_moves) == 0 and x == -1:
return -1, # it's a tuple
else:
action = x * self.game.n + y
if legal_moves_np[action]:
return action, # it's a tuple
else:
print("error!")
except Exception as e:
print(e)
class ReversiBotzonePlayer(Player):
"""
Connects to Botzone
"""
def __init__(self, game, description="", args=default_args):
super().__init__(game, description)
self.matches = {}
self.is_finished = False
self.args = args
def init(self, referee=None):
super().init(referee=referee)
self.matches = {}
self.is_finished = False
self.fetch(self.SomeKindOfMatch)
class Match:
has_request = False
has_response = False
current_request = None
current_response = None
matchid = None
def new_request(self, request):
self.has_request = True
self.has_response = False
self.current_request = request
# TODO: define a specialized match-data class, e.g. one that also stores the board state
class SomeKindOfMatch(Match):
def __init__(self, matchid, first_request):
self.has_request = True
self.current_request = first_request
self.matchid = matchid
# Fetch new match requests from Botzone
def fetch(self, matchClass):
req = urllib.request.Request(self.args.botzone_local_api)
for matchid, m in self.matches.items():
if m.has_response and m.has_request and m.current_response:
print('> Response for match [%s]: %s' % (matchid,
m.current_response))
m.has_request = False
req.add_header("X-Match-" + matchid, m.current_response)
while True:
try:
res = urllib.request.urlopen(req, timeout=None)
botzone_input = res.read().decode()
lines = botzone_input.split('\n')
request_count, result_count = map(int, lines[0].split(' '))
for i in range(0, request_count):
# a new request
matchid = lines[i * 2 + 1]
request = lines[i * 2 + 2]
if matchid in self.matches:
print('> Request for match [%s]: %s' % (matchid, request))
self.matches[matchid].new_request(request)
else:
print('New match [%s] with first request: %s' % (matchid,
request))
self.matches[matchid] = matchClass(matchid, request)
for i in range(0, result_count):
# results of finished matches
matchid, slot, player_count, *scores = lines[
request_count * 2 + 1 + i].split(' ')
if player_count == "0":
print("Match [%s] aborted:\n> I'm player %s" % (matchid,
slot))
else:
print(
"Match [%s] finished:\n> I'm player %s, and the scores are %s"
% (matchid, slot, scores))
self.is_finished = True
self.matches.pop(matchid)
except (urllib.error.URLError, urllib.error.HTTPError):
# Likely a connection timeout because no new request arrived for a while; just retry
print(
"Error reading from Botzone or timeout, retrying 2 seconds later..."
)
time.sleep(2)
continue
break
return self.is_finished
def play(self, board):
resp = dict()
last_action = self.referee.get_last_action()
for mid, m in self.matches.items():
if last_action is None:  # the very first move
break
if last_action >= self.game.n ** 2 or last_action < 0:
resp['x'] = -1
resp['y'] = -1
else:
resp['x'] = int(last_action % self.game.n)
resp['y'] = int(last_action // self.game.n)
m.current_response = json.dumps(resp)
# Store our own action in m.current_response, likewise simulating one step
m.has_response = True
if not self.is_finished and self.fetch(self.SomeKindOfMatch):
"""
If the match has already finished, current_request usually never received the next move, so we have to play the final move ourselves.
It is easy to show that if we can move now, the game must end right after this move:
1. Suppose I have more than 1 mobility and the match is over; then the opponent cannot act after this move, yet I could still act on the following turn, so this case is impossible.
2. Suppose I have exactly 1 mobility; as above the opponent cannot act, so the game ends after this move. Consistent.
3. Suppose I cannot act; this step performs no action and the game ends. Consistent.
"""
legal_moves_np = self.game.get_legal_moves(1, board)  # positions where we can move
for i in range(self.game.n ** 2):  # find a legal position
if legal_moves_np[i]:
print("本地最后一次弥补:", (i // self.game.n, i % self.game.n))
return i, # it's a tuple
action = -1
for mid, m in self.matches.items():
            # simulate one step of the match state from m.current_request, then produce the action
botzone_action = json.loads(m.current_request)
action = int(botzone_action['y']) * self.game.n + int(botzone_action['x'])
# self.fetch(self.SomeKindOfMatch)
return action if 0 <= action < self.game.n ** 2 else -1, # it's a tuple
class ReversiRLPlayer(Player):
"""
    Reinforcement-learning-based AI (work in progress)
"""
def __init__(self, game, choice_mode=0, nnet=None, check_point=None, args=default_args):
"""choice_mode 代表 AI 在运行时如何选择走法(0 代表挑选最优点,1 代表按 pi 概率挑选)"""
super().__init__(game)
# from src.games.reversi.reversi_nnnet import NNetWrapper as NNet
from src.games.reversi.reversi_nnet import NNetWrapper as NNet
from src.lib.mcts import MCTS
self.n1 = NNet(self.game, args) if nnet is None else nnet
self.choice_mode = choice_mode
self.args = args
self.mcts1 = MCTS(self.game, self.n1, self.args)
        # temporary handling
if check_point is not None:
# print('loading ... checkpoint: ', format(check_point))
self.n1.load_checkpoint(check_point[0], check_point[1])
def init(self, referee=None):
super().init(referee)
def play(self, board):
counts = self.mcts1.get_action_probility(board, temp=1)
action = -1
if self.choice_mode == 0:
            # take the point with the highest predicted win rate as the next move
action = np.argmax(counts)
else:
            # sample the move with the predicted win rates as a probability distribution
try:
action = np.random.choice(len(counts), p=counts)
except Exception as e:
# print('Error: ', e)
pass
return action, counts # it's a tuple
if __name__ == "__main__":
pass
| python |
import numpy as np
def assert_array_shape(a, ndim=None, shape=None, dims={}):
if not type(a) is np.ndarray:
raise TypeError("Provided object type (%s) is not nunpy.array." % str(type(a)))
if ndim is not None:
if not a.ndim == ndim:
raise ValueError("Provided array dimensions (%d) are not as expected (%d)." % (a.ndim, ndim))
if shape is not None:
if not np.all(a.shape == shape):
raise ValueError("Provided array size (%s) are not as expected (%s)." % (str(a.shape), shape))
for k, v in dims.items():
if not a.shape[k] == v:
raise ValueError("Provided array's %d-th dimension's size (%d) is not as expected (%d)." % (k, a.shape[k], v))
def assert_array_non_negative(a):
if np.any(a < 0):
raise ValueError("Provided array's contains negative elements.")
def assert_positive_int(i):
    # np.int was a deprecated alias of the builtin int and is gone from modern NumPy
    if not isinstance(i, (int, np.integer)):
        raise TypeError("Provided argument (%s) must be an integer." % str(type(i)))
if not i > 0:
raise ValueError("Provided integer (%d) must be positive." % i)
| python |
from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'new/(?P<item_id>[\d]+)/$', 'reviewclone.views.create_review',
name='create_review'),
url(r'relations/$', 'reviewclone.views.relations_list',
name='relations'),
url(r'relations/new/$', 'reviewclone.views.create_relation',
name='create_relation'),
url(r'relations/delete/$', 'reviewclone.views.delete_relation',
name='delete_relation'),
url(r'clones/$', 'reviewclone.views.similar_list', name='similar_list'),
url(r'movies/$', 'reviewclone.views.items_list', name='items_list'),
url(r'movies/(?P<letter>[-\w]+)/$', 'reviewclone.views.items_list',
name='items_list_letter'),
url(r'review/(?P<review_id>\d+)/$', 'reviewclone.views.after_review',
name='after_review'),
url(r'user/(?P<user_id>[-\d]+)/$', 'reviewclone.views.user_reviews',
name='user_reviews'),
url(r'dashboard/$', 'reviewclone.views.dashboard', name='dashboard'),
)
| python |
from typing import Any, Optional
from pydantic import BaseModel, StrictBool, validator
from app.db.session import Base
class UserBase(BaseModel):
username: str
profile: str
email: str
disabled: StrictBool = False
class UserCreate(UserBase):
password: str
@validator("username")
def validate_username(cls: Any, username: str, **kwargs: Any) -> Any:
        if len(username) <= 4:
            raise ValueError("Username must be longer than 4 characters")
return username
@validator("email")
def validate_email(cls: Any, email: str, **kwargs: Any) -> Any:
if len(email) == 0:
raise ValueError("An email is required")
return email
@validator("profile")
    def validate_profile(cls: Any, profile: str, **kwargs: Any) -> Any:
if len(profile) == 0:
raise ValueError("A profile is required")
return profile
class User(UserBase):
id: Optional[int] = None
class Config:
orm_mode: bool = True
class UserInDB(User):
hashed_password: str
class Users(User):
id: int
class UserUpdate(UserBase):
password: Optional[str]
class Config:
orm_mode: bool = True
class UserPassword(BaseModel):
password: Optional[str] = None
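# Minimal usage sketch (not part of the original module): validators run when the
# model is constructed, so invalid values raise a pydantic ValidationError.
if __name__ == "__main__":
    user = UserCreate(username="alice", profile="default",
                      email="a@example.com", password="secret")
    print(user.username)  # -> alice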
| python |
def palindrome(kata, h, z):
    # h counts the character pairs already matched; z is the length of the word
    if h == z // 2:
        return 'Yes, it is a palindrome'
    elif z % 2 == 0:
        # even length: compare characters mirrored around the centre
        if kata[z // 2 - h - 1] == kata[z // 2 + h]:
            return palindrome(kata, h + 1, z)
        else:
            return 'No, it is not a palindrome'
    else:
        # odd length: skip the middle character
        if kata[z // 2 - h - 1] == kata[z // 2 + 1 + h]:
            return palindrome(kata, h + 1, z)
        else:
            return 'No, it is not a palindrome'
while True:
    x = input('word? ')
    print(palindrome(x, 0, len(x))) | python |
import subprocess
import json
import time
import urllib.request
import os
pem="scripts/Vertx.pem"
jar_file="target/WebChatVertxMaven-0.1.0-fat.jar"
groupName="VertxCluster"
count=1
def url_is_alive(dns):
"""
    Checks that the web server on a given host is reachable.
    :param dns: public DNS name of the host
:rtype: bool
"""
request = urllib.request.Request("http://%s:8080" % dns)
request.get_method = lambda: 'HEAD'
try:
urllib.request.urlopen(request)
return True
except urllib.error.URLError:
return False
def run(pem, dns, jar_file):
print("RUNNING %s" % dns)
outfile = open('logs/%s-log.log' % dns, 'w')
subprocess.call("./scripts/deploy.sh %s %s %s &" % (pem, dns, jar_file), shell=True, stdout=outfile, stderr=outfile)
with open(os.devnull, "w") as f:
subprocess.call("./scripts/addServerToHA.sh node_%s %s &" % (dns, dns), shell=True, stdout=f, stderr=f)
subprocess.call("rm haproxy/haproxy.cfg", shell=True)
res=json.loads(subprocess.Popen("aws ec2 describe-instances --filter Name=\"instance.group-name\",Values=\"%s\"" % groupName, shell=True, stdout=subprocess.PIPE).stdout.read())
have_master=False
nodes = []
master = None
for instance in res['Reservations'][0]['Instances']:
node= dict()
node['DNS'] = instance['PublicDnsName']
node['PRIVATE_IP'] = instance['PrivateIpAddress']
node['PUBLIC_IP'] = instance['PublicIpAddress']
# ONLY FIRST
if not have_master:
have_master = True
subprocess.call("sed 's/$INTERFACE/%s/' src/main/resources/base.xml > src/main/resources/cluster.xml" % instance['PrivateIpAddress'], shell=True)
print("Running: mvn install")
subprocess.call("mvn install", shell=True, stdout=subprocess.PIPE)
run(pem, node['DNS'], jar_file)
node['isMaster'] = True
master = node
# OTHERS
else:
node['isMaster'] = False
nodes.append(node)
with open('logs/instances.json', 'w') as outfile:
json.dump(nodes, outfile)
while len(nodes) > 0:
print("DEPLOYING MASTER ...")
if url_is_alive(master['DNS']):
break
time.sleep( 10 )
print("Master UP")
for node in nodes:
if not node['isMaster']:
run(pem, node['DNS'], jar_file)
for node in nodes:
if not node['isMaster']:
        while True:
if url_is_alive(node['DNS']):
break
time.sleep( 10 )
print("NODE: "+node['DNS']+" is UP")
# outfile_ha = open('logs/haproxy.txt', 'w')
# subprocess.call("haproxy -f haproxy/haproxy.cfg", shell=True, stdout=outfile_ha, stderr=outfile_ha)
| python |
from django.core.urlresolvers import reverse_lazy
from django.utils.text import slugify
def generate_article_link(title, url=None):
if url is None:
url = reverse_lazy('article-detail', kwargs={'slug': slugify(title)})
return "[{0}]({1})".format(title, url)
| python |
#!/usr/bin/env python3
"""
Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
Algorithm class: Convert a ProjectScene from one type to another.
"""
from copy import deepcopy
from sumo.semantic.project_object import ProjectObject
from sumo.semantic.project_object_dict import ProjectObjectDict
from sumo.semantic.project_scene import ProjectScene
from sumo.threedee.compute_bbox import ComputeBbox
from sumo.threedee.voxelizer import Voxelizer
class ProjectConverter(object):
"""
Convert a ProjectScene from one type to another.
The converter only supports converting from more complex types
to less complex types. Specifically:
meshes -> voxels
voxels -> bounding_box
meshes -> bounding_box
"""
allowed_conversions = [("meshes", "voxels"),
("meshes", "bounding_box"),
("voxels", "bounding_box")]
def __init__(self):
pass
def run(self, project, target_type):
"""
Convert an in-memory project to the target type
Inputs:
project (ProjectScene) - input project
target_type (string) - voxels or bounding_box
Return:
new_project (ProjectScene) - a project with the target project type
Exceptions:
ValueError - if target_type is not allowed for the given input project.
See above for allowed conversions.
"""
if (project.project_type, target_type) not in self.allowed_conversions:
raise ValueError("Invalid target_type ({}) for \
project with type {}".format(target_type, project.project_type))
new_settings = deepcopy(project.settings)
new_elements = ProjectObjectDict()
for element in project.elements.values():
new_element = self.convert_element(element, target_type)
new_elements[new_element.id] = new_element
new_project = ProjectScene(project_type=target_type, elements=new_elements,
settings=new_settings)
return new_project
def convert_element(self, element, target_type):
"""
Convert <element> to <target_type> track. Makes a copy of the element.
Inputs:
element (ProjectObject) - element to convert
target_type (string) - destination project type
Return
new_element (ProjectObject) - converted element
See above for allowed conversions.
"""
if (element.project_type, target_type) not in self.allowed_conversions:
raise ValueError("Invalid target_type ({}) for element with type \
{}".format(target_type, element.project_type))
source_type = element.project_type
if target_type == "bounding_box":
if source_type == "voxels":
bounds = element.voxels.bounds()
elif source_type == "meshes":
bounds = ComputeBbox().from_gltf_object(element.meshes)
else:
raise ValueError("Invalid target type") # this should not be possible
new_element = ProjectObject.gen_bounding_box_object(
id=element.id,
bounds=bounds,
pose=deepcopy(element.pose),
category=element.category,
symmetry=element.symmetry,
score=element.score
)
elif target_type == "voxels":
voxelizer = Voxelizer()
voxels = voxelizer.run(element.meshes)
new_element = ProjectObject.gen_voxels_object(
id=element.id,
bounds=voxels.bounds(),
voxels=voxels,
pose=deepcopy(element.pose),
category=element.category,
symmetry=element.symmetry,
score=element.score
)
else:
raise ValueError("Invalid target type") # this should not be possible
return new_element
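# Minimal usage sketch (not part of the original module); assumes `scene` is a
# ProjectScene with project_type "meshes" loaded elsewhere:
#   converter = ProjectConverter()
#   voxel_scene = converter.run(scene, target_type="voxels")
#   box_scene = converter.run(voxel_scene, target_type="bounding_box")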
| python |
# From http://www.scipy-lectures.org/intro/scipy.html#finding-the-minimum-of-a-scalar-function
from scipy import optimize
import numpy as np
import matplotlib.pyplot as plt
def f(x):
return x**2 + 10 * np.sin(x)
x = np.arange(-10, 10, 0.1)
plt.plot(x, f(x))
plt.show()
result = optimize.minimize(f, x0=0)
print("\n{}\nresult\n{}".format('-' * 80, result))
| python |
import planckStyle as s
from pylab import *
g=s.getSinglePlotter()
roots = ['base_omegak_planck_lowl_lowLike_highL','base_omegak_planck_lowl_lowLike_highL_lensing','base_omegak_planck_lowl_lowLike_highL_lensing_post_BAO']
params = g.get_param_array(roots[0], ['omegam', 'omegal', 'H0'])
g.setAxes(params, lims=[0, 1, 0, 1])
g.add_3d_scatter(roots[0], params)
g.add_2d_contours(roots[1], params[0], params[1], filled=False)
#g.add_2d_contours(roots[2], params[0], params[1], filled=True)
g.add_line([1, 0], [0, 1], zorder=1)
g.export('Omegam-Omegal-H0')
g.newPlot()
g.setAxes(params, lims=[0.2, 0.5, 0.5, 0.8])
g.add_3d_scatter(roots[0], params)
g.add_2d_contours(roots[1], params[0], params[1], filled=False, zorder=1)
g.add_2d_contours(roots[2], params[0], params[1], filled=True, zorder=2, alpha=0.85)
g.add_line([1, 0], [0, 1], zorder=0)
g.add_legend(['+lensing','+lensing+BAO'])
g.export('Omegam-Omegal-H0_zoom')
| python |
# coding: utf-8
import logging
from marshmallow import Schema, fields, pre_load, post_dump, validate, ValidationError
from src.exceptions import InvalidUsage
from flask import jsonify
import json
class LoginSchema(Schema):
email = fields.Email(required=True)
password = fields.Str(load_only=True, validate=validate.Length(min=1), required=True)
name = fields.Str(dump_only=True)
surname = fields.Str(dump_only=True)
token = fields.Str(dump_only=True)
createdAt = fields.DateTime(attribute='created_at', dump_only=True)
lastSeen = fields.DateTime(attribute='last_seen', dump_only=True)
type = fields.Str(dump_only=True)
@pre_load
def make_user(self, data, **kwargs):
data = data.get('user')
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
@post_dump
def dump_user(self, data, **kwargs):
return {'user': data}
class Meta:
strict = True
class UserSchema(Schema):
name = fields.Str(validate=validate.Length(min=1))
surname = fields.Str(validate=validate.Length(min=1))
email = fields.Email()
token = fields.Str(dump_only=True)
createdAt = fields.DateTime(attribute='created_at', dump_only=True)
lastSeen = fields.DateTime(attribute='last_seen', dump_only=True)
children = fields.List(fields.Nested(lambda: ChildSchema()), dump_only=True)
parents = fields.List(fields.Nested(lambda: UserSchema(exclude=("children", "parents", "token"))), dump_only=True)
ehrid = fields.Str(dump_only=True)
type = fields.Str(dump_only=True)
timer = fields.DateTime(dump_only=True)
rewards = fields.List(fields.Nested(lambda:RewardSchema()))
@pre_load
def make_user(self, data, **kwargs):
data = data.get('user')
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
@post_dump
def dump_user(self, data, **kwargs):
return {'user': data}
class Meta:
strict = True
class RegisterUserSchema(Schema):
name = fields.Str(validate=validate.Length(min=1), required=True)
surname = fields.Str(validate=validate.Length(min=1), required=True)
email = fields.Email(required=True)
password = fields.Str(load_only=True, validate=validate.Length(min=1), required=True)
confirmPassword = fields.Str(load_only=True, validate=validate.Length(min=1), required=True)
token = fields.Str(dump_only=True)
createdAt = fields.DateTime(attribute='created_at', dump_only=True)
lastSeen = fields.DateTime(attribute='last_seen', dump_only=True)
type = fields.Str(dump_only=True)
@pre_load
def make_user(self, data, **kwargs):
data = data.get('user')
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
@post_dump
def dump_user(self, data, **kwargs):
return {'user': data}
class Meta:
strict = True
class DiabetesInfoSchema(Schema):
measurements= fields.Int(validate=validate.Range(min=1, max=20), required=True)
SU_LO= fields.Float(validate=validate.Range(min=0, max=15), required=True)
SU_HI= fields.Float(validate=validate.Range(min=0, max=15), required=True)
class ObesityInfoSchema(Schema):
goalweight = fields.Int(validate=validate.Range(min=40, max=60), required=True)
class RegisterChildSchema(Schema):
name = fields.Str(validate=validate.Length(min=1), required=True)
surname = fields.Str(validate=validate.Length(min=1), required=True)
email = fields.Email(required=True)
password = fields.Str(load_only=True, validate=validate.Length(min=1), required=True)
confirmPassword = fields.Str(load_only=True, validate=validate.Length(min=1), required=True)
gender = fields.Str(validate=(validate.OneOf(["MALE", "FEMALE", "UNKNOWN", "OTHER"])), required=True)
dateofbirth = fields.DateTime(format="iso", required=True)
disease = fields.Str(validate=(validate.OneOf(["DIABETES", "OBESITY"])), required=True)
token = fields.Str(dump_only=True)
createdAt = fields.DateTime(attribute='created_at', dump_only=True)
lastSeen = fields.DateTime(attribute='last_seen', dump_only=True)
diseaseInfo = fields.Nested(DiabetesInfoSchema())
type = fields.Str(dump_only=True)
@pre_load
def make_user(self, data, **kwargs):
data = data.get('user')
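        # NB: the branches below mutate class-level field maps, so the swapped-in
        # diseaseInfo schema leaks across every instance of RegisterChildSchema.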
if data.get('disease')=="OBESITY":
self.declared_fields.update({'diseaseInfo': fields.Nested(ObesityInfoSchema())})
self.load_fields.update({'diseaseInfo': fields.Nested(ObesityInfoSchema())})
self.fields.update({'diseaseInfo': fields.Nested(ObesityInfoSchema())})
self.dump_fields.update({'diseaseInfo': fields.Nested(ObesityInfoSchema())})
elif data.get('disease')=="DIABETES":
self.declared_fields.update({'diseaseInfo': fields.Nested(DiabetesInfoSchema())})
self.load_fields.update({'diseaseInfo': fields.Nested(DiabetesInfoSchema())})
self.fields.update({'diseaseInfo': fields.Nested(DiabetesInfoSchema())})
self.dump_fields.update({'diseaseInfo': fields.Nested(DiabetesInfoSchema())})
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
@post_dump
def dump_user(self, data, **kwargs):
return {'user': data}
class Meta:
strict = True
class RewardSchema(Schema):
nameOf = fields.Str()
description = fields.Str()
reward = fields.Str()
endDate = fields.Date()
startDate = fields.Date()
ehrid = fields.Str()
@pre_load
def make_reward(self, data, **kwargs):
data = data.get('reward')
return data
class RegisterRewardSchema(Schema):
nameOf = fields.Str(required=True)
description = fields.Str(required=True)
reward = fields.Str(required=True)
endDate = fields.Date(required=True)
startDate = fields.Date(required=True)
ehrid = fields.Str(required=True)
@pre_load
def make_reward(self, data, **kwargs):
data = data.get('reward')
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
class ChildSchema(Schema):
name = fields.Str(dump_only=True)
surname = fields.Str(dump_only=True)
email = fields.Email()
ehrid = fields.Str()
createdAt = fields.DateTime(attribute='created_at', dump_only=True)
lastSeen = fields.DateTime(attribute='last_seen', dump_only=True)
type = fields.Str(dump_only=True)
timer = fields.DateTime(dump_only=True)
rewards = fields.List(fields.Nested(lambda:RewardSchema()))
@pre_load
def make_user(self, data, **kwargs):
data = data.get('user')
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
@post_dump
def dump_user(self, data, **kwargs):
return {'child': data}
class Meta:
strict = True
class ParentSchema(Schema):
name = fields.Str(dump_only=True)
surname = fields.Str(dump_only=True)
email = fields.Email(dump_only=True)
createdAt = fields.DateTime(attribute='created_at', dump_only=True)
lastSeen = fields.DateTime(attribute='last_seen', dump_only=True)
type = fields.Str(dump_only=True)
@pre_load
def make_user(self, data, **kwargs):
data = data.get('user')
return data
def handle_error(self, exc, data, **kwargs):
"""Log and raise our custom exception when (de)serialization fails."""
raise InvalidUsage(exc.messages)
@post_dump
def dump_user(self, data, **kwargs):
return {'child': data}
class Meta:
strict = True
login_schema = LoginSchema()
register_user_schema = RegisterUserSchema()
register_child_schema = RegisterChildSchema()
user_schema = UserSchema()
user_schemas = UserSchema(many=True)
child_schema = ChildSchema()
child_schemas = ChildSchema(many=True)
parent_schemas = ParentSchema(many=True)
register_reward_schema = RegisterRewardSchema()
reward_schema = RewardSchema()
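# Minimal usage sketch (not part of the original module): payloads arrive wrapped
# under a "user" key, which the @pre_load hooks unwrap before validation, e.g.:
#   login_schema.load({"user": {"email": "a@example.com", "password": "secret"}}) | python |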
from django.shortcuts import render
from django.views.generic import TemplateView
from .models import *
from django.conf import settings
from django.http import HttpResponseRedirect
from django.http import JsonResponse
from rest_framework import viewsets
from .serializers import *
class ReactTemplateView(TemplateView):
template_name = 'index.html'
class ServicesViewSet(viewsets.ModelViewSet):
queryset = Service.objects.all()
serializer_class = ServiceSerializer
class BannersViewSet(viewsets.ModelViewSet):
queryset = Banner.objects.filter(status='Y')
serializer_class = BannersSerializer
class FooterDataViewSet(viewsets.ModelViewSet):
queryset = ContactUs.objects.all()[:1]
serializer_class = ContactUsSerializer
| python |
import datetime
import cloudscraper
import colorama
from termcolor import colored
import time
import json
import random
import pickle
from cryptography import fernet
import os
import bs4
import sys
import shutil
import requests, uuid, hashlib, hmac, urllib, string
from pathlib import Path
from colorama import Fore
# Turn off InsecureRequestWarning
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
# The urllib library was split into other modules from Python 2 to Python 3
if sys.version_info.major == 3:
import urllib.parse
colorama.init()
key = b'HzodHGhHL2l4MjeYNm1k_FJa1QUemTKimQIKLpTtStY='
class P_InstaAPI:
def __init__(self, creds):
self.username = creds['username']
self.password = creds['password']
sessionpath = Path(f"sessions/{self.username}.session")
mainlogin = P_InstagramLogin(self.username, self.password, Path("./sessions"))
api = mainlogin.api
if not api.isLoggedIn:
print(colored(f"[{time.ctime()}] API: login failed", "red"))
try: os.remove("secrets.pickle")
except: pass
try: shutil.rmtree("sessions/")
except: pass
exit()
self.api = api
class P_InstagramAPI:
API_URL = 'https://i.instagram.com/api/v1/'
DEVICE_SETTINGS = {'manufacturer': 'Xiaomi',
'model': 'HM 1SW',
'android_version': 19,
'android_release': '6.4'}
USER_AGENT = 'Instagram 89.0.0.21.101 Android ({android_version}/{android_release}; 320dpi; 720x1280; {manufacturer}; {model}; armani; qcom; fr_FR)'.format(**DEVICE_SETTINGS)
IG_SIG_KEY = '4f8732eb9ba7d1c8e8897a75d6474d4eb3f5279137431b2aafb71fafe2abe178'
    EXPERIMENTS = ('ig_promote_reach_objective_fix_universe,ig_android_universe_video_production,ig_search_client_h1_2017_holdout,ig_android_live_follow_from_comments_universe,ig_android_carousel_non_square_creation,ig_android_live_analytics,ig_android_follow_all_dialog_confirmation_copy,ig_android_stories_server_coverframe,ig_android_video_captions_universe,ig_android_offline_location_feed,ig_android_direct_inbox_retry_seen_state,ig_android_ontact_invite_universe,ig_android_live_broadcast_blacklist,ig_android_insta_video_reconnect_viewers,ig_android_ad_async_ads_universe,ig_android_search_clear_layout_universe,ig_android_shopping_reporting,ig_android_stories_surface_universe,ig_android_verified_comments_universe,ig_android_preload_media_ahead_in_current_reel,android_instagram_prefetch_suggestions_universe,ig_android_reel_viewer_fetch_missing_reels_universe,ig_android_direct_search_share_sheet_universe,ig_android_business_promote_tooltip,ig_android_direct_blue_tab,ig_android_async_network_tweak_universe,ig_android_elevate_main_thread_priority_universe,ig_android_stories_gallery_nux,ig_android_instavideo_remove_nux_comments,ig_video_copyright_whitelist,ig_react_native_inline_insights_with_relay,ig_android_direct_thread_message_animation,ig_android_draw_rainbow_client_universe,ig_android_direct_link_style,ig_android_live_heart_enhancements_universe,ig_android_rtc_reshare,ig_android_preload_item_count_in_reel_viewer_buffer,ig_android_users_bootstrap_service,ig_android_auto_retry_post_mode,ig_android_shopping,ig_android_main_feed_seen_state_dont_send_info_on_tail_load,ig_fbns_preload_default,ig_android_gesture_dismiss_reel_viewer,ig_android_tool_tip,ig_android_ad_logger_funnel_logging_universe,ig_android_gallery_grid_column_count_universe,ig_android_business_new_ads_payment_universe,ig_android_direct_links,ig_android_audience_control,ig_android_live_encore_consumption_settings_universe,ig_perf_android_holdout,ig_android_cache_contact_import_list,ig_android_links_receivers,ig_android_ad_impression_backtest,ig_android_list_redesign,ig_android_stories_separate_overlay_creation,ig_android_stop_video_recording_fix_universe,ig_android_render_video_segmentation,ig_android_live_encore_reel_chaining_universe,ig_android_sync_on_background_enhanced_10_25,ig_android_immersive_viewer,ig_android_mqtt_skywalker,ig_fbns_push,ig_android_ad_watchmore_overlay_universe,ig_android_react_native_universe,ig_android_profile_tabs_redesign_universe,ig_android_live_consumption_abr,ig_android_story_viewer_social_context,ig_android_hide_post_in_feed,ig_android_video_loopcount_int,ig_android_enable_main_feed_reel_tray_preloading,ig_android_camera_upsell_dialog,ig_android_ad_watchbrowse_universe,ig_android_internal_research_settings,ig_android_search_people_tag_universe,ig_android_react_native_ota,ig_android_enable_concurrent_request,ig_android_react_native_stories_grid_view,ig_android_business_stories_inline_insights,ig_android_log_mediacodec_info,ig_android_direct_expiring_media_loading_errors,ig_video_use_sve_universe,ig_android_cold_start_feed_request,ig_android_enable_zero_rating,ig_android_reverse_audio,ig_android_branded_content_three_line_ui_universe,ig_android_live_encore_production_universe,ig_stories_music_sticker,ig_android_stories_teach_gallery_location,ig_android_http_stack_experiment_2017,ig_android_stories_device_tilt,ig_android_pending_request_search_bar,ig_android_fb_topsearch_sgp_fork_request,ig_android_seen_state_with_view_info,ig_android_animation_perf_reporter_timeout,ig_android_new_block_flow,ig_android_sto'
                   'ry_tray_title_play_all_v2,ig_android_direct_address_links,ig_android_stories_archive_universe,ig_android_save_collections_cover_photo,ig_android_live_webrtc_livewith_production,ig_android_sign_video_url,ig_android_stories_video_prefetch_kb,ig_android_stories_create_flow_favorites_tooltip,ig_android_live_stop_broadcast_on_404,ig_android_live_viewer_invite_universe,ig_android_promotion_feedback_channel,ig_android_render_iframe_interval,ig_android_accessibility_logging_universe,ig_android_camera_shortcut_universe,ig_android_use_one_cookie_store_per_user_override,ig_profile_holdout_2017_universe,ig_android_stories_server_brushes,ig_android_ad_media_url_logging_universe,ig_android_shopping_tag_nux_text_universe,ig_android_comments_single_reply_universe,ig_android_stories_video_loading_spinner_improvements,ig_android_collections_cache,ig_android_comment_api_spam_universe,ig_android_facebook_twitter_profile_photos,ig_android_shopping_tag_creation_universe,ig_story_camera_reverse_video_experiment,ig_android_direct_bump_selected_recipients,ig_android_ad_cta_haptic_feedback_universe,ig_android_vertical_share_sheet_experiment,ig_android_family_bridge_share,ig_android_search,ig_android_insta_video_consumption_titles,ig_android_stories_gallery_preview_button,ig_android_fb_auth_education,ig_android_camera_universe,ig_android_me_only_universe,ig_android_instavideo_audio_only_mode,ig_android_user_profile_chaining_icon,ig_android_live_video_reactions_consumption_universe,ig_android_stories_hashtag_text,ig_android_post_live_badge_universe,ig_android_swipe_fragment_container,ig_android_search_users_universe,ig_android_live_save_to_camera_roll_universe,ig_creation_growth_holdout,ig_android_sticker_region_tracking,ig_android_unified_inbox,ig_android_live_new_watch_time,ig_android_offline_main_feed_10_11,ig_import_biz_contact_to_page,ig_android_live_encore_consumption_universe,ig_android_experimental_filters,ig_android_search_client_matching_2,ig_android_react_native_inline_insights_v2,ig_android_business_conversion_value_prop_v2,ig_android_redirect_to_low_latency_universe,ig_android_ad_show_new_awr_universe,ig_family_bridges_holdout_universe,ig_android_background_explore_fetch,ig_android_following_follower_social_context,ig_android_video_keep_screen_on,ig_android_ad_leadgen_relay_modern,ig_android_profile_photo_as_media,ig_android_insta_video_consumption_infra,ig_android_ad_watchlead_universe,ig_android_direct_prefetch_direct_story_json,ig_android_shopping_react_native,ig_android_top_live_profile_pics_universe,ig_android_direct_phone_number_links,ig_android_stories_weblink_creation,ig_android_direct_search_new_thread_universe,ig_android_histogram_reporter,ig_android_direct_on_profile_universe,ig_android_network_cancellation,ig_android_background_reel_fetch,ig_android_react_native_insights,ig_android_insta_video_audio_encoder,ig_android_family_bridge_bookmarks,ig_android_data_usage_network_layer,ig_android_universal_instagram_deep_links,ig_android_dash_for_vod_universe,ig_android_modular_tab_discover_people_redesign,ig_android_mas_sticker_upsell_dialog_universe,ig_android_ad_add_per_event_counter_to_logging_event,ig_android_sticky_header_top_chrome_optimization,ig_android_rtl,ig_android_biz_conversion_page_pre_select,ig_android_promote_from_profile_button,ig_android_live_broadcaster_invite_universe,ig_android_share_spinner,ig_android_text_action,ig_android_own_reel_title_universe,ig_promotions_unit_in_insights_landing_page,ig_android_business_settings_header_univ,ig_android_save_longpress_tooltip,ig_android_cons'
                   'train_image_size_universe,ig_android_business_new_graphql_endpoint_universe,ig_ranking_following,ig_android_stories_profile_camera_entry_point,ig_android_universe_reel_video_production,ig_android_power_metrics,ig_android_sfplt,ig_android_offline_hashtag_feed,ig_android_live_skin_smooth,ig_android_direct_inbox_search,ig_android_stories_posting_offline_ui,ig_android_sidecar_video_upload_universe,ig_android_promotion_manager_entry_point_universe,ig_android_direct_reply_audience_upgrade,ig_android_swipe_navigation_x_angle_universe,ig_android_offline_mode_holdout,ig_android_live_send_user_location,ig_android_direct_fetch_before_push_notif,ig_android_non_square_first,ig_android_insta_video_drawing,ig_android_swipeablefilters_universe,ig_android_live_notification_control_universe,ig_android_analytics_logger_running_background_universe,ig_android_save_all,ig_android_reel_viewer_data_buffer_size,ig_direct_quality_holdout_universe,ig_android_family_bridge_discover,ig_android_react_native_restart_after_error_universe,ig_android_startup_manager,ig_story_tray_peek_content_universe,ig_android_profile,ig_android_high_res_upload_2,ig_android_http_service_same_thread,ig_android_scroll_to_dismiss_keyboard,ig_android_remove_followers_universe,ig_android_skip_video_render,ig_android_story_timestamps,ig_android_live_viewer_comment_prompt_universe,ig_profile_holdout_universe,ig_android_react_native_insights_grid_view,ig_stories_selfie_sticker,ig_android_stories_reply_composer_redesign,ig_android_streamline_page_creation,ig_explore_netego,ig_android_ig4b_connect_fb_button_universe,ig_android_feed_util_rect_optimization,ig_android_rendering_controls,ig_android_os_version_blocking,ig_android_encoder_width_safe_multiple_16,ig_search_new_bootstrap_holdout_universe,ig_android_snippets_profile_nux,ig_android_e2e_optimization_universe,ig_android_comments_logging_universe,ig_shopping_insights,ig_android_save_collections,ig_android_live_see_fewer_videos_like_this_universe,ig_android_show_new_contact_import_dialog,ig_android_live_view_profile_from_comments_universe,ig_fbns_blocked,ig_formats_and_feedbacks_holdout_universe,ig_android_reduce_view_pager_buffer,ig_android_instavideo_periodic_notif,ig_search_user_auto_complete_cache_sync_ttl,ig_android_marauder_update_frequency,ig_android_suggest_password_reset_on_oneclick_login,ig_android_promotion_entry_from_ads_manager_universe,ig_android_live_special_codec_size_list,ig_android_enable_share_to_messenger,ig_android_background_main_feed_fetch,ig_android_live_video_reactions_creation_universe,ig_android_channels_home,ig_android_sidecar_gallery_universe,ig_android_upload_reliability_universe,ig_migrate_mediav2_universe,ig_android_insta_video_broadcaster_infra_perf,ig_android_business_conversion_social_context,android_ig_fbns_kill_switch,ig_android_live_webrtc_livewith_consumption,ig_android_destroy_swipe_fragment,ig_android_react_native_universe_kill_switch,ig_android_stories_book_universe,ig_android_all_videoplayback_persisting_sound,ig_android_draw_eraser_universe,ig_direct_search_new_bootstrap_holdout_universe,ig_android_cache_layer_bytes_threshold,ig_android_search_hash_tag_and_username_universe,ig_android_business_promotion,ig_android_direct_search_recipients_controller_universe,ig_android_ad_show_full_name_universe,ig_android_anrwatchdog,ig_android_qp_kill_switch,ig_android_2fac,ig_direct_bypass_group_size_limit_universe,ig_android_promote_simplified_flow,ig_android_share_to_whatsapp,ig_android_hide_bottom_nav_bar_on_discover_people,ig_fbns_dump_ids,ig_android_hands_free_b'
                   'efore_reverse,ig_android_skywalker_live_event_start_end,ig_android_live_join_comment_ui_change,ig_android_direct_search_story_recipients_universe,ig_android_direct_full_size_gallery_upload,ig_android_ad_browser_gesture_control,ig_channel_server_experiments,ig_android_video_cover_frame_from_original_as_fallback,ig_android_ad_watchinstall_universe,ig_android_ad_viewability_logging_universe,ig_android_new_optic,ig_android_direct_visual_replies,ig_android_stories_search_reel_mentions_universe,ig_android_threaded_comments_universe,ig_android_mark_reel_seen_on_Swipe_forward,ig_internal_ui_for_lazy_loaded_modules_experiment,ig_fbns_shared,ig_android_capture_slowmo_mode,ig_android_live_viewers_list_search_bar,ig_android_video_single_surface,ig_android_offline_reel_feed,ig_android_video_download_logging,ig_android_last_edits,ig_android_exoplayer_4142,ig_android_post_live_viewer_count_privacy_universe,ig_android_activity_feed_click_state,ig_android_snippets_haptic_feedback,ig_android_gl_drawing_marks_after_undo_backing,ig_android_mark_seen_state_on_viewed_impression,ig_android_live_backgrounded_reminder_universe,ig_android_live_hide_viewer_nux_universe,ig_android_live_monotonic_pts,ig_android_search_top_search_surface_universe,ig_android_user_detail_endpoint,ig_android_location_media_count_exp_ig,ig_android_comment_tweaks_universe,ig_android_ad_watchmore_entry_point_universe,ig_android_top_live_notification_universe,ig_android_add_to_last_post,ig_save_insights,ig_android_live_enhanced_end_screen_universe,ig_android_ad_add_counter_to_logging_event,ig_android_blue_token_conversion_universe,ig_android_exoplayer_settings,ig_android_progressive_jpeg,ig_android_offline_story_stickers,ig_android_gqls_typing_indicator,ig_android_chaining_button_tooltip,ig_android_video_prefetch_for_connectivity_type,ig_android_use_exo_cache_for_progressive,ig_android_samsung_app_badging,ig_android_ad_holdout_watchandmore_universe,ig_android_offline_commenting,ig_direct_stories_recipient_picker_button,ig_insights_feedback_channel_universe,ig_android_insta_video_abr_resize,ig_android_insta_video_sound_always_on')
SIG_KEY_VERSION = '4'
def __init__(self, username, password):
m = hashlib.md5()
m.update(username.encode('utf-8') + password.encode('utf-8'))
self.device_id = self.generateDeviceId(m.hexdigest())
self.isLoggedIn = False
self.LastResponse = None
self.PATH = ""
self.s = requests.Session()
self.username = username
self.password = password
self.uuid = self.generateUUID(True)
def sendMessage(self, target_user, msgText):
target_user = f'[[{",".join([target_user])}]]'
url = 'direct_v2/threads/broadcast/text/'
data = {
'text': msgText,
'_uuid': self.uuid,
'_csrftoken': self.token,
'recipient_users': target_user,
'_uid': self.username_id,
'action': 'send_item',
'client_context': self.generateUUID(True)
}
return self.SendRequest(url, data)
def login(self, force=False):
if (not self.isLoggedIn or force):
if (self.SendRequest('si/fetch_headers/?challenge_type=signup&guid=' + self.generateUUID(False), None, True)):
data = {'phone_id': self.generateUUID(True),
'_csrftoken': self.LastCookies['csrftoken'],
'username': self.username,
'guid': self.uuid,
'device_id': self.device_id,
'password': self.password,
'login_attempt_count': '0'}
if (self.SendRequest('accounts/login/', self.generateSignature(json.dumps(data)), True)):
self.isLoggedIn = True
self.username_id = self.logged_in_user_pk
self.rank_token = f"{self.username_id}_{self.uuid}"
self.token = self.LastCookies["csrftoken"]
self.syncFeatures()
self.autoCompleteUserList()
self.timelineFeed()
self.getv2Inbox()
self.getRecentActivity()
return True
def syncFeatures(self):
data = json.dumps({'_uuid': self.uuid,
'_uid': self.username_id,
'id': self.username_id,
'_csrftoken': self.token,
'experiments': self.EXPERIMENTS})
return self.SendRequest('qe/sync/', self.generateSignature(data))
def autoCompleteUserList(self):
return self.SendRequest('friendships/autocomplete_user_list/')
def getRecentActivity(self):
activity = self.SendRequest('news/inbox/?')
return activity
def timelineFeed(self):
return self.SendRequest('feed/timeline/')
def getv2Inbox(self, limit=50):
inbox = self.SendRequest(
'direct_v2/inbox/?persistentBadging=true&use_unified_inbox=true&limit={}'.format(limit))
return inbox
def generateSignature(self, data, skip_quote=False):
if not skip_quote:
try:
parsedData = urllib.parse.quote(data)
except AttributeError:
parsedData = urllib.quote(data)
else:
parsedData = data
return 'ig_sig_key_version=' + self.SIG_KEY_VERSION + '&signed_body=' + hmac.new(
self.IG_SIG_KEY.encode('utf-8'), data.encode('utf-8'), hashlib.sha256).hexdigest() + '.' + parsedData
def generateDeviceId(self, seed):
volatile_seed = "12345"
m = hashlib.md5()
m.update(seed.encode('utf-8') + volatile_seed.encode('utf-8'))
return 'android-' + m.hexdigest()[:16]
def generateUUID(self, type):
generated_uuid = str(uuid.uuid4())
if (type):
return generated_uuid
else:
return generated_uuid.replace('-', '')
def SendRequest(self, endpoint, post=None, login=False, wait=20):
verify = False # Don't show request warning
if (not self.isLoggedIn and not login):
raise Exception("Not logged in!\n")
self.s.headers.update(
{'Connection': 'close',
'Accept': '*/*',
'Content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Cookie2': '$Version=1',
'Accept-Language': 'en-US',
'User-Agent': self.USER_AGENT})
while True:
try:
if (post is not None):
response = self.s.post(self.API_URL + endpoint, data=post, verify=verify)
else:
response = self.s.get(self.API_URL + endpoint, verify=verify)
break
except Exception as e:
print('Except on SendRequest (wait 60 sec and resend): ' + str(e))
time.sleep(60)
if response.status_code == 200:
self.LastCookies = response.cookies
try: self.logged_in_user_pk = json.loads(response.text)['logged_in_user']['pk']
except: pass
return response.status_code, json.loads(response.text)
else:
if response.status_code != 405:
print(colored(f"[{time.ctime()}] API: login failed", "red"))
try: os.remove("secrets.pickle")
except: pass
try: shutil.rmtree("sessions/")
except: pass
exit()
try:
self.LastCookies = response.cookies
try: self.logged_in_user_pk = json.loads(response.text)['logged_in_user']['pk']
except: pass
return response.status_code, json.loads(response.text)
except:
pass
return False
def default_data(self):
return {"_uuid": self.uuid, "_uid": self.username_id, "_csrftoken": self.token}
class P_InstagramLogin(object):
def __init__(self, username, password, folder=Path("./")):
encrypt_creds = fernet.Fernet(key)
self.username = username
self.password = password
self.path = Path(str(folder) + "/" + username + ".session")
if not os.path.exists(str(folder)):
os.mkdir(folder)
if not os.path.exists(self.path):
self.api = P_InstagramAPI(self.username, self.password)
try:
self.api.login()
except KeyError: # logged_in_user -> couldn't login
shutil.rmtree("sessions/")
exit()
self.api.password = encrypt_creds.encrypt(str.encode(self.password))
pickle.dump(self.api, open(self.path, "wb"))
self.api.password = encrypt_creds.decrypt(self.api.password)
else:
self.api = pickle.load(open(self.path, "rb"))
self.api.password = encrypt_creds.decrypt(self.api.password)
if not self.api.isLoggedIn:
self.api.login()
if self.api.isLoggedIn:
pickle.dump(self.api, open(self.path, "wb"))
class Scraper:
def __init__(self, accept):
self.banner()
self.scraper = cloudscraper.create_scraper()
self.link = 'https://www.instagram.com/accounts/login/'
self.login_url = 'https://www.instagram.com/accounts/login/ajax/'
self.time = int(datetime.datetime.now().timestamp())
response = self.scraper.get(self.link)
try:
self.csrf = response.cookies['csrftoken']
except:
self.csrf = "bguzeiugege"
self.acceptRequests = accept
if os.path.exists("accept.json"):
self.acceptRequests=False
self.csrf_token = None
self.username = None
self.password = None
self.new_requests = 0
self.pending_users = {}
self.accepted = []
self.totalProgress = []
self.save_login_credentials()
self.payload = {
'username': self.username,
'enc_password': f'#PWD_INSTAGRAM_BROWSER:0:{self.time}:{self.password}',
'queryParams': {},
'optIntoOneTap': 'false'
}
self.login_header = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/49.0.2623.112 Safari/537.36',
"X-Requested-With": "XMLHttpRequest",
"Referer": "https://www.instagram.com/accounts/login/",
"x-csrftoken": self.csrf
}
# Maintaining the acquired login session
self.scraper = cloudscraper.CloudScraper()
self.login()
self.run()
self.remove = "0"
self.p_ = ["0"]
self.period = 50
for e in range(self.period):
self.p_.append(".")
self.start = time.perf_counter()
def progress(self, user, e, total):
total_max = total
total = total_max - 1
try:
p = "".join(self.p_)
percentage = round(e / total * 100, 2)
now = time.perf_counter() - self.start
num = str(now).split("e")[0]
perc = float(num)
if int(percentage) > 0:
time_remaining = 100 * perc / round(percentage)
sys.stdout.write("\r" + colored(round(time_remaining - now, 5), 'magenta')
+ "|t/s| processing users: " + user + " " + "[" + colored(p.split(self.remove)[0],
"magenta") + colored(
str(percentage) + "%",
"yellow") +
p.split(self.remove)[1] + " ]" + "time remaining: " + str(
datetime.datetime.now() + datetime.timedelta(
seconds=round(time_remaining - now)) - datetime.datetime.now()))
sys.stdout.flush()
time.sleep(0.3)
iter_index = round(e / total * self.period)
current_position = self.p_.index(self.remove)
self.p_.pop(current_position)
if e == total - 2 or e == total or e == total - 1:
self.p_.append(str(percentage) + "%")
else:
self.p_.insert(iter_index, str(percentage) + "%")
self.remove = str(percentage) + "%"
except:
percentage = "0"
def save_login_credentials(self):
encrypt_creds = fernet.Fernet(key)
if os.path.exists("secrets.pickle"):
with open("secrets.pickle", "rb") as f:
decrypt_pickle = pickle.load(f)
decrypt_cryptography = encrypt_creds.decrypt(decrypt_pickle)
decrypt_pickle2 = pickle.loads(decrypt_cryptography)
self.username = decrypt_pickle2.get("username", "specify a username")
self.password = decrypt_pickle2.get("password", "specify a password")
self.p_api = P_InstaAPI({"username": self.username, "password": self.password})
else:
print(colored("[+] Creating credentials file", "green"))
self.username = input("Enter username: ")
self.password = input("Enter password: ")
credentials = {"username": self.username, "password": self.password}
pickled_credentials = pickle.dumps(credentials)
encrypted = encrypt_creds.encrypt(pickled_credentials)
with open("secrets.pickle", "wb") as f:
pickle.dump(encrypted, f)
self.p_api = P_InstaAPI(credentials)
def login(self):
# Logging the user in
login_response = self.scraper.post(self.login_url, data=self.payload, headers=self.login_header)
# print(login_response, login_response.text)
json_data = json.loads(login_response.text)
# print(json_data)
if json_data.get("authenticated"):
print(colored("\n[+] Successfully logged in", "green"))
cookies = login_response.cookies
cookie_jar = cookies.get_dict()
self.csrf_token = cookie_jar['csrftoken']
print("csrf_token:", self.csrf_token)
session_id = cookie_jar['sessionid']
user_id = cookie_jar['ds_user_id']
print("session_id:", session_id)
else:
print(colored(f"[{time.ctime()}] cloudscraper: login failed {login_response.text}", "red"))
try: os.remove("secrets.pickle")
except: pass
try: shutil.rmtree("sessions/")
except: pass
exit()
try:
time.sleep(random.randrange(2, 5))
user = self.scraper.get(f"https://www.instagram.com/{self.username}/")
logged_user = str(bs4.BeautifulSoup(user.text, 'lxml').title.text).split('•')[0]
if "is on Instagram " in logged_user:
print(colored(f"\n[+] {time.ctime()} logged in as {logged_user.replace('is on Instagram', '')}", "blue"))
else:
try:
print(colored(f"\n[+] {time.ctime()} logged in as {logged_user.replace('is on Instagram', '')}", "blue"))
except:
print(colored(f"\n[+] {time.ctime()} logged in as {logged_user}", "blue"))
except Exception:
print(colored(f"\n[+] logged in as {self.username}", "blue"))
self.user_id = json_data['userId']
self.uuid = self.generateUUID(True)
def pending_requests(self):
# Get the pending users
parameter = {"variables": {"fetch_media_count": 0, "fetch_suggested_count": 30, "ignore_cache": True,
"filter_followed_friends": True, "seen_ids": [], "include_reel": True}}
t = json.dumps(parameter)
time.sleep(random.randrange(1, 4))
headers = {
'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 12_3_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, '
'like Gecko) Mobile/15E148 Instagram 105.0.0.11.118 (iPhone11,8; iOS 12_3_1; en_US; en-US; '
'scale=2.00; 828x1792; 165586599) '
}
r = self.scraper.get("https://i.instagram.com/api/v1/friendships/pending/", headers=headers)
print(f"\n{('═'*48)}\n\n[+] Pending follow requests")
pending = []
pending_total = 0
for e, data in enumerate(r.json()['users']):
e += 1
print(f"[{e}] {data['username']}")
pending.append(data["username"])
pending_total += 1
self.totalProgress.append(data)
if self.acceptRequests:
# Accept the obtained requests
user_num = 1
for user in pending:
self.accept_request(user, user_num)
self.accepted.append(user)
pending_total -= 1
user_num += 1
try:
for pending_user in self.accepted:
pending.remove(pending_user)
except:
pending.clear()
        self.pending_users.update({"username": pending, "total_requests": [pending_total]})
def process_users(self):
if os.path.exists(f"{self.username}_pending_users.json"):
total_pending = 0
newRequest = 0
processed_users = []
f = open(f"{self.username}_pending_users.json", "r")
data = json.load(f)
for e, user in enumerate(self.pending_users["username"]):
self.progress(user, e, len(self.pending_users['username']))
if user in data["username"] or user in self.accepted:
pass
else:
newRequest += 1
print(colored(f"[+] New request from {user}", "green"))
processed_users.append(user)
if user in data["username"]:
processed_users.remove(user)
if len(self.pending_users["username"]) >= 200:
# If pending requests are more than 200 add to previous data count
total_pending += data["total_requests"][0] + newRequest
else:
total_pending += self.pending_users["total_requests"][0]
# Use this to get the rate of users
self.new_requests = newRequest
print(f"\n{self.username} has {total_pending} pending follow requests")
f.close()
f2 = open(f"{self.username}_pending_users.json", "w")
if self.acceptRequests:
json.dump({"username": self.pending_users['username'], "total_requests": [total_pending],
"accepted": self.accepted}, f2, indent=4, sort_keys=True)
f2.close()
else:
json.dump({"username": data["username"] + processed_users, "total_requests": [total_pending],
"accepted": self.accepted}, f2, indent=4, sort_keys=True)
f2.close()
else:
with open(f"{self.username}_pending_users.json", "w") as f:
json.dump(self.pending_users, f, indent=4, sort_keys=True)
print(f"\n{self.username} has {self.pending_users['total_requests'][0]} pending follow requests")
total_pending = self.pending_users["total_requests"][0]
self.send_msg(total_pending)
def get_user_id(self, username):
# Get the users info
id = self.scraper.get(f"https://www.instagram.com/{username}/?__a=1")
id_data = id.json()
user_data = id_data["graphql"]["user"]
full_name = user_data.get("username")
user_id = user_data.get("id")
return {"id": user_id, "username": full_name}
def accept_request(self, accept_user, current_user):
# Called to accept the parsed user
headers = {
'content-length': '0',
'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 12_3_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, '
'like Gecko) Mobile/15E148 Instagram 105.0.0.11.118 (iPhone11,8; iOS 12_3_1; en_US; en-US; '
'scale=2.00; 828x1792; 165586599) ',
"x-csrftoken": self.csrf_token
}
requested_user = self.get_user_id(accept_user)
id = requested_user.get("id")
username = requested_user.get("username")
accept = self.scraper.post(f"https://www.instagram.com/web/friendships/{id}/approve/", headers=headers)
if 'status":"ok' in accept.text:
print(colored(f'[+] Accepted: @{username}', 'green'))
else:
print(colored('[-] Failed to accept user', 'red'))
def accept_all(self):
if self.acceptRequests:
sys.stdout.write("\r[+] Accepting pending follow requests")
sys.stdout.flush()
time.sleep(1)
while True:
if len(self.pending_users['username']) < 200:
sys.stdout.write("\r[+] No more pending follow requests to accept")
sys.stdout.flush()
time.sleep(1)
self.acceptRequests = False
break
else:
self.pending_requests()
self.process_users()
time.sleep(1)
f = open("accept.json", "w")
json.dump({'accept': False}, f)
f.close()
print("\n")
else:
pass
def generateUUID(self, type_):
generated_uuid = str(uuid.uuid4())
if (type_):
return generated_uuid
else:
return generated_uuid.replace('-', '')
def send_msg(self, total_pending):
try:
self.p_api.api.sendMessage(
self.user_id,
f"Pending follow requests: {total_pending}\n\n"
f"Date: {time.ctime()}\n\n"
f"User: @{self.username}\n"
f"User ID: {self.user_id}"
)
except Exception as e:
print("Unable to send DM ->", e)
print(self.p_api.api.LastResponse)
def run(self):
self.waiting = random.randint(3600, 4200)
def runScraper():
while True:
if self.new_requests >= 50:
self.waiting = random.randint(2400, 3000)
if self.new_requests >= 100:
self.waiting = random.randint(1800, 2400)
if self.new_requests >= 150:
self.waiting = random.randint(900, 1200)
self.pending_requests()
self.process_users()
self.remove = "0"
self.p_ = ["0"]
self.period = 50
for e in range(self.period):
self.p_.append(".")
self.accept_all()
print(colored(f"[{time.ctime()}] Next run in {round((self.waiting/60))} minutes", "blue"))
time.sleep(self.waiting)
runScraper()
def banner(self):
print(colored('''
██████╗ ███████╗ ██████╗ ██╗ ██╗███████╗███████╗████████╗ ███████╗ ██████╗ █████╗ ███╗ ██╗███╗ ██╗███████╗██████╗
██╔══██╗██╔════╝██╔═══██╗██║ ██║██╔════╝██╔════╝╚══██╔══╝ ██╔════╝██╔════╝██╔══██╗████╗ ██║████╗ ██║██╔════╝██╔══██╗
██████╔╝█████╗ ██║ ██║██║ ██║█████╗ ███████╗ ██║ ███████╗██║ ███████║██╔██╗ ██║██╔██╗ ██║█████╗ ██████╔╝
██╔══██╗██╔══╝ ██║▄▄ ██║██║ ██║██╔══╝ ╚════██║ ██║ ╚════██║██║ ██╔══██║██║╚██╗██║██║╚██╗██║██╔══╝ ██╔══██╗
██║ ██║███████╗╚██████╔╝╚██████╔╝███████╗███████║ ██║ ███████║╚██████╗██║ ██║██║ ╚████║██║ ╚████║███████╗██║ ██║
╚═╝ ╚═╝╚══════╝ ╚══▀▀═╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚══════╝ ╚═════╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═══╝╚══════╝╚═╝ ╚═╝
''', "blue"), end="\n")
if __name__ == "__main__":
'''
To accept follow requests -> Scraper(accept=True)
'''
Scraper(accept=False)
| python |
import floobits
# code run after our own by other plugins cannot pollute the floobits namespace
__globals = globals()
for k, v in floobits.__dict__.items():
__globals[k] = v
# Vim essentially runs python by concatenating the python strings into a single python file and running it.
# Before we did this, the following would happen:
# 1. import utils
# 2. from ycm import utils
# 3. utils.parse_url # references the wrong utils ...
| python |
# -*- coding: utf-8 -*-
"""OAuth Token views."""
from __future__ import absolute_import, division, print_function, unicode_literals
from flask import Blueprint, abort, flash, redirect, render_template, url_for
from flask_babel import lazy_gettext as _
from flask_login import current_user, login_required
from .models import Token
blueprint = Blueprint('oauth.token', __name__, url_prefix='/oauth/tokens',
static_folder='../../static')
@blueprint.route('/')
@login_required
def home():
"""Token landing page."""
if not current_user.is_admin:
abort(403)
tokens = Token.query.all()
return render_template('oauth/tokens/home.html', tokens=tokens)
@blueprint.route('/delete/<int:token_id>', methods=['GET', 'DELETE'])
@login_required
def delete(token_id):
"""Delete token."""
if not current_user.is_admin:
abort(403)
token = Token.query.get(token_id)
if not token:
abort(404)
else:
token_id = token.id
token.delete()
flash(_('Successfully deleted OAuth2 Bearer token "%(token_id)s".', token_id=token_id),
'success')
return redirect(url_for('oauth.token.home'))
| python |
# Copyright 2019 VMware, Inc.
# SPDX-License-Identifier: BSD-2-Clause
import argparse
import os
import network_insight_sdk_generic_datasources.common.yaml_utilities as yaml_utilities
from network_insight_sdk_generic_datasources.archive.zip_archiver import ZipArchiver
from network_insight_sdk_generic_datasources.common.constants import TABLE_JOINERS_KEY
from network_insight_sdk_generic_datasources.common.constants import WORKLOADS_KEY
from network_insight_sdk_generic_datasources.common.constants import PACKAGE_HANDLER_KEY
from network_insight_sdk_generic_datasources.common.constants import RESULT_WRITER_KEY
from network_insight_sdk_generic_datasources.common.constants import GENERATION_DIRECTORY_KEY
def parse_arguments():
parser = argparse.ArgumentParser(description='Collect cli data from physical device')
parser.add_argument('-d', '--device', action='store', help='Physical Device Type e.g. Cisco')
parser.add_argument('-m', '--model', action='store', help='Physical Device model e.g. N5k')
parser.add_argument('-s', '--device_type', action='store', help='Software installed on device')
parser.add_argument('-i', '--ip_or_fqdn', action='store', help='IP or FQDN')
parser.add_argument('-u', '--username', action='store', help='Username for login')
parser.add_argument('-p', '--password', action='store', help='Password for login')
parser.add_argument('-z', '--self_zip', action='store', help='Self Zip the Project', default='false')
parser.add_argument('-P', '--port', action='store', help='Specific port to connect', default='22')
parser.add_argument('-o', '--output_zip', action='store', help='Output zip file to create with CSVs')
args = parser.parse_args()
return args
def main():
import network_insight_sdk_generic_datasources.common.physical_device as physical_device
args = parse_arguments()
dir_path = "routers_and_switches/{}".format(args.device)
# yaml_definition_file_name = "{}_{}_command_map.yml".format(args.device, args.model)
yaml_definition_file_name = "{}.yml".format(args.device)
self_zip = True if args.self_zip == 'true' or args.self_zip == 'True' else False
with open("%s%s%s%s%s" % (os.path.dirname(__file__), os.path.sep,
dir_path,
os.path.sep,
yaml_definition_file_name)) as f:
configuration = yaml_utilities.altered_safe_load(f)
table_joiner = configuration[args.model][TABLE_JOINERS_KEY] if TABLE_JOINERS_KEY in configuration[
args.model] else None
generation_directory = configuration[GENERATION_DIRECTORY_KEY] + '/' + args.ip_or_fqdn
physical_device = physical_device.PhysicalDevice(args.device, args.model,
configuration[args.model][WORKLOADS_KEY],
args,
table_joiner,
configuration[args.model][RESULT_WRITER_KEY],
generation_directory)
physical_device.process()
if PACKAGE_HANDLER_KEY in configuration:
zipper = ZipArchiver(self_zip, args.output_zip, generation_directory)
zipper.zipdir()
if __name__ == "__main__":
main()
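# Example invocation (hypothetical script name and values, not from the original docs):
#   python collect.py -d cisco -m n5k -s cisco_nxos -i 10.0.0.1 -u admin -p secret \
#       -o cisco_n5k_output.zip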
| python |
class Solution:
"""
@param nums: A set of numbers.
@return: A list of lists. All valid subsets.
"""
def subsetsWithDup(self, nums):
# write your code here
if not nums: return [[]]
nums = sorted(nums)
res = []
self.helper(res, [], nums, 0)
return res
def helper(self, res, part, nums, pos):
res.append(list(part))
for i in range(pos, len(nums)):
            # skip duplicates at the same recursion depth so each subset appears once
            if i != pos and nums[i] == nums[i - 1]: continue
part.append(nums[i])
self.helper(res, part, nums, i + 1)
part.pop()
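    # NB: the two definitions below shadow subsetsWithDup/helper above; this variant
    # builds the same subsets but tracks used elements with a visited[] array instead.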
def subsetsWithDup(self, nums):
# write your code here
if not nums: return [[]]
nums = sorted(nums)
res = []
visited=[0]*len(nums)
self.helper(res, [], nums, 0,visited)
return res
def helper(self, res, part, nums, pos,visited):
res.append(list(part))
for i in range(pos, len(nums)):
            # skip a duplicate unless the previous equal element is in the current subset
            if i > 0 and visited[i - 1] == 0 and nums[i] == nums[i - 1]: continue
visited[i] = 1
part.append(nums[i])
self.helper(res, part, nums, i + 1,visited)
part.pop()
visited[i]=0
s=Solution()
s.subsetsWithDup([])
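# Illustrative check (not in the original): duplicates yield each subset exactly once.
print(s.subsetsWithDup([1, 2, 2]))  # -> [[], [1], [1, 2], [1, 2, 2], [2], [2, 2]] | python |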
if __name__ =='__main__':
N = int(input("Enter Number of Commands "))
L =[]
for i in range(0,N):
tokens = input("Enter command ").split()
if tokens[0] == "insert":
L.insert(int(tokens[1]), int(tokens[2]))
elif tokens[0] == "print":
print(L)
elif tokens[0] == 'remove':
L.remove(int(tokens[1]))
elif tokens[0] == "append":
L.append(int(tokens[1]))
elif tokens[0] == "sort":
L.sort()
elif tokens[0] == "pop":
L.pop()
elif tokens[0] == "reverse":
L.reverse()
| python |
#execute: python3 script_path image_path min_wavelet_level max_wavelet_level erosion_times R_script_path output0 output1
import numpy as np
import pandas as pd
import pywt,cv2,sys,subprocess,homcloud,os
import matplotlib.pyplot as plt
args = sys.argv
image_path = args[1] #jpg file
min_wavelet_level = int(args[2]) #int
max_wavelet_level = int(args[3]) #int
erosion_times = int(args[4]) #int
R_script_path = args[5] #path of RTDA.R
output0 = args[6] #txt file
output1 = args[7] #txt file
# the original script used coordinate_data_path without ever defining it;
# derive it from the image path here (assumed naming scheme)
coordinate_data_path = os.path.splitext(image_path)[0] + '_coordinates.csv'
def preprocess(image_path, coordinate_data_path, min_wavelet_level=3, max_wavelet_level=10, erosion_times=5):
imArray = cv2.imread(image_path)
#trim the image to 1200*1400
imArray = imArray[0:1200,0:1400]
#transform to grayscale
imArray = cv2.cvtColor(imArray, cv2.COLOR_BGR2GRAY)
#transform to float (0~1)
imArray = np.float32(imArray)
imArray /= 255
#calculate wavelet coefficients (Haar base)
mode = "haar"
coeffs=pywt.wavedec2(imArray, mode, level=10)
#abandon coefficients of specified levels
coeffs_H=list(coeffs)
if 0 < min_wavelet_level:
coeffs_H[0] *= 0
    # levels 1..10 hold detail-coefficient tuples; level 0 (the approximation) was handled above
    for i in range(1, 11):
        if i < min_wavelet_level or i > max_wavelet_level:
            coeffs_H[i] = tuple(np.zeros_like(v) for v in coeffs_H[i])
#reconstruct the image
imArray_H=pywt.waverec2(coeffs_H, mode)
imArray_H *= 255
imArray_H = np.uint8(imArray_H)
#binarize the image using Otsu's method
_,thr = cv2.threshold(imArray_H,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
#morphological operations
#set the kernel
kernel = cv2.getStructuringElement(cv2.MORPH_CROSS,(3,3))
#erode the white region several times
binary_image = cv2.erode(thr, kernel, iterations = erosion_times)
#get coordinates of white pixels
y,x = binary_image.nonzero()
white_pixels = np.array([x,y])
white_pixels = white_pixels.T
#output
np.savetxt(coordinate_data_path, white_pixels,fmt="%.0f",delimiter=",")
preprocess(image_path, coordinate_data_path,min_wavelet_level, max_wavelet_level, erosion_times)
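# run the R TDA script; the argument-list form avoids shell quoting issues with paths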
subprocess.call(["Rscript", R_script_path, coordinate_data_path, output0, output1])
| python |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class PrivateEndpointDetails(object):
"""
Note: Deprecated. Use the new resource model APIs instead.
OCI Private Endpoint configuration details.
"""
def __init__(self, **kwargs):
"""
Initializes a new PrivateEndpointDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param compartment_id:
The value to assign to the compartment_id property of this PrivateEndpointDetails.
:type compartment_id: str
:param vcn_id:
The value to assign to the vcn_id property of this PrivateEndpointDetails.
:type vcn_id: str
:param subnet_id:
The value to assign to the subnet_id property of this PrivateEndpointDetails.
:type subnet_id: str
:param id:
The value to assign to the id property of this PrivateEndpointDetails.
:type id: str
"""
self.swagger_types = {
'compartment_id': 'str',
'vcn_id': 'str',
'subnet_id': 'str',
'id': 'str'
}
self.attribute_map = {
'compartment_id': 'compartmentId',
'vcn_id': 'vcnId',
'subnet_id': 'subnetId',
'id': 'id'
}
self._compartment_id = None
self._vcn_id = None
self._subnet_id = None
self._id = None
@property
def compartment_id(self):
"""
Gets the compartment_id of this PrivateEndpointDetails.
The `OCID`__ of the compartment to contain the
private endpoint.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The compartment_id of this PrivateEndpointDetails.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this PrivateEndpointDetails.
The `OCID`__ of the compartment to contain the
private endpoint.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param compartment_id: The compartment_id of this PrivateEndpointDetails.
:type: str
"""
self._compartment_id = compartment_id
@property
def vcn_id(self):
"""
Gets the vcn_id of this PrivateEndpointDetails.
The `OCID`__ of the VCN where the Private Endpoint will be bound to.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The vcn_id of this PrivateEndpointDetails.
:rtype: str
"""
return self._vcn_id
@vcn_id.setter
def vcn_id(self, vcn_id):
"""
Sets the vcn_id of this PrivateEndpointDetails.
The `OCID`__ of the VCN where the Private Endpoint will be bound to.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param vcn_id: The vcn_id of this PrivateEndpointDetails.
:type: str
"""
self._vcn_id = vcn_id
@property
def subnet_id(self):
"""
Gets the subnet_id of this PrivateEndpointDetails.
The `OCID`__ of the customer's
subnet where the private endpoint VNIC will reside.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The subnet_id of this PrivateEndpointDetails.
:rtype: str
"""
return self._subnet_id
@subnet_id.setter
def subnet_id(self, subnet_id):
"""
Sets the subnet_id of this PrivateEndpointDetails.
The `OCID`__ of the customer's
subnet where the private endpoint VNIC will reside.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param subnet_id: The subnet_id of this PrivateEndpointDetails.
:type: str
"""
self._subnet_id = subnet_id
@property
def id(self):
"""
Gets the id of this PrivateEndpointDetails.
`OCID`__ of a previously created Private Endpoint.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:return: The id of this PrivateEndpointDetails.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this PrivateEndpointDetails.
`OCID`__ of a previously created Private Endpoint.
__ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
:param id: The id of this PrivateEndpointDetails.
:type: str
"""
self._id = id
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| python |
import os
import pandas as pd
import pytest
import clustereval as ce
@pytest.fixture
def data():
return pd.read_csv('clustereval/data/testdata.csv.gz')
def test_vanilla_cluster_louvain(data):
ce.cluster.run_full_experiment(reduction = data,
alg = 'louvain',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
quality_function='RBConfigurationVertexPartition',
cluster_kwargs={},
n_perturbations=0,
edge_permut_frac=None,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_louvain_prune(data):
ce.cluster.run_full_experiment(reduction=data,
alg='louvain',
k=30,
global_pruning_jac_threshold='median',
local_pruning_dist_threshold=3,
quality_function='ModularityVertexPartition',
cluster_kwargs={},
n_perturbations=0,
edge_permut_frac=None,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_louvain_alt_quality_function(data):
ce.cluster.run_full_experiment(reduction=data,
                                   alg='louvain',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
                                   quality_function='ModularityVertexPartition',
cluster_kwargs={},
n_perturbations=0,
edge_permut_frac=None,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_vanilla_cluster_leiden(data):
ce.cluster.run_full_experiment(reduction=data,
alg='leiden',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
quality_function='RBConfigurationVertexPartition',
cluster_kwargs={'resolution_parameter': 1.0, 'n_iterations':5},
n_perturbations=0,
edge_permut_frac=None,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_leiden_prune(data):
ce.cluster.run_full_experiment(reduction=data,
alg='leiden',
k=30,
global_pruning_jac_threshold=.2,
local_pruning_dist_threshold=3,
quality_function='RBConfigurationVertexPartition',
cluster_kwargs={
'resolution_parameter': 1.0, 'n_iterations': 5},
n_perturbations=0,
edge_permut_frac=None,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_leiden_alt_quality_function(data):
ce.cluster.run_full_experiment(reduction=data,
alg='leiden',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
quality_function='ModularityVertexPartition',
cluster_kwargs={'n_iterations': 5},
n_perturbations=0,
edge_permut_frac=None,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_edge_perturb(data):
ce.cluster.run_full_experiment(reduction=data,
alg='louvain',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
quality_function='RBConfigurationVertexPartition',
cluster_kwargs={},
n_perturbations=1,
edge_permut_frac=.05,
weight_permut_range=None,
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_weight_perturb(data):
ce.cluster.run_full_experiment(reduction=data,
alg='leiden',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
quality_function='RBConfigurationVertexPartition',
cluster_kwargs={
'resolution_parameter': 1.0, 'n_iterations': 5},
n_perturbations=2,
edge_permut_frac=None,
weight_permut_range=(.5,1.5),
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
def test_dup_row_error_fails():
data = pd.read_csv('clustereval/data/testdata.csv.gz', index_col=0)
    with pytest.raises(ce.cluster.DuplicateRowError):
        ce.cluster.run_full_experiment(reduction=data,
                                       alg='leiden',
                                       k=30,
                                       global_pruning_jac_threshold=None,
                                       local_pruning_dist_threshold=None,
                                       quality_function='RBConfigurationVertexPartition',
                                       cluster_kwargs={
                                           'resolution_parameter': 1.0, 'n_iterations': 5},
                                       n_perturbations=2,
                                       edge_permut_frac=None,
                                       weight_permut_range=(.5, 1.5),
                                       min_cluster_size=10,
                                       experiment_name='clusterEval',
                                       verbosity=0
                                       )
# def test_umap(data):
# clu_obj = ce.cluster.ClusterExperiment(data ,verbosity=2)
# clu_obj.buildNeighborGraph(knn=10, nn_space='l2',
# local_pruning=True, global_pruning=True, jac_std_global='median', dist_std_local = 3)
# embedding = clu_obj.run_UMAP()
def test_unsorted_metric_input_fails(data):
    metrics, labels, perturbations = ce.cluster.run_full_experiment(reduction=data,
alg='leiden',
k=30,
global_pruning_jac_threshold=None,
local_pruning_dist_threshold=None,
quality_function='RBConfigurationVertexPartition',
cluster_kwargs={
'resolution_parameter': 1.0, 'n_iterations': 5},
n_perturbations=2,
edge_permut_frac=None,
weight_permut_range=(.5, 1.5),
min_cluster_size=10,
experiment_name='clusterEval',
verbosity=0
)
    labels = labels.sample(labels.shape[0])
    # shuffled (unsorted) labels must be rejected
    with pytest.raises(Exception):
        ce.metrics.calculate_metrics(labels, perturbations)
| python |
import re
from django.contrib.auth.backends import ModelBackend
from .models import User
def jwt_response_payload_handler(token, user=None, request=None):
"""
由于我们的jwt 响应的数据只有token
当时我们需要用户名和id所以我们需要让django框架取认识我们自定义的响应
自定义状态保持的响应内容
:param token: token
:param user: 用户名
:param request: 请求对象
:return: token,username,id
"""
return {
'token': token,
'user_id': user.id,
'username': user.username
}
def get_username_mobile_account(account):
"""
跟据帐号获取user对象
:param caaount: 用户名或者手机号
:return: user对象或者None
"""
try:
if re.match(r"1[3-9]\d{9}", account):
user = User.objects.get(mobile=account)
else:
user = User.objects.get(username=account)
except User.DoesNotExist:
return None
return user
class UsernameMobileLogin(ModelBackend):
"""
由于我们需要多张好登录
所以需要重写JWT的认证 ModelBackend的方法authenticate
"""
def authenticate(self, request, username=None, password=None, **kwargs):
"""重写父类的认证"""
user = get_username_mobile_account(username)
if user is not None and user.check_password(password):
return user | python |
import re
from functools import reduce
from django.template import Template, Context
from django_grapesjs.settings import NAME_RENDER_TAG
__all__ = ('ApplyRenderTag', )
REGEX_RENDER_TAG = '<%s>(.*?)</%s>' % (NAME_RENDER_TAG, NAME_RENDER_TAG)
class ApplyRenderTag(object):
def apply_tag_init(self, string):
strings_to_render = re.findall(REGEX_RENDER_TAG, string)
replace_to_strings = map(lambda t: t.render(Context({})), map(Template, strings_to_render))
return reduce(lambda s, r: re.sub(REGEX_RENDER_TAG, r, s, 1), replace_to_strings, string)
def apply_tag_save(self, string):
return string
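# Example: with NAME_RENDER_TAG = 'render', apply_tag_init('a <render>{{ 1|add:1 }}</render> b')
# renders each tagged block as a Django template with an empty Context and
# substitutes the result back in, returning 'a 2 b'.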
| python |
import rdkit
import rdkit.Chem as Chem
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import minimum_spanning_tree
from collections import defaultdict
from rdkit.Chem.EnumerateStereoisomers import EnumerateStereoisomers, StereoEnumerationOptions
from vocab import Vocab
def get_mol(smiles):
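    """Parse a SMILES string into a kekulized RDKit Mol; returns None for invalid input."""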
mol = Chem.MolFromSmiles(smiles)
if mol is None:
return None
Chem.Kekulize(mol)
return mol | python |
# -*- coding: utf-8 -*-
from datafield import DataFieldForm, NamedDataFieldForm
from dataset import DataSetForm
from robot import RobotForm
from urlsource import URLSourceForm
| python |
from operator import itemgetter
def isPlayerWon(board, champ):
if (board[0] == champ and board[1] == champ and board[2] == champ or
board[3] == champ and board[4] == champ and board[5] == champ or
board[6] == champ and board[7] == champ and board[8] == champ or
board[0] == champ and board[3] == champ and board[6] == champ or
board[1] == champ and board[4] == champ and board[7] == champ or
board[2] == champ and board[5] == champ and board[8] == champ or
board[0] == champ and board[4] == champ and board[8] == champ or
board[2] == champ and board[4] == champ and board[6] == champ):
return True
else:
return False
def avail(board):
return [int(i) for i in board if (i != 'X' and i != 'O')]
def minmax(board, champ):
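    # Scores are from O's point of view: an X win is -100, an O win is +100,
    # a tie is 0. X therefore picks the child with the minimal score and O
    # the maximal one. Returns a (move_index, score) tuple; on a terminal
    # board the move index is a dummy 0.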
availablePlaces = avail(board)
if isPlayerWon(board, 'X'):
return 0, -100
elif isPlayerWon(board, 'O'):
return 0, 100
elif availablePlaces == []:
return 0, 0
validMoves = []
for i in availablePlaces:
board[i] = champ
if champ == 'O':
score = minmax(board, 'X')[1]
validMoves.append((i, score))
elif champ == 'X':
score = minmax(board, 'O')[1]
validMoves.append((i, score))
board[i] = i
if champ == 'X':
return min(validMoves, key=itemgetter(1))
elif champ == 'O':
return max(validMoves, key=itemgetter(1))
def drawBoard(board):
# for i in range(3):
# print(board[3*i : 3*i+3])
for i in range(3):
for j in range(3):
if board[i*3 + j] != 'X' and board[i*3 + j] != 'O':
print(' ', end=' | ')
else:
print(board[i*3 + j], end=' | ')
        print()  # newline after each row
print('-' * 11)
def main():
board = [str(i) for i in range(9)]
# print(board)
human = 'X'
bot = 'O'
drawBoard(board)
while True:
# print(board)
humanMove = int(input('Enter the position: '))
if((humanMove < 0 or humanMove > 8) or
board[humanMove] == 'X' or
board[humanMove] == 'O'):
print('Invalid Move!! Try again!!')
continue
        board[humanMove] = human
        # settle the game before the bot moves: on a finished board minmax
        # returns its dummy move index (0) and would overwrite that square
        if isPlayerWon(board, 'X'):
            drawBoard(board)
            print('You Won')
            break
        if avail(board) == []:
            drawBoard(board)
            print('Tied')
            break
        botMove = minmax(board, bot)[0]
        print(botMove)
        board[botMove] = bot
        drawBoard(board)
        if isPlayerWon(board, 'O'):
            print('You Lose')
            break
        elif avail(board) == []:
            print('Tied')
            break
if __name__ == '__main__':
main()
| python |
# -*- coding: utf-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
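# e.g. in a template, {{ post.created|datetimeformat }} renders as
# "14:30 / 01-01-2020" with the default format string.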
@register.global_context
def hello(name):
return "Hello" + name
| python |
import matplotlib.pyplot as plt
import numpy as np
from sklearn.linear_model import LogisticRegression
from common_functions import load_data
if __name__ == '__main__':
X, y = load_data('ex2data1.txt')
x1, x2 = X.T
f_y = y.ravel()
plt.plot(x1[f_y==0], x2[f_y==0], 'yo')
plt.plot(x1[f_y==1], x2[f_y==1], 'bx')
plt.show()
lr = LogisticRegression(C=100)
lr.fit(X, f_y)
theta = np.array([lr.intercept_[0], lr.coef_[0, 0], lr.coef_[0, 1]])
x1_boundery = np.array([np.min(x1)-2, np.max(x1)+2])
x2_boundery = (-1/theta[2])*(theta[1]*x1_boundery + theta[0])
plt.plot(x1[f_y==0], x2[f_y==0], 'yo')
plt.plot(x1[f_y==1], x2[f_y==1], 'bx')
plt.plot(x1_boundery, x2_boundery)
plt.show()
    print('Train Accuracy: {}%'.format(lr.score(X, f_y) * 100))
| python |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
KNearestConcaveHull.py
----------------------
Date : November 2014
Copyright : (C) 2014 by Detlev Neumann
Dr. Neumann Consulting - Geospatial Services
Email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
__author__ = 'Detlev Neumann'
__date__ = 'November 2014'
__copyright__ = '(C) 2014, Detlev Neumann'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '176c06ceefb5f555205e72b20c962740cc0ec183'
import os.path
import math
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsApplication,
QgsExpression,
QgsFeature,
QgsFeatureRequest,
QgsFeatureSink,
QgsField,
QgsFields,
QgsGeometry,
QgsProcessing,
QgsProcessingException,
QgsProcessingParameterFeatureSink,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterNumber,
QgsPoint,
QgsPointXY,
QgsWkbTypes)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
class KNearestConcaveHull(QgisAlgorithm):
KNEIGHBORS = 'KNEIGHBORS'
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
FIELD = 'FIELD'
def name(self):
return 'knearestconcavehull'
def displayName(self):
return self.tr('Concave hull (k-nearest neighbor)')
def shortDescription(self):
return self.tr('Creates a concave hull using the k-nearest neighbor algorithm.')
def icon(self):
return QgsApplication.getThemeIcon("/algorithms/mAlgorithmConcaveHull.svg")
def svgIconPath(self):
return QgsApplication.iconPath("/algorithms/mAlgorithmConcaveHull.svg")
def group(self):
return self.tr('Vector geometry')
def groupId(self):
return 'vectorgeometry'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Input layer')))
self.addParameter(QgsProcessingParameterNumber(self.KNEIGHBORS,
self.tr('Number of neighboring points to consider (a lower number is more concave, a higher number is smoother)'),
QgsProcessingParameterNumber.Integer,
defaultValue=3, minValue=3))
self.addParameter(QgsProcessingParameterField(self.FIELD,
self.tr('Field (set if creating concave hulls by class)'),
parentLayerParameterName=self.INPUT, optional=True))
self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Concave hull'),
QgsProcessing.TypeVectorPolygon))
def processAlgorithm(self, parameters, context, feedback):
# Get variables from dialog
source = self.parameterAsSource(parameters, self.INPUT, context)
if source is None:
raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
field_name = self.parameterAsString(parameters, self.FIELD, context)
kneighbors = self.parameterAsInt(parameters, self.KNEIGHBORS, context)
use_field = bool(field_name)
field_index = -1
fields = QgsFields()
fields.append(QgsField('id', QVariant.Int, '', 20))
current = 0
# Get properties of the field the grouping is based on
if use_field:
field_index = source.fields().lookupField(field_name)
if field_index >= 0:
fields.append(source.fields()[field_index]) # Add a field with the name of the grouping field
# Initialize writer
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
fields, QgsWkbTypes.Polygon, source.sourceCrs())
if sink is None:
raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))
success = False
fid = 0
# Get unique values of grouping field
unique_values = source.uniqueValues(field_index)
            total = 100.0 / (source.featureCount() * len(unique_values)) if source.featureCount() and unique_values else 0
for unique in unique_values:
points = []
filter = QgsExpression.createFieldEqualityExpression(field_name, unique)
request = QgsFeatureRequest().setFilterExpression(filter)
request.setSubsetOfAttributes([])
# Get features with the grouping attribute equal to the current grouping value
features = source.getFeatures(request)
for in_feature in features:
if feedback.isCanceled():
break
# Add points or vertices of more complex geometry
points.extend(extract_points(in_feature.geometry()))
current += 1
feedback.setProgress(int(current * total))
# A minimum of 3 points is necessary to proceed
if len(points) >= 3:
out_feature = QgsFeature()
the_hull = concave_hull(points, kneighbors)
if the_hull:
vertex = [QgsPointXY(point[0], point[1]) for point in the_hull]
poly = QgsGeometry().fromPolygonXY([vertex])
out_feature.setGeometry(poly)
# Give the polygon the same attribute as the point grouping attribute
out_feature.setAttributes([fid, unique])
sink.addFeature(out_feature, QgsFeatureSink.FastInsert)
success = True # at least one polygon created
fid += 1
if not success:
raise QgsProcessingException('No hulls could be created. Most likely there were not at least three unique points in any of the groups.')
else:
# Field parameter provided but can't read from it
raise QgsProcessingException('Unable to find grouping field')
else:
# Not grouped by field
# Initialize writer
(sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context,
fields, QgsWkbTypes.Polygon, source.sourceCrs())
if sink is None:
raise QgsProcessingException(self.invalidSinkError(parameters, self.OUTPUT))
points = []
request = QgsFeatureRequest()
request.setSubsetOfAttributes([])
features = source.getFeatures(request) # Get all features
total = 100.0 / source.featureCount() if source.featureCount() else 0
for in_feature in features:
if feedback.isCanceled():
break
# Add points or vertices of more complex geometry
points.extend(extract_points(in_feature.geometry()))
current += 1
feedback.setProgress(int(current * total))
# A minimum of 3 points is necessary to proceed
if len(points) >= 3:
out_feature = QgsFeature()
the_hull = concave_hull(points, kneighbors)
if the_hull:
vertex = [QgsPointXY(point[0], point[1]) for point in the_hull]
poly = QgsGeometry().fromPolygonXY([vertex])
out_feature.setGeometry(poly)
out_feature.setAttributes([0])
sink.addFeature(out_feature, QgsFeatureSink.FastInsert)
else:
# the_hull returns None only when there are less than three points after cleaning
raise QgsProcessingException('At least three unique points are required to create a concave hull.')
else:
raise QgsProcessingException('At least three points are required to create a concave hull.')
return {self.OUTPUT: dest_id}
def clean_list(list_of_points):
"""
Deletes duplicate points in list_of_points
"""
return list(set(list_of_points))
def find_min_y_point(list_of_points):
"""
Returns that point of *list_of_points* having minimal y-coordinate
:param list_of_points: list of tuples
:return: tuple (x, y)
"""
min_y_pt = list_of_points[0]
for point in list_of_points[1:]:
if point[1] < min_y_pt[1] or (point[1] == min_y_pt[1] and point[0] < min_y_pt[0]):
min_y_pt = point
return min_y_pt
def add_point(vector, element):
"""
Returns vector with the given element append to the right
"""
vector.append(element)
return vector
def remove_point(vector, element):
"""
Returns a copy of vector without the given element
"""
vector.pop(vector.index(element))
return vector
def euclidian_distance(point1, point2):
"""
    Returns the Euclidean distance between the two given points.
:param point1: tuple (x, y)
:param point2: tuple (x, y)
:return: float
"""
return math.sqrt(math.pow(point1[0] - point2[0], 2) + math.pow(point1[1] - point2[1], 2))
def nearest_points(list_of_points, point, k):
"""
    Returns a list of the k points from *list_of_points* closest to the specified point. The measure
    of proximity is the Euclidean distance. Internally, k becomes the minimum between the given value for k and the
    number of points in list_of_points
:param list_of_points: list of tuples
:param point: tuple (x, y)
:param k: integer
:return: list of k tuples
"""
# build a list of tuples of distances between point *point* and every point in *list_of_points*, and
# their respective index of list *list_of_distances*
list_of_distances = []
for index in range(len(list_of_points)):
list_of_distances.append((euclidian_distance(list_of_points[index], point), index))
# sort distances in ascending order
list_of_distances.sort()
# get the k nearest neighbors of point
nearest_list = []
for index in range(min(k, len(list_of_points))):
nearest_list.append((list_of_points[list_of_distances[index][1]]))
return nearest_list
def angle(from_point, to_point):
"""
Returns the angle of the directed line segment, going from *from_point* to *to_point*, in radians. The angle is
    positive for segments with upward direction (north), otherwise negative (south). Values range from 0 at the
right (east) to pi at the left side (west).
:param from_point: tuple (x, y)
:param to_point: tuple (x, y)
:return: float
"""
return math.atan2(to_point[1] - from_point[1], to_point[0] - from_point[0])
def angle_difference(angle1, angle2):
"""
Calculates the difference between the given angles in clockwise direction as radians.
:param angle1: float
:param angle2: float
:return: float; between 0 and 2*Pi
"""
if (angle1 > 0 and angle2 >= 0) and angle1 > angle2:
return abs(angle1 - angle2)
elif (angle1 >= 0 and angle2 > 0) and angle1 < angle2:
return 2 * math.pi + angle1 - angle2
elif (angle1 < 0 and angle2 <= 0) and angle1 < angle2:
return 2 * math.pi + angle1 + abs(angle2)
elif (angle1 <= 0 and angle2 < 0) and angle1 > angle2:
return abs(angle1 - angle2)
elif angle1 <= 0 < angle2:
return 2 * math.pi + angle1 - angle2
elif angle1 >= 0 >= angle2:
return angle1 + abs(angle2)
else:
return 0
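# e.g. angle_difference(math.pi / 2, 0) == math.pi / 2: a quarter turn
# clockwise from a north-pointing segment to an east-pointing one.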
def intersect(line1, line2):
"""
Returns True if the two given line segments intersect each other, and False otherwise.
:param line1: 2-tuple of tuple (x, y)
:param line2: 2-tuple of tuple (x, y)
:return: boolean
"""
a1 = line1[1][1] - line1[0][1]
b1 = line1[0][0] - line1[1][0]
c1 = a1 * line1[0][0] + b1 * line1[0][1]
a2 = line2[1][1] - line2[0][1]
b2 = line2[0][0] - line2[1][0]
c2 = a2 * line2[0][0] + b2 * line2[0][1]
tmp = (a1 * b2 - a2 * b1)
if tmp == 0:
return False
sx = (c1 * b2 - c2 * b1) / tmp
if (sx > line1[0][0] and sx > line1[1][0]) or (sx > line2[0][0] and sx > line2[1][0]) or\
(sx < line1[0][0] and sx < line1[1][0]) or (sx < line2[0][0] and sx < line2[1][0]):
return False
sy = (a1 * c2 - a2 * c1) / tmp
if (sy > line1[0][1] and sy > line1[1][1]) or (sy > line2[0][1] and sy > line2[1][1]) or\
(sy < line1[0][1] and sy < line1[1][1]) or (sy < line2[0][1] and sy < line2[1][1]):
return False
return True
def point_in_polygon_q(point, list_of_points):
"""
Return True if given point *point* is laying in the polygon described by the vertices *list_of_points*,
otherwise False
Based on the "Ray Casting Method" described by Joel Lawhead in this blog article:
http://geospatialpython.com/2011/01/point-in-polygon.html
"""
x = point[0]
y = point[1]
poly = [(pt[0], pt[1]) for pt in list_of_points]
n = len(poly)
inside = False
p1x, p1y = poly[0]
for i in range(n + 1):
p2x, p2y = poly[i % n]
if y > min(p1y, p2y):
if y <= max(p1y, p2y):
if x <= max(p1x, p2x):
if p1y != p2y:
xints = (y - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
if p1x == p2x or x <= xints:
inside = not inside
p1x, p1y = p2x, p2y
return inside
def extract_points(geom):
"""
Generate list of QgsPoints from QgsGeometry *geom* ( can be point, line, or polygon )
Code taken from fTools plugin
:param geom: an arbitrary geometry feature
:return: list of points
"""
multi_geom = QgsGeometry()
temp_geom = []
# point geometry
if geom.type() == 0:
if geom.isMultipart():
temp_geom = geom.asMultiPoint()
else:
temp_geom.append(geom.asPoint())
# line geometry
if geom.type() == 1:
# if multipart feature explode to single part
if geom.isMultipart():
multi_geom = geom.asMultiPolyline()
for i in multi_geom:
temp_geom.extend(i)
else:
temp_geom = geom.asPolyline()
# polygon geometry
elif geom.type() == 2:
# if multipart feature explode to single part
if geom.isMultipart():
multi_geom = geom.asMultiPolygon()
# now single part polygons
for i in multi_geom:
# explode to line segments
for j in i:
temp_geom.extend(j)
else:
multi_geom = geom.asPolygon()
# explode to line segments
for i in multi_geom:
temp_geom.extend(i)
return temp_geom
def sort_by_angle(list_of_points, last_point, last_angle):
"""
returns the points in list_of_points in descending order of angle to the last segment of the envelope, measured
in a clockwise direction. Thus, the rightmost of the neighboring points is always selected. The first point of
this list will be the next point of the envelope.
"""
def getkey(item):
return angle_difference(last_angle, angle(last_point, item))
vertex_list = sorted(list_of_points, key=getkey, reverse=True)
return vertex_list
def concave_hull(points_list, k):
"""
Calculates a valid concave hull polygon containing all given points. The algorithm searches for that
point in the neighborhood of k nearest neighbors which maximizes the rotation angle in clockwise direction
without intersecting any previous line segments.
This is an implementation of the algorithm described by Adriano Moreira and Maribel Yasmina Santos:
    CONCAVE HULL: A K-NEAREST NEIGHBOURS APPROACH FOR THE COMPUTATION OF THE REGION OCCUPIED BY A SET OF POINTS.
    GRAPP 2007 - International Conference on Computer Graphics Theory and Applications; pp. 61-68.
:param points_list: list of tuples (x, y)
:param k: integer
:return: list of tuples (x, y)
"""
    # k cannot exceed the number of given points
    if k > len(points_list):
        k = len(points_list)
# the number of nearest neighbors k must be greater than or equal to 3
kk = max(k, 3)
# delete duplicate points
point_set = clean_list(points_list)
# if point_set has less then 3 points no polygon can be created and an empty list will be returned
if len(point_set) < 3:
return None
# if point_set has 3 points then these are already vertices of the hull. Append the first point to
# close the hull polygon
if len(point_set) == 3:
return add_point(point_set, point_set[0])
# make sure that k neighbors can be found
kk = min(kk, len(point_set))
# start with the point having the smallest y-coordinate (most southern point)
first_point = find_min_y_point(point_set)
    # add this point as the first vertex of the hull
    hull = [first_point]
    # make the first vertex of the hull the current point
    current_point = first_point
    # remove the point from point_set to prevent it from being among the nearest points
    point_set = remove_point(point_set, first_point)
previous_angle = math.pi
# step counts the number of segments
step = 2
    # as long as point_set is not empty and the search has not yet returned to the starting point
    while ((current_point != first_point) or (step == 2)) and (len(point_set) > 0):
# after 3 iterations add the first point to point_set again, otherwise a hull cannot be closed
if step == 5:
point_set = add_point(point_set, first_point)
# search the k nearest neighbors of the current point
k_nearest_points = nearest_points(point_set, current_point, kk)
# sort the candidates (neighbors) in descending order of right-hand turn. This way the algorithm progresses
# in clockwise direction through as many points as possible
c_points = sort_by_angle(k_nearest_points, current_point, previous_angle)
its = True
i = -1
# search for the nearest point to which the connecting line does not intersect any existing segment
while its is True and (i < len(c_points) - 1):
i += 1
if c_points[i] == first_point:
last_point = 1
else:
last_point = 0
j = 2
its = False
while its is False and (j < len(hull) - last_point):
its = intersect((hull[step - 2], c_points[i]), (hull[step - 2 - j], hull[step - 1 - j]))
j += 1
        # there is no candidate to which the connecting line does not intersect any existing segment, so the
        # search for the next vertex fails. The algorithm starts again with an increased number of neighbors
if its is True:
return concave_hull(points_list, kk + 1)
# the first point which complies with the requirements is added to the hull and gets the current point
current_point = c_points[i]
hull = add_point(hull, current_point)
        # calculate the angle between the last vertex and its predecessor, that is, the last segment of the hull
        # in reversed direction
previous_angle = angle(hull[step - 1], hull[step - 2])
# remove current_point from point_set
point_set = remove_point(point_set, current_point)
# increment counter
step += 1
all_inside = True
i = len(point_set) - 1
# check if all points are within the created polygon
while (all_inside is True) and (i >= 0):
all_inside = point_in_polygon_q(point_set[i], hull)
i -= 1
    # if at least one point lies outside the computed polygon, try again with a higher number of neighbors
if all_inside is False:
return concave_hull(points_list, kk + 1)
# a valid hull has been constructed
return hull
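# Usage sketch with plain coordinate tuples (no QGIS objects required):
#   ring = concave_hull([(0, 0), (2, 0), (2, 2), (0, 2), (1, 1)], 3)
# The result is a closed vertex list (the first point is repeated at the
# end), ready for QgsGeometry().fromPolygonXY() as used in processAlgorithm.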
| python |
# date: 2021.03.29
# author: Han Tran ([email protected])
import os
import re
import openml as oml
#####################################################################
'''
*** Function: write a proto file with a given regconized ID in OpenML
*** Input: dataID from OpenML, name and location for the output file
*** Output: filename.proto (default: "model.proto")
'''
#####################################################################
def write_proto(dataID, file_name='model.proto', output_folder=''):
output_file = os.path.join(output_folder, file_name)
    try:
        df = oml.datasets.get_dataset(dataID).get_data()[0]
    except Exception:
        # without a dataset there is nothing to write, so bail out here
        print(f'No data with ID {dataID}')
        return
with open(output_file, 'w') as f:
f.write('syntax = "proto3";\n\n')
f.write(f'option java_outer_classname = "Data{dataID}Proto";\n')
f.write('option objc_class_prefix = "KC";\n\n')
f.write(f'package know_center.openml.data{dataID};\n\n')
f.write(f'service Data {{ \n')
f.write('\trpc PullData(Empty) returns (Response);\n')
f.write('}\n\n')
f.write(f'message Empty {{\n}}\n\n')
f.write(f'message Response {{\n')
f.write(f'\tstring label = 1;\n')
f.write(f'\tFeature feature = 2;\n')
f.write('}\n\n')
f.write('message Feature {\n')
        label = 'class'
        # drop the label column if the dataset has one
        if label in df.columns:
            df = df.drop(label, axis=1)
        else:
            print('No label ("class" name) found in the dataset')
type_ser = df.dtypes
types = [str(m) for m in type_ser]
for k, c in enumerate(types):
text = c if c!='category' else "string"
f.write(f'\t{text:8} {type_ser.index[k].capitalize():30} = {k+1};\n')
f.write('}')
print(f'Done writing {dataID} into {output_file}') | python |
#!/usr/bin/env python
import os.path
from django.db import models
from django.utils.timezone import now
from panda.models.user_proxy import UserProxy
class BaseUpload(models.Model):
"""
Base class for any file uploaded to PANDA.
"""
filename = models.CharField(max_length=256,
help_text='Filename as stored in PANDA.')
original_filename = models.CharField(max_length=256,
help_text='Filename as originally uploaded.')
size = models.IntegerField(
help_text='Size of the file in bytes.')
creator = models.ForeignKey(UserProxy,
help_text='The user who uploaded this file.')
creation_date = models.DateTimeField(
help_text='The date this file was uploaded.')
title = models.TextField(max_length=256,
help_text='A user-friendly name for this file.')
class Meta:
app_label = 'panda'
abstract = True
def __unicode__(self):
return self.filename
def save(self, *args, **kwargs):
if not self.creation_date:
self.creation_date = now()
if not self.title:
self.title = self.original_filename
super(BaseUpload, self).save(*args, **kwargs)
def delete(self, *args, **kwargs):
"""
When deleting an upload, it will attempt to clean
up its own associated files.
"""
try:
os.remove(self.get_path())
except:
pass
super(BaseUpload, self).delete(*args, **kwargs)
def get_path(self):
"""
Get the absolute path to this upload on disk.
"""
return os.path.join(self.file_root, self.filename)
| python |
import numpy as np
import matplotlib.pyplot as plt
filepath = '/home/jp/opensourcecode/OpenSourceORBVIO/tmp/'

biasa = np.loadtxt(filepath + 'biasa.txt')
plt.figure(1)
p11, = plt.plot(biasa[:, 0] - biasa[0, 0], biasa[:, 1])
p12, = plt.plot(biasa[:, 0] - biasa[0, 0], biasa[:, 2])
p13, = plt.plot(biasa[:, 0] - biasa[0, 0], biasa[:, 3])
plt.title('bias-acc')
plt.legend([p11, p12, p13], ["x", "y", "z"])
plt.savefig(filepath + "biasa.eps", format="eps")

scale = np.loadtxt(filepath + 'scale.txt')
plt.figure(2)
p21, p22 = plt.plot(scale[:, 0] - scale[0, 0], scale[:, 1:3])
plt.title('scale')
plt.legend([p21, p22], ['aftopt', 'befopt'])
plt.savefig(filepath + 'scale.eps', format="eps")

condnum = np.loadtxt(filepath + 'condnum.txt')
plt.figure(3)
plt.plot(condnum[:, 0] - condnum[0, 0], condnum[:, 1] / condnum[:, 6])
plt.title('condnum')
plt.savefig(filepath + 'condnum.eps', format="eps")

biasg = np.loadtxt(filepath + 'biasg.txt')
plt.figure(4)
p41, = plt.plot(biasg[:, 0] - biasg[0, 0], biasg[:, 1])
p42, = plt.plot(biasg[:, 0] - biasg[0, 0], biasg[:, 2])
p43, = plt.plot(biasg[:, 0] - biasg[0, 0], biasg[:, 3])
plt.title('bias-gyr')
plt.legend([p41, p42, p43], ["x", "y", "z"])
plt.savefig(filepath + "biasg.eps", format="eps")

plt.show()
| python |
from . import db, login_manager
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from datetime import datetime
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class Pitch(db.Model):
__tablename__ = 'pitches'
pitch_id = db.Column(db.Integer, primary_key=True)
pitch_author = db.Column(db.String(255))
pitch_title = db.Column(db.String(350))
pitch_category = db.Column(db.String(255))
pitch_message = db.Column(db.String(2000))
date_published = db.Column(db.DateTime, default=datetime.utcnow)
upvotes = db.Column(db.Integer)
downvotes = db.Column(db.Integer)
user_id = db.Column(db.Integer, db.ForeignKey('users.user_id'))
comments = db.relationship('Comment', backref = 'pitch', lazy ="dynamic")
def __repr__(self):
return f'Pitch {self.pitch_message}'
class User(UserMixin ,db.Model):
__tablename__ = 'users'
user_id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255))
email = db.Column(db.String(255), unique = True, index = True)
user_bio = db.Column(db.String(600))
user_profile_pic_path = db.Column(db.String)
pass_secure = db.Column(db.String(255))
pitches = db.relationship('Pitch', backref='user', lazy="dynamic")
comments = db.relationship('Comment', backref='user', lazy="dynamic")
@property
def password(self):
raise AttributeError('You are not authorized to read password attribute')
@password.setter
def password(self, password):
self.pass_secure = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.pass_secure, password)
def __repr__(self):
return f'User {self.username}'
class Comment(db.Model):
__tablename__ = 'comments'
comment_id = db.Column(db.Integer, primary_key=True)
comment_message =db.Column(db.String(1000))
date_posted = db.Column(db.DateTime, default=datetime.utcnow)
user_id = db.Column(db.Integer, db.ForeignKey('users.user_id'))
pitch_id = db.Column(db.Integer, db.ForeignKey('pitches.pitch_id'))
def save_comment(self):
db.session.add(self)
db.session.commit()
def __repr__(self):
return f'Comment {self.comment_message}' | python |
from sls.completion.item import CompletionItem, CompletionItemKind
from .argument import Argument
class Event(CompletionItem):
"""
An individual service event with its arguments.
"""
def __init__(self, name, description, args):
self._name = name
self._description = description
self._args = args
@classmethod
def from_hub(cls, name, event):
args = {}
if 'arguments' in event:
for arg_name, arg in event['arguments'].items():
args[arg_name] = Argument.from_hub(name=arg_name, argument=arg)
description = event.get(
'help', 'No description available'
)
return cls(
name=name,
description=description,
args=args,
)
def name(self):
return self._name
def args(self):
return self._args.values()
def arg(self, name):
return self._args.get(name, None)
def to_completion(self, context):
return self.completion_build(
label=self.name(),
detail=f'Event {self.name()}',
            documentation=self._description,
completion_kind=CompletionItemKind.Unit,
context=context,
)
| python |
from setuptools import setup
setup(
name='nzpaye',
version='0.1.1',
description='NZ Paye Summary',
long_description="""Calculate the NZ Paye Summary based on the hourly rate and the number of hours worked.""",
url='https://github.com/anuj-ssharma/NZPaye',
author='Anuj Sharma',
author_email='[email protected]',
license='MIT',
packages=['nzpaye'],
install_requires=['tabulate==0.8.7'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Financial and Insurance Industry',
        'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.8',
],
test_suite='nzpaye.test',
tests_require=[
'mock'
],
entry_points={
'console_scripts': [
'nzpaye = nzpaye.__main__:main',
]
}
)
| python |
from agent import Agent
import random
class SimpleAgent(Agent):
def __init__(self, config):
super().__init__(config)
def name(self):
return "Simple"
def move(self, board):
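        # play an immediately winning column if one exists, otherwise block
        # the opponent's immediate win, otherwise pick a random valid column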
op_piece = self.piece % 2 + 1
valid_moves = self.valid_moves(board)
if len(valid_moves) == 0:
return None
for col in valid_moves:
if self.check_if_winning(self.drop_piece(board, col, self.piece), self.piece) or \
self.check_if_winning(self.drop_piece(board, col, op_piece), op_piece):
return col
return random.choice(valid_moves)
| python |
from matplotlib import pyplot as plt
from matplotlib.patches import Wedge
import numpy as np
from config_space_angular_constraints import plot_config_space
def path_figure(theta_matrix, robot_arm, show=True):
"""
Arguments:
theta_matrix - A set of theta column vectors
robot_arm - An object of the RobotArm class
Returns:
None, but plots the configuration of each theta vector as subplots
"""
# Check input arguments
num_of_destinations = robot_arm.destinations.shape[1]
if not theta_matrix.shape == (robot_arm.n, num_of_destinations):
raise ValueError('''
The number of joint positions does not match the
number of destination points
''')
# Set up plot style options
plt.style.use('ggplot')
fig, axes = plt.subplots(nrows=2, ncols=3)
np.ravel(axes)[-1].axis('off')
axes = np.ravel(axes)[:-1]
for ax in np.ravel(axes):
set_axis_options(ax, robot_arm)
# Plotting content of each subplot
for index, theta in enumerate(theta_matrix.T):
plot_position(np.ravel(axes)[index], theta, robot_arm)
    if show:
#plt.savefig('figures/inequality.png', bbox_inches='tight', dpi=500)
plt.show()
return fig
def set_axis_options(ax, robot_arm):
ax.set_autoscale_on(False)
ax.set_aspect('equal')
if robot_arm.angular_constraint is not None:
plot_config_space(robot_arm.config_space_points, ax)
ax.axhline(y=0, color='grey')
ax.axvline(x=0, color='grey')
# Padding
a = 1.1
max_x = abs(max(robot_arm.destinations, key=lambda p: abs(p[0]))[0])
max_y = abs(max(robot_arm.destinations, key=lambda p: abs(p[1]))[1])
m = max(max_x, max_y, robot_arm.reach)
ax.set_xlim(-a*m, a * m)
ax.set_ylim(-a * m, a * m)
def plot_position(axis, theta, robot_arm):
joint_positions = robot_arm.joint_positions(theta)
x = np.hstack((0, joint_positions[0, :]))
y = np.hstack((0, joint_positions[1, :]))
axis.plot(x, y, '-o')
# Plot all the points that shall be reached
for index, p in enumerate(robot_arm.destinations.T):
point, = axis.plot(p[0], p[1], 'x')
axis.text(p[0], p[1], str(index + 1), fontsize=14, color=point.get_color())
# Plot configuration space of robot
if robot_arm.angular_constraint is None:
configuration_space = Wedge(
(0, 0),
r=robot_arm.reach,
theta1=0,
theta2=360,
width=robot_arm.reach - robot_arm.inner_reach,
facecolor='grey',
alpha=0.3,
edgecolor='black',
linewidth=0.6
)
axis.add_patch(configuration_space)
| python |
#!/usr/bin/python3
'''jump_player.py'''
import pgzrun
SPEED = 6
WIDTH = 800
HEIGHT = 300
PLAYER_XPOS, PLAYER_YPOS = 75, HEIGHT-60
ANI_SPEED = 4
JUMP = 18
GRAVITY = 1.0
PLAYER_IMG = 'bot'
bg = []
bg.append(Actor('ground', anchor=('left', 'bottom')))
bg.append(Actor('ground', anchor=('left', 'bottom')))
player = Actor(f'{PLAYER_IMG}0', anchor=('left', 'bottom'))
player.vy = 0
player.frame = 0
bg[1].x = WIDTH
bg[0].y = HEIGHT
bg[1].y = HEIGHT
def reset():
''' set starting positions '''
player.x = PLAYER_XPOS
player.vy = 0
player.y = PLAYER_YPOS
def update_bg():
''' scroll the background images '''
bg[0].left -= SPEED
bg[1].left -= SPEED
if bg[0].x < -WIDTH:
bg[0].x = WIDTH
if bg[1].x < -WIDTH:
bg[1].x = WIDTH
def update_player():
    ''' handle animation and vertical movement of the player '''
uy = player.vy
player.vy += GRAVITY
player.y += (uy + player.vy) / 2
if player.y > PLAYER_YPOS:
player.image = f'{PLAYER_IMG}{player.frame // ANI_SPEED}'
player.y = PLAYER_YPOS
player.vy = 0
else:
player.image = f'{PLAYER_IMG}up{player.frame // ANI_SPEED}'
player.frame = (player.frame + 1) % (3*ANI_SPEED)
def tap():
''' react to taps '''
if player.vy == 0:
player.vy -= JUMP
def on_key_down():
''' react to key presses '''
tap()
def on_mouse_down():
''' react to mouse clicks '''
tap()
def update():
''' pgzero function to update game objects '''
update_bg()
update_player()
def draw():
''' pgzero function to establish objects '''
bg[1].draw()
bg[0].draw()
player.draw()
reset()
pgzrun.go()
#End
| python |
from celery import shared_task
from grandchallenge.archives.models import Archive
from grandchallenge.cases.models import Image
@shared_task
def add_images_to_archive(*, upload_session_pk, archive_pk):
images = Image.objects.filter(origin_id=upload_session_pk)
archive = Archive.objects.get(pk=archive_pk)
archive.images.add(*images.all())
| python |
from markdown import markdown
def yup():
return markdown('A **long** time ago in a galaxy far, **far** away...') | python |
#twitterclient
import twitter
from configuration import configuration
class twitterclient:
def __init__(self):
config = configuration("config.ini")
self.api = twitter.Api(consumer_key=config.getTwitterConsumerKey(),
consumer_secret=config.getTwitterConsumerSecret(),
access_token_key=config.getTwitterAccessToken(),
access_token_secret=config.getTwitterAccessTokenSecret())
def tweet(self, message):
self.api.PostUpdate(message) | python |
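# Minimal usage sketch (assumes config.ini provides valid Twitter API credentials):
#   twitterclient().tweet('Hello, world!')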
from datetime import datetime, timedelta
import airflow
from airflow import DAG
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
# Task arguments
task_args = {
"depends_on_past": False,
"email_on_failure": True,
"owner": "filippoberio",
"email": ["[email protected]"],
}
dag = DAG(
"sdt",
default_args=task_args,
description="run at a specified time of day",
start_date= datetime.now(),
schedule_interval= None,
#start_date=datetime(2018, 12, 19),
#schedule_interval= '0 4 * * *',
catchup=False
)
def assign_task_to_dag(target_dag):
# Define your docker image and the AWS role that will run the image (based on your airflow-repo)
IMAGE = "593291632749.dkr.ecr.eu-west-1.amazonaws.com/airflow-sdt:v1.6.3"
ROLE = "airflow_sdt"
task_id = "sdt-data-update"
return KubernetesPodOperator(
dag= target_dag,
namespace="airflow",
image=IMAGE,
labels={"app": dag.dag_id},
name=task_id,
in_cluster=True,
task_id=task_id,
get_logs=True,
annotations={"iam.amazonaws.com/role": ROLE},
)
task = assign_task_to_dag(dag)
| python |
""" Inference demo """
import numpy as np
from bcipy.signal.model.inference import inference
from bcipy.signal.model.mach_learning.train_model import train_pca_rda_kde_model
import matplotlib as mpl
mpl.use('TkAgg')
import matplotlib.pylab as plt
dim_x = 5
num_ch = 1
num_x_p = 100
num_x_n = 900
mean_pos = .8
var_pos = .5
mean_neg = 0
var_neg = .5
x_p = mean_pos + var_pos * np.random.randn(num_ch, num_x_p, dim_x)
x_n = mean_neg + var_neg * np.random.randn(num_ch, num_x_n, dim_x)
y_p = [1] * num_x_p
y_n = [0] * num_x_n
x = np.concatenate((x_p, x_n), 1)
y = np.concatenate(np.asarray([y_p, y_n]), 0)
permutation = np.random.permutation(x.shape[1])
x = x[:, permutation, :]
y = y[permutation]
k_folds = 10
model, _ = train_pca_rda_kde_model(x, y, k_folds=k_folds)
alp = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'R', 'S', 'T', 'U', 'V', 'Y', 'Z', '<', '_']
num_x_p = 1
num_x_n = 9
x_p_s = mean_pos + var_pos * np.random.randn(num_ch, num_x_p, dim_x)
x_n_s = mean_neg + var_neg * np.random.randn(num_ch, num_x_n, dim_x)
x_s = np.concatenate((x_n_s, x_p_s), 1)
idx_let = np.random.permutation(len(alp))
letters = [alp[i] for i in idx_let[0:(num_x_p + num_x_n)]]
print(letters)
print('target letter: {}'.format(letters[-1]))
lik_r = inference(x_s, letters, model, alp)
plt.plot(np.array(list(range(len(alp)))), lik_r, 'ro')
plt.xticks(np.array(list(range(len(alp)))), alp)
plt.show()
| python |
import pub.settings as s
import json, requests
import pub.response.wrap as wrapper
import pub.response.error as e
import pub.client.auth_handler as auth_handler
import re
auth_url = 'https://github.com/login/oauth/authorize?client_id=' \
+ s.GITHUB_CLIENT_ID + '&state='
access_token_url = 'https://github.com/login/oauth/access_token'
info_url = 'https://api.github.com/user?access_token='
def begin_login(session):
return wrapper.jump(auth_url+session)
def handle_callback(request, _):
try:
code = request.GET.get('code')
session = request.GET.get('state')
# \
# + '?client_id=' + s.GITHUB_CLIENT_ID \
# + '&client_secret=' + s.GITHUB_CLIENT_SECRETS \
# + '&code='
params = {'client_id': s.GITHUB_CLIENT_ID, 'client_secret': s.GITHUB_CLIENT_SECRETS, 'code': code}
        # default (urlencoded) token response; it is parsed with a regex below
        res = requests.post(access_token_url, data=params).text
#return e.json_err_text(res)
try:
access_token = re.match(r'access_token=(.*?)&', res).group(1)
#return e.page(request, 511, access_token, res)
except Exception as e1:
return e.page(request, 501, e1, res)
url = info_url + access_token
headers = {"Authorization": "token " + access_token}
res2 = requests.get(url, headers=headers).text
#return e.page(request, 502, 'id?', res2)
try:
result = json.loads(res2)
except Exception as e2:
return e.page(request, 502, e2, res2)
data = {'openid': result['id'], 'nickname': result['login'], 'headimg': result['avatar_url'],
'session_id': session, 'authprovider': s.AUTH_PROVIDER_GITHUB}
return auth_handler.login_user(request, data)
except Exception as d:
return e.page(request,500,"x",d)
| python |
from django.shortcuts import render ,redirect
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.exceptions import AuthenticationFailed
from .models import UserDetail ,Profile ,HotelDetail
import os
from django.conf import settings
from twilio.rest import Client
import random
import jwt
from userapi.serializers import UserDetailSerializer ,HotelDetailSerializer
# Create your views here.
otp = random.randint(100000,999999)
def index(request):
name = settings.ALLOWED_HOSTS[0]
context = {
"name":name
}
return render(request,'index.html',context)
class Register(APIView):
# def send_otp(self , phone ,otp):
# print("send_otp is called")
# # Find your Account SID and Auth Token at twilio.com/console
# # and set the environment variables. See http://twil.io/secure
# account_sid = 'ACcd0432425fbd8f4d1e87fb25e9fce7b6'
# auth_token = '05cc64529a27dd4392d887a81d8e80af'
# client = Client(account_sid, auth_token)
#
# message = client.messages \
# .create(
# body=f'Your login otp is {otp}',
# from_='+14352161497',
# to='+918400842036'
# )
# print(message.sid)
# return None
def post(self,request):
name = request.data['name']
email = request.data['email']
phone = request.data['phone']
check_phone = UserDetail.objects.filter(phone = phone).first()
check_email = UserDetail.objects.filter(email=email)
if check_phone:
return Response({'message':'This mobile number already exist'})
if check_email:
return Response({'message':'This email already exist'})
serializer = UserDetailSerializer(data = request.data)
if serializer.is_valid():
serializer.save()
return Response({'status':'true','message':'you are registered succesfully'},status=status.HTTP_201_CREATED)
# self.send_otp(phone,otp)
# profile = Profile(phone = phone ,otp = otp)
# profile.save()
# request.session['phone'] = phone
# redirect('OtpVerification')
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class OtpVerification(APIView):
def post(self,request):
phone = request.session['phone']
otp_check = request.data['otp']
user = UserDetail.objects.filter(phone = phone).first()
profile = Profile.objects.filter(phone = phone).last()
if otp_check != profile.otp:
return Response({'status':'False','message':'otp is wrong'})
else:
#return Response({'status':'True','message':'otp verified'})
payload = {
'id':profile.id,
'phone':profile.phone
}
token = jwt.encode(payload ,key = "donottellanyone",algorithm='HS256')
# print(token.decode('utf-8'))
# print(type(token))
response = Response()
response.set_cookie(key='jwt',value = token, httponly = True)
detail = {
'name':user.name,
'phone':user.phone,
'email':user.email
}
response.data = {
'status':'True',
'jwt':token,
'message':'otp verified',
'detail':detail
}
return response
# return Response({'message':'Saved succesfully in database'})
class Login(APIView):
def send_otp(self , phone ,otp):
print("send_otp is called")
# Find your Account SID and Auth Token at twilio.com/console
# and set the environment variables. See http://twil.io/secure
account_sid = 'ACcd0432425fbd8f4d1e87fb25e9fce7b6'
auth_token = '05cc64529a27dd4392d887a81d8e80af'
client = Client(account_sid, auth_token)
message = client.messages \
.create(
body=f'Your login otp is {otp}',
from_='+14352161497',
to='+918400842036'
)
print(message.sid)
return None
    def post(self,request):
        phone = request.data['phone']
        phone_check = UserDetail.objects.filter(phone = phone).first()
        if phone_check is None:
            return Response({'message':'This number is not registered'},status = status.HTTP_403_FORBIDDEN)
        # generate a fresh OTP per request; the module-level otp is created
        # once per process and would repeat for every login until restart
        otp = random.randint(100000, 999999)
        self.send_otp(phone,otp)
        profile = Profile(phone = phone ,otp = otp)
        profile.save()
        request.session['phone'] = phone
        return Response({'status':'true','message':'otp is sent to you'})
class HotelDetailView(APIView):
def get(self,request):
# token = request.COOKIES.get('jwt')
# print(token)
# if not token:
# raise AuthenticationFailed('Unauthenticated')
# try:
# payload = jwt.decode(jwt = token ,key="donottellanyone",algorithms = ['HS256'])
# except jwt.ExpiredSignatureError:
# raise AuthenticationFailed('Unauthenticated')
hotel = HotelDetail.objects.all()
serializer =HotelDetailSerializer(hotel ,many=True)
return Response(serializer.data)
class Logout(APIView):
def post(self,request):
response = Response()
response.delete_cookie('jwt')
response.data ={
'message': 'Succesfully Logout'
}
return response
| python |
from decimal import Decimal
class BaseSymbolDTO(object):
def __init__(self, symbol: str):
self.symbol = symbol
@property
def symbol(self) -> str:
return self._symbol
@symbol.setter
def symbol(self, value: str):
self._symbol = value
class BaseOrder(BaseSymbolDTO):
def __init__(
self,
symbol: str,
orderId: int,
clientOrderId: str,
price: float,
origQty: float,
executedQty: float,
cummulativeQuoteQty: float,
status: str,
timeInForce: str,
type: str,
side: str,
):
super().__init__(symbol)
self.orderId = orderId
self.clientOrderId = clientOrderId
self.price = price
self.origQty = origQty
self.executedQty = executedQty
self.cummulativeQuoteQty = cummulativeQuoteQty
self.status = status
self.timeInForce = timeInForce
self.type = type
self.side = side
@property
def orderId(self) -> int:
return self._orderId
@orderId.setter
def orderId(self, value: int):
self._orderId = int(value)
@property
def clientOrderId(self) -> str:
return self._clientOrderId
@clientOrderId.setter
def clientOrderId(self, value: str):
self._clientOrderId = value
@property
def price(self) -> Decimal:
return self._price
@price.setter
def price(self, value: float):
self._price = Decimal(value)
@property
def origQty(self) -> Decimal:
return self._origQty
@origQty.setter
def origQty(self, value: float):
self._origQty = Decimal(value)
@property
def executedQty(self) -> Decimal:
return self._executedQty
@executedQty.setter
def executedQty(self, value: float):
self._executedQty = Decimal(value)
@property
def cummulativeQuoteQty(self) -> Decimal:
return self._cummulativeQuoteQty
@cummulativeQuoteQty.setter
def cummulativeQuoteQty(self, value: float):
self._cummulativeQuoteQty = Decimal(value)
@property
def status(self) -> str:
return self._status
@status.setter
def status(self, value: str):
self._status = value
@property
def timeInForce(self) -> str:
return self._timeInForce
@timeInForce.setter
def timeInForce(self, value: str):
self._timeInForce = value
@property
def type(self) -> str:
return self._type
@type.setter
def type(self, value: str):
self._type = value
@property
def side(self) -> str:
return self._side
@side.setter
def side(self, value: str):
self._side = value
class PairInfo(BaseSymbolDTO):
def __init__(
self,
symbol: str,
status: str,
baseAsset: str,
baseAssetPrecision: int,
        quoteAsset: str,
quotePrecision: int,
orderTypes: list,
icebergAllowed: bool,
filters: list,
):
super().__init__(symbol)
self.status = status
self.baseAsset = baseAsset
self.baseAssetPrecision = baseAssetPrecision
self.quoteAsset = quoteAsset
self.quotePrecision = quotePrecision
self.orderTypes = orderTypes
self.icebergAllowed = icebergAllowed
self.filters = filters
self._extractFilters()
@property
def status(self) -> str:
return self._status
@status.setter
def status(self, value: str):
self._status = value
@property
def baseAsset(self) -> str:
return self._baseAsset
@baseAsset.setter
def baseAsset(self, value: str):
self._baseAsset = value
@property
def baseAssetPrecision(self) -> int:
return self._baseAssetPrecision
@baseAssetPrecision.setter
def baseAssetPrecision(self, value: int):
self._baseAssetPrecision = int(value)
@property
def quoteAsset(self) -> str:
return self._quoteAsset
@quoteAsset.setter
def quoteAsset(self, value: str):
self._quoteAsset = value
@property
def quotePrecision(self) -> int:
return self._quotePrecision
@quotePrecision.setter
def quotePrecision(self, value: int):
self._quotePrecision = int(value)
@property
def orderTypes(self) -> list:
return self._orderTypes
@orderTypes.setter
def orderTypes(self, value: list):
self._orderTypes = value
@property
def icebergAllowed(self) -> bool:
return self._icebergAllowed
@icebergAllowed.setter
def icebergAllowed(self, value: bool):
self._icebergAllowed = bool(value)
@property
def filters(self) -> list:
return self._filters
@filters.setter
def filters(self, value: list):
self._filters = value
@property
def minPrice(self) -> Decimal:
return self._minPrice
@minPrice.setter
def minPrice(self, value: float):
self._minPrice = Decimal(value)
@property
def maxPrice(self) -> Decimal:
return self._maxPrice
@maxPrice.setter
def maxPrice(self, value: float):
self._maxPrice = Decimal(value)
@property
def tickSize(self) -> Decimal:
return self._tickSize
@tickSize.setter
def tickSize(self, value: float):
self._tickSize = Decimal(value)
@property
def minAmount(self) -> Decimal:
return self._minAmount
@minAmount.setter
def minAmount(self, value: float):
self._minAmount = Decimal(value)
def _extractFilters(self):
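        # Pull the PRICE_FILTER and MIN_NOTIONAL entries out of the raw exchange filter list.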
price = None
notional = None
for item in self.filters:
if item["filterType"] == "PRICE_FILTER":
price = item
continue
if item["filterType"] == "MIN_NOTIONAL":
notional = item
continue
        if not price:
            raise InsufficientDataException(
                'Unable to find filter "PRICE_FILTER" for pair: {}'.format(self.symbol)
            )
        if not notional:
            raise InsufficientDataException(
                'Unable to find filter "MIN_NOTIONAL" for pair: {}'.format(self.symbol)
            )
self.minPrice = Decimal(price["minPrice"])
self.maxPrice = Decimal(price["maxPrice"])
self.tickSize = Decimal(price["tickSize"])
self.minAmount = Decimal(notional["minNotional"])
class Order(BaseOrder):
def __init__(
self,
symbol: str,
orderId: int,
clientOrderId: str,
price: float,
origQty: float,
executedQty: float,
cummulativeQuoteQty: float,
status: str,
timeInForce: str,
type: str,
side: str,
stopPrice: float,
icebergQty: float,
time: int,
updateTime: int,
isWorking: bool,
):
super().__init__(
symbol,
orderId,
clientOrderId,
price,
origQty,
executedQty,
cummulativeQuoteQty,
status,
timeInForce,
type,
side,
)
self.stopPrice = stopPrice
self.icebergQty = icebergQty
self.time = time
self.updateTime = updateTime
self.isWorking = isWorking
@property
def stopPrice(self) -> Decimal:
return self._stopPrice
@stopPrice.setter
def stopPrice(self, value: float):
self._stopPrice = Decimal(value)
@property
def icebergQty(self) -> Decimal:
return self._icebergQty
@icebergQty.setter
def icebergQty(self, value: float):
self._icebergQty = Decimal(value)
@property
def time(self) -> int:
return self._time
@time.setter
def time(self, value: int):
self._time = int(value)
@property
def updateTime(self) -> int:
return self._updateTime
@updateTime.setter
def updateTime(self, value: int):
self._updateTime = int(value)
@property
def isWorking(self) -> bool:
return self._isWorking
@isWorking.setter
def isWorking(self, value: bool):
self._isWorking = bool(value)
class BookTicker(BaseSymbolDTO):
def __init__(
self,
symbol: str,
bidPrice: float,
bidQty: float,
askPrice: float,
askQty: float,
):
super().__init__(symbol=symbol)
self.bidPrice = bidPrice
self.bidQty = bidQty
self.askPrice = askPrice
self.askQty = askQty
@property
def bidPrice(self) -> Decimal:
return self._bidPrice
@bidPrice.setter
def bidPrice(self, value: float):
self._bidPrice = Decimal(value)
@property
def bidQty(self) -> Decimal:
return self._bidQty
@bidQty.setter
def bidQty(self, value: float):
self._bidQty = Decimal(value)
@property
def askPrice(self) -> Decimal:
return self._askPrice
@askPrice.setter
def askPrice(self, value: float):
self._askPrice = Decimal(value)
@property
def askQty(self) -> Decimal:
return self._askQty
@askQty.setter
def askQty(self, value: float):
self._askQty = Decimal(value)
class TradeResult(BaseOrder):
def __init__(
self,
symbol: str,
orderId: int,
clientOrderId: str,
transactTime: int,
price: float,
origQty: float,
executedQty: float,
cummulativeQuoteQty: float,
status: str,
timeInForce: str,
type: str,
side: str,
fills: list,
):
super().__init__(
symbol,
orderId,
clientOrderId,
price,
origQty,
executedQty,
cummulativeQuoteQty,
status,
timeInForce,
type,
side,
)
self.transactTime = transactTime
self.fills = fills
@property
def transactTime(self) -> int:
return self._transactTime
@transactTime.setter
def transactTime(self, value: int):
self._transactTime = int(value)
@property
def fills(self) -> list:
return self._fills
@fills.setter
def fills(self, value: list):
self._fills = value
class Ticker(BaseSymbolDTO):
def __init__(
self,
symbol: str,
priceChange: float,
priceChangePercent: float,
weightedAvgPrice: float,
prevClosePrice: float,
lastPrice: float,
lastQty: float,
bidPrice: float,
askPrice: float,
openPrice: float,
highPrice: float,
lowPrice: float,
volume: float,
quoteVolume: float,
openTime: int,
closeTime: int,
firstId: int,
lastId: int,
count: int,
):
super().__init__(symbol)
self.priceChange = priceChange
self.priceChangePercent = priceChangePercent
self.weightedAvgPrice = weightedAvgPrice
self.prevClosePrice = prevClosePrice
self.lastPrice = lastPrice
self.lastQty = lastQty
self.bidPrice = bidPrice
self.askPrice = askPrice
self.openPrice = openPrice
self.highPrice = highPrice
self.lowPrice = lowPrice
self.volume = volume
self.quoteVolume = quoteVolume
self.openTime = openTime
self.closeTime = closeTime
self.firstId = firstId
self.lastId = lastId
self.count = count
@property
def priceChange(self) -> Decimal:
return self._priceChange
@priceChange.setter
def priceChange(self, value: float):
self._priceChange = Decimal(value)
@property
def priceChangePercent(self) -> Decimal:
return self._priceChangePercent
@priceChangePercent.setter
def priceChangePercent(self, value: float):
self._priceChangePercent = Decimal(value)
@property
def weightedAvgPrice(self) -> Decimal:
return self._weightedAvgPrice
@weightedAvgPrice.setter
def weightedAvgPrice(self, value: float):
self._weightedAvgPrice = Decimal(value)
@property
def prevClosePrice(self) -> Decimal:
return self._prevClosePrice
@prevClosePrice.setter
def prevClosePrice(self, value: float):
self._prevClosePrice = Decimal(value)
@property
def lastPrice(self) -> Decimal:
return self._lastPrice
@lastPrice.setter
def lastPrice(self, value: float):
self._lastPrice = Decimal(value)
@property
def lastQty(self) -> Decimal:
return self._lastQty
@lastQty.setter
def lastQty(self, value: float):
self._lastQty = Decimal(value)
@property
def bidPrice(self) -> Decimal:
return self._bidPrice
@bidPrice.setter
def bidPrice(self, value: float):
self._bidPrice = Decimal(value)
@property
def askPrice(self) -> Decimal:
return self._askPrice
@askPrice.setter
def askPrice(self, value: float):
self._askPrice = Decimal(value)
@property
def openPrice(self) -> Decimal:
return self._openPrice
@openPrice.setter
def openPrice(self, value: float):
self._openPrice = Decimal(value)
@property
def highPrice(self) -> Decimal:
return self._highPrice
@highPrice.setter
def highPrice(self, value: float):
self._highPrice = Decimal(value)
@property
def lowPrice(self) -> Decimal:
return self._lowPrice
@lowPrice.setter
def lowPrice(self, value: float):
self._lowPrice = Decimal(value)
@property
def volume(self) -> Decimal:
return self._volume
@volume.setter
def volume(self, value: float):
self._volume = Decimal(value)
@property
def quoteVolume(self) -> Decimal:
return self._quoteVolume
@quoteVolume.setter
def quoteVolume(self, value: float):
self._quoteVolume = Decimal(value)
@property
def openTime(self) -> int:
return self._openTime
@openTime.setter
def openTime(self, value: int):
self._openTime = int(value)
@property
def closeTime(self) -> int:
return self._closeTime
@closeTime.setter
def closeTime(self, value: int):
self._closeTime = int(value)
@property
def firstId(self) -> int:
return self._firstId
@firstId.setter
def firstId(self, value: int):
self._firstId = int(value)
@property
def lastId(self) -> int:
return self._lastId
@lastId.setter
def lastId(self, value: int):
self._lastId = int(value)
@property
def count(self) -> int:
return self._count
@count.setter
def count(self, value: int):
self._count = int(value)
class InsufficientDataException(RuntimeError):
"""
Exception when data from response is not enough to init DTO object
"""
pass
| python |
from __future__ import annotations
from spark_auto_mapper_fhir.fhir_types.uri import FhirUri
from spark_auto_mapper_fhir.value_sets.generic_type import GenericTypeCode
from spark_auto_mapper.type_definitions.defined_types import AutoMapperTextInputType
# This file is auto-generated by generate_classes so do not edit manually
# noinspection PyPep8Naming
class SearchEntryModeCode(GenericTypeCode):
"""
SearchEntryMode
From: http://hl7.org/fhir/search-entry-mode in valuesets.xml
Why an entry is in the result set - whether it's included as a match or
because of an _include requirement, or to convey information or warning
information about the search process.
"""
def __init__(self, value: AutoMapperTextInputType):
super().__init__(value=value)
"""
http://hl7.org/fhir/search-entry-mode
"""
codeset: FhirUri = "http://hl7.org/fhir/search-entry-mode"
class SearchEntryModeCodeValues:
"""
This resource matched the search specification.
From: http://hl7.org/fhir/search-entry-mode in valuesets.xml
"""
Match = SearchEntryModeCode("match")
"""
This resource is returned because it is referred to from another resource in
the search set.
From: http://hl7.org/fhir/search-entry-mode in valuesets.xml
"""
Include = SearchEntryModeCode("include")
"""
An OperationOutcome that provides additional information about the processing
of a search.
From: http://hl7.org/fhir/search-entry-mode in valuesets.xml
"""
Outcome = SearchEntryModeCode("outcome")
| python |
import datetime, random, requests
import pytz
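# Randomized sample telemetry used to exercise the endpoint below.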
data={
"tempInternal" : random.randint(40,100),
"humInternal" : random.randint(0,100),
"tempCab" : random.randint(40,100),
"humCab" : random.randint(0,100),
"batteryV" : random.uniform(12,16),
"batteryIP" : random.uniform(0,50),
"batteryIN" : random.uniform(0,50),
"SoC" : random.uniform(0,100),
"PVV" : random.uniform(12,21),
"PVI" : random.uniform(0,8),
"lightPWM" : random.randint(0,100),
"bInverter" : 0,
"bUVLO" : random.randint(0,1),
"bFridge" : random.randint(0,1),
"generatedTimestamp" : datetime.datetime.now(pytz.timezone('US/Pacific'))
}
r = requests.post('http://localhost:8000/possumTrack/telemetry', data = data)
print(r.content)
| python |
import torch
import torch.distributed as dist
from .pairwise import PairwiseCommTrainer
class GossipingSGDPullTrainer(PairwiseCommTrainer):
""" Gossiping SGD - pull variant. """
def __init__(self, *args, **kwargs):
super(GossipingSGDPullTrainer, self).__init__(*args, **kwargs)
def compute_comm_updates(self):
if (self.peer is None) and (not self.requesters):
return
self.logger.debug('Computing gossiping sgd (pull) updates')
self.comm_updates = []
with self.on_cpu_for_comm():
for param in self.model.parameters():
self.logger.debug('Sending and receiving param(s)')
# A container to hold async requests and param sets
requests = []
buffer = torch.zeros_like(param.data)
if self.peer is not None:
self.logger.debug('Initiating irecv request with own '
'peer: rank %s' % self.peer)
requests.append(dist.irecv(
tensor=buffer,
src=self.peer
))
for peer in self.requesters:
self.logger.debug('Initiating isend request with '
'requesting peer: rank %s' % peer)
requests.append(dist.isend(
tensor=param.data,
dst=peer
))
# Wait for all the requests to complete
for r in requests:
r.wait()
self.logger.debug('Requests complete')
if self.peer is None:
continue
# Then compute the Gossiping SGD update.
s = param.data - buffer
s /= 2
self.comm_updates.append(s)
self.logger.debug('Finished computing average '
'for parameter set')
self.logger.debug('Done computing gossiping sgd (pull) updates')
return
| python |
import os
import json
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
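# Resolve para_whitelist.json relative to this module rather than the current working directory.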
with open(os.path.join(__location__, 'para_whitelist.json')) as data_file:
whitelist = json.load(data_file)
| python |
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
'''
@Description:
@Author: Zpp
@Date: 2019-09-02 16:04:11
@LastEditTime: 2019-09-12 11:27:19
@LastEditors: Zpp
'''
| python |
"""Test speed initialization by a map of speeds and their corresponding ratios."""
import numpy as np
from flatland.envs.rail_env import RailEnv
from flatland.envs.rail_generators import complex_rail_generator
from flatland.envs.schedule_generators import speed_initialization_helper, complex_schedule_generator
def test_speed_initialization_helper():
np.random.seed(1)
speed_ratio_map = {1: 0.3, 2: 0.4, 3: 0.3}
actual_speeds = speed_initialization_helper(10, speed_ratio_map)
# seed makes speed_initialization_helper deterministic -> check generated speeds.
assert actual_speeds == [2, 3, 1, 2, 1, 1, 1, 2, 2, 2]
def test_rail_env_speed_intializer():
speed_ratio_map = {1: 0.3, 2: 0.4, 3: 0.1, 5: 0.2}
env = RailEnv(width=50, height=50,
rail_generator=complex_rail_generator(nr_start_goal=10, nr_extra=1, min_dist=8, max_dist=99999,
seed=1), schedule_generator=complex_schedule_generator(),
number_of_agents=10)
env.reset()
actual_speeds = list(map(lambda agent: agent.speed_data['speed'], env.agents))
expected_speed_set = set(speed_ratio_map.keys())
# check that the number of speeds generated is correct
assert len(actual_speeds) == env.get_num_agents()
# check that only the speeds defined are generated
assert all({(actual_speed in expected_speed_set) for actual_speed in actual_speeds})
| python |
from aqt import mw
from aqt.utils import showInfo, showWarning
from PyQt5.QtWidgets import QAction, QMenu
from aqt.qt import *
from sqlite3 import connect
from os.path import dirname, join, realpath
import webbrowser
from .Ui import start_main
all_data = ""
this_version = "v2.2"
###MENU###
def About():
showInfo("""<h2>Chinese Words Finder %(version)s</h2><br>This add-on uses the <a href="https://cc-cedict.org/wiki/">CC-CEDICT</a> dictionary.
It is licensed under the <a href="https://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-Share Alike 3.0 License</a>.
<br>The HSK list can be downloaded <a href="http://www.chinesetest.cn/godownload.do">here.</a><br>The results of 'Chinese Word Finder' are
ordered by frequency based on the results of the BCC corpus. The complete wordlist can be downloaded
<a href="http://bcc.blcu.edu.cn/downloads/resources/BCC_LEX_Zh.zip">here.</a><br>
<a href="https://www.plecoforums.com/threads/word-frequency-list-based-on-a-15-billion-character-corpus-bcc-blcu-chinese-corpus.5859/">More
info about the corpus.</a><br><br>The code for this add-on is available on
<a href='https://github.com/ThoreBor/ChineseWordsFinder'>GitHub. </a>Licensed under the
<a href='https://github.com/ThoreBor/ChineseWordsFinder/blob/master/License.txt'>MIT License.</a><br><br>
If you like this add-on, rate and review it on <a href='https://ankiweb.net/shared/info/2048169015'>Anki Web</a>,
    or contribute code on GitHub.<br><div>Icon made by <a href="https://www.flaticon.com/authors/freepik" title="Freepik">Freepik</a>
from <a href="https://www.flaticon.com/" title="Flaticon">www.flaticon.com</a></div><br><b>©Thore Tyborski 2020</b>"""
% {'version':this_version}, title='About')
def github():
webbrowser.open('https://github.com/ThoreBor/ChineseWordsFinder/issues')
def Main():
mw.wordfinder = start_main()
mw.wordfinder.show()
mw.wordfinder.raise_()
mw.wordfinder.activateWindow()
def add_menu(Name, Button, exe, *sc):
action = QAction(Button, mw)
action.triggered.connect(exe)
if not hasattr(mw, 'menu'):
mw.menu = {}
if Name not in mw.menu:
add = QMenu(Name, mw)
mw.menu[Name] = add
mw.form.menubar.insertMenu(mw.form.menuTools.menuAction(), add)
mw.menu[Name].addAction(action)
for i in sc:
action.setShortcut(QKeySequence(i))
add_menu('&CWF',"&Start", Main, 'Ctrl+W')
add_menu('&CWF',"&Make a feature request or report a bug", github)
add_menu('&CWF',"&About", About) | python |
import orio.main.tuner.search.search
from orio.main.util.globals import *
import time
import itertools
import math
class Direct(orio.main.tuner.search.search.Search):
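    # DIRECT ("DIviding RECTangles") global search: repeatedly trisects the
    # potentially optimal hyperrectangles of the parameter space.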
def __init__(self, params):
orio.main.tuner.search.search.Search.__init__(self, params)
# rate-of-change
self.K_roc = .5
# Difference between the current minimum and the "guessed" absolute minimum
# such that f* <= fmin - epsilon fmin
self.epsilon = 1e-4
def searchBestCoord(self, startCoord=None):
# We are in a hyperrectangle. Initialization: take the whole parameter space.
rectangle = [[0, self.dim_uplimits[i]] for i in range(self.total_dims)]
info("initial rectangle %s" % str(rectangle))
fmin = float('inf')
rectangles = [rectangle]
minpoint = self.dim_uplimits
start_time = time.time()
runs = 0
# Keep the rectangles that are of the same measure
        # key: measure (half the longest diagonal length)
        # value: list of tuples ( rectangle, value at the center )
rect_sizes = {}
# initialize
center = self.__getCentroid(rectangle)
cost = self.getPerfCost(center)
fc = sum(cost) / len(cost)
dist = 0
for c in rectangle:
dist = max(dist, self.__distance(c, center))
rect_sizes[dist] = [(rectangle, fc)]
while True:
if rectangles == []:
break
rectangle = rectangles.pop(0)
runs += 1
            # Trisect the rectangle along the longest dimension
            longest_len, longest_dim = max((x, i) for i, x in enumerate([i[1] - i[0] for i in rectangle]))
            if int(round(longest_len / 3)) == 0:
                break
            rec1 = rectangle[:]
            rec1[longest_dim] = rectangle[longest_dim][:]
            rec1[longest_dim][1] = rectangle[longest_dim][0] + int(round(longest_len / 3))  # DIRTY
            corners = list(itertools.product(*rec1, repeat=1))
            cor1 = [list(c) for c in corners]
            r1 = (rec1, cor1)
            rec2 = rectangle[:]
            rec2[longest_dim] = rectangle[longest_dim][:]
            rec2[longest_dim][0] = rectangle[longest_dim][0] + int(round(longest_len / 3))
            rec2[longest_dim][1] = rectangle[longest_dim][0] + int(round(2 * longest_len / 3))
            corners = list(itertools.product(*rec2, repeat=1))
            cor2 = [list(c) for c in corners]
            r2 = (rec2, cor2)
            rec3 = rectangle[:]
            rec3[longest_dim] = rectangle[longest_dim][:]
            rec3[longest_dim][0] = rectangle[longest_dim][0] + int(round(2 * longest_len / 3))
            corners = list(itertools.product(*rec3, repeat=1))
            cor3 = [list(c) for c in corners]
            r3 = (rec3, cor3)
info("Dividing rectangle " + str(rectangle) + " into " + str(rec1) + " AND " + str(rec2) + " AND " + str(rec3))
info("With corners " + str(cor1) + " AND " + str(cor2) + " AND " + str(cor3))
# Select the potentially optimal rectangles
new_fmin = fmin
fstar = (1 - self.epsilon) * fmin
for rec, cor in r1, r2, r3:
info("working in rectangle: " + str(rec) + " corners " + str(cor))
# Take the center
center = self.__getCentroid(cor)
# Evaluate the perf at the center
cost = self.getPerfCost(center)
fc = sum(cost) / len(cost)
dist = 0
for c in cor:
dist = max(dist, self.__distance(c, center))
info("fc " + str(fc) + " dist " + str(dist))
                # Add it to the dictionary
if dist in rect_sizes:
rect_sizes[dist].append((cor, fc))
else:
rect_sizes[dist] = [(cor, fc)]
s = sorted(rect_sizes.keys())
if dist in rect_sizes:
i = s.index(dist)
else:
for i in s:
if i > dist:
break
# rectangles smaller than the current one
I1 = {k: v for k, v in list(rect_sizes.items()) if k in s[:i]}
# rectangles larger than the current one
if i < len(list(rect_sizes.keys())):
I2 = {k: v for k, v in list(rect_sizes.items()) if k in s[i + 1:]}
else:
I2 = {}
# rectangles as big as than the current one
if dist in rect_sizes:
I3 = rect_sizes[dist]
else:
I3 = []
opt = True
# equation (6)
if I3 != []:
for i in I3:
if i[1] < fc:
opt = False
if opt == False:
# Not optimal
continue
# equation (7)
maxI1 = 0
for i in I1:
for r in I1[i]:
value = abs((r[1] - fc) / (i - dist))
if value > maxI1:
maxI1 = value
minI2 = float('inf')
for i in I2:
for r in I2[i]:
value = abs((r[1] - fc) / (i - dist))
if value < minI2:
minI2 = value
if maxI1 > minI2:
opt = False
continue
# equation (8)
if fmin != 0:
value = (fmin - fc) + dist * minI2
value /= abs(fmin)
if value < self.epsilon:
opt = False
continue
                # equation (9)
                else:
                    # the original referenced an undefined `minI1`; `minI2` is the
                    # quantity equation (9) requires
                    if fc > dist * minI2:
                        opt = False
                        continue
# If we are still here, the conditions are fulfilled. The rectangle is potentially optimal.
# Add it (it will be divided later).
info("potentially optimal rectangle found: " + str(rec))
rectangles.append(rec)
# do we have the minimum?
if I1 == {}:
if fc < new_fmin:
new_fmin = fc
minpoint = center
            # Remove the big rectangle from the dictionary
for r in rect_sizes[dist]:
if r[0] == rectangle:
rect_sizes[dist].remove(r)
break
fmin = new_fmin
search_time = time.time() - start_time
return minpoint, fmin, search_time, runs
def __distance(self, p1, p2):
d = 0
for c1, c2 in zip(p1, p2):
d += (c1 - c2) * (c1 - c2)
d = math.sqrt(d)
return d
def __getCentroid(self, coords):
'''Return a centroid coordinate'''
# if self.have_z3:
# model = self.getCentroidZ3(coords)
# point = self.z3ToPoint(model)
# return self.perfParamToCoord(point)
total_coords = len(coords)
centroid = coords[0]
for c in coords[1:]:
centroid = self.addCoords(centroid, c)
centroid = self.mulCoords((1.0 / total_coords), centroid)
return centroid
| python |
import uuid
from django.db import models
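# Choice tuples for the OVH-style flavor, OS, visibility and status fields used by the models below.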
FLAVOR_TYPES = (
('ovh.ssd.eg', 'ovh.ssd.eg'),
('ovh.ssd.cpu', 'ovh.ssd.cpu'),
('ovh.ceph.eg', 'ovh.ceph.eg'),
('ovh.cpu', 'ovh.cpu'),
('ovh.ssd.ram', 'ovh.ssd.ram'),
('ovh.vps-ssd', 'ovh.vps-ssd'),
('ovh.ram', 'ovh.ram'),
)
OS_TYPES = (
('linux', 'linux'),
('windows', 'windows'),
)
VISIBILITY = (
('private', 'private'),
('public', 'public'),
)
IMAGE_STATUS = (
('active', 'active'),
)
IP_TYPES = (
('private', 'private'),
('public', 'public'),
)
IP_STATUS = (
('active', 'active'),
)
INSTANCE_STATUS = (
('active', 'active'),
)
class Account(models.Model):
username = models.CharField(max_length=30, primary_key=True)
class Service(models.Model):
id = models.CharField(max_length=32, primary_key=True)
description = models.TextField(max_length=1000)
creation_date = models.DateTimeField()
class Region(models.Model):
id = models.CharField(max_length=5, primary_key=True)
continent_code = models.CharField(max_length=3)
datacenter_location = models.CharField(max_length=3)
name = models.CharField(max_length=20)
volume = models.CharField(max_length=10, default='UP')
image = models.CharField(max_length=10, default='UP')
network = models.CharField(max_length=10, default='UP')
instance = models.CharField(max_length=10, default='UP')
class Flavor(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
region = models.ForeignKey(Region)
name = models.CharField(max_length=50)
type = models.CharField(max_length=20, choices=FLAVOR_TYPES)
os_type = models.CharField(max_length=20, choices=OS_TYPES)
vcpus = models.PositiveSmallIntegerField()
ram = models.PositiveSmallIntegerField()
disk = models.PositiveSmallIntegerField()
outbound_bandwidth = models.PositiveSmallIntegerField()
inbound_bandwidth = models.PositiveSmallIntegerField()
available = models.BooleanField(default=True)
class Image(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
user = models.CharField(max_length=20)
name = models.CharField(max_length=64)
type = models.CharField(max_length=20, choices=OS_TYPES)
visibility = models.CharField(max_length=7, choices=VISIBILITY)
flavor_type = models.CharField(max_length=20, choices=FLAVOR_TYPES, null=True, blank=True)
status = models.CharField(max_length=15, choices=IMAGE_STATUS, default='active')
region = models.ForeignKey(Region)
plan_code = models.CharField(max_length=64, blank=True, null=True)
min_disk = models.PositiveSmallIntegerField(default=0)
min_ram = models.PositiveSmallIntegerField(default=0)
size = models.FloatField()
creation_date = models.DateTimeField()
class SshKey(models.Model):
id = models.CharField(max_length=24, primary_key=True)
name = models.CharField(max_length=64)
regions = models.ManyToManyField(Region)
public = models.TextField(max_length=2000)
class IpAddress(models.Model):
id = models.CharField(max_length=24, primary_key=True)
type = models.CharField(max_length=24, choices=IP_TYPES)
status = models.CharField(max_length=24, choices=IP_STATUS)
ip = models.GenericIPAddressField()
class Instance(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
name = models.CharField(max_length=64)
region = models.ForeignKey(Region)
flavor = models.ForeignKey(Flavor)
image = models.ForeignKey(Image)
plan_code = models.CharField(max_length=64, blank=True, null=True)
status = models.CharField(max_length=20, choices=INSTANCE_STATUS)
created = models.DateTimeField()
ssh_key = models.ForeignKey(SshKey, null=True, blank=True)
monthly_billing = models.BooleanField(default=False)
ip_addresses = models.ManyToManyField(IpAddress, blank=True)
| python |
# -*- coding: utf-8 -*-
import scrapy
from scrapy_rss import RssItem
class SomeSpider(scrapy.Spider):
name = 'second_spider'
start_urls = ['https://woxcab.github.io/scrapy_rss/']
custom_settings = {
'FEED_TITLE': 'New shop categories',
'FEED_FILE': 'feed2.rss'
}
def parse(self, response):
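        # Emit one RSS item per category name scraped from the page.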
for category_name in response.css('.list-group-item ::text'):
item = RssItem()
item.title = category_name.extract()
yield item
| python |
# -*- coding: utf-8 -*-
# @Author: Chieh-Han Lee
# @Date: 2015-08-05 19:40:44
# @Last Modified by: Chieh-Han Lee
# @Last Modified time: 2016-10-31 23:26:00
# -*- coding: utf-8 -*-
'''
Created on 2012/4/11
@author: KSJ
'''
import numpy as np
from scipy.spatial import cKDTree as KDTree
from scipy.spatial.distance import cdist as scipy_cdist
def idw_est( x, y, z, x_est, y_est ,power = 2):
x, y, z, x_est, y_est =\
map( lambda x : np.array( x, ndmin = 2 ),
( x, y, z, x_est, y_est ) )
#dist_matrix = np.linalg.norm(
# np.hstack((x.T - x_est, y.T - y_est)) , axis=0 ) + 10**-10
dist_matrix =\
np.sqrt( ( x.T - x_est ) **2 + ( y.T - y_est ) **2 ) + 10**-10
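    # the 10**-10 epsilon keeps the reciprocal finite when an estimation point coincides with a data point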
weight_matrix = np.reciprocal( dist_matrix ** power )
up_matrix = weight_matrix * z.T
up_matrix = up_matrix.sum( axis = 0 ) #sum column
down_matrix = weight_matrix.sum( axis = 0 ) #sum column
z_est = up_matrix / down_matrix
return z_est
def idw_est_coord_value(coord, value, coord_est, power = 2):
'''
coord: a 2d array, r x d, row is data count, column is dimension
value: a 2d array, r x 1, row is data count, column is value
    coord_est: ditto coord
'''
coord_matrix = scipy_cdist(coord_est, coord) #coord_est by coord
weight_matrix = np.reciprocal(coord_matrix**power)
    # remove duplicated locations (set weight to 0)
weight_matrix[np.isinf(weight_matrix)] = 0.
up_matrix = weight_matrix * value.T
up_matrix = up_matrix.sum(axis=1, keepdims=True) #sum column
down_matrix = weight_matrix.sum(axis=1, keepdims=True) #sum column
value_est = up_matrix / down_matrix
return value_est
def idw_kdtree( grid_s, grid_v, grid_s_est, nnear=10, eps=0, power=2, weights=None, leafsize=16 ):
'''
Inverse distance weighting (IDW) method using KDtree
Syntax
        interp = idw_kdtree( grid_s, grid_v, grid_s_est, nnear=10, eps=0, power=2, weights=None, leafsize=16 )
Input
grid_s:
[r1 x d]. Coordinates in grid format.
grid_v:
[r1 x 1].
grid_s_est:
[r2 x d].
nnear:
        integer. The list of k-th nearest neighbors to return. If k is an integer it is
        treated as a list of [1, ... k] (range(1, k+1)). Note that the counting starts
        from 1.
eps:
        nonnegative float. Return approximate nearest neighbors; the k-th returned
value is guaranteed to be no further than (1+eps) times the distance to
the real k-th nearest neighbor.
power:
integer. Power parameter. Greater values of p assign greater influence to values
closest to the interpolated point, with the result turning into a mosaic of tiles
(a Voronoi diagram) with nearly constant interpolated value for large values of p
weights:
[]. Weighted matrix.
leafsize:
positive integer. The number of points at which the algorithm switches over to brute-force.
Output
interp:
[r2 x 1].Interpolation result of IDW.
'''
tree = KDTree(grid_s, leafsize=leafsize)
distances, indices = tree.query(grid_s_est, k=nnear, eps=eps)
interp = np.zeros( (len(grid_s_est),) + np.shape(grid_v[0]) )
iternum = 0
for dist, idx in zip(distances, indices):
z0 = grid_v[idx[0]]
if nnear == 1:
weighted_v = grid_v[idx]
elif dist[0] < 1e-10 and ~np.isnan(z0):
weighted_v = grid_v[idx[0]]
else:
ix = np.where(dist==0)[0]
if ix.size:
dist = np.delete(dist, ix)
idx = np.delete(idx, ix)
ix = np.where(np.isnan(grid_v[idx]))[0]
dist = np.delete(dist, ix)
idx = np.delete(idx, ix)
weight_matrix = np.reciprocal( dist ** power )
if weights is not None:
weight_matrix *= weights[idx]
weight_matrix /= np.sum(weight_matrix)
weighted_v = np.dot(weight_matrix, grid_v[idx])
interp[iternum] = weighted_v
iternum += 1
return interp
if __name__ == "__main__":
x = np.random.random(5)
y = np.random.random(5)
z = np.random.random(5)
x_est = np.random.random(7)
y_est = np.random.random(7)
    print(idw_est(x, y, z, x_est, y_est))
grid_s = np.random.random((100,2))
grid_v = np.random.random((100,1))
grid_s_est = np.random.random((7000,2))
    print(idw_kdtree(grid_s, grid_v, grid_s_est))
| python |
# -*- coding: utf-8 -*-
"""
Test metering and floating behaviors of DRM Library.
"""
from time import sleep
from random import randint
from datetime import datetime, timedelta
from re import search
import pytest
@pytest.mark.minimum
def test_metered_start_stop_short_time(accelize_drm, conf_json, cred_json, async_handler):
"""
Test no error occurs in normal start/stop metering mode during a short period of time
"""
driver = accelize_drm.pytest_fpga_driver[0]
async_cb = async_handler.create()
activators = accelize_drm.pytest_fpga_activators[0]
activators.reset_coin()
activators.autotest()
cred_json.set_user('accelize_accelerator_test_02')
async_cb.reset()
conf_json.reset()
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
activators[0].generate_coin(1000)
drm_manager.activate()
sleep(1)
activators[0].check_coin(drm_manager.get('metered_data'))
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
activators[0].generate_coin(10)
activators[0].check_coin(drm_manager.get('metered_data'))
drm_manager.deactivate()
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
coins = drm_manager.get('metered_data')
assert coins == 0
async_cb.assert_NoError()
finally:
drm_manager.deactivate()
def test_metered_start_stop_short_time_in_debug(accelize_drm, conf_json, cred_json, async_handler):
"""
    Test no error occurs in normal start/stop metering mode during a short period of time with debug verbosity enabled
"""
driver = accelize_drm.pytest_fpga_driver[0]
async_cb = async_handler.create()
activators = accelize_drm.pytest_fpga_activators[0]
activators.reset_coin()
activators.autotest()
cred_json.set_user('accelize_accelerator_test_02')
async_cb.reset()
conf_json.reset()
conf_json['settings']['log_verbosity'] = 1
conf_json.save()
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
drm_manager.activate()
sleep(1)
assert drm_manager.get('metered_data') == 0
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
activators[0].generate_coin(10)
activators[0].check_coin(drm_manager.get('metered_data'))
drm_manager.deactivate()
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
assert drm_manager.get('metered_data') == 0
async_cb.assert_NoError()
finally:
drm_manager.deactivate()
@pytest.mark.long_run
def test_metered_start_stop_long_time(accelize_drm, conf_json, cred_json, async_handler):
"""
Test no error occurs in normal start/stop metering mode during a long period of time
"""
driver = accelize_drm.pytest_fpga_driver[0]
async_cb = async_handler.create()
activators = accelize_drm.pytest_fpga_activators[0]
activators.reset_coin()
activators.autotest()
cred_json.set_user('accelize_accelerator_test_02')
async_cb.reset()
conf_json.reset()
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
drm_manager.activate()
start = datetime.now()
license_duration = drm_manager.get('license_duration')
assert drm_manager.get('license_status')
assert drm_manager.get('metered_data') == 0
activators.autotest(is_activated=True)
activators[0].generate_coin(10)
activators[0].check_coin(drm_manager.get('metered_data'))
for i in range(3):
wait_period = randint(license_duration-2, license_duration+2)
sleep(wait_period)
start += timedelta(seconds=license_duration)
new_coins = randint(1,10)
activators[0].generate_coin(new_coins)
activators[0].check_coin(drm_manager.get('metered_data'))
drm_manager.deactivate()
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
async_cb.assert_NoError()
finally:
drm_manager.deactivate()
@pytest.mark.minimum
def test_metered_pause_resume_short_time(accelize_drm, conf_json, cred_json, async_handler):
"""
Test no error occurs in normal pause/resume metering mode during a short period of time
"""
driver = accelize_drm.pytest_fpga_driver[0]
async_cb = async_handler.create()
activators = accelize_drm.pytest_fpga_activators[0]
activators.reset_coin()
activators.autotest()
cred_json.set_user('accelize_accelerator_test_02')
async_cb.reset()
conf_json.reset()
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert not drm_manager.get('session_status')
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
drm_manager.activate()
start = datetime.now()
assert drm_manager.get('metered_data') == 0
assert drm_manager.get('session_status')
assert drm_manager.get('license_status')
session_id = drm_manager.get('session_id')
assert len(session_id) > 0
activators.autotest(is_activated=True)
lic_duration = drm_manager.get('license_duration')
assert drm_manager.get('metered_data') == 0
activators[0].generate_coin(10)
activators[0].check_coin(drm_manager.get('metered_data'))
drm_manager.deactivate(True)
assert drm_manager.get('session_status')
assert drm_manager.get('license_status')
assert drm_manager.get('session_id') == session_id
activators.autotest(is_activated=True)
# Wait right before license expiration
wait_period = start + timedelta(seconds=2*lic_duration-2) - datetime.now()
sleep(wait_period.total_seconds())
assert drm_manager.get('session_status')
assert drm_manager.get('license_status')
assert drm_manager.get('session_id') == session_id
activators.autotest(is_activated=True)
# Wait expiration
sleep(4)
assert drm_manager.get('session_status')
assert drm_manager.get('session_id') == session_id
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
drm_manager.activate(True)
assert drm_manager.get('session_status')
assert drm_manager.get('session_id') == session_id
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
drm_manager.deactivate()
assert not drm_manager.get('session_status')
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
assert drm_manager.get('session_id') != session_id
async_cb.assert_NoError()
finally:
drm_manager.deactivate()
@pytest.mark.long_run
def test_metered_pause_resume_long_time(accelize_drm, conf_json, cred_json, async_handler):
"""
    Test no error occurs in normal pause/resume metering mode during a long period of time
"""
driver = accelize_drm.pytest_fpga_driver[0]
async_cb = async_handler.create()
activators = accelize_drm.pytest_fpga_activators[0]
activators.reset_coin()
activators.autotest()
cred_json.set_user('accelize_accelerator_test_02')
async_cb.reset()
conf_json.reset()
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert not drm_manager.get('session_status')
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
async_cb.assert_NoError()
drm_manager.activate()
start = datetime.now()
assert drm_manager.get('metered_data') == 0
assert drm_manager.get('session_status')
assert drm_manager.get('license_status')
session_id = drm_manager.get('session_id')
assert len(session_id) > 0
lic_duration = drm_manager.get('license_duration')
activators.autotest(is_activated=True)
coins = drm_manager.get('metered_data')
for i in range(3):
new_coins = randint(1, 100)
activators[0].generate_coin(new_coins)
activators[0].check_coin(drm_manager.get('metered_data'))
drm_manager.deactivate(True)
async_cb.assert_NoError()
assert drm_manager.get('session_status')
assert drm_manager.get('license_status')
assert drm_manager.get('session_id') == session_id
# Wait randomly
nb_lic_expired = int((datetime.now() - start).total_seconds() / lic_duration)
random_wait = randint((nb_lic_expired+2)*lic_duration-2, (nb_lic_expired+2)*lic_duration+2)
wait_period = start + timedelta(seconds=random_wait) - datetime.now()
sleep(wait_period.total_seconds())
drm_manager.activate(True)
start = datetime.now()
assert drm_manager.get('session_status')
assert drm_manager.get('session_id') == session_id
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
drm_manager.deactivate()
assert not drm_manager.get('session_status')
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
assert drm_manager.get('session_id') != session_id
async_cb.assert_NoError()
finally:
drm_manager.deactivate()
@pytest.mark.minimum
@pytest.mark.no_parallel
def test_metering_limits(accelize_drm, conf_json, cred_json, async_handler, ws_admin):
"""
Test an error is returned and the design is locked when the limit is reached.
"""
driver = accelize_drm.pytest_fpga_driver[0]
async_cb = async_handler.create()
activators = accelize_drm.pytest_fpga_activators[0]
activators.reset_coin()
activators.autotest()
cred_json.set_user('accelize_accelerator_test_03')
# Test activate function call fails when limit is reached
async_cb.reset()
conf_json.reset()
accelize_drm.clean_metering_env(cred_json, ws_admin)
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert drm_manager.get('license_type') == 'Floating/Metering'
assert not drm_manager.get('license_status')
drm_manager.activate()
assert drm_manager.get('drm_license_type') == 'Floating/Metering'
assert drm_manager.get('license_status')
assert drm_manager.get('metered_data') == 0
activators[0].generate_coin(999)
activators[0].check_coin(drm_manager.get('metered_data'))
sleep(1)
drm_manager.deactivate()
activators[0].reset_coin()
assert not drm_manager.get('license_status')
drm_manager.activate()
assert drm_manager.get('license_status')
activators[0].check_coin(drm_manager.get('metered_data'))
activators[0].generate_coin(1)
activators[0].check_coin(drm_manager.get('metered_data'))
sleep(1)
drm_manager.deactivate()
assert not drm_manager.get('license_status')
with pytest.raises(accelize_drm.exceptions.DRMWSReqError) as excinfo:
drm_manager.activate()
assert 'License Web Service error 400' in str(excinfo.value)
assert 'DRM WS request failed' in str(excinfo.value)
assert search(r'\\"Entitlement Limit Reached\\" with .+ for [email protected]', str(excinfo.value))
assert 'You have reached the maximum quantity of 1000. usage_unit for metered entitlement (licensed)' in str(excinfo.value)
assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSReqError.error_code
async_cb.assert_NoError()
finally:
drm_manager.deactivate()
print('Test activate function fails when limit is reached: PASS')
# Test background thread stops when limit is reached
async_cb.reset()
conf_json.reset()
accelize_drm.clean_metering_env(cred_json, ws_admin)
activators.reset_coin()
drm_manager = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver.read_register_callback,
driver.write_register_callback,
async_cb.callback
)
try:
assert drm_manager.get('license_type') == 'Floating/Metering'
assert not drm_manager.get('license_status')
drm_manager.activate()
start = datetime.now()
assert drm_manager.get('drm_license_type') == 'Floating/Metering'
assert drm_manager.get('license_status')
assert drm_manager.get('metered_data') == 0
lic_duration = drm_manager.get('license_duration')
sleep(2)
activators[0].generate_coin(1000)
activators[0].check_coin(drm_manager.get('metered_data'))
# Wait right before expiration
wait_period = start + timedelta(seconds=3*lic_duration-3) - datetime.now()
sleep(wait_period.total_seconds())
assert drm_manager.get('license_status')
activators.autotest(is_activated=True)
sleep(5)
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
# Verify asynchronous callback has been called
assert async_cb.was_called
assert 'License Web Service error 400' in async_cb.message
assert 'DRM WS request failed' in async_cb.message
assert search(r'\\"Entitlement Limit Reached\\" with .+ for [email protected]', async_cb.message)
assert 'You have reached the maximum quantity of 1000. usage_unit for metered entitlement (licensed)' in async_cb.message
assert async_cb.errcode == accelize_drm.exceptions.DRMWSReqError.error_code
drm_manager.deactivate()
assert not drm_manager.get('license_status')
activators.autotest(is_activated=False)
finally:
drm_manager.deactivate()
print('Test background thread stops when limit is reached: PASS')
@pytest.mark.on_2_fpga
@pytest.mark.minimum
def test_floating_limits(accelize_drm, conf_json, cred_json, async_handler):
"""
Test an error is returned when the floating limit is reached
"""
driver0 = accelize_drm.pytest_fpga_driver[0]
driver1 = accelize_drm.pytest_fpga_driver[1]
async_cb0 = async_handler.create()
async_cb1 = async_handler.create()
cred_json.set_user('accelize_accelerator_test_04')
conf_json.reset()
async_cb0.reset()
drm_manager0 = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver0.read_register_callback,
driver0.write_register_callback,
async_cb0.callback
)
async_cb1.reset()
drm_manager1 = accelize_drm.DrmManager(
conf_json.path,
cred_json.path,
driver1.read_register_callback,
driver1.write_register_callback,
async_cb1.callback
)
assert not drm_manager0.get('license_status')
assert not drm_manager1.get('license_status')
try:
drm_manager0.activate()
assert drm_manager0.get('license_status')
with pytest.raises(accelize_drm.exceptions.DRMWSError) as excinfo:
drm_manager1.activate()
assert search(r'Timeout on License request after .+ attempts', str(excinfo.value)) is not None
assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSError.error_code
async_cb1.assert_NoError()
finally:
drm_manager0.deactivate()
assert not drm_manager0.get('license_status')
async_cb0.assert_NoError()
try:
drm_manager1.activate()
assert drm_manager1.get('license_status')
with pytest.raises(accelize_drm.exceptions.DRMWSError) as excinfo:
drm_manager0.activate()
assert search(r'Timeout on License request after .+ attempts', str(excinfo.value)) is not None
assert async_handler.get_error_code(str(excinfo.value)) == accelize_drm.exceptions.DRMWSError.error_code
async_cb0.assert_NoError()
finally:
drm_manager1.deactivate()
assert not drm_manager1.get('license_status')
async_cb1.assert_NoError()
| python |
"""An uncomplicated implementation of single-linked lists."""
from __future__ import annotations
from itertools import chain
from typing import List, Optional, Union, Iterator, Reversible, Final, Any
from csbasics.datastructure import DataStructure, ValueT, RefT
MAX_LENGTH_DISPLAY_LIST = 10
class _EOL:
pass
EOL = _EOL()
def hopn(node: LinkedList, n: int) -> LinkedList:
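    """Advance ``n`` nodes from ``node``; raise KeyError if the list ends first."""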
assert n >= 0
i = n
while i > 0 and node.data is not EOL: # type: ignore
i -= 1
node = node.tail # type: ignore
if i > 0:
raise KeyError(n)
return node
class LinkedList(DataStructure[int, ValueT]):
always_ordered: bool = False
data: Union[ValueT, _EOL]
tail: Optional[LinkedList[ValueT]]
def __iter__(self) -> Iterator[ValueT]:
node = self
while node.data is not EOL: # type: ignore
yield node.data # type: ignore
node = node.tail # type: ignore
def __init__(self, elems: Optional[Reversible[ValueT]] = None) -> None:
next_node = None
data: Union[ValueT, _EOL] = EOL
if elems is not None:
for e in chain(reversed(elems)):
node = self._make_node(data, next_node)
next_node = node
data = e
self.tail = next_node
self.data = data
@classmethod
def _make_node(
cls,
elem: Union[ValueT, _EOL],
tail: Optional[LinkedList[ValueT]],
) -> LinkedList[ValueT]:
assert (tail is None and elem is EOL) or \
(tail is not None and elem is not EOL)
node = cls()
node.data = elem
node.tail = tail
return node
@property
def length(self) -> int:
ll = self
i = 0
while (ll := ll.tail) is not None: # type: ignore
i += 1
return i
def insert(self, val: ValueT) -> int:
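        # O(1) head insert: clone the current head into a new node, then overwrite self in place.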
new_node = self._make_node(elem=self.data, tail=self.tail)
self.data = val
self.tail = new_node
return 0
def delete(self, pos: int) -> ValueT:
node: LinkedList[ValueT] = hopn(self, pos)
        if node.data is EOL:
raise KeyError(pos)
ret = node.data
node.data = node.tail.data # type: ignore
node.tail = node.tail.tail # type: ignore
return ret # type: ignore
def at(self, pos: int) -> ValueT:
node = hopn(self, pos)
        if node.data is EOL:
raise KeyError(pos)
return node.data # type: ignore
def search(self, val: Any) -> List[int]:
return [i for (i, e) in enumerate(self) if e == val]
def __str__(self) -> str:
node = self
elems = []
i = 0
while node.data is not EOL and i < MAX_LENGTH_DISPLAY_LIST:
elems.append(str(node.data))
node = node.tail # type: ignore
i += 1
if node.tail is not None and node.tail.data is not EOL:
elems[-1] = "…"
return f"LinkedList[{' → '.join(elems)}]"
| python |
import argparse
import os
from os import path
import glob
from google.cloud import storage
def copy_local_directory_to_gcs(local_path, bucket, gcs_path):
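    # Upload every file in the top level of local_path to gcs_path in the bucket
    # (glob without recursive=True does not descend into subdirectories).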
for local_file in glob.glob(local_path + '/**'):
if not os.path.isfile(local_file):
continue
remote_path = os.path.join(gcs_path, local_file[1 + len(local_path) :])
blob = bucket.blob(remote_path)
blob.upload_from_filename(local_file)
def deploy_model(modelPath, remoteGCSBucket):
print("Getting the model from {}".format(modelPath))
remote_path = 'tarsanmodel2'
storage_client = storage.Client()
bucket = storage_client.bucket(remoteGCSBucket)
copy_local_directory_to_gcs(modelPath, bucket, remote_path)
"""
print("Model files")
for fn in os.listdir(modelPath):
full_fn = os.path.join(modelPath, fn)
print("Copying {}".format(full_fn))
blob = bucket.blob(os.path.join(remote_path, fn))
blob.upload_from_filename(full_fn)
"""
return "gs://{}/{}".format(remoteGCSBucket, remote_path)
parser = argparse.ArgumentParser()
parser.add_argument('--modelPath', type=str, help='path to the model', required=True)
parser.add_argument('--DestGCSBucket', type=str, help='gcs bucket to copy the model to', required=True)
parser.add_argument('--endpointOutFile', type=str, help='path to output file with the endpoint URL in it', required=True)
parser.add_argument('--DestGCSPath', type=str, help='path to output file with the full gcs path of the model', required=True)
args = parser.parse_args()
print("Creating output dirs to return output variables")
os.makedirs(os.path.dirname(args.endpointOutFile), exist_ok=True)
os.makedirs(os.path.dirname(args.DestGCSPath), exist_ok=True)
model_url = deploy_model(args.modelPath, args.DestGCSBucket)
with open(args.endpointOutFile, "w") as f:
f.write(model_url)
with open(args.DestGCSPath, "w") as f:
f.write(model_url)
| python |
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pandas_datareader.data as web
import yfinance as yf
from talib import RSI, BBANDS
start = '2022-01-22'
end = '2022-04-21'
symbol = 'TSLA'
max_holding = 100
price = web.DataReader(name=symbol, data_source='quandl', start=start, end=end, api_key='-L1XxfzbhH1Zch7QzZ-y')
# price = yf.download("TSLA", start="2022-01-06", end="2022-04-21", interval="1d")
print(price)
price = price.iloc[::-1]
price = price.dropna()
close = price['Close'].values
up, mid, low = BBANDS(close, timeperiod=20, nbdevup=2, nbdevdn=2, matype=0)
rsi = RSI(close, timeperiod=14)
print("RSI (first 10 elements)\n", rsi[14:24])
def bbp(price):
    # %B indicator: where the close sits within the Bollinger Bands (0 = lower band, 1 = upper band)
    up, mid, low = BBANDS(close, timeperiod=20, nbdevup=2, nbdevdn=2, matype=0)
    bbp = (price['AdjClose'] - low) / (up - low)
    return bbp
# The signal and plotting code below reads these columns and the `index` alias,
# none of which the original script defined.
index = price.index
price['RSI'] = rsi
price['BB_up'], price['BB_mid'], price['BB_low'] = up, mid, low
price['BBP'] = bbp(price)
holdings = pd.DataFrame(index=index, data={'Holdings': np.array([np.nan] * index.shape[0])})
holdings.loc[((price['RSI'] < 30) & (price['BBP'] < 0)), 'Holdings'] = max_holding
holdings.loc[((price['RSI'] > 70) & (price['BBP'] > 1)), 'Holdings'] = 0
holdings.ffill(inplace=True)
holdings.fillna(0, inplace=True)
holdings['Order'] = holdings.diff()
holdings.dropna(inplace=True)
fig, (ax0, ax1, ax2) = plt.subplots(3, 1, sharex=True, figsize=(12, 8))
ax0.plot(index, price['AdjClose'], label='AdjClose')
ax0.set_xlabel('Date')
ax0.set_ylabel('AdjClose')
ax0.grid()
for day, holding in holdings.iterrows():
order = holding['Order']
if order > 0:
ax0.scatter(x=day, y=price.loc[day, 'AdjClose'], color='green')
elif order < 0:
ax0.scatter(x=day, y=price.loc[day, 'AdjClose'], color='red')
ax1.plot(index, price['RSI'], label='RSI')
ax1.fill_between(index, y1=30, y2=70, color='#adccff', alpha=0.3)
ax1.set_xlabel('Date')
ax1.set_ylabel('RSI')
ax1.grid()
ax2.plot(index, price['BB_up'], label='BB_up')
ax2.plot(index, price['AdjClose'], label='AdjClose')
ax2.plot(index, price['BB_low'], label='BB_low')
ax2.fill_between(index, y1=price['BB_low'], y2=price['BB_up'], color='#adccff', alpha=0.3)
ax2.set_xlabel('Date')
ax2.set_ylabel('Bollinger Bands')
ax2.grid()
fig.tight_layout()
plt.show()
| python |
def julian_is_leap(year):
return year % 4 == 0
def gregorian_is_leap(year):
return year % 400 == 0 or (year % 4 == 0 and year % 100 != 0)
def solve(year):
month = '09'
day = '13'
if year <= 1917:
is_leap_year = julian_is_leap(year)
elif year == 1918:
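        # 1918 is the year of the Julian-to-Gregorian switch, which dropped 13 days,
        # so the 256th day of that year fell on the 26th.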
day = '26'
is_leap_year = False
else:
is_leap_year = gregorian_is_leap(year)
if is_leap_year:
day = '12'
return '{}.{}.{}'.format(
day,
month,
year,
)
input_year = int(input().strip())
print(solve(input_year))
| python |
import random
class BotPlayer:
""" Your custom player code goes here, but should implement
all of these functions. You are welcome to implement
additional helper functions. You may wish to look at board.py
to see what functions are available to you.
"""
def __init__(self, gui, color="black"):
self.color = color
self.gui = gui
def get_current_board(self, board):
self.current_board = board
def get_move(self):
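        # Positional weight matrix for the 8x8 Othello board: corners are most
        # valuable, squares adjacent to corners are penalized.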
score = [[100, -20, 10, 5, 5, 10, -20, 100],\
[-20, -50, -2, -2, -2, -2, -50, -20],\
[ 10, -2, -1, -1, -1, -1, -2, 10],\
[ 5, -2, -1, -1, -1, -1, -2, 5],\
[ 5, -2, -1, -1, -1, -1, -2, 5],\
[ 10, -2, -1, -1, -1, -1, -2, 10],\
[-20, -50, -2, -2, -2, -2, -50, -20],\
[100, -20, 10, 5, 5, 10, -20, 100]]
moves = self.current_board.get_valid_moves(self.color)
values = [score[move[0]][move[1]] for move in moves]
self.current_board.apply_move(moves[values.index(max(values))], self.color)
return 0, self.current_board
| python |
import base64
from email.mime.text import MIMEText
import httplib2
from django.core.mail import EmailMessage
from django.core.mail.backends.base import BaseEmailBackend
from django.conf import settings
from googleapiclient import errors
class GMail(BaseEmailBackend):
def send_messages(self, email_messages):
r_http = settings.DELEGATED_CREDENTIALS.authorize(httplib2.Http())
for m in email_messages:
message = MIMEText(m.body)
message['to'] = ','.join(m.to)
message['from'] = m.from_email
message['subject'] = m.subject
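            # NOTE: urlsafe_b64encode expects bytes, so on Python 3 this call needs
            # message.as_string().encode() (the code as written targets Python 2).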
settings.GMAIL_SERVICE.users().messages().send(userId='me', body={'raw': base64.urlsafe_b64encode(message.as_string())}).execute(http=r_http)
# def send_email():
# m = EmailMessage(subject='Test Email',
# body='Test Email',
# to=['[email protected]'],
# from_email='[email protected]')
#
# message = MIMEText(m.body)
# message['to'] = ','.join(m.to)
# message['from'] = m.from_email
# message['subject'] = m.subject
# r_http = settings.DELEGATED_CREDENTIALS.authorize(httplib2.Http())
#
# try:
# message = (settings.GMAIL_SERVICE.users().messages().send(userId='me', body={'raw': base64.urlsafe_b64encode(message.as_string())})
# .execute(http=r_http))
# print 'Message Id: %s' % message['id']
# return message
# except errors.HttpError, error:
# print 'An error occurred: %s' % error
| python |