id (stringlengths 1–8) | text (stringlengths 6–1.05M) | dataset_id (stringclasses: 1 value)
---|---|---|
374742 | class Solution:
def canIwin(self, maxint: int, desiredtotal: int) -> bool:
        if maxint * (maxint + 1) // 2 < desiredtotal:
return False
cache = dict()
def dp(running_total, used):
if used in cache:
return cache[used]
for k in range(maxint, 0, -1):
if used & (1 << k):
continue
if running_total + k >= desiredtotal:
cache[used] = True
return True
if not dp(running_total + k, used | 1 << k):
cache[used] = True
return True
cache[used] = False
return False
return dp(0, 0)
def __init__(self):
maxint = 7
desiredtotal = 9
print(self.canIwin(maxint, desiredtotal))
Solution()
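# Hedged usage sketch (added; not part of the original). Expected values are
# the well-known LeetCode "Can I Win" examples:
# s = Solution()            # note: __init__ above already runs the 7/9 demo
# print(s.canIwin(10, 11))  # False: the opponent can always reach 11 first
# print(s.canIwin(10, 0))   # True: the target is met immediately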
| StarcoderdataPython |
1806194 | """
Structured information on a temporary state of a tissue sample.
"""
# this file was auto-generated
from datetime import date, datetime
from fairgraph.base_v3 import KGObjectV3, IRI
from fairgraph.fields import Field
class TissueSampleState(KGObjectV3):
"""
Structured information on a temporary state of a tissue sample.
"""
default_space = "dataset"
type = ["https://openminds.ebrains.eu/core/TissueSampleState"]
context = {
"schema": "http://schema.org/",
"kg": "https://kg.ebrains.eu/api/instances/",
"vocab": "https://openminds.ebrains.eu/vocab/",
"terms": "https://openminds.ebrains.eu/controlledTerms/",
"core": "https://openminds.ebrains.eu/core/"
}
fields = [
Field("additional_remarks", str, "vocab:additionalRemarks", multiple=False, required=False,
doc="Mention of what deserves additional attention or notice."),
Field("age", ["openminds.core.QuantitativeValue", "openminds.core.QuantitativeValueRange"], "vocab:age", multiple=False, required=False,
doc="Time of life or existence at which some particular qualification, capacity or event arises."),
Field("lookup_label", str, "vocab:lookupLabel", multiple=False, required=False,
doc="no description available"),
Field("pathologys", ["openminds.controlledterms.Disease", "openminds.controlledterms.DiseaseModel"], "vocab:pathology", multiple=True, required=False,
doc="Structural and functional deviation from the normal that constitutes a disease or characterizes a particular disease."),
Field("weight", ["openminds.core.QuantitativeValue", "openminds.core.QuantitativeValueRange"], "vocab:weight", multiple=False, required=False,
doc="Amount that a thing or being weighs."),
]
existence_query_fields = ('lookup_label',)
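# Hypothetical usage sketch (added for illustration; it assumes the fairgraph
# convention that Field names are accepted as keyword arguments):
# state = TissueSampleState(lookup_label="sample-01-fixed",
#                           additional_remarks="state after fixation")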
| StarcoderdataPython |
6450271 | from bs4 import BeautifulSoup
import dateutil.parser
import requests
html = requests.get('https://www.oasis-open.org/resources/open-repositories/cla/view-individual').text
# with open("oasis.html", 'w') as f:
# f.write(html)
# with open("oasis.html") as f:
# html = f.read()
soup = BeautifulSoup(html, "html.parser")
form = soup.find(id='oasis-cla-individual-page')
table = form.div.table
rows = [x for x in table.tbody.children if hasattr(x, 'tag') and x.name == 'tr']
for row in rows:
list_items = list(row.td.ul.find_all('li'))
username_li = list(list_items[1].children)
if len(username_li) > 1:
username = username_li[1]
else:
username = ""
# Strip off last part if username is a URL
    username = username.split('/')[-1]
start_li = list(list_items[4].children)
if len(start_li) > 1:
start = start_li[1]
start = dateutil.parser.parse(start).isoformat()
else:
start = "???"
end_li = list(list_items[5].children)
if len(end_li) > 1:
end = end_li[1]
end = dateutil.parser.parse(end).isoformat()
else:
end = "present"
print("{} ({} - {})".format(username, start, end))
| StarcoderdataPython |
356628 | <reponame>duttashi/applied-machine-learning
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 23 22:00:08 2020
@author: Ashish
"""
# import sys
# sys.path.append('../') # use the sys.path.append() to call functions from another directory
from helpful_functions.eda_functions import print_data_head, find_null_columns,data_with_missing_vals
from helpful_functions.eda_functions import missing_data_plot
# create a dataframe with some missing values
df = data_with_missing_vals()
print(df)
x = find_null_columns(df)
y = print_data_head(df)
print("Data head are\n",y)
# y = missing_data_plot(df)
print("Null columns are:",x)
# missing_data_plot(df)
fig = missing_data_plot(df)
fig | StarcoderdataPython |
6574462 | <filename>Item47.py
"""
Item 47: Use __getattr__, __getattribute__, and __set__attr__ for Lazy Attributes
Use __getattr__ and __setattr__ to lazily load and save attributes for an object.
Understand __getattr__ only gets called when accessing a missing attribute while
__getattribute__ gets called every time any attribute is accessed.
Use methods from super(), the object class, to access instance attributes.
"""
#!/usr/bin/env PYTHONHASHSEED=1234 python3
# Reproduce book environment
import random
random.seed(1234)
import logging
from pprint import pprint
from sys import stdout as STDOUT
# Write all output to a temporary directory
import atexit
import gc
import io
import os
import tempfile
TEST_DIR = tempfile.TemporaryDirectory()
atexit.register(TEST_DIR.cleanup)
# Make sure Windows processes exit cleanly
OLD_CWD = os.getcwd()
atexit.register(lambda: os.chdir(OLD_CWD))
os.chdir(TEST_DIR.name)
def close_open_files():
everything = gc.get_objects()
for obj in everything:
if isinstance(obj, io.IOBase):
obj.close()
atexit.register(close_open_files)
# Example 1
class LazyRecord:
def __init__(self):
self.exists = 5
def __getattr__(self, name):
value = f'Value for {name}'
setattr(self, name, value)
return value
# Example 2: This example illustrates that if an attribute can't be found ('foo' in this example),
# the __getattr__ method will be called. This acts as a hook that allows the missing attribute to be defined.
data = LazyRecord()
print('Before:', data.__dict__)
print('foo: ', data.foo)
print('After: ', data.__dict__)
# Example 3
class LoggingLazyRecord(LazyRecord):
def __getattr__(self, name):
print(f'* Called __getattr__({name!r}), '
f'populating instance dictionary')
result = super().__getattr__(name)
print(f'* Returning {result!r}')
return result
# Added code here to show that __getattr__ is called once for each new attribute.
# After the first call the attribute is added to the dictionary and __getattr__
# is no longer used.
data = LoggingLazyRecord()
print('exists: ', data.exists)
print('First foo: ', data.foo1)
print('First foo again', data.foo1)
print('Second foo: ', data.foo2)
print('Second foo again: ', data.foo2)
print('After: ', data.__dict__)
# Example 4: The __getattribute__ object hook is called every time an attribute is accessed
# on an object, even when it does not exist in the attribute dictionary.
# This allows the code to check global transaction states.
# The operation can impact performance.
class ValidatingRecord:
def __init__(self):
self.exists = 5
def __getattribute__(self, name):
print(f'* Called __getattribute__({name!r})')
try:
value = super().__getattribute__(name)
print(f'* Found {name!r}, returning {value!r}')
return value
except AttributeError:
value = f'Value for {name}'
print(f'* Setting {name!r} to {value!r}')
setattr(self, name, value)
return value
data = ValidatingRecord()
print('exists: ', data.exists)
print('First foo: ', data.foo)
print('Second foo: ', data.foo)
# Example 5: Raise an exception if a property should not exist.
try:
class MissingPropertyRecord:
def __getattr__(self, name):
if name == 'bad_name':
raise AttributeError(f'{name} is missing')
value = f'Value for {name}'
setattr(self, name, value)
return value
data = MissingPropertyRecord()
assert data.foo == 'Value for foo' # Test this works
data.bad_name
except:
logging.exception('Expected')
else:
assert False
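# Note (added): the try/except/else pattern above treats the AttributeError as
# the expected path; reaching the else branch (no exception raised) would fail
# the example via assert False.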
# Example 6: Use the hasattr built-in function to determine when a property exists
# and the getattr built-in function to retrieve property values
data = LoggingLazyRecord() # Implements __getattr__
print('Before: ', data.__dict__)
print('Has first foo: ', hasattr(data, 'foo'))
print('After: ', data.__dict__)
print('Has second foo: ', hasattr(data, 'foo'))
# Example 7: When __getattribute__ is implemented the class will call the method
# each time hasattr or getattr is used with an instance.
data = ValidatingRecord() # Implements __getattribute__
print('Has first foo: ', hasattr(data, 'foo'))
print('Has second foo: ', hasattr(data, 'foo'))
print('Has foobar: ', hasattr(data, 'foobar'))
print('Has first foo: ', hasattr(data, 'foo'))
print('First foobar: ', data.foobar)
print('Has first foobar: ', hasattr(data, 'foobar'))
print('Dict after: ', data.__dict__)
print(20*'*')
# Example 8: Since the __setattr__ method is always called when an attribute is assigned to an instance
# it can be used as a hook to push data back to a database when values are assigned to the instance.
class SavingRecord:
def __setattr__(self, name, value):
# Save some data for the record
pass
super().__setattr__(name, value)
# Example 9: Here __setattr__ is called on each attribute assignment.
class LoggingSavingRecord(SavingRecord):
def __setattr__(self, name, value):
print(f'* Called __setattr__({name!r}, {value!r})')
super().__setattr__(name, value)
data = LoggingSavingRecord()
print('Before: ', data.__dict__)
data.foo = 5
print('After: ', data.__dict__)
data.foo = 7
print('Finally:', data.__dict__)
# Example 10: The __getattribute__ method here causes an infinite recursion since the method
# accesses self.data which cause _getattribute__ to run again and again.
class BrokenDictionaryRecord:
def __init__(self, data):
self._data = {}
def __getattribute__(self, name):
print(f'* Called __getattribute__({name!r})')
return self._data[name]
# Example 11
try:
data = BrokenDictionaryRecord({'foo': 3})
data.foo
except:
logging.exception('Expected')
else:
assert False
# Example 12: To avoid the recursion use super().__getattribute__ .
class DictionaryRecord:
def __init__(self, data):
self._data = data
def __getattribute__(self, name):
# Prevent weird interactions with isinstance() used
# by example code harness.
if name == '__class__':
return DictionaryRecord
print(f'* Called __getattribute__({name!r})')
data_dict = super().__getattribute__('_data')
return data_dict[name]
data = DictionaryRecord({'foo': 3})
print('foo: ', data.foo) | StarcoderdataPython |
3306338 | <gh_stars>10-100
import sys
import time
import struct
import serial
from hexdump import hexdump
from tqdm import tqdm
SERIALPORT = '/dev/ttyUSB0'
BAUDRATE = 9600
DEBUG = False
def handshake(ser):
print('[HANDSHAKE]')
ser.reset_input_buffer()
ser.reset_output_buffer()
ser.write(b'\x00' * 30)
get_response(ser, b'\x00', no_data=True)
send_request(ser, b'\x55')
get_response(ser, b'\xE6', no_data=True)
def get_checksum(req):
chksum = -sum(req)
return bytes([chksum & 0xFF])
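# Worked example of the checksum (added sketch): sum(b'\x10\x04') is 0x14, so
# the checksum byte is (-0x14) & 0xFF == 0xEC, making the request bytes sum to
# 0 modulo 256:
# assert get_checksum(b'\x10\x04') == b'\xec'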
def send_request(ser, id, data=None):
if DEBUG: print('TX --->')
req = id
if data and len(data):
req += bytes([len(data)]) + data
req += get_checksum(req)
if DEBUG: hexdump(req)
ser.write(req)
def get_response(ser, id, no_data=False, no_checksum=False, size_len=1):
res = ser.read()
assert len(res) == 1, 'TIMEOUT!'
if res != id:
if DEBUG: print('RX <---')
if DEBUG: hexdump(res + ser.read())
raise Exception('ERROR RESPONSE!')
try:
if no_data:
return None
size = ser.read(size_len)
res += size
assert len(size) == size_len, 'TIMEOUT!'
if size_len == 1:
byte_cnt = size[0]
elif size_len == 2:
byte_cnt = struct.unpack('!H', size)[0]
elif size_len == 4:
byte_cnt = struct.unpack('!I', size)[0]
else:
raise Exception("invalid size_len: {}".format(size_len))
data = ser.read(byte_cnt)
res += data
assert len(data) == byte_cnt, 'TIMEOUT!'
if no_checksum:
return data
expect_checksum = get_checksum(res)
actual_checksum = ser.read()
res += actual_checksum
assert len(actual_checksum) == 1, 'TIMEOUT!'
assert expect_checksum == actual_checksum, 'INVALID CHECKSUM!'
return data
finally:
if DEBUG: print('RX <---')
if DEBUG: hexdump(res)
def device_inquiry(ser):
print('[DEVICE INQUIRY]')
send_request(ser, b'\x20')
data = get_response(ser, b'\x30')
devices = list()
count = data[0]
idx = 1
for i in range(count):
char_count = data[idx]
devices.append(data[idx+1:idx+5]) # device code is 4 bytes
idx += char_count # skip product code
return devices
def device_select(ser, device):
print('[DEVICE SELECT] device={}'.format(device))
send_request(ser, b'\x10', device)
get_response(ser, b'\x06', no_data=True)
def clock_inquiry(ser):
print('[CLOCK INQUIRY]')
send_request(ser, b'\x21')
data = get_response(ser, b'\x31')
clocks = list()
for i in range(len(data)):
clocks.append(data[i])
return clocks
def clock_select(ser, clock):
print('[CLOCK SELECT] clock={}'.format(clock))
send_request(ser, b'\x11', bytes([clock]))
get_response(ser, b'\x06', no_data=True)
def user_boot_mat_inquiry(ser):
print('[USER BOOT MEMORY ADDR INQUIRY]')
send_request(ser, b'\x24')
data = get_response(ser, b'\x34')
mat_count = data[0]
mat_ranges = list()
for i in range(1, len(data), 8):
mat_ranges.append({
'start_addr': struct.unpack('!I', data[i:i+4])[0],
'end_addr': struct.unpack('!I', data[i+4:i+8])[0],
})
return mat_ranges
def user_mat_inquiry(ser):
print('[USER MEMORY ADDR INQUIRY]')
send_request(ser, b'\x25')
data = get_response(ser, b'\x35')
mat_count = data[0]
mat_ranges = list()
for i in range(1, len(data), 8):
mat_ranges.append({
'start_addr': struct.unpack('!I', data[i:i+4])[0],
'end_addr': struct.unpack('!I', data[i+4:i+8])[0],
})
return mat_ranges
def multiplication_ratio_inquiry(ser):
print('[MULTIPLICATION RATIO INQUIRY]')
send_request(ser, b'\x22')
data = get_response(ser, b'\x32')
clock_type_count = data[0]
clock_multi_ratios = list()
idx = 1
for i in range(clock_type_count):
ratio_count = data[idx]
idx += 1
ratios = data[idx:idx+ratio_count]
clock_multi_ratios.append(ratios)
idx += ratio_count
return clock_multi_ratios
def operating_freq_inquiry(ser):
print('[OPERATING FREQUENCY INQUIRY]')
send_request(ser, b'\x23')
data = get_response(ser, b'\x33')
clock_type_count = data[0]
clock_freq_ranges = list()
for i in range(1, 1+clock_type_count*4, 4):
clock_freq_ranges.append({
'min_mhz': struct.unpack('!H', data[i:i+2])[0] / 100,
'max_mhz': struct.unpack('!H', data[i+2:i+4])[0] / 100,
})
return clock_freq_ranges
def bitrate_select(ser, baud_rate, input_freq_mhz, clock_count, ratio1, ratio2):
print('[BITRATE SELECT] baud_rate={} input_freq_mhz={} clock_count={} ratio1={} ratio2={}'.format(baud_rate, input_freq_mhz, clock_count, ratio1, ratio2))
send_request(ser, b'\x3F', struct.pack('!H', int(baud_rate/100)) + struct.pack('!H', int(input_freq_mhz*100)) + bytes([clock_count, ratio1, ratio2]))
get_response(ser, b'\x06', no_data=True)
# wait 1 bit time step before changing
time.sleep(1/ser.baudrate)
ser.baudrate = baud_rate
# confirmation
send_request(ser, b'\x06')
get_response(ser, b'\x06', no_data=True)
def keycode_check(ser, key_code):
print('[KEYCODE CHECK]')
# transition to key-code determination state
send_request(ser, b'\x40')
get_response(ser, b'\x16', no_data=True)
# perform key-code check
send_request(ser, b'\x60', key_code)
get_response(ser, b'\x26', no_data=True)
def status_inquiry(ser):
print('[STATUS INQUIRY]')
send_request(ser, b'\x4F')
data = get_response(ser, b'\x5F', no_checksum=True)
return {
"status": data[0],
"error": data[1],
}
def read_memory(ser, mem_area, start, end, block_size):
print('[READ MEMORY] area={} start={} end={} block_size={}'.format(mem_area, start, end, block_size))
data = b''
for i in tqdm(range(start, end, block_size)):
send_request(ser, b'\x52', bytes([mem_area]) + struct.pack('!I', i) + struct.pack('!I', block_size))
data += get_response(ser, b'\x52', size_len=4)
return data
def user_boot_mat_checksum_inquiry(ser):
print('[USER BOOT MEMORY CHECKSUM INQUIRY]')
send_request(ser, b'\x4A')
data = get_response(ser, b'\x5A')
return struct.unpack('!I', data)[0]
def user_mat_checksum_inquiry(ser):
print('[USER MEMORY CHECKSUM INQUIRY]')
send_request(ser, b'\x4B')
data = get_response(ser, b'\x5B')
return struct.unpack('!I', data)[0]
if __name__ == "__main__":
# example usage
with serial.Serial(SERIALPORT, BAUDRATE, timeout=0.2) as ser:
handshake(ser)
devices = device_inquiry(ser)
#print("devices: {}".format(devices))
device_select(ser, devices[0])
clocks = clock_inquiry(ser)
#print("clocks: {}".format(clocks))
clock_select(ser, clocks[0])
multi_ratios = multiplication_ratio_inquiry(ser)
#print("multiplication ratios: {}".format(multi_ratios))
operating_freqs = operating_freq_inquiry(ser)
#print("operating frequencies: {}".format(operating_freqs))
ratio1 = multi_ratios[0][0]
ratio2 = multi_ratios[1][0]
base1 = operating_freqs[0]['max_mhz'] / ratio1
base2 = operating_freqs[1]['max_mhz'] / ratio2
assert base1 == base2, "failed to find base clock for both multipliers"
bitrate_select(ser, BAUDRATE, base1, 2, ratio1, ratio2)
user_boot_mat = user_boot_mat_inquiry(ser)
#print("user boot memory area: {}".format(user_boot_mat))
user_mat = user_mat_inquiry(ser)
#print("user memory area: {}".format(user_mat))
# any key code is accepted if the key code has not been set
keycode = b'\x00' * 16
keycode_check(ser, keycode)
user_boot_mat_checksum = user_boot_mat_checksum_inquiry(ser)
#print("user boot memory checksum: {}".format(user_boot_checksum))
user_mat_checksum = user_mat_checksum_inquiry(ser)
#print("user memory checksum: {}".format(user_mat_checksum))
mem_area = 0 # user boot memory area
start_addr = user_boot_mat[0]['start_addr']
end_addr = user_boot_mat[0]['end_addr']
data = read_memory(ser, mem_area, start_addr, end_addr+1, 0x40)
with open('user_boot.bin', 'wb') as f:
f.write(data)
checksum = sum(data) & 0xFFFFFFFF
assert user_boot_mat_checksum == checksum, f"failed boot checksum validation: {user_boot_mat_checksum} != {checksum}"
mem_area = 1 # user memory area
start_addr = user_mat[0]['start_addr']
end_addr = user_mat[0]['end_addr']
data = read_memory(ser, mem_area, start_addr, end_addr+1, 0x40)
with open('user.bin', 'wb') as f:
f.write(data)
checksum = sum(data + keycode) & 0xFFFFFFFF
assert user_mat_checksum == checksum, f"failed user checksum validation (not sure why this fails for some ecus): {user_mat_checksum} != {checksum}"
| StarcoderdataPython |
11283728 | '''Given an integer,N , perform the following conditional actions:
If N is odd, print Weird
If N is even and in the inclusive range of (2,5) , print Not Weird
If N is even and in the inclusive range of (6,20), print Weird
If N is even and greater than 20, print Not Weird'''
#!/bin/python3
N = int(input())
if(N%2!=0):
print("Weird")
elif(N%2==0):
    if(N in range(2,6)):
print("Not Weird")
elif(N in range(6,21)):
print("Weird")
elif(N>20):
print("Not Weird")
| StarcoderdataPython |
11327449 | """
"""
import sys
import numpy as np
import os
import time
from dnnv.nn import parse as parse_network
from dnnv.properties import parse as parse_property
from pathlib import Path
from typing import Dict, List, Optional
from .cli import parse_args
from .falsifier import falsify
from .utils import initialize_logging, set_random_seed
from dnnv.nn.graph import OperationGraph
from dnnv.nn.utils import TensorDetails
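# Note (added comment): the block below monkey-patches OperationGraph.input_details
# so that any negative (i.e. dynamic) input dimension is replaced with 1,
# effectively pinning the batch size before falsification; the computed details
# are cached on the instance.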
orig_input_details = OperationGraph.input_details
@property
def new_input_details(self):
if self._input_details is None:
_input_details = orig_input_details.fget(self)
self._input_details = tuple(
TensorDetails(tuple(i if i >= 0 else 1 for i in d.shape), d.dtype)
for d in _input_details
)
return self._input_details
OperationGraph.input_details = new_input_details
def main(
property: Path,
networks: Dict[str, Path],
prop_format: Optional[str] = None,
save_violation: Optional[Path] = None,
extra_args: Optional[List[str]] = None,
**kwargs,
):
# os.setpgrp()
sys.setrecursionlimit(5000)
phi = parse_property(property, format=prop_format, args=extra_args)
print("FALSIFYING:", phi)
for name, network in networks.items():
print(f"PARSING NETWORK: {network.__str__()}")
dnn = parse_network(network, net_format="onnx")
if kwargs["debug"]:
print(f"Network {name}:")
dnn.pprint()
print()
print(f"CONCRETIZING: {phi}")
phi.concretize(**{name: dnn})
print()
print(f"FALSIFYING...")
start_t = time.time()
result = falsify(phi, **kwargs)
end_t = time.time()
print("dnnf")
if result["violation"] is not None:
print(" result: unsafe")
if save_violation is not None:
np.save(save_violation, result["violation"])
else:
print(" result: unknown")
falsification_time = result["time"]
print(f" falsification time: {falsification_time:.4f}")
print(f" total time: {end_t - start_t:.4f}", flush=True)
def __main__():
args, extra_args = parse_args()
set_random_seed(args.seed)
logger = initialize_logging(
__package__, verbose=args.verbose, quiet=args.quiet, debug=args.debug
)
main(**vars(args), extra_args=extra_args)
if extra_args is not None and len(extra_args) > 0:
logger.warning("Unused arguments: %r", extra_args)
if __name__ == "__main__":
__main__()
| StarcoderdataPython |
6442 | <filename>python/testData/editing/enterInIncompleteTupleLiteral.after.py
xs = ('foo', 'bar',
'baz'<caret> | StarcoderdataPython |
309833 | <filename>dynamicmodel/models.py
from django.db import models
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.core.validators import RegexValidator
from .fields import JSONField
from django.core.exceptions import ValidationError
from django.core.cache import cache
class DynamicModel(models.Model):
class Meta:
abstract = True
extra_fields = JSONField(editable=False, default="{}")
def __init__(self, *args, **kwargs):
self._schema = None
super(DynamicModel, self).__init__(*args, **kwargs)
self.get_schema()
self._sync_with_schema()
def _sync_with_schema(self):
schema_extra_fields = self.get_extra_fields_names()
clear_field = [field_name for field_name in self.extra_fields
if field_name not in schema_extra_fields]
new_field = [field_name for field_name in schema_extra_fields
if field_name not in self.extra_fields]
for el in clear_field:
del self.extra_fields[el]
for el in new_field:
self.extra_fields[el] = None
def get_extra_field_value(self, key):
if key in self.extra_fields:
return self.extra_fields[key]
else:
return None
def get_extra_fields(self):
_schema = self.get_schema()
for field in _schema.fields.all():
yield field.name, field.verbose_name, field.field_type, \
field.required, field.extra, self.get_extra_field_value(
field.name)
def get_extra_fields_names(self):
return [name for name, verbose_name, field_type, required, extra,
value in self.get_extra_fields()]
def get_schema(self):
type_value = ''
if self.get_schema_type_descriptor():
type_value = getattr(self, self.get_schema_type_descriptor())
return DynamicSchema.get_for_model(self, type_value)
@staticmethod
def get_schema_type_descriptor():
return ''
@staticmethod
def get_schema_type_choices():
return []
def __getattr__(self, attr_name):
if attr_name in self.extra_fields:
return self.extra_fields[attr_name]
else:
return getattr(super(DynamicModel, self), attr_name)
def __setattr__(self, attr_name, value):
if hasattr(self, 'extra_fields') and \
attr_name not in [el.name for el in self._meta.fields] and \
attr_name not in ['_schema'] and \
attr_name in self.get_extra_fields_names():
self.extra_fields[attr_name] = value
super(DynamicModel, self).__setattr__(attr_name, value)
class DynamicForm(forms.ModelForm):
field_mapping = [
('IntegerField', {'field': forms.IntegerField}),
('CharField', {'field': forms.CharField}),
('TextField', {'field': forms.CharField, 'widget': forms.Textarea}),
('EmailField', {'field': forms.EmailField}),
('Dropdown', {'field': forms.CharField, 'widget': forms.Select}),
('NullBooleanField', {'field': forms.NullBooleanField}),
('BooleanField', {'field': forms.BooleanField}),
]
def __init__(self, *args, **kwargs):
super(DynamicForm, self).__init__(*args, **kwargs)
if not isinstance(self.instance, DynamicModel):
raise ValueError(
"DynamicForm.Meta.model must be inherited from DynamicModel")
if self.instance and hasattr(self.instance, 'get_extra_fields'):
for name, verbose_name, field_type, req, extra, value in \
self.instance.get_extra_fields():
field_mapping_case = dict(self.field_mapping)[field_type]
widget = field_mapping_case.get('widget')
if extra and extra.get('choices'):
widget = widget(choices=extra.get('choices'))
field_kwargs = {
'required': req,
'widget': widget,
'initial': self.instance.get_extra_field_value(name),
'label': verbose_name.capitalize() if verbose_name else
" ".join(name.split("_")).capitalize(),
}
self.fields[name] = field_mapping_case['field'](**field_kwargs)
def save(self, force_insert=False, force_update=False, commit=True):
m = super(DynamicForm, self).save(commit=False)
extra_fields = {}
extra_fields_names = [name for name, verbose_name, field_type, req,
extra, value in self.instance.get_extra_fields()]
for cleaned_key in self.cleaned_data.keys():
if cleaned_key in extra_fields_names:
extra_fields[cleaned_key] = self.cleaned_data[cleaned_key]
m.extra_fields = extra_fields
if commit:
m.save()
return m
class DynamicSchemaQuerySet(models.query.QuerySet):
def delete(self, *args, **kwargs):
cases = []
for el in list(self):
tpl = (el.model, el.type_value)
if tpl not in cases:
cases.append(tpl)
super(DynamicSchemaQuerySet, self).delete(*args, **kwargs)
for el in cases:
cache_key = DynamicSchema.get_cache_key_static(
el[0].model_class(), el[1])
cache.set(cache_key, None)
return self
class DynamicSchemaManager(models.Manager):
def get_query_set(self):
return DynamicSchemaQuerySet(self.model, using=self._db)
def get_for_model(self, model_class, type_value=''):
cache_key = DynamicSchema.get_cache_key_static(model_class, type_value)
cache_value = cache.get(cache_key)
if cache_value is not None:
return cache_value
else:
return DynamicSchema.renew_cache_static(model_class, type_value)
class DynamicSchema(models.Model):
class Meta:
unique_together = ('model', 'type_value')
objects = DynamicSchemaManager()
model = models.ForeignKey(ContentType)
type_value = models.CharField(max_length=100, null=True, blank=True)
def __unicode__(self):
return "%s%s" % (self.model,
" (%s)" % self.type_value if self.type_value else '')
def add_field(self, name, type):
return self.fields.create(schema=self, name=name, field_type=type)
def remove_field(self, name):
return self.fields.filter(name=name).delete()
@classmethod
def get_for_model(cls, model_class, type_value=''):
return cls.objects.get_for_model(model_class, type_value)
@classmethod
def get_cache_key_static(cls, model_class, type_value):
return "%s-%s-%s-%s" % ('DYNAMICMODEL_SCHEMA_CACHE_KEY',
model_class._meta.app_label, model_class._meta.module_name,
type_value)
def get_cache_key(self):
return self.get_cache_key_static(self.model.model_class(),
self.type_value)
@classmethod
def renew_cache_static(cls, model_class, type_value):
cache_key = cls.get_cache_key_static(model_class, type_value)
if not cls.objects.filter(type_value=type_value,
model=ContentType.objects.get_for_model(model_class)).exists():
cls.objects.create(type_value=type_value,
model=ContentType.objects.get_for_model(model_class))
schema = cls.objects.prefetch_related('fields')\
.get(
type_value=type_value,
model=ContentType.objects.get_for_model(model_class))
cache.set(cache_key, schema)
return schema
def renew_cache(self):
return self.renew_cache_static(self.model.model_class(),
self.type_value)
# overrides
def save(self, *args, **kwargs):
super(DynamicSchema, self).save(*args, **kwargs)
self.renew_cache()
def delete(self, *args, **kwargs):
super(DynamicSchema, self).delete(*args, **kwargs)
cache.set(self.get_cache_key(), None)
return self
class DynamicSchemaFieldQuerySet(models.query.QuerySet):
def delete(self):
cache_el = None
for el in self:
cache_el = el.delete(renew=False)
if cache_el:
cache_el.renew_cache()
class DynamicSchemaFieldManager(models.Manager):
def get_query_set(self):
return DynamicSchemaFieldQuerySet(self.model, using=self._db)
class DynamicSchemaField(models.Model):
FIELD_TYPES = [
('IntegerField', 'Integer number field'),
('CharField', 'One line of text'),
('TextField', 'Multiline text input'),
('EmailField', 'Email'),
('Dropdown', 'Dropdown'),
('NullBooleanField', 'Yes / No / Unknown field'),
('BooleanField', 'Yes / No field'),
]
class Meta:
unique_together = ('schema', 'name')
objects = DynamicSchemaFieldManager()
schema = models.ForeignKey(DynamicSchema, related_name='fields')
name = models.CharField(max_length=100, validators=[RegexValidator(r'^[\w]+$',
message="Name should contain only alphanumeric characters and underscores.")])
verbose_name = models.CharField(max_length=100, null=True, blank=True)
field_type = models.CharField(max_length=100, choices=FIELD_TYPES)
required = models.BooleanField(default=False)
extra = JSONField(default='{}')
@property
def display_label(self):
ret_val = self.verbose_name or self.name.replace('_', ' ')
return ret_val.capitalize()
def save(self, *args, **kwargs):
self.clean()
super(DynamicSchemaField, self).save(*args, **kwargs)
self.renew_cache()
def delete(self, *args, **kwargs):
renew = kwargs.pop('renew', True)
super(DynamicSchemaField, self).delete(*args, **kwargs)
if renew:
self.renew_cache()
return self
def renew_cache(self):
DynamicSchema.renew_cache_static(self.schema.model.model_class(),
self.schema.type_value)
def clean(self):
if self.field_type not in dict(self.FIELD_TYPES).keys():
raise ValidationError("Wrong field_type")
if not self.id:
if DynamicSchemaField.objects.filter(schema=self.schema,
name=self.name).exists():
raise ValidationError('Field with name "%s" already exists.' %
self.name)
return
old_model = DynamicSchemaField.objects.get(pk=self.id)
fields = [f.name for f in DynamicSchemaField._meta.fields]
fields.remove('verbose_name')
fields.remove('required')
fields.remove('extra')
for field_name in fields:
if old_model.__dict__.get(field_name) != self.__dict__.get(field_name):
raise ValidationError("%s value cannot be modified" % field_name)
def __unicode__(self):
return "%s - %s" % (self.schema, self.name)
| StarcoderdataPython |
58569 | """
This module contains a class to describe physical connections between :mod:`Sea.model.components`.
"""
import math
import cmath
import numpy as np
import warnings # Handling of warnings
import abc # Abstract base classes
import logging # Add logging functionality
from ..base import Base
class Connection(Base):
"""Class for connections between components."""
#__metaclass__ = abc.ABCMeta
object_sort = 'Connection'
components = list()
"""
List of components that are connected through this connection. Every list item is a tuple (component, mount) where mount is a string describing whether the component is mounted at an edge or far from the edge.
"""
subsystems = list()
"""
List of all enabled subsystems.
"""
couplings = list()
"""
List of all couplings.
"""
@property
def impedance(self):
"""Total impedance at the coupling.
:rtype: :class:`numpy.ndarray`
"""
        imp = np.zeros(len(self.omega))
        print(self.subsystems)
        for subsystem in self.subsystems:
            print(subsystem.impedance)
            imp = imp + subsystem.impedance
        return imp
def get_coupling(self, subsystem_from, subsystem_to):
"""Return the coupling between subsystems for calculations.
"""
return
#@property
#def routes(self):
#"""
#Create a list.
#"""
#return [(couplings.subsystem_from, coupling.subsystem_to) for coupling in couplings]
| StarcoderdataPython |
9684658 | <reponame>dinojugosloven/pymalcolm
import unittest
from mock import patch, ANY
import os
from malcolm.core import Process, Context, StringMeta
from malcolm.modules.scanning.controllers import RunnableController
from malcolm.modules.ca.util import catools
from malcolm.core.alarm import Alarm, AlarmSeverity
from malcolm.modules.system.parts import DirParsePart
deps = [
"TEST=/a/test\n",
"DEP1=$(TEST)/some/dependency\n"
]
class MockPv(str):
ok = True
class ManyAlarms:
def __iter__(self):
self.i = 1
return self
def __next__(self):
self.i += 1
return Alarm(message="Alarm #%s" % self.i)
def reset_alarms(mock):
mock.reset_mock()
mock.side_effect = iter(ManyAlarms())
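# (Added note) ManyAlarms is an infinite iterator of distinct Alarm objects;
# reset_alarms wires it into the mocked Alarm so that every call yields a
# fresh "Alarm #n" instance.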
class TestDirParsePart(unittest.TestCase):
@patch("malcolm.modules.ca.util.catools")
def add_part_and_start(self, catools):
self.part = DirParsePart("dir", "TS-DI-IOC-01")
self.c1.add_part(self.part)
self.p.add_controller(self.c1)
self.p.start()
def setUp(self):
self.p = Process('process1')
self.context = Context(self.p)
self.c1 = RunnableController(
mri="SYS", config_dir="/tmp", use_git=False)
try:
os.mkdir('/tmp/configure')
except OSError:
pass
def tearDown(self):
try:
self.p.stop(timeout=1)
except AssertionError:
pass
os.rmdir('/tmp/configure')
# @patch("malcolm.modules.ca.util.CAAttribute")
# def test_has_pvs(self, CAAttribute):
# self.add_part_and_start()
# CAAttribute.assert_called_once_with(ANY, catools.DBR_STRING, "", "ICON:KERNEL_VERS", throw=False)
# assert isinstance(CAAttribute.call_args[0][0], StringMeta)
# meta = CAAttribute.call_args[0][0]
# assert meta.description == "Host Architecture"
# assert not meta.writeable
# assert len(meta.tags) == 0
def test_set_dir_concats_strings(self):
self.add_part_and_start()
self.part.dir1 = "hello "
self.part.set_dir2(MockPv("world!"))
assert self.part.dir == "hello world!"
self.part.dir2 = "bar"
self.part.set_dir1(MockPv("foo"))
assert self.part.dir == "foobar"
def test_parses_dir(self):
self.add_part_and_start()
self.part.dir = "/tmp"
with open('/tmp/configure/RELEASE', 'w') as f:
f.writelines(deps)
self.part.parse_release()
assert len(self.part.dependencies.value.module) == 2
assert len(self.part.dependencies.value.path) == 2
assert self.part.dependencies.value.module[0] == "TEST"
assert self.part.dependencies.value.module[1] == "DEP1"
assert self.part.dependencies.value.path[0] == "/a/test"
assert self.part.dependencies.value.path[1] == "/a/test/some/dependency"
os.remove('/tmp/configure/RELEASE')
@patch("malcolm.core.alarm.Alarm")
def test_sets_alarm_if_dir_doesnt_exist(self, alarm):
reset_alarms(alarm)
self.add_part_and_start()
self.part.dir = "/i/am/not/a/dir"
reset_alarms(alarm)
self.part.parse_release()
alarm.assert_called_with(message="reported IOC directory not found",
severity=AlarmSeverity.MINOR_ALARM)
@patch("malcolm.core.alarm.Alarm")
def test_version_updated_sets_status_for_version(self, alarm):
reset_alarms(alarm)
self.add_part_and_start()
reset_alarms(alarm)
self.part.version_updated(MockPv("work"))
alarm.assert_called_once_with(message='IOC running from non-prod area', severity=AlarmSeverity.MINOR_ALARM)
reset_alarms(alarm)
self.part.version_updated(MockPv("other"))
alarm.assert_called_once_with(message='IOC running from non-prod area', severity=AlarmSeverity.MINOR_ALARM)
reset_alarms(alarm)
self.part.version_updated(MockPv("somethingelse"))
alarm.assert_called_once_with(message="OK", severity=AlarmSeverity.NO_ALARM)
@patch("malcolm.core.alarm.Alarm")
def test_version_updated_sets_alarm_if_no_version(self, alarm):
reset_alarms(alarm)
self.add_part_and_start()
reset_alarms(alarm)
self.part.has_procserv = True
self.part.version_updated(None)
alarm.assert_called_once_with(message="IOC not running (procServ enabled)",
severity=AlarmSeverity.UNDEFINED_ALARM)
reset_alarms(alarm)
self.part.has_procserv = False
self.part.version_updated(None)
alarm.assert_called_once_with(message="neither IOC nor procServ are running",
severity=AlarmSeverity.INVALID_ALARM)
| StarcoderdataPython |
3279505 | import pytest
from pytest_mock import MockerFixture
from pystratis.api.signalr import SignalR
from pystratis.api.signalr.responsemodels import *
from pystratis.core.networks import StraxMain, CirrusMain
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_get_connection_info(mocker: MockerFixture, network):
data = {
'signalRUri': 'http://localhost',
'signalRPort': 17104
}
mocker.patch.object(SignalR, 'get', return_value=data)
signalr = SignalR(network=network, baseuri=mocker.MagicMock())
response = signalr.get_connection_info()
assert response == GetConnectionInfoModel(**data)
# noinspection PyUnresolvedReferences
signalr.get.assert_called_once()
| StarcoderdataPython |
3470457 | <filename>Deep Learning - SG segmentation/FCN-DenseNet inference/DeepSGUS - sample.py
# Script for the automatic semantic segmentation of SGUS images
from DeepSGUS import DeepSGUS_CNN
import matplotlib.pyplot as plt
import cv2 as cv
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
print(tf.__version__) # 1.14.0
print(cv.__version__) # 4.1.0
#LOAD PRETRAINED MODEL
DeepSGUS = DeepSGUS_CNN('2 Deep Learning - SG segmentation/FCN-DenseNet inference/frozen_graph.pb')
DeepSGUS.print_layerNames()
#INPUTS
# inputImg = 'IMG-0001-00008.jpg'
inputImg = 'TIONI_0001_img.jpg'
# inputImg = 'STELLIN_0001_img.jpg'
#RUN SEGMENTATION
rez = DeepSGUS.segmentImage('2 Deep Learning - SG segmentation/FCN-DenseNet inference/in/' + inputImg)
output_PerPixelPredictions = rez[0] # 0-background, 1-salivary gland (image)
output_BlackAndWhiteSG = rez[1] # black-background, white-salivary gland (imge)
output_ContourOverInput = rez[2] # resulting contour is drawn over the input image (image)
output_contourSG_points = rez[3] # contour points (array)
#SAVE
cv.imwrite('2 Deep Learning - SG segmentation/FCN-DenseNet inference/out/' + inputImg + '_SG_predictions.jpg' , output_PerPixelPredictions)
cv.imwrite('2 Deep Learning - SG segmentation/FCN-DenseNet inference/out/' + inputImg + '_SG_Black&White.jpg' , output_BlackAndWhiteSG)
cv.imwrite('2 Deep Learning - SG segmentation/FCN-DenseNet inference/out/' + inputImg + '_SG_Contour.jpg' , output_ContourOverInput)
np.savetxt('2 Deep Learning - SG segmentation/FCN-DenseNet inference/out/' + inputImg + '_SG_Contour_Points.txt', output_contourSG_points)
#SHOW
img = cv.imread('2 Deep Learning - SG segmentation/FCN-DenseNet inference/in/' + inputImg)
img = cv.cvtColor(img, cv.COLOR_BGR2RGB)
plt.imshow(img)
plt.plot(output_contourSG_points[:,0], output_contourSG_points[:,1])
plt.show() | StarcoderdataPython |
285647 | from __future__ import absolute_import
from django.contrib import admin
from smsgateway.models import SMS, QueuedSMS
class SMSAdmin(admin.ModelAdmin):
date_hierarchy = 'sent'
list_display = ('direction', 'sent', 'sender', 'to', 'content', 'operator', 'backend', 'gateway', 'gateway_ref')
search_fields = ('sender', 'to', 'content',)
list_filter = ('operator', 'direction', 'gateway', 'backend')
admin.site.register(SMS, SMSAdmin)
class QueuedSMSAdmin(admin.ModelAdmin):
list_display = ('to', 'content', 'created', 'using', 'priority')
search_fields = ('to', 'content')
list_filter = ('created', 'priority', 'using')
admin.site.register(QueuedSMS, QueuedSMSAdmin)
| StarcoderdataPython |
6520121 | from .settings import *
APP_ERROR_DB_MODEL = 'utils.TestErrorModel'
| StarcoderdataPython |
250711 | import rospy
import sys
import moveit_commander
from motion_control.moveit_helpers import load_joint_configurations_from_file
if __name__ == '__main__':
moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node("moveit_commander_node")
group_name = rospy.get_param("~move_group")
pose_name = rospy.get_param("~pose_name")
move_group = moveit_commander.MoveGroupCommander(group_name)
load_joint_configurations_from_file(move_group)
move_group.set_named_target(pose_name)
print("Moving...")
move_group.go(wait=True)
move_group.stop()
print("Movement done")
| StarcoderdataPython |
12824260 | <reponame>michalnand/reinforcement_learning_im<gh_stars>0
import sys
sys.path.insert(0, '../../')
from libs_common.RLStatsCompute import *
import matplotlib.pyplot as plt
result_path = "./results/"
files = []
files.append("./models/ddpg_baseline/run_0/result/result.log")
files.append("./models/ddpg_baseline/run_1/result/result.log")
files.append("./models/ddpg_baseline/run_2/result/result.log")
files.append("./models/ddpg_baseline/run_3/result/result.log")
files.append("./models/ddpg_baseline/run_4/result/result.log")
files.append("./models/ddpg_baseline/run_5/result/result.log")
rl_stats_compute_ddpg = RLStatsCompute(files, result_path + "ddpg_baseline.log")
'''
files = []
files.append("./models/ddpg_imagination/run_0/result/result.log")
files.append("./models/ddpg_imagination/run_1/result/result.log")
files.append("./models/ddpg_imagination/run_2/result/result.log")
files.append("./models/ddpg_imagination/run_3/result/result.log")
rl_stats_compute_imagination = RLStatsCompute(files, result_path + "ddpg_imagination.log")
'''
plt.cla()
plt.ylabel("score")
plt.xlabel("episode")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_ddpg.games_mean, rl_stats_compute_ddpg.episode_mean, label="ddpg baseline", color='blue')
plt.fill_between(rl_stats_compute_ddpg.games_mean, rl_stats_compute_ddpg.episode_lower, rl_stats_compute_ddpg.episode_upper, color='blue', alpha=0.2)
#plt.plot(rl_stats_compute_imagination.games_mean, rl_stats_compute_imagination.episode_mean, label="ddpg imagination entropy", color='red')
#plt.fill_between(rl_stats_compute_imagination.games_mean, rl_stats_compute_imagination.episode_lower, rl_stats_compute_imagination.episode_upper, color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "score_per_episode.png", dpi = 300)
plt.cla()
plt.ylabel("score")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_ddpg.iterations, rl_stats_compute_ddpg.episode_mean, label="ddpg baseline", color='blue')
plt.fill_between(rl_stats_compute_ddpg.iterations, rl_stats_compute_ddpg.episode_lower, rl_stats_compute_ddpg.episode_upper, color='blue', alpha=0.2)
#plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.episode_mean, label="ddpg imagination entropy", color='red')
#plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.episode_lower, rl_stats_compute_imagination.episode_upper, color='red', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "score_per_iteration.png", dpi = 300)
'''
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.entropy_mean, label="entropy", color='orange')
plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.entropy_lower, rl_stats_compute_imagination.entropy_upper, color='orange', alpha=0.2)
plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.curiosity_mean, label="curiosity", color='green')
plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.curiosity_lower, rl_stats_compute_imagination.curiosity_upper, color='green', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "internal_motivation.png", dpi = 300)
plt.cla()
plt.ylabel("value")
plt.xlabel("iteration")
plt.grid(color='black', linestyle='-', linewidth=0.1)
plt.plot(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.forward_loss_mean, label="forward model loss", color='navy')
plt.fill_between(rl_stats_compute_imagination.iterations, rl_stats_compute_imagination.forward_loss_lower, rl_stats_compute_imagination.forward_loss_upper, color='navy', alpha=0.2)
plt.legend(loc='lower right', borderaxespad=0.)
plt.savefig(result_path + "forward_model_loss.png", dpi = 300)
''' | StarcoderdataPython |
5135072 | # brick_test.py
import unittest
from lego.brick import LegoBrick
class LegoBrick_Test(unittest.TestCase):
def test_wrongInitialization(self):
throws = False
try:
LegoBrick(10, 0)
except ValueError as e:
throws = True
self.assertTrue(
throws,
"LegoBrick constructor didn't throw ValueError on illegal parameter"
)
throws = False
try:
LegoBrick(0, 10)
except ValueError as e:
throws = True
self.assertTrue(
throws,
"LegoBrick constructor didn't throw ValueError on illegal parameter"
)
def test_initialization(self):
width = 5
height = 5
b = LegoBrick(width, height)
self.assertEqual(b.getHeight(), height)
self.assertEqual(b.getWidth(), width)
self.assertEqual(b.getArea(), width * height)
self.assertEqual(b.getId(), LegoBrick.NONE_ID)
id = 13
b = LegoBrick(width, height, id)
self.assertEqual(b.getHeight(), height)
self.assertEqual(b.getWidth(), width)
self.assertEqual(b.getArea(), width * height)
self.assertEqual(b.getId(), id)
def test_setAttributes(self):
height = 5
b = LegoBrick(height, height)
width = 10
b.setWidth(width)
self.assertEqual(b.getHeight(), height)
self.assertEqual(b.getWidth(), width)
self.assertEqual(b.getArea(), width * height)
id = 13
b.setId(id)
self.assertEqual(b.getId(), id)
def test_copy(self):
b = LegoBrick(5, 5)
copy = b.copy()
self.assertEqual(b.getHeight(), copy.getHeight())
self.assertEqual(b.getWidth(), copy.getWidth())
self.assertEqual(b.getArea(), copy.getArea())
self.assertEqual(b.getId(), copy.getId())
def test_equals(self):
b1 = LegoBrick(5, 5)
b1copy = b1.copy()
self.assertTrue(b1 == b1copy)
b1copy.setId(b1.getId() + 10)
self.assertFalse(b1 == b1copy)
b2 = LegoBrick(10, 5)
self.assertFalse(b2 == b1)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
4974850 | import sys
import pathlib
import re
from collections import Counter
import numpy as np
p = pathlib.Path(sys.argv[1])
file_dicts = []
for f in p.glob('*'):
with open(f) as in_stream:
d = Counter(re.split(r'\W+', in_stream.read()))
file_dicts.append(d)
voc = []
for d in file_dicts:
voc.extend(d.keys())
voc = sorted(set(voc))
array_list = []
for d in file_dicts:
d_size = sum(d.values())
l = [d[word]/d_size for word in voc]
array_list.append(l)
docs_array = np.array(array_list)
tf = docs_array
idf = np.log((docs_array.shape[0]/np.count_nonzero(docs_array, axis=0)))
tfidf = tf * idf
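# Worked example (added): with 2 documents, a word occurring in both gets
# idf = log(2/2) = 0, so its tf-idf weight vanishes; a word unique to one
# document gets idf = log(2) ~= 0.693. Note that no smoothing term is used here.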
np.savetxt(sys.stdout, tfidf, delimiter='\t')
| StarcoderdataPython |
3329179 | import tensorflow as tf
import numpy as np
from PIL import Image
from .deeplab import Deeplab_xcep_pascal
from .semantic import obtain_segmentation
import cv2
import time
class alter_bg():
def __init__(self):
self.model = Deeplab_xcep_pascal()
def load_pascalvoc_model(self, model_path):
self.model.load_weights(model_path)
def segmentAsPascalvoc(self, image_path, process_frame = False):
trained_image_width=512
mean_subtraction_value=127.5
if process_frame == True:
image = image_path
else:
image = np.array(Image.open(image_path))
# resize to max dimension of images from training dataset
w, h, n = image.shape
if n > 3:
image = cv2.cvtColor(image, cv2.COLOR_BGRA2RGB)
ratio = float(trained_image_width) / np.max([w, h])
resized_image = np.array(Image.fromarray(image.astype('uint8')).resize((int(ratio * h), int(ratio * w))))
resized_image = (resized_image / mean_subtraction_value) -1
# pad array to square image to match training images
pad_x = int(trained_image_width - resized_image.shape[0])
pad_y = int(trained_image_width - resized_image.shape[1])
resized_image = np.pad(resized_image, ((0, pad_x), (0, pad_y), (0, 0)), mode='constant')
#run prediction
res = self.model.predict(np.expand_dims(resized_image, 0))
labels = np.argmax(res.squeeze(), -1)
# remove padding and resize back to original image
if pad_x > 0:
labels = labels[:-pad_x]
if pad_y > 0:
labels = labels[:, :-pad_y]
raw_labels = labels
#Apply segmentation color map
labels = obtain_segmentation(labels)
labels = np.array(Image.fromarray(labels.astype('uint8')).resize((h, w)))
new_img = cv2.cvtColor(labels, cv2.COLOR_RGB2BGR)
return raw_labels, new_img
#### ALTER IMAGE BACKGROUND WITH A NEW PICTURE ###
def change_bg_img(self, f_image_path,b_image_path, output_image_name = None, verbose = None):
if verbose is not None:
print("processing image......")
seg_image = self.segmentAsPascalvoc(f_image_path)
ori_img = cv2.imread(f_image_path)
bg_img = cv2.imread(b_image_path)
w, h, _ = ori_img.shape
bg_img = cv2.resize(bg_img, (h,w))
result = np.where(seg_image[1], ori_img, bg_img)
if output_image_name is not None:
cv2.imwrite(output_image_name, result)
return result
##### GIVE THE BACKGROUND OF AN IMAGE A DISTINCT COLOR ######
def color_bg(self, image_path, colors, output_image_name = None, verbose = None):
if verbose is not None:
print("processing image......")
seg_image = self.segmentAsPascalvoc(image_path)
ori_img = cv2.imread(image_path)
obtain_img = cv2.subtract(seg_image[1], ori_img)
out = cv2.subtract(seg_image[1], obtain_img)
out[np.where((out == [0, 0, 0]).all(axis = 2))] = [colors]
out = cv2.cvtColor(out, cv2.COLOR_BGR2RGB)
result = np.where(seg_image[1], ori_img, out)
if output_image_name is not None:
cv2.imwrite(output_image_name, result)
return result
##### GIVE THE BACKGROUND OF A FRAME A DISTINCT COLOR ######
def color_frame(self, frame, colors, verbose = None):
if verbose is not None:
print("processing frame....")
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
obtain_frame = cv2.subtract(seg_frame[1], frame)
out = cv2.subtract(seg_frame[1], obtain_frame)
out[np.where((out == [0, 0, 0]).all(axis = 2))] = [colors]
out = cv2.cvtColor(out, cv2.COLOR_BGR2RGB)
result = np.where(seg_frame[1], frame, out)
return result
##### GIVE THE BACKGROUND OF A VIDEO A DISTINCT COLOR ######
def color_video(self, video_path, colors, frames_per_second = None, output_video_name = None):
capture = cv2.VideoCapture(video_path)
width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
if frames_per_second is not None:
save_video = cv2.VideoWriter(output_video_name, cv2.VideoWriter_fourcc(*'DIVX'),frames_per_second, (width, height))
counter = 0
start = time.time()
while True:
counter += 1
ret, frame = capture.read()
if ret:
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
print("No. of frames:", counter)
obtain_frame = cv2.subtract(seg_frame[1], frame)
out = cv2.subtract(seg_frame[1], obtain_frame)
out[np.where((out == [0, 0, 0]).all(axis = 2))] = [colors]
out = cv2.cvtColor(out, cv2.COLOR_BGR2RGB)
out = np.where(seg_frame[1], frame, out)
output = cv2.resize(out, (width,height), interpolation=cv2.INTER_AREA)
if output_video_name is not None:
save_video.write(output)
else:
break
capture.release()
end = time.time()
print(f"Processed {counter} frames in {end-start:.1f} seconds")
if frames_per_second is not None:
save_video.release()
return output
##### GIVE THE BACKGROUND OF A CAMERA FRAME A DISTINCT COLOR ######
def color_camera(self, cam, colors, frames_per_second = None, check_fps = False,show_frames = False,
frame_name = None, verbose = None, output_video_name = None):
capture = cam
width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
if frames_per_second is not None:
save_video = cv2.VideoWriter(output_video_name, cv2.VideoWriter_fourcc(*'DIVX'),frames_per_second, (width, height))
counter = 0
start = time.time()
while True:
counter += 1
ret, frame = capture.read()
if ret:
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
if verbose is not None:
print("No.of frames:", counter)
obtain_frame = cv2.subtract(seg_frame[1], frame)
out = cv2.subtract(seg_frame[1], obtain_frame)
out[np.where((out == [0, 0, 0]).all(axis = 2))] = [colors]
out = cv2.cvtColor(out, cv2.COLOR_BGR2RGB)
out = np.where(seg_frame[1], frame, out)
output = cv2.resize(out, (width,height), interpolation=cv2.INTER_AREA)
if show_frames == True:
if frame_name is not None:
cv2.imshow(frame_name, output)
if cv2.waitKey(25) & 0xFF == ord('q'):
break
if output_video_name is not None:
save_video.write(output)
else:
break
if check_fps == True:
out = capture.get(cv2.CAP_PROP_FPS)
print(f"{out} frames per seconds")
capture.release()
end = time.time()
if verbose is not None:
print(f"Processed {counter} frames in {end-start:.1f} seconds")
if frames_per_second is not None:
save_video.release()
return output
##### BLUR THE BACKGROUND OF AN IMAGE #####
def blur_bg(self, image_path,low = False, moderate = False, extreme = False, output_image_name = None, verbose = None):
if verbose is not None:
print("processing image......")
seg_image = self.segmentAsPascalvoc(image_path)
ori_img = cv2.imread(image_path)
if low == True:
blur_img = cv2.blur(ori_img, (21,21), 0)
if moderate == True:
blur_img = cv2.blur(ori_img, (39,39), 0)
if extreme == True:
blur_img = cv2.blur(ori_img, (81,81), 0)
out = np.where(seg_image[1], ori_img, blur_img)
if output_image_name is not None:
cv2.imwrite(output_image_name, out)
return out
##### BLUR THE BACKGROUND OF A FRAME #####
def blur_frame(self, frame,low = False, moderate = False, extreme = False, verbose = None):
if verbose is not None:
print("processing frame......")
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
if low == True:
blur_frame = cv2.blur(frame, (21,21), 0)
if moderate == True:
blur_frame = cv2.blur(frame, (39,39), 0)
if extreme == True:
blur_frame = cv2.blur(frame, (81,81), 0)
result = np.where(seg_frame[1], frame, blur_frame)
return result
#### BLUR THE BACKGROUND OF A VIDEO #####
def blur_video(self, video_path, low = False, moderate = False, extreme = False, frames_per_second = None, output_video_name = None):
capture = cv2.VideoCapture(video_path)
width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
if frames_per_second is not None:
save_video = cv2.VideoWriter(output_video_name, cv2.VideoWriter_fourcc(*'DIVX'),frames_per_second, (width, height))
counter = 0
start = time.time()
while True:
counter += 1
ret, frame = capture.read()
if ret:
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
print("No. of frames:", counter)
if low == True:
blur_frame = cv2.blur(frame, (21,21), 0)
if moderate == True:
blur_frame = cv2.blur(frame, (39,39), 0)
if extreme == True:
blur_frame = cv2.blur(frame, (81,81), 0)
out = np.where(seg_frame[1], frame, blur_frame)
output = cv2.resize(out, (width,height), interpolation=cv2.INTER_AREA)
if output_video_name is not None:
save_video.write(output)
else:
break
capture.release()
end = time.time()
print(f"Processed {counter} frames in {end-start:.1f} seconds")
if frames_per_second is not None:
save_video.release()
return output
##### BLUR THE BACKGROUND OF A CAMERA FRAME ######
def blur_camera(self, cam, low = False, moderate = False, extreme = False, frames_per_second = None,
check_fps = False,show_frames = False, frame_name = None, verbose = None, output_video_name = None):
capture = cam
width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
if frames_per_second is not None:
save_video = cv2.VideoWriter(output_video_name, cv2.VideoWriter_fourcc(*'DIVX'),frames_per_second, (width, height))
counter = 0
start = time.time()
while True:
counter += 1
ret, frame = capture.read()
if ret:
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
if verbose is not None:
print("No.of frames:", counter)
if low == True:
blur_frame = cv2.blur(frame, (21,21), 0)
if moderate == True:
blur_frame = cv2.blur(frame, (39,39), 0)
if extreme == True:
blur_frame = cv2.blur(frame, (81,81), 0)
out = np.where(seg_frame[1], frame, blur_frame)
output = cv2.resize(out, (width,height), interpolation=cv2.INTER_AREA)
if show_frames == True:
if frame_name is not None:
cv2.imshow(frame_name, output)
if cv2.waitKey(25) & 0xFF == ord('q'):
break
if output_video_name is not None:
save_video.write(output)
else:
break
if check_fps == True:
out = capture.get(cv2.CAP_PROP_FPS)
print(f"{out} frames per seconds")
capture.release()
end = time.time()
if verbose is not None:
print(f"Processed {counter} frames in {end-start:.1f} seconds")
if frames_per_second is not None:
save_video.release()
return output
### GRAYSCALE THE BACKGROUND OF AN IMAGE ###
def gray_bg(self, image_path, output_image_name = None, verbose = None):
if verbose is not None:
print("processing image......")
seg_image = self.segmentAsPascalvoc(image_path)
ori_img = cv2.imread(image_path)
gray_img = cv2.cvtColor(ori_img, cv2.COLOR_BGR2GRAY)
gray_img = cv2.cvtColor(gray_img, cv2.COLOR_GRAY2BGR)
result = np.where(seg_image[1], ori_img, gray_img)
if output_image_name is not None:
cv2.imwrite(output_image_name, result)
return result
def gray_frame(self, frame, verbose = None):
if verbose is not None:
print("processing frame......")
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray_frame = cv2.cvtColor(gray_frame, cv2.COLOR_GRAY2BGR)
result = np.where(seg_frame[1], frame, gray_frame)
return result
### GRAYSCALE THE BACKGROUND OF A VIDEO ###
def gray_video(self, video_path, frames_per_second = None, output_video_name = None):
capture = cv2.VideoCapture(video_path)
width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
if frames_per_second is not None:
save_video = cv2.VideoWriter(output_video_name, cv2.VideoWriter_fourcc(*'DIVX'),frames_per_second, (width, height))
counter = 0
start = time.time()
while True:
counter += 1
ret, frame = capture.read()
if ret:
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
print("No. of frames:", counter)
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray_frame = cv2.cvtColor(gray_frame, cv2.COLOR_GRAY2BGR)
out = np.where(seg_frame[1], frame, gray_frame)
output = cv2.resize(out, (width,height), interpolation=cv2.INTER_AREA)
if output_video_name is not None:
save_video.write(output)
else:
break
capture.release()
end = time.time()
print(f"Processed {counter} frames in {end-start:.1f} seconds")
if frames_per_second is not None:
save_video.release()
return output
### GRAYSCALE THE BACKGROUND OF A CAMERA FEED ###
def gray_camera(self, cam, frames_per_second = None, check_fps = False,show_frames = False,
frame_name = None, verbose = None, output_video_name = None):
capture = cam
width = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
if frames_per_second is not None:
save_video = cv2.VideoWriter(output_video_name, cv2.VideoWriter_fourcc(*'DIVX'),frames_per_second, (width, height))
counter = 0
start = time.time()
while True:
            ret, frame = capture.read()
            if ret:
                counter += 1
seg_frame = self.segmentAsPascalvoc(frame, process_frame=True)
if verbose is not None:
print("No. of frames:", counter)
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray_frame = cv2.cvtColor(gray_frame, cv2.COLOR_GRAY2BGR)
out = np.where(seg_frame[1], frame, gray_frame)
output = cv2.resize(out, (width,height), interpolation=cv2.INTER_AREA)
if show_frames == True:
if frame_name is not None:
cv2.imshow(frame_name, output)
if cv2.waitKey(25) & 0xFF == ord('q'):
break
if output_video_name is not None:
save_video.write(output)
else:
break
if check_fps == True:
            fps_val = capture.get(cv2.CAP_PROP_FPS)
            print(f"{fps_val} frames per second")
capture.release()
end = time.time()
if verbose is not None:
print(f"Processed {counter} frames in {end-start:.1f} seconds")
if frames_per_second is not None:
save_video.release()
return output
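# Usage sketch for the grayscale helpers above (hypothetical file names; `seg`
# is an instance of this class with its model loaded):
#
#   seg.gray_bg("sample.jpg", output_image_name="gray_bg.jpg")
#   seg.gray_video("clip.mp4", frames_per_second=30,
#                  output_video_name="gray_clip.avi")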
| StarcoderdataPython |
5119874 | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 25 11:34:44 2018
@author: conte
"""
import sys
import cv2
import skimage.util
import numpy
import approaches.approach0.approach0 as a0
import approaches.approach1.approach1 as a1
import approaches.approach2.approach2 as a2
import approaches.approach3.approach3 as a3
import approaches.approach4.approach4 as a4
import approaches.approach5.approach5 as a5
if len(sys.argv) != 7:
usg = 'Usage : virtualHeartRate <video file> <method (int)> <X range (in sec)> <X step (in sec)> <out file (txt)> <show>'
print(usg)
# print('Implemented methods (see README) : 1 - approach 1 ; 2 - approach 2; 3 - approach 3 ; 5 - approach 5 ')
exit()
videofilename = sys.argv[1] # video file
methodNumber = int(sys.argv[2]) # method (int)
intX = int(sys.argv[3]) # X range (in sec)
step = int(sys.argv[4]) # X step (in sec)
outfilename = sys.argv[5] # out file (txt)
show = int(sys.argv[6]) # show
# Read video
video = cv2.VideoCapture(videofilename)
# Exit if video not opened.
if not video.isOpened():
print("Could not open video")
sys.exit()
# Read the frame rate and the reported total frame count
fps = int(video.get(cv2.CAP_PROP_FPS))
totalFrames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
listImages = []
listBpms = []
while True:
ret, frame = video.read()
if not ret:
break
listImages.append(frame)
# Window over the frames actually read; CAP_PROP_FRAME_COUNT can be inaccurate.
data = numpy.arange(0, len(listImages))
indexes = skimage.util.view_as_windows(data, fps * intX, step=fps * step)
try:
for i in range(len(indexes)):
print("Processing window %d/%d" % (i + 1, len(indexes)))
        # +1 so the window's last frame is included in the slice
        curImages = listImages[indexes[i][0]: indexes[i][-1] + 1]
bpm = 0
# Apply method X to current list of images
if methodNumber == 0:
bpm = a0.approach0(curImages, show, fps)
elif methodNumber == 1:
bpm = a1.approach1(curImages, show, fps)
elif methodNumber == 2:
bpm = a2.approach2(curImages, show, fps)
elif methodNumber == 3:
bpm = a3.approach3(curImages, show, fps)
elif methodNumber == 4:
bpm = a4.approach4(curImages, show, fps)
elif methodNumber == 5:
bpm = a5.approach5(curImages, show, fps)
listBpms.append(bpm)
except Exception as error:
print('Error: ')
print(error)
fout = open(outfilename, 'w')
for x in listBpms:
fout.write(str(x) + '\n')
fout.close()
video.release()
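# Example invocation (hypothetical file names): estimate BPM from myvideo.avi
# with approach 2, over 10-second windows advanced in 1-second steps, writing
# one estimate per window to bpms.txt, with display disabled:
#
#   python virtualHeartRate.py myvideo.avi 2 10 1 bpms.txt 0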
| StarcoderdataPython |
5094077 | # filename: saspy/sasbase.py
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# This module is designed to connect to SAS from python, providing a natural python style interface.
# it provides base functionality, data access and processing, and includes analytics and ODS results.
# There is a sample configuration file named sascfg in the saspy package showing how to configure connections
# to SAS. Currently supported methods are STDIO, connecting to a local (same machine) Linux SAS using
# stdio methods (fork, exec, and pipes). There is also support for running STDIO over SSH, which can
# connect to a remote Linux SAS via passwordless ssh. Unlike the local STDIO method, the ssh method cannot
# currently support interrupt handling. An interrupt on this method can only terminate the SAS process;
# you'll be prompted to terminate or wait for completion. The third method is HTTP, which can connect
# to SAS Viya via the Compute Service, a restful micro service in the Viya system.
#
# Each of these connection methods (access methods) are handled by their own IO module. This main
# module determines which IO module to use based upon the configuration chosen at runtime. More
# IO modules can be seamlessly plugged in, if needed, in the future.
#
# The expected use is to simply import this package and establish a SAS session, then use the methods:
#
# import saspy
# sas = saspy.SASsession()
# sas.[have_at_it]()
#
import os
import sys
import getpass
import tempfile
from saspy.sasioiom import SASsessionIOM
from saspy.sasets import SASets
from saspy.sasml import SASml
from saspy.sasqc import SASqc
from saspy.sasstat import SASstat
from saspy.sasutil import SASutil
from saspy.sasViyaML import SASViyaML
from saspy.sasdata import SASdata
try:
import pandas as pd
except ImportError:
pass
try:
import saspy.sascfg_personal as SAScfg
except ImportError:
try:
import sascfg_personal as SAScfg
except ImportError:
import saspy.sascfg as SAScfg
if os.name != 'nt':
from saspy.sasiostdio import SASsessionSTDIO
try:
from IPython.display import HTML
from IPython.display import display as DISPLAY
except ImportError:
pass
class SASconfig:
"""
This object is not intended to be used directly. Instantiate a SASsession object instead
"""
def __init__(self, **kwargs):
self._kernel = kwargs.get('kernel', None)
self.valid = True
self.mode = ''
configs = []
cfgfile = kwargs.get('cfgfile', None)
if cfgfile:
tempdir = tempfile.TemporaryDirectory()
try:
fdin = open(cfgfile)
except:
print("Couldn't open cfgfile " + cfgfile)
cfgfile = None
if cfgfile:
f1 = fdin.read()
fdout = open(tempdir.name + os.sep + "sascfgfile.py", 'w')
fdout.write(f1)
fdout.close()
fdin.close()
sys.path.append(tempdir.name)
import sascfgfile as SAScfg
tempdir.cleanup()
sys.path.remove(tempdir.name)
if not cfgfile:
try:
import saspy.sascfg_personal as SAScfg
except ImportError:
try:
import sascfg_personal as SAScfg
except ImportError:
import saspy.sascfg as SAScfg
self.SAScfg = SAScfg
# GET Config options
try:
self.cfgopts = getattr(SAScfg, "SAS_config_options")
except:
self.cfgopts = {}
# in lock down mode, don't allow runtime overrides of option values from the config file.
lock = self.cfgopts.get('lock_down', True)
# GET Config names
configs = getattr(SAScfg, "SAS_config_names")
cfgname = kwargs.get('cfgname', '')
if len(cfgname) == 0:
if len(configs) == 0:
print("No SAS Configuration names found in saspy.sascfg")
self.valid = False
return
else:
if len(configs) == 1:
cfgname = configs[0]
if self._kernel is None:
print("Using SAS Config named: " + cfgname)
else:
cfgname = self._prompt(
"Please enter the name of the SAS Config you wish to run. Available Configs are: " +
str(configs) + " ")
while cfgname not in configs:
cfgname = self._prompt(
"The SAS Config name specified was not found. Please enter the SAS Config you wish to use. Available Configs are: " +
str(configs) + " ")
if cfgname is None:
raise KeyboardInterrupt
self.name = cfgname
cfg = getattr(SAScfg, cfgname)
ip = cfg.get('ip', '')
ssh = cfg.get('ssh', '')
path = cfg.get('saspath', '')
java = cfg.get('java', '')
self.results = cfg.get('results', None)
self.autoexec = cfg.get('autoexec', None)
inautoexec = kwargs.get('autoexec', None)
if inautoexec:
if lock and self.autoexec:
print("Parameter 'autoexec' passed to SAS_session was ignored due to configuration restriction.")
else:
self.autoexec = inautoexec
if len(java) > 0:
self.mode = 'IOM'
elif len(ip) > 0:
self.mode = 'HTTP'
elif len(ssh) > 0:
self.mode = 'SSH'
elif len(path) > 0:
self.mode = 'STDIO'
else:
print("Configuration Definition " + cfgname + " is not valid. Failed to create a SASsession.")
self.valid = False
def _prompt(self, prompt, pw=False):
if self._kernel is None:
if not pw:
try:
return input(prompt)
except KeyboardInterrupt:
return None
else:
try:
return getpass.getpass(prompt)
except KeyboardInterrupt:
return None
else:
try:
return self._kernel._input_request(prompt, self._kernel._parent_ident, self._kernel._parent_header,
password=pw)
except KeyboardInterrupt:
return None
class SASsession():
"""
**Overview**
The SASsession object is the main object to instantiate and provides access to the rest of the functionality.
Most of these parameters will be configured in the sascfg_personal.py configuration file.
All of these parameters are documented more thoroughly in the configuration section of the saspy doc:
https://sassoftware.github.io/saspy/install.html#configuration
These are generally defined in the sascfg_personal.py file as opposed to being specified on the SASsession() invocation.
Common parms for all access methods are:
:param cfgname: the Configuration Definition to use - value in SAS_config_names List in the sascfg_personal.py file
:param cfgfile: fully qualified file name of your sascfg_personal.py file, if it's not in the python search path
:param kernel: None - internal use when running the SAS_kernel notebook
:param results: Type of tabular results to return. default is 'Pandas', other options are 'HTML or 'TEXT'
:param lrecl: An integer specifying the record length for transferring wide data sets from SAS to Data Frames.
:param autoexec: A string of SAS code that will be submitted upon establishing a connection
:return: 'SASsession'
:rtype: 'SASsession'
And each access method has its own set of parameters.
**STDIO**
:param saspath: overrides saspath Dict entry of cfgname in sascfg_personal.py file
:param options: overrides options Dict entry of cfgname in sascfg_personal.py file
:param encoding: This is the python encoding value that matches the SAS session encoding
**STDIO over SSH**
and for running STDIO over passwordless ssh, add these required parameters
:param ssh: full path of the ssh command; /usr/bin/ssh for instance
:param host: host name of the remote machine
:param identity: (Optional) path to a .ppk identity file to be used on the ssh -i parameter
:param port: (Optional) The ssh port of the remote machine normally 22 (equivalent to invoking ssh with the -p option)
    :param tunnel: (Optional) Certain methods of saspy require opening a local port and accepting data streamed from the SAS instance; this is the local port number to use for that tunnel.
**IOM**
and for the IOM IO module to connect to SAS9 via Java IOM
:param java: the path to the java executable to use
    :param iomhost: [for remote IOM case, not local Windows] the resolvable host name, or IP address, of the IOM server to connect to
    :param iomport: [for remote IOM case, not local Windows] the port IOM is listening on
:param omruser: user id for remote IOM access
:param omrpw: pw for user for remote IOM access
:param encoding: This is the python encoding value that matches the SAS session encoding of the IOM server you are connecting to
:param classpath: classpath to IOM client jars and saspyiom client jar.
:param authkey: Key value for finding credentials in .authfile
:param timeout: Timeout value for establishing connection to workspace server
:param appserver: Appserver name of the workspace server to connect to
:param sspi: Boolean for using IWA to connect to a workspace server configured to use IWA
:param javaparms: for specifying java command line options if necessary
"""
# def __init__(self, cfgname: str ='', kernel: 'SAS_kernel' =None, saspath :str ='', options: list =[]) -> 'SASsession':
def __init__(self, **kwargs):
self._loaded_macros = False
self._obj_cnt = 0
self.nosub = False
self.sascfg = SASconfig(**kwargs)
self.batch = False
self.results = kwargs.get('results', self.sascfg.results)
if not self.results:
self.results = 'Pandas'
self.workpath = ''
self.sasver = ''
self.sascei = ''
self.SASpid = None
self.HTML_Style = "HTMLBlue"
self.sas_date_fmts = sas_date_fmts
self.sas_time_fmts = sas_time_fmts
self.sas_datetime_fmts = sas_datetime_fmts
if not self.sascfg.valid:
self._io = None
return
if self.sascfg.mode in ['STDIO', 'SSH', '']:
if os.name != 'nt':
self._io = SASsessionSTDIO(sascfgname=self.sascfg.name, sb=self, **kwargs)
else:
print("Cannot use STDIO I/O module on Windows. No "
"SASsession established. Choose an IOM SASconfig "
"definition")
elif self.sascfg.mode == 'IOM':
self._io = SASsessionIOM(sascfgname=self.sascfg.name, sb=self, **kwargs)
try:
if self._io.pid:
sysvars = """
%put WORKPATH=%sysfunc(pathname(work));
%put ENCODING=&SYSENCODING;
%put SYSVLONG=&SYSVLONG4;
%put SYSJOBID=&SYSJOBID;
%put SYSSCP=&SYSSCP;
"""
res = self.submit(sysvars)['LOG']
vlist = res.rpartition('SYSSCP=')
self.hostsep = vlist[2].partition('\n')[0]
vlist = res.rpartition('SYSJOBID=')
self.SASpid = vlist[2].partition('\n')[0]
vlist = res.rpartition('SYSVLONG=')
self.sasver = vlist[2].partition('\n')[0]
vlist = res.rpartition('ENCODING=')
self.sascei = vlist[2].partition('\n')[0]
vlist = res.rpartition('WORKPATH=')
self.workpath = vlist[2].partition('\n')[0]
if self.hostsep == 'WIN':
self.hostsep = '\\'
else:
self.hostsep = '/'
self.workpath = self.workpath + self.hostsep
if self.sascfg.autoexec:
self.submit(self.sascfg.autoexec)
except (AttributeError):
self._io = None
def __repr__(self):
"""
Display info about this object
:return [str]:
"""
if self._io is None:
pyenc = ''
if self.sascfg.cfgopts.get('verbose', True):
print("This SASsession object is not valid\n")
else:
pyenc = self._io.sascfg.encoding
x = "Access Method = %s\n" % self.sascfg.mode
x += "SAS Config name = %s\n" % self.sascfg.name
x += "WORK Path = %s\n" % self.workpath
x += "SAS Version = %s\n" % self.sasver
x += "SASPy Version = %s\n" % sys.modules['saspy'].__version__
x += "Teach me SAS = %s\n" % str(self.nosub)
x += "Batch = %s\n" % str(self.batch)
x += "Results = %s\n" % self.results
x += "SAS Session Encoding = %s\n" % self.sascei
x += "Python Encoding value = %s\n" % pyenc
x += "SAS process Pid value = %s\n" % self.SASpid
x += "\n"
return x
def __del__(self):
if self._io:
if self._io:
return self._io.__del__()
def _objcnt(self):
self._obj_cnt += 1
return '%04d' % self._obj_cnt
def _startsas(self):
return self._io._startsas()
def _endsas(self):
self.SASpid = None
return self._io._endsas()
def _getlog(self, **kwargs):
return self._io._getlog(**kwargs)
def _getlst(self, **kwargs):
return self._io._getlst(**kwargs)
def _getlsttxt(self, **kwargs):
return self._io._getlsttxt(**kwargs)
def _asubmit(self, code, results):
if results == '':
if self.results.upper() == 'PANDAS':
results = 'HTML'
else:
results = self.results
return self._io._asubmit(code, results)
def submit(self, code: str, results: str = '', prompt: dict = None) -> dict:
'''
This method is used to submit any SAS code. It returns the Log and Listing as a python dictionary.
- code - the SAS statements you want to execute
        - results - format of results; HTML and TEXT are the alternatives
- prompt - dict of names:flags to prompt for; create macro variables (used in submitted code), then keep or delete
the keys which are the names of the macro variables. The boolean flag is to either hide what you type and delete the macros,
or show what you type and keep the macros (they will still be available later).
for example (what you type for pw will not be displayed, user and dsname will):
.. code-block:: python
results_dict = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
i.e,: results = sas.submit("data a; x=1; run; proc print;run')
print(results['LOG'])
HTML(results['LST'])
'''
if self.nosub:
return dict(LOG=code, LST='')
prompt = prompt if prompt is not None else {}
if results == '':
if self.results.upper() == 'PANDAS':
results = 'HTML'
else:
results = self.results
return self._io.submit(code, results, prompt)
def saslog(self) -> str:
"""
This method is used to get the current, full contents of the SASLOG
:return: SAS log
:rtype: str
"""
return self._io.saslog()
def teach_me_SAS(self, nosub: bool):
"""
nosub - bool. True means don't submit the code, print it out so I can see what the SAS code would be.
False means run normally - submit the code.
"""
self.nosub = nosub
def set_batch(self, batch: bool):
"""
This method sets the batch attribute for the SASsession object; it stays in effect until changed.
For methods that just display results like SASdata object methods (head, tail, hist, series, etc.)
and SASresult object results, you can set 'batch' to true to get the results back directly so you
can write them to files or whatever you want to do with them.
This is intended for use in python batch scripts so you can still get ODS XML5 results
and save them to files, which you couldn't otherwise do for these methods.
When running interactively, the expectation is that you want to have the results directly rendered,
but you can run this way too; get the objects display them yourself and/or write them to somewhere.
When `set_batch ==True`, you get the same dictionary returned as from the `SASsession.submit()` method.
:param batch: bool
:rtype: bool
:return: True = return dict([LOG, LST]. False = display LST to screen.
"""
self.batch = batch
def set_results(self, results: str):
"""
        This method sets the results attribute for the SASsession object; it stays in effect until changed
:param results: set the default result type for this SASdata object. ``'Pandas' or 'HTML' or 'TEXT'``.
:return: string of the return type
:rtype: str
"""
self.results = results
def exist(self, table: str, libref: str = "") -> bool:
"""
Does the SAS data set currently exist
:param table: the name of the SAS Data Set
:param libref: the libref for the Data Set, defaults to WORK, or USER if assigned
        :return: Boolean; True if the Data Set exists and False if it does not
:rtype: bool
"""
return self._io.exist(table, libref)
def sasets(self) -> 'SASets':
"""
This methods creates a SASets object which you can use to run various analytics.
See the sasets.py module.
:return: sasets object
"""
if not self._loaded_macros:
self._loadmacros()
self._loaded_macros = True
return SASets(self)
def sasstat(self) -> 'SASstat':
"""
This methods creates a SASstat object which you can use to run various analytics.
See the sasstat.py module.
:return: sasstat object
"""
if not self._loaded_macros:
self._loadmacros()
self._loaded_macros = True
return SASstat(self)
def sasml(self) -> 'SASml':
"""
This methods creates a SASML object which you can use to run various analytics. See the sasml.py module.
:return: sasml object
"""
if not self._loaded_macros:
self._loadmacros()
self._loaded_macros = True
return SASml(self)
def sasqc(self) -> 'SASqc':
"""
This methods creates a SASqc object which you can use to run various analytics. See the sasqc.py module.
:return: sasqc object
"""
if not self._loaded_macros:
self._loadmacros()
self._loaded_macros = True
return SASqc(self)
def sasutil(self) -> 'SASutil':
"""
This methods creates a SASutil object which you can use to run various analytics.
See the sasutil.py module.
:return: sasutil object
"""
if not self._loaded_macros:
self._loadmacros()
self._loaded_macros = True
return SASutil(self)
def sasviyaml(self) -> 'SASViyaML':
"""
This methods creates a SASViyaML object which you can use to run various analytics.
See the SASViyaML.py module.
:return: SASViyaML object
"""
if not self._loaded_macros:
self._loadmacros()
self._loaded_macros = True
return SASViyaML(self)
def _loadmacros(self):
"""
Load the SAS macros at the start of the session
:return:
"""
macro_path = os.path.dirname(os.path.realpath(__file__))
fd = os.open(macro_path + '/' + 'libname_gen.sas', os.O_RDONLY)
code = b'options nosource;\n'
code += os.read(fd, 32767)
code += b'\noptions source;'
self._io._asubmit(code.decode(), results='text')
os.close(fd)
def sasdata(self, table: str, libref: str = '', results: str = '', dsopts: dict = None) -> 'SASdata':
"""
Method to define an existing SAS dataset so that it can be accessed via SASPy
:param table: the name of the SAS Data Set
:param libref: the libref for the Data Set, defaults to WORK, or USER if assigned
:param results: format of results, SASsession.results is default, Pandas, HTML and TEXT are the valid options
:param dsopts: a dictionary containing any of the following SAS data set options(where, drop, keep, obs, firstobs):
- where is a string
- keep are strings or list of strings.
- drop are strings or list of strings.
            - obs is a number - either string or int
            - firstobs is a number - either string or int
- format is a string or dictionary { var: format }
.. code-block:: python
{'where' : 'msrp < 20000 and make = "Ford"'
'keep' : 'msrp enginesize Cylinders Horsepower Weight'
'drop' : ['msrp', 'enginesize', 'Cylinders', 'Horsepower', 'Weight']
'obs' : 10
'firstobs' : '12'
'format' : {'money': 'dollar10', 'time': 'tod5.'}
}
:return: SASdata object
"""
dsopts = dsopts if dsopts is not None else {}
if results == '':
results = self.results
sd = SASdata(self, libref, table, results, dsopts)
if not self.exist(sd.table, sd.libref):
if not self.batch:
print(
"Table " + sd.libref + '.' + sd.table + " does not exist. This SASdata object will not be useful until the data set is created.")
return sd
def saslib(self, libref: str, engine: str = ' ', path: str = '',
options: str = ' ', prompt: dict = None) -> str:
"""
:param libref: the libref to be assigned
:param engine: the engine name used to access the SAS Library (engine defaults to BASE, per SAS)
:param path: path to the library (for engines that take a path parameter)
:param options: other engine or engine supervisor options
:return: SAS log
"""
prompt = prompt if prompt is not None else {}
code = "libname " + libref + " " + engine + " "
if len(path) > 0:
code += " '" + path + "' "
code += options + ";"
if self.nosub:
print(code)
else:
ll = self._io.submit(code, "text", prompt)
if self.batch:
return ll['LOG'].rsplit(";*\';*\";*/;\n")[0]
else:
print(ll['LOG'].rsplit(";*\';*\";*/;\n")[0])
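    # Usage sketch for saslib (hypothetical libref and path; the teradata line
    # mirrors the prompt example in submit's docstring):
    #
    #   sas.saslib('mylib', path='/u/myuser/data')
    #   sas.saslib('tera', engine='teradata',
    #              options='server=teracop1 user=&user pw=&pw',
    #              prompt={'user': False, 'pw': True})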
def datasets(self, libref: str = '') -> str:
"""
This method is used to query a libref. The results show information about the libref including members.
:param libref: the libref to query
:return:
"""
code = "proc datasets"
if libref:
code += " dd=" + libref
code += "; quit;"
if self.nosub:
print(code)
else:
ll = self._io.submit(code, "text")
if self.batch:
return ll['LOG'].rsplit(";*\';*\";*/;\n")[0]
else:
print(ll['LOG'].rsplit(";*\';*\";*/;\n")[0])
def read_csv(self, file: str, table: str = '_csv', libref: str = '', results: str = '',
opts: dict = None) -> 'SASdata':
"""
:param file: either the OS filesystem path of the file, or HTTP://... for a url accessible file
:param table: the name of the SAS Data Set to create
:param libref: the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
:param opts: a dictionary containing any of the following Proc Import options(datarow, delimiter, getnames, guessingrows)
:return: SASdata object
"""
opts = opts if opts is not None else {}
if results == '':
results = self.results
self._io.read_csv(file, table, libref, self.nosub, opts)
if self.exist(table, libref):
return SASdata(self, libref, table, results)
else:
return None
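    # Usage sketch for read_csv (hypothetical file path; the opts keys match
    # those handled by _impopts below):
    #
    #   cars = sas.read_csv('/tmp/cars.csv', table='cars', libref='work',
    #                       opts={'getnames': True, 'guessingrows': 'MAX'})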
def write_csv(self, file: str, table: str, libref: str = '',
dsopts: dict = None, opts: dict = None) -> str:
"""
:param file: the OS filesystem path of the file to be created (exported from the SAS Data Set)
:param table: the name of the SAS Data Set you want to export to a CSV file
:param libref: the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
:param dsopts: a dictionary containing any of the following SAS data set options(where, drop, keep, obs, firstobs)
:param opts: a dictionary containing any of the following Proc Export options(delimiter, putnames)
- where is a string
- keep are strings or list of strings.
- drop are strings or list of strings.
            - obs is a number - either string or int
            - firstobs is a number - either string or int
- format is a string or dictionary { var: format }
.. code-block:: python
{'where' : 'msrp < 20000 and make = "Ford"'
'keep' : 'msrp enginesize Cylinders Horsepower Weight'
'drop' : ['msrp', 'enginesize', 'Cylinders', 'Horsepower', 'Weight']
'obs' : 10
'firstobs' : '12'
'format' : {'money': 'dollar10', 'time': 'tod5.'}
}
:return: SAS log
"""
dsopts = dsopts if dsopts is not None else {}
opts = opts if opts is not None else {}
log = self._io.write_csv(file, table, libref, self.nosub, dsopts, opts)
if not self.batch:
print(log)
else:
return log
def df2sd(self, df: 'pd.DataFrame', table: str = '_df', libref: str = '',
results: str = '', keep_outer_quotes: bool = False) -> 'SASdata':
"""
This is an alias for 'dataframe2sasdata'. Why type all that?
:param df: :class:`pandas.DataFrame` Pandas Data Frame to import to a SAS Data Set
:param table: the name of the SAS Data Set to create
:param libref: the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
        :param keep_outer_quotes: the default is for SAS to strip outer quotes from delimited data. This lets you keep them
:return: SASdata object
"""
return self.dataframe2sasdata(df, table, libref, results, keep_outer_quotes)
def dataframe2sasdata(self, df: 'pd.DataFrame', table: str = '_df', libref: str = '',
results: str = '', keep_outer_quotes: bool = False) -> 'SASdata':
"""
This method imports a Pandas Data Frame to a SAS Data Set, returning the SASdata object for the new Data Set.
:param df: Pandas Data Frame to import to a SAS Data Set
:param table: the name of the SAS Data Set to create
:param libref: the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
        :param keep_outer_quotes: the default is for SAS to strip outer quotes from delimited data. This lets you keep them
:return: SASdata object
"""
if results == '':
results = self.results
if self.nosub:
print("too complicated to show the code, read the source :), sorry.")
return None
else:
self._io.dataframe2sasdata(df, table, libref, keep_outer_quotes)
if self.exist(table, libref):
return SASdata(self, libref, table, results)
else:
return None
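    # Usage sketch for df2sd / dataframe2sasdata (assumes pandas is available
    # and `sas` is an active SASsession):
    #
    #   import pandas as pd
    #   df = pd.DataFrame({'x': [1, 2, 3], 'y': ['a', 'b', 'c']})
    #   sd = sas.df2sd(df, table='mydata', libref='work')
    #   df2 = sas.sd2df('mydata', 'work')   # round-trip back to a DataFrame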
def sd2df(self, table: str, libref: str = '', dsopts: dict = None, method: str = 'MEMORY',
**kwargs) -> 'pd.DataFrame':
"""
This is an alias for 'sasdata2dataframe'. Why type all that?
SASdata object that refers to the Sas Data Set you want to export to a Pandas Data Frame
:param table: the name of the SAS Data Set you want to export to a Pandas Data Frame
:param libref: the libref for the SAS Data Set.
:param dsopts: a dictionary containing any of the following SAS data set options(where, drop, keep, obs, firstobs):
- where is a string
- keep are strings or list of strings.
- drop are strings or list of strings.
            - obs is a number - either string or int
            - firstobs is a number - either string or int
- format is a string or dictionary { var: format }
.. code-block:: python
{'where' : 'msrp < 20000 and make = "Ford"'
'keep' : 'msrp enginesize Cylinders Horsepower Weight'
'drop' : ['msrp', 'enginesize', 'Cylinders', 'Horsepower', 'Weight']
'obs' : 10
'firstobs' : '12'
'format' : {'money': 'dollar10', 'time': 'tod5.'}
}
:param method: defaults to MEMORY; the original method. CSV is the other choice which uses an intermediary csv file; faster for large data
:param kwargs: dictionary
:return: Pandas data frame
"""
dsopts = dsopts if dsopts is not None else {}
return self.sasdata2dataframe(table, libref, dsopts, method, **kwargs)
def sd2df_CSV(self, table: str, libref: str = '', dsopts: dict = None, tempfile: str = None, tempkeep: bool = False,
**kwargs) -> 'pd.DataFrame':
"""
This is an alias for 'sasdata2dataframe' specifying method='CSV'. Why type all that?
SASdata object that refers to the Sas Data Set you want to export to a Pandas Data Frame
:param table: the name of the SAS Data Set you want to export to a Pandas Data Frame
:param libref: the libref for the SAS Data Set.
:param dsopts: a dictionary containing any of the following SAS data set options(where, drop, keep, obs, firstobs):
- where is a string
- keep are strings or list of strings.
- drop are strings or list of strings.
            - obs is a number - either string or int
            - firstobs is a number - either string or int
- format is a string or dictionary { var: format }
.. code-block:: python
{'where' : 'msrp < 20000 and make = "Ford"'
'keep' : 'msrp enginesize Cylinders Horsepower Weight'
'drop' : ['msrp', 'enginesize', 'Cylinders', 'Horsepower', 'Weight']
'obs' : 10
'firstobs' : '12'
'format' : {'money': 'dollar10', 'time': 'tod5.'}
}
:param tempfile: [optional] an OS path for a file to use for the local CSV file; default it a temporary file that's cleaned up
:param tempkeep: if you specify your own file to use with tempfile=, this controls whether it's cleaned up after using it
:param kwargs: dictionary
:return: Pandas data frame
"""
dsopts = dsopts if dsopts is not None else {}
return self.sasdata2dataframe(table, libref, dsopts, method='CSV', tempfile=tempfile, tempkeep=tempkeep,
**kwargs)
def sasdata2dataframe(self, table: str, libref: str = '', dsopts: dict = None, method: str = 'MEMORY',
**kwargs) -> 'pd.DataFrame':
"""
This method exports the SAS Data Set to a Pandas Data Frame, returning the Data Frame object.
SASdata object that refers to the Sas Data Set you want to export to a Pandas Data Frame
:param table: the name of the SAS Data Set you want to export to a Pandas Data Frame
:param libref: the libref for the SAS Data Set.
:param dsopts: a dictionary containing any of the following SAS data set options(where, drop, keep, obs, firstobs):
- where is a string
- keep are strings or list of strings.
- drop are strings or list of strings.
            - obs is a number - either string or int
            - firstobs is a number - either string or int
- format is a string or dictionary { var: format }
.. code-block:: python
{'where' : 'msrp < 20000 and make = "Ford"'
'keep' : 'msrp enginesize Cylinders Horsepower Weight'
'drop' : ['msrp', 'enginesize', 'Cylinders', 'Horsepower', 'Weight']
'obs' : 10
'firstobs' : '12'
'format' : {'money': 'dollar10', 'time': 'tod5.'}
}
:param method: defaults to MEMORY; the original method. CSV is the other choice which uses an intermediary csv file; faster for large data
:param kwargs: dictionary
:return: Pandas data frame
"""
dsopts = dsopts if dsopts is not None else {}
if self.exist(table, libref) == 0:
print('The SAS Data Set ' + libref + '.' + table + ' does not exist')
return None
if self.nosub:
print("too complicated to show the code, read the source :), sorry.")
return None
else:
return self._io.sasdata2dataframe(table, libref, dsopts, method=method, **kwargs)
def _dsopts(self, dsopts):
"""
:param dsopts: a dictionary containing any of the following SAS data set options(where, drop, keep, obs, firstobs):
- where is a string or list of strings
- keep are strings or list of strings.
- drop are strings or list of strings.
            - obs is a number - either string or int
            - firstobs is a number - either string or int
- format is a string or dictionary { var: format }
.. code-block:: python
{'where' : 'msrp < 20000 and make = "Ford"'
'keep' : 'msrp enginesize Cylinders Horsepower Weight'
'drop' : ['msrp', 'enginesize', 'Cylinders', 'Horsepower', 'Weight']
'obs' : 10
'firstobs' : '12'
'format' : {'money': 'dollar10', 'time': 'tod5.'}
}
:return: str
"""
opts = ''
fmat = ''
if len(dsopts):
for key in dsopts:
if len(str(dsopts[key])):
if key == 'where':
if isinstance(dsopts[key], str):
opts += 'where=(' + dsopts[key] + ') '
elif isinstance(dsopts[key], list):
opts += 'where=(' + " and ".join(dsopts[key]) + ') '
else:
raise TypeError("Bad key type. {} must be a str or list type".format(key))
elif key == 'drop':
opts += 'drop='
if isinstance(dsopts[key], list):
for var in dsopts[key]:
opts += var + ' '
else:
opts += dsopts[key] + ' '
elif key == 'keep':
opts += 'keep='
if isinstance(dsopts[key], list):
for var in dsopts[key]:
opts += var + ' '
else:
opts += dsopts[key] + ' '
elif key == 'obs':
opts += 'obs=' + str(dsopts[key]) + ' '
elif key == 'firstobs':
opts += 'firstobs=' + str(dsopts[key]) + ' '
elif key == 'format':
if isinstance(dsopts[key], str):
fmat = 'format ' + dsopts[key] + ';'
elif isinstance(dsopts[key], dict):
fmat = 'format '
for k, v in dsopts[key].items():
fmat += ' '.join((k, v)) + ' '
fmat += ';'
else:
raise TypeError("Bad key type. {} must be a str or dict type".format(key))
if len(opts):
opts = '(' + opts + ')'
if len(fmat) > 0:
opts += ';\n\t' + fmat
elif len(fmat) > 0:
opts = ';\n\t' + fmat
return opts
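    # For illustration, a dsopts dict such as
    #   {'where': 'msrp < 20000', 'keep': ['msrp', 'make'], 'obs': 10}
    # is rendered by _dsopts above roughly as:
    #   (where=(msrp < 20000) keep=msrp make obs=10 )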
def _impopts(self, opts):
"""
:param opts: a dictionary containing any of the following Proc Import options(datarow, delimiter, getnames, guessingrows):
- datarow is a number
- delimiter is a character
- getnames is a boolean
- guessingrows is a numbers or the string 'MAX'
.. code-block:: python
{'datarow' : 2
             'delimiter' : ','
'getnames' : True
'guessingrows': 20
}
:return: str
"""
optstr = ''
if len(opts):
for key in opts:
if len(str(opts[key])):
if key == 'datarow':
optstr += 'datarow=' + str(opts[key]) + ';'
elif key == 'delimiter':
optstr += 'delimiter='
optstr += "'" + '%02x' % ord(opts[key].encode(self._io.sascfg.encoding)) + "'x; "
elif key == 'getnames':
optstr += 'getnames='
if opts[key]:
optstr += 'YES; '
else:
optstr += 'NO; '
elif key == 'guessingrows':
optstr += 'guessingrows='
if opts[key] == 'MAX':
optstr += 'MAX; '
else:
optstr += str(opts[key]) + '; '
return optstr
def _expopts(self, opts):
"""
:param opts: a dictionary containing any of the following Proc Export options(delimiter, putnames):
- delimiter is a character
- putnames is a boolean
.. code-block:: python
            {'delimiter' : ','
'putnames' : True
}
:return: str
"""
optstr = ''
if len(opts):
for key in opts:
if len(str(opts[key])):
if key == 'delimiter':
optstr += 'delimiter='
optstr += "'" + '%02x' % ord(opts[key].encode(self._io.sascfg.encoding)) + "'x; "
elif key == 'putnames':
optstr += 'putnames='
if opts[key]:
optstr += 'YES; '
else:
optstr += 'NO; '
return optstr
def symput(self, name, value):
"""
        :param name: name of the macro variable to set:
:param value: python variable to use for the value to assign to the macro variable:
- name is a character
- value is a variable that can be resolved to a string
"""
ll = self.submit("%let " + name + "=%NRBQUOTE(" + str(value) + ");\n")
def symget(self, name):
"""
        :param name: name of the macro variable to get:
- name is a character
"""
ll = self.submit("%put " + name + "=&" + name + ";\n")
l2 = ll['LOG'].rpartition(name + "=")
l2 = l2[2].partition("\n")
try:
var = int(l2[0])
except:
try:
var = float(l2[0])
except:
var = l2[0]
return var
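    # Usage sketch for the macro variable helpers above ('tabname' is a
    # hypothetical macro variable name):
    #
    #   sas.symput('tabname', 'cars')
    #   name = sas.symget('tabname')   # returns 'cars'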
def disconnect(self):
"""
This method disconnects an IOM session to allow for reconnecting when switching networks
See the Advanced topics section of the doc for details
"""
if self.sascfg.mode != 'IOM':
res = "This method is only available with the IOM access method"
else:
res = self._io.disconnect()
return res
def SYSINFO(self):
"""
This method returns the SAS Automatic Macro Variable SYSINFO which
contains return codes provided by some SAS procedures.
"""
return self.symget("SYSINFO")
def SYSERR(self):
"""
This method returns the SAS Automatic Macro Variable SYSERR which
contains a return code status set by some SAS procedures and the DATA step.
"""
return self.symget("SYSERR")
def SYSERRORTEXT(self):
"""
This method returns the SAS Automatic Macro Variable SYSERRORTEXT which
is the text of the last error message generated in the SAS log.
"""
return self.symget("SYSERRORTEXT")
def SYSWARNINGTEXT(self):
"""
This method returns the SAS Automatic Macro Variable SYSWARNINGTEXT which
is the text of the last warning message generated in the SAS log.
"""
return self.symget("SYSWARNINGTEXT")
def SYSFILRC(self):
"""
This method returns the SAS Automatic Macro Variable SYSFILRC which
identifies whether or not the last FILENAME statement executed correctly.
"""
return self.symget("SYSFILRC")
def SYSLIBRC(self):
"""
This method returns the SAS Automatic Macro Variable SYSLIBRC which
reports whether the last LIBNAME statement executed correctly.
"""
return self.symget("SYSLIBRC")
def assigned_librefs(self):
"""
This method returns the list of currently assigned librefs
"""
code = """
data _null_; retain libref; retain cobs 1;
set sashelp.vlibnam end=last;
if cobs EQ 1 then
put "LIBREFSSTART";
cobs = 2;
if libref NE libname then
put "LIBREF=" libname;
libref = libname;
if last then
put "LIBREFSEND";
run;
"""
ll = self.submit(code, results='text')
librefs = []
log = ll['LOG'].rpartition('LIBREFSEND')[0].rpartition('LIBREFSSTART')
for i in range(log[2].count('LIBREF=')):
log = log[2].partition('LIBREF=')[2].partition('\n')
librefs.append(log[0].strip())
return librefs
def dirlist(self, path):
"""
This method returns the directory list for the path specified where SAS is running
"""
code = """
data _null_;
spd = '""" + path + """';
rc = filename('saspydir', spd);
did = dopen('saspydir');
if did > 0 then
do;
memcount = dnum(did);
put 'MEMCOUNT=' memcount;
do while (memcount > 0);
name = dread(did, memcount);
memcount = memcount - 1;
qname = spd || '"""+self.hostsep+"""' || name;
rc = filename('saspydq', qname);
dq = dopen('saspydq');
if dq NE 0 then
do;
dname = strip(name) || '"""+self.hostsep+"""';
put 'DIR=' dname;
rc = dclose(dq);
end;
else
put 'FILE=' name;
end;
put 'MEMEND';
rc = dclose(did);
end;
else
do;
put 'MEMCOUNT=0';
put 'MEMEND';
end;
rc = filename('saspydq');
rc = filename('saspydir');
run;
"""
ll = self.submit(code, results='text')
dirlist = []
l2 = ll['LOG'].rpartition("MEMCOUNT=")[2].partition("\n")
memcount = int(l2[0])
l3 = l2[2].rpartition("MEMEND")[0]
for row in l3.split(sep='\n'):
i = row.partition('=')
if i[0] in ['FILE', 'DIR']:
dirlist.append(i[2])
if memcount != len(dirlist):
print("Some problem parsing list. Should be " + str(memcount) + " entries but got " + str(
len(dirlist)) + " instead.")
return dirlist
def list_tables(self, libref, results: str = 'list'):
"""
This method returns a list of tuples containing MEMNAME, MEMTYPE of members in the library of memtype data or view
If you would like a Pandas dataframe returned instead of a list, specify results='pandas'
"""
ll = self.submit("%put LIBREF_EXISTS=%sysfunc(libref("+libref+"));")
exists = ll['LOG'].rsplit('LIBREF_EXISTS=')[2].split('\n')[0]
if exists != '0':
print('Libref provided is not assigned')
return None
code = """
proc datasets dd=librefx nodetails nolist noprint;
contents memtype=(data view) nodetails
dir out=work._saspy_lib_list(keep=memname memtype) data=_all_ noprint;
run;
proc sql;
create table work._saspy_lib_list as select distinct * from work._saspy_lib_list;
quit;
""".replace('librefx', libref)
ll = self.submit(code, results='text')
if results != 'list':
res = self.sd2df('_saspy_lib_list', 'work')
if res is None:
                res = pd.DataFrame.from_records([], columns=['MEMNAME', 'MEMTYPE'])
return res
code = """
data _null_;
set work._saspy_lib_list end=last curobs=first;
if first EQ 1 then
put 'MEMSTART';
put 'MEMNAME=' memname;
put 'MEMTYPE=' memtype;
if last then
put 'MEMEND';
run;
"""
ll = self.submit(code, results='text')
res = []
log = ll['LOG'].rpartition('MEMEND')[0].rpartition('MEMSTART')
for i in range(log[2].count('MEMNAME')):
log = log[2].partition('MEMNAME=')[2].partition('\n')
key = log[0]
log = log[2].partition('MEMTYPE=')[2].partition('\n')
val = log[0]
res.append(tuple((key, val)))
return res
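    # Usage sketch for list_tables (sashelp is a standard SAS library):
    #
    #   tables = sas.list_tables('sashelp')             # e.g. [('CARS', 'DATA'), ...]
    #   df = sas.list_tables('sashelp', results='pandas')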
def file_info(self, filepath, results: str = 'dict', fileref: str = '_spfinfo'):
"""
        This method returns a dictionary containing the file attributes for the file name provided
If you would like a Pandas dataframe returned instead of a dictionary, specify results='pandas'
"""
code = "filename "+fileref+" '"+filepath+"';\n"
code += "%put FILEREF_EXISTS=%sysfunc(fexist("+fileref+"));"
ll = self.submit(code)
exists = ll['LOG'].rsplit('FILEREF_EXISTS=')[2].split('\n')[0]
if exists != '1':
print('The filepath provided does not exist')
ll = self.submit("filename "+fileref+" clear;")
return None
if results != 'dict':
code="""
proc delete data=work._SASPY_FILE_INFO;run;
data work._SASPY_FILE_INFO;
length infoname infoval $60;
drop rc fid infonum i close;
fid=fopen('filerefx');
if fid then
do;
infonum=foptnum(fid);
do i=1 to infonum;
infoname=foptname(fid, i);
infoval=finfo(fid, infoname);
output;
end;
end;
close=fclose(fid);
rc = filename('filerefx');
run;
""".replace('filerefx', fileref)
ll = self.submit(code, results='text')
res = self.sd2df('_SASPY_FILE_INFO', 'work')
if res is None:
                res = pd.DataFrame.from_records([], columns=['infoname', 'infoval'])
return res
code="""
data _null_;
length infoname infoval $60;
drop rc fid infonum i close;
put 'INFOSTART';
fid=fopen('filerefx');
if fid then
do;
infonum=foptnum(fid);
do i=1 to infonum;
infoname=foptname(fid, i);
infoval=finfo(fid, infoname);
put 'INFONAME=' infoname;
put 'INFOVAL=' infoval;
end;
end;
put 'INFOEND';
close=fclose(fid);
rc = filename('filerefx');
run;
""".replace('filerefx', fileref)
ll = self.submit(code, results='text')
res = {}
log = ll['LOG'].rpartition('INFOEND')[0].rpartition('INFOSTART')
for i in range(log[2].count('INFONAME')):
log = log[2].partition('INFONAME=')[2].partition('\n')
key = log[0]
log = log[2].partition('INFOVAL=')[2].partition('\n')
val = log[0]
res[key] = val
return res
if __name__ == "__main__":
    # Minimal smoke test: run the SAS code passed as the first argument.
    # Session teardown is handled by SASsession.__del__.
    sas = SASsession()
    ll = sas.submit(sys.argv[1], "text")
    print(ll['LOG'])
    print(ll['LST'])
sas_date_fmts = (
'AFRDFDD', 'AFRDFDE', 'AFRDFDE', 'AFRDFDN', 'AFRDFDWN', 'AFRDFMN', 'AFRDFMY', 'AFRDFMY', 'AFRDFWDX', 'AFRDFWKX',
'ANYDTDTE', 'B8601DA', 'B8601DA', 'B8601DJ', 'CATDFDD', 'CATDFDE', 'CATDFDE', 'CATDFDN', 'CATDFDWN', 'CATDFMN',
'CATDFMY', 'CATDFMY', 'CATDFWDX', 'CATDFWKX', 'CRODFDD', 'CRODFDE', 'CRODFDE', 'CRODFDN', 'CRODFDWN', 'CRODFMN',
'CRODFMY', 'CRODFMY', 'CRODFWDX', 'CRODFWKX', 'CSYDFDD', 'CSYDFDE', 'CSYDFDE', 'CSYDFDN', 'CSYDFDWN', 'CSYDFMN',
'CSYDFMY', 'CSYDFMY', 'CSYDFWDX', 'CSYDFWKX', 'DANDFDD', 'DANDFDE', 'DANDFDE', 'DANDFDN', 'DANDFDWN', 'DANDFMN',
'DANDFMY', 'DANDFMY', 'DANDFWDX', 'DANDFWKX', 'DATE', 'DATE', 'DAY', 'DDMMYY', 'DDMMYY', 'DDMMYYB', 'DDMMYYC',
'DDMMYYD', 'DDMMYYN', 'DDMMYYP', 'DDMMYYS', 'DESDFDD', 'DESDFDE', 'DESDFDE', 'DESDFDN', 'DESDFDWN', 'DESDFMN',
'DESDFMY', 'DESDFMY', 'DESDFWDX', 'DESDFWKX', 'DEUDFDD', 'DEUDFDE', 'DEUDFDE', 'DEUDFDN', 'DEUDFDWN', 'DEUDFMN',
'DEUDFMY', 'DEUDFMY', 'DEUDFWDX', 'DEUDFWKX', 'DOWNAME', 'E8601DA', 'E8601DA', 'ENGDFDD', 'ENGDFDE', 'ENGDFDE',
'ENGDFDN', 'ENGDFDWN', 'ENGDFMN', 'ENGDFMY', 'ENGDFMY', 'ENGDFWDX', 'ENGDFWKX', 'ESPDFDD', 'ESPDFDE', 'ESPDFDE',
'ESPDFDN', 'ESPDFDWN', 'ESPDFMN', 'ESPDFMY', 'ESPDFMY', 'ESPDFWDX', 'ESPDFWKX', 'EURDFDD', 'EURDFDE', 'EURDFDE',
'EURDFDN', 'EURDFDWN', 'EURDFMN', 'EURDFMY', 'EURDFMY', 'EURDFWDX', 'EURDFWKX', 'FINDFDD', 'FINDFDE', 'FINDFDE',
'FINDFDN', 'FINDFDWN', 'FINDFMN', 'FINDFMY', 'FINDFMY', 'FINDFWDX', 'FINDFWKX', 'FRADFDD', 'FRADFDE', 'FRADFDE',
'FRADFDN', 'FRADFDWN', 'FRADFMN', 'FRADFMY', 'FRADFMY', 'FRADFWDX', 'FRADFWKX', 'FRSDFDD', 'FRSDFDE', 'FRSDFDE',
'FRSDFDN', 'FRSDFDWN', 'FRSDFMN', 'FRSDFMY', 'FRSDFMY', 'FRSDFWDX', 'FRSDFWKX', 'HUNDFDD', 'HUNDFDE', 'HUNDFDE',
'HUNDFDN', 'HUNDFDWN', 'HUNDFMN', 'HUNDFMY', 'HUNDFMY', 'HUNDFWDX', 'HUNDFWKX', 'IS8601DA', 'IS8601DA', 'ITADFDD',
'ITADFDE', 'ITADFDE', 'ITADFDN', 'ITADFDWN', 'ITADFMN', 'ITADFMY', 'ITADFMY', 'ITADFWDX', 'ITADFWKX', 'JDATEMD',
'JDATEMDW', 'JDATEMNW', 'JDATEMON', 'JDATEQRW', 'JDATEQTR', 'JDATESEM', 'JDATESMW', 'JDATEWK', 'JDATEYDW', 'JDATEYM',
'JDATEYMD', 'JDATEYMD', 'JDATEYMW', 'JNENGO', 'JNENGO', 'JNENGOW', 'JULDATE', 'JULDAY', 'JULIAN', 'JULIAN', 'MACDFDD',
'MACDFDE', 'MACDFDE', 'MACDFDN', 'MACDFDWN', 'MACDFMN', 'MACDFMY', 'MACDFMY', 'MACDFWDX', 'MACDFWKX', 'MINGUO',
'MINGUO', 'MMDDYY', 'MMDDYY', 'MMDDYYB', 'MMDDYYC', 'MMDDYYD', 'MMDDYYN', 'MMDDYYP', 'MMDDYYS', 'MMYY', 'MMYYC',
'MMYYD', 'MMYYN', 'MMYYP', 'MMYYS', 'MONNAME', 'MONTH', 'MONYY', 'MONYY', 'ND8601DA', 'NENGO', 'NENGO', 'NLDATE',
'NLDATE', 'NLDATEL', 'NLDATEM', 'NLDATEMD', 'NLDATEMDL', 'NLDATEMDM', 'NLDATEMDS', 'NLDATEMN', 'NLDATES', 'NLDATEW',
'NLDATEW', 'NLDATEWN', 'NLDATEYM', 'NLDATEYML', 'NLDATEYMM', 'NLDATEYMS', 'NLDATEYQ', 'NLDATEYQL', 'NLDATEYQM',
'NLDATEYQS', 'NLDATEYR', 'NLDATEYW', 'NLDDFDD', 'NLDDFDE', 'NLDDFDE', 'NLDDFDN', 'NLDDFDWN', 'NLDDFMN', 'NLDDFMY',
'NLDDFMY', 'NLDDFWDX', 'NLDDFWKX', 'NORDFDD', 'NORDFDE', 'NORDFDE', 'NORDFDN', 'NORDFDWN', 'NORDFMN', 'NORDFMY',
'NORDFMY', 'NORDFWDX', 'NORDFWKX', 'POLDFDD', 'POLDFDE', 'POLDFDE', 'POLDFDN', 'POLDFDWN', 'POLDFMN', 'POLDFMY',
'POLDFMY', 'POLDFWDX', 'POLDFWKX', 'PTGDFDD', 'PTGDFDE', 'PTGDFDE', 'PTGDFDN', 'PTGDFDWN', 'PTGDFMN', 'PTGDFMY',
'PTGDFMY', 'PTGDFWDX', 'PTGDFWKX', 'QTR', 'QTRR', 'RUSDFDD', 'RUSDFDE', 'RUSDFDE', 'RUSDFDN', 'RUSDFDWN', 'RUSDFMN',
'RUSDFMY', 'RUSDFMY', 'RUSDFWDX', 'RUSDFWKX', 'SLODFDD', 'SLODFDE', 'SLODFDE', 'SLODFDN', 'SLODFDWN', 'SLODFMN',
'SLODFMY', 'SLODFMY', 'SLODFWDX', 'SLODFWKX', 'SVEDFDD', 'SVEDFDE', 'SVEDFDE', 'SVEDFDN', 'SVEDFDWN', 'SVEDFMN',
'SVEDFMY', 'SVEDFMY', 'SVEDFWDX', 'SVEDFWKX', 'WEEKDATE', 'WEEKDATX', 'WEEKDAY', 'WEEKU', 'WEEKU', 'WEEKV', 'WEEKV',
'WEEKW', 'WEEKW', 'WORDDATE', 'WORDDATX', 'XYYMMDD', 'XYYMMDD', 'YEAR', 'YYMM', 'YYMMC', 'YYMMD', 'YYMMDD', 'YYMMDD',
'YYMMDDB', 'YYMMDDC', 'YYMMDDD', 'YYMMDDN', 'YYMMDDP', 'YYMMDDS', 'YYMMN', 'YYMMN', 'YYMMP', 'YYMMS', 'YYMON', 'YYQ',
'YYQ', 'YYQC', 'YYQD', 'YYQN', 'YYQP', 'YYQR', 'YYQRC', 'YYQRD', 'YYQRN', 'YYQRP', 'YYQRS', 'YYQS', 'YYQZ', 'YYQZ',
'YYWEEKU', 'YYWEEKV', 'YYWEEKW',
)
sas_time_fmts = (
'ANYDTTME', 'B8601LZ', 'B8601LZ', 'B8601TM', 'B8601TM', 'B8601TZ', 'B8601TZ', 'E8601LZ', 'E8601LZ', 'E8601TM',
'E8601TM', 'E8601TZ', 'E8601TZ', 'HHMM', 'HOUR', 'IS8601LZ', 'IS8601LZ', 'IS8601TM', 'IS8601TM', 'IS8601TZ',
'IS8601TZ', 'JTIMEH', 'JTIMEHM', 'JTIMEHMS', 'JTIMEHW', 'JTIMEMW', 'JTIMESW', 'MMSS', 'ND8601TM', 'ND8601TZ',
'NLTIMAP', 'NLTIMAP', 'NLTIME', 'NLTIME', 'STIMER', 'TIME', 'TIMEAMPM', 'TOD',
)
sas_datetime_fmts = (
'AFRDFDT', 'AFRDFDT', 'ANYDTDTM', 'B8601DN', 'B8601DN', 'B8601DT', 'B8601DT', 'B8601DZ', 'B8601DZ', 'CATDFDT',
'CATDFDT', 'CRODFDT', 'CRODFDT', 'CSYDFDT', 'CSYDFDT', 'DANDFDT', 'DANDFDT', 'DATEAMPM', 'DATETIME', 'DATETIME',
'DESDFDT', 'DESDFDT', 'DEUDFDT', 'DEUDFDT', 'DTDATE', 'DTMONYY', 'DTWKDATX', 'DTYEAR', 'DTYYQC', 'E8601DN',
'E8601DN', 'E8601DT', 'E8601DT', 'E8601DZ', 'E8601DZ', 'ENGDFDT', 'ENGDFDT', 'ESPDFDT', 'ESPDFDT', 'EURDFDT',
'EURDFDT', 'FINDFDT', 'FINDFDT', 'FRADFDT', 'FRADFDT', 'FRSDFDT', 'FRSDFDT', 'HUNDFDT', 'HUNDFDT', 'IS8601DN',
'IS8601DN', 'IS8601DT', 'IS8601DT', 'IS8601DZ', 'IS8601DZ', 'ITADFDT', 'ITADFDT', 'JDATEYT', 'JDATEYTW', 'JNENGOT',
'JNENGOTW', 'MACDFDT', 'MACDFDT', 'MDYAMPM', 'MDYAMPM', 'ND8601DN', 'ND8601DT', 'ND8601DZ', 'NLDATM', 'NLDATM',
'NLDATMAP', 'NLDATMAP', 'NLDATMDT', 'NLDATML', 'NLDATMM', 'NLDATMMD', 'NLDATMMDL', 'NLDATMMDM', 'NLDATMMDS',
'NLDATMMN', 'NLDATMS', 'NLDATMTM', 'NLDATMTZ', 'NLDATMW', 'NLDATMW', 'NLDATMWN', 'NLDATMWZ', 'NLDATMYM', 'NLDATMYML',
'NLDATMYMM', 'NLDATMYMS', 'NLDATMYQ', 'NLDATMYQL', 'NLDATMYQM', 'NLDATMYQS', 'NLDATMYR', 'NLDATMYW', 'NLDATMZ',
'NLDDFDT', 'NLDDFDT', 'NORDFDT', 'NORDFDT', 'POLDFDT', 'POLDFDT', 'PTGDFDT', 'PTGDFDT', 'RUSDFDT', 'RUSDFDT',
'SLODFDT', 'SLODFDT', 'SVEDFDT', 'SVEDFDT', 'TWMDY', 'YMDDTTM',
)
| StarcoderdataPython |
9701695 | from django.conf.urls.defaults import patterns
from views import *
urlpatterns = patterns('',
(r'^$', presence),
(r'^panel/$', panel),
(r'^error/(\d+)/$', error),
(r'^accounts/login/$', entrar),
(r'^accounts/logout/$', salir),
(r'^cambio/$', cambioPassword),
(r'^registroAlumno/$', registroAlumno),
(r'^registroEmpresa/$', registroEmpresa),
(r'^editarEmpresa/(\d+)/$', editarEmpresa),
(r'^registroLaboral/(\d+)?/?$', registroLaboral),
(r'^verLaborals/(\d+)?/?$', verLaborals),
(r'^editarLaboral/(\d+)/$', editarLaboral),
(r'^registroDocente/$', registroDocente),
(r'^registroCompleto/$', registroCompleto),
(r'^FichaPdf/$', FichaPdf),
(r'^insertarSeguimiento/$', insertarSeguimiento),
(r'^editarSeguimiento/(\d+)/$', editarSeguimiento),
(r'^verSeguimiento/(\d+)?/?$', verSeguimiento),
(r'^borrarSeguimiento/(\d+)/$', borrarSeguimiento),
(r'^verEmpresas/$', verEmpresas),
(r'^verAlumnos/$', verAlumnos),
(r'^verDocentes/$', verDocentes),
(r'^ProgramaPdf/(\S+)/$', ProgramaPdf),
(r'^ContratoPdf/(\S+)/$', ContratoPdf),
(r'^verContratos/(\d+)?/?$', verContratos),
(r'^verContrato/(\d+)/$', verContrato),
(r'^crearContrato/(\d+)/$', crearContrato),
(r'^editarContrato/(\d+)/$', editarContrato),
(r'^crearGerente/(\d+)/$', crearGerente),
(r'^editarGerente/(\d+)/$', editarGerente),
(r'^verGerentes/(\d+)/$', verGerentes),
(r'^verCentros/(\d+)?/?$', verCentros),
(r'^crearCentro/(\d+)?/?$', crearCentro),
(r'^editarCentro/(\d+)/$', editarCentro),
(r'^verAlumnosEmpresa/(\d+)/$', verAlumnosEmpresa),
(r'^activaUsuario/(\d+)/$', activaUsuario),
(r'^desactivaUsuario/(\d+)/$', desactivaUsuario),
(r'^hablaUsuario/(\d+)/$', hablaUsuario),
(r'^nohablaUsuario/(\d+)/$', nohablaUsuario),
(r'^updateUser/$', updateUser),
(r'^updateComplete/$', updateComplete),
(r'^confirmaContrato/(\d+)/$', confirmaContrato),
(r'^desconfirmaContrato/(\d+)/$', desconfirmaContrato),
(r'^crearVisita/(\d+)/$', crearVisita),
(r'^editarVisita/(\d+)/$', editarVisita),
(r'^verVisitas/(\d+)?/?$', verVisitas),
(r'^verVisita/(\d+)/$', verVisita),
(r'^VisitaPdf/(\S+)/$', VisitaPdf),
(r'^verVisitasAlu/(\d+)/$', verVisitasAlu),
(r'^crearVisitaAlu/(\d+)/$', crearVisitaAlu),
(r'^borrarVisita/(\d+)/$', borrarVisita),
(r'^activar/(\S+)/$', activar),
(r'^norecuerdo/$', restaurarPassword),
)
| StarcoderdataPython |
5170131 | from django.db import models
class BattleRequest(models.Model):
id = models.BigAutoField(primary_key=True)
initiator = models.BigIntegerField()
matchmaker_type = models.IntegerField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
db_table = 'battle_requests'
unique_together = (('matchmaker_type', 'initiator'),)
class Battle(models.Model):
id = models.BigAutoField(primary_key=True)
matchmaker_type = models.IntegerField(db_index=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = 'battles'
class BattleParticipant(models.Model):
id = models.BigAutoField(primary_key=True)
battle = models.ForeignKey(Battle, db_column='battle', on_delete=models.CASCADE)
participant = models.BigIntegerField(unique=True)
created_at = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = 'battles_participants'
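# Usage sketch for these models (a hypothetical matchmaking flow, e.g. from a
# view or background task; the player ids are illustrative):
#
#   BattleRequest.objects.create(initiator=1001, matchmaker_type=1)
#   battle = Battle.objects.create(matchmaker_type=1)
#   BattleParticipant.objects.create(battle=battle, participant=1001)
#   BattleParticipant.objects.create(battle=battle, participant=1002)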
| StarcoderdataPython |
1981832 | import lib.logger as logging
from lib.functions import wait_until, r_sleep
from lib.game import ui
from lib.game.notifications import Notifications
logger = logging.get_logger(__name__)
class Alliance(Notifications):
"""Class for working with Alliance."""
class STORE_ITEM:
ENERGY = "ALLIANCE_STORE_ENERGY_ITEM_1"
UNIFORM_EXP_CHIP = "ALLIANCE_STORE_UNIFORM_EXP_CHIP_ITEM_2"
HIDDEN_TICKET = "ALLIANCE_STORE_HIDDEN_TICKET_ITEM_3"
BOOST_POINT = "ALLIANCE_STORE_BOOST_POINT_ITEM_4"
class SUPPORT_ITEM:
NORN_STONE_OF_STRENGTH = "ALLIANCE_SUPPORT_REQUEST_NORN_STONE_OF_STRENGTH"
NORN_STONE_OF_ENERGY = "ALLIANCE_SUPPORT_REQUEST_NORN_STONE_OF_ENERGY"
NORN_STONE_OF_BRILLIANCE = "ALLIANCE_SUPPORT_REQUEST_NORN_STONE_OF_BRILLIANCE"
NORN_STONE_OF_OMNIPOTENCE = "ALLIANCE_SUPPORT_REQUEST_NORN_STONE_OF_OMNIPOTENCE"
BLACK_ANTI_MATTER = "ALLIANCE_SUPPORT_REQUEST_BLACK_ANTI_MATTER"
NORN_STONE_OF_CHAOS = "ALLIANCE_SUPPORT_REQUEST_NORN_STONE_OF_CHAOS"
MKRAAN_SHARD = "ALLIANCE_SUPPORT_REQUEST_MKRAAN_SHARD"
PHOENIX_FEATHER = "ALLIANCE_SUPPORT_REQUEST_PHOENIX_FEATHER"
MKRAAN_CRYSTAL = "ALLIANCE_SUPPORT_REQUEST_MKRAAN_CRYSTAL"
GEAR_UP_KIT = "ALLIANCE_SUPPORT_REQUEST_GEAR_UP_KIT"
DIMENSION_DEBRIS = "ALLIANCE_SUPPORT_REQUEST_DIMENSION_DEBRIS"
ON_SECOND_LIST = [MKRAAN_SHARD, PHOENIX_FEATHER, MKRAAN_CRYSTAL, GEAR_UP_KIT, DIMENSION_DEBRIS]
def check_in(self):
"""Clicks Check-In button in Alliance."""
self.game.go_to_alliance()
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_CHECK_IN):
self.emulator.click_button(ui.ALLIANCE_CHECK_IN)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_CHECK_IN_CLOSE):
self.emulator.click_button(ui.ALLIANCE_CHECK_IN_CLOSE)
self.game.go_to_main_menu()
def donate_resources(self, donate_gold=True, donate_memento=True):
"""Donates resources to Alliance
:param bool donate_gold: True or False.
:param bool donate_memento: True or False.
"""
if not donate_gold and not donate_memento:
logger.info("Nothing to donate.")
return self.game.go_to_main_menu()
self.game.go_to_alliance()
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_DONATE):
self.emulator.click_button(ui.ALLIANCE_DONATE)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_DONATION_MENU):
if donate_gold:
logger.debug("Maxing GOLD for donation.")
self.emulator.click_button(ui.ALLIANCE_DONATION_MAX_GOLD)
if donate_memento:
logger.debug("Maxing ALLIANCE MEMENTO for donation.")
self.emulator.click_button(ui.ALLIANCE_DONATION_MAX_MEMENTO)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_DONATION_CONFIRM):
logger.info("Donating resources for Alliance.")
self.emulator.click_button(ui.ALLIANCE_DONATION_CONFIRM)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_DONATION_REWARD_CLOSE):
logger.info("Resources were donated, exiting.")
self.emulator.click_button(ui.ALLIANCE_DONATION_REWARD_CLOSE)
else:
logger.warning("Can't donate resource for Alliance. Probably already donated, exiting.")
self.emulator.click_button(ui.ALLIANCE_DONATION_CANCEL)
self.game.go_to_main_menu()
def buy_items_from_store(self, items=None, buy_all_available=True):
"""Buys items from Alliance Store.
:param str | list[str] items: list of names of UI Elements of items to buy.
:param bool buy_all_available: buy all available copies of item for today or not.
"""
self.game.go_to_alliance()
if not wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_STORE_TAB):
logger.error(f"Can't find {ui.ALLIANCE_STORE_TAB} tab, exiting.")
return self.game.go_to_main_menu()
self.emulator.click_button(ui.ALLIANCE_STORE_TAB)
self.game.close_ads()
if isinstance(items, str):
items = [items]
for item in items:
logger.debug(f"Trying to buy {item}.")
bought = self._buy_item_once(item)
if buy_all_available and bought:
while bought:
logger.debug(f"Trying to buy {item} again.")
bought = self._buy_item_once(item)
self.game.go_to_main_menu()
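    # Usage sketch (assumes `alliance` is an instance of this class wired into
    # the bot's game/emulator objects elsewhere in the framework):
    #
    #   alliance.buy_items_from_store(
    #       items=[Alliance.STORE_ITEM.ENERGY, Alliance.STORE_ITEM.BOOST_POINT],
    #       buy_all_available=True)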
def _buy_item_once(self, item):
"""Buys item from Alliance Store once.
:param str item: name of the UI element of the item to buy.
:return: was item bought or not.
:rtype: bool
"""
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.get_by_name(item)):
self.emulator.click_button(ui.get_by_name(item))
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_STORE_PURCHASE):
logger.debug("Purchasing via Alliance Tokens.")
self.emulator.click_button(ui.ALLIANCE_STORE_PURCHASE)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_STORE_PURCHASE_CLOSE):
logger.info("Item was bought.")
self.emulator.click_button(ui.ALLIANCE_STORE_PURCHASE_CLOSE)
return True
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_STORE_PURCHASE_NO_TOKENS):
logger.info("Not enough Alliance Tokens for purchase.")
self.emulator.click_button(ui.ALLIANCE_STORE_PURCHASE_NO_TOKENS)
return False
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_STORE_PURCHASE_LIMIT):
logger.info("Reached daily limit for purchasing.")
self.emulator.click_button(ui.ALLIANCE_STORE_PURCHASE_LIMIT)
return False
logger.warning(f"Item {item} was not found in the Alliance Store.")
return False
def request_support_item(self, support_item):
"""Requests Support item and collect previous request.
:param support_item: item to request.
"""
self.game.go_to_alliance()
if not wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_SUPPORT_TAB):
logger.error(f"Can't find {ui.ALLIANCE_SUPPORT_TAB} tab, exiting.")
return self.game.go_to_main_menu()
self.emulator.click_button(ui.ALLIANCE_SUPPORT_TAB)
self.claim_support_item()
if not wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_SUPPORT_REQUEST):
logger.warning("Can't request support item for now, exiting.")
return self.game.go_to_main_menu()
self.emulator.click_button(ui.ALLIANCE_SUPPORT_REQUEST)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_SUPPORT_REQUEST_MENU):
if support_item in self.SUPPORT_ITEM.ON_SECOND_LIST:
self._drag_support_item_list()
logger.debug(f"Sending support request for item {support_item}.")
self.emulator.click_button(ui.get_by_name(support_item))
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_SUPPORT_REQUEST_CONFIRM):
self.emulator.click_button(ui.ALLIANCE_SUPPORT_REQUEST_CONFIRM)
r_sleep(1) # Wait for animations
self.game.go_to_main_menu()
def claim_support_item(self):
"""Tries to claim available item from support request.
:return: was item claimed or not.
:rtype: bool
"""
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_SUPPORT_CLAIM):
logger.info("Claiming previous support request.")
self.emulator.click_button(ui.ALLIANCE_SUPPORT_CLAIM)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_SUPPORT_CLAIM_CLOSE):
self.emulator.click_button(ui.ALLIANCE_SUPPORT_CLAIM_CLOSE)
return True
return False
def _drag_support_item_list(self):
"""Drags Support Items list from top to bottom."""
logger.debug("Dragging list to the bottom.")
self.emulator.drag(ui.ALLIANCE_SUPPORT_REQUEST_MENU_DRAG_BOTTOM, ui.ALLIANCE_SUPPORT_REQUEST_MENU_DRAG_TOP)
r_sleep(1)
def collect_energy_from_challenges(self, collect_daily=True, collect_weekly=True):
"""Collects energy from Alliance Challenges.
:param bool collect_daily: collect daily rewards or not.
:param bool collect_weekly: collect weekly rewards or not.
"""
if not collect_daily and not collect_weekly:
logger.info("Nothing to collect.")
return self.game.go_to_main_menu()
self.game.go_to_alliance()
if not wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_CHALLENGES_TAB):
logger.error(f"Can't find {ui.ALLIANCE_CHALLENGES_TAB} tab, exiting.")
return self.game.go_to_main_menu()
self.emulator.click_button(ui.ALLIANCE_CHALLENGES_TAB)
if collect_daily and wait_until(self.emulator.is_ui_element_on_screen,
ui_element=ui.ALLIANCE_CHALLENGES_DAILY_ENERGY):
logger.info("Collecting daily energy from challenge.")
self.emulator.click_button(ui.ALLIANCE_CHALLENGES_DAILY_ENERGY)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_CHALLENGES_REWARD_CLOSE):
self.emulator.click_button(ui.ALLIANCE_CHALLENGES_REWARD_CLOSE)
if collect_weekly and wait_until(self.emulator.is_ui_element_on_screen,
ui_element=ui.ALLIANCE_CHALLENGES_WEEKLY_ENERGY):
logger.info("Collecting weekly energy from challenge.")
self.emulator.click_button(ui.ALLIANCE_CHALLENGES_WEEKLY_ENERGY)
if wait_until(self.emulator.is_ui_element_on_screen, ui_element=ui.ALLIANCE_CHALLENGES_REWARD_CLOSE):
self.emulator.click_button(ui.ALLIANCE_CHALLENGES_REWARD_CLOSE)
self.game.go_to_main_menu()
| StarcoderdataPython |
6488389 | <filename>rozipparser/tests/test_small_locality_parsing.py
# coding=utf-8
import unittest
from rozipparser.codeparser import CodeParser
class TestSmallLocalityParsing(unittest.TestCase):
def test_number_of_codes(self):
parser = CodeParser("rozipparser/tests/inputs/small_locality_input.xlsx")
codes = parser.get_codes()
self.assertEqual(len(codes), 9)
def test_code_correctness(self):
parser = CodeParser("rozipparser/tests/inputs/small_locality_input.xlsx")
codes = parser.get_codes()
first_code = codes[0]
self.assertEqual(first_code.county, u"Ilfov")
self.assertEqual(first_code.locality, u"Buftea")
self.assertIsNone(first_code.sector)
self.assertIsNone(first_code.street)
self.assertIsNone(first_code.house_number)
self.assertEqual(first_code.zip, u"070000")
self.assertIsNone(first_code.street_type)
| StarcoderdataPython |
4971234 | from rest_framework import serializers
from django.contrib.auth.models import User
from rest_framework.validators import UniqueValidator
from django.utils.timezone import now
from datetime import datetime
from django.contrib.humanize.templatetags.humanize import naturaltime
from forums.models import Forum
from threads.models import Thread
from posts.models import Post
class ForumListSerializer(serializers.ModelSerializer):
posts_count = serializers.SerializerMethodField()
threads_count = serializers.SerializerMethodField()
last_activity = serializers.SerializerMethodField()
class Meta:
model = Forum
fields = (
'slug',
'name',
'description',
'posts_count',
'threads_count',
'last_activity'
)
read_only_fields = ('slug',)
def get_posts_count(self, obj):
return Post.objects.filter(thread__forum=obj).count()
def get_threads_count(self, obj):
return Thread.objects.filter(forum=obj).count()
def get_last_activity(self, obj):
try:
thread = Thread.objects.filter(forum=obj).order_by('-last_activity').first()
if thread:
last_activity = {
'thread_id': thread.id,
'thread_name': thread.name,
'username': thread.creator.username,
'avatar': thread.creator.profile.avatar,
'pinned': thread.pinned,
'naturaltime': naturaltime(thread.created_at)
}
post = Post.objects.filter(thread__forum=obj).order_by('-created_at').first()
if post and post.created_at > thread.created_at:
last_activity = {
'thread_id': post.thread.id,
'thread_name': post.thread.name,
'username': post.creator.username,
'avatar': post.creator.profile.avatar,
'pinned': post.thread.pinned,
'naturaltime': naturaltime(post.created_at),
}
return last_activity
        except Exception:
            return None
class ForumCreateDeleteSerializer(serializers.ModelSerializer):
class Meta:
model = Forum
fields = (
'slug',
'name',
'description'
)
read_only_fields = ('slug',)
lookup_field = 'slug'
class ForumUpdateSerializer(serializers.ModelSerializer):
class Meta:
model = Forum
fields = (
'slug',
'name',
'description'
)
read_only_fields = ('slug', 'name')
lookup_field = 'slug'
class ForumDetailSerializer(serializers.ModelSerializer):
threads = serializers.SerializerMethodField()
class Meta:
model = Forum
fields = (
'slug',
'name',
'description',
'threads'
)
read_only_fields = ('slug',)
lookup_field = 'slug'
def get_threads(self, obj):
def get_last_activity(thread):
try:
post = Post.objects.filter(thread=thread).order_by('-created_at').first()
if post:
return {
'avatar': post.creator.profile.avatar,
'naturaltime': naturaltime(post.created_at),
'username': post.creator.username,
'name': post.creator.profile.name
}
return {
'avatar': thread.creator.profile.avatar,
'naturaltime': naturaltime(thread.created_at),
'username': thread.creator.username,
'name': thread.creator.profile.name
}
            except Exception:
                return None
def get_replies_count(thread):
return Post.objects.filter(thread=thread).count()
def get_detail(thread):
return {
'id': thread.id,
'name': thread.name,
'pinned': thread.pinned,
'creator': thread.creator.username,
'avatar': thread.creator.profile.avatar,
'naturaltime': naturaltime(thread.created_at),
'replies_count': get_replies_count(thread),
'last_activity': get_last_activity(thread)
}
try:
threads = Thread.objects.filter(forum=obj).order_by('-pinned', '-last_activity')
return map(get_detail, threads)
        except Exception:
            return []
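
# Usage sketch (assumes a populated Forum table):
# data = ForumListSerializer(Forum.objects.all(), many=True).data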
| StarcoderdataPython |
1606744 | <reponame>python20180319howmework/homework<gh_stars>0
a = input("Enter your height in meters: ")
b = input("Enter your weight in kilograms: ")
a = float(a)
b = float(b)
BMI = b / (a * a)
print("%.1f" % BMI)
if (BMI < 18.5):
    print("You're so thin, you can eat and drink as much as you like")
elif (18.5 <= BMI < 25):
    print("Bro! You're just eight abs away from being a model")
elif (25 <= BMI < 30):
    print("Watch yourself! You're carrying a bit too much fat")
else:
    print("Stop eating! Go run a few laps - you can be a heartthrob too")
| StarcoderdataPython |
9688153 | <gh_stars>1-10
from flask_on_fhir.restful_resources import CodeSystemResource
def test_code_system(client, fhir):
fhir.add_fhir_resource(CodeSystemResource)
res = client.get('/CodeSystem')
assert res.status_code == 200
assert res.json['resourceType'] == 'CodeSystem'
| StarcoderdataPython |
1919734 | <gh_stars>0
from random import choice
from itertools import product
from tkinter import *
def set_default(obj):
if isinstance(obj, set):
return list(obj)
raise TypeError
class Maze:
"""Лабиринт для игры.
Нужен, чтобы можно было добавлять в лабиринт произвольные стены."""
def __init__(self, size):
self.size = size
self.walls = {(x, y) for x, y in product((0, self.size[0] - 1), range(self.size[1]))}
self.walls |= {(x, y) for x, y in product(range(self.size[0]), (0, self.size[1] - 1))}
self.space = set(product(range(self.size[0]), range(self.size[1]))) - self.walls
class Snake:
"""Змея.
Представляется списком ячеек, которые занимает.
Тело змеи = голова змеи + хвост змеи.
За один игровой такт змея должна менять направление движения не больше одного раза - контролируется на уровне выше.
Направления движения змеи:
1 - left
2 - right
3 - up
4 - down
"""
def __init__(self, body: list[tuple[int, int]], direction: int):
self.cached_body = body[:]
self.cached_direction = direction
self.lock = False
self.body = body
self.direction = direction
self.moves = {'right': self.right, 'up': self.up, 'left': self.left, 'down': self.down}
def reset(self):
self.body = self.cached_body
self.direction = self.cached_direction
def up(self, event):
if not self.lock and self.direction in (1, 2):
self.lock = True
self.direction = 3
def left(self, event):
if not self.lock and self.direction in (3, 4):
self.lock = True
self.direction = 1
def right(self, event):
if not self.lock and self.direction in (3, 4):
self.lock = True
self.direction = 2
def down(self, event):
if not self.lock and self.direction in (1, 2):
self.lock = True
self.direction = 4
def move(self, target):
self.body = [target] + self.body[:-1]
def grow(self, target):
self.body = [target] + self.body
def get_target(self):
x, y = self.body[0]
if self.direction == 1:
x -= 1
if self.direction == 2:
x += 1
if self.direction == 3:
y -= 1
if self.direction == 4:
y += 1
self.lock = False
return x, y
class Key:
def __init__(self, key):
self.key = key
def connect(self, frame, fun):
frame.master.bind(self.key, fun)
class Controller:
def __init__(self, keys, snake: Snake, frame: Frame):
self.keys = keys
self.snake = snake
self.frame = frame
for direction in self.keys:
self.keys[direction].connect(self.frame, self.snake.moves[direction])
class Player:
def __init__(self, name, snake, color):
self.cached_color = color
self.name = name
self.snake = snake
self.color = None
self.crashed = False
self.score = 0
def game_over(self):
self.crashed = True
self.color = 'red'
def eat(self):
self.score += 1
def reset(self):
self.score = 0
self.color = self.cached_color
self.snake.reset()
self.crashed = False
class Game:
def __init__(self, players: list[Player], maze: Maze):
self.maze = maze
self.players = players
self.apple = None
self.speed = None
def reset(self):
self.apple = self.random_apple()
self.speed = 1
for player in self.players:
player.reset()
def random_apple(self):
free_space = self.maze.space - set.union(*[set(player.snake.body) for player in self.players])
if free_space:
return choice(list(free_space))
else:
return None
def forward(self):
snake_targets = {player: player.snake.get_target() for player in self.players}
for player in self.players:
if snake_targets[player] == self.apple:
player.snake.grow(snake_targets[player])
self.speed += 1
player.eat()
self.apple = None
else:
player.snake.move(snake_targets[player])
for player in self.players:
bodies = set(player.snake.body[1:])
for other in self.players:
if other != player:
bodies |= set(other.snake.body)
head = snake_targets[player]
if head in bodies | self.maze.walls:
player.game_over()
if not self.apple:
self.apple = self.random_apple()
return all(not player.crashed for player in self.players)
class GameFrame(Frame):
def __init__(self, parent, **options):
Frame.__init__(self, parent, **options)
self.pack(expand=YES, fill=BOTH)
self.after_id = None
self.maze = Maze((16, 10))
self.keys = {'right': Key('<Right>'), 'up': Key('<Up>'), 'left': Key('<Left>'), 'down': Key('<Down>')}
self.game = None
self.scale = 50
self.canvas = Canvas(self, width=self.scale * self.maze.size[0], height=self.scale * self.maze.size[1])
self.canvas.pack(expand=YES, fill=BOTH)
self.paint_maze(self.maze)
self.labels = {}
snake = Snake([(2, 2), (3, 2)], 2)
player = Player("Garry", snake, 'yellow')
self.controller = Controller(self.keys, snake, self)
self.game = Game([player], self.maze)
for player in self.game.players:
            self.labels[player] = Label(self, text=f"Player {player.name}: {player.score}")
self.labels[player].pack(side=TOP, expand=YES, fill=X)
Button(self, text="Pause", command=self.pause).pack(side=LEFT, expand=YES, fill=X)
Button(self, text="Start", command=self.start).pack(side=LEFT, expand=YES, fill=X)
Button(self, text="Quit", command=self.destroy).pack(side=RIGHT, expand=YES, fill=X)
def paint_snake(self, player):
head, *tail = player.snake.body
scale = self.scale
self.canvas.delete(player.name)
self.canvas.create_rectangle(head[0] * scale, head[1] * scale, head[0] * scale + scale, head[1] * scale + scale,
fill='black', outline='brown', width=3, tag=player.name)
for link in tail:
self.canvas.create_rectangle(link[0] * scale, link[1] * scale, link[0] * scale + scale, link[1] * scale + scale,
fill=player.color, outline='brown', width=3, tag=player.name)
        self.labels[player].config(text=f"Player {player.name}: {player.score}")
def paint(self):
for player in self.game.players:
self.paint_snake(player)
self.paint_apple()
def paint_apple(self):
scale = self.scale
apple = self.game.apple
self.canvas.delete("apple")
self.canvas.create_oval(apple[0] * scale, apple[1] * scale, apple[0] * scale + scale, apple[1] * scale + scale,
fill='red', outline='green', width=3, tag="apple")
def paint_maze(self, maze):
for wall in maze.walls:
self.canvas.create_rectangle(wall[0] * self.scale, wall[1] * self.scale, wall[0] * self.scale + self.scale,
wall[1] * self.scale + self.scale,
fill='black', outline='green', width=5)
def update(self):
if self.game.forward():
self.after_id = self.after(1000 // self.game.speed, self.update)
self.paint()
def pause(self):
if self.after_id:
self.after_cancel(self.after_id)
self.after_id = None
else:
self.update()
def start(self):
self.game.reset()
if self.after_id:
self.after_cancel(self.after_id)
self.after_id = None
self.update()
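
# Minimal launcher sketch (assumed entry point; GameFrame packs itself into
# its parent, so a bare Tk root is enough):
if __name__ == '__main__':
    root = Tk()
    root.title('Snake')
    GameFrame(root)
    root.mainloop()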
| StarcoderdataPython |
193168 | <reponame>sympolite/pridebot<filename>src/pridebot_v1_1.py<gh_stars>1-10
#=============================================================
#PRIDE BOT 1.1
#by sympolite
#github.com/sympolite
#femme pride flag by noodle.tumblr.com
#=============================================================
#core modules
import random
import time
import os
import io
#make sure to get these using pip
import discord
from discord.ext.commands import Bot
import requests
import numpy as np
from PIL import Image, ImageDraw, ImageFont
#i made this one - it's ~*~local~*~
import dictbuilder
#IMPORTANT VARIABLES ============================================================
client_token = '' #<-------- put your token here!
pridebot = Bot(command_prefix = "!", pm_help = True)
pride_flags_dictionary = {} #built on startup
#temp variables for logging who called what
msg = None
caller = None
#THE HELP PROMPT ===============================================================
help_prompt = """
PRIDE BOT 1.1
by sympolite
COMMANDS:
(Commands are listed like "!command parameter [attachment]".)
!helpme - DMs you this prompt.
!flags - Returns a list of flags.
!prideflag flag [transparent PNG file] - Superimposes the PNG file onto a 1000x1000 pride flag
of your choice. See below for a list of flags, or call !flags.
"""
#HELPER FUCNTIONS ==============================================================
def get_flag_help():
flags_prompt = ""
try:
with open('flaglist.txt', 'r') as flist:
for line in flist:
flags_prompt = flags_prompt + line
except FileNotFoundError:
flags_prompt = "WARNING: the administrator of this bot has not created \"flaglist.txt\", or it is missing!"
return flags_prompt
def random_name():
random_name = ""
#create a random image name
for i in range(8):
random_name += chr(random.randint(ord('a'),ord('z')))
return random_name
def save_image(pic_url):
filetype = pic_url[-4:].lower()
temp_file = 'temp/' + random_name() + filetype
with open(temp_file, 'wb') as handle:
response = requests.get(pic_url, stream=True)
if not response.ok:
print(response)
for block in response.iter_content(1024):
if not block:
break
handle.write(block)
return temp_file
def autocrop(base_image):
base_image_data = np.asarray(base_image)
#get alpha channel of all pixels (R = 0, G = 1, B = 2, A = 3)
image_data_bw = base_image_data[:,:,3]
#get all non-transparent pixels (alpha > 0)
non_empty_columns = np.where(image_data_bw.max(axis=0)>0)[0]
non_empty_rows = np.where(image_data_bw.max(axis=1)>0)[0]
if not (non_empty_columns.any() and non_empty_rows.any()):
return None
cropBox = (min(non_empty_rows), max(non_empty_rows), min(non_empty_columns), max(non_empty_columns))
im_data = base_image_data[cropBox[0]:cropBox[1]+1, cropBox[2]:cropBox[3]+1 , :]
autocropped_image = Image.fromarray(im_data)
return autocropped_image
#EVENTS========================================================================
@pridebot.event
async def on_ready():
print('Ready!')
@pridebot.event
async def on_message(message):
await pridebot.change_presence(game=discord.Game(name='!helpme'))
global msg
msg = message
await pridebot.process_commands(msg)
@pridebot.event
async def on_message_edit(before, after):
global msg
msg = after
await pridebot.process_commands(msg)
#COMMANDS=======================================================================
@pridebot.command()
async def helpme():
_msg = msg
await pridebot.send_message(_msg.author, content=f"```{help_prompt}```")
@pridebot.command()
async def flags():
_msg = msg
await pridebot.say(f"```{flags_prompt}```")
@pridebot.command() #to do: make this less monolithic and procedural
async def prideflag(arg):
arg = arg.lower()
#temporay copy of the message
_msg = msg
#check the goods
if arg not in pride_flags_dictionary:
await pridebot.say(f"ERROR: `{arg}` is currently not a valid pride flag name.")
return
if not _msg.attachments:
await pridebot.say("ERROR: There is no attached image.")
return
pic_url = _msg.attachments[0]['url']
filetype = pic_url[-4:].lower()
print(f'filetype = {filetype}')
if filetype != ".png":
await pridebot.say("ERROR: the attached image is not a PNG file.")
return
#and now, we save the image
temp_image = save_image(pic_url)
await pridebot.send_typing(_msg.channel)
base = Image.open(f'flags/{pride_flags_dictionary[arg]}.png').convert('RGBA')
top = Image.open(temp_image).convert('RGBA')
copy_base = base.copy()
copy_top = top.copy()
#autocrop and see if the image isn't blank
cropped_top = autocrop(copy_top)
if cropped_top is None:
await pridebot.say("ERROR: The image was blank.")
        os.remove(os.path.join(os.getcwd(), temp_image))  # final_file is not defined yet at this point
return
#get dims
width, height = cropped_top.size
flag_w, flag_h = copy_base.size
min_dim = min(flag_w/width, flag_h/height)
new_w = int(width * min_dim)
new_h = int(height * min_dim)
copy_top_resized = cropped_top.resize((new_w, new_h),resample=1)
#create a blank and paste the image onto it for compositing
blank = Image.new('RGBA', (flag_w,flag_h), (0,0,0,0))
xpos = (flag_w-new_w)//2
ypos = (flag_h-new_h)//2
blank.paste(copy_top_resized, (xpos, ypos))
#and voila! the pride flag is made
final_image = Image.alpha_composite(copy_base, blank)
final_file = f'temp/{arg}_{random_name()}.png'
final_image.save(final_file)
await pridebot.send_file(_msg.channel, final_file)
os.remove(os.path.join(os.getcwd(),final_file))
os.remove(os.path.join(os.getcwd(),temp_image))
#================================================================================================
#THE REAL SHISH
#================================================================================================
print("PRIDE BOT v1.0\nby sympolite\ngithub.com/sympolite\n...")
try:
os.makedirs(os.path.join(os.getcwd(),"temp"))
except:
print("Temp folder exists!")
if os.path.isdir(os.path.join(os.getcwd(),"flags")):
try:
pride_flags_dictionary = dictbuilder.build_dict('config.txt')
print('Dictionary built!')
flags_prompt = get_flag_help()
help_prompt += flags_prompt
print('Flags prompt created!')
print('Starting bot...')
pridebot.run(client_token)
except Exception as e:
print(str(e))
sys.exit(1)
else:
print("FATAL ERROR: /flags folder is missing!")
sys.exit(1)
| StarcoderdataPython |
4922100 | import copy
import pickle
from typing import cast
import numpy as np
import autofit as af
from autoarray.fit import fit as aa_fit
from autoastro.galaxy import galaxy as g
from autoastro.hyper import hyper_data as hd
from autolens.dataset import imaging
from autolens.fit import fit
from autolens.pipeline import visualizer
from autolens.pipeline.phase.imaging import PhaseImaging
from .hyper_phase import HyperPhase
class Analysis(af.Analysis):
def __init__(
self, masked_imaging, hyper_model_image, hyper_galaxy_image, image_path
):
"""
An analysis to fit the noise for a single galaxy image.
Parameters
----------
masked_imaging: LensData
lens dataset, including an image and noise
hyper_model_image: ndarray
An image produce of the overall system by a model
hyper_galaxy_image: ndarray
The contribution of one galaxy to the model image
"""
self.masked_imaging = masked_imaging
self.visualizer = visualizer.HyperGalaxyVisualizer(image_path=image_path)
self.hyper_model_image = hyper_model_image
self.hyper_galaxy_image = hyper_galaxy_image
def visualize(self, instance, during_analysis):
if self.visualizer.plot_hyper_galaxy_subplot:
hyper_image_sky = self.hyper_image_sky_for_instance(instance=instance)
hyper_background_noise = self.hyper_background_noise_for_instance(
instance=instance
)
contribution_map = instance.hyper_galaxy.contribution_map_from_hyper_images(
hyper_model_image=self.hyper_model_image,
hyper_galaxy_image=self.hyper_galaxy_image,
)
fit_normal = aa_fit.FitImaging(
masked_imaging=self.masked_imaging, model_image=self.hyper_model_image
)
fit_hyper = self.fit_for_hyper_galaxy(
hyper_galaxy=instance.hyper_galaxy,
hyper_image_sky=hyper_image_sky,
hyper_background_noise=hyper_background_noise,
)
self.visualizer.visualize_hyper_galaxy(
fit=fit_normal,
hyper_fit=fit_hyper,
galaxy_image=self.hyper_galaxy_image,
contribution_map_in=contribution_map,
)
def fit(self, instance):
"""
Fit the model image to the real image by scaling the hyper_galaxies noise.
Parameters
----------
instance: ModelInstance
A model instance with a hyper_galaxies galaxy property
Returns
-------
fit: float
"""
hyper_image_sky = self.hyper_image_sky_for_instance(instance=instance)
hyper_background_noise = self.hyper_background_noise_for_instance(
instance=instance
)
fit = self.fit_for_hyper_galaxy(
hyper_galaxy=instance.hyper_galaxy,
hyper_image_sky=hyper_image_sky,
hyper_background_noise=hyper_background_noise,
)
return fit.figure_of_merit
@staticmethod
def hyper_image_sky_for_instance(instance):
if hasattr(instance, "hyper_image_sky"):
return instance.hyper_image_sky
@staticmethod
def hyper_background_noise_for_instance(instance):
if hasattr(instance, "hyper_background_noise"):
return instance.hyper_background_noise
def fit_for_hyper_galaxy(
self, hyper_galaxy, hyper_image_sky, hyper_background_noise
):
image = fit.hyper_image_from_image_and_hyper_image_sky(
image=self.masked_imaging.image, hyper_image_sky=hyper_image_sky
)
if hyper_background_noise is not None:
noise_map = hyper_background_noise.hyper_noise_map_from_noise_map(
noise_map=self.masked_imaging.noise_map
)
else:
noise_map = self.masked_imaging.noise_map
hyper_noise_map = hyper_galaxy.hyper_noise_map_from_hyper_images_and_noise_map(
hyper_model_image=self.hyper_model_image,
hyper_galaxy_image=self.hyper_galaxy_image,
noise_map=self.masked_imaging.noise_map,
)
noise_map = noise_map + hyper_noise_map
masked_imaging = self.masked_imaging.modify_image_and_noise_map(
image=image, noise_map=noise_map
)
return aa_fit.FitImaging(
masked_imaging=masked_imaging, model_image=self.hyper_model_image
)
@classmethod
def describe(cls, instance):
return "Running hyper_galaxies galaxy fit for HyperGalaxy:\n{}".format(
instance.hyper_galaxy
)
class HyperGalaxyPhase(HyperPhase):
Analysis = Analysis
def __init__(self, phase):
super().__init__(phase=phase, hyper_name="hyper_galaxy")
self.include_sky_background = False
self.include_noise_background = False
def run_hyper(self, dataset, info=None, results=None):
"""
Run a fit for each galaxy from the previous phase.
Parameters
----------
dataset: LensData
results: ResultsCollection
Results from all previous phases
Returns
-------
results: HyperGalaxyResults
A collection of results, with one item per a galaxy
"""
phase = self.make_hyper_phase()
masked_imaging = imaging.MaskedImaging(
imaging=dataset,
mask=results.last.mask,
psf_shape_2d=dataset.psf.shape_2d,
positions=results.last.positions,
positions_threshold=cast(
PhaseImaging, phase
).meta_dataset.positions_threshold,
pixel_scale_interpolation_grid=cast(
PhaseImaging, phase
).meta_dataset.pixel_scale_interpolation_grid,
inversion_pixel_limit=cast(
PhaseImaging, phase
).meta_dataset.inversion_pixel_limit,
inversion_uses_border=cast(
PhaseImaging, phase
).meta_dataset.inversion_uses_border,
preload_sparse_grids_of_planes=None,
)
hyper_result = copy.deepcopy(results.last)
hyper_result.model = hyper_result.model.copy_with_fixed_priors(
hyper_result.instance
)
hyper_result.analysis.hyper_model_image = results.last.hyper_model_image
hyper_result.analysis.hyper_galaxy_image_path_dict = (
results.last.hyper_galaxy_image_path_dict
)
for path, galaxy in results.last.path_galaxy_tuples:
            # TODO: Need to be sure these won't mess up anything else.
optimizer = phase.optimizer.copy_with_name_extension(extension=path[-1])
optimizer.const_efficiency_mode = af.conf.instance.non_linear.get(
"MultiNest", "extension_hyper_galaxy_const_efficiency_mode", bool
)
optimizer.sampling_efficiency = af.conf.instance.non_linear.get(
"MultiNest", "extension_hyper_galaxy_sampling_efficiency", float
)
optimizer.n_live_points = af.conf.instance.non_linear.get(
"MultiNest", "extension_hyper_galaxy_n_live_points", int
)
optimizer.multimodal = af.conf.instance.non_linear.get(
"MultiNest", "extension_hyper_galaxy_multimodal", bool
)
optimizer.evidence_tolerance = af.conf.instance.non_linear.get(
"MultiNest", "extension_hyper_galaxy_evidence_tolerance", float
)
model = copy.deepcopy(phase.model)
# TODO : This is a HACK :O
model.galaxies = []
model.hyper_galaxy = g.HyperGalaxy
if self.include_sky_background:
model.hyper_image_sky = hd.HyperImageSky
if self.include_noise_background:
model.hyper_background_noise = hd.HyperBackgroundNoise
            # If the array is all zeros, the galaxy did not have an image in the
            # previous phase and should be ignored
if not np.all(
hyper_result.analysis.hyper_galaxy_image_path_dict[path] == 0
):
hyper_model_image = hyper_result.analysis.hyper_model_image
analysis = self.Analysis(
masked_imaging=masked_imaging,
hyper_model_image=hyper_model_image,
hyper_galaxy_image=hyper_result.analysis.hyper_galaxy_image_path_dict[
path
],
image_path=optimizer.paths.image_path,
)
result = optimizer.fit(analysis=analysis, model=model)
def transfer_field(name):
if hasattr(result.instance, name):
setattr(
hyper_result.instance.object_for_path(path),
name,
getattr(result.instance, name),
)
setattr(
hyper_result.model.object_for_path(path),
name,
getattr(result.model, name),
)
transfer_field("hyper_galaxy")
hyper_result.instance.hyper_image_sky = getattr(
result.instance, "hyper_image_sky"
)
hyper_result.model.hyper_image_sky = getattr(
result.model, "hyper_image_sky"
)
hyper_result.instance.hyper_background_noise = getattr(
result.instance, "hyper_background_noise"
)
hyper_result.model.hyper_background_noise = getattr(
result.model, "hyper_background_noise"
)
return hyper_result
class HyperGalaxyBackgroundSkyPhase(HyperGalaxyPhase):
def __init__(self, phase):
super().__init__(phase=phase)
self.include_sky_background = True
self.include_noise_background = False
class HyperGalaxyBackgroundNoisePhase(HyperGalaxyPhase):
def __init__(self, phase):
super().__init__(phase=phase)
self.include_sky_background = False
self.include_noise_background = True
class HyperGalaxyBackgroundBothPhase(HyperGalaxyPhase):
def __init__(self, phase):
super().__init__(phase=phase)
self.include_sky_background = True
self.include_noise_background = True
| StarcoderdataPython |
5042532 | from embit import bip39
from embit.bip39 import mnemonic_to_bytes, mnemonic_from_bytes
import unicodedata
import hashlib
def calculate_checksum(partial_mnemonic: list, wordlist):
# Provide 11- or 23-word mnemonic, returns complete mnemonic w/checksum
if len(partial_mnemonic) not in [11, 23]:
raise Exception("Pass in a 11- or 23-word mnemonic")
# Work on a copy of the input list
mnemonic_copy = partial_mnemonic.copy()
mnemonic_copy.append("abandon")
# Ignores the final checksum word and recalcs
mnemonic_bytes = bip39.mnemonic_to_bytes(unicodedata.normalize("NFKD", " ".join(mnemonic_copy)), ignore_checksum=True, wordlist=wordlist)
# Return as a list
return bip39.mnemonic_from_bytes(mnemonic_bytes).split()
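
# Usage sketch for calculate_checksum (assumed: embit exposes its English
# wordlist as bip39.WORDLIST; the names below are illustrative):
# partial = "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo".split()
# full = calculate_checksum(partial, bip39.WORDLIST)  # 12 words, checksum appended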
def generate_mnemonic_from_bytes(entropy_bytes):
# Return as a list
return bip39.mnemonic_from_bytes(entropy_bytes).split()
def generate_mnemonic_from_dice(roll_data: str):
entropy_bytes = hashlib.sha256(roll_data.encode()).digest()
# Return as a list
return bip39.mnemonic_from_bytes(entropy_bytes).split()
# Note: This currently isn't being used since we're now chaining hashed bytes for the
# image-based entropy and aren't just ingesting a single image.
def generate_mnemonic_from_image(image):
hash = hashlib.sha256(image.tobytes())
# Return as a list
return bip39.mnemonic_from_bytes(hash.digest()).split()
| StarcoderdataPython |
3232040 | <reponame>triangle1984/vk-bot
from PIL import Image
class Pillowhelper():
    @staticmethod
    def resize_image(input_image_path, size):
        # Resize to the exact target size (may change the aspect ratio)
        original_image = Image.open(input_image_path)
        resized_image = original_image.resize(size)
        return resized_image

    @staticmethod
    def scale_image(input_image_path, width=None, height=None):
        # Scale down in place, preserving aspect ratio, so the image fits
        # within the given width and/or height
        original_image = Image.open(input_image_path)
        w, h = original_image.size
        if width and height:
            max_size = (width, height)
        elif width:
            max_size = (width, h)
        elif height:
            max_size = (w, height)
        else:
            raise RuntimeError('Width and/or height must be provided')
        original_image.thumbnail(max_size, Image.ANTIALIAS)
        return original_image
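
# Usage sketch (file paths are illustrative):
# thumb = Pillowhelper.scale_image('photo.jpg', width=320)
# exact = Pillowhelper.resize_image('photo.jpg', (100, 100))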
| StarcoderdataPython |
3430130 | <gh_stars>1-10
from __future__ import print_function, division, absolute_import
from ..message import BulkFrontendMessage
class Sync(BulkFrontendMessage):
message_id = b'S'
| StarcoderdataPython |
3243081 | <filename>source/main_app.py
from tkinter import *
from tkinter import messagebox
import time
import threading
from splinter import Browser
import re
from urllib.request import urlopen
import sys # for debug purpose
# path for chromedriver
executable_path = {'executable_path':'/usr/local/bin/chromedriver'}
HEADLESS = True
# HEADLESS = False
class MSUROLLAPP(Tk):
def __init__(self):
Tk.__init__(self)
self.title("MSU Roll")
self.resizable(False, False)
container = Frame(self)
container.pack(side="top", fill="both", expand=True, padx=50, pady=20)
container.grid_rowconfigure(0, weight=1)
container.grid_columnconfigure(0, weight=1)
self.frames = {}
for F in (MainPage, RollingPage):
page_name = F.__name__
frame = F(master=container, controller=self)
self.frames[page_name] = frame
# put all of the pages in the same location;
# the one on the top of the stacking order
# will be the one that is visible.
frame.grid(row=0, column=0, sticky="news")
self.showFrame("MainPage")
self.updateURL = "https://raw.githubusercontent.com/by-the-w3i/MSU_ROLL/master/VERSION"
self.downloadURL = "https://github.com/by-the-w3i/MSU_ROLL/releases"
self.version = "1.32"
def showFrame(self, page_name):
'''Show a frame for the given page name'''
frame = self.frames[page_name]
frame.tkraise()
def checkUpdates(self):
try:
with urlopen(self.updateURL) as response:
latest_version = response.read().decode('utf-8').strip()
if self.version != latest_version:
messagebox.showinfo(
"Update",
"Current Version: {}\n\nThe latest version {} is out.\n\nPlease download from {}".format(self.version, latest_version, self.downloadURL)
)
else:
messagebox.showinfo(
"Update",
"Current version is the latest one."
)
except:
messagebox.showwarning(
"Internet Error",
"The Internet is NOT CONNECTED."
)
class MainPage(Frame):
def __init__(self, master, controller):
# all the courses targets
self.targets = []
Frame.__init__(self, master)
self.controller = controller
# self.pack(padx=50, pady=20)
# self.master.geometry('{}x{}'.format(500, 300))
# self.master.tk_setPalette(background='#545454')
Label(self, text="MSU NetID:").grid(row=0, column=0, sticky=E)
Label(self, text="PASSWORD:").grid(row=1, column=0, sticky=E)
self.ID_entry = Entry(self)
self.PW_entry = Entry(self, show="*")
self.ID_entry.grid(row=0, column=1, columnspan=2, sticky=(E,W))
self.PW_entry.grid(row=1, column=1, columnspan=2, sticky=(E,W))
self.ID_entry.focus_set()
Label(self, text="Year:").grid(row=2, column=0, sticky=E)
self.year_entry = Entry(self, width=10)
self.year_entry.insert(0,'2019')
self.year_entry.grid(row=2, column=1)
self.season = StringVar(self)
self.season.set("Spring")
OptionMenu(self, self.season, "Fall", "Spring", "Summer").grid(row=2,column=2)
# Message(self, text="Seperate by a single space." ,fg='red', anchor=E, width=50).grid(row=3, columnspan=3)
Label(self, text="Subject:").grid(row=3, column=0, sticky=E)
self.sub_entry = Entry(self, width=10)
self.sub_entry.insert(0,'CSE 415')
self.sub_entry.grid(row=3, column=1, columnspan=2, sticky=(E,W))
Label(self, text="Section:").grid(row=4, column=0, sticky=E)
self.sec_entry = Entry(self, width=10)
self.sec_entry.insert(0,'1')
self.sec_entry.grid(row=4, column=1)
Button(self, text="add to list", command=self.addCourse).grid(row=4, column=2, sticky=(E,W))
self.courses = Listbox(self)
self.courses.grid(row=5, column=0, columnspan=2)
Button(self, text="delete", command=self.delCourse).grid(row=5, column=2, sticky=(E,W))
Button(self, text="Start Rolling >>>", command=self.rolling).grid(row=6,columnspan=3, sticky=(E,W))
Button(self, text="Check for updates", command=lambda:self.controller.checkUpdates()).grid(row=7,columnspan=3, sticky=(E,W))
def authentication(self, ID, PW):
try:
b = Browser('chrome', headless=HEADLESS, **executable_path)
URL = "https://schedule.msu.edu/Login.aspx"
b.visit(URL)
b.find_by_id("netid").fill(ID)
b.find_by_id("pswd").fill(PW)
b.find_by_value("Login").click()
url = b.url
b.quit()
if url == "https://login.msu.edu/Login":
return False
return True
except:
# messagebox.showwarning(
# "System Error",
# "Error: chromedriver not found!!!"
# )
messagebox.showwarning(
"System Error",
"Error:{}\n{}".format(sys.exc_info()[0], sys.exc_info()[1])
)
def addCourse(self):
course_lst = [s.strip() for s in self.sub_entry.get().strip().split()]
course = "{} {}".format(course_lst[0],course_lst[1]).upper()
year = self.year_entry.get().strip()
section = self.sec_entry.get().strip()
if not year.isdigit():
messagebox.showwarning(
"Add Error",
"Year: Please input a valid year! (make sure there is no space)"
)
elif len(course_lst) != 2:
messagebox.showwarning(
"Add Error",
"Subject should be separated by a space. Format should be 'CSE 415'or 'IAH 241A'"
)
elif not section.isdigit():
messagebox.showwarning(
"Add Error",
"Section: Please input a valid integer in Section! (make sure there is no space)"
)
else:
info = "{} {} {} sec{}".format(year,self.season.get(), course, int(section))
if info not in self.targets:
self.targets.append(info)
self.courses.insert(END, info)
else:
messagebox.showwarning(
"Add Error",
"Duplicate: {}".format(info)
)
def delCourse(self):
to_del = self.courses.curselection()
if len(to_del)==0 :
messagebox.showwarning(
"Delete Error",
"No active course is selected ..."
)
else:
ind = to_del[0]
self.targets.remove(self.courses.get(ind))
self.courses.delete(ind)
def rolling(self):
if len(self.targets)==0:
messagebox.showwarning(
"Error",
"No class in the list. Please click 'add to list'."
)
elif self.ID_entry.get()=="" or self.PW_entry.get()=="":
messagebox.showwarning(
"Error",
"NETID and PASSWORD can not be empty."
)
if self.ID_entry.get()=="":
self.ID_entry.focus_set()
else:
self.PW_entry.focus_set()
elif not self.authentication(self.ID_entry.get(), self.PW_entry.get()):
messagebox.showwarning(
"Error",
"NETID and PASSWORD are not right."
)
self.ID_entry.focus_set()
else:
understand = messagebox.askyesno(
"Something you should know",
"I understand that MSU Rolling works only when my computer is on."
)
if understand:
self.controller.showFrame("RollingPage")
self.controller.frames["RollingPage"].start_thread()
# print("rollings")
else:
donate = messagebox.askyesno(
"Why I can not use MSU Roll offline?",
'''Two reasons:
<1> The APP developper wants to keep user's NetID and PASSWORD locally. It is unsafe to upload user's info online.
<2> The APP developper is soooo POOR. He can not afford a server for this App.
If you want to see some features in the future:
a) offline enrolling (without keeping computer turning on)
b) SMS notification
c) Mobile App
etc ...
You are very welcome to DONATE this developper by clicking [Yes]
"YES, Anything Helps."
''')
if donate:
messagebox.showinfo(
"Donate",
"WECHAT PAY: 351425189\n\nCHASE Quickpay:\n <EMAIL>"
)
class RollingPage(Frame):
def __init__(self, master, controller):
Frame.__init__(self, master)
self.controller = controller
Button(self, text="FAQs",command=self.faqs).pack()
label = Label(self, text="Enrolling class for you ...")
label.pack(side="top", fill="x", pady=10)
button = Button(self, text="STOP Rolling and Quit",
command=lambda:controller.destroy())
button.pack()
Button(self, text="Live Status:",command=self.liveStatus).pack(pady=(10,0))
self.msg = Message(self, text="Validating class list ...", width=500, fg="#25a5e4")
self.msg.pack()
self.status_table = {}
self.ready_table = {}
def start_thread(self):
thread = threading.Thread(target=self.looping, args=())
thread.daemon = True
thread.start()
def faqs(self):
messagebox.showwarning(
"FAQs",
">> ISSUES:\nIf you CLOSE THIS PROGRAM, DISCONNECT TO INTERNET or CLOSE THE COMPUTER, the Enrolling will fail. You may need to start over again. However feel free to minimize this window when it is running.\n\n>> BUG REPORT:\nWECHAT: 351425189"
)
def liveStatus(self):
messagebox.showwarning(
"Live Status",
"[ERROR]\nThis class is not found.\n\n[READY]\nThis class is ready to enroll anythime when someone drops it.\n\n[ENROLLED]\nCongratulations!!! You Successfully enrolled this class.\n\n[FAILED]\nEnroll failed due to permission denied. (You may not have the right to enroll this class or You have already enrolled this class)"
)
def updateStatus(self, cls_lst, finish=False):
clor = "#25a5e4"
msg = ""
for c in cls_lst:
if c in self.status_table:
msg += "[{}] {}\n".format(self.status_table[c], c)
else:
break
if finish:
msg += "\nROLLING FINISHED!!!\nPlease check your msu schedule."
clor = "red"
self.msg.configure(text=msg, fg=clor)
def updateReady(self, contents):
for k in self.ready_table:
c = self.ready_table[k][1]
plan_idx = re.findall('<a id="(.+)?" title="Enroll in {}"'.format(c), contents)[0][-1]
self.ready_table[k][0] = plan_idx
def checkLogin(self, b_lst, url_plan, ID, PW):
if b_lst[0].url != url_plan:
b_lst[0].find_by_id("netid").fill(ID)
b_lst[0].find_by_id("pswd").fill(PW)
b_lst[0].find_by_value("Login").click()
def looping(self):
NETID = self.controller.frames["MainPage"].ID_entry.get()
PASSWD = self.controller.frames["MainPage"].PW_entry.get()
CLS_LST = self.controller.frames["MainPage"].targets
URL = "https://schedule.msu.edu"
URL_PLAN = "https://schedule.msu.edu/Planner.aspx"
b = Browser('chrome', headless=HEADLESS, **executable_path)
for course in CLS_LST:
tar = course.split()
TERM = "{} {}".format(tar[1], tar[0])
SUB = tar[2]
SUB_NUM = tar[3]
SEC = "{:03}".format(int(tar[4][3:]))
try:
# put all the list class in to user account planner
b.visit(URL)
# term = b.find_by_text(TERM).value
term = re.findall('<option .*?value="(.+)??".*?>{}(-Tentative)?</option>'.format(TERM), b.html)[0][0]
# b.find_by_id("MainContent_SrearchUC_ddlTerm").select(term)
# b.find_by_id("MainContent_SrearchUC_ddlSubject").select(SUB)
# b.find_by_id("MainContent_SrearchUC_txtCourseNumber").fill(SUB_NUM)
# b.find_by_id("MainContent_SrearchUC_btnSubmit").click()
b.find_by_id("MainContent_ddlTerm").select(term)
b.find_by_id("MainContent_ddlSubject").select(SUB)
b.find_by_id("MainContent_txtCourseNumber").fill(SUB_NUM)
b.find_by_id("MainContent_btnSubmit").click()
combo = "{} {} Section {}".format(SUB, SUB_NUM, SEC)
link = re.findall('<a href="(.+)?" title="[^"]+add {} to your planner"?>'.format(combo), b.html)[0]
b.click_link_by_href(link)
self.checkLogin([b], URL_PLAN, NETID, PASSWD)
self.status_table[course] = "READY"
self.ready_table[course] = ["-1", combo]
except:
# print("Error:", sys.exc_info()[0])
self.status_table[course] = "ERROR"
self.updateStatus(CLS_LST)
# now go to the planner
b.visit(URL_PLAN)
self.checkLogin([b], URL_PLAN, NETID, PASSWD)
# find the plan idx
self.updateReady(b.html)
# print(self.ready_table)
STATUS_CODE = "MainContent_UCPlan_rptPlanner_tdStatus_"
ENROLL_CODE = "MainContent_UCPlan_rptPlanner_imgEnroll_"
CONTINUE_CODE ="MainContent_btnContinue"
to_delete = None
# looping arround
while len(self.ready_table) > 0:
b.visit(URL_PLAN)
self.checkLogin([b], URL_PLAN, NETID, PASSWD)
for course in self.ready_table:
plan_idx = self.ready_table[course][0]
combo = self.ready_table[course][1]
# print(b.find_by_id(STATUS_CODE+plan_idx).text)
if "Open" in b.find_by_id(STATUS_CODE+plan_idx).text:
# section open!! enroll the class
b.find_by_id(ENROLL_CODE+plan_idx).click()
b.find_by_id(CONTINUE_CODE).click()
if b.html.find("The course has been added to your schedule.")!=-1:
# enroll successfully
self.status_table[course] = "ENROLLED"
else:
# FAILED
self.status_table[course] = "FAILED"
to_delete = course
self.updateStatus(CLS_LST)
break
if to_delete != None:
b.visit(URL_PLAN)
self.checkLogin([b], URL_PLAN, NETID, PASSWD)
del self.ready_table[to_delete]
self.updateReady(b.html)
to_delete = None
else:
time.sleep(1) # sleep 1 second
self.updateStatus(CLS_LST, True)
b.quit()
# print("Exit Looping")
if __name__ == "__main__":
root = MSUROLLAPP()
root.mainloop()
| StarcoderdataPython |
6440579 | name = "gcornilib"
| StarcoderdataPython |
3268363 | """Apply correction factors to energy_demand_unconstrained outputs
"""
from smif.exception import SmifException
from smif.model import SectorModel
class EnergyCorrectionFactor_Unconstrained(SectorModel):
"""Adaptor to apply energy correction factors
"""
def simulate(self, data_handle):
"""Read inputs, apply factor, write out.
"""
# Conversions to apply
# - fuel: gas or electricity
# - service: service or technology grouping
# - factor: correction factor to apply
# - inputs: list of inputs (all have the same factor applied)
conversions = [
{
'fuel': 'electricity',
'service': 'non_heating',
'factor': 0.9,
'inputs': [
'residential_electricity',
'industry_electricity',
'service_electricity'
],
},
{
'fuel': 'gas',
'service': 'non_heating',
'factor': 0.7,
'inputs': [
'residential_gas',
'industry_gas',
'service_gas'
]
}
]
for conversion in conversions:
for input_name in conversion['inputs']:
if input_name in self.inputs:
self._check_output_exists(input_name)
data = data_handle.get_data(input_name).as_ndarray()
# divide by factor
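                    # e.g. 90 units of delivered energy with a factor of 0.9
                    # becomes 100 units of fuel demand (90 / 0.9)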
results = data / conversion['factor']
data_handle.set_results(input_name, results)
else:
self.logger.warning(
"No input found for {}, skipping correction factor".format(input_name))
    def _check_output_exists(self, input_name):
        try:
            self.outputs[input_name]
        except KeyError:
            msg = "Output '{}' not found to match input '{}' in model '{}'".format(
                input_name, input_name, self.name)
            raise SmifException(msg)
class EnergyCorrectionFactor_Constrained(SectorModel):
"""Adaptor to apply energy correction factors
"""
def simulate(self, data_handle):
"""Read inputs, apply factor, write out.
"""
# Conversions to apply
# - fuel: gas or electricity
# - service: service or technology grouping
# - factor: correction factor to apply
# - inputs: list of inputs (all have the same factor applied)
conversions = [
{
'fuel': 'electricity',
'service': 'boiler',
'factor': 0.9,
'inputs': [
'residential_electricity_boiler_electricity',
'industry_electricity_boiler_electricity',
'service_electricity_boiler_electricity'
]
},
{
'fuel': 'electricity',
'service': 'heat_pump',
'factor': 1.5,
'inputs': [
'residential_electricity_heat_pumps_electricity',
'industry_electricity_heat_pumps_electricity',
'service_electricity_heat_pumps_electricity'
]
},
{
'fuel': 'electricity',
'service': 'dh',
'factor': 0.9,
'inputs': [
'residential_electricity_district_heating_electricity',
'industry_electricity_district_heating_electricity',
'service_electricity_district_heating_electricity'
]
},
{
'fuel': 'electricity',
'service': 'non_heating',
'factor': 0.9,
'inputs': [
'residential_electricity_non_heating',
'industry_electricity_non_heating',
'service_electricity_non_heating'
],
},
{
'fuel': 'gas',
'service': 'boiler',
'factor': 0.8,
'inputs': [
'industry_gas_boiler_gas',
'service_gas_boiler_gas',
'residential_gas_boiler_gas'
]
},
{
'fuel': 'gas',
'service': 'dh_chp',
'factor': 0.5,
'inputs': [
'residential_gas_district_heating_CHP_gas',
'industry_gas_district_heating_CHP_gas',
'service_gas_district_heating_CHP_gas'
]
},
{
'fuel': 'gas',
'service': 'non_heating',
'factor': 0.7,
'inputs': [
'residential_gas_non_heating',
'industry_gas_non_heating',
'service_gas_non_heating'
]
}
]
for conversion in conversions:
for input_name in conversion['inputs']:
if input_name in self.inputs:
self._check_output_exists(input_name)
data = data_handle.get_data(input_name).as_ndarray()
# divide by factor
results = data / conversion['factor']
data_handle.set_results(input_name, results)
else:
self.logger.warning(
"No input found for {}, skipping correction factor".format(input_name))
    def _check_output_exists(self, input_name):
        try:
            self.outputs[input_name]
        except KeyError:
            msg = "Output '{}' not found to match input '{}' in model '{}'".format(
                input_name, input_name, self.name)
            raise SmifException(msg)
| StarcoderdataPython |
6489795 | <filename>Page Replacement Policy/First In First Out Algorithm (FIFO).py
# FIFO Page Replacement Program In Python
def FIFO(pages, capacity):
memory = list() # Initializing the memory
pageFaults = 0 # Count of number of page Faults
for page in pages:
if (page not in memory): # If page not found in memory
pageFaults+=1 # Incrementing the count of Page faults
if (len(memory) < capacity): # If Free spaces available in frames
memory.append(page) # Add page to memory if free space exists
else: # If page not found in memory, apply FIFO Page Replacement Algorithm
memory.pop(0) # Remove the page at first position of memory
memory.append(page) # Adding current page in memory
return pageFaults
# Driver Code to run the program
if __name__ == "__main__":
pages = list(map(int, input('Enter the sequence of Pages: ').strip().split()))
capacity = int(input('Enter maximum number of pages in a frame: ')) # max pages in frame
print("\nThe number of Page Faults occurred in FIFO are: ", FIFO(pages, capacity))
##### Note : The Output Image of the Program is also uploaded in the same directory. #####
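
# Worked example: pages = 1 3 0 3 5 6 3 with capacity 3 gives 6 page faults
# under FIFO (only the first repeated 3 is a hit; the later 3 faults again
# because it was evicted in FIFO order).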
| StarcoderdataPython |
171223 | import socket, zlib, thread, json, time, collections
from classes import User
PROTOCOL_VERSION = 3
HOST = 'localhost'
PORT = 1338
PREFIX = {'start':'\x02', 'end':'\r\n'}
HOOKS = {}
class HookException(Exception):
    pass

def Hook(hook):
    def deco(func):
        if hook not in HOOKS.keys():
            HOOKS[hook] = func
            return func
        raise HookException('Hook %s already exists' % hook)
    return deco
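
# Note: @Hook('TAG') registers the decorated handler in HOOKS under 'TAG';
# Client.parse() later dispatches each incoming packet via HOOKS[packet['tag']].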
class Client():
def __init__(self, data=('localhost', 27960)):
self.host, self.port = data
self.user = User('Jeffery', self)
self.Q = collections.deque()
self.serverInfo = {}
self.last_packets = {'pos':-1}
self.last_sent = {'pos':[-1, -1]}
self.state = 'DISCONNECTED'
self.c = None
self.hash = None
self.id = None
self.connected = False
def connect(self):
if self.connected is False:
try:
self.c = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.c.connect((self.host, self.port))
self.write({'tag': 'HELLO'})
self.state = 'JOINING'
self.connected = True
self.c.settimeout(10)
self.parse(self.read())
except Exception, e:
print e
return
finally:
self.c.settimeout(None)
thread.start_new_thread(self.readLoop, ())
self.consumerLoop()
def action_JOINREQ(self):
self.write({'tag':'JOIN_REQ', 'name':self.user.name, 'hash':self.hash})
def disconnect(self):
self.connected = False
def write(self, line):
self.c.send('%s%s%s' % (PREFIX['start'], zlib.compress(json.dumps(line)), PREFIX['end']))
def parse(self, line):
if line:
if line['tag'] in HOOKS.keys():
HOOKS[line['tag']](self, line)
def read(self, a=2048):
line = self.c.recv(a)
if line:
for line in line.split('\r\n'):
if line.startswith('\x02'):
return json.loads(zlib.decompress(line[1:]))
def readLoop(self):
while self.connected:
self.Q.append(self.read())
def consumerLoop(self):
while self.connected:
while len(self.Q) > 0:
self.parse(self.Q.popleft())
if time.time() - self.last_packets['pos'] >= 5 and self.user.pos != self.last_sent['pos']: #send pos packet at least every 5 seconds
                self.action_POS()
self.last_packets['pos'] = time.time()
time.sleep(.1)
@Hook('INFO')
def event_INFO(self, E):
if self.state == 'JOINING' and type(E) == dict:
self.serverInfo = E
del self.serverInfo['tag']
if self.serverInfo['maxclients'] <= self.serverInfo['clients']:
print 'Server is full!'
elif self.serverInfo['version'] != PROTOCOL_VERSION:
print 'Protocol mismatch (Server is %s, we are %s)' % (self.serverInfo['version'], PROTOCOL_VERSION)
else:
return self.action_JOINREQ()
self.disconnect()
@Hook('PING')
    def event_PING(self, E): self.action_PONG()
def action_PONG(self):
self.write({'tag':'PONG', 'time':time.time()})
@Hook('WELCOME')
def event_WELCOME(self, E):
if self.state == 'JOINING' and type(E) == dict:
self.hash = E['hash']
self.serverInfo['motd'] = E['motd']
self.id = E['id']
self.state = 'JOINED'
self.action_PONG()
self.action_POS()
print 'Joined Server!'
@Hook('KICK')
def event_KICK(self, E):
print 'Got kicked! for %s' % E['reason']
self.disconnect()
@Hook('ACTION')
def event_ACTION(self, E):
if E['action'] in self.user.methods:
r = self.user.methods[E['action']](self.user, *E['data'])
if r:
self.write({'tag':'ACTION_RESP', 'data':r})
@Hook('ENT_ACTION')
def event_ENTACTION(self, E): pass
@Hook('POS')
def event_POS(self, E):
        if E['id'] == self.id:
            self.user.pos = E['pos']
else: pass #find the user, update
def action_POS(self):
self.write({'tag':'POS', 'pos':self.user.pos, 'id':self.id})
if __name__ == '__main__':
c = Client()
c.connect()
| StarcoderdataPython |
5072874 | #!/usr/bin/env python
# coding: utf-8
# Copyright (c) Qotto, 2019
import pytest
import asyncio
import os
from avro.schema import NamedSchema
from tonga.stores.manager.kafka_store_manager import KafkaStoreManager
from tonga.models.store.store_record import StoreRecord
from tonga.models.store.store_record_handler import StoreRecordHandler
from tonga.services.serializer.avro import AvroSerializer
# TestEvent / TestEventHandler import
from tests.misc.event_class.test_event import TestEvent
from tests.misc.handler_class.test_event_handler import TestEventHandler
# TestCommand / TestCommandHandler import
from tests.misc.event_class.test_command import TestCommand
from tests.misc.handler_class.test_command_handler import TestCommandHandler
# TestResult / TestResultHandler import
from tests.misc.event_class.test_result import TestResult
from tests.misc.handler_class.test_result_handler import TestResultHandler
# Tonga Kafka client
from tonga.services.coordinator.client.kafka_client import KafkaClient
from tonga.errors import AvroAlreadyRegister, AvroEncodeError
def test_init_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
schemas = serializer.get_schemas()
assert isinstance(schemas['tonga.store.record'], NamedSchema)
assert isinstance(schemas['tonga.test.event'], NamedSchema)
def test_register_event_handler_store_record_avro_serializer(get_avro_serializer, get_avro_serializer_store):
serializer = get_avro_serializer
local_store, global_store = get_avro_serializer_store
tonga_kafka_client = KafkaClient(client_id='waiter', cur_instance=0, nb_replica=1,
bootstrap_servers='localhost:9092')
loop = asyncio.get_event_loop()
store_builder = KafkaStoreManager(name='test_store_builder', client=tonga_kafka_client, topic_store='test-store',
serializer=serializer, loop=loop, rebuild=True, event_sourcing=False,
local_store=local_store, global_store=global_store)
store_record_handler = StoreRecordHandler(store_builder)
serializer.register_event_handler_store_record(StoreRecord, store_record_handler)
events = serializer.get_events()
found = False
for e_name, event in events.items():
if e_name.match('tonga.store.record'):
assert event == StoreRecord
found = True
break
assert found
found = False
handlers = serializer.get_handlers()
for e_name, handler in handlers.items():
if e_name.match('tonga.store.record'):
assert handler == store_record_handler
found = True
break
assert found
def test_register_event_class_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_event_handler = TestEventHandler()
serializer.register_class('tonga.test.event', TestEvent, test_event_handler)
events = serializer.get_events()
found = False
for e_name, event in events.items():
if e_name.match('tonga.test.event'):
assert event == TestEvent
found = True
break
assert found
found = False
handlers = serializer.get_handlers()
for e_name, handler in handlers.items():
if e_name.match('tonga.test.event'):
assert handler == test_event_handler
found = True
break
assert found
def test_register_command_class_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_command_handler = TestCommandHandler()
serializer.register_class('tonga.test.command', TestCommand, test_command_handler)
events = serializer.get_events()
found = False
for e_name, event in events.items():
if e_name.match('tonga.test.command'):
assert event == TestCommand
found = True
break
assert found
found = False
handlers = serializer.get_handlers()
for e_name, handler in handlers.items():
if e_name.match('tonga.test.command'):
assert handler == test_command_handler
found = True
break
assert found
def test_register_result_class_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_result_handler = TestResultHandler()
serializer.register_class('tonga.test.result', TestResult, test_result_handler)
events = serializer.get_events()
found = False
for e_name, event in events.items():
if e_name.match('tonga.test.result'):
assert event == TestResult
found = True
break
assert found
found = False
handlers = serializer.get_handlers()
for e_name, handler in handlers.items():
if e_name.match('tonga.test.result'):
assert handler == test_result_handler
found = True
break
assert found
def test_register_more_than_once_avro_serializer():
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
with pytest.raises(AvroAlreadyRegister):
serializer = AvroSerializer(BASE_DIR + '/misc/schemas/bad')
def test_register_bad_event_name_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_event_handler = TestEventHandler()
with pytest.raises(NameError):
serializer.register_class('tonga.qlf', TestEvent, test_event_handler)
def event_name():
return 'test'
def test_encode_name_error_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_encode = TestEvent(test='LOL')
test_encode.event_name = event_name
assert test_encode.event_name() == 'test'
with pytest.raises(NameError):
encoded_test = serializer.encode(test_encode)
def test_encode_fail_error_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_encode = TestEvent(test='LOL')
test_encode.__delattr__('test')
with pytest.raises((AvroEncodeError, AttributeError)):
encoded_test = serializer.encode(test_encode)
def test_encode_avro_serializer(get_avro_serializer):
serializer = get_avro_serializer
test_encode = TestEvent(test='LOL')
encoded_test = serializer.encode(test_encode)
r_dict = serializer.decode(encoded_test)
assert r_dict['record_class'].to_dict() == test_encode.to_dict()
assert r_dict['handler_class'].handler_name() == 'tonga.test.event'
| StarcoderdataPython |
1787616 | <reponame>ttung/starfish
from starfish import Experiment
def MERFISH(use_test_data: bool=False):
if use_test_data:
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/MERFISH-TEST/experiment.json")
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/MERFISH/experiment.json")
def allen_smFISH(use_test_data: bool=False):
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/allen_smFISH/experiment.json")
def DARTFISH(use_test_data: bool=False):
if use_test_data:
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/DARTFISH-TEST/experiment.json")
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/DARTFISH/experiment.json")
def ISS(use_test_data: bool=False):
if use_test_data:
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/ISS-TEST/experiment.json")
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181005/ISS/experiment.json")
def osmFISH(use_test_data: bool=False):
"""Return osmFISH data from Codeluppi et al. 2018
This function returns a single round of a single field of view from the 16 field of view study
if use_test_data is True, or three fields of view containing all rounds of data if use_test_data
is False.
Parameters
----------
use_test_data : bool
If True, return one round from one field of view, suitable for testing (default False)
Notes
-----
- osmFISH fields of view are quite large (14, 2, 45, 2048, 2048) which takes up approximately
21 gb in memory. Use the non-test data with care.
See Also
--------
Codeluppi et al. 2018: https://www.nature.com/articles/s41592-018-0175-z
"""
if use_test_data:
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/browse/formatted/20181217/osmFISH/"
"experiment.json"
)
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/20181031/osmFISH/experiment.json")
def BaristaSeq(use_test_data: bool=False) -> Experiment:
"""Loads a BaristaSeq dataset generated from mouse visual cortex. The extracted field of view
comes from an internal layer of V1 (range: 2-5)
Parameters
----------
use_test_data : bool
This parameter is not used for this data type, as there is no data of testing size.
Returns
-------
Experiment
Experiment containing raw image data
"""
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/browse/formatted/20181028/"
"BaristaSeq/cropped_formatted/experiment.json"
)
def ImagingMassCytometry(use_test_data: bool=False):
return Experiment.from_json(
"https://d2nhj9g34unfro.cloudfront.net/browse/formatted/20181023/"
"imaging_cytof/BodenmillerBreastCancerSamples/experiment.json"
)
def SeqFISH(use_test_data: bool=False):
"""Loads a SeqFISH field of view generated from cultured mES cells.
Parameters
----------
use_test_data : bool
If true, return a small region of testing data that was visually determined to contain two
cells.
Notes
-----
SeqFISH fields of view are quite large (12, 5, 29, 2048, 2048) and take up approximately
5 gb in memory. Use the non-test data with care.
See Also
--------
Manuscript for Intron-SeqFISH: https://doi.org/10.1016/j.cell.2018.05.035
"""
suffix = "-TEST" if use_test_data else ""
url = (
f"https://d2nhj9g34unfro.cloudfront.net/browse/formatted/20181211/"
f"seqfish{suffix}/experiment.json"
)
return Experiment.from_json(url)
def STARmap(use_test_data: bool=False):
"""Loads a STARmap field of view generated from mouse primary visual cortext.
Parameters
----------
use_test_data : bool
If true, return a small region of testing data that was visually determined to contain two
cells.
Notes
-----
starfish received stitched STARmap images. To make it compatible with starfish, we extracted a
single field of view with shape (r=6, c=4, z=29, y=1024, x=1024).
See Also
--------
Manuscript for STARmap: https://doi.org/10.1126/science.aat5691
"""
url = (
"https://d2nhj9g34unfro.cloudfront.net/browse/formatted/20190309/"
"starmap/experiment.json"
)
return Experiment.from_json(url)
| StarcoderdataPython |
8185512 | import numpy as np
import random
import torch
import torch.nn as nn
import sys
import os
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.realpath(__file__)), "../"))
def seed_all(seed=42):
torch.cuda.empty_cache()
random.seed(seed)
np.random.seed(seed)
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
torch.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
def seed_worker(worker_id=0):
    # PyTorch's DataLoader invokes worker_init_fn with the worker id; the seed
    # itself is kept fixed here so every worker is deterministic.
worker_seed = 42
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
torch.manual_seed(worker_seed)
torch.cuda.manual_seed_all(worker_seed)
np.random.seed(worker_seed)
random.seed(worker_seed)
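# Hypothetical usage sketch (not part of the original file): when passed as a
# DataLoader worker_init_fn, PyTorch calls seed_worker(worker_id) once per
# worker process, e.g.:
#
#   loader = torch.utils.data.DataLoader(dataset, num_workers=4,
#                                        worker_init_fn=seed_worker)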
def weights_init(m):
if isinstance(m, nn.Conv2d):
torch.nn.init.normal_(m.weight, 0.0, 0.02)
elif isinstance(m, nn.BatchNorm2d):
torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
torch.nn.init.zeros_(m.bias)
| StarcoderdataPython |
9701037 | <gh_stars>1-10
#!/usr/bin/env python
from datetime import date, timedelta
from django import template
from eventapp.models import Event
register = template.Library()
def get_last_day_of_month(year, month):
if (month == 12):
year += 1
month = 1
else:
month += 1
return date(year, month, 1) - timedelta(1)
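# Worked example (added note): get_last_day_of_month(2024, 2) -> date(2024, 2, 29).
# The month == 12 branch rolls over to January 1st of the next year before
# subtracting one day, so get_last_day_of_month(2023, 12) -> date(2023, 12, 31).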
def month_cal(year, month):
event_list = Event.objects.filter(start_date__year=year,start_date__month=month)
#event_list = []
#for i in tmp_list:
# if i.start.year == year and i.start.month == month:
# event_list.append(i)
first_day_of_month = date(year, month, 1)
last_day_of_month = get_last_day_of_month(year, month)
first_day_of_calendar = first_day_of_month - timedelta(first_day_of_month.weekday())
last_day_of_calendar = last_day_of_month + timedelta(7 - last_day_of_month.weekday())
month_cal = []
week = []
week_headers = []
i = 0
day = first_day_of_calendar
while day <= last_day_of_calendar:
if i < 7:
week_headers.append(day)
cal_day = {}
cal_day['day'] = day
cal_day['event'] = False
for event in event_list:
#if day == event.start.date():
if day >= event.start_date.date() and day <= event.end_date.date():
cal_day['event'] = True
cal_day['slug'] = event.slug
if day.month == month:
cal_day['in_month'] = True
else:
cal_day['in_month'] = False
week.append(cal_day)
if day.weekday() == 6:
month_cal.append(week)
week = []
i += 1
day += timedelta(1)
return {'calendar': month_cal, 'headers': week_headers}
register.inclusion_tag('calendar.html')(month_cal)
| StarcoderdataPython |
1778476 | '''
Search for fixed points in vicinity of a plane wave.
args: k1, k2 - wave numbers
===Optimization method discussion===
Use now - `lm` with non-zero starting phase; If it fails - switch to other methods
`hybr` - NOT OK: strange jump and breaks my solve_cycle
'lm' - OK if start with non-zero mean phase
'broyden1' - was OK, but need more testing
MAYBE: try scipy.optimize.fixed_point
MAYBE: try `krylov` - it is said to be good for large-scale problems.
'''
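# A minimal sketch (not in the original script) of the fallback the docstring
# describes: try `lm` first and, if the solver reports failure, retry with
# `broyden1`. The variable names mirror those used inside find_fixpoint() below.
#
#   res = opt.root(vec_diff, phi0_class, tol=mini_tol, method='lm')
#   if not res.success:
#       res = opt.root(vec_diff, phi0_class, tol=mini_tol, method='broyden1')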
import sys
import os
import pickle
import numpy as np
import logging
from sim_physics import carpet, solve_cycle, N, NN, TT, get_mtwist, define_solve_cycle_class
import carpet.classes as cc
import scipy.optimize as opt
carpet.setup_logging('master.log', mode='a')
def get_vec_diff(solve_cycle, tol):
def vec_diff(phi):
# phi_global_end = 0
# sol = solve_cycle(phi, tol, phi_global_end=phi_global_end) # end at global phase = 0
sol = solve_cycle(phi, tol) # end at global phase = 0
phi1 = sol.y.T[-1]
diff = phi1 - phi - 2 * np.pi - phi.mean() # force it to prefer zero mean phase
return diff
return vec_diff
def find_fixpoint(phi0, tol, mini_tol):
'''
v2: optional phi0 as input; otherwise use m-twist
2019-08-12: - subtract mean phase from the mtwist to work in the same Poincare plane
- change dynamics for sine coupling
'''
# Map to classes
ix_to_class, class_to_ix = cc.get_classes(phi0) # cc.get_classes(phi0)
nclass = len(class_to_ix)
# Get classes representatives
# Get one oscillator from each of cilia classes
unique_cilia_ids = np.array([class_to_ix[iclass][0] for iclass in range(nclass)], dtype=np.int64)
# Get neighbours
NN_class, TT_class = cc.get_neighbours_list_class(unique_cilia_ids, ix_to_class, NN, TT)
solve_cycle_class = define_solve_cycle_class(NN_class,TT_class)
# Optimize!
phi0_class = phi0[unique_cilia_ids] # I.Co.
vec_diff = get_vec_diff(solve_cycle_class, tol)
res = opt.root(vec_diff, phi0_class, tol=mini_tol, method='lm')
if not res.success:
logging.warning(f'Did not converge, k1,k2=({k1},{k2})')
fixpoint_class = np.array(res.x)
fixpoint = fixpoint_class[ix_to_class] # from classes to cilia ix
return fixpoint
### Main
k1, k2 = int(sys.argv[1]), int(sys.argv[2])
tol = 10 ** -8
phi0 = get_mtwist(k1, k2)
# phi0 = phi0 - carpet.get_mean_phase(phi0) # Test without this line - it makes it worse at least in 1 case
phi0 = phi0 - carpet.get_mean_phase(phi0) + 0.01 # Start with SMALL mean phase -> clear direction of minimizing for the solver
if k1 == 0 and k2 == 0:
# assuming that symmetries of fixpoint are preserved,
# there is only one possibility up to a constant shift: (0,0,0..,0)
fixpoint = get_mtwist(0,0)
else:
fixpoint = find_fixpoint(phi0, tol, tol)
## Test
# sol = solve_cycle(fixpoint, tol)
# fp_image = sol.y.T[-1] - 2 * np.pi
# print("fp mean phase:", fixpoint.mean())
# print("dist to image:", carpet.rms(fp_image - fixpoint))
outfolder = 'out/fixpoint/'
os.makedirs(outfolder, exist_ok=True)
filename = outfolder + "fixpoint_k1={}_k2={}.npy".format(k1, k2)
np.save(filename, fixpoint)
| StarcoderdataPython |
3402911 | <reponame>lazy-labs/star_resty<filename>star_resty/inject.py<gh_stars>1-10
import operator
from typing import TypeVar, Type, Optional, Generic
from star_resty import Method
__all__ = ('attr',)
T = TypeVar('T')
class InjectAttr(Generic[T]):
__slots__ = ('_func',)
def __init__(self, name=None):
if name:
self._func = operator.attrgetter(name)
else:
self._func = None
def __set_name__(self, owner, name):
if self._func is None:
self._func = operator.attrgetter(name)
def __get__(self, instance: Optional[Method], owner: Type[Method]) -> T:
if instance is None:
return self
return self._func(instance.request.app)
def attr(_: Optional[Type[T]] = None, *, name: Optional[str] = None) -> T:
return InjectAttr[T](name)
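# Hypothetical usage sketch (not part of the original module). InjectAttr reads
# the named attribute off `instance.request.app`, so a Method subclass can
# declare app-level dependencies declaratively; the names below are
# illustrative assumptions only:
#
#   class GetUser(Method):
#       db = attr(name='db')   # resolves to self.request.app.db at access time
#       cache = attr()         # attribute name inferred via __set_name__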
| StarcoderdataPython |
1903983 | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
from ast_parser.core import analyze
from ast_parser.core import cli_list_region_tags_datatypes as cli_datatypes
# This file contains helper functions for the list_region_tags CLI command.
def process_list_region_tags(
invocation: cli_datatypes.ListRegionTagsInvocation
) -> cli_datatypes.ListRegionTagsResult:
"""Compute values displayed in list_region_tags
This method is a helper method that computes the values displayed in
list_region_tags. (Some of the returned values may not be displayed,
depending on CLI options.)
Args:
invocation: A CLI invocation object with the requisite user input.
Returns:
A CLI response object with the required processed data.
"""
def _get_test_count_str(region_tag):
test_data_matches = [method for method in source_methods if
region_tag in method['region_tags']]
total_tests = 0
for test_data in test_data_matches:
total_tests += len(test_data['test_methods'])
return f'({total_tests} test(s))'
grep_tags, source_tags, ignored_tags, source_methods = (
analyze.analyze_json(invocation.data_json, invocation.root_dir))
test_count_map = {tag: _get_test_count_str(tag) for tag in source_tags}
undetected_tags = [tag for tag in grep_tags
if tag not in source_tags
and tag not in ignored_tags]
return cli_datatypes.ListRegionTagsResult(
source_methods,
source_tags,
undetected_tags,
ignored_tags,
test_count_map
)
def format_list_region_tags(
invocation: cli_datatypes.ListRegionTagsInvocation,
result: cli_datatypes.ListRegionTagsResult,
) -> List[str]:
"""Convert computed values for list_region_tags into formatted output.
This method is a helper method that converts the results generated by the
_process_list_region_tags() method into human-readable output based on
user-specified CLI options.
Args:
invocation: A CLI invocation object with the requisite user input.
result: A CLI response object with the required processed data.
Returns:
Human readable output as a list of lines.
"""
output_lines = []
if invocation.show_undetected and invocation.show_test_counts:
output_lines.append(
'WARN Undetected/ignored region tags do not have test counts')
if invocation.show_detected:
output_lines.append('Detected region tags:')
for tag in result.source_tags:
if invocation.show_test_counts:
output_lines.append(f' {tag} {result.test_count_map[tag]}')
else:
output_lines.append(f' {tag}')
if invocation.show_filenames:
source_file = [method['source_path']
for method in result.source_methods
if tag in method['region_tags']][0]
output_lines.append(f' Source file: {source_file}')
if invocation.show_undetected:
output_lines.append('Undetected region tags:')
for tag in result.undetected_tags:
output_lines.append(f' {tag}')
if result.ignored_tags:
output_lines.append('Ignored region tags')
for tag in result.ignored_tags:
output_lines.append(f' {tag}')
return output_lines
| StarcoderdataPython |
306433 | """Constants."""
import jmespath
PROJECT_NAME = "flake8-nitpick"
ERROR_PREFIX = "NIP"
LOG_ROOT = PROJECT_NAME.replace("-", ".")
TOML_EXTENSION = ".toml"
NITPICK_STYLE_TOML = f"nitpick-style{TOML_EXTENSION}"
DEFAULT_NITPICK_STYLE_URL = f"https://raw.githubusercontent.com/andreoliwa/flake8-nitpick/master/{NITPICK_STYLE_TOML}"
ROOT_PYTHON_FILES = ("setup.py", "manage.py", "autoapp.py")
ROOT_FILES = ("requirements*.txt", "Pipfile") + ROOT_PYTHON_FILES
#: Special unique separator for :py:meth:`flatten()` and :py:meth:`unflatten()`,
# to avoid collision with existing key values (e.g. the default dot separator "." can be part of a pyproject.toml key).
UNIQUE_SEPARATOR = "$#@"
# JMESPath expressions
TOOL_NITPICK_JMEX = jmespath.compile("tool.nitpick")
NITPICK_STYLES_INCLUDE_JMEX = jmespath.compile("nitpick.styles.include")
NITPICK_MINIMUM_VERSION_JMEX = jmespath.compile("nitpick.minimum_version")
| StarcoderdataPython |
276822 | <filename>utils/utils.py
import torch
import csv
import numpy as np
def encode_onehot(labels, n_classes):
onehot = torch.FloatTensor(labels.size()[0], n_classes)
labels = labels.data
if labels.is_cuda:
onehot = onehot.cuda()
onehot.zero_()
onehot.scatter_(1, labels.view(-1, 1), 1)
return onehot
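# Example (illustrative, not part of the original module):
#   encode_onehot(torch.tensor([0, 2, 1]), 3) returns
#     [[1., 0., 0.],
#      [0., 0., 1.],
#      [0., 1., 0.]]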
class CSVLogger():
def __init__(self, args, filename='log.csv', fieldnames=['epoch']):
self.filename = filename
self.csv_file = open(filename, 'w')
# Write model configuration at top of csv
writer = csv.writer(self.csv_file)
for arg in vars(args):
writer.writerow([arg, getattr(args, arg)])
writer.writerow([''])
self.writer = csv.DictWriter(self.csv_file, fieldnames=fieldnames)
self.writer.writeheader()
self.csv_file.flush()
def writerow(self, row):
self.writer.writerow(row)
self.csv_file.flush()
def close(self):
self.csv_file.close()
class Cutout(object):
"""Randomly mask out one or more patches from an image.
https://arxiv.org/abs/1708.04552
Args:
length (int): The length (in pixels) of each square patch.
"""
def __init__(self, length):
self.length = length
def __call__(self, img):
"""
Args:
img (Tensor): Tensor image of size (C, H, W).
Returns:
Tensor: Image with n_holes of dimension length x length cut out of it.
"""
h = img.size(1)
w = img.size(2)
if np.random.choice([0, 1]):
mask = np.ones((h, w), np.float32)
y = np.random.randint(h)
x = np.random.randint(w)
            # integer division keeps the slice indices integral; the original
            # float division would make the mask indexing below fail
            y1 = np.clip(y - self.length // 2, 0, h)
            y2 = np.clip(y + self.length // 2, 0, h)
            x1 = np.clip(x - self.length // 2, 0, w)
            x2 = np.clip(x + self.length // 2, 0, w)
mask[y1: y2, x1: x2] = 0.
mask = torch.from_numpy(mask)
mask = mask.expand_as(img)
img = img * mask
return img
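# Example usage (a sketch, not part of the original module): Cutout is applied
# after ToTensor in a torchvision pipeline; a length of 8-16 pixels is typical
# for 32x32 CIFAR images.
#
#   import torchvision.transforms as transforms
#   train_transform = transforms.Compose([
#       transforms.ToTensor(),
#       Cutout(length=16),
#   ])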
| StarcoderdataPython |
1937858 | <filename>13_module_advanced/04_json/01_json.py
# 1) Convert a Python dict or list into a JSON string
# 2) Convert a JSON string returned by the frontend into a Python dict
import json
dic = {'id': 1, 'name':'我的天哪这么好玩', 'usertype': 0}
s = json.dumps(dic, ensure_ascii=False)
# s = json.dumps(dic)  # to handle Chinese in JSON, don't escape to ASCII
print(s)
print(type(s))
s = '{"id":1, "name":"我的天哪这么好玩","usertype":0}'
d = json.loads(s)
print(d, type(d))
"""
前端的json和python中的字典有什么区别
d = {"id":1, "islogin": True, "hasGirl": None}
"""
d = {"id":1, "islogin": True, "hasGirl": None}
print(json.dumps(d))
# {"id": 1, "islogin": true, "hasGirl": null}
# 列表和字典都是OK
lst = ['A', 'B', 'C', 'D']
s = json.dumps(lst, ensure_ascii=False)
print(s, type(s))
| StarcoderdataPython |
1891402 | <filename>myy/myapp/urls.py
from django.urls import path
from myapp import views
app_name='myapp'
urlpatterns=[
path('',views.hello),
path('add',views.add)
] | StarcoderdataPython |
122939 | <reponame>ChristosChristofidis/h2o-3
import h2o
import sys, os
def demo(func=None, interactive=True, echo=True, test=False):
"""
H2O built-in demo facility
:param func: A string that identifies the h2o python function to demonstrate.
:param interactive: If True, the user will be prompted to continue the demonstration after every segment.
:param echo: If True, the python commands that are executed will be displayed.
:param test: Used for pyunit testing. h2o.init() will not be called if set to True.
:return:
Example:
>>> h2o.demo("gbm")
"""
if func == "gbm": gbm_demo(interactive, echo, test)
elif func == "deeplearning": deeplearning_demo(interactive, echo, test)
else: print "Demo for {0} has not been implemented.".format(func)
def gbm_demo(interactive, echo, test):
demo_description = ['\n-----------------------------------------------------------------',
'This is a demo of H2O\'s GBM function.',
'It uploads a dataset to h2o, parses it, and shows a description.',
'Then, it divides the dataset into training and test sets, ',
'builds a GBM from the training set, and predicts on the test set.',
'Finally, default performance metrics are displayed.',
'-----------------------------------------------------------------']
demo_commands = ['# Connect to h2o',
'>>> h2o.init()\n',
'\n# Upload the prostate dataset that comes included in the h2o python package',
'>>> prostate = h2o.upload_file(path = os.path.join(sys.prefix, "h2o_data/prostate.csv"))\n',
'\n# Print a description of the prostate data',
'>>> prostate.describe()\n',
'\n# Randomly split the dataset into ~70/30, training/test sets',
'>>> r = prostate[0].runif()',
'>>> train = prostate[r < 0.70]',
'>>> valid = prostate[r >= 0.30]\n',
'\n# Convert the response columns to factors (for binary classification problems)',
'>>> train["CAPSULE"] = train["CAPSULE"].asfactor()',
'>>> test["CAPSULE"] = test["CAPSULE"].asfactor()\n',
'\n# Build a (classification) GBM',
'>>> prostate_gbm = h2o.gbm(x=train[["AGE", "RACE", "PSA", "VOL", "GLEASON"]], '
'y=train["CAPSULE"], distribution="bernoulli", ntrees=10, max_depth=8, min_rows=10, '
'learn_rate=0.2)\n',
'\n# Show the model',
'>>> prostate_gbm.show()\n',
'\n# Predict on the test set and show the first ten predictions',
'>>> predictions = prostate_gbm.predict(test)',
'>>> predictions.show()\n',
'\n# Show default performance metrics',
'>>> performance = prostate_gbm.model_performance(test)',
'>>> performance.show()\n']
for line in demo_description: print line
print
echo_and_interact(demo_commands, interactive, echo)
if not test: h2o.init()
echo_and_interact(demo_commands, interactive, echo)
if not test: prostate = h2o.upload_file(path = os.path.join(sys.prefix, 'h2o_data/prostate.csv'))
else: prostate = h2o.upload_file(path = h2o.locate('smalldata/prostate/prostate.csv'))
echo_and_interact(demo_commands, interactive, echo)
prostate.describe()
echo_and_interact(demo_commands, interactive, echo, npop=4)
r = prostate[0].runif()
train = prostate[r < 0.70]
test = prostate[r >= 0.30]
echo_and_interact(demo_commands, interactive, echo, npop=3)
train["CAPSULE"] = train["CAPSULE"].asfactor()
test["CAPSULE"] = test["CAPSULE"].asfactor()
echo_and_interact(demo_commands, interactive, echo)
prostate_gbm = h2o.gbm(x=train[["AGE", "RACE", "PSA", "VOL", "GLEASON"]], y=train["CAPSULE"],
distribution="bernoulli", ntrees=10, max_depth=8, min_rows=10, learn_rate=0.2)
echo_and_interact(demo_commands, interactive, echo)
prostate_gbm.show()
echo_and_interact(demo_commands, interactive, echo, npop=3)
predictions = prostate_gbm.predict(test)
predictions.show()
echo_and_interact(demo_commands, interactive, echo, npop=3)
performance = prostate_gbm.model_performance(test)
performance.show()
def deeplearning_demo(interactive, echo, test):
demo_description = ['\n-----------------------------------------------------------------',
'This is a demo of H2O\'s Deeplearning function.',
'It uploads a dataset to h2o, parses it, and shows a description.',
'Then, it divides the dataset into training and test sets, ',
'builds a model from the training set, and predicts on the test set.',
'Finally, default performance metrics are displayed.',
'-----------------------------------------------------------------']
demo_commands = ['# Connect to h2o',
'>>> h2o.init()\n',
'\n# Upload the prostate dataset that comes included in the h2o python package',
'>>> prostate = h2o.upload_file(path = os.path.join(sys.prefix, "h2o_data/prostate.csv"))\n',
'\n# Print a description of the prostate data',
'>>> prostate.describe()\n',
'\n# Randomly split the dataset into ~70/30, training/test sets',
'>>> r = prostate[0].runif()',
'>>> train = prostate[r < 0.70]',
'>>> valid = prostate[r >= 0.30]\n',
'\n# Convert the response columns to factors (for binary classification problems)',
'>>> train["CAPSULE"] = train["CAPSULE"].asfactor()',
'>>> test["CAPSULE"] = test["CAPSULE"].asfactor()\n',
'\n# Build a (classification) Deeplearning model',
'>>> prostate_dl = h2o.deeplearning(x=train[list(set(prostate.col_names())-set(["ID","CAPSULE"]))]'
', y=train["CAPSULE"], activation="Tanh", hidden=[10, 10, 10], epochs=10000)\n',
'\n# Show the model',
'>>> prostate_dl.show()\n',
'\n# Predict on the test set and show the first ten predictions',
'>>> predictions = prostate_dl.predict(test)',
'>>> predictions.show()\n',
'\n# Show default performance metrics',
'>>> performance = prostate_dl.model_performance(test)',
'>>> performance.show()\n']
for line in demo_description: print line
print
echo_and_interact(demo_commands, interactive, echo)
if not test: h2o.init()
echo_and_interact(demo_commands, interactive, echo)
if not test: prostate = h2o.upload_file(path = os.path.join(sys.prefix, 'h2o_data/prostate.csv'))
else: prostate = h2o.upload_file(path = h2o.locate('smalldata/prostate/prostate.csv'))
echo_and_interact(demo_commands, interactive, echo)
prostate.describe()
echo_and_interact(demo_commands, interactive, echo, npop=4)
r = prostate[0].runif()
train = prostate[r < 0.70]
test = prostate[r >= 0.30]
echo_and_interact(demo_commands, interactive, echo, npop=3)
train["CAPSULE"] = train["CAPSULE"].asfactor()
test["CAPSULE"] = test["CAPSULE"].asfactor()
echo_and_interact(demo_commands, interactive, echo)
prostate_dl = h2o.deeplearning(x=train[list(set(prostate.col_names())-set(["ID","CAPSULE"]))], y=train["CAPSULE"],
activation="Tanh", hidden=[10, 10, 10], epochs=10000)
echo_and_interact(demo_commands, interactive, echo)
prostate_dl.show()
echo_and_interact(demo_commands, interactive, echo, npop=3)
predictions = prostate_dl.predict(test)
predictions.show()
echo_and_interact(demo_commands, interactive, echo, npop=3)
performance = prostate_dl.model_performance(test)
performance.show()
def echo_and_interact(demo_commands, interactive, echo, npop=2):
if demo_commands:
if echo:
for p in range(npop): print demo_commands.pop(0)
if interactive:
raw_input('Press ENTER...\n') | StarcoderdataPython |
12859073 | # Copyright (c) 2020 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM,OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""EII Message Bus Azure Edge Runtime Bridge
"""
import asyncio
import traceback as tb
from eab.bridge_state import BridgeState
def main():
"""Main method.
"""
    bs = None
    loop = None
    try:
        bs = BridgeState.get_instance()
        loop = asyncio.get_event_loop()
        loop.run_forever()
    except Exception as e:
        print(f'[ERROR] {e}\n{tb.format_exc()}')
        raise
    finally:
        if bs is not None:
            # Fully stop the bridge
            bs.stop()
        if loop is not None:
            # Clean up asyncio; guarded so a failure before the event loop is
            # created does not raise a NameError here
            loop.stop()
            loop.close()
if __name__ == "__main__":
main()
| StarcoderdataPython |
1738885 | <filename>tf_ops/grouping/tf_grouping_op_test.py
import tensorflow as tf
import numpy as np
from tf_grouping import query_ball_point, query_ball_point2, group_point
from scipy.spatial.distance import cdist
class GroupPointTest(tf.test.TestCase):
def test(self):
pass
def test_grad(self):
with tf.device('/gpu:0'):
points = tf.constant(np.random.random((1, 128, 16)).astype('float32'))
print(points)
xyz1 = tf.constant(np.random.random((1, 128, 3)).astype('float32'))
xyz2 = tf.constant(np.random.random((1, 8, 3)).astype('float32'))
radius = 0.3
nsample = 32
idx, pts_cnt = query_ball_point(radius, nsample, xyz1, xyz2)
grouped_points = group_point(points, idx)
print(grouped_points)
# with self.test_session():
with tf.Session() as sess:
print("---- Going to compute gradient error")
err = tf.test.compute_gradient_error(points, (1, 128, 16), grouped_points, (1, 8, 32, 16))
print(err)
self.assertLess(err, 1e-4)
class QueryBallPoint2Test(tf.test.TestCase):
def test(self):
nbatch = 1
xyz1 = np.random.random((nbatch, 128, 3)).astype('float32')
xyz2 = np.random.random((nbatch, 8, 3)).astype('float32')
radii = np.random.uniform(low=0.2, high=0.4, size=(nbatch, 8)).astype('float32')
print('---- Verifying QueryBallPoint2')
with tf.device('/gpu:0'):
xyz1_tensor = tf.constant(xyz1)
xyz2_tensor = tf.constant(xyz2)
radii_tensor = tf.constant(radii)
nsample = 32
idx_op, pts_cnt_op = query_ball_point2(radii_tensor, nsample, xyz1_tensor, xyz2_tensor)
with tf.Session() as sess:
idx, pts_cnt = sess.run([idx_op, pts_cnt_op])
assert (np.max(idx < 128))
assert (np.max(pts_cnt <= nsample))
for i in range(nbatch): # For each model in batch
Y = cdist(xyz1[i, :], xyz2[i, :], 'euclidean')
within_ball = Y < np.expand_dims(radii[i, :], axis=0)
pts_cnt_gt = np.sum(within_ball, axis=0)
assert (np.all(pts_cnt[i, :] == pts_cnt_gt))
for j in range(xyz2.shape[1]): # For each cluster
assert (set(idx[i, j, :]) == set(np.nonzero(within_ball[:, j])[0]))
pass
if __name__ == '__main__':
tf.test.main()
| StarcoderdataPython |
3594221 | # encoding=utf-8
import os
import numpy as np
from Moldata import Moldata
from Reaction import Reaction
import matplotlib.pyplot as plt
def shermo(
input,
Temp=300.0,
sclZPE=1.0,
sclHeat=1.0,
sclS=1.0,
E='N/A',
shermo_path='Shermo'
):
if E=='N/A':
command=shermo_path+' '+input+' -T '+str(Temp)+' -P 1.0 -sclZPE '+str(sclZPE)+' -sclheat '+str(sclHeat)+' -sclS '+str(sclS)
else:
command=shermo_path+' '+input+' -E '+str(E)+' -T '+str(Temp)+' -P 1.0 -sclZPE '+str(sclZPE)+' -sclheat '+str(sclHeat)+' -sclS '+str(sclS)
os.system(command+'> tmp')
file=open('tmp','r')
tmp=file.readlines()[-13:]
file.close()
os.remove('tmp')
data=['','','']
data[0]=tmp[-4].split()
data[1]=tmp[-1].split()
data[2]=tmp[0].split()
result={}
result['U0K']=data[0][-2]
result['GTK']=data[1][-2]
result['Q']=data[2][-1]
return result
def run_shermo(
Nmol,
Rinput='',
R2input='N/A',
TSinput='',
Pinput='',
P2input='N/A',
iFreq=-0.0,
Temp=300.0,
RE='N/A',
R2E='N/A',
TSE='N/A',
PE='N/A',
P2E='N/A',
sclZPE=1.0,
sclHeat=1.0,
sclS=1.0,
shermo_path='Shermo'
):
if Nmol==1:
Rresult=shermo(
input=Rinput,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS, E=RE,
shermo_path=shermo_path
)
TSresult=shermo(
input=TSinput,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
E=TSE,
shermo_path=shermo_path
)
Presult=shermo(
input=Pinput,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
E=PE,
shermo_path=shermo_path
)
R=Moldata(
U0K=Rresult['U0K'],
GTK=Rresult['GTK'],
Q=Rresult['Q'],
EUnit='Eh'
)
TS=Moldata(
U0K=TSresult['U0K'],
GTK=TSresult['GTK'],
Q=TSresult['Q'],
EUnit='Eh'
)
if P2input=='N/A':
P=Moldata(
U0K=Presult['U0K'],
GTK=Presult['GTK'],
Q=Presult['Q'],
EUnit='Eh'
)
else:
            # use the second product's input file and energy (the original
            # mistakenly reused Pinput/PE here)
            P2result=shermo(
                input=P2input,
                Temp=Temp,
                sclZPE=sclZPE,
                sclHeat=sclHeat,
                sclS=sclS,
                E=P2E,
                shermo_path=shermo_path
            )
P1=Moldata(
U0K=Presult['U0K'],
GTK=Presult['GTK'],
Q=Presult['Q'],
EUnit='Eh'
)
P2=Moldata(
U0K=P2result['U0K'],
GTK=P2result['GTK'],
Q=P2result['Q'],
EUnit='Eh'
)
P=P1+P2
cal=Reaction(
Nmol=1,
molR=R,
molTS=TS,
molP=P,
Temp=Temp,
iFreq=iFreq
)
cal.printf()
elif Nmol==2 and R2input!='N/A':
Rresult=shermo(
input=Rinput,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
E=RE,
shermo_path=shermo_path
)
R2result=shermo(
input=R2input,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
E=R2E,
shermo_path=shermo_path
)
TSresult=shermo(
input=TSinput,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
E=TSE,
shermo_path=shermo_path
)
Presult=shermo(
input=Pinput,
Temp=Temp,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
E=PE,
shermo_path=shermo_path
)
R=Moldata(
U0K=Rresult['U0K'],
GTK=Rresult['GTK'],
Q=Rresult['Q'],
EUnit='Eh'
)
R2=Moldata(
U0K=R2result['U0K'],
GTK=R2result['GTK'],
Q=R2result['Q'],
EUnit='Eh'
)
TS=Moldata(
U0K=TSresult['U0K'],
GTK=TSresult['GTK'],
Q=TSresult['Q'],
EUnit='Eh'
)
if P2input=='N/A':
P=Moldata(
U0K=Presult['U0K'],
GTK=Presult['GTK'],
Q=Presult['Q'],
EUnit='Eh'
)
else:
            # use the second product's input file and energy (the original
            # mistakenly reused Pinput/PE here)
            P2result=shermo(
                input=P2input,
                Temp=Temp,
                sclZPE=sclZPE,
                sclHeat=sclHeat,
                sclS=sclS,
                E=P2E,
                shermo_path=shermo_path
            )
P1=Moldata(
U0K=Presult['U0K'],
GTK=Presult['GTK'],
Q=Presult['Q'],
EUnit='Eh'
)
P2=Moldata(
U0K=P2result['U0K'],
GTK=P2result['GTK'],
Q=P2result['Q'],
EUnit='Eh'
)
P=P1+P2
cal=Reaction(
Nmol=2,
molR=R+R2,
molTS=TS,
molP=P,
Temp=Temp,
iFreq=iFreq
)
cal.printf()
elif Nmol==2 and R2input=='N/A':
print("Parameter R2input is necessary! ")
exit()
else:
print("The number of molecules in primitive chemical reaction is not suitable! ")
exit()
return cal
def scan_shermo(
Nmol,
Rinput='',
R2input='N/A',
TSinput='',
Pinput='',
P2input='N/A',
iFreq=-0.0,
Temp=[],
RE='N/A',
R2E='N/A',
TSE='N/A',
PE='N/A',
P2E='N/A',
sclZPE=1.0,
sclHeat=1.0,
sclS=1.0,
shermo_path='Shermo'
):
result=[]
Xaxis=Temp
GYaxis=[]
QYaxis=[]
for temp in Temp:
calc=run_shermo(
Nmol=Nmol,
Rinput=Rinput,
R2input=R2input,
TSinput=TSinput,
Pinput=Pinput,
P2input=P2input,
iFreq=iFreq,
Temp=float(temp),
RE=RE,
R2E=R2E,
TSE=TSE,
PE=PE,
P2E=P2E,
sclZPE=sclZPE,
sclHeat=sclHeat,
sclS=sclS,
shermo_path=shermo_path
)
GYaxis.append(calc.get_kinetic_g())
QYaxis.append(calc.get_kinetic_q())
xx=np.linspace(min(Temp),max(Temp),200)
    GFunc=np.poly1d(np.polyfit(Xaxis,GYaxis,3))
    QFunc=np.poly1d(np.polyfit(Xaxis,QYaxis,3))
    # root-mean-square deviation of each cubic fit from the scanned points
    GRmsd=np.sqrt(np.mean((GFunc(np.array(Xaxis))-np.array(GYaxis))**2))
    QRmsd=np.sqrt(np.mean((QFunc(np.array(Xaxis))-np.array(QYaxis))**2))
result.append(Temp)
result.append(GYaxis)
result.append(QYaxis)
if Nmol==1:
print(f'''
---------------------------------------------------------
Part IV
Relationship between Reaction Kinetics and Temperature
---------------------------------------------------------
Gibbs Free Energy Method:
k(T)/s-1
= ({GFunc.coeffs[0]:.3E})\t * (T/K)^3
+ ({GFunc.coeffs[1]:.3E})\t * (T/K)^2
+ ({GFunc.coeffs[2]:.3E})\t * (T/K)
+ ({GFunc.coeffs[3]:.3E})
        RMSD= {GRmsd:.3E}
Partition Function Method:
k(T)/s-1
= ({QFunc.coeffs[0]:.3E})\t * (T/K)^3
+ ({QFunc.coeffs[1]:.3E})\t * (T/K)^2
+ ({QFunc.coeffs[2]:.3E})\t * (T/K)
+ ({QFunc.coeffs[3]:.3E})
        RMSD= {QRmsd:.3E}
---------------------------------------------------------
''')
elif Nmol==2:
print(f'''
---------------------------------------------------------
Part IV
Relationship between Reaction Kinetics and Temperature
---------------------------------------------------------
Gibbs Free Energy Method:
k(T)/((mol/L)-1*s-1)
= ({GFunc.coeffs[0]:.3E})\t * (T/K)^3
+ ({GFunc.coeffs[1]:.3E})\t * (T/K)^2
+ ({GFunc.coeffs[2]:.3E})\t * (T/K)
+ ({GFunc.coeffs[3]:.3E})
        RMSD= {GRmsd:.3E}
Partition Function Method:
k(T)/((mol/L)-1*s-1)
= ({QFunc.coeffs[0]:.3E})\t * (T/K)^3
+ ({QFunc.coeffs[1]:.3E})\t * (T/K)^2
+ ({QFunc.coeffs[2]:.3E})\t * (T/K)
+ ({QFunc.coeffs[3]:.3E})
        RMSD= {QRmsd:.3E}
---------------------------------------------------------
''')
else:
print("The number of molecules in primitive chemical reaction is not suitable! ")
exit()
print('''
d8b db .d88b. d8888b. .88b d88. .d8b. db
888o 88 .8P Y8. 88 `8D 88'YbdP`88 d8' `8b 88
88V8o 88 88 88 88oobY' 88 88 88 88ooo88 88
88 V8o88 88 88 88`8b 88 88 88 88~~~88 88
88 V888 `8b d8' 88 `88. 88 88 88 88 88 88booo.
VP V8P `Y88P' 88 YD YP YP YP YP YP Y88888P
d88888b d8b db d8888b. db
88' 888o 88 88 `8D 88
88ooooo 88V8o 88 88 88 YP
88~~~~~ 88 V8o88 88 88
88. 88 V888 88 .8D db
Y88888P VP V8P Y8888D' YP
''')
plt.plot(Xaxis,GYaxis,'rx',xx,GFunc(xx),'-')
plt.title('Reaction Rate Constant (Gibbs Free Energy Method)')
if Nmol==1:
plt.ylabel('k/(s-1)')
elif Nmol==2:
plt.ylabel('k/((mol/L)-1*s-1)')
else:
print("The number of molecules in primitive chemical reaction is not suitable! ")
exit()
plt.xlabel('Temperature/K')
plt.figure(2)
plt.plot(Xaxis,QYaxis,'rx',xx,QFunc(xx),'-')
plt.title('Reaction Rate Constant (Partition Function Method)')
if Nmol==1:
plt.ylabel('k/(s-1)')
elif Nmol==2:
plt.ylabel('k/((mol/L)-1*s-1)')
else:
print("The number of molecules in primitive chemical reaction is not suitable! ")
exit()
plt.xlabel('Temperature/K')
plt.show()
return result
if __name__ == "__main__":
result=scan_shermo(
Nmol=2,
Rinput='DA-R.log',
R2input='DA-R2.log',
TSinput='DA-TS.log',
Pinput='DA-P.log',
iFreq=-2000.2842,
Temp=list(range(273,373))
)
print(result)
| StarcoderdataPython |
6496539 | <filename>userena/forms.py<gh_stars>10-100
import random
from collections import OrderedDict
from hashlib import sha1
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth import get_user_model
from django.utils.translation import gettext_lazy as _
from userena import settings as userena_settings
from userena.models import UserenaSignup
from userena.utils import get_profile_model
attrs_dict = {"class": "required"}
USERNAME_RE = r"^[\.\w]+$"
class SignupForm(forms.Form):
"""
Form for creating a new user account.
Validates that the requested username and e-mail is not already in use.
Also requires the password to be entered twice.
"""
username = forms.RegexField(
regex=USERNAME_RE,
max_length=30,
widget=forms.TextInput(attrs=attrs_dict),
label=_("Username"),
error_messages={
"invalid": _(
"Username must contain only letters, numbers, dots and underscores."
)
},
)
email = forms.EmailField(
widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=75)), label=_("Email")
)
password1 = forms.CharField(
widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_("Create password"),
)
password2 = forms.CharField(
widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_("Repeat password"),
)
def clean_username(self):
"""
Validate that the username is alphanumeric and is not already in use.
Also validates that the username is not listed in
``USERENA_FORBIDDEN_USERNAMES`` list.
"""
try:
user = get_user_model().objects.get(
username__iexact=self.cleaned_data["username"]
)
except get_user_model().DoesNotExist:
pass
else:
if userena_settings.USERENA_ACTIVATION_REQUIRED and UserenaSignup.objects.filter(
user__username__iexact=self.cleaned_data["username"]
).exclude(
activation_key=userena_settings.USERENA_ACTIVATED
):
raise forms.ValidationError(
_(
"This username is already taken but not confirmed. Please check your email for verification steps."
)
)
raise forms.ValidationError(_("This username is already taken."))
if (
self.cleaned_data["username"].lower()
in userena_settings.USERENA_FORBIDDEN_USERNAMES
):
raise forms.ValidationError(_("This username is not allowed."))
return self.cleaned_data["username"]
def clean_email(self):
""" Validate that the e-mail address is unique. """
if get_user_model().objects.filter(email__iexact=self.cleaned_data["email"]):
if userena_settings.USERENA_ACTIVATION_REQUIRED and UserenaSignup.objects.filter(
user__email__iexact=self.cleaned_data["email"]
).exclude(
activation_key=userena_settings.USERENA_ACTIVATED
):
raise forms.ValidationError(
_(
"This email is already in use but not confirmed. Please check your email for verification steps."
)
)
raise forms.ValidationError(
_("This email is already in use. Please supply a different email.")
)
return self.cleaned_data["email"]
def clean(self):
"""
Validates that the values entered into the two password fields match.
Note that an error here will end up in ``non_field_errors()`` because
it doesn't apply to a single field.
"""
if "password1" in self.cleaned_data and "password2" in self.cleaned_data:
if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
raise forms.ValidationError(_("The two password fields didn't match."))
return self.cleaned_data
def save(self):
""" Creates a new user and account. Returns the newly created user. """
username, email, password = (
self.cleaned_data["username"],
self.cleaned_data["email"],
self.cleaned_data["password1"],
)
new_user = UserenaSignup.objects.create_user(
username,
email,
password,
not userena_settings.USERENA_ACTIVATION_REQUIRED,
userena_settings.USERENA_ACTIVATION_REQUIRED,
)
return new_user
class SignupFormOnlyEmail(SignupForm):
"""
Form for creating a new user account but not needing a username.
    This form is an adaptation of :class:`SignupForm`. It's used when the
    ``USERENA_WITHOUT_USERNAMES`` setting is set to ``True``. The user is not
    asked to supply a username; one is generated for them instead. The user
    can then keep signing in by using their email.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
del self.fields["username"]
def save(self):
""" Generate a random username before falling back to parent signup form """
while True:
username = sha1(str(random.random()).encode("utf-8")).hexdigest()[:5]
try:
get_user_model().objects.get(username__iexact=username)
except get_user_model().DoesNotExist:
break
self.cleaned_data["username"] = username
return super().save()
class SignupFormTos(SignupForm):
""" Add a Terms of Service button to the ``SignupForm``. """
tos = forms.BooleanField(
widget=forms.CheckboxInput(attrs=attrs_dict),
label=_("I have read and agree to the Terms of Service"),
error_messages={"required": _("You must agree to the terms to register.")},
)
def identification_field_factory(label, error_required):
"""
A simple identification field factory which enable you to set the label.
:param label:
String containing the label for this field.
:param error_required:
String containing the error message if the field is left empty.
"""
return forms.CharField(
label=label,
widget=forms.TextInput(attrs=attrs_dict),
max_length=75,
error_messages={"required": error_required},
)
class AuthenticationForm(forms.Form):
"""
A custom form where the identification can be a e-mail address or username.
"""
identification = identification_field_factory(
_("Email or username"), _("Either supply us with your email or username.")
)
password = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
)
remember_me = forms.BooleanField(
widget=forms.CheckboxInput(),
required=False,
label=_("Remember me for %(days)s")
% {"days": _(userena_settings.USERENA_REMEMBER_ME_DAYS[0])},
)
def __init__(self, *args, **kwargs):
""" A custom init because we need to change the label if no usernames is used """
super().__init__(*args, **kwargs)
# Dirty hack, somehow the label doesn't get translated without declaring
# it again here.
self.fields["remember_me"].label = _("Remember me for %(days)s") % {
"days": _(userena_settings.USERENA_REMEMBER_ME_DAYS[0])
}
if userena_settings.USERENA_WITHOUT_USERNAMES:
self.fields["identification"] = identification_field_factory(
_("Email"), _("Please supply your email.")
)
def clean(self):
"""
Checks for the identification and password.
If the combination can't be found will raise an invalid sign in error.
"""
identification = self.cleaned_data.get("identification")
password = self.cleaned_data.get("password")
if identification and password:
user = authenticate(identification=identification, password=password)
if user is None:
raise forms.ValidationError(
_(
"Please enter a correct username or email and password. Note that both fields are case-sensitive."
)
)
return self.cleaned_data
class ChangeEmailForm(forms.Form):
email = forms.EmailField(
widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=75)),
label=_("New email"),
)
def __init__(self, user, *args, **kwargs):
"""
        The current ``user`` is needed to initialise this form so that we can
        check whether the email address is still free, without the query
        always returning ``True`` just because it matches the user's own
        e-mail address.
"""
super().__init__(*args, **kwargs)
if not isinstance(user, get_user_model()):
raise TypeError(
"user must be an instance of %s" % get_user_model().__name__
)
else:
self.user = user
def clean_email(self):
""" Validate that the email is not already registered with another user """
if self.cleaned_data["email"].lower() == self.user.email:
raise forms.ValidationError(_("You're already known under this email."))
if (
get_user_model()
.objects.filter(email__iexact=self.cleaned_data["email"])
.exclude(email__iexact=self.user.email)
):
raise forms.ValidationError(
_("This email is already in use. Please supply a different email.")
)
return self.cleaned_data["email"]
def save(self):
"""
        Save method calls the :func:`user.change_email()` method, which sends
        out an email with a verification key used to verify and enable this
        new email address.
"""
return self.user.userena_signup.change_email(self.cleaned_data["email"])
class EditProfileForm(forms.ModelForm):
""" Base form used for fields that are always required """
first_name = forms.CharField(label=_("First name"), max_length=30, required=False)
last_name = forms.CharField(label=_("Last name"), max_length=30, required=False)
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
# Put the first and last name at the top
new_order = [
("first_name", self.fields["first_name"]),
("last_name", self.fields["last_name"]),
]
new_order.extend(list(self.fields.items())[:-2])
self.fields = OrderedDict(new_order)
class Meta:
model = get_profile_model()
exclude = ["user"]
def save(self, force_insert=False, force_update=False, commit=True):
profile = super().save(commit=commit)
# Save first and last name
user = profile.user
user.first_name = self.cleaned_data["first_name"]
user.last_name = self.cleaned_data["last_name"]
user.save()
return profile
class ActivationForm(forms.Form):
"""Form for activating an account."""
pass
| StarcoderdataPython |
5073889 | <gh_stars>0
import numpy as np
from jax import numpy as jnp
from jax import jit, grad, hessian, vmap, random
from jax.example_libraries import optimizers, stax
from jax.example_libraries.stax import (Dense, Tanh)
import paddlescience as psci
import time
def Network(num_outs, hiden_size):
return stax.serial(
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(hiden_size), Tanh,
Dense(num_outs)
)
# Analytical solution
def LaplaceRecSolution(x, y, k=1.0):
if (k == 0.0):
return x * y
else:
return np.cos(k * x) * np.cosh(k * y)
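# Worked check (added note): u(x, y) = cos(k*x) * cosh(k*y) is harmonic because
# u_xx = -k^2 * cos(k*x) * cosh(k*y) and u_yy = +k^2 * cos(k*x) * cosh(k*y),
# so u_xx + u_yy = 0. For k = 0 the code falls back to u = x*y, whose second
# derivatives both vanish, so it satisfies the Laplace equation as well.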
# Generate analytical Solution using Geometry points
def GenSolution(xy, bc_index):
sol = np.zeros((len(xy), 1)).astype(np.float32)
bc_value = np.zeros((len(bc_index), 1)).astype(np.float32)
for i in range(len(xy)):
sol[i] = LaplaceRecSolution(xy[i][0], xy[i][1])
for i in range(len(bc_index)):
bc_value[i][0] = sol[bc_index[i]]
return [sol, bc_value]
if __name__ == "__main__":
rng_key = random.PRNGKey(0)
batch_size = 101 * 101
num_ins = 2
num_outs = 1
num_epochs = 2000
hiden_size = 512
input_shape = (batch_size, num_ins)
init_func, predict_func = Network(num_outs, hiden_size)
_, init_params = init_func(rng_key, input_shape)
# Geometry
geo = psci.geometry.Rectangular(
space_origin=(0.0, 0.0), space_extent=(1.0, 1.0))
geo = geo.discretize(space_nsteps=(101, 101))
golden, bc_value = GenSolution(geo.space_domain, geo.bc_index)
# save golden
psci.visu.save_vtk(geo, golden, 'golden_laplace_2d')
np.save("./golden_laplace_2d.npy", golden)
inputs = jnp.array(geo.space_domain)
bc_index = jnp.array(geo.bc_index)
bc_value = jnp.array(bc_value)
def laplace_eq_loss(params, inputs):
def pde_func(params, inputs):
hes = hessian(predict_func, argnums=1)(params, inputs)
return hes[0][0][0] + hes[0][1][1]
pde_vfunc = vmap(pde_func, [None, 0], 0)
pde_v = pde_vfunc(params, inputs)
return jnp.linalg.norm(pde_v, ord=2)
def loss(params, inputs):
outputs = predict_func(params, inputs)
eq_loss = laplace_eq_loss(params, inputs)
bc_loss = jnp.linalg.norm(outputs[bc_index] - bc_value, ord=2)
return eq_loss + bc_loss
opt_init, opt_update, get_params = optimizers.adam(step_size=0.001)
@jit
def update(i, opt_state, inputs):
params = get_params(opt_state)
total_loss = loss(params, inputs)
opt_state = opt_update(i, grad(loss)(params, inputs), opt_state)
return total_loss, opt_state
opt_state = opt_init(init_params)
begin = time.time()
total_loss, opt_state = update(0, opt_state, inputs)
print("num_epoch: ", 0, " totol_loss: ", total_loss)
total_loss.block_until_ready()
mid = time.time()
for i in range(num_epochs):
total_loss, opt_state = update(i+1, opt_state, inputs)
print("num_epoch: ", i+1, " totol_loss: ", total_loss)
total_loss.block_until_ready()
end = time.time()
trained_params = get_params(opt_state)
rslt = np.array(predict_func(trained_params, inputs), copy=False)
psci.visu.save_vtk(geo, rslt, 'rslt_laplace_2d')
np.save('./rslt_laplace_2d.npy', rslt)
# Calculate diff and l2 relative error
diff = rslt - golden
psci.visu.save_vtk(geo, diff, 'diff_laplace_2d')
np.save('./diff_laplace_2d.npy', diff)
root_square_error = np.linalg.norm(diff, ord=2)
mean_square_error = root_square_error * root_square_error / geo.get_domain_size(
)
print("mean_sqeare_error: ", mean_square_error)
print("first epoch time: ", mid-begin)
print("2000 epoch time: ", end-mid)
| StarcoderdataPython |
103126 | import re
from typing import Optional
class BaseModel:
r"""
The Base Class for Model objects.
.. container:: operations
.. describe:: x == y
Checks if two models have the same slug.
.. describe:: x != y
Checks if two models do not have the same slug.
.. describe:: str(x)
Returns the model's name.
Parameters
----------
slug: :class:`str`
The unique identifier of the model.
Other Parameters
----------------
name: Optional[:class:`str`]
The name of the object.
Attributes
----------
slug: :class:`str`
The unique identifier.
name: Optional[:class:`str`]
The name of the object.
"""
def __init__(self, slug: str, name: str = None):
self.slug: str = slug
self.name: Optional[str] = name
def __eq__(self, other):
return self.slug == other.slug
def __ne__(self, other):
return not self == other
def __str__(self):
return self.name
@staticmethod
def remove_html(content: str) -> str:
"""
Removes HTML tags of the html content and returns a cleansed version.
Parameters
----------
content: :class:`str`
The raw html content to remove html tags from.
Returns
-------
A cleansed string with no HTML.: :class:`str`
"""
if not content:
return ""
content = content.replace("<br>", "\n") # replace new line tags before they get replaced.
html_cleaner = re.compile('<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});')
clean_text = re.sub(html_cleaner, '', content)
return clean_text
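# Example (illustrative, not part of the original module):
#
#   BaseModel.remove_html('<p>Hello<br>world &amp; friends</p>')
#   # -> 'Hello\nworld  friends'  (the <br> becomes a newline; tags and
#   #    entities such as &amp; are stripped outright)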
| StarcoderdataPython |
4855433 | <reponame>KUTuaNithid/connect4Nithid<gh_stars>10-100
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 12 12:48:21 2018
@author: Arpit
"""
import logging
def setup_logger(name, log_file, level=logging.INFO):
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file)
handler.setFormatter(formatter)
logger = logging.getLogger(name)
logger.setLevel(level)
if not logger.handlers:
logger.addHandler(handler)
return logger
LOGGER_DISABLED = {
'main':True
}
main = setup_logger('main', 'logs/main.log', logging.INFO)
main.disabled = LOGGER_DISABLED['main'] | StarcoderdataPython |
5095627 | # encoding=utf8
from niapy.algorithms.basic import DifferentialEvolution, DynNpDifferentialEvolution, AgingNpDifferentialEvolution, \
MultiStrategyDifferentialEvolution, DynNpMultiStrategyDifferentialEvolution
from niapy.algorithms.basic.de import cross_rand1, cross_rand2, cross_best1, cross_best2, cross_curr2rand1, cross_curr2best1
from niapy.tests.test_algorithm import AlgorithmTestCase, MyProblem
class DETestCase(AlgorithmTestCase):
def setUp(self):
AlgorithmTestCase.setUp(self)
self.algo = DifferentialEvolution
def test_Custom(self):
de_custom = self.algo(differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
de_customc = self.algo(differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_custom, de_customc, MyProblem())
def test_griewank(self):
de_griewank = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
de_griewankc = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_griewank, de_griewankc)
def test_CrossRand1(self):
de_rand1 = self.algo(strategy=cross_rand1, seed=self.seed)
de_rand1c = self.algo(strategy=cross_rand1, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_rand1, de_rand1c)
def test_CrossBest1(self):
de_best1 = self.algo(strategy=cross_best1, seed=self.seed)
de_best1c = self.algo(strategy=cross_best1, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_best1, de_best1c)
def test_CrossRand2(self):
de_rand2 = self.algo(strategy=cross_rand2, seed=self.seed)
de_rand2c = self.algo(strategy=cross_rand2, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_rand2, de_rand2c)
def test_CrossBest2(self):
de_best2 = self.algo(strategy=cross_best2, seed=self.seed)
de_best2c = self.algo(strategy=cross_best2, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_best2, de_best2c)
def test_CrossCurr2Rand1(self):
de_curr2rand1 = self.algo(strategy=cross_curr2rand1, seed=self.seed)
de_curr2rand1c = self.algo(strategy=cross_curr2rand1, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_curr2rand1, de_curr2rand1c)
def test_CrossCurr2Best1(self):
de_curr2best1 = self.algo(strategy=cross_curr2best1, seed=self.seed)
de_curr2best1c = self.algo(strategy=cross_curr2best1, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_curr2best1, de_curr2best1c)
class DynNpDETestCase(AlgorithmTestCase):
def setUp(self):
AlgorithmTestCase.setUp(self)
self.algo = DynNpDifferentialEvolution
def test_Custom(self):
de_custom = self.algo(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
de_customc = self.algo(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_custom, de_customc, MyProblem())
def test_griewank(self):
de_griewank = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
de_griewankc = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_griewank, de_griewankc, 'griewank')
class ANpDETestCase(AlgorithmTestCase):
def setUp(self):
AlgorithmTestCase.setUp(self)
self.algo = AgingNpDifferentialEvolution
def test_Custom(self):
de_custom = self.algo(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
de_customc = self.algo(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_custom, de_customc, MyProblem())
def test_griewank(self):
de_griewank = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
de_griewankc = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_griewank, de_griewankc, 'griewank')
class MsDETestCase(AlgorithmTestCase):
def setUp(self):
AlgorithmTestCase.setUp(self)
self.algo = MultiStrategyDifferentialEvolution
def test_Custom(self):
de_custom = MultiStrategyDifferentialEvolution(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
de_customc = MultiStrategyDifferentialEvolution(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_custom, de_customc, MyProblem())
def test_griewank(self):
de_griewank = MultiStrategyDifferentialEvolution(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
de_griewankc = MultiStrategyDifferentialEvolution(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_griewank, de_griewankc, 'griewank')
class DynNpMsDETestCase(AlgorithmTestCase):
def setUp(self):
AlgorithmTestCase.setUp(self)
self.algo = DynNpMultiStrategyDifferentialEvolution
def test_Custom(self):
de_custom = self.algo(rp=3, population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
de_customc = self.algo(rp=3, population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_custom, de_customc, MyProblem())
def test_griewank(self):
de_griewank = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
de_griewankc = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
AlgorithmTestCase.test_algorithm_run(self, de_griewank, de_griewankc, 'griewank')
# class ANpMsDETestCase(AlgorithmTestCase):
# def setUp(self):
# AlgorithmTestCase.setUp(self)
# self.algo = AgingNpMultiMutationDifferentialEvolution
#
# @skip("Not working")
# def test_Custom(self):
# de_custom = self.algo(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
# de_customc = self.algo(population_size=40, differential_weight=0.5, crossover_probability=0.9, seed=self.seed)
# AlgorithmTestCase.test_algorithm_run(self, de_custom, de_customc, MyProblem())
#
# @skip("Not working")
# def test_griewank(self):
# de_griewank = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
# de_griewankc = self.algo(population_size=10, crossover_probability=0.5, differential_weight=0.9, seed=self.seed)
# AlgorithmTestCase.test_algorithm_run(self, de_griewank, de_griewankc, 'griewank')
# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
| StarcoderdataPython |
11374368 | # -*- coding: utf-8 -*-
"""
@author:XuMing<<EMAIL>>
@description:
"""
from text2vec import Similarity
a = '湖北人爱吃鱼'
b = '甘肃人不爱吃鱼'
ss = Similarity(embedding_type='w2v')
ss.get_score(a, b)
print(ss.model.info())
ss = Similarity(embedding_type='bert')
print(ss.get_score(a, b))
print(ss.model.info())
| StarcoderdataPython |
3277626 | <gh_stars>1000+
from __future__ import division, print_function, absolute_import
import scipy.special as sc
import numpy as np
from numpy.testing import assert_, assert_equal, assert_allclose
def test_zeta():
assert_allclose(sc.zeta(2,2), np.pi**2/6 - 1, rtol=1e-12)
def test_zeta_1arg():
assert_allclose(sc.zeta(2), np.pi**2/6, rtol=1e-12)
assert_allclose(sc.zeta(4), np.pi**4/90, rtol=1e-12)
def test_zetac():
assert_equal(sc.zetac(0), -1.5)
assert_equal(sc.zetac(1.0), np.inf)
# Expected values in the following were computed using
# Wolfram Alpha `Zeta[x] - 1`:
rtol = 1e-12
assert_allclose(sc.zetac(-2.1), -0.9972705002153750, rtol=rtol)
assert_allclose(sc.zetac(0.8), -5.437538415895550, rtol=rtol)
assert_allclose(sc.zetac(0.9999), -10000.42279161673, rtol=rtol)
assert_allclose(sc.zetac(9), 0.002008392826082214, rtol=rtol)
assert_allclose(sc.zetac(50), 8.881784210930816e-16, rtol=rtol)
assert_allclose(sc.zetac(75), 2.646977960169853e-23, rtol=rtol)
def test_zetac_negative_even():
pts = [-2, -50, -100]
for p in pts:
assert_equal(sc.zetac(p), -1)
def test_zetac_inf():
assert_equal(sc.zetac(np.inf), 0.0)
assert_(np.isnan(sc.zetac(-np.inf)))
| StarcoderdataPython |
3340296 | class ChannelTransactionHistory:
def __init__(self, order_id, amount, currency, type, address, recipient, signature, org_id, group_id,
request_parameters, transaction_hash,
status):
self._order_id = order_id
self._amount = amount
self._currency = currency
self._type = type
self._address = address
self._recipient = recipient
self._signature = signature
self._org_id = org_id
self._group_id = group_id
self._request_parameters = request_parameters
self._transaction_hash = transaction_hash
self._status = status
def to_dict(self):
return {
"order_id": self._order_id,
"amount": self._amount,
"currency": self._currency,
"type": self._type,
"address": self._address,
"recipient": self._recipient,
"signature": self._signature,
"org_id": self._org_id,
"group_id": self._group_id,
"request_parameters": self._request_parameters,
"transaction_hash": self._transaction_hash,
"status": self._status
}
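    # Note (illustrative): to_dict() flattens every field into a plain dict,
    # e.g. for JSON serialization of a transaction record.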
@property
def order_id(self):
return self._order_id
@property
def amount(self):
return self._amount
@property
def currency(self):
return self._currency
@property
def type(self):
return self._type
@property
def address(self):
return self._address
@property
def recipient(self):
return self._recipient
@property
def signature(self):
return self._signature
@property
def org_id(self):
return self._org_id
@property
def group_id(self):
return self._group_id
@property
def request_parameters(self):
return self._request_parameters
@property
def transaction_hash(self):
return self._transaction_hash
@property
def status(self):
return self._status
@order_id.setter
def order_id(self, order_id):
self._order_id = order_id
@amount.setter
def amount(self, amount):
        self._amount = amount
@currency.setter
def currency(self, currency):
        self._currency = currency
@type.setter
def type(self, type):
        self._type = type
@address.setter
def address(self, address):
        self._address = address
@recipient.setter
def recipient(self, recipient):
        self._recipient = recipient
@signature.setter
def signature(self, signature):
        self._signature = signature
@org_id.setter
def org_id(self, org_id):
        self._org_id = org_id
@group_id.setter
def group_id(self, group_id):
        self._group_id = group_id
@request_parameters.setter
def request_parameters(self, request_parameters):
self._request_parameters = request_parameters
@transaction_hash.setter
def transaction_hash(self, transaction_hash):
self._transaction_hash = transaction_hash
@status.setter
def status(self, status):
        self._status = status
| StarcoderdataPython |
144214 | from os import path
from pony.orm.core import db_session
from tornado.web import StaticFileHandler, url, Application
from grepopla.controllers.PlayerController import PlayerController
from grepopla.controllers.IndexController import IndexController
from grepopla.settings import PRODUCTION
app_params = [
url(r'/', IndexController),
url(r'/game', PlayerController),
url(r'/static/(.*)', StaticFileHandler,
{"path": ''.join((path.dirname(path.abspath(__file__)), '/grepopla/static/'))})
]
app = Application(app_params, debug=not PRODUCTION, compiled_template_cache=PRODUCTION,
static_hash_cache=PRODUCTION)
@db_session
def start():
from tornado import ioloop, options, autoreload
options.parse_command_line()
app.listen(8888)
    ioloop = ioloop.IOLoop.instance()
autoreload.start(ioloop)
ioloop.start()
if __name__ == '__main__':
start()
| StarcoderdataPython |
8081364 | import doctest
import math
import os
import random
import sklearn
import pandas as pd
import numpy as np
from datetime import timedelta
from sklearn.utils.estimator_checks import check_transformer_general, check_transformers_unfitted
from unittest2 import TestSuite, TextTestRunner, TestCase # or `from unittest import ...` if on Python 3.4+
import category_encoders as encoders
__author__ = 'willmcginnis'
# subroutines
def create_array(n_rows=1000, extras=False, has_none=True):
"""
Creates a numpy dataset with some categorical variables
    :return: a 2-D numpy array with two numeric and four categorical columns
"""
ds = [[
random.random(),
random.random(),
random.choice(['A', 'B', 'C']),
random.choice(['A', 'B', 'C', 'D']) if extras else random.choice(['A', 'B', 'C']),
random.choice(['A', 'B', 'C', None, np.nan]) if has_none else random.choice(['A', 'B', 'C']),
random.choice(['A'])
] for _ in range(n_rows)]
return np.array(ds)
def create_dataset(n_rows=1000, extras=False, has_none=True):
"""
Creates a dataset with some categorical variables
"""
ds = [[
random.random(), # Floats
random.choice([float('nan'), float('inf'), float('-inf'), -0, 0, 1, -1, math.pi]), # Floats with edge scenarios
row, # Unique integers
str(row), # Unique strings
random.choice(['A']), # Invariant
random.choice(['A', 'B_b', 'C_c_c']), # Strings with underscores to test reverse_dummies()
random.choice(['A', 'B', 'C', None]) if has_none else random.choice(['A', 'B', 'C']), # None
random.choice(['A', 'B', 'C', 'D']) if extras else random.choice(['A', 'B', 'C']), # With a new string value
random.choice([12, 43, -32]) # Number in the column name
] for row in range(n_rows)]
df = pd.DataFrame(ds, columns=['float', 'float_edge', 'unique_int', 'unique_str', 'invariant', 'underscore', 'none', 'extra', 321])
return df
def verify_numeric(X_test):
for dt in X_test.dtypes:
numeric = False
if np.issubdtype(dt, np.dtype(int)) or np.issubdtype(dt, np.dtype(float)):
numeric = True
assert numeric
def verify_inverse_transform(x, x_inv):
"""
Verify x is equal to x_inv. The test returns true for NaN.equals(NaN) as it should.
"""
assert x.equals(x_inv)
# data definitions
np_X = create_array(n_rows=100)
np_X_t = create_array(n_rows=50, extras=True)
np_y = np.random.randn(np_X.shape[0]) > 0.5
np_y_t = np.random.randn(np_X_t.shape[0]) > 0.5
X = create_dataset(n_rows=100)
X_t = create_dataset(n_rows=50, extras=True)
y = pd.DataFrame(np_y)
y_t = pd.DataFrame(np_y_t)
# this class utilises parametrised tests where we loop over different encoders
# tests that are applicable to only one encoder are the end of the class
class TestEncoders(TestCase):
def test_np(self):
for encoder_name in encoders.__all__:
with self.subTest(encoder_name=encoder_name):
# Encode a numpy array
enc = getattr(encoders, encoder_name)()
enc.fit(np_X, np_y)
verify_numeric(enc.transform(np_X_t))
def test_classification(self):
for encoder_name in encoders.__all__:
with self.subTest(encoder_name=encoder_name):
cols = ['unique_str', 'underscore', 'extra', 'none', 'invariant', 321]
enc = getattr(encoders, encoder_name)(cols=cols)
enc.fit(X, np_y)
verify_numeric(enc.transform(X_t))
enc = getattr(encoders, encoder_name)(verbose=1)
enc.fit(X, np_y)
verify_numeric(enc.transform(X_t))
enc = getattr(encoders, encoder_name)(drop_invariant=True)
enc.fit(X, np_y)
verify_numeric(enc.transform(X_t))
enc = getattr(encoders, encoder_name)(return_df=False)
enc.fit(X, np_y)
self.assertTrue(isinstance(enc.transform(X_t), np.ndarray))
self.assertEqual(enc.transform(X_t).shape[0], X_t.shape[0], 'Row count must not change')
# documented in issue #122
# when we use the same encoder on two different datasets, it should not explode
# X_a = pd.DataFrame(data=['1', '2', '2', '2', '2', '2'], columns=['col_a'])
# X_b = pd.DataFrame(data=['1', '1', '1', '2', '2', '2'], columns=['col_b']) # different values and name
# y_dummy = [True, False, True, False, True, False]
# enc = getattr(encoders, encoder_name)()
# enc.fit(X_a, y_dummy)
# enc.fit(X_b, y_dummy)
# verify_numeric(enc.transform(X_b))
def test_impact_encoders(self):
for encoder_name in ['LeaveOneOutEncoder', 'TargetEncoder', 'WOEEncoder']:
with self.subTest(encoder_name=encoder_name):
# encode a numpy array and transform with the help of the target
enc = getattr(encoders, encoder_name)()
enc.fit(np_X, np_y)
verify_numeric(enc.transform(np_X_t, np_y_t))
# target is a DataFrame
enc = getattr(encoders, encoder_name)()
enc.fit(X, y)
verify_numeric(enc.transform(X_t, y_t))
# when we run transform(X, y) and there is a new value in X, something is wrong and we raise an error
enc = getattr(encoders, encoder_name)(impute_missing=True, handle_unknown='error', cols=['extra'])
enc.fit(X, y)
self.assertRaises(ValueError, enc.transform, (X_t, y_t))
def test_error_handling(self):
for encoder_name in encoders.__all__:
with self.subTest(encoder_name=encoder_name):
# we exclude some columns
X = create_dataset(n_rows=100)
X = X.drop(['unique_str', 'none'], axis=1)
X_t = create_dataset(n_rows=50, extras=True)
X_t = X_t.drop(['unique_str', 'none'], axis=1)
# illegal state, we have to first train the encoder...
enc = getattr(encoders, encoder_name)()
with self.assertRaises(ValueError):
enc.transform(X)
# wrong count of attributes
enc = getattr(encoders, encoder_name)()
enc.fit(X, y)
with self.assertRaises(ValueError):
enc.transform(X_t.iloc[:, 0:3])
# no cols
enc = getattr(encoders, encoder_name)(cols=[])
enc.fit(X, y)
self.assertTrue(enc.transform(X_t).equals(X_t))
def test_handle_unknown_error(self):
# BaseN has problems with None -> ignore None
X = create_dataset(n_rows=100, has_none=False)
X_t = create_dataset(n_rows=50, extras=True, has_none=False)
for encoder_name in (set(encoders.__all__) - {'HashingEncoder'}): # HashingEncoder supports new values by design -> excluded
with self.subTest(encoder_name=encoder_name):
# new value during scoring
enc = getattr(encoders, encoder_name)(handle_unknown='error')
enc.fit(X, y)
with self.assertRaises(ValueError):
_ = enc.transform(X_t)
def test_sklearn_compliance(self):
for encoder_name in encoders.__all__:
with self.subTest(encoder_name=encoder_name):
# in sklearn < 0.19.0, these methods require classes,
# in sklearn >= 0.19.0, these methods require instances
if sklearn.__version__ < '0.19.0':
encoder = getattr(encoders, encoder_name)
else:
encoder = getattr(encoders, encoder_name)()
check_transformer_general(encoder_name, encoder)
check_transformers_unfitted(encoder_name, encoder)
def test_inverse_transform(self):
# we do not allow None in these data (but "none" column without any None is ok)
X = create_dataset(n_rows=100, has_none=False)
X_t = create_dataset(n_rows=50, has_none=False)
X_t_extra = create_dataset(n_rows=50, extras=True, has_none=False)
cols = ['underscore', 'none', 'extra', 321]
for encoder_name in ['BaseNEncoder', 'BinaryEncoder', 'OneHotEncoder', 'OrdinalEncoder']:
with self.subTest(encoder_name=encoder_name):
# simple run
enc = getattr(encoders, encoder_name)(verbose=1, cols=cols)
enc.fit(X)
verify_inverse_transform(X_t, enc.inverse_transform(enc.transform(X_t)))
# when a new value is encountered, do not raise an exception
enc = getattr(encoders, encoder_name)(verbose=1, cols=cols)
enc.fit(X, y)
_ = enc.inverse_transform(enc.transform(X_t_extra))
def test_types(self):
X = pd.DataFrame({
'Int': [1, 2, 1, 2],
'Float': [1.1, 2.2, 3.3, 4.4],
'Complex': [3.45J, 3.45J, 3.45J, 3.45J],
'None': [None, None, None, None],
'Str': ['a', 'c', 'c', 'd'],
'PdTimestamp': [pd.Timestamp('2012-05-01'), pd.Timestamp('2012-05-02'), pd.Timestamp('2012-05-03'), pd.Timestamp('2012-05-06')],
'PdTimedelta': [pd.Timedelta('1 days'), pd.Timedelta('2 days'), pd.Timedelta('1 days'), pd.Timedelta('1 days')],
'TimeDelta': [timedelta(-9999), timedelta(-9), timedelta(-1), timedelta(999)],
'Bool': [False, True, True, False],
'Tuple': [('a', 'tuple'), ('a', 'tuple'), ('a', 'tuple'), ('b', 'tuple')],
# 'Categorical': pd.Categorical(list('bbea'), categories=['e', 'a', 'b'], ordered=True),
# 'List': [[1,2], [2,3], [3,4], [4,5]],
# 'Dictionary': [{1: "a", 2: "b"}, {1: "a", 2: "b"}, {1: "a", 2: "b"}, {1: "a", 2: "b"}],
# 'Set': [{'John', 'Jane'}, {'John', 'Jane'}, {'John', 'Jane'}, {'John', 'Jane'}],
# 'Array': [array('i'), array('i'), array('i'), array('i')]
})
y = [1, 0, 0, 1]
for encoder_name in encoders.__all__:
encoder = getattr(encoders, encoder_name)()
encoder.fit_transform(X, y)
# encoder specific tests
def test_binary_bin(self):
data = np.array(['a', 'ba', 'ba'])
out = encoders.BinaryEncoder().fit_transform(data)
self.assertTrue(pd.DataFrame([[0, 1], [1, 0], [1, 0]], columns=['0_0', '0_1']).equals(out))
def test_binary_dist(self):
data = np.array(['apple', 'orange', 'peach', 'lemon'])
encoder = encoders.BinaryEncoder()
encoder.fit(data)
# split dataframe into two transforms and recombine
a = encoder.transform(data[:1])
b = encoder.transform(data[1:])
split = pd.concat([a, b])
split = split.reset_index(drop=True)
# run all at once
c = encoder.transform(data)
# make sure they are the same
self.assertTrue(split.equals(c))
def test_leave_one_out(self):
enc = encoders.LeaveOneOutEncoder(verbose=1, randomized=True, sigma=0.1)
enc.fit(X, y)
verify_numeric(enc.transform(X_t))
verify_numeric(enc.transform(X_t, y_t))
def test_leave_one_out_values(self):
df = pd.DataFrame({
'color': ["a", "a", "a", "b", "b", "b"],
'outcome': [1, 0, 0, 1, 0, 1]})
X = df.drop('outcome', axis=1)
y = df.drop('color', axis=1)
ce_leave = encoders.LeaveOneOutEncoder(cols=['color'], randomized=False)
obtained = ce_leave.fit_transform(X, y['outcome'])
        self.assertEqual([0.0, 0.5, 0.5, 0.5, 1.0, 0.5], list(obtained['color']))
def test_leave_one_out_fit_callTwiceOnDifferentData_ExpectRefit(self):
x_a = pd.DataFrame(data=['1', '2', '2', '2', '2', '2'], columns=['col_a'])
x_b = pd.DataFrame(data=['1', '1', '1', '2', '2', '2'], columns=['col_b']) # different values and name
y_dummy = [True, False, True, False, True, False]
encoder = encoders.LeaveOneOutEncoder()
encoder.fit(x_a, y_dummy)
encoder.fit(x_b, y_dummy)
mapping = encoder.mapping
self.assertEqual(1, len(mapping))
col_b_mapping = mapping[0]
self.assertEqual('col_b', col_b_mapping['col']) # the model must get updated
self.assertEqual({'sum': 2.0, 'count': 3, 'mean': 2.0/3.0}, col_b_mapping['mapping']['1'])
        self.assertEqual({'sum': 1.0, 'count': 3, 'mean': 1.0/3.0}, col_b_mapping['mapping']['2'])
def test_one_hot(self):
enc = encoders.OneHotEncoder(verbose=1, return_df=False)
enc.fit(X)
self.assertEqual(enc.transform(X_t).shape[1],
enc.transform(X_t[X_t['extra'] != 'A']).shape[1],
'We have to get the same count of columns')
enc = encoders.OneHotEncoder(verbose=1, return_df=True, impute_missing=True)
enc.fit(X)
out = enc.transform(X_t)
self.assertIn('extra_-1', out.columns.values)
enc = encoders.OneHotEncoder(verbose=1, return_df=True, impute_missing=True, handle_unknown='ignore')
enc.fit(X)
out = enc.transform(X_t)
self.assertEqual(len([x for x in out.columns.values if str(x).startswith('extra_')]), 3)
enc = encoders.OneHotEncoder(verbose=1, return_df=True, impute_missing=True, handle_unknown='error')
enc.fit(X)
with self.assertRaises(ValueError):
out = enc.transform(X_t)
enc = encoders.OneHotEncoder(verbose=1, return_df=True, handle_unknown='ignore', use_cat_names=True)
enc.fit(X)
out = enc.transform(X_t)
self.assertIn('extra_A', out.columns.values)
enc = encoders.OneHotEncoder(verbose=1, return_df=True, use_cat_names=True)
enc.fit(X)
out = enc.transform(X_t)
self.assertIn('extra_-1', out.columns.values)
# test inverse_transform
X_i = create_dataset(n_rows=100, has_none=False)
X_i_t = create_dataset(n_rows=50, has_none=False)
X_i_t_extra = create_dataset(n_rows=50, extras=True, has_none=False)
cols = ['underscore', 'none', 'extra', 321]
enc = encoders.OneHotEncoder(verbose=1, use_cat_names=True, cols=cols)
enc.fit(X_i)
obtained = enc.inverse_transform(enc.transform(X_i_t))
obtained[321] = obtained[321].astype('int64') # numeric columns are incorrectly typed as object...
verify_inverse_transform(X_i_t, obtained)
def test_ordinal(self):
enc = encoders.OrdinalEncoder(verbose=1, return_df=True, impute_missing=True)
enc.fit(X)
out = enc.transform(X_t)
self.assertEqual(len(set(out['extra'].values)), 4)
self.assertIn(0, set(out['extra'].values))
self.assertFalse(enc.mapping is None)
self.assertTrue(len(enc.mapping) > 0)
enc = encoders.OrdinalEncoder(verbose=1, mapping=enc.mapping, return_df=True, impute_missing=True)
enc.fit(X)
out = enc.transform(X_t)
self.assertEqual(len(set(out['extra'].values)), 4)
self.assertIn(0, set(out['extra'].values))
self.assertTrue(len(enc.mapping) > 0)
enc = encoders.OrdinalEncoder(verbose=1, return_df=True, impute_missing=True, handle_unknown='ignore')
enc.fit(X)
out = enc.transform(X_t)
out_cats = [x for x in set(out['extra'].values) if np.isfinite(x)]
self.assertEqual(len(out_cats), 3)
self.assertFalse(enc.mapping is None)
def test_ordinal_dist(self):
data = np.array([
['apple', None],
['peach', 'lemon']
])
encoder = encoders.OrdinalEncoder(impute_missing=True)
encoder.fit(data)
a = encoder.transform(data)
self.assertEqual(a.values[0, 1], 0)
self.assertEqual(a.values[1, 1], 1)
encoder = encoders.OrdinalEncoder(impute_missing=False)
encoder.fit(data)
a = encoder.transform(data)
self.assertTrue(np.isnan(a.values[0, 1]))
self.assertEqual(a.values[1, 1], 1)
def test_target_encoder(self):
enc = encoders.TargetEncoder(verbose=1, smoothing=2, min_samples_leaf=2)
enc.fit(X, y)
verify_numeric(enc.transform(X_t))
verify_numeric(enc.transform(X_t, y_t))
def test_target_encoder_fit_HaveConstructorSetSmoothingAndMinSamplesLeaf_ExpectUsedInFit(self):
k = 2
f = 10
binary_cat_example = pd.DataFrame(
{'Trend': ['UP', 'UP', 'DOWN', 'FLAT', 'DOWN', 'UP', 'DOWN', 'FLAT', 'FLAT', 'FLAT'],
'target': [1, 1, 0, 0, 1, 0, 0, 0, 1, 1]})
encoder = encoders.TargetEncoder(cols=['Trend'], min_samples_leaf=k, smoothing=f)
encoder.fit(binary_cat_example, binary_cat_example['target'])
trend_mapping = encoder.mapping[0]['mapping']
self.assertAlmostEqual(0.4125, trend_mapping['DOWN']['smoothing'], delta=1e-4)
self.assertEqual(0.5, trend_mapping['FLAT']['smoothing'])
self.assertAlmostEqual(0.5874, trend_mapping['UP']['smoothing'], delta=1e-4)
def test_target_encoder_fit_transform_HaveConstructorSetSmoothingAndMinSamplesLeaf_ExpectCorrectValueInResult(self):
k = 2
f = 10
binary_cat_example = pd.DataFrame(
{'Trend': ['UP', 'UP', 'DOWN', 'FLAT', 'DOWN', 'UP', 'DOWN', 'FLAT', 'FLAT', 'FLAT'],
'target': [1, 1, 0, 0, 1, 0, 0, 0, 1, 1]})
encoder = encoders.TargetEncoder(cols=['Trend'], min_samples_leaf=k, smoothing=f)
result = encoder.fit_transform(binary_cat_example, binary_cat_example['target'])
values = result['Trend'].values
self.assertAlmostEqual(0.5874, values[0], delta=1e-4)
self.assertAlmostEqual(0.5874, values[1], delta=1e-4)
self.assertAlmostEqual(0.4125, values[2], delta=1e-4)
self.assertEqual(0.5, values[3])
def test_woe(self):
cols = ['unique_str', 'underscore', 'extra', 'none', 'invariant', 321]
# balanced label with balanced features
X_balanced = pd.DataFrame(data=['1', '1', '1', '2', '2', '2'], columns=['col1'])
y_balanced = [True, False, True, False, True, False]
enc = encoders.WOEEncoder()
enc.fit(X_balanced, y_balanced)
X1 = enc.transform(X_balanced)
self.assertTrue(all(X1.sum() < 0.001), "When the class label is balanced, WoE should sum to 0 in each transformed column")
enc = encoders.WOEEncoder(cols=cols)
enc.fit(X, np_y)
X1 = enc.transform(X_t)
verify_numeric(X1[cols])
self.assertTrue(np.isfinite(X1[cols].values).all(), 'There must not be any NaN, inf or -inf in the transformed columns')
self.assertEqual(len(list(X_t)), len(list(X1)), 'The count of attributes must not change')
self.assertEqual(len(X_t), len(X1), 'The count of rows must not change')
X2 = enc.transform(X_t, np_y_t)
verify_numeric(X2)
self.assertTrue(np.isfinite(X2[cols].values).all(), 'There must not be any NaN, inf or -inf in the transformed columns')
self.assertEqual(len(list(X_t)), len(list(X2)), 'The count of attributes must not change')
self.assertEqual(len(X_t), len(X2), 'The count of rows must not change')
X3 = enc.transform(X, np_y)
verify_numeric(X3)
self.assertTrue(np.isfinite(X3[cols].values).all(), 'There must not be any NaN, inf or -inf in the transformed columns')
self.assertEqual(len(list(X)), len(list(X3)), 'The count of attributes must not change')
self.assertEqual(len(X), len(X3), 'The count of rows must not change')
self.assertTrue(X3['unique_str'].var() < 0.001, 'The unique string column must not be predictive of the label')
X4 = enc.fit_transform(X, np_y)
verify_numeric(X4)
self.assertTrue(np.isfinite(X4[cols].values).all(), 'There must not be any NaN, inf or -inf in the transformed columns')
self.assertEqual(len(list(X)), len(list(X4)), 'The count of attributes must not change')
self.assertEqual(len(X), len(X4), 'The count of rows must not change')
self.assertTrue(X4['unique_str'].var() < 0.001, 'The unique string column must not be predictive of the label')
enc = encoders.WOEEncoder()
enc.fit(X, np_y)
X1 = enc.transform(X_t)
self.assertEqual(len(list(X_t)), len(list(X1)), 'The count of attributes must not change')
self.assertEqual(len(X_t), len(X1), 'The count of rows must not change')
verify_numeric(X1)
X2 = enc.transform(X_t, np_y_t)
verify_numeric(X2)
self.assertEqual(len(list(X_t)), len(list(X2)), 'The count of attributes must not change')
self.assertEqual(len(X_t), len(X2), 'The count of rows must not change')
# seed
enc = encoders.WOEEncoder(cols=cols, random_state=2001, randomized=True)
enc.fit(X, np_y)
X1 = enc.transform(X_t, np_y_t)
X2 = enc.transform(X_t, np_y_t)
self.assertTrue(X1.equals(X2), "When the seed is given, the results must be identical")
verify_numeric(X1)
verify_numeric(X2)
# invariant target
y_invariant = [True, True, True, True, True, True]
enc = encoders.WOEEncoder()
with self.assertRaises(ValueError):
enc.fit(X_balanced, y_invariant)
# branch coverage unit tests - no cols
enc = encoders.WOEEncoder(cols=[])
enc.fit(X, np_y)
self.assertTrue(enc.transform(X_t).equals(X_t))
# missing values in the target
y_missing = [True, True, None, True, True, True]
enc = encoders.WOEEncoder()
with self.assertRaises(ValueError):
enc.fit(X_balanced, y_missing)
# impute missing
enc = encoders.WOEEncoder(impute_missing=False)
enc.fit(X, np_y)
X1 = enc.transform(X_t)
verify_numeric(X1)
self.assertTrue(X1.isnull().values.any())
self.assertEqual(len(list(X_t)), len(list(X1)), 'The count of attributes must not change')
self.assertEqual(len(X_t), len(X1), 'The count of rows must not change')
X2 = enc.transform(X_t, np_y_t)
verify_numeric(X2)
self.assertTrue(X1.isnull().values.any())
self.assertEqual(len(list(X_t)), len(list(X2)), 'The count of attributes must not change')
self.assertEqual(len(X_t), len(X2), 'The count of rows must not change')
    # beware: for some reason doctest failures do not raise exceptions here - you have to read the text output
def test_doc(self):
suite = TestSuite()
for filename in os.listdir('../'):
if filename.endswith(".py"):
suite.addTest(doctest.DocFileSuite('../' + filename))
runner = TextTestRunner(verbosity=2)
        runner.run(suite)
| StarcoderdataPython |
12863583 | '''
molecool.io package
configure access to subpackage functions
'''
from .pdb import open_pdb
from .xyz import open_xyz, write_xyz
| StarcoderdataPython |
92627 | <reponame>brownaa/wagtail
from django.contrib.auth import get_user_model
from django.test import TestCase
from wagtail.core.models import Comment, Page
class CommentTestingUtils:
def setUp(self):
self.page = Page.objects.get(title="Welcome to the Wagtail test site!")
self.revision_1 = self.page.save_revision()
self.revision_2 = self.page.save_revision()
def create_comment(self, revision_created):
return Comment.objects.create(
page=self.page,
user=get_user_model().objects.first(),
text='test',
contentpath='title',
revision_created=revision_created,
)
class TestRevisionDeletion(CommentTestingUtils, TestCase):
fixtures = ['test.json']
def setUp(self):
super().setUp()
self.revision_3 = self.page.save_revision()
self.old_comment = self.create_comment(self.revision_1)
self.new_comment = self.create_comment(self.revision_3)
def test_deleting_old_revision_moves_comment_revision_created_forwards(self):
# test that when a revision is deleted, a comment linked to it via revision_created has its revision_created moved
# to the next revision
self.revision_1.delete()
self.old_comment.refresh_from_db()
self.assertEqual(self.old_comment.revision_created, self.revision_2)
def test_deleting_most_recent_revision_deletes_created_comments(self):
# test that when the most recent revision is deleted, any comments created on it are also deleted
self.revision_3.delete()
with self.assertRaises(Comment.DoesNotExist):
self.new_comment.refresh_from_db()
| StarcoderdataPython |
1814529 | # ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from ngraph.frontends.cntk.cntk_importer.ops_compound import OpsCompound
from ngraph.frontends.cntk.cntk_importer.ops_binary import OpsBinary
from ngraph.frontends.cntk.cntk_importer.ops_unary import OpsUnary
class OpsBridge(OpsCompound, OpsBinary, OpsUnary):
"""
Bridging operations between CNTK and ngraph.
"""
def __call__(self, cntk_op, inputs):
"""
Call Op based on `cntk_op.op_name`.
Arguments:
cntk_op: CNTK operation to be translated.
inputs: List of prepared inputs for operation.
Returns:
The resulting ngraph op.
"""
op_name = cntk_op.op_name
try:
return getattr(self, op_name)(cntk_op, inputs)
except AttributeError:
raise TypeError("Unknown operation: " + op_name)
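# Illustrative usage sketch (the CNTK op object and its prepared ngraph
# inputs are hypothetical placeholders, not values defined in this module):
#
#   bridge = OpsBridge()
#   ng_op = bridge(cntk_op, inputs)  # dispatches on cntk_op.op_name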
| StarcoderdataPython |
392941 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author: x.huang
# @date:29/05/19
import time
from pypay import err
from pypay.gateways.wechat import WechatPay
class PosPayImpl(WechatPay):
@staticmethod
def get_trade_type():
return 'MICROPAY'
def pay(self, config_biz: dict):
        # TODO: something is wrong here.
if not self.config.get('app_id'):
raise err.InvalidArgumentException('Missing Config -- [app_id]')
self.unset_notify_url()
return self.request_api('pay/micropay', config_biz)
| StarcoderdataPython |
259263 | from django.core.validators import RegexValidator
zip_validate = RegexValidator(r'^[0-9]*$', 'Please enter a valid zip code.')
phone_validate = RegexValidator(r'^\s*\d{5}-\d{5}\s*$',
                                'Please enter a valid phone number. Phone numbers must match the '
                                'following pattern: 12345-67890.')
| StarcoderdataPython |
11323206 | ################################################################################
#
# Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
from __future__ import division
import os
import logging
try:
import madgraph
except ImportError:
MADEVENT = True
import internal.banner as bannermod
import internal.misc as misc
import internal.files as files
import internal.cluster as cluster
import internal.lhe_parser as lhe_parser
import internal.misc_integrator as misc_integrator
import internal.histograms as histograms
else:
MADEVENT= False
import madgraph.various.misc as misc
import madgraph.iolibs.files as files
import madgraph.various.cluster as cluster
import madgraph.various.lhe_parser as lhe_parser
import madgraph.integrator.misc_integrator as misc_integrator
import madgraph.various.histograms as histograms
logger = logging.getLogger('madgraph.integrator')
pjoin = os.path.join
class VirtualObservable(object):
"""Base class for observables."""
def __init__(self, name='default'):
self.name = name
def __call__(self, wgt, *args, **opts):
"""Integrand function call,
with list of continuous and discrete input values for all dimensions.
"""
# This virtual function is currently abused for type checks
assert(isinstance(wgt, float))
return True
class ObservableList(list):
"""Base class for lists of observables."""
def __init__(self, *args, **opts):
super(ObservableList, self).__init__(*args, **opts)
def apply_observables(self, wgt, *args, **opts):
"""Apply all observables of this list."""
for obs in self:
obs(wgt, *args, **opts)
def append(self, arg, **opts):
"""Overload append for type-checking."""
assert(isinstance(arg, VirtualObservable))
super(ObservableList, self).append(arg, **opts)
##########################################################
# HwU filling tools
##########################################################
class ObservableFunctions(object):
"""A compendium of basic observables implemented as static methods
Make your class inherit from this to have access to the library
"""
@staticmethod
def inclusive_xsec(*args,**kwargs):
"""Total cross section. No need to bin data by kinematics"""
return ((1, 1), )
@staticmethod
def scalar_pt_sum(data_for_observables,*args,**kwargs):
"""Sum of the transverse momentum of all particles in the final state """
PS_point = data_for_observables['PS_point'].to_list()
flavors = data_for_observables['flavors']
pt = 0
for p in PS_point[len(flavors[0]):]:
pt+=p.pt()
return ((pt, 1), )
class HwUObservable(VirtualObservable,ObservableFunctions):
"""Class that creates and fills in a HwU histogram
for an observable given as a function.
"""
def __init__(
self, name='default', observable_function=None, range=(0, 2), nbins=1,
*args, **opts ):
super(HwUObservable, self).__init__(name)
if observable_function is None:
self.observable_function = self.inclusive_xsec
else:
self.observable_function = observable_function
self.range = [float(range[0]),float(range[1])]
assert self.range[0] < self.range[1]
self.nbins = int(nbins)
self.bin_size = (self.range[1]-self.range[0])/float(nbins)
assert self.bin_size > 0
self.create_HwU(**opts)
def create_HwU(self, **opts):
bins = histograms.BinList(bin_range=self.range+[self.bin_size])
self.HwU = histograms.HwU(title=self.name, bins=bins, **opts)
def __call__(self, wgt, *args, **kwargs):
assert super(HwUObservable, self).__call__(wgt, *args, **kwargs)
values_and_weights = self.observable_function(*args, **kwargs)
for v, w in values_and_weights:
self.HwU.addEvent(v, wgt*w)
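# Illustrative usage sketch (the event loop and its `data_for_observables`
# dicts are hypothetical placeholders; `scalar_pt_sum` is defined above):
#
#   obs = HwUObservable(name='scalar pt sum',
#                       observable_function=ObservableFunctions.scalar_pt_sum,
#                       range=(0., 500.), nbins=50)
#   obs_list = HwUObservableList([obs])
#   for wgt, data_for_observables in events:          # hypothetical event source
#       obs_list.apply_observables(wgt, data_for_observables)
#   obs_list.output('scalar_pt_sum_histogram')        # writes the HwU histograms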
class HwUObservableList(ObservableList):
def __init__(self,*args,**opts):
super(HwUObservableList, self).__init__(*args,**opts)
assert all([isinstance(x,HwUObservable) for x in self])
self.create_HwUList()
def create_HwUList(self):
HwUs=[]
for obs in self:
HwUs.append(obs.HwU)
self.HwUList = histograms.HwUList(HwUs)
def append(self, arg, **opts):
assert(isinstance(arg, HwUObservable))
super(ObservableList, self).append(arg, **opts)
def output(self,path,*args,**opts):
self.HwUList.output(path,*args,**opts)
| StarcoderdataPython |
11282503 | import collections
from . import pyheclib
import pandas as pd
import numpy as np
import os
import re
import time
import warnings
import logging
from datetime import datetime, timedelta
from calendar import monthrange
from dateutil.parser import parse
# some static functions
DATE_FMT_STR = '%d%b%Y'
_USE_CONDENSED = False
def set_message_level(level):
"""
set the verbosity level of the HEC-DSS library
level ranges from "bort" only (level 0) to "internal" (level >10)
"""
pyheclib.hec_zset('MLEVEL', '', level)
def set_program_name(program_name):
"""
sets the name of the program (upto 6 chars long) to store with data
"""
name = program_name[:min(6, len(program_name))]
pyheclib.hec_zset('PROGRAM', name, 0)
def get_version(fname):
"""
Get version of DSS File
returns a tuple of string version of 4 characters and integer version
"""
return pyheclib.hec_zfver(fname)
def get_start_end_dates(twstr, sep='-'):
"""
Get the start and end date (as strings of format ddMMMyyyy,e.g. 01JAN1991) from timewindow string
The start and end times must be separated by sep (default = '-') and can be in any format that works with
pandas to_datetime (see link below)
The returned start and end date are rounded down and up (respectively) to the day
Args:
twstr (str): timewindow as string of the form that can be parsed by pd.to_datetime [https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.to_datetime.html]
"""
s, e = [pd.to_datetime(str.strip(d)) for d in str.split(twstr, sep)]
return s.floor('D').strftime('%d%b%Y').upper(), e.ceil('D').strftime('%d%b%Y').upper()
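# Illustrative worked example: a mixed-format time window is floored/ceiled to
# whole days, e.g. get_start_end_dates('1991-01-01 04:30 - 1991-03-15 12:00')
# returns ('01JAN1991', '16MAR1991').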
def get_ts(filename, pathname):
"""
Gets regular time series matching the pathname from the filename.
Opens and reads pathname from filename and then closes it (slightly inefficient)
Parameters
----------
filename: a path to DSS file
pathname: a string of the form /A/B/C/D/E/F that is parsed to match all parts except D
which if not blank is used to determine the time window to retrieve
D should be specified in the format of ddMMMYYYY HHmm - ddMMMYYYY HHmm
Returns
-------
a generator of named tuples DSSData(data=pandas DataFrame, units=units, period_type=period type) for timeseries found or an empty list
Notes
-----
Assumes that all matching are Regular time series ( as opposed to Irregular, See DSS terminolog)
Examples
--------
Get time series based on a part of pathname, e.g.
    >>> pyhecdss.get_ts('test1.dss', '//SIN/////')
[(rts,units,type),...]
"""
with DSSFile(filename) as dssh:
dfcat = dssh.read_catalog()
pp = pathname.split('/')
cond = True
for p, n in zip(pp[1:4]+pp[5:7], ['A', 'B', 'C', 'E', 'F']):
if len(p) > 0:
cond = cond & (dfcat[n] == p)
plist = dssh.get_pathnames(dfcat[cond])
twstr = str.strip(pp[4])
startDateStr = endDateStr = None
if len(twstr) > 0:
try:
startDateStr, endDateStr = get_start_end_dates(twstr)
except:
startDateStr, endDateStr = None, None
for p in plist:
if p.split('/')[5].startswith('IR-'):
yield dssh.read_its(p, startDateStr, endDateStr)
else:
yield dssh.read_rts(p, startDateStr, endDateStr)
def get_matching_ts(filename, pathname=None, path_parts=None):
'''Opens the DSS file and reads matching pathname or path parts
Args:
:param filename: DSS filename containing data
:param pathname: The DSS pathname A-F parts like string /A/B/C/D/E/F/
where A-F is either blank implying match all or a regular expression to be matched
or
    :param path_parts: if an A-F regular expression contains the "/" character, use the path_parts array instead
    *One of pathname or path_parts must be specified*
    :returns: a generator of DSSData named tuples (data as dataframe, units as string, type as string, one of INST-VAL, PER-VAL)
'''
with DSSFile(filename) as dssh:
dfcat = dssh.read_catalog()
pp = pathname.split('/')
cond = dfcat['A'].str.match('.*')
for p, n in zip(pp[1:4]+pp[5:7], ['A', 'B', 'C', 'E', 'F']):
if len(p) > 0:
cond = cond & (dfcat[n].str.match(p))
plist = dssh.get_pathnames(dfcat[cond])
twstr = str.strip(pp[4])
startDateStr = endDateStr = None
if len(twstr) > 0:
try:
startDateStr, endDateStr = get_start_end_dates(twstr)
except:
startDateStr, endDateStr = None, None
if len(plist) == 0:
raise Exception(f'No pathname found in {filename} for {pathname} or {path_parts}')
for p in plist:
if p.split('/')[5].startswith('IR-'):
yield dssh.read_its(p, startDateStr, endDateStr)
else:
yield dssh.read_rts(p, startDateStr, endDateStr)
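# Illustrative usage sketch (the DSS file name and the B-part regex are
# hypothetical placeholders):
#
#   for data, units, ptype in get_matching_ts('sample.dss', '//SIN.*/////'):
#       print(data.head(), units, ptype)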
DSSData = collections.namedtuple('DSSData', field_names=['data', 'units', 'period_type'])
class DSSFile:
"""
Opens a HEC-DSS file for operations of read and write.
The correct way of using is "with" statement:
```
with DSSFile('myfile.dss') as dh:
dfcat=dh.read_catalog()
```
Raises:
FileNotFoundError: If the path to the file is not found. Usually silently creats an empty file if missing
Returns:
DSSFile: an open DSS file handle
"""
# DSS missing conventions
MISSING_VALUE = -901.0
MISSING_RECORD = -902.0
#
FREQ_NAME_MAP = {"T": "MIN", "H": "HOUR", "D": "DAY", "W": "WEEK", "M": "MON", "A-DEC": "YEAR"}
#
NAME_FREQ_MAP = {v: k for k, v in FREQ_NAME_MAP.items()}
#
    EPART_PATTERN = re.compile(r'(?P<n>\d+)(?P<interval>M[OI]N|YEAR|HOUR|DAY|WEEK)')
#
"""
vectorized version of timedelta
"""
timedelta_minutes = np.vectorize(lambda x: timedelta(minutes=int(x)), otypes=['O'])
def __init__(self, fname, create_new=False):
"""Opens a dssfile
Args:
fname (str): path to filename
create_new (bool, optional): create_new if file doesn't exist. Defaults to False.
"""
self.isopen = False
self._check_dir_exists(fname)
if not create_new:
if not os.path.exists(fname) and not os.path.isfile(fname):
raise Exception(
f'File path: {fname} does not exist! '+'Use create_new=True if you want to create a new file')
self.ifltab = pyheclib.intArray(600)
self.istat = 0
self.fname = fname
self.open()
# defining __enter__ and __exit__ for use with "with" statements
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def __del__(self):
self.close()
def _check_dir_exists(self, fname):
dname = os.path.dirname(fname)
if dname == '' or os.path.exists(dname):
return
else:
raise FileNotFoundError(
"Attempt to create file: %s in non-existent directory: %s " % (fname, dname))
def open(self):
"""
Open DSS file
"""
if (self.isopen):
return
self.istat = pyheclib.hec_zopen(self.ifltab, self.fname)
self.isopen = True
def close(self):
"""
Close DSS File
"""
# FIXME: remove all created arrays and pointers
if (self.isopen):
pyheclib.zclose_(self.ifltab)
self.isopen = False
def get_version(self):
"""
Get version of DSS File
returns a tuple of string version of 4 characters and integer version
"""
# needs to be done on a closed file
if (self.isopen):
self.close()
return pyheclib.hec_zfver(self.fname)
def catalog(self):
"""
Catalog DSS Files
"""
opened_already = self.isopen
try:
if not opened_already:
self.open()
icunit = pyheclib.new_intp() # unit (fortran) for catalog
pyheclib.intp_assign(icunit, 12)
fcname = self.fname[:self.fname.rfind(".")]+".dsc"
pyheclib.fortranopen_(icunit, fcname, len(fcname))
icdunit = pyheclib.new_intp() # unit (fortran) for condensed catalog
fdname = self.fname[:self.fname.rfind(".")]+".dsd"
pyheclib.intp_assign(icdunit, 13)
pyheclib.fortranopen_(icdunit, fdname, len(fdname))
inunit = pyheclib.new_intp()
# new catalog, if non-zero no cataloging
pyheclib.intp_assign(inunit, 0)
cinstr = "" # catalog instructions : None = ""
labrev = pyheclib.new_intp()
pyheclib.intp_assign(labrev, 0) # 0 is unabbreviated.
ldsort = pyheclib.new_intp()
pyheclib.intp_assign(ldsort, 1) # 1 is sorted
lcdcat = pyheclib.new_intp() # output if condensed created
nrecs = pyheclib.new_intp() # number of records cataloged
pyheclib.zcat_(self.ifltab, icunit, icdunit, inunit, cinstr,
labrev, ldsort, lcdcat, nrecs, len(cinstr))
return pyheclib.intp_value(nrecs)
except:
#warnings.warn("Exception occurred while catalogging")
pass
finally:
pyheclib.fortranflush_(icunit)
pyheclib.fortranclose_(icunit)
pyheclib.fortranflush_(icdunit)
pyheclib.fortranclose_(icdunit)
pyheclib.fortranflush_(inunit)
pyheclib.fortranclose_(inunit)
if not opened_already:
self.close()
@staticmethod
def _read_catalog_dsd(fdname):
'''
read condensed catalog from fname into a data frame
'''
with open(fdname, 'r') as fd:
lines = fd.readlines()
columns = ['Tag', 'A Part', 'B Part',
'C Part', 'F Part', 'E Part', 'D Part']
if len(lines) < 9:
            logging.warning("catalog is empty! for filename: %s", fdname)
return None
colline = lines[7]
column_indices = []
for c in columns:
column_indices.append(colline.find(c))
a = np.empty([len(columns), len(lines)-9], dtype='U132')
ilx = 0
for line in lines[9:]:
cix = 0
isx = column_indices[0]
for iex in column_indices[1:]:
s = line[isx:iex].strip()
if s.startswith("-"):
s = a[cix, ilx-1]
a[cix, ilx] = s
cix = cix+1
isx = iex
s = line[isx:].strip()
a[cix, ilx] = s
ilx = ilx+1
df = pd.DataFrame(a.transpose(), columns=list('TABCFED'))
return df
@staticmethod
def _read_catalog_dsc(fcname):
'''
read full catalog from fc name and create condensed catalog on the fly
returns data frame
'''
df = pd.read_fwf(fcname, skiprows=8, colspecs=[(0, 8), (8, 15), (15, 500)])
df = df.dropna(how='all', axis=0) # drop empty lines
df[list('ABCDEF')] = df['Record Pathname'].str.split('/', expand=True).iloc[:, 1:7]
dfg = df.groupby(['A', 'B', 'C', 'F', 'E'])
df.D = pd.to_datetime(df.D)
dfmin, dfmax = dfg.min(), dfg.max()
tagmax = 'T' + str(dfmax.Tag.astype('str').str[1:].astype('int', errors='ignore').max())
dfc = dfmin['D'].dt.strftime('%d%b%Y').str.upper() + ' - ' + \
dfmax['D'].dt.strftime('%d%b%Y').str.upper()
dfc = dfc.reset_index()
dfc.insert(0, 'T', tagmax)
return dfc
def _check_condensed_catalog_file_and_recatalog(self, condensed=True):
if condensed:
ext = '.dsd'
else:
ext = '.dsc'
fdname = self.fname[:self.fname.rfind(".")]+ext
if not os.path.exists(fdname):
logging.debug("NO CATALOG FOUND: Generating...")
self.catalog()
else:
if os.path.exists(self.fname):
ftime = pd.to_datetime(time.ctime(
os.path.getmtime(self.fname)))
fdtime = pd.to_datetime(time.ctime(os.path.getmtime(fdname)))
if ftime > fdtime:
logging.debug("CATALOG FILE OLD: Generating...")
self.catalog()
else:
logging.debug("No DSS File found. Using catalog file as is")
#
return fdname
def read_catalog(self):
"""
Reads .dsd (condensed catalog) for the given dss file.
Will run catalog if it doesn't exist or is out of date
"""
fdname = self._check_condensed_catalog_file_and_recatalog(condensed=_USE_CONDENSED)
if _USE_CONDENSED:
df = DSSFile._read_catalog_dsd(fdname)
else:
df = DSSFile._read_catalog_dsc(fdname)
return df
def get_pathnames(self, catalog_dataframe=None):
"""
converts a catalog data frame into pathnames
If catalog_dataframe is None then reads catalog to populate it
returns a list of pathnames (condensed version, i.e. D part is time window)
/A PART/B PART/C PART/DPART (START DATE "-" END DATE)/E PART/F PART/
"""
if catalog_dataframe is None:
catalog_dataframe = self.read_catalog()
pdf = catalog_dataframe.iloc[:, [1, 2, 3, 6, 5, 4]]
return pdf.apply(func=lambda x: '/'+('/'.join(list(x.values)))+'/', axis=1).values.tolist()
    @staticmethod
    def num_values_in_interval(sdstr, edstr, istr):
"""
Get number of values in interval istr, using the start date and end date
string
"""
td = DSSFile._get_timedelta_for_interval(istr)
return int((parse(edstr)-parse(sdstr))/td)+1
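    # Illustrative worked example (both endpoints are inclusive):
    #   DSSFile.num_values_in_interval('01JAN1990', '02JAN1990', '1HOUR') -> 25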
def julian_day(self, date):
"""
get julian day for the date. (count of days since beginning of year)
"""
return date.dayofyear
def m2ihm(self, minute):
"""
24 hour style from mins
"""
ihr = minute/60
imin = minute-(ihr*60)
itime = ihr*100+imin
return itime
def parse_pathname_epart(self, pathname):
return pathname.split('/')[1:7][4]
    @staticmethod
    def _number_between(startDateStr, endDateStr, delta=timedelta(days=1)):
"""
This is just a guess at number of values to be read so going over is ok.
"""
return round((parse(endDateStr)-parse(startDateStr))/delta+1)
    @staticmethod
    def _get_timedelta_for_interval(interval):
"""
get minimum timedelta for interval defined by string. e.g. for month it is 28 days (minimum)
"""
        if interval.find('MON') >= 0:  # using the shortest month overestimates the value count, which is safe here
td = timedelta(days=28)
elif interval.find('YEAR') >= 0:
td = timedelta(days=365)
# TODO: Added to process IR-DAY and IR-DECADE, but this will make
# the routine bypass `get_freq_from_epart` for regular DAY intervals.
# Rewriting some of the logics here would be better for efficiency.
elif interval.find('DAY') >= 0:
td = timedelta(days=1) # Assuming the maximum daily.
elif interval.find('DECADE') >= 0:
td = timedelta(days=365) # Assuming it is close to YEARLY
else:
td = timedelta(seconds=DSSFile.get_freq_from_epart(interval).nanos/1e9)
return td
def _pad_to_end_of_block(self, endDateStr, interval):
edate = parse(endDateStr)
if interval.find('MON') >= 0 or interval.find('YEAR') >= 0:
edate = datetime((edate.year//10+1)*10, 1, 1)
elif interval.find('DAY') >= 0:
edate = datetime(edate.year+1, 1, 1)
elif interval.find('HOUR') >= 0 or interval.find('MIN') >= 0:
if edate.month == 12:
edate = datetime(edate.year+1, 1, 1)
else:
edate = datetime(edate.year, edate.month+1, 1)
else:
edate = edate+timedelta(days=1)
return edate.strftime(DATE_FMT_STR).upper()
def _get_istat_for_zrrtsxd(self, istat):
"""
C ISTAT: Integer status parameter, indicating the
C successfullness of the retrieval.
C ISTAT = 0 All ok.
C ISTAT = 1 Some missing data (still ok)
C ISTAT = 2 Missing data blocks, but some data found
C ISTAT = 3 Combination of 1 and 2 (some data found)
C ISTAT = 4 No data found, although a pathname was read
C ISTAT = 5 No pathname(s) found
C ISTAT > 9 Illegal call to ZRRTS
"""
if istat == 0:
return "All good"
msg = "ISTAT: %d --> " % istat
if istat == 1:
msg = msg + "Some missing data (still ok)"
elif istat == 2:
msg = msg + "Missing data blocks, but some data found"
elif istat == 3:
msg = msg + "Combination of 1 and 2 (some data found)"
elif istat == 4:
msg = msg + "No data found, although a pathname was read"
elif istat == 5:
msg = msg + "No pathname(s) found"
elif istat > 9:
msg = msg + "Illegal call to ZRRTS"
return msg
def _respond_to_istat_state(self, istat):
if istat == 0:
# everything is ok
pass
elif istat == 1 or istat == 2 or istat == 3:
            logging.debug("Some data or data blocks are missing [istat=%d]", istat)
elif istat == 4:
            logging.debug("Found file but failed to load any data")
elif istat == 5:
logging.debug("Path not found")
elif istat > 9:
logging.debug("Illegal internal call")
def _parse_times(self, pathname, startDateStr=None, endDateStr=None):
'''
parse times based on pathname or startDateStr and endDateStr
start date and end dates may be padded to include a larger interval
'''
interval = self.parse_pathname_epart(pathname)
if startDateStr is None or endDateStr is None:
twstr = pathname.split("/")[4]
if twstr.find("-") < 0:
if len(twstr.strip()) == 0:
raise Exception(
"No start date or end date and twstr is "+twstr)
sdate = edate = twstr
else:
sdate, edate = twstr.replace("*", "").split("-")
if startDateStr is None:
startDateStr = sdate.strip()
if endDateStr is None:
endDateStr = edate.strip()
endDateStr = self._pad_to_end_of_block(
endDateStr, interval)
return startDateStr, endDateStr
def read_rts(self, pathname, startDateStr=None, endDateStr=None):
"""
read regular time series for pathname.
if pathname D part contains a time window (START DATE "-" END DATE) and
either start or end date is None it uses that to define start and end date
"""
opened_already = self.isopen
try:
if not opened_already:
self.open()
interval = self.parse_pathname_epart(pathname)
trim_first = startDateStr is None
trim_last = endDateStr is None
startDateStr, endDateStr = self._parse_times(pathname, startDateStr, endDateStr)
nvals = DSSFile.num_values_in_interval(startDateStr, endDateStr, interval)
sdate = parse(startDateStr)
cdate = sdate.date().strftime('%d%b%Y').upper()
ctime = ''.join(sdate.time().isoformat().split(':')[:2])
# PERF: could be np.empty if all initialized
dvalues = np.zeros(nvals, 'd')
nvals, cunits, ctype, iofset, istat = pyheclib.hec_zrrtsxd(self.ifltab, pathname, cdate, ctime,
dvalues)
# FIXME: raise appropriate exception for istat value
# if istat != 0:
# raise Exception(self._get_istat_for_zrrtsxd(istat))
self._respond_to_istat_state(istat)
# FIXME: deal with non-zero iofset for period data,i.e. else part of if stmt below
nfreq, freqstr = DSSFile.get_number_and_frequency_from_epart(interval)
freqstr = '%d%s' % (nfreq, DSSFile.NAME_FREQ_MAP[freqstr])
freqoffset = DSSFile.get_freq_from_epart(interval)
if ctype.startswith('PER'): # for period values, shift back 1
# - pd.tseries.frequencies.to_offset(freqoffset)
sp = pd.Period(startDateStr, freq=freqstr)
dindex = pd.period_range(sp, periods=nvals, freq=freqstr).shift(-1)
else:
startDateWithOffset = parse(startDateStr)
if iofset != 0: # offsets are always from the end of the period, e.g. for day, rewind by a day and then add offset
startDateWithOffset = parse(startDateStr)-freqoffset+timedelta(minutes=iofset)
dindex = pd.date_range(
startDateWithOffset, periods=nvals, freq=freqoffset)
df1 = pd.DataFrame(data=dvalues, index=dindex, columns=[pathname])
# cleanup missing values --> NAN, trim dataset and units and period type strings
df1.replace([DSSFile.MISSING_VALUE, DSSFile.MISSING_RECORD], [
np.nan, np.nan], inplace=True)
if trim_first or trim_last:
if trim_first:
first_index = df1.first_valid_index()
else:
first_index = df1.index[0]
if trim_last:
last_index = df1.last_valid_index()
else:
last_index = df1.index[-1]
df1 = df1[first_index:last_index]
else:
df1 = df1
return DSSData(data=df1, units=cunits.strip(), period_type=ctype.strip())
finally:
if not opened_already:
self.close()
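    # Illustrative usage sketch (the file name and pathname are hypothetical):
    #
    #   with DSSFile('sample.dss') as d:
    #       data, units, ptype = d.read_rts('/A/B/C/01JAN1990 - 01JAN1991/1DAY/F/')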
    @staticmethod
    def get_epart_from_freq(freq):
return "%d%s" % (freq.n, DSSFile.FREQ_NAME_MAP[freq.name])
    @staticmethod
    def get_number_and_frequency_from_epart(epart):
match = DSSFile.EPART_PATTERN.match(epart)
return int(match['n']), match['interval']
    @staticmethod
    def get_freq_from_epart(epart):
if epart.find('MON') >= 0:
td = pd.offsets.MonthBegin(n=int(str.split(epart, 'MON')[0]))
elif epart.find('DAY') >= 0:
td = pd.offsets.Day(n=int(str.split(epart, 'DAY')[0]))
elif epart.find('HOUR') >= 0:
td = pd.offsets.Hour(n=int(str.split(epart, 'HOUR')[0]))
elif epart.find('MIN') >= 0:
td = pd.offsets.Minute(n=int(str.split(epart, 'MIN')[0]))
elif epart.find('YEAR') >= 0:
td = pd.offsets.YearBegin(n=int(str.split(epart, 'YEAR')[0]))
elif epart.find('WEEK') >= 0:
td = pd.offsets.Week(n=int(str.split(epart, 'WEEK')[0]))
else:
raise RuntimeError('Could not understand interval: ', epart)
return td
def write_rts(self, pathname, df, cunits, ctype):
"""
write time series to this DSS file with the given pathname.
The time series is passed in as a pandas DataFrame
and associated units and types of length no greater than 8.
"""
parts = pathname.split('/')
parts[5] = DSSFile.get_epart_from_freq(df.index.freq)
pathname = "/".join(parts)
if isinstance(df.index, pd.PeriodIndex):
if ctype.startswith('PER'): # for period values...
sp = df.index.shift(1).to_timestamp()[0] # shift by 1 as per HEC convention
else:
                raise ValueError('Either pass in ctype beginning with "PER" '
                                 'for a period-indexed dataframe or change the dataframe index to timestamps')
else:
sp = df.index[0]
# values are either the first column in the pandas DataFrame or should be a pandas Series
values = df.iloc[:, 0].values if isinstance(df, pd.DataFrame) else df.iloc[:].values
istat = pyheclib.hec_zsrtsxd(self.ifltab, pathname,
sp.strftime("%d%b%Y").upper(
), sp.round(freq='T').strftime("%H%M"),
values, cunits[:8], ctype[:8])
self._respond_to_istat_state(istat)
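    # Illustrative usage sketch (all names are hypothetical); note the E part
    # is derived from the DataFrame's frequency, overriding the pathname:
    #
    #   idx = pd.date_range('01JAN1990', periods=24, freq='H')
    #   df = pd.DataFrame({'flow': np.random.rand(24)}, index=idx)
    #   with DSSFile('sample.dss', create_new=True) as d:
    #       d.write_rts('/A/B/FLOW//1HOUR/F/', df, 'CFS', 'INST-VAL')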
def read_its(self, pathname, startDateStr=None, endDateStr=None, guess_vals_per_block=10000):
"""
reads the entire irregular time series record. The timewindow is derived
from the D-PART of the pathname so make sure to read that from the catalog
before calling this function
"""
epart = self.parse_pathname_epart(pathname)
startDateStr, endDateStr = self._parse_times(pathname, startDateStr, endDateStr)
juls, istat = pyheclib.hec_datjul(startDateStr)
jule, istat = pyheclib.hec_datjul(endDateStr)
ietime = istime = 0
# guess how many values to be read based on e part approximation
ktvals = DSSFile._number_between(startDateStr, endDateStr,
DSSFile._get_timedelta_for_interval(epart))
ktvals = guess_vals_per_block*int(ktvals)
kdvals = ktvals
itimes = np.zeros(ktvals, 'i')
dvalues = np.zeros(kdvals, 'd')
        inflag = 0  # Retrieve both values preceding and following time window in addition to time window
nvals, ibdate, cunits, ctype, istat = pyheclib.hec_zritsxd(
self.ifltab, pathname, juls, istime, jule, ietime, itimes, dvalues, inflag)
self._respond_to_istat_state(istat)
if nvals == ktvals:
raise Exception(
"More values than guessed! %d. Call with guess_vals_per_block > 10000 " % ktvals)
base_date = parse('31DEC1899')+timedelta(days=ibdate)
df = pd.DataFrame(dvalues[:nvals], index=base_date +
DSSFile.timedelta_minutes(itimes[:nvals]), columns=[pathname])
return DSSData(data=df, units=cunits.strip(), period_type=ctype.strip())
# return nvals, dvalues, itimes, base_date, cunits, ctype
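    # Illustrative usage sketch (the pathname with an IR- E part is hypothetical):
    #
    #   with DSSFile('sample.dss') as d:
    #       data, units, ptype = d.read_its('/A/B/C/01JAN1990 - 01JAN1991/IR-YEAR/F/')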
def write_its(self, pathname, df, cunits, ctype, interval=None):
"""
write irregular time series to the pathname.
The timewindow part of the pathname (D PART) is used to establish the base julian date
for storage of the time values (minutes relative to that base julian date)
The interval is the block size to store irregular time series for efficient access
interval values should be "IR-YEAR", "IR-MONTH" or "IR-DAY"
Uses the provided pandas.DataFrame df index (time) and values
and also stores the units (cunits) and type (ctype)
"""
parts = pathname.split('/')
# parts[5]=DSSFile.FREQ_EPART_MAP[df.index.freq]
if interval:
parts[5] = interval
else:
if len(parts[5]) == 0:
raise Exception(
"Specify interval = IR-YEAR or IR-MONTH or IR-DAY or provide it the pathname (5th position)")
epart = parts[5]
if len(parts[4]) == 0:
startDateStr = (
df.index[0]-pd.offsets.YearBegin(1)).strftime('%d%b%Y').upper()
endDateStr = (df.index[-1]+pd.offsets.YearBegin(0)
).strftime('%d%b%Y').upper()
parts[4] = startDateStr + " - " + endDateStr
else:
tw = list(map(lambda x: x.strip(), parts[4].split('-')))
startDateStr = tw[0]
endDateStr = tw[1] # self._pad_to_end_of_block(tw[1],epart)
juls, istat = pyheclib.hec_datjul(startDateStr)
jule, istat = pyheclib.hec_datjul(endDateStr)
ietime = istime = 0
pathname = "/".join(parts)
itimes = df.index-parse(startDateStr)
itimes = itimes.total_seconds()/60 # time in minutes since base date juls
        itimes = itimes.values.astype('i')  # convert to integer numpy array
inflag = 1 # replace data (merging should be done in memory)
# values are either the first column in the pandas DataFrame or should be a pandas Series
values = df.iloc[:, 0].values if isinstance(df, pd.DataFrame) else df.iloc[:].values
istat = pyheclib.hec_zsitsxd(self.ifltab, pathname,
itimes, values, juls, cunits, ctype, inflag)
self._respond_to_istat_state(istat)
# return istat
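    # Illustrative usage sketch (all names are hypothetical):
    #
    #   idx = pd.to_datetime(['1990-01-01 06:15', '1990-02-11 23:00'])
    #   df = pd.DataFrame({'stage': [1.2, 3.4]}, index=idx)
    #   with DSSFile('sample.dss', create_new=True) as d:
    #       d.write_its('/A/B/STAGE///F/', df, 'FEET', 'INST-VAL', interval='IR-YEAR')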
| StarcoderdataPython |
6418572 | <reponame>VITA-Group/Large_Scale_GCN_Benchmarking
import gc
import json
import os
import random
from datetime import datetime
import numpy as np
import torch
from options.base_options import BaseOptions
from trainer import trainer
from utils import print_args
def set_seed(args):
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
if args.cuda and not torch.cuda.is_available(): # cuda is not available
args.cuda = False
if args.cuda:
torch.cuda.manual_seed(args.random_seed)
torch.cuda.manual_seed_all(args.random_seed)
os.environ["CUDA_VISIBLE_DEVICES"] = str(args.cuda_num)
torch.manual_seed(args.random_seed)
np.random.seed(args.random_seed)
random.seed(args.random_seed)
def main(args):
list_test_acc = []
list_valid_acc = []
list_train_loss = []
filedir = f"./logs/{args.dataset}"
if not os.path.exists(filedir):
os.makedirs(filedir)
if not args.exp_name:
filename = f"{args.type_model}.json"
else:
filename = f"{args.exp_name}.json"
path_json = os.path.join(filedir, filename)
try:
resume_seed = 0
if os.path.exists(path_json):
if args.resume:
with open(path_json, "r") as f:
saved = json.load(f)
resume_seed = saved["seed"] + 1
list_test_acc = saved["test_acc"]
list_valid_acc = saved["val_acc"]
list_train_loss = saved["train_loss"]
else:
t = os.path.getmtime(path_json)
tstr = datetime.fromtimestamp(t).strftime("%Y_%m_%d_%H_%M_%S")
os.rename(
path_json, os.path.join(filedir, filename + "_" + tstr + ".json")
)
if resume_seed >= args.N_exp:
print("Training already finished!")
return
except:
pass
print_args(args)
if args.debug_mem_speed:
trnr = trainer(args)
trnr.mem_speed_bench()
for seed in range(resume_seed, args.N_exp):
print(f"seed (which_run) = <{seed}>")
args.random_seed = seed
set_seed(args)
# torch.cuda.empty_cache()
trnr = trainer(args)
train_loss, valid_acc, test_acc = trnr.train_and_test(seed)
list_test_acc.append(test_acc)
list_valid_acc.append(valid_acc)
list_train_loss.append(train_loss)
# del trnr
# torch.cuda.empty_cache()
# gc.collect()
## record training data
print(
"mean and std of test acc: ", np.mean(list_test_acc), np.std(list_test_acc)
)
try:
to_save = dict(
seed=seed,
test_acc=list_test_acc,
val_acc=list_valid_acc,
train_loss=list_train_loss,
mean_test_acc=np.mean(list_test_acc),
std_test_acc=np.std(list_test_acc),
)
with open(path_json, "w") as f:
json.dump(to_save, f)
except:
pass
print(
"final mean and std of test acc: ",
f"{np.mean(list_test_acc):.2f} $\\pm$ {np.std(list_test_acc):.2f}",
)
if __name__ == "__main__":
args = BaseOptions().initialize()
main(args)
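# --- Editor's note (hypothetical flags): assuming BaseOptions exposes the
# --- attributes referenced above as CLI flags (--dataset, --type_model,
# --- --N_exp, --exp_name, --resume, --debug_mem_speed, --cuda, --cuda_num),
# --- a typical invocation would be:
# ---     python main.py --dataset ogbn-arxiv --type_model GCN --N_exp 5 --resume
# --- Results accumulate in ./logs/<dataset>/<type_model or exp_name>.json,
# --- and --resume continues from seed = saved["seed"] + 1.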
| StarcoderdataPython |
8160566 | from setuptools import setup
setup(name='color_naming',
version='0.1',
description='Implementation of ',
url='',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['color_naming'],
install_requires=['numpy',],
include_package_data=True,
package_data={'color_naming': ['data/w2c.pkl']},
zip_safe=False)
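# --- Editor's sketch (hedged): because w2c.pkl ships via package_data, it
# --- should be located relative to the installed package rather than the
# --- current working directory. One conventional pattern (assuming the
# --- package exposes no loader of its own):
#
#   import os, pickle
#   import color_naming
#   path = os.path.join(os.path.dirname(color_naming.__file__), "data", "w2c.pkl")
#   with open(path, "rb") as f:
#       w2c = pickle.load(f)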
| StarcoderdataPython |
6415078 | <gh_stars>1-10
from __future__ import with_statement
from fabric.api import local
import env
from tasks import deploy
| StarcoderdataPython |
3589942 |
# S60 contacts restore - <NAME> 2010 - Public Domain - no warranty
# Where to find history:
# on GitHub at https://github.com/ssb22/s60-utils
# and on GitLab at https://gitlab.com/ssb22/s60-utils
# and on BitBucket https://bitbucket.org/ssb22/s60-utils
# and at https://gitlab.developers.cam.ac.uk/ssb22/s60-utils
# and in China https://gitee.com/ssb22/s60-utils
file_to_read = r"E:\contacts-backup.txt"
import contacts
print "Reading",file_to_read
contacts.open().import_vcards(open(file_to_read).read())
print "Done"
| StarcoderdataPython |
4846319 | # -*- coding: utf-8 -*-
from brewtils.schemas import UserCreateSchema, UserListSchema, UserSchema
from beer_garden.api.http.base_handler import BaseHandler
from beer_garden.api.http.handlers import AuthorizationHandler
from beer_garden.db.mongo.models import User
from beer_garden.user import create_user, update_user
# TODO: Update endpoints with authorization checks
class UserAPI(BaseHandler):
def get(self, username):
"""
---
summary: Retrieve a specific User
parameters:
- name: username
in: path
required: true
description: The username of the User
type: string
responses:
200:
description: User with the given username
schema:
$ref: '#/definitions/User'
404:
$ref: '#/definitions/404Error'
50x:
$ref: '#/definitions/50xError'
tags:
- Users
"""
user = User.objects.get(username=username)
response = UserSchema().dump(user).data
self.write(response)
def delete(self, username):
"""
---
summary: Delete a specific User
parameters:
- name: username
in: path
required: true
description: The username of the User
type: string
responses:
204:
description: User has been successfully deleted
404:
$ref: '#/definitions/404Error'
50x:
$ref: '#/definitions/50xError'
tags:
- Users
"""
user = User.objects.get(username=username)
user.delete()
self.set_status(204)
def patch(self, username):
"""
---
summary: Partially update a User
parameters:
- name: username
in: path
required: true
description: The username of the User
type: string
- name: patch
in: body
required: true
description: |
A subset of User attributes to update, most commonly the password.
schema:
$ref: '#/definitions/UserCreate'
responses:
200:
description: User with the given username
schema:
$ref: '#/definitions/User'
400:
$ref: '#/definitions/400Error'
404:
$ref: '#/definitions/404Error'
50x:
$ref: '#/definitions/50xError'
tags:
- Users
"""
user_data = UserCreateSchema().load(self.request_body, partial=True).data
db_user = User.objects.get(username=username)
user = update_user(db_user, **user_data)
response = UserSchema().dump(user).data
self.write(response)
class UserListAPI(BaseHandler):
def get(self):
"""
---
summary: Retrieve all Users
responses:
200:
description: All Users
schema:
$ref: '#/definitions/UserList'
50x:
$ref: '#/definitions/50xError'
tags:
- Users
"""
users = User.objects.all()
response = UserListSchema().dump({"users": users}).data
self.write(response)
def post(self):
"""
---
summary: Create a new User
parameters:
- name: user
in: body
description: The user
schema:
$ref: '#/definitions/UserCreate'
consumes:
- application/json
responses:
201:
description: A new User has been created
schema:
$ref: '#/definitions/User'
400:
$ref: '#/definitions/400Error'
50x:
$ref: '#/definitions/50xError'
tags:
- Users
"""
user_data = UserCreateSchema().load(self.request_body).data
create_user(**user_data)
self.set_status(201)
class WhoAmIAPI(AuthorizationHandler):
def get(self):
"""
---
summary: Retrieve requesting User
responses:
200:
description: Requesting User
schema:
$ref: '#/definitions/User'
401:
$ref: '#/definitions/401Error'
403:
$ref: '#/definitions/403Error'
tags:
- Users
"""
user = self.current_user
response = UserSchema().dump(user).data
self.write(response)
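# --- Editor's usage sketch (hypothetical routes and port): assuming these
# --- handlers are mounted at the conventional /api/v1/users/ paths, typical
# --- calls would look like the following; exact prefixes, ports, and auth
# --- headers depend on the deployment and are assumptions here.
#
#   curl -X POST http://localhost:2337/api/v1/users/ \
#        -H 'Content-Type: application/json' \
#        -d '{"username": "alice", "password": "s3cret"}'
#
#   curl -X PATCH http://localhost:2337/api/v1/users/alice \
#        -H 'Content-Type: application/json' \
#        -d '{"password": "n3w-s3cret"}'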
| StarcoderdataPython |
9743619 | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from odahuflow.sdk.models.base_model_ import Model
from odahuflow.sdk.models import util
class AuthConfig(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, api_token: str=None, api_url: str=None, client_id: str=None, client_secret: str=None, oauth_oidc_token_endpoint: str=None): # noqa: E501
"""AuthConfig - a model defined in Swagger
:param api_token: The api_token of this AuthConfig. # noqa: E501
:type api_token: str
:param api_url: The api_url of this AuthConfig. # noqa: E501
:type api_url: str
:param client_id: The client_id of this AuthConfig. # noqa: E501
:type client_id: str
:param client_secret: The client_secret of this AuthConfig. # noqa: E501
:type client_secret: str
:param oauth_oidc_token_endpoint: The oauth_oidc_token_endpoint of this AuthConfig. # noqa: E501
:type oauth_oidc_token_endpoint: str
"""
self.swagger_types = {
'api_token': str,
'api_url': str,
'client_id': str,
'client_secret': str,
'oauth_oidc_token_endpoint': str
}
self.attribute_map = {
'api_token': 'apiToken',
'api_url': 'apiUrl',
'client_id': 'clientId',
'client_secret': 'clientSecret',
'oauth_oidc_token_endpoint': 'oauthOidcTokenEndpoint'
}
self._api_token = api_token
self._api_url = api_url
self._client_id = client_id
self._client_secret = client_secret
self._oauth_oidc_token_endpoint = oauth_oidc_token_endpoint
@classmethod
def from_dict(cls, dikt) -> 'AuthConfig':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The AuthConfig of this AuthConfig. # noqa: E501
:rtype: AuthConfig
"""
return util.deserialize_model(dikt, cls)
@property
def api_token(self) -> str:
"""Gets the api_token of this AuthConfig.
It is a mock for the future. Currently, it is always empty. # noqa: E501
:return: The api_token of this AuthConfig.
:rtype: str
"""
return self._api_token
@api_token.setter
def api_token(self, api_token: str):
"""Sets the api_token of this AuthConfig.
It is a mock for the future. Currently, it is always empty. # noqa: E501
:param api_token: The api_token of this AuthConfig.
:type api_token: str
"""
self._api_token = api_token
@property
def api_url(self) -> str:
"""Gets the api_url of this AuthConfig.
ODAHU API URL # noqa: E501
:return: The api_url of this AuthConfig.
:rtype: str
"""
return self._api_url
@api_url.setter
def api_url(self, api_url: str):
"""Sets the api_url of this AuthConfig.
ODAHU API URL # noqa: E501
:param api_url: The api_url of this AuthConfig.
:type api_url: str
"""
self._api_url = api_url
@property
def client_id(self) -> str:
"""Gets the client_id of this AuthConfig.
OpenID client_id credential for service account # noqa: E501
:return: The client_id of this AuthConfig.
:rtype: str
"""
return self._client_id
@client_id.setter
def client_id(self, client_id: str):
"""Sets the client_id of this AuthConfig.
OpenID client_id credential for service account # noqa: E501
:param client_id: The client_id of this AuthConfig.
:type client_id: str
"""
self._client_id = client_id
@property
def client_secret(self) -> str:
"""Gets the client_secret of this AuthConfig.
OpenID client_secret credential for service account # noqa: E501
:return: The client_secret of this AuthConfig.
:rtype: str
"""
return self._client_secret
@client_secret.setter
def client_secret(self, client_secret: str):
"""Sets the client_secret of this AuthConfig.
OpenID client_secret credential for service account # noqa: E501
:param client_secret: The client_secret of this AuthConfig.
:type client_secret: str
"""
self._client_secret = client_secret
@property
def oauth_oidc_token_endpoint(self) -> str:
"""Gets the oauth_oidc_token_endpoint of this AuthConfig.
OpenID token url # noqa: E501
:return: The oauth_oidc_token_endpoint of this AuthConfig.
:rtype: str
"""
return self._oauth_oidc_token_endpoint
@oauth_oidc_token_endpoint.setter
def oauth_oidc_token_endpoint(self, oauth_oidc_token_endpoint: str):
"""Sets the oauth_oidc_token_endpoint of this AuthConfig.
OpenID token url # noqa: E501
:param oauth_oidc_token_endpoint: The oauth_oidc_token_endpoint of this AuthConfig.
:type oauth_oidc_token_endpoint: str
"""
self._oauth_oidc_token_endpoint = oauth_oidc_token_endpoint
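# --- Editor's sketch (hedged): a minimal round-trip through from_dict,
# --- assuming util.deserialize_model honors attribute_map as in standard
# --- swagger-codegen output. Endpoint values are placeholders.
if __name__ == "__main__":
    cfg = AuthConfig.from_dict({
        "apiUrl": "https://odahu.example.com",
        "clientId": "service-account",
        "clientSecret": "not-a-real-secret",
        "oauthOidcTokenEndpoint": "https://keycloak.example.com/token",
    })
    print(cfg.api_url, cfg.client_id)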
| StarcoderdataPython |
import psycopg2 as db  # aliasing like this saves typing psycopg2 everywhere
conexion = db.connect(user='postgres',
password='<PASSWORD>',
host='127.0.0.1',
port='5432',
database='test_db')
# variable for the cursor
cursor = conexion.cursor()
# statement variable
# modify / update existing rows
sentencia = 'UPDATE persona SET nombre = %s, apellido = %s, email = %s WHERE id_persona = %s'
valores = (
('Juan', 'Perez', '<EMAIL>', 1),
('Karla1', 'Gomez2', '<EMAIL>', 2)
)
cursor.executemany(sentencia, valores)
# commit the changes to the DB
conexion.commit()
# get the number of updated records
registros_actualizados = cursor.rowcount
print(f'Records updated: {registros_actualizados}')
# close the cursor and connection
cursor.close()
conexion.close()
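# --- Editor's sketch: the same update using context managers, so the cursor
# --- is closed and the transaction committed/rolled back automatically
# --- (psycopg2's connection context manager ends the transaction but does
# --- NOT close the connection). Credentials are placeholders.
#
# with db.connect(user='postgres', password='...', host='127.0.0.1',
#                 port='5432', database='test_db') as conexion:
#     with conexion.cursor() as cursor:
#         cursor.executemany(sentencia, valores)
#         print(f'Records updated: {cursor.rowcount}')
# conexion.close()  # still required after the with-block in psycopg2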
| StarcoderdataPython |
91882 | <reponame>l0l00l000/C3AE<gh_stars>1-10
#coding=utf-8
import os
import math
import pandas as pd
import tensorflow as tf
import numpy as np
import logging
from sklearn.model_selection import train_test_split
'''
Base class for data processing:
Input data must currently follow pandas' input format; each subclass implements its own input interface.
Output is standardized to the pandas feather format,
mainly so that data from the annotation backend can be ingested smoothly.
'''
class BaseProc:
def __init__(self, name, data_dir, output_dir, heads, overwrite=False, tf_dir="../data/", sample_rate=0.01):
'''
@name: proc name
@data_dir: directory of the data to preprocess
@output_dir: output directory
@heads: data column titles
'''
self.data_dir = data_dir
self.name = name
self.output_dir = output_dir
self.heads = heads
self.dataframe = pd.DataFrame(columns=self.heads)
self.overwrite = overwrite
self.tf_dir = tf_dir
self.can_run = True
self.sample_rate = sample_rate
def replica_check(self):
'''
Check for an already-existing (duplicate) task output
'''
if self.overwrite:
return True
for dirname, dirs, fnames in os.walk(self.output_dir):
for fname in fnames:
if self.name in fname:
return False
return True
def process(self, *args, **kwargs):
logging.info("name:%s"%self.name)
self.can_run = self.replica_check()
if not self.can_run:
logging.info("已存在重复文件则不需要重新处理")
self.reload_data()
else:
self._process(*args, **kwargs)
self.save()
self.dataframe = self.dataframe.dropna(axis=0)
self.rectify_data()
self.transtf_record()
def rectify_data(self):
'''
Mainly cleans up dirty/invalid data
'''
pass
def reload_data(self):
'''
Reload previously processed (historical) data
'''
import feather
dataset = pd.DataFrame(columns=self.heads)
for rdir, dlist, fnames in os.walk(self.output_dir):
fnames = [f for f in fnames if f.endswith(".feather") and self.name in f]  # materialize: a filter object is always truthy on Python 3
if fnames:
file_paths = map(lambda name: os.path.join(rdir, name), fnames)
frames = map(lambda path: feather.read_dataframe(path), file_paths)
dataset = pd.concat(frames, ignore_index=True)
self.dataframe = dataset
return dataset
def _process(self, *args, **kwargs):
return NotImplemented
def save(self, chunkSize=5000):
if not self.can_run:
return
chunk_start = 0
dataframe = self.dataframe.reset_index()[self.heads]
while(chunk_start < len(self.dataframe)):
dir_path = os.path.join(self.output_dir, self.name + "_" + str(int(chunk_start / chunkSize)) + ".feather")
tmp_pd = dataframe[chunk_start:chunk_start + chunkSize].copy().reset_index()
tmp_pd.to_feather(dir_path)
chunk_start += chunkSize
def transtf_record(self, record_size=10000):
self.train_sets, self.test_sets = train_test_split(self.dataframe, test_size=self.sample_rate, random_state=2017)
self.train_sets.reset_index(drop=True, inplace=True)
self.test_sets.reset_index(drop=True, inplace=True)
train_nums = self.train_sets.shape[0]
test_nums = self.test_sets.shape[0]
train_file_nums = int(math.ceil(1.0 * train_nums / record_size))
test_file_nums = int(math.ceil(1.0 * test_nums/ record_size))
train_idx = np.linspace(0, train_nums, train_file_nums, dtype=np.int)
test_idx = np.linspace(0, test_nums, test_file_nums, dtype=np.int)
for steps in train_idx:
next_steps = min(steps+record_size, train_nums)
self._trans2tf_record(self.train_sets[steps:next_steps].copy().reset_index(drop=True), steps // record_size, "train")
for steps in test_idx:
next_steps = min(steps+record_size, test_nums)
self._trans2tf_record(self.test_sets[steps:next_steps].copy().reset_index(drop=True), steps // record_size, "test")
def _trans2tf_record(self, dataframe, trunck_num, sub_dir="train"):
"""
Subclasses must implement the TFRecord serialization themselves
"""
return NotImplemented
def _int64_feature(self, value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(self, value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
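# --- Editor's sketch (hypothetical subclass, not in the original repo).
# BaseProc expects subclasses to populate self.dataframe in _process() and to
# serialize each chunk in _trans2tf_record(); tf.io.TFRecordWriter assumes
# TF >= 1.14. Column names and values below are made up.
class ExampleProc(BaseProc):
    def __init__(self, data_dir, output_dir):
        heads = ["image", "age"]
        super(ExampleProc, self).__init__("example", data_dir, output_dir, heads)

    def _process(self, *args, **kwargs):
        # populate self.dataframe with columns matching self.heads
        self.dataframe = pd.DataFrame(
            {"image": [b"\x00", b"\x01", b"\x02"], "age": [23, 31, 47]})

    def _trans2tf_record(self, dataframe, trunck_num, sub_dir="train"):
        out_dir = os.path.join(self.tf_dir, sub_dir)
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        path = os.path.join(out_dir, "%s_%d.tfrecords" % (self.name, trunck_num))
        with tf.io.TFRecordWriter(path) as writer:
            for _, row in dataframe.iterrows():
                feature = {
                    "image": self._bytes_feature(row["image"]),
                    "age": self._int64_feature(int(row["age"])),
                }
                example = tf.train.Example(features=tf.train.Features(feature=feature))
                writer.write(example.SerializeToString())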
| StarcoderdataPython |
6625131 | <reponame>fatihCinarKrtg/zulip
import logging
import secrets
import urllib
from functools import wraps
from typing import Any, Dict, List, Mapping, Optional, cast
from urllib.parse import urlencode
import jwt
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.views import LoginView as DjangoLoginView
from django.contrib.auth.views import PasswordResetView as DjangoPasswordResetView
from django.contrib.auth.views import logout_then_login as django_logout_then_login
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.forms import Form
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, HttpResponseServerError
from django.shortcuts import redirect, render
from django.template.response import SimpleTemplateResponse
from django.urls import reverse
from django.utils.html import escape
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.translation import gettext as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_safe
from jinja2.utils import Markup as mark_safe
from social_django.utils import load_backend, load_strategy
from two_factor.forms import BackupTokenForm
from two_factor.views import LoginView as BaseTwoFactorLoginView
from confirmation.models import (
Confirmation,
ConfirmationKeyException,
create_confirmation_link,
get_object_from_key,
)
from version import API_FEATURE_LEVEL, ZULIP_MERGE_BASE, ZULIP_VERSION
from zerver.context_processors import get_realm_from_request, login_context, zulip_default_context
from zerver.decorator import do_login, log_view_func, process_client, require_post
from zerver.forms import (
DEACTIVATED_ACCOUNT_ERROR,
AuthenticationTokenForm,
HomepageForm,
OurAuthenticationForm,
ZulipPasswordResetForm,
)
from zerver.lib.exceptions import (
AuthenticationFailedError,
InvalidSubdomainError,
JsonableError,
PasswordAuthDisabledError,
PasswordResetRequiredError,
RateLimited,
RealmDeactivatedError,
UserDeactivatedError,
)
from zerver.lib.mobile_auth_otp import otp_encrypt_api_key
from zerver.lib.push_notifications import push_notifications_enabled
from zerver.lib.pysa import mark_sanitized
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.request import REQ, RequestNotes, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.sessions import set_expirable_session_var
from zerver.lib.subdomains import get_subdomain, is_subdomain_root_or_alias
from zerver.lib.types import ViewFuncT
from zerver.lib.url_encoding import append_url_query_string
from zerver.lib.user_agent import parse_user_agent
from zerver.lib.users import get_api_key
from zerver.lib.utils import has_api_key_format
from zerver.lib.validator import validate_login_email
from zerver.models import (
MultiuseInvite,
PreregistrationUser,
Realm,
UserProfile,
filter_to_valid_prereg_users,
get_realm,
remote_user_to_email,
)
from zerver.signals import email_on_new_login
from zproject.backends import (
AUTH_BACKEND_NAME_MAP,
AppleAuthBackend,
ExternalAuthDataDict,
ExternalAuthResult,
GenericOpenIdConnectBackend,
SAMLAuthBackend,
ZulipLDAPAuthBackend,
ZulipLDAPConfigurationError,
ZulipRemoteUserBackend,
auth_enabled_helper,
dev_auth_enabled,
ldap_auth_enabled,
password_auth_enabled,
saml_auth_enabled,
validate_otp_params,
)
ExtraContext = Optional[Dict[str, Any]]
def get_safe_redirect_to(url: str, redirect_host: str) -> str:
is_url_safe = url_has_allowed_host_and_scheme(url=url, allowed_hosts=None)
if is_url_safe:
# Mark as safe to prevent Pysa from surfacing false positives for
# open redirects. In this branch, we have already checked that the URL
# points to the specified 'redirect_host', or is relative.
return urllib.parse.urljoin(redirect_host, mark_sanitized(url))
else:
return redirect_host
def create_preregistration_user(
email: str,
request: HttpRequest,
realm_creation: bool = False,
password_required: bool = True,
full_name: Optional[str] = None,
full_name_validated: bool = False,
) -> PreregistrationUser:
realm = None
if not realm_creation:
try:
realm = get_realm(get_subdomain(request))
except Realm.DoesNotExist:
pass
return PreregistrationUser.objects.create(
email=email,
realm_creation=realm_creation,
        password_required=password_required,
realm=realm,
full_name=full_name,
full_name_validated=full_name_validated,
)
def maybe_send_to_registration(
request: HttpRequest,
email: str,
full_name: str = "",
mobile_flow_otp: Optional[str] = None,
desktop_flow_otp: Optional[str] = None,
is_signup: bool = False,
password_required: bool = True,
multiuse_object_key: str = "",
full_name_validated: bool = False,
) -> HttpResponse:
"""Given a successful authentication for an email address (i.e. we've
confirmed the user controls the email address) that does not
currently have a Zulip account in the target realm, send them to
the registration flow or the "continue to registration" flow,
depending on is_signup, whether the email address can join the
organization (checked in HomepageForm), and similar details.
"""
# In the desktop and mobile registration flows, the sign up
# happens in the browser so the user can use their
# already-logged-in social accounts. Then at the end, with the
# user account created, we pass the appropriate data to the app
# via e.g. a `zulip://` redirect. We store the OTP keys for the
# mobile/desktop flow in the session with 1-hour expiry, because
# we want this configuration of having a successful authentication
# result in being logged into the app to persist if the user makes
# mistakes while trying to authenticate (E.g. clicks the wrong
# Google account, hits back, etc.) during a given browser session,
# rather than just logging into the web app in the target browser.
#
# We can't use our usual pre-account-creation state storage
# approach of putting something in PreregistrationUser, because
# that would apply to future registration attempts on other
# devices, e.g. just creating an account on the web on their laptop.
assert not (mobile_flow_otp and desktop_flow_otp)
if mobile_flow_otp:
set_expirable_session_var(
request.session, "registration_mobile_flow_otp", mobile_flow_otp, expiry_seconds=3600
)
elif desktop_flow_otp:
set_expirable_session_var(
request.session, "registration_desktop_flow_otp", desktop_flow_otp, expiry_seconds=3600
)
multiuse_obj: Optional[MultiuseInvite] = None
realm: Optional[Realm] = None
from_multiuse_invite = False
if multiuse_object_key:
from_multiuse_invite = True
try:
multiuse_obj = get_object_from_key(multiuse_object_key, Confirmation.MULTIUSE_INVITE)
except ConfirmationKeyException:
return render(request, "zerver/confirmation_link_expired_error.html", status=404)
assert multiuse_obj is not None
realm = multiuse_obj.realm
invited_as = multiuse_obj.invited_as
else:
try:
realm = get_realm(get_subdomain(request))
except Realm.DoesNotExist:
pass
invited_as = PreregistrationUser.INVITE_AS["MEMBER"]
form = HomepageForm({"email": email}, realm=realm, from_multiuse_invite=from_multiuse_invite)
if form.is_valid():
# If the email address is allowed to sign up for an account in
# this organization, construct a PreregistrationUser and
# Confirmation objects, and then send the user to account
# creation or confirm-continue-registration depending on
# is_signup.
try:
prereg_user = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(email__iexact=email, realm=realm)
).latest("invited_at")
# password_required and full_name data passed here as argument should take precedence
# over the defaults with which the existing PreregistrationUser that we've just fetched
# was created.
            prereg_user.password_required = password_required
update_fields = ["password_required"]
if full_name:
prereg_user.full_name = full_name
prereg_user.full_name_validated = full_name_validated
update_fields.extend(["full_name", "full_name_validated"])
prereg_user.save(update_fields=update_fields)
except PreregistrationUser.DoesNotExist:
prereg_user = create_preregistration_user(
email,
request,
                password_required=password_required,
full_name=full_name,
full_name_validated=full_name_validated,
)
if multiuse_obj is not None:
request.session.modified = True
streams_to_subscribe = list(multiuse_obj.streams.all())
prereg_user.streams.set(streams_to_subscribe)
prereg_user.invited_as = invited_as
prereg_user.save()
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
if is_signup:
return redirect(confirmation_link)
context = {"email": email, "continue_link": confirmation_link, "full_name": full_name}
return render(request, "zerver/confirm_continue_registration.html", context=context)
# This email address it not allowed to join this organization, so
# just send the user back to the registration page.
url = reverse("register")
context = login_context(request)
extra_context: Mapping[str, Any] = {
"form": form,
"current_url": lambda: url,
"from_multiuse_invite": from_multiuse_invite,
"multiuse_object_key": multiuse_object_key,
"mobile_flow_otp": mobile_flow_otp,
"desktop_flow_otp": desktop_flow_otp,
}
context.update(extra_context)
return render(request, "zerver/accounts_home.html", context=context)
def register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
# We have verified the user controls an email address, but
# there's no associated Zulip user account. Consider sending
# the request to registration.
kwargs: Dict[str, Any] = dict(result.data_dict)
# maybe_send_to_registration doesn't take these arguments, so delete them.
kwargs.pop("subdomain", None)
kwargs.pop("redirect_to", None)
kwargs.pop("is_realm_creation", None)
kwargs["password_required"] = False
return maybe_send_to_registration(request, **kwargs)
def login_or_register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
"""Given a successful authentication showing the user controls given
email address (email) and potentially a UserProfile
object (if the user already has a Zulip account), redirect the
browser to the appropriate place:
* The logged-in app if the user already has a Zulip account and is
trying to log in, potentially to an initial narrow or page that had been
saved in the `redirect_to` parameter.
* The registration form if is_signup was set (i.e. the user is
trying to create a Zulip account)
* A special `confirm_continue_registration.html` "do you want to
register or try another account" if the user doesn't have a
Zulip account but is_signup is False (i.e. the user tried to log in
and then did social authentication selecting an email address that does
not have a Zulip account in this organization).
* A zulip:// URL to send control back to the mobile or desktop apps if they
are doing authentication using the mobile_flow_otp or desktop_flow_otp flow.
"""
user_profile = result.user_profile
if user_profile is None or user_profile.is_mirror_dummy:
return register_remote_user(request, result)
# Otherwise, the user has successfully authenticated to an
# account, and we need to do the right thing depending whether
# or not they're using the mobile OTP flow or want a browser session.
is_realm_creation = result.data_dict.get("is_realm_creation")
mobile_flow_otp = result.data_dict.get("mobile_flow_otp")
desktop_flow_otp = result.data_dict.get("desktop_flow_otp")
if mobile_flow_otp is not None:
return finish_mobile_flow(request, user_profile, mobile_flow_otp)
elif desktop_flow_otp is not None:
return finish_desktop_flow(request, user_profile, desktop_flow_otp)
do_login(request, user_profile)
redirect_to = result.data_dict.get("redirect_to", "")
if is_realm_creation is not None and settings.BILLING_ENABLED:
from corporate.lib.stripe import is_free_trial_offer_enabled
if is_free_trial_offer_enabled():
redirect_to = "{}?onboarding=true".format(reverse("initial_upgrade"))
redirect_to = get_safe_redirect_to(redirect_to, user_profile.realm.uri)
return HttpResponseRedirect(redirect_to)
def finish_desktop_flow(request: HttpRequest, user_profile: UserProfile, otp: str) -> HttpResponse:
"""
The desktop otp flow returns to the app (through the clipboard)
a token that allows obtaining (through log_into_subdomain) a logged in session
for the user account we authenticated in this flow.
The token can only be used once and within ExternalAuthResult.LOGIN_KEY_EXPIRATION_SECONDS
of being created, as nothing more powerful is needed for the desktop flow
and this ensures the key can only be used for completing this authentication attempt.
"""
result = ExternalAuthResult(user_profile=user_profile)
token = result.store_data()
key = bytes.fromhex(otp)
iv = secrets.token_bytes(12)
desktop_data = (iv + AESGCM(key).encrypt(iv, token.encode(), b"")).hex()
context = {
"desktop_data": desktop_data,
"browser_url": reverse("login_page", kwargs={"template_name": "zerver/login.html"}),
"realm_icon_url": realm_icon_url(user_profile.realm),
}
return render(request, "zerver/desktop_redirect.html", context=context)
def finish_mobile_flow(request: HttpRequest, user_profile: UserProfile, otp: str) -> HttpResponse:
# For the mobile OAuth flow, we send the API key and other
# necessary details in a redirect to a zulip:// URI scheme.
api_key = get_api_key(user_profile)
response = create_response_for_otp_flow(
api_key, otp, user_profile, encrypted_key_field_name="otp_encrypted_api_key"
)
# Since we are returning an API key instead of going through
# the Django login() function (which creates a browser
# session, etc.), the "new login" signal handler (which
# triggers an email notification new logins) will not run
# automatically. So we call it manually here.
#
# Arguably, sending a fake 'user_logged_in' signal would be a better approach:
# user_logged_in.send(sender=user_profile.__class__, request=request, user=user_profile)
email_on_new_login(sender=user_profile.__class__, request=request, user=user_profile)
# Mark this request as having a logged-in user for our server logs.
process_client(request, user_profile)
RequestNotes.get_notes(request).requestor_for_logs = user_profile.format_requestor_for_logs()
return response
def create_response_for_otp_flow(
key: str, otp: str, user_profile: UserProfile, encrypted_key_field_name: str
) -> HttpResponse:
realm_uri = user_profile.realm.uri
# Check if the mobile URI is overridden in settings, if so, replace it
# This block should only apply to the mobile flow, so we if add others, this
# needs to be conditional.
if realm_uri in settings.REALM_MOBILE_REMAP_URIS:
realm_uri = settings.REALM_MOBILE_REMAP_URIS[realm_uri]
params = {
encrypted_key_field_name: otp_encrypt_api_key(key, otp),
"email": user_profile.delivery_email,
"realm": realm_uri,
}
# We can't use HttpResponseRedirect, since it only allows HTTP(S) URLs
response = HttpResponse(status=302)
response["Location"] = append_url_query_string("zulip://login", urllib.parse.urlencode(params))
return response
@log_view_func
@has_request_variables
def remote_user_sso(
request: HttpRequest,
mobile_flow_otp: Optional[str] = REQ(default=None),
desktop_flow_otp: Optional[str] = REQ(default=None),
next: str = REQ(default="/"),
) -> HttpResponse:
subdomain = get_subdomain(request)
try:
realm: Optional[Realm] = get_realm(subdomain)
except Realm.DoesNotExist:
realm = None
if not auth_enabled_helper([ZulipRemoteUserBackend.auth_backend_name], realm):
return config_error(request, "remote_user_backend_disabled")
try:
remote_user = request.META["REMOTE_USER"]
except KeyError:
return config_error(request, "remote_user_header_missing")
# Django invokes authenticate methods by matching arguments, and this
# authentication flow will not invoke LDAP authentication because of
# this condition of Django so no need to check if LDAP backend is
# enabled.
validate_login_email(remote_user_to_email(remote_user))
# Here we support the mobile and desktop flow for REMOTE_USER_BACKEND; we
# validate the data format and then pass it through to
# login_or_register_remote_user if appropriate.
validate_otp_params(mobile_flow_otp, desktop_flow_otp)
if realm is None:
user_profile = None
else:
user_profile = authenticate(remote_user=remote_user, realm=realm)
email = remote_user_to_email(remote_user)
data_dict = ExternalAuthDataDict(
email=email,
mobile_flow_otp=mobile_flow_otp,
desktop_flow_otp=desktop_flow_otp,
redirect_to=next,
)
if realm:
data_dict["subdomain"] = realm.subdomain
else:
data_dict["subdomain"] = "" # realm creation happens on root subdomain
result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
return login_or_register_remote_user(request, result)
@csrf_exempt
@log_view_func
def remote_user_jwt(request: HttpRequest) -> HttpResponse:
subdomain = get_subdomain(request)
try:
key = settings.JWT_AUTH_KEYS[subdomain]["key"]
algorithms = settings.JWT_AUTH_KEYS[subdomain]["algorithms"]
except KeyError:
raise JsonableError(_("Auth key for this subdomain not found."))
try:
json_web_token = request.POST["json_web_token"]
options = {"verify_signature": True}
payload = jwt.decode(json_web_token, key, algorithms=algorithms, options=options)
except KeyError:
raise JsonableError(_("No JSON web token passed in request"))
except jwt.InvalidTokenError:
raise JsonableError(_("Bad JSON web token"))
remote_user = payload.get("user", None)
if remote_user is None:
raise JsonableError(_("No user specified in JSON web token claims"))
email_domain = payload.get("realm", None)
if email_domain is None:
raise JsonableError(_("No organization specified in JSON web token claims"))
email = f"{remote_user}@{email_domain}"
try:
realm = get_realm(subdomain)
except Realm.DoesNotExist:
raise JsonableError(_("Wrong subdomain"))
user_profile = authenticate(username=email, realm=realm, use_dummy_backend=True)
if user_profile is None:
result = ExternalAuthResult(
data_dict={"email": email, "full_name": remote_user, "subdomain": realm.subdomain}
)
else:
result = ExternalAuthResult(user_profile=user_profile)
return login_or_register_remote_user(request, result)
@has_request_variables
def oauth_redirect_to_root(
request: HttpRequest,
url: str,
sso_type: str,
is_signup: bool = False,
extra_url_params: Dict[str, str] = {},
next: Optional[str] = REQ(default=None),
multiuse_object_key: str = REQ(default=""),
mobile_flow_otp: Optional[str] = REQ(default=None),
desktop_flow_otp: Optional[str] = REQ(default=None),
) -> HttpResponse:
main_site_uri = settings.ROOT_DOMAIN_URI + url
if settings.SOCIAL_AUTH_SUBDOMAIN is not None and sso_type == "social":
main_site_uri = (
settings.EXTERNAL_URI_SCHEME
+ settings.SOCIAL_AUTH_SUBDOMAIN
+ "."
+ settings.EXTERNAL_HOST
) + url
params = {
"subdomain": get_subdomain(request),
"is_signup": "1" if is_signup else "0",
}
params["multiuse_object_key"] = multiuse_object_key
# mobile_flow_otp is a one-time pad provided by the app that we
# can use to encrypt the API key when passing back to the app.
validate_otp_params(mobile_flow_otp, desktop_flow_otp)
if mobile_flow_otp is not None:
params["mobile_flow_otp"] = mobile_flow_otp
if desktop_flow_otp is not None:
params["desktop_flow_otp"] = desktop_flow_otp
if next:
params["next"] = next
params = {**params, **extra_url_params}
return redirect(append_url_query_string(main_site_uri, urllib.parse.urlencode(params)))
def handle_desktop_flow(func: ViewFuncT) -> ViewFuncT:
@wraps(func)
def wrapper(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
user_agent = parse_user_agent(request.META.get("HTTP_USER_AGENT", "Missing User-Agent"))
if user_agent["name"] == "ZulipElectron":
return render(request, "zerver/desktop_login.html")
return func(request, *args, **kwargs)
return cast(ViewFuncT, wrapper) # https://github.com/python/mypy/issues/1927
@handle_desktop_flow
def start_remote_user_sso(request: HttpRequest) -> HttpResponse:
"""
The purpose of this endpoint is to provide an initial step in the flow
on which we can handle the special behavior for the desktop app.
/accounts/login/sso may have Apache intercepting requests to it
to do authentication, so we need this additional endpoint.
"""
query = request.META["QUERY_STRING"]
return redirect(append_url_query_string(reverse(remote_user_sso), query))
@handle_desktop_flow
def start_social_login(
request: HttpRequest,
backend: str,
extra_arg: Optional[str] = None,
) -> HttpResponse:
backend_url = reverse("social:begin", args=[backend])
extra_url_params: Dict[str, str] = {}
if backend == "saml":
if not SAMLAuthBackend.check_config():
return config_error(request, "saml")
# This backend requires the name of the IdP (from the list of configured ones)
# to be passed as the parameter.
if not extra_arg or extra_arg not in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS:
logging.info(
"Attempted to initiate SAML authentication with wrong idp argument: %s", extra_arg
)
return config_error(request, "saml")
extra_url_params = {"idp": extra_arg}
if backend == "apple" and not AppleAuthBackend.check_config():
return config_error(request, "apple")
if backend == "oidc" and not GenericOpenIdConnectBackend.check_config():
return config_error(request, "oidc")
# TODO: Add AzureAD also.
if backend in ["github", "google", "gitlab"]:
key_setting = "SOCIAL_AUTH_" + backend.upper() + "_KEY"
secret_setting = "SOCIAL_AUTH_" + backend.upper() + "_SECRET"
if not (getattr(settings, key_setting) and getattr(settings, secret_setting)):
return config_error(request, backend)
return oauth_redirect_to_root(request, backend_url, "social", extra_url_params=extra_url_params)
@handle_desktop_flow
def start_social_signup(
request: HttpRequest,
backend: str,
extra_arg: Optional[str] = None,
) -> HttpResponse:
backend_url = reverse("social:begin", args=[backend])
extra_url_params: Dict[str, str] = {}
if backend == "saml":
if not SAMLAuthBackend.check_config():
return config_error(request, "saml")
if not extra_arg or extra_arg not in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS:
logging.info(
"Attempted to initiate SAML authentication with wrong idp argument: %s", extra_arg
)
return config_error(request, "saml")
extra_url_params = {"idp": extra_arg}
return oauth_redirect_to_root(
request, backend_url, "social", is_signup=True, extra_url_params=extra_url_params
)
_subdomain_token_salt = "zerver.views.auth.log_into_subdomain"
@log_view_func
def log_into_subdomain(request: HttpRequest, token: str) -> HttpResponse:
"""Given a valid authentication token (generated by
redirect_and_log_into_subdomain called on auth.zulip.example.com),
call login_or_register_remote_user, passing all the authentication
result data that has been stored in Redis, associated with this token.
"""
# The tokens are intended to have the same format as API keys.
if not has_api_key_format(token):
logging.warning("log_into_subdomain: Malformed token given: %s", token)
return HttpResponse(status=400)
try:
result = ExternalAuthResult(login_token=token)
except ExternalAuthResult.InvalidTokenError:
logging.warning("log_into_subdomain: Invalid token given: %s", token)
return render(request, "zerver/log_into_subdomain_token_invalid.html", status=400)
subdomain = get_subdomain(request)
if result.data_dict["subdomain"] != subdomain:
raise JsonableError(_("Invalid subdomain"))
return login_or_register_remote_user(request, result)
def redirect_and_log_into_subdomain(result: ExternalAuthResult) -> HttpResponse:
token = result.store_data()
realm = get_realm(result.data_dict["subdomain"])
subdomain_login_uri = realm.uri + reverse(log_into_subdomain, args=[token])
return redirect(subdomain_login_uri)
def redirect_to_misconfigured_ldap_notice(request: HttpRequest, error_type: int) -> HttpResponse:
if error_type == ZulipLDAPAuthBackend.REALM_IS_NONE_ERROR:
return config_error(request, "ldap")
else:
raise AssertionError("Invalid error type")
def show_deactivation_notice(request: HttpRequest) -> HttpResponse:
realm = get_realm_from_request(request)
if realm and realm.deactivated:
context = {"deactivated_domain_name": realm.name}
if realm.deactivated_redirect is not None:
context["deactivated_redirect"] = realm.deactivated_redirect
return render(request, "zerver/deactivated.html", context=context)
return HttpResponseRedirect(reverse("login_page"))
def redirect_to_deactivation_notice() -> HttpResponse:
return HttpResponseRedirect(reverse(show_deactivation_notice))
def update_login_page_context(request: HttpRequest, context: Dict[str, Any]) -> None:
for key in ("email", "already_registered"):
try:
context[key] = request.GET[key]
except KeyError:
pass
deactivated_email = request.GET.get("is_deactivated")
if deactivated_email is None:
return
try:
validate_email(deactivated_email)
context["deactivated_account_error"] = mark_safe(
DEACTIVATED_ACCOUNT_ERROR.format(username=escape(deactivated_email))
)
except ValidationError:
logging.info("Invalid email in is_deactivated param to login page: %s", deactivated_email)
class TwoFactorLoginView(BaseTwoFactorLoginView):
extra_context: ExtraContext = None
form_list = (
("auth", OurAuthenticationForm),
("token", AuthenticationTokenForm),
("backup", BackupTokenForm),
)
def __init__(self, extra_context: ExtraContext = None, *args: Any, **kwargs: Any) -> None:
self.extra_context = extra_context
super().__init__(*args, **kwargs)
def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
context = super().get_context_data(**kwargs)
if self.extra_context is not None:
context.update(self.extra_context)
update_login_page_context(self.request, context)
realm = get_realm_from_request(self.request)
redirect_to = realm.uri if realm else "/"
context["next"] = self.request.POST.get(
"next",
self.request.GET.get("next", redirect_to),
)
return context
def done(self, form_list: List[Form], **kwargs: Any) -> HttpResponse:
"""
Log in the user and redirect to the desired page.
We need to override this function so that we can redirect to
realm.uri instead of '/'.
"""
realm_uri = self.get_user().realm.uri
# This mock.patch business is an unpleasant hack that we'd
# ideally like to remove by instead patching the upstream
# module to support better configurability of the
# LOGIN_REDIRECT_URL setting. But until then, it works. We
# import mock.patch here because mock has an expensive import
# process involving pbr -> pkgresources (which is really slow).
from unittest.mock import patch
with patch.object(settings, "LOGIN_REDIRECT_URL", realm_uri):
return super().done(form_list, **kwargs)
@has_request_variables
def login_page(
request: HttpRequest,
next: str = REQ(default="/"),
**kwargs: Any,
) -> HttpResponse:
if settings.SOCIAL_AUTH_SUBDOMAIN == get_subdomain(request):
return social_auth_subdomain_login_page(request)
# To support previewing the Zulip login pages, we have a special option
# that disables the default behavior of redirecting logged-in users to the
# logged-in app.
is_preview = "preview" in request.GET
if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
if request.user and request.user.is_verified():
return HttpResponseRedirect(request.user.realm.uri)
elif request.user.is_authenticated and not is_preview:
return HttpResponseRedirect(request.user.realm.uri)
if is_subdomain_root_or_alias(request) and settings.ROOT_DOMAIN_LANDING_PAGE:
redirect_url = reverse("realm_redirect")
if request.GET:
redirect_url = append_url_query_string(redirect_url, request.GET.urlencode())
return HttpResponseRedirect(redirect_url)
realm = get_realm_from_request(request)
if realm and realm.deactivated:
return redirect_to_deactivation_notice()
extra_context = kwargs.pop("extra_context", {})
extra_context["next"] = next
if dev_auth_enabled() and kwargs.get("template_name") == "zerver/development/dev_login.html":
from zerver.views.development.dev_login import add_dev_login_context
if "new_realm" in request.POST:
try:
realm = get_realm(request.POST["new_realm"])
except Realm.DoesNotExist:
realm = None
add_dev_login_context(realm, extra_context)
if realm and "new_realm" in request.POST:
# If we're switching realms, redirect to that realm, but
# only if it actually exists.
return HttpResponseRedirect(realm.uri)
if "username" in request.POST:
extra_context["email"] = request.POST["username"]
extra_context.update(login_context(request))
if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
return start_two_factor_auth(request, extra_context=extra_context, **kwargs)
try:
template_response = DjangoLoginView.as_view(
authentication_form=OurAuthenticationForm, extra_context=extra_context, **kwargs
)(request)
except ZulipLDAPConfigurationError as e:
assert len(e.args) > 1
return redirect_to_misconfigured_ldap_notice(request, e.args[1])
if isinstance(template_response, SimpleTemplateResponse):
# Only those responses that are rendered using a template have
# context_data attribute. This attribute doesn't exist otherwise. It is
# added in SimpleTemplateResponse class, which is a derived class of
# HttpResponse. See django.template.response.SimpleTemplateResponse,
# https://github.com/django/django/blob/2.0/django/template/response.py#L19
update_login_page_context(request, template_response.context_data)
assert isinstance(template_response, HttpResponse)
return template_response
def social_auth_subdomain_login_page(request: HttpRequest) -> HttpResponse:
origin_subdomain = request.session.get("subdomain")
if origin_subdomain is not None:
try:
origin_realm = get_realm(origin_subdomain)
return HttpResponseRedirect(origin_realm.uri)
except Realm.DoesNotExist:
pass
return render(request, "zerver/auth_subdomain.html", status=400)
def start_two_factor_auth(
request: HttpRequest, extra_context: ExtraContext = None, **kwargs: Any
) -> HttpResponse:
two_fa_form_field = "two_factor_login_view-current_step"
if two_fa_form_field not in request.POST:
# Here we inject the 2FA step in the request context if it's missing to
# force the user to go to the first step of 2FA authentication process.
# This seems a bit hackish but simplifies things from testing point of
# view. I don't think this can result in anything bad because all the
# authentication logic runs after the auth step.
#
# If we don't do this, we will have to modify a lot of auth tests to
# insert this variable in the request.
request.POST = request.POST.copy()
request.POST.update({two_fa_form_field: "auth"})
"""
This is how Django implements as_view(), so extra_context will be passed
to the __init__ method of TwoFactorLoginView.
def as_view(cls, **initkwargs):
def view(request, *args, **kwargs):
self = cls(**initkwargs)
...
return view
"""
two_fa_view = TwoFactorLoginView.as_view(extra_context=extra_context, **kwargs)
return two_fa_view(request, **kwargs)
@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(
request: HttpRequest, username: str = REQ(), password: str = REQ()
) -> HttpResponse:
return_data: Dict[str, bool] = {}
realm = get_realm_from_request(request)
if realm is None:
raise InvalidSubdomainError()
if not ldap_auth_enabled(realm=realm):
# In case we don't authenticate against LDAP, check for a valid
# email. LDAP backend can authenticate against a non-email.
validate_login_email(username)
user_profile = authenticate(
request=request, username=username, password=password, realm=realm, return_data=return_data
)
if return_data.get("inactive_user"):
raise UserDeactivatedError()
if return_data.get("inactive_realm"):
raise RealmDeactivatedError()
if return_data.get("password_auth_disabled"):
raise PasswordAuthDisabledError()
if return_data.get("password_reset_needed"):
raise PasswordResetRequiredError()
if user_profile is None:
raise AuthenticationFailedError()
assert user_profile.is_authenticated
# Maybe sending 'user_logged_in' signal is the better approach:
# user_logged_in.send(sender=user_profile.__class__, request=request, user=user_profile)
# Not doing this only because over here we don't add the user information
# in the session. If the signal receiver assumes that we do then that
# would cause problems.
email_on_new_login(sender=user_profile.__class__, request=request, user=user_profile)
# Mark this request as having a logged-in user for our server logs.
process_client(request, user_profile)
RequestNotes.get_notes(request).requestor_for_logs = user_profile.format_requestor_for_logs()
api_key = get_api_key(user_profile)
return json_success({"api_key": api_key, "email": user_profile.delivery_email})
def get_auth_backends_data(request: HttpRequest) -> Dict[str, Any]:
"""Returns which authentication methods are enabled on the server"""
subdomain = get_subdomain(request)
try:
realm = Realm.objects.get(string_id=subdomain)
except Realm.DoesNotExist:
# If not the root subdomain, this is an error
if subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
raise JsonableError(_("Invalid subdomain"))
# With the root subdomain, it's an error or not depending
# whether ROOT_DOMAIN_LANDING_PAGE (which indicates whether
# there are some realms without subdomains on this server)
# is set.
if settings.ROOT_DOMAIN_LANDING_PAGE:
raise JsonableError(_("Subdomain required"))
else:
realm = None
result = {
"password": password_auth_<PASSWORD>(realm),
}
for auth_backend_name in AUTH_BACKEND_NAME_MAP:
key = auth_backend_name.lower()
result[key] = auth_enabled_helper([auth_backend_name], realm)
return result
def check_server_incompatibility(request: HttpRequest) -> bool:
user_agent = parse_user_agent(request.META.get("HTTP_USER_AGENT", "Missing User-Agent"))
return user_agent["name"] == "ZulipInvalid"
@require_safe
@csrf_exempt
def api_get_server_settings(request: HttpRequest) -> HttpResponse:
# Log which client is making this request.
process_client(request, request.user, skip_update_user_activity=True)
result = dict(
authentication_methods=get_auth_backends_data(request),
zulip_version=ZULIP_VERSION,
zulip_merge_base=ZULIP_MERGE_BASE,
zulip_feature_level=API_FEATURE_LEVEL,
push_notifications_enabled=push_notifications_enabled(),
is_incompatible=check_server_incompatibility(request),
)
context = zulip_default_context(request)
context.update(login_context(request))
# IMPORTANT NOTE:
# realm_name, realm_icon, etc. are not guaranteed to appear in the response.
# * If they do, that means the server URL has only one realm on it
# * If they don't, the server has multiple realms, and it's not clear which is
# the requested realm, so we can't send back these data.
for settings_item in [
"email_auth_enabled",
"require_email_format_usernames",
"realm_uri",
"realm_name",
"realm_icon",
"realm_description",
"external_authentication_methods",
]:
if context[settings_item] is not None:
result[settings_item] = context[settings_item]
return json_success(result)
@has_request_variables
def json_fetch_api_key(
request: HttpRequest, user_profile: UserProfile, password: str = REQ(default="")
) -> HttpResponse:
realm = get_realm_from_request(request)
if realm is None:
raise JsonableError(_("Invalid subdomain"))
if password_auth_enabled(user_profile.realm):
if not authenticate(
request=request, username=user_profile.delivery_email, password=password, realm=realm
):
raise JsonableError(_("Your username or password is incorrect."))
api_key = get_api_key(user_profile)
return json_success({"api_key": api_key, "email": user_profile.delivery_email})
@require_post
def logout_then_login(request: HttpRequest, **kwargs: Any) -> HttpResponse:
return django_logout_then_login(request, kwargs)
def password_reset(request: HttpRequest) -> HttpResponse:
if is_subdomain_root_or_alias(request) and settings.ROOT_DOMAIN_LANDING_PAGE:
redirect_url = append_url_query_string(
reverse("realm_redirect"), urlencode({"next": reverse("password_reset")})
)
return HttpResponseRedirect(redirect_url)
try:
response = DjangoPasswordResetView.as_view(
template_name="zerver/reset.html",
form_class=ZulipPasswordResetForm,
success_url="/accounts/password/reset/done/",
)(request)
except RateLimited as e:
assert e.secs_to_freedom is not None
return render(
request,
"zerver/rate_limit_exceeded.html",
context={"retry_after": int(e.secs_to_freedom)},
status=429,
)
assert isinstance(response, HttpResponse)
return response
@csrf_exempt
def saml_sp_metadata(request: HttpRequest, **kwargs: Any) -> HttpResponse: # nocoverage
"""
This is the view function for generating our SP metadata
for SAML authentication. It's meant for helping check the correctness
of the configuration when setting up SAML, or for obtaining the XML metadata
if the IdP requires it.
Taken from https://python-social-auth.readthedocs.io/en/latest/backends/saml.html
"""
if not saml_auth_enabled():
return config_error(request, "saml")
complete_url = reverse("social:complete", args=("saml",))
saml_backend = load_backend(load_strategy(request), "saml", complete_url)
metadata, errors = saml_backend.generate_metadata_xml()
if not errors:
return HttpResponse(content=metadata, content_type="text/xml")
return HttpResponseServerError(content=", ".join(errors))
def config_error(request: HttpRequest, error_category_name: str) -> HttpResponse:
contexts = {
"apple": {"social_backend_name": "apple", "has_markdown_file": True},
"google": {"social_backend_name": "google", "has_markdown_file": True},
"github": {"social_backend_name": "github", "has_markdown_file": True},
"gitlab": {"social_backend_name": "gitlab", "has_markdown_file": True},
"ldap": {"error_name": "ldap_error_realm_is_none"},
"dev": {"error_name": "dev_not_supported_error"},
"saml": {"social_backend_name": "saml"},
"smtp": {"error_name": "smtp_error"},
"remote_user_backend_disabled": {"error_name": "remoteuser_error_backend_disabled"},
"remote_user_header_missing": {"error_name": "remoteuser_error_remote_user_header_missing"},
# TODO: Improve the config error page for OIDC.
"oidc": {"error_name": "oidc_error"},
}
return render(request, "zerver/config_error.html", contexts[error_category_name])
| StarcoderdataPython |
3403975 | <filename>NPTFit/set_dirs.py
###############################################################################
# set_dirs.py
###############################################################################
#
# Define and create the directories required for the scan.
#
###############################################################################
import os
class SetDirs: # pragma: no cover
def __init__(self, tag='Untagged', work_dir=None):
""" :param tag: label associated with the scan
:param work_dir: location where all output from the run is stored
"""
self.tag = tag
        # If unset, work_dir defaults to the current working directory
if work_dir is None:
self.work_dir = os.getcwd() + '/'
else:
self.work_dir = work_dir
        # Chains directories, where the output of the scan is stored
self.chains_base_dir = self.work_dir + 'chains/'
self.chains_dir = self.chains_base_dir + self.tag + '/'
self.make_dirs([self.chains_base_dir, self.chains_dir])
def make_dirs_for_run(self, run_tag=None):
""" Set up the directory for the run itself, called at the time of
perform_scan
"""
# If no run_tag specified, write directly into the chains directory
if run_tag is None:
self.chains_dir_for_run = self.chains_dir
else:
self.chains_dir_for_run = self.chains_dir + run_tag + '/'
self.make_dirs([self.chains_dir_for_run])
@staticmethod
def make_dirs(dirs):
""" Creates directories if they do not already exist
"""
for d in dirs:
if not os.path.exists(d):
try:
os.mkdir(d)
except OSError as e:
if e.errno != 17:
raise
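# --- Editor's usage sketch (hypothetical paths). make_dirs uses os.mkdir,
# --- so the work_dir itself must already exist; we create it first.
if __name__ == "__main__":  # pragma: no cover
    os.makedirs('/tmp/nptfit', exist_ok=True)
    sd = SetDirs(tag='example_scan', work_dir='/tmp/nptfit/')
    sd.make_dirs_for_run(run_tag='seed0')
    # -> /tmp/nptfit/chains/example_scan/seed0/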
| StarcoderdataPython |
11281013 | """This module contains the general information for LstorageDasScsiLun ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class LstorageDasScsiLunConsts:
ADMIN_STATE_OFFLINE = "offline"
ADMIN_STATE_ONLINE = "online"
ADMIN_STATE_UNDEPLOYED = "undeployed"
AUTO_DEPLOY_AUTO_DEPLOY = "auto-deploy"
AUTO_DEPLOY_NO_AUTO_DEPLOY = "no-auto-deploy"
BOOT_DEV_DISABLED = "disabled"
BOOT_DEV_ENABLED = "enabled"
CONFIG_STATE_APPLIED = "applied"
CONFIG_STATE_APPLYING = "applying"
CONFIG_STATE_FAILED_TO_APPLY = "failed-to-apply"
CONFIG_STATE_OK = "ok"
DEFERRED_NAMING_FALSE = "false"
DEFERRED_NAMING_NO = "no"
DEFERRED_NAMING_TRUE = "true"
DEFERRED_NAMING_YES = "yes"
EXPAND_TO_AVAIL_FALSE = "false"
EXPAND_TO_AVAIL_NO = "no"
EXPAND_TO_AVAIL_TRUE = "true"
EXPAND_TO_AVAIL_YES = "yes"
LUN_MAP_TYPE_NON_SHARED = "non-shared"
LUN_MAP_TYPE_SHARED = "shared"
LUN_MAP_TYPE_UNASSIGNED = "unassigned"
OPER_STATE_COMPUTE_DEGRADED = "compute-degraded"
OPER_STATE_COMPUTE_INOPERABLE = "compute-inoperable"
OPER_STATE_OFFLINE = "offline"
OPER_STATE_ONLINE = "online"
OPER_STATE_UNDEFINED = "undefined"
ORDER_NOT_APPLICABLE = "not-applicable"
SIZE_UNSPECIFIED = "unspecified"
STORAGE_CLASS_DAS = "das"
STORAGE_CLASS_SAN = "san"
class LstorageDasScsiLun(ManagedObject):
"""This is LstorageDasScsiLun class."""
consts = LstorageDasScsiLunConsts()
naming_props = set([u'name'])
mo_meta = MoMeta("LstorageDasScsiLun", "lstorageDasScsiLun", "das-scsi-lun-[name]", VersionMeta.Version224b, "InputOutput", 0x7fff, [], ["admin", "ls-compute", "ls-config", "ls-server", "ls-storage", "ls-storage-policy"], [u'lstorageProfile', u'lstorageProfileDef'], [u'faultInst', u'storageLocalDiskConfigDef'], ["Add", "Get", "Remove", "Set"])
prop_meta = {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["offline", "online", "undeployed"], []),
"auto_deploy": MoPropertyMeta("auto_deploy", "autoDeploy", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x4, None, None, None, ["auto-deploy", "no-auto-deploy"], []),
"boot_dev": MoPropertyMeta("boot_dev", "bootDev", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["disabled", "enabled"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version224b, MoPropertyMeta.INTERNAL, 0x8, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"config_qualifier": MoPropertyMeta("config_qualifier", "configQualifier", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|identity-assignment|unsupported-storage-capability|lun-id-conflict|missing-firmware-image|lun-capacity-exceeded|insufficient-lun-resources|lun-limit-exceeded|lun-ownership-conflict|storage-unavailable),){0,10}(defaultValue|not-applicable|identity-assignment|unsupported-storage-capability|lun-id-conflict|missing-firmware-image|lun-capacity-exceeded|insufficient-lun-resources|lun-limit-exceeded|lun-ownership-conflict|storage-unavailable){0,1}""", [], []),
"config_state": MoPropertyMeta("config_state", "configState", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["applied", "applying", "failed-to-apply", "ok"], []),
"deferred_naming": MoPropertyMeta("deferred_naming", "deferredNaming", "string", VersionMeta.Version251a, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["false", "no", "true", "yes"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, 0x20, 0, 256, None, [], []),
"expand_to_avail": MoPropertyMeta("expand_to_avail", "expandToAvail", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, ["false", "no", "true", "yes"], []),
"fractional_size": MoPropertyMeta("fractional_size", "fractionalSize", "ulong", VersionMeta.Version227b, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, [], ["0-1023"]),
"local_disk_policy_name": MoPropertyMeta("local_disk_policy_name", "localDiskPolicyName", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x100, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
"lun_dn": MoPropertyMeta("lun_dn", "lunDn", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"lun_map_type": MoPropertyMeta("lun_map_type", "lunMapType", "string", VersionMeta.Version251a, MoPropertyMeta.READ_WRITE, 0x200, None, None, None, ["non-shared", "shared", "unassigned"], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version224b, MoPropertyMeta.NAMING, 0x400, None, None, r"""[\-\.:_a-zA-Z0-9]{1,10}""", [], []),
"oper_local_disk_policy_name": MoPropertyMeta("oper_local_disk_policy_name", "operLocalDiskPolicyName", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []),
"oper_state": MoPropertyMeta("oper_state", "operState", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["compute-degraded", "compute-inoperable", "offline", "online", "undefined"], []),
"order": MoPropertyMeta("order", "order", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x800, None, None, None, ["not-applicable"], ["0-64"]),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, 0x1000, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"size": MoPropertyMeta("size", "size", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x2000, None, None, None, ["unspecified"], ["0-245760"]),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version224b, MoPropertyMeta.READ_WRITE, 0x4000, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
"storage_class": MoPropertyMeta("storage_class", "storageClass", "string", VersionMeta.Version224b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["das", "san"], []),
}
prop_map = {
"adminState": "admin_state",
"autoDeploy": "auto_deploy",
"bootDev": "boot_dev",
"childAction": "child_action",
"configQualifier": "config_qualifier",
"configState": "config_state",
"deferredNaming": "deferred_naming",
"dn": "dn",
"expandToAvail": "expand_to_avail",
"fractionalSize": "fractional_size",
"localDiskPolicyName": "local_disk_policy_name",
"lunDn": "lun_dn",
"lunMapType": "lun_map_type",
"name": "name",
"operLocalDiskPolicyName": "oper_local_disk_policy_name",
"operState": "oper_state",
"order": "order",
"rn": "rn",
"sacl": "sacl",
"size": "size",
"status": "status",
"storageClass": "storage_class",
}
def __init__(self, parent_mo_or_dn, name, **kwargs):
self._dirty_mask = 0
self.name = name
self.admin_state = None
self.auto_deploy = None
self.boot_dev = None
self.child_action = None
self.config_qualifier = None
self.config_state = None
self.deferred_naming = None
self.expand_to_avail = None
self.fractional_size = None
self.local_disk_policy_name = None
self.lun_dn = None
self.lun_map_type = None
self.oper_local_disk_policy_name = None
self.oper_state = None
self.order = None
self.sacl = None
self.size = None
self.status = None
self.storage_class = None
ManagedObject.__init__(self, "LstorageDasScsiLun", parent_mo_or_dn, **kwargs)
| StarcoderdataPython |
268221 | <filename>geg.py
#! /usr/bin/env python
def fitness_function(bin_string):
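    # Decode the candidate bitstring into an arithmetic expression and score it:
    # an exact match gets infinite fitness, otherwise the inverse distance to the target.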
from expression import eval_bin_string
val = eval_bin_string(bin_string)
if val == required_int:
ret = float("inf")
else:
ret = 1 / abs(float(val - required_int))
return ret
if __name__ == '__main__':
import genetic
from expression import eval_bin_string, conv_to_expression
global required_int
required_int = int(raw_input('Input integer: '))
genetic.initialize(50, 4, 100, fitness_function, 0.7, 0.001)
while True:
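        # Evolve until the fittest chromosome decodes exactly to the target integer.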
fittest = genetic.get_fittest(5)
print "Most fittest:",
for v in fittest:
print eval_bin_string(v),
print
if eval_bin_string(fittest[0]) == required_int:
break
genetic.next_generation()
print "Found expression: %s" % conv_to_expression(genetic.get_fittest(1)[0])
| StarcoderdataPython |
5139077 | <filename>tests/test_metrics.py<gh_stars>1-10
#!/usr/bin/env python
"""Tests for `lesion_metrics` package."""
import builtins
import pathlib
import medio.image as mioi
import pytest
import lesion_metrics.metrics as lmm
import lesion_metrics.typing as lmt
import lesion_metrics.volume as lmv
backends = ["numpy"]
try:
import torch
backends.append("torch")
except (ModuleNotFoundError, ImportError):
pass
@pytest.fixture
def cwd() -> pathlib.Path:
cwd = pathlib.Path.cwd().resolve()
if cwd.name == "tests":
return cwd
cwd = (cwd / "tests").resolve(strict=True)
return cwd
@pytest.fixture(params=backends)
def backend(request) -> builtins.str: # type: ignore[no-untyped-def]
_backend: str = request.param
return _backend
@pytest.fixture
def pred_filename(cwd: pathlib.Path) -> pathlib.Path:
return cwd / "test_data" / "pred" / "pred.nii.gz"
@pytest.fixture
def pred(backend: builtins.str, pred_filename: pathlib.Path) -> lmt.Label:
pred_data: lmt.Label = mioi.Image.from_path(pred_filename)
if backend == "torch":
pred_data = torch.from_numpy(pred_data) # type: ignore[assignment]
return pred_data
@pytest.fixture
def truth_filename(cwd: pathlib.Path) -> pathlib.Path:
return cwd / "test_data" / "truth" / "truth.nii.gz"
@pytest.fixture
def truth(backend: builtins.str, truth_filename: pathlib.Path) -> lmt.Label:
truth_data: lmt.Label = mioi.Image.from_path(truth_filename)
if backend == "torch":
truth_data = torch.from_numpy(truth_data) # type: ignore[assignment]
return truth_data
def test_dice(pred: lmt.Label, truth: lmt.Label) -> None:
dice_coef = lmm.dice(pred, truth)
correct = 2 * (3 / ((8 + 1 + 1) + (2 + 1 + 1)))
assert dice_coef == correct
def test_jaccard(pred: lmt.Label, truth: lmt.Label) -> None:
jaccard_idx = lmm.jaccard(pred, truth)
correct = 3 / ((8 + 1 + 1) + 1)
assert jaccard_idx == correct
def test_ppv(pred: lmt.Label, truth: lmt.Label) -> None:
ppv_score = lmm.ppv(pred, truth)
correct = 3 / (2 + 1 + 1)
assert ppv_score == correct
def test_tpr(pred: lmt.Label, truth: lmt.Label) -> None:
tpr_score = lmm.tpr(pred, truth)
correct = 3 / (8 + 1 + 1)
assert tpr_score == correct
def test_lfdr(pred: lmt.Label, truth: lmt.Label) -> None:
lfpr_score = lmm.lfdr(pred, truth)
correct = 1 / 3
assert lfpr_score == correct
def test_ltpr(pred: lmt.Label, truth: lmt.Label) -> None:
ltpr_score = lmm.ltpr(pred, truth)
correct = 2 / 3
assert ltpr_score == correct
def test_avd(pred: lmt.Label, truth: lmt.Label) -> None:
avd_score = lmm.avd(pred, truth)
correct = 0.6
assert avd_score == correct
def test_corr(pred: lmt.Label, truth: lmt.Label) -> None:
ps = pred.sum()
ts = truth.sum()
eps = 0.1
pred_vols = [ps, ps + eps, ps - eps]
truth_vols = [ts, ts + eps, ts - eps]
corr_score = lmm.corr(pred_vols, truth_vols)
correct = 1.0
assert pytest.approx(corr_score, 1e-3) == correct
def test_isbi15_score(pred: lmt.Label, truth: lmt.Label) -> None:
isbi15 = lmm.isbi15_score(pred, truth)
correct = 0.6408730158730158
assert isbi15 == pytest.approx(correct, 1e-3)
def test_segmentation_volume(pred_filename: pathlib.Path) -> None:
sv = lmv.SegmentationVolume.from_filename(pred_filename)
vol = sv.volume()
assert vol == 4.0
@pytest.mark.skip("Not implemented.")
def test_assd(pred: lmt.Label, truth: lmt.Label) -> None:
pass
| StarcoderdataPython |
12846976 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#Author: <NAME>
#Email: <EMAIL>
#For licensing see the LICENSE file in the top level directory.
import unittest, os, sys, base64, itertools, random, time, copy
import collections
from random import randint, seed, shuffle
from zss import compare
from zss.test_tree import Node
seed(os.urandom(15))
def test_empty_tree_distance():
assert compare.distance(Node(''), Node('')) == 0
assert compare.distance(Node('a'), Node('')) == 1
assert compare.distance(Node(''), Node('b')) == 1
def test_paper_tree():
A = (
Node("f")
.addkid(Node("d")
.addkid(Node("a"))
.addkid(Node("c")
.addkid(Node("b"))
)
)
.addkid(Node("e"))
)
B = (
Node("f")
.addkid(Node("c")
.addkid(Node("d")
.addkid(Node("a"))
.addkid(Node("b"))
)
)
.addkid(Node("e"))
)
#print A
#print
#print B
dist = compare.distance(A,B)
assert dist == 2
def test_simplelabelchange():
A = (
Node("f")
.addkid(Node("a")
.addkid(Node("h"))
.addkid(Node("c")
.addkid(Node("l"))))
.addkid(Node("e"))
)
B = (
Node("f")
.addkid(Node("a")
.addkid(Node("d"))
.addkid(Node("r")
.addkid(Node("b"))))
.addkid(Node("e"))
)
dist = compare.distance(A,B)
print dist
assert dist == 3
#print 'distance', d
def test_incorrect_behavior_regression():
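    # Regression guard: the minimal edit script moves x up one level, so the
    # distance is 2; the test name suggests an earlier version mis-computed this.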
A = (
Node("a")
.addkid(Node("b")
.addkid(Node("x"))
.addkid(Node("y"))
)
)
B = (
Node("a")
.addkid(Node("x"))
.addkid(Node("b")
.addkid(Node("y"))
)
)
dist = compare.distance(A, B)
print dist
assert dist == 2
| StarcoderdataPython |
1827201 | <filename>general/slicing.py
import string
a_str = "Monty Python's Flying Circus"
print "example string:", a_str
#last nth element: seq[-n]
print "last element:", a_str[-1]
print "second to last element", a_str[-2]
#nth element through end: seq[(n-1):]
print "third element through end:", a_str[2:]
#last n elements: seq[-n:]
print "last three elements:", a_str[-3:]
#n elements from beginning: seq[:n]
print "first three elements:", a_str[:3]
#from beginning until(not including) last nth element: seq[:-n]
print "beginning until(not including) third from last element:", a_str[:-3]
#step = -1 reverses sequence
print "reversed:", a_str[::-1]
#step of 2
print "every other element:", a_str[::2]
#elements m through n inclusive: seq[(m-1):n]
print "2nd through 4th inclusive:", a_str[1:4]
#mth to nth to last inclusive: seq[(m-1):-(n-1)]
print "3rd to 2nd to last inclusive:", a_str[2:-1]
#mth to last to nth to last inclusive (wow that sounds awful): seq[-m:-(n-1)]
print "4th to last to 2nd to last inclusive:", a_str[-4:-1]
#naming slices
# https://docs.python.org/2/library/functions.html#slice
#slice(stop)
#slice(start, stop[, step])
first_three_reversed = slice(2, None, -1)
last_three_reversed = slice(None, -4, -1)
print "first three elements reversed:", a_str[first_three_reversed]
print "last three elements reversed:", a_str[last_three_reversed]
#slicing assignment
# insert list items into another list
a_list = ['and', 'something', 'completely', 'different']
a_list[1:1] = ['now', 'for']
print a_list
# replace items in list
a_list = ['and', 'now', 'for', 'everything', 'somewhat', 'different']
a_list[3:5] = ['something', 'completely']
print a_list
#remove adjacent items from a list
a_list = ['and', 'now', 'for', 'something', 'this', 'is', 'an', 'ex-parrot', 'completely', 'different']
#del a_list[4:8]
a_list[4:8] = []
print a_list
#iterate over slice
for c in string.ascii_uppercase[11:16]:
print c,
| StarcoderdataPython |
4967853 |
# coding: utf-8
import QuantLib as ql
QL_USE_INDEXED_COUPON = False
tradeDate = ql.Date(21,5,2009)
ql.Settings.instance().setEvaluationDate(tradeDate)
dep_tenors = [1,2,3,6,9,12]
dep_quotes = [0.003081,0.005525,0.007163,0.012413,0.014,0.015488]
isdaRateHelpers = [ql.DepositRateHelper(dep_quotes[i],
dep_tenors[i]*ql.Period(ql.Monthly),
2,ql.WeekendsOnly(),
ql.ModifiedFollowing,
False,ql.Actual360())
for i in range(len(dep_tenors))]
swap_tenors = [2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 15, 20, 25, 30]
swap_quotes = [0.011907,
0.01699,
0.021198,
0.02444,
0.026937,
0.028967,
0.030504,
0.031719,
0.03279,
0.034535,
0.036217,
0.036981,
0.037246,
0.037605]
isda_ibor = ql.IborIndex('IsdaIbor',3*ql.Period(ql.Monthly),2,
ql.USDCurrency(),ql.WeekendsOnly(),
ql.ModifiedFollowing,False,ql.Actual360())
isdaRateHelpers = isdaRateHelpers + [
ql.SwapRateHelper(swap_quotes[i],swap_tenors[i]*ql.Period(ql.Annual),
ql.WeekendsOnly(),ql.Semiannual,ql.ModifiedFollowing,
ql.Thirty360(),isda_ibor)
for i in range(len(swap_tenors))]
discountCurve = ql.RelinkableYieldTermStructureHandle()
discountCurve.linkTo(ql.PiecewiseLogLinearDiscount(0,ql.WeekendsOnly(),
isdaRateHelpers,
ql.Actual365Fixed()))
probabilityCurve = ql.RelinkableDefaultProbabilityTermStructureHandle()
termDates = [ql.Date(20, 6, 2010),
ql.Date(20, 6, 2011),
ql.Date(20, 6, 2012),
ql.Date(20, 6, 2016),
ql.Date(20, 6, 2019)]
spreads = [0.001, 0.1]
recoveries = [0.2, 0.4]
markitValues = [97798.29358, #0.001
97776.11889, #0.001
-914971.5977, #0.1
-894985.6298, #0.1
186921.3594, #0.001
186839.8148, #0.001
-1646623.672, #0.1
-1579803.626, #0.1
274298.9203,
274122.4725,
-2279730.93,
-2147972.527,
592420.2297,
591571.2294,
-3993550.206,
-3545843.418,
797501.1422,
795915.9787,
-4702034.688,
-4042340.999]
if not QL_USE_INDEXED_COUPON:
tolerance = 1.0e-6
else:
# The risk-free curve is a bit off. We might skip the tests
# altogether and rely on running them with indexed coupons
    # disabled, but leaving them can be useful anyway.
tolerance = 1.0e-3
l = 0
distance = 0
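# For every maturity/spread/recovery combination: imply a flat hazard rate from
# the quoted trade, reprice the conventional 100bp-coupon trade with the ISDA
# engine, and compare its upfront against the Markit reference value.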
for i in range(0,len(termDates)):
for j in range(0,2):
for k in range(0,2):
cdsSchedule = ql.Schedule(tradeDate+1,termDates[i],
3*ql.Period(ql.Monthly),
ql.WeekendsOnly(),
ql.Following,ql.Unadjusted,
ql.DateGeneration.CDS,False)
quotedTrade = ql.CreditDefaultSwap(
ql.Protection.Buyer,10000000,0,spreads[j],cdsSchedule,
ql.Following,ql.Actual360(),True,True,tradeDate+1,
ql.WeekendsOnly().advance(tradeDate,3*ql.Period(ql.Daily)),
ql.FaceValueClaim(), ql.Actual360(True))
h = quotedTrade.impliedHazardRate(0,discountCurve,ql.Actual365Fixed(),
recoveries[k],1e-10,
ql.CreditDefaultSwap.ISDA)
probabilityCurve.linkTo(
ql.FlatHazardRate(0,ql.WeekendsOnly(),
ql.QuoteHandle(ql.SimpleQuote(h)),
ql.Actual365Fixed()))
engine = ql.IsdaCdsEngine(probabilityCurve,recoveries[k],discountCurve)
conventionalTrade = ql.CreditDefaultSwap(
ql.Protection.Buyer,10000000,0,0.01,cdsSchedule,
ql.Following,ql.Actual360(),True,True,tradeDate+1,
ql.WeekendsOnly().advance(tradeDate,3*ql.Period(ql.Daily)),
ql.FaceValueClaim(), ql.Actual360(True))
conventionalTrade.setPricingEngine(engine)
upfront = conventionalTrade.notional() * conventionalTrade.fairUpfront()
print("Hazard:",h)
print("Upfront:",upfront)
print("Distance:",abs(upfront-markitValues[l]))
print("Tolerance:",tolerance)
print(abs(upfront-markitValues[l])<tolerance)
distance = distance + abs(upfront-markitValues[l])
l = l + 1
print('total distance:',distance)
| StarcoderdataPython |
9790216 | from __future__ import absolute_import
import six
from django.db.models import Q
from sentry.api.bases.organization import (OrganizationEndpoint, OrganizationPermission)
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
from sentry.models import OrganizationMember
from sentry.search.utils import tokenize_query
class MemberPermission(OrganizationPermission):
scope_map = {
'GET': ['member:read', 'member:write', 'member:admin'],
'POST': ['member:write', 'member:admin'],
'PUT': ['member:write', 'member:admin'],
'DELETE': ['member:admin'],
}
class OrganizationMemberIndexEndpoint(OrganizationEndpoint):
permission_classes = (MemberPermission, )
def get(self, request, organization):
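        # Keep members whose user is active, plus invited members with no user yet.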
queryset = OrganizationMember.objects.filter(
Q(user__is_active=True) | Q(user__isnull=True),
organization=organization,
).select_related('user').order_by('email', 'user__email')
query = request.GET.get('query')
if query:
tokens = tokenize_query(query)
for key, value in six.iteritems(tokens):
if key == 'email':
queryset = queryset.filter(
Q(user__email__in=value) | Q(user__emails__email__in=value)
)
return self.paginate(
request=request,
queryset=queryset,
on_results=lambda x: serialize(x, request.user),
paginator_cls=OffsetPaginator,
)
| StarcoderdataPython |
11224267 | #!/usr/bin/env python
"""
Copyright 2014 Wordnik, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class WordOfTheDay:
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'id': 'int',
'parentId': 'str',
'category': 'str',
'createdBy': 'str',
'createdAt': 'datetime',
'contentProvider': 'ContentProvider',
'htmlExtra': 'str',
'word': 'str',
'definitions': 'list[SimpleDefinition]',
'examples': 'list[SimpleExample]',
'note': 'str',
'publishDate': 'datetime'
}
self.id = None # int
self.parentId = None # str
self.category = None # str
self.createdBy = None # str
self.createdAt = None # datetime
self.contentProvider = None # ContentProvider
self.htmlExtra = None # str
self.word = None # str
self.definitions = None # list[SimpleDefinition]
self.examples = None # list[SimpleExample]
self.note = None # str
self.publishDate = None # datetime
| StarcoderdataPython |
1925078 | from core.models import Socio
from django.db import models
class File(models.Model):
author: Socio = models.ForeignKey(
'core.Socio', on_delete=models.CASCADE, blank=True, null=True)
title: str = models.CharField(max_length=100, default='Untitled')
content: str = models.TextField()
url = models.URLField(blank=True, null=True)
file = models.FileField(upload_to='files/', blank=True, null=True)
viewers = models.ManyToManyField(
'core.Socio', related_name='files_viwers', blank=True)
likes = models.ManyToManyField(
'core.Socio', related_name='files_likes', blank=True)
posted_at = models.DateTimeField(auto_now_add=True)
is_hidden = models.BooleanField(default=False)
def __str__(self) -> str:
return self.title
def save(self, *args, **kwargs):
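        # Fall back to a fixed Socio (hardcoded username) when no author is given.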
if not self.author:
self.author = Socio.objects.filter(
user__username='22238742').first()
super().save(*args, **kwargs)
| StarcoderdataPython |
219819 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import numpy as np
import pandas as pd
from ..IO import out_results
from ..IO import pkl_data
from ..IO import read_input as rin
def initialize(stat, rslt_data):
# ---------- log
print('\n# ---------- Initialize evolutionary algorithm')
print('# ------ Generation 1')
print('{} structures by random\n'.format(rin.tot_struc))
with open('cryspy.out', 'a') as fout:
        fout.write('\n# ---------- Initialize evolutionary algorithm\n')
fout.write('# ------ Generation 1\n')
fout.write('{} structures by random\n\n'.format(rin.tot_struc))
# ---------- initialize
next_id = 0
id_done = np.array([], dtype=int)
gen = 1
# ------ ea_info
ea_info = pd.DataFrame(columns=['Gen', 'Population',
'Crossover', 'Permutation', 'Strain',
'Random', 'Elite',
'crs_func', 'crs_lat', 'slct_func'])
ea_info.iloc[:, 0:7] = ea_info.iloc[:, 0:7].astype(int)
tmp_info = pd.Series([1, rin.tot_struc, 0, 0, 0, rin.tot_struc, 0,
rin.crs_func, rin.crs_lat, rin.slct_func], index=ea_info.columns)
ea_info = ea_info.append(tmp_info, ignore_index=True)
out_results.out_ea_info(ea_info)
# ------ ea_origin
ea_origin = pd.DataFrame(columns=['Gen', 'Struc_ID', 'Operation', 'Parent'])
ea_origin.iloc[:, 0:2] = ea_origin.iloc[:, 0:2].astype(int)
for cID in range(rin.tot_struc):
tmp_origin = pd.Series([1, cID, 'random', None], index=ea_origin.columns)
ea_origin = ea_origin.append(tmp_origin, ignore_index=True)
# ------ elite
elite_struc = None
elite_fitness = None
# ------ rslt_data
rslt_data['Gen'] = pd.Series(dtype=int)
rslt_data = rslt_data[['Gen', 'Struc_ID', 'Spg_num', 'Spg_sym', 'Spg_num_opt',
'Spg_sym_opt', 'Energy', 'Magmom', 'Opt']]
# ---------- save
ea_id_data = (gen, next_id, id_done)
pkl_data.save_ea_id(ea_id_data)
ea_data = (elite_struc, elite_fitness, ea_info, ea_origin)
pkl_data.save_ea_data(ea_data)
pkl_data.save_rslt(rslt_data)
# ---------- status
stat.set('status', 'generation', '{}'.format(gen))
stat.set('status', 'next_id', '{}'.format(next_id))
with open('cryspy.stat', 'w') as fstat:
stat.write(fstat)
| StarcoderdataPython |
249144 | <gh_stars>10-100
import pytest
import numpy as np
from collections import OrderedDict
import contextlib
import arim.geometry as g
from arim import Probe, ExaminationObject, Material, Time, Frame
import arim.im.das as das
import arim.im.tfm
def _random_uniform(dtype, low=0.0, high=1.0, size=None):
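    # Uniform random array of the requested dtype; complex dtypes get
    # independently drawn real and imaginary parts.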
z = np.zeros(size, dtype)
if np.issubdtype(dtype, np.complexfloating):
z.real = np.random.uniform(low, high, size)
z.imag = np.random.uniform(low, high, size)
elif np.issubdtype(dtype, np.floating):
z[...] = np.random.uniform(low, high, size)
else:
raise NotImplementedError
return z
def make_delay_and_sum_case_random(dtype_float, dtype_data, amplitudes="random"):
locations = g.Points(
        np.array([(0, 0, 0), (1.0, 0, 0), (2.0, 0.0, 0.0)], dtype=np.float64)
)
numelements = len(locations)
frequency = 1e6
probe = Probe(locations, frequency)
# examination object:
vel = 10.0
material = Material(vel)
examination_object = ExaminationObject(material)
# timetraces
# time = Time(start=0.35, step=0.001, num=100)
time = Time(start=0.0, step=0.001, num=100)
    tx = np.array([0, 0, 1, 1, 2, 2], dtype=np.int64)
    rx = np.array([0, 1, 0, 1, 0, 1], dtype=np.int64)
numtimetraces = len(tx)
numpoints = 10
start_lookup = time.start / 2
stop_lookup = (time.end - time.step) / 2
np.random.seed(31031596)
timetraces = _random_uniform(
dtype_data, 100.0, 101.0, size=(numtimetraces, len(time))
)
amplitudes_tx = _random_uniform(dtype_data, 1.0, 1.1, size=(numpoints, numelements))
amplitudes_rx = _random_uniform(
dtype_data, -1.0, -1.1, size=(numpoints, numelements)
)
timetrace_weights = _random_uniform(dtype_data, size=(numtimetraces))
lookup_times_tx = _random_uniform(
dtype_float, start_lookup, stop_lookup, (numpoints, numelements)
)
lookup_times_rx = _random_uniform(
dtype_float, start_lookup, stop_lookup, (numpoints, numelements)
)
if amplitudes == "random":
amplitudes = arim.im.tfm.TxRxAmplitudes(amplitudes_tx, amplitudes_rx)
elif amplitudes == "uniform":
amplitudes = arim.im.tfm.TxRxAmplitudes(
np.ones((numpoints, numelements), dtype_data),
np.ones((numpoints, numelements), dtype_data),
)
elif amplitudes == "none":
amplitudes = None
else:
raise ValueError
# Mess a bit lookup times to get out of bounds values:
# lookup_times_tx[0, 0] = time.start / 2.
# lookup_times_rx[1, 1] = time.end * 2.
focal_law = arim.im.tfm.FocalLaw(
lookup_times_tx, lookup_times_rx, amplitudes, timetrace_weights
)
frame = Frame(timetraces, time, tx, rx, probe, examination_object)
return frame, focal_law
DATATYPES = OrderedDict()
# dtype_float, dtype_data
DATATYPES["f"] = (np.float32, np.float32)
DATATYPES["c"] = (np.float32, np.complex64)
DATATYPES["d"] = (np.float64, np.float64)
DATATYPES["z"] = (np.float64, np.complex128)
# DATATYPES = [
# dict(code='f', dtype_float=np.float32, dtype_data=np.float32),
# dict(code='c', dtype_float=np.float32, dtype_data=np.complex64),
# dict(code='d', dtype_float=np.float64, dtype_data=np.float32),
# dict(code='z', dtype_float=np.float64, dtype_data=np.complex128),
# ]
@pytest.fixture(params=["naive", "numba"])
def das_func(request):
return getattr(das, "delay_and_sum_" + request.param)
@pytest.fixture(params=tuple(DATATYPES.values()), ids=tuple(DATATYPES.keys()))
def datatypes(request):
return request.param
@pytest.fixture(params=(0.0, np.nan), ids=("fillvalue_0", "fillvalue_nan"))
def fillvalue(request):
return request.param
def test_delay_and_sum_all(das_func, datatypes, fillvalue):
dtype_float, dtype_data = datatypes
frame, focal_law = make_delay_and_sum_case_random(dtype_float, dtype_data)
kwargs = dict(frame=frame, focal_law=focal_law, fillvalue=fillvalue)
result = das_func(**kwargs)
assert result.dtype == dtype_data
reference_result = das.delay_and_sum_naive(**kwargs)
# np.testing.assert_almost_equal(result, reference_result)
assert np.allclose(result, reference_result, equal_nan=True)
assert np.sum(np.isfinite(result)) > 0, "all nan!"
assert np.count_nonzero(result) > 0, "all zeros!"
class TestDasDispatcher:
def test_amplitudes_uniform_vs_noamp(self, datatypes):
dtype_float, dtype_data = datatypes
frame, focal_law = make_delay_and_sum_case_random(
dtype_float, dtype_data, amplitudes="uniform"
)
result = das.delay_and_sum(frame, focal_law)
assert result.dtype == dtype_data
reference_result = result.copy()
frame, focal_law = make_delay_and_sum_case_random(
dtype_float, dtype_data, amplitudes="none"
)
result = das.delay_and_sum(frame, focal_law)
assert result.dtype == dtype_data
if dtype_data is np.float32:
np.testing.assert_allclose(
result, reference_result, equal_nan=True, rtol=1e-5
)
else:
np.testing.assert_allclose(result, reference_result, equal_nan=True)
def test_call_das(self, datatypes):
dtype_float, dtype_data = datatypes
frame, focal_law = make_delay_and_sum_case_random(
dtype_float, dtype_data, amplitudes="none"
)
res = das.delay_and_sum(frame, focal_law, fillvalue=0.0)
res = das.delay_and_sum(frame, focal_law, fillvalue=np.nan)
res = das.delay_and_sum(frame, focal_law, interpolation="nearest")
res = das.delay_and_sum(frame, focal_law, interpolation=("nearest",))
res = das.delay_and_sum(frame, focal_law, interpolation="linear")
res = das.delay_and_sum(frame, focal_law, interpolation=("linear",))
res = das.delay_and_sum(frame, focal_law, interpolation=("lanczos", 3))
        if dtype_data == np.complex128:
# If complex, run normally
not_impl_typing = contextlib.nullcontext()
else:
# If not complex, run but expect NotImplementedTyping exception
not_impl_typing = pytest.raises(das.NotImplementedTyping)
with not_impl_typing:
res = das.delay_and_sum(
frame, focal_law, aggregation="median", interpolation="nearest"
)
res = das.delay_and_sum(
frame, focal_law, aggregation="median", interpolation=("lanczos", 3)
)
res = das.delay_and_sum(
frame,
focal_law,
aggregation=("huber", 1.5),
interpolation=("lanczos", 3),
)
frame, focal_law = make_delay_and_sum_case_random(
dtype_float, dtype_data, amplitudes="random"
)
res = das.delay_and_sum(frame, focal_law, fillvalue=0.0)
res = das.delay_and_sum(frame, focal_law, fillvalue=np.nan)
res = das.delay_and_sum(frame, focal_law, interpolation="nearest")
res = das.delay_and_sum(frame, focal_law, interpolation="linear")
| StarcoderdataPython |
3447374 | <reponame>lalusvipi/siptrackweb
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import Context, loader
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from siptracklib.utils import object_by_attribute
import siptracklib.errors
from siptrackweb.views import helpers
from siptrackweb.views import attribute
from siptrackweb.views import config
from siptrackweb.forms import *
def make_device_association_list(network):
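    # For an IPv4/IPv6 network, collect devices linked to it (as associations or
    # references), each with a browsable path for display.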
ret = []
if network.class_name not in ['ipv4 network', 'ipv6 network']:
return ret
for device in network.listAssociations(include = ['device']):
path = helpers.make_browsable_path(device, ['device category', 'device tree'],
include_root = False)
ent = {'obj': device, 'path': path, 'type': 'association'}
ret.append(ent)
for device in network.listReferences(include = ['device']):
path = helpers.make_browsable_path(device, ['device category', 'device tree'],
include_root = False)
ent = {'obj': device, 'path': path, 'type': 'reference'}
ret.append(ent)
return ret
@helpers.authcheck
def display(request, oid):
pm = helpers.PageManager(request, 'stweb/views/networktrees/networks/display.html')
network = pm.object_store.getOID(oid)
network_tree = network.getNetworkTree()
# pm.render_var['network_tree_list'] = network_tree.parent.listChildren(include = ['network tree'])
pm.render_var['browsable_path'] = []
pm.render_var['network_tree'] = network_tree
pm.render_var['network'] = network
pm.render_var['template_list'] = network.listChildren(include = ['device template', 'network template'])
pm.render_var['permission_list'] = network.listChildren(include = ['permission'])
if request.GET.get('display_missing', '') == 'true':
pm.render_var['network_list'] = list(network.listNetworks(include_missing = True))
else:
pm.render_var['network_list'] = list(network.listNetworks())
for n in pm.render_var['network_list']:
n.device_association_list = make_device_association_list(n)
pm.render_var['network_range_list'] = network.listNetworkRanges()
pm.render_var['attribute_list'] = attribute.parse_attributes(network)
pm.render_var['config_list'] = config.parse_config(network)
pm.render_var['device_list'] = network.listReferences(include = ['device', 'device category'])
pm.render_var['template_add_type'] = 'network'
assoc_list = make_device_association_list(network)
pm.render_var['device_association_list'] = assoc_list
pm.path(network)
if pm.tagged_oid and pm.tagged_oid.class_name in ['ipv4 network', 'ipv6 network']:
if network == network_tree:
tagged_network_tree = pm.tagged_oid.getNetworkTree()
if tagged_network_tree != network_tree:
if tagged_network_tree.protocol == network_tree.protocol:
pm.render_var['valid_tag_target'] = True
return pm.render()
@helpers.authcheck
def add(request, parent_oid):
pm = helpers.PageManager(request, 'stweb/generic_form.html')
network = pm.object_store.getOID(parent_oid)
network_tree = network.getNetworkTree()
pm.render_var['network_tree_list'] = network_tree.parent.listChildren(include = ['network tree'])
pm.render_var['network_tree'] = network_tree
pm.render_var['network'] = network
pm.addForm(NetworkAddForm(), '/network/add/post/%s/' % (parent_oid))
pm.path(network)
return pm.render()
@helpers.authcheck
def add_post(request, parent_oid):
pm = helpers.PageManager(request, 'stweb/generic_form.html')
parent = pm.object_store.getOID(parent_oid)
pm.path(parent)
network_tree = parent.getNetworkTree()
pm.render_var['network_tree_list'] = network_tree.parent.listChildren(include = ['network tree'])
pm.render_var['network_tree'] = network_tree
pm.addForm(NetworkAddForm(request.POST), '/network/add/post/%s/' % (parent_oid))
if not pm.form.is_valid():
return pm.error()
network = network_tree.addNetwork(pm.form.cleaned_data['name'].strip())
if len(pm.form.cleaned_data['description']) > 0:
network.attributes['description'] = pm.form.cleaned_data['description']
return pm.redirect('network.display', (network.oid,))
@helpers.authcheck
def delete(request, oid):
pm = helpers.PageManager(request, 'stweb/generic_form.html')
pm.addForm(NetworkDeleteForm(), '/network/delete/post/%s/' % (oid), message='Removing network.')
network = pm.object_store.getOID(oid)
network_tree = network.getNetworkTree()
pm.render_var['network_tree'] = network_tree
pm.render_var['network_tree_list'] = network_tree.parent.listChildren(include = ['network tree'])
pm.render_var['network'] = network
pm.path(network)
return pm.render()
@helpers.authcheck
def delete_post(request, oid):
pm = helpers.PageManager(request, 'stweb/generic_form.html')
pm.addForm(NetworkDeleteForm(request.POST), '/network/delete/post/%s/' % (oid), message='Removing network.')
network = pm.object_store.getOID(oid)
pm.path(network)
network_tree = network.getNetworkTree()
pm.render_var['network_tree'] = network_tree
pm.render_var['network_tree_list'] = network_tree.parent.listChildren(include = ['network tree'])
pm.render_var['network'] = network
if not pm.form.is_valid():
return pm.error('invalid value for recursive')
parent_oid = network.parent.oid
network.delete(pm.form.cleaned_data['recursive'])
return pm.redirect('network.display', (parent_oid,))
| StarcoderdataPython |
1709672 | import secrets
import deal
deal.activate()
from .candidate import Candidate
from .charspace import Charspace
from .constants import MAX_PASSWORD_LENGTH
from .exceptions import DumbValueError
@deal.safe
@deal.has("random")
@deal.pre(
validator=lambda _: _.length <= MAX_PASSWORD_LENGTH,
exception=DumbValueError,
message=f"length cannot be greater than {MAX_PASSWORD_LENGTH}.",
)
@deal.pre(
validator=lambda _: _.min_uppercase
+ _.min_lowercase
+ _.min_digits
+ _.min_specials
<= _.length,
exception=DumbValueError,
message="You cannot request more characters than the password length.",
)
@deal.pre(
validator=lambda _: _.length > 0,
message="length must be greater than zero.",
exception=DumbValueError,
)
@deal.ensure(
lambda _: _.result.uppers >= _.min_uppercase,
message="Not enough uppercase characters in result.",
)
@deal.ensure(
lambda _: _.result.lowers >= _.min_lowercase,
message="Not enough lowercase characters in result.",
)
@deal.ensure(
lambda _: _.result.digits >= _.min_digits,
message="Not enough digit characters in result.",
)
@deal.ensure(
lambda _: _.result.specials >= _.min_specials,
message="Not enough special characters in result.",
)
@deal.ensure(
lambda _: _.allow_repeating or not _.result.has_repeating,
message="Repeating characters are not allowed.",
)
@deal.ensure(
lambda _: len(_.result) == _.length,
message="The returned value len must equal the requested length.",
)
def search(
*,
length: int,
min_uppercase: int,
min_lowercase: int,
min_digits: int,
min_specials: int,
blocklist: str,
allow_repeating: bool,
) -> Candidate:
charspace = Charspace(blocklist=blocklist)
try_password = Candidate("")
while not all(
[
True
and try_password.uppers >= min_uppercase
and try_password.lowers >= min_lowercase
and try_password.digits >= min_digits
and try_password.specials >= min_specials
and (allow_repeating or not try_password.has_repeating)
and len(try_password) == length
]
):
try_password = Candidate(
generate(charset=charspace.charset, length=length)
)
return try_password
@deal.safe
@deal.has("random")
@deal.pre(
validator=lambda _: _.length <= MAX_PASSWORD_LENGTH,
message=f"length cannot be greater than {MAX_PASSWORD_LENGTH}.",
)
@deal.pre(
validator=lambda _: _.length > 0,
message="length must be greater than zero.",
)
@deal.pre(
validator=lambda _: len("".join(_.charset)) > 0,
message="charset must have positive len.",
)
@deal.ensure(
lambda _: len(_.result) == _.length,
message="The returned value len must equal the requested length.",
)
@deal.ensure(
lambda _: all(char in "".join(_.charset) for char in _.result),
message="function return value must be "
"composed of characters in the charset",
)
def generate(*, charset: str, length: int) -> str:
"""Return a cryptographically secure password of len length using
characters only from the given charset."""
return "".join(secrets.choice("".join(charset)) for i in range(length))
| StarcoderdataPython |
4922734 | from setupext import find_namespace_packages, setup
# We cannot directly import matplotlib if `MPLCURSORS` is set because
# `sys.path` is not correctly set yet.
#
# The loading of `matplotlib.figure` does not go through the path entry finder
# because it is a submodule, so we must use a metapath finder instead.
@setup.register_pth_hook("mplcursors.pth")
def _pth_hook():
import os
if os.environ.get("MPLCURSORS"):
from importlib.machinery import PathFinder
import sys
class MplcursorsMetaPathFinder(PathFinder):
def find_spec(self, fullname, path=None, target=None):
spec = super().find_spec(fullname, path, target)
if fullname == "matplotlib.figure":
def exec_module(module):
type(spec.loader).exec_module(spec.loader, module)
# The pth file does not get properly uninstalled from
# a develop install. See pypa/pip#4176.
try:
import mplcursors
except ImportError:
return
import functools
import json
import weakref
# Ensure that when the cursor is removed(), or gets
# GC'd because its referents artists are GC'd, the
# entry also disappears.
cursors = weakref.WeakValueDictionary()
options = json.loads(os.environ["MPLCURSORS"])
@functools.wraps(module.Figure.draw)
def wrapper(self, *args, **kwargs):
rv = wrapper.__wrapped__(self, *args, **kwargs)
if self not in cursors:
cursor = mplcursors.cursor(self, **options)
if cursor.artists:
cursors[self] = cursor
else:
# No artist yet; skip possible
# initialization code.
cursor.remove()
return rv
module.Figure.draw = wrapper
spec.loader.exec_module = exec_module
sys.meta_path.remove(self)
return spec
sys.meta_path.insert(0, MplcursorsMetaPathFinder())
setup(
name="mplcursors",
description="Interactive data selection cursors for Matplotlib.",
long_description=open("README.rst", encoding="utf-8").read(),
author="<NAME>",
url="https://github.com/anntzer/mplcursors",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Matplotlib",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
],
packages=find_namespace_packages("lib"),
package_dir={"": "lib"},
python_requires=">=3.6",
setup_requires=["setuptools_scm"],
use_scm_version=lambda: { # xref __init__.py
"version_scheme": "post-release",
"local_scheme": "node-and-date",
"write_to": "lib/mplcursors/_version.py",
},
install_requires=[
"matplotlib>=3.1",
],
)
| StarcoderdataPython |
1917501 | <reponame>jurajHasik/peps-torch<gh_stars>10-100
import torch
import config as cfg
from ctm.generic.env import ENV
from ctm.generic import rdm
from ctm.pess_kagome import rdm_kagome
from ctm.generic import corrf
from math import sqrt
from numpy import exp
import itertools
def _cast_to_real(t, check=True, imag_eps=1.0e-8):
    if t.is_complex():
        # Optionally verify the imaginary part is negligible before dropping it.
        if check:
            assert abs(t.imag) < imag_eps, "unexpected imaginary part "+str(t.imag)
        return t.real
    return t
# function (n1,n2,n3) --> s that maps the basis of states in the fundamental irrep of SU(3) (states n=0,1,2) for the three sites of the unit cell to a single physical index s=0...26
# NB: the 3 sites are labeled as:
# 1---3
# \ /
# 2
def fmap(n1, n2, n3):
return n3 + 3 * n2 + 9 * n1
# reverse mapping s --> (n1, n2, n3)
def fmap_inv(s):
n1 = s // 9
n2 = (s - 9 * n1) // 3
n3 = s - 9 * n1 - 3 * n2
return (n1, n2, n3)
exchange_bond = torch.zeros((3, 3, 3, 3), dtype=torch.complex128, device=cfg.global_args.device)
for i in range(3):
for j in range(3):
exchange_bond[i, j, j, i] = 1.
exchange_bond_triangle = torch.zeros((3, 3, 3, 3, 3, 3), dtype=torch.complex128, device=cfg.global_args.device)
for i in range(3):
for j in range(3):
for k in range(3):
# 1--2
exchange_bond_triangle[i, j, k, j, i, k] = 1.
# 2--3
exchange_bond_triangle[i, j, k, i, k, j] = 1.
# 3--1
exchange_bond_triangle[i, j, k, k, j, i] = 1.
permute_triangle = torch.zeros((3, 3, 3, 3, 3, 3), dtype=torch.complex128, device=cfg.global_args.device)
permute_triangle_inv = torch.zeros((3, 3, 3, 3, 3, 3), dtype=torch.complex128, device=cfg.global_args.device)
for i in range(3):
for j in range(3):
for k in range(3):
# anticlockwise (direct)
permute_triangle[i, j, k, j, k, i] = 1.
# clockwise (inverse)
permute_triangle_inv[i, j, k, k, i, j] = 1.
# define the matrices associated with the observables \lambda_3 and \lambda_8 for the 3 sites
lambda_3 = torch.tensor([[1., 0., 0.], [0., -1., 0.], [0., 0., 0.]], dtype=torch.complex128, device=cfg.global_args.device)
lambda_8 = 1. / sqrt(3.) * torch.tensor([[1., 0., 0.], [0., 1., 0.], [0., 0., -2.]], dtype=torch.complex128, device=cfg.global_args.device)
lambda_3_1 = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
lambda_3_2 = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
lambda_3_3 = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
lambda_8_1 = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
lambda_8_2 = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
lambda_8_3 = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
for s in range(27):
n1, n2, n3 = fmap_inv(s)
lambda_3_1[s, s] = lambda_3[n1, n1]
lambda_3_2[s, s] = lambda_3[n2, n2]
lambda_3_3[s, s] = lambda_3[n3, n3]
lambda_8_1[s, s] = lambda_8[n1, n1]
lambda_8_2[s, s] = lambda_8[n2, n2]
lambda_8_3[s, s] = lambda_8[n3, n3]
su3_gens= torch.zeros(3,3,8, dtype= torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,0]= torch.tensor([[0., 1., 0.], [1., 0., 0.], [0., 0., 0.]], dtype=torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,1]= torch.tensor([[0., -1.j, 0.], [1.j, 0., 0.], [0., 0., 0.]], dtype=torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,2]= lambda_3
su3_gens[:,:,3]= torch.tensor([[0., 0., 1.], [0., 0., 0.], [1., 0., 0.]], dtype=torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,4]= torch.tensor([[0., 0., -1.j], [0., 0., 0.], [1.j, 0., 0.]], dtype=torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,5]= torch.tensor([[0., 0., 0.], [0., 0., 1.], [0., 1., 0.]], dtype=torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,6]= torch.tensor([[0., 0., 0.], [0., 0., -1.j], [0., 1.j, 0.]], dtype=torch.complex128, device=cfg.global_args.device)
su3_gens[:,:,7]= lambda_8
class SU3_CHIRAL():
def __init__(self, Kr=0., Ki=0., j1=0., j2=0., global_args=cfg.global_args):
self.j1 = j1
self.j2 = j2
self.Kr = Kr
self.Ki = Ki
print('Hamiltonian coupling constants:')
print("Kr = {}".format(Kr))
print("Ki = {}".format(Ki))
print("j1 = {}".format(j1))
print("j2 = {}".format(j2))
self.dtype = global_args.torch_dtype
self.device = global_args.device
self.phys_dim = 3
self.id_downT = torch.eye(27, dtype=self.dtype, device=self.device)
self.h_triangle = (Kr+1j*Ki) * permute_triangle + (Kr-1j*Ki) * permute_triangle_inv + self.j1 * exchange_bond_triangle
self.h_triangle = self.h_triangle.to(self.device)
_tmp_l_labels = ["l3","l8","l3_1","l3_2","l3_3","l8_1","l8_2","l8_3"]
_tmp_l_op= [lambda_3, lambda_8, lambda_3_1, lambda_3_2, lambda_3_3, lambda_8_1, lambda_8_2, lambda_8_3]
self.obs_ops= { l: op.to(self.device) for l,op in zip(_tmp_l_labels, _tmp_l_op)}
self.su3_gens= su3_gens.to(self.device)
# Energy terms
def energy_triangle_dn(self, state, env, force_cpu=False):
e_dn= rdm_kagome.rdm2x2_dn_triangle_with_operator(\
(0, 0), state, env, self.h_triangle, force_cpu=force_cpu)
return _cast_to_real(e_dn)
def energy_triangle_up(self, state, env, force_cpu=False):
rdm_up= rdm_kagome.rdm2x2_up_triangle_open(\
(0, 0), state, env, force_cpu=force_cpu)
e_up= torch.einsum('ijkmno,mnoijk', rdm_up, self.h_triangle )
return _cast_to_real(e_up)
def energy_nnn(self, state, env, force_cpu=False):
if self.j2 == 0:
return 0.
else:
vNNN = self.P_bonds_nnn(state, env, force_cpu=force_cpu)
return(self.j2*(vNNN[0]+vNNN[1]+vNNN[2]+vNNN[3]+vNNN[4]+vNNN[5]))
# Observables
def P_dn(self, state, env, force_cpu=False):
vP_dn= rdm_kagome.rdm2x2_dn_triangle_with_operator((0, 0), state, env,\
operator=permute_triangle, force_cpu=force_cpu)
return vP_dn
def P_up(self, state, env, force_cpu=False):
rdm_up= rdm_kagome.rdm2x2_up_triangle_open((0, 0), state, env, force_cpu=force_cpu)
vP_up= torch.einsum('ijkmno,mnoijk', rdm_up, permute_triangle)
return vP_up
def P_bonds_nnn(self, state, env, force_cpu=False):
norm_wf = rdm_kagome.rdm2x2_dn_triangle_with_operator((0, 0), state, env, \
self.id_downT, force_cpu=force_cpu)
vNNN1_12, vNNN1_31 = rdm_kagome.rdm2x2_nnn_1((0, 0), state, env, operator=exchange_bond, force_cpu=force_cpu)
vNNN2_32, vNNN2_21 = rdm_kagome.rdm2x2_nnn_2((0, 0), state, env, operator=exchange_bond, force_cpu=force_cpu)
vNNN3_31, vNNN3_23 = rdm_kagome.rdm2x2_nnn_3((0, 0), state, env, operator=exchange_bond, force_cpu=force_cpu)
return _cast_to_real(vNNN1_12 / norm_wf), _cast_to_real(vNNN2_21 / norm_wf), \
_cast_to_real(vNNN1_31 / norm_wf), _cast_to_real(vNNN3_31 / norm_wf), \
_cast_to_real(vNNN2_32 / norm_wf), _cast_to_real(vNNN3_23 / norm_wf)
def P_bonds_nn(self, state, env):
id_matrix = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
norm_wf = rdm.rdm1x1((0, 0), state, env, operator=id_matrix)
# bond 2--3
bond_op = torch.zeros((27, 27), dtype=torch.complex128, device=cfg.global_args.device)
for i in range(3):
for j in range(3):
for k in range(3):
bond_op[fmap(i,j,k),fmap(i,k,j)] = 1.
vP_23 = rdm.rdm1x1((0,0), state, env, operator=bond_op) / norm_wf
# bond 1--3
bond_op = torch.zeros((27, 27), dtype=torch.complex128, device=cfg.global_args.device)
for i in range(3):
for j in range(3):
for k in range(3):
bond_op[fmap(i,j,k),fmap(k,j,i)] = 1.
vP_13 = rdm.rdm1x1((0, 0), state, env, operator=bond_op) / norm_wf
# bond 1--2
bond_op = torch.zeros((27, 27), dtype=torch.complex128, device=cfg.global_args.device)
for i in range(3):
for j in range(3):
for k in range(3):
bond_op[fmap(i,j,k),fmap(j,i,k)] = 1.
vP_12 = rdm.rdm1x1((0, 0), state, env, operator=bond_op) / norm_wf
return(torch.real(vP_23), torch.real(vP_13), torch.real(vP_12))
def eval_lambdas(self, state, env):
# computes the expectation value of the SU(3) observables \lambda_3 and \lambda_8 for the three sites of the unit cell
id_matrix = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
norm_wf = rdm.rdm1x1((0, 0), state, env, operator=id_matrix)
color3_1 = rdm.rdm1x1((0, 0), state, env, operator=lambda_3_1) / norm_wf
color3_2 = rdm.rdm1x1((0, 0), state, env, operator=lambda_3_2) / norm_wf
color3_3 = rdm.rdm1x1((0, 0), state, env, operator=lambda_3_3) / norm_wf
color8_1 = rdm.rdm1x1((0, 0), state, env, operator=lambda_8_1) / norm_wf
color8_2 = rdm.rdm1x1((0, 0), state, env, operator=lambda_8_2) / norm_wf
color8_3 = rdm.rdm1x1((0, 0), state, env, operator=lambda_8_3) / norm_wf
return (color3_1, color3_2, color3_3), (color8_1, color8_2, color8_3)
def eval_su3_gens(self, state, env):
id_matrix2 = torch.eye(9, dtype=torch.complex128, device=cfg.global_args.device)
id_matrix2 = id_matrix2.reshape(3,3,3,3)
id_matrix = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
norm_wf = rdm.rdm1x1((0, 0), state, env, operator=id_matrix)
# site 0
l8_ops_0= torch.einsum('ijx,klmn->ikmjlnx', self.su3_gens, id_matrix2).contiguous()
l8_ops_0= l8_ops_0.reshape(27,27,8)
for x in range(8):
l8_x_1x1= rdm.rdm1x1((0, 0), state, env, operator=l8_ops_0[:,:,x]) / norm_wf
print(f"{x} {l8_x_1x1}")
# site 1
l8_ops_1= torch.einsum('ijx,klmn->kimljnx', self.su3_gens, id_matrix2).contiguous()
l8_ops_1= l8_ops_1.reshape(27,27,8)
for x in range(8):
l8_x_1x1= rdm.rdm1x1((0, 0), state, env, operator=l8_ops_1[:,:,x]) / norm_wf
print(f"{x} {l8_x_1x1}")
# site 2
l8_ops_2= torch.einsum('ijx,klmn->kmilnjx', self.su3_gens, id_matrix2).contiguous()
l8_ops_2= l8_ops_2.reshape(27,27,8)
for x in range(8):
l8_x_1x1= rdm.rdm1x1((0, 0), state, env, operator=l8_ops_2[:,:,x]) / norm_wf
print(f"{x} {l8_x_1x1}")
def eval_obs(self,state,env,force_cpu=True):
r"""
:param state: wavefunction
:param env: CTM environment
:type state: IPEPS
:type env: ENV
:return: expectation values of observables, labels of observables
:rtype: list[float], list[str]
"""
selected_ops= ["l3_1","l3_2","l3_3","l8_1","l8_2","l8_3"]
id_matrix = torch.eye(27, dtype=torch.complex128, device=cfg.global_args.device)
norm_wf = rdm.rdm1x1((0, 0), state, env, operator=id_matrix)
obs= {}
with torch.no_grad():
for label in selected_ops:
obs_val= rdm.rdm1x1((0, 0), state, env, operator=self.obs_ops[label]) / norm_wf
obs[f"{label}"]= obs_val #_cast_to_real(obs_val)
# prepare list with labels and values
        return list(obs.values()), list(obs.keys())
| StarcoderdataPython |
3563682 | <reponame>cfergeau/cluster-node-tuning-operator
from . import interfaces
from . import controller
from . import dbus_exporter as dbus
def export(*args, **kwargs):
"""Decorator, use to mark exportable methods."""
def wrapper(method):
method.export_params = [ args, kwargs ]
return method
return wrapper
def signal(*args, **kwargs):
"""Decorator, use to mark exportable signals."""
def wrapper(method):
method.signal_params = [ args, kwargs ]
return method
return wrapper
def register_exporter(instance):
if not isinstance(instance, interfaces.ExporterInterface):
raise Exception()
ctl = controller.ExportsController.get_instance()
return ctl.register_exporter(instance)
def register_object(instance):
if not isinstance(instance, interfaces.ExportableInterface):
raise Exception()
ctl = controller.ExportsController.get_instance()
return ctl.register_object(instance)
def start():
ctl = controller.ExportsController.get_instance()
return ctl.start()
def stop():
ctl = controller.ExportsController.get_instance()
return ctl.stop()
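
# Hedged usage sketch: mark methods with @export, register an exporter and the
# object, then start(). The signature strings, class, and DBusExporter
# constructor arguments below are illustrative assumptions:
#
#   class Status(interfaces.ExportableInterface):
#       @export("s", "s")
#       def echo(self, text):
#           return text
#
#   register_exporter(dbus.DBusExporter("com.example.Bus",
#                                       "com.example.Iface", "/com/example"))
#   register_object(Status())
#   start()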
| StarcoderdataPython |
3367174 | ##-----------------------------------------------------------
## Copyright 2020 Science and Technologies Facilities Council
## Licensed under the MIT License
## Author <NAME>, STFC Hartree Centre
import h5py
import numpy as np
import argparse
pos_x = []
pos_y = []
pos_z = []
cutoff = []
interactions = []
ids = []
count = 0
box_size_x = 3.0
box_size_y = 3.0
box_size_z = 3.0
set_cutoff = 1.75
output_file = ''
seed = 1
def parse_inputs():
global count, output_file, set_cutoff, seed
parser = argparse.ArgumentParser()
parser.add_argument('--output', required=True, type=str, help='Path of the output file')
parser.add_argument('--count', required=True, type=int, help='Number of particles')
parser.add_argument('--seed', required=True, type=int, help='Random seed for the test')
parser = parser.parse_args()
output_file = parser.output
count = parser.count
seed = parser.seed
def create_problem():
    global count, output_file, set_cutoff
    # Seed the RNG with the parsed --seed so problem generation is reproducible.
    np.random.seed(seed)
f = h5py.File(output_file, "w")
dset_posx = f.create_dataset('pos_x', (count,), dtype=np.float64)
dset_posy = f.create_dataset('pos_y', (count,), dtype=np.float64)
dset_posz = f.create_dataset('pos_z', (count,), dtype=np.float64)
dset_cutoff = f.create_dataset('cutoff', (count,), dtype=np.float64)
dset_interactions = f.create_dataset('interactions', (count,), dtype=np.uint32)
dset_ids = f.create_dataset('ids', (count,), dtype=np.int64)
cutoff = np.random.rand(1)
x_vals = np.random.rand(count) * 3.0
y_vals = np.random.rand(count) * 3.0
z_vals = np.random.rand(count) * 0.0
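    # z components are zeroed (the * 0.0 factor), so particles lie in the xy-plane.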
set_cutoff = cutoff[0]
for x in range(count):
dset_posx[x] = x_vals[x]
dset_posy[x] = y_vals[x]
dset_posz[x] = z_vals[x]
for i in range(count):
dset_cutoff[i] = set_cutoff
dset_interactions[i] = 0
dset_ids[i] = i
def main():
parse_inputs()
create_problem()
main()
| StarcoderdataPython |