repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 22 classes) | size (string, 4-7 chars) | content (string, 626-1.05M chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 5.21-99.9) | line_max (int64, 12-999) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
libyal/libyal | yaldevtools/definitions.py | 1 | 1409 |
# -*- coding: utf-8 -*-
"""The definitions."""
from __future__ import unicode_literals
DATA_TYPE_BOOLEAN = 'boolean'
DATA_TYPE_BINARY_DATA = 'binary_data'
DATA_TYPE_DOUBLE = 'double'
DATA_TYPE_FAT_DATE_TIME = 'fat_date_time'
DATA_TYPE_FILETIME = 'filetime'
DATA_TYPE_FLOAT = 'float'
DATA_TYPE_FLOATINGTIME = 'floatingtime'
DATA_TYPE_HFS_TIME = 'hfs_time'
DATA_TYPE_GUID = 'guid'
DATA_TYPE_INT = 'int'
DATA_TYPE_INT32 = 'int32'
DATA_TYPE_NARROW_STRING = 'narrow_string'
DATA_TYPE_NONE = 'none'
DATA_TYPE_OBJECT = 'object'
DATA_TYPE_OFF64 = 'off64'
DATA_TYPE_POSIX_TIME = 'posix_time'
DATA_TYPE_SIZE32 = 'size32'
DATA_TYPE_SIZE64 = 'size64'
DATA_TYPE_STRING = 'string'
DATA_TYPE_UINT8 = 'uint8'
DATA_TYPE_UINT16 = 'uint16'
DATA_TYPE_UINT32 = 'uint32'
DATA_TYPE_UINT64 = 'uint64'
DATA_TYPE_UUID = 'uuid'
FUNCTION_TYPE_CLOSE = 'close'
FUNCTION_TYPE_COPY = 'copy'
FUNCTION_TYPE_COPY_FROM = 'copy_from'
FUNCTION_TYPE_COPY_TO = 'copy_to'
FUNCTION_TYPE_FREE = 'free'
FUNCTION_TYPE_GET = 'get'
FUNCTION_TYPE_GET_BY_INDEX = 'get_by_index'
FUNCTION_TYPE_GET_BY_IDENTIFIER = 'get_by_identifier'
FUNCTION_TYPE_GET_BY_NAME = 'get_by_name'
FUNCTION_TYPE_GET_BY_PATH = 'get_by_path'
FUNCTION_TYPE_INITIALIZE = 'initialize'
FUNCTION_TYPE_IS = 'is'
FUNCTION_TYPE_OPEN = 'open'
FUNCTION_TYPE_READ = 'read'
FUNCTION_TYPE_SEEK = 'seek'
FUNCTION_TYPE_SET = 'set'
FUNCTION_TYPE_UTILITY = 'utility'
FUNCTION_TYPE_WRITE = 'write'
| apache-2.0 | 2,838,674,518,745,749,000 | 27.755102 | 53 | 0.713272 | false |
Ircam-Web/mezzanine-organization | organization/pages/migrations/0014_auto_20161028_1516.py | 1 | 1131 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-10-28 13:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('pages', '0005_auto_20160923_1219'),
('organization-pages', '0013_auto_20161026_1025'),
]
operations = [
migrations.CreateModel(
name='PageRelatedTitle',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(blank=True, max_length=1024, null=True, verbose_name='title')),
('page', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='related_title', to='pages.Page', verbose_name='page')),
],
options={
'verbose_name': 'related title',
},
),
migrations.AlterOrderWithRespectTo(
name='pagerelatedtitle',
order_with_respect_to='page',
),
]
| agpl-3.0 | 5,761,957,583,470,936,000 | 34.34375 | 184 | 0.594164 | false |
fmfn/UnbalancedDataset | imblearn/under_sampling/_prototype_selection/tests/test_instance_hardness_threshold.py | 2 | 3098 |
"""Test the module ."""
# Authors: Guillaume Lemaitre <[email protected]>
# Christos Aridas
# License: MIT
import pytest
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.naive_bayes import GaussianNB as NB
from sklearn.utils._testing import assert_array_equal
from imblearn.under_sampling import InstanceHardnessThreshold
RND_SEED = 0
X = np.array(
[
[-0.3879569, 0.6894251],
[-0.09322739, 1.28177189],
[-0.77740357, 0.74097941],
[0.91542919, -0.65453327],
[-0.03852113, 0.40910479],
[-0.43877303, 1.07366684],
[-0.85795321, 0.82980738],
[-0.18430329, 0.52328473],
[-0.30126957, -0.66268378],
[-0.65571327, 0.42412021],
[-0.28305528, 0.30284991],
[0.20246714, -0.34727125],
[1.06446472, -1.09279772],
[0.30543283, -0.02589502],
[-0.00717161, 0.00318087],
]
)
Y = np.array([0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0])
ESTIMATOR = GradientBoostingClassifier(random_state=RND_SEED)
def test_iht_init():
sampling_strategy = "auto"
iht = InstanceHardnessThreshold(
estimator=ESTIMATOR, sampling_strategy=sampling_strategy, random_state=RND_SEED,
)
assert iht.sampling_strategy == sampling_strategy
assert iht.random_state == RND_SEED
def test_iht_fit_resample():
iht = InstanceHardnessThreshold(estimator=ESTIMATOR, random_state=RND_SEED)
X_resampled, y_resampled = iht.fit_resample(X, Y)
assert X_resampled.shape == (12, 2)
assert y_resampled.shape == (12,)
def test_iht_fit_resample_half():
sampling_strategy = {0: 3, 1: 3}
iht = InstanceHardnessThreshold(
estimator=NB(), sampling_strategy=sampling_strategy, random_state=RND_SEED,
)
X_resampled, y_resampled = iht.fit_resample(X, Y)
assert X_resampled.shape == (6, 2)
assert y_resampled.shape == (6,)
def test_iht_fit_resample_class_obj():
est = GradientBoostingClassifier(random_state=RND_SEED)
iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
X_resampled, y_resampled = iht.fit_resample(X, Y)
assert X_resampled.shape == (12, 2)
assert y_resampled.shape == (12,)
def test_iht_fit_resample_wrong_class_obj():
from sklearn.cluster import KMeans
est = KMeans()
iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
with pytest.raises(ValueError, match="Invalid parameter `estimator`"):
iht.fit_resample(X, Y)
def test_iht_reproducibility():
from sklearn.datasets import load_digits
X_digits, y_digits = load_digits(return_X_y=True)
idx_sampled = []
for seed in range(5):
est = RandomForestClassifier(n_estimators=10, random_state=seed)
iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
iht.fit_resample(X_digits, y_digits)
idx_sampled.append(iht.sample_indices_.copy())
for idx_1, idx_2 in zip(idx_sampled, idx_sampled[1:]):
assert_array_equal(idx_1, idx_2)
| mit | 3,621,761,623,600,403,500 | 31.270833 | 88 | 0.666559 | false |
dewuem/python-bioinf | sliding_window.py | 1 | 2485 |
#!/usr/bin/env python2
# Daniel Elsner
# Creates a sliding window of sequence pieces to blast against
# takes two arguments, input file and window size. Only the first record
# in a fasta will be used, the rest ignored.
from Bio import SeqIO # read sequence files
import sys
import errno  # needed for the EISDIR check in the exception handler below
# read your sequence file
try:
with open(sys.argv[1], "rU") as fileA:
fcontent = list(SeqIO.parse(fileA, "fasta"))
fileA.close()
except IOError as exc: # it will complain if you try to open a directory instead of a file
if exc.errno != errno.EISDIR:
raise
# the function
def sliding_create(seq_id, seq_str, start_pos, window_size):
# defines a function that takes the sequence id of the input query, the
    # actual sequence from the fasta seq record, the start position (from the
    # loop) for your sliding window, and the window size, from a command line
    # parameter
sub_seq = seq_str[start_pos:start_pos + window_size]
# cut a sub sequence from the full length sequence, from the start
# position (of this loop iteration) + the window length
sub_pos_str = str(seq_id + "_POS:_" + str(start_pos) +
":" + str(start_pos + window_size))
# and create a string with the appropriate name so you know which window
# you test
if len(sub_seq) == window_size:
# only report the sub sequence if it is as long as the window size,
# otherwise you get shorter pieces at the end
return ">" + sub_pos_str + "\n" + sub_seq
else:
return None
# main loop begins here
with open(sys.argv[1] + "_sliding_window.fa", "w") as writefile:
# open our output file, same name as the input + the suffix
# _sliding_window.fa
for i in range(len(fcontent[0].seq)):
# for all positions in your sequence
output = sliding_create(fcontent[0].id, fcontent[
0].seq, i, int(sys.argv[2]))
        # get the output of the function; as parameters give the ID of the
        # fasta record, the sequence of the fasta record, the current
        # iteration of the loop (so we shift the window by one per loop),
        # and how long you want your window to be.
if output != None:
# only have output if there is output, otherwise you get lots
# of empty output from the function when the sub sequence is no
# longer long enough. Write it into the output file.
writefile.write(str(output) + "\n")
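# Illustrative invocation (not part of the original script; the input file name
# and window size below are made-up examples):
#
#   python2 sliding_window.py my_sequences.fasta 50
#
# writes my_sequences.fasta_sliding_window.fa containing one record per window,
# named like ">seq1_POS:_0:50", ">seq1_POS:_1:51", ..., each 50 bases long and
# shifted by one position per record.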
| mit | -8,909,885,774,297,362,000 | 39.080645 | 91 | 0.646278 | false |
open-synergy/vertical-association | membership_autoextend/models/product_template.py | 1 | 1383 |
# -*- coding: utf-8 -*-
# © 2016 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import fields, models
class ProductTemplate(models.Model):
_inherit = 'product.template'
membership_autoextend = fields.Boolean('Autoextend membership',
default=False)
membership_autoextend_product_id = fields.Many2one(
'product.template', string='Extension membership',
help='If selected, use this product for extensions. Otherwise, the '
'current membership product will be used.',
domain=[('membership', '=', True)])
membership_autoextend_warning_days = fields.Integer(
'Warn before autoextend', help='The amount of days to send warning '
'email before automatic extension')
membership_autoextend_warning_template_id = fields.Many2one(
'email.template', string='Autoextend warning',
help='This email is sent the selected amount of days before a '
'membership was extended',
domain=[('model_id.model', '=', 'membership.membership_line')])
membership_autoextend_info_template_id = fields.Many2one(
'email.template', string='Autoextend info',
help='This email is sent after a membership was extended',
domain=[('model_id.model', '=', 'membership.membership_line')])
| agpl-3.0 | -2,176,540,284,076,027,100 | 48.357143 | 76 | 0.65919 | false |
matthewkitching/constraint_local_search | constraints/alldiff.py | 1 | 1294 |
from constraints.base_constraint import BaseConstraint
from collections import defaultdict
class AllDifferent(BaseConstraint):
def __init__(self, variables, identifier=None):
'''
AllDifferent is satisfied if every variable is assigned a different
value.
The moves_necessary is equal to the total number of variables that
        have the same value as other variables (ignoring one of the duplicates).
'''
super(AllDifferent, self).__init__(variables, identifier)
self._reinit()
def _reinit(self):
self.score_count = defaultdict(int)
for variable in self.variables:
self.score_count[variable.state] += 1
self.moves_necessary = len(self.variables) - len(self.score_count)
def get_delta_moves_necessary(self, variable, new_value):
delta_old = 0 if self.score_count[variable.state] == 1 else -1
delta_new = 0 if self.score_count.get(new_value, 0) == 0 else 1
return delta_new + delta_old
def make_move(self, variable, new_value):
        # compute the delta against the pre-move counts, before they are
        # mutated; otherwise the reported score drifts away from the true value
        delta_moves = self.get_delta_moves_necessary(variable, new_value)
        self.score_count[variable.state] -= 1
        self.score_count[new_value] += 1
        self.moves_necessary += delta_moves
return delta_moves
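# Illustrative usage sketch (not part of the original module). It assumes a
# hypothetical variable object exposing a mutable `state` attribute, and that
# BaseConstraint simply stores the variables passed to it:
#
#   class Var(object):
#       def __init__(self, state):
#           self.state = state
#
#   a, b, c = Var(1), Var(1), Var(2)
#   constraint = AllDifferent([a, b, c])
#   assert constraint.moves_necessary == 1                    # a and b clash on 1
#   assert constraint.get_delta_moves_necessary(b, 3) == -1   # moving b to 3 resolves it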
| gpl-2.0 | -7,278,428,690,419,748,000 | 39.4375 | 78 | 0.659969 | false |
noba3/KoTos | addons/plugin.video.mediathek/mediathek/kika.py | 1 | 6715 |
# -*- coding: utf-8 -*-
#-------------LicenseHeader--------------
# plugin.video.Mediathek - Gives access to most video-platforms from German public service broadcasters
# Copyright (C) 2010 Raptor 2101 [[email protected]]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re, time;
from mediathek import *
class KIKA(Mediathek):
def __init__(self, simpleXbmcGui):
self.gui = simpleXbmcGui;
self.rootLink = "http://www.kika.de";
self.menuTree = (
TreeNode("0","Videos",self.rootLink+"/videos/index.html",True),
TreeNode("1","Sendungen von A-Z","",False,
(
TreeNode("1.0","A",self.rootLink+"/sendungen/sendungenabisz100_page-A_zc-05fb1331.html",True),
TreeNode("1.1","B",self.rootLink+"/sendungen/sendungenabisz100_page-B_zc-1775e6d8.html",True),
TreeNode("1.2","C",self.rootLink+"/sendungen/sendungenabisz100_page-C_zc-6248eba0.html",True),
TreeNode("1.3","D",self.rootLink+"/sendungen/sendungenabisz100_page-D_zc-e090a8fb.html",True),
TreeNode("1.4","E",self.rootLink+"/sendungen/sendungenabisz100_page-E_zc-ec2376ed.html",True),
TreeNode("1.5","F",self.rootLink+"/sendungen/sendungenabisz100_page-F_zc-f76734a0.html",True),
TreeNode("1.6","G",self.rootLink+"/sendungen/sendungenabisz100_page-G_zc-34bda7c3.html",True),
TreeNode("1.7","H",self.rootLink+"/sendungen/sendungenabisz100_page-H_zc-7e25e70a.html",True),
TreeNode("1.8","I",self.rootLink+"/sendungen/sendungenabisz100_page-I_zc-b7f774f5.html",True),
TreeNode("1.9","J",self.rootLink+"/sendungen/sendungenabisz100_page-J_zc-3130680a.html",True),
TreeNode("1.10","K",self.rootLink+"/sendungen/sendungenabisz100_page-K_zc-c8f76ba1.html",True),
TreeNode("1.11","L",self.rootLink+"/sendungen/sendungenabisz100_page-L_zc-bbebc1a7.html",True),
TreeNode("1.12","M",self.rootLink+"/sendungen/sendungenabisz100_page-M_zc-00574a43.html",True),
TreeNode("1.13","N",self.rootLink+"/sendungen/sendungenabisz100_page-N_zc-b079366f.html",True),
TreeNode("1.14","O",self.rootLink+"/sendungen/sendungenabisz100_page-O_zc-febc55f5.html",True),
TreeNode("1.15","P",self.rootLink+"/sendungen/sendungenabisz100_page-P_zc-2c1a492f.html",True),
TreeNode("1.16","Q",self.rootLink+"/sendungen/sendungenabisz100_page-Q_zc-2cb019d6.html",True),
TreeNode("1.17","R",self.rootLink+"/sendungen/sendungenabisz100_page-R_zc-cab3e22b.html",True),
TreeNode("1.18","S",self.rootLink+"/sendungen/sendungenabisz100_page-S_zc-e7f420d0.html",True),
TreeNode("1.19","T",self.rootLink+"/sendungen/sendungenabisz100_page-T_zc-84a2709f.html",True),
TreeNode("1.20","U",self.rootLink+"/sendungen/sendungenabisz100_page-U_zc-a26c1157.html",True),
TreeNode("1.21","V",self.rootLink+"/sendungen/sendungenabisz100_page-V_zc-1fc26dc3.html",True),
TreeNode("1.22","W",self.rootLink+"/sendungen/sendungenabisz100_page-W_zc-25c5c777.html",True),
TreeNode("1.23","Y",self.rootLink+"/sendungen/sendungenabisz100_page-Y_zc-388beba7.html",True),
TreeNode("1.24","Z",self.rootLink+"/sendungen/sendungenabisz100_page-Z_zc-e744950d.html",True),
TreeNode("1.25","...",self.rootLink+"/sendungen/sendungenabisz100_page-1_zc-43c28d56.html",True)
)
)
)
self.regex_videoPages=re.compile("<a href=\"(.*?sendereihe\\d+.html)\" class=\"linkAll\" title=\"(.*?)\">");
self.regex_videoLinks=re.compile("<a href=\"(.*?/sendungen/videos/video\\d+?)\\.html\"");
self.regex_xml_title=re.compile("<title>(.*?)</title>");
self.regex_xml_image=re.compile("<teaserimage>\\s*?<url>(.*?)</url>");
self.regex_xml_videoLink=re.compile("<asset>\\s*?<profileName>(.*?)</profileName>.*?<progressiveDownloadUrl>(.*?)</progressiveDownloadUrl>\\s*?</asset>",re.DOTALL)
self.regex_videoLink=re.compile("rtmp://.*?\.mp4");
@classmethod
def name(self):
return "KI.KA";
def isSearchable(self):
return False;
def searchVideo(self, searchText):
return;
def buildVideoLink(self,pageLink):
xmlPage = self.loadPage(self.rootLink+pageLink);
title = unicode(self.regex_xml_title.search(xmlPage).group(1),"UTF-8");
image = self.regex_xml_image.search(xmlPage).group(1).replace("**aspectRatio**","tlarge169").replace("**width**","1472");
self.gui.log("%s %s"%(title,image));
links = {};
for match in self.regex_xml_videoLink.finditer(xmlPage):
profile = match.group(1);
directLink = match.group(2);
self.gui.log("%s %s"%(profile,directLink));
if("MP4 Web S" in profile):
links[0] = SimpleLink(directLink, 0);
if("MP4 Web L" in profile):
links[1] = SimpleLink(directLink, 0);
if("MP4 Web L+" in profile):
links[2] = SimpleLink(directLink, 0);
if("MP4 Web XL" in profile):
links[3] = SimpleLink(directLink, 0);
return DisplayObject(title,"",image,"",links,True, None);
def buildPageMenu(self, link, initCount):
mainPage = self.loadPage(link);
videoLinks = list(self.regex_videoLinks.finditer(mainPage));
count = initCount + len(videoLinks)
if(len(videoLinks) > 0):
for match in videoLinks:
link=match.group(1)+"-avCustom.xml";
displayObject = self.buildVideoLink(link);
self.gui.buildVideoLink(displayObject,self, count);
else:
videoPages = list(self.regex_videoPages.finditer(mainPage));
count = initCount + len(videoPages)
for match in videoPages:
link=match.group(1);
if(not link.startswith(self.rootLink)):
link = self.rootLink+link;
subPage = self.loadPage(link);
linkFound = self.regex_videoLinks.search(subPage)
if(linkFound):
title = unicode(match.group(2),"UTF-8");
displayObject = DisplayObject(title,"",None,"",link,False, None);
self.gui.buildVideoLink(displayObject,self, count);
| gpl-2.0 | -5,218,597,951,839,051,000 | 51.46875 | 167 | 0.651378 | false |
lkundrak/scraperwiki | services/jdatastore/datalib.py | 1 | 22102 |
from twisted.python import log
import ConfigParser
import hashlib
import types
import os, string, re, sys
import time, datetime
import sqlite3
import base64
import shutil
import logging
import urllib
import traceback
import json
import csv
import StringIO
from twisted.internet import reactor
logger = None # filled in by dataproxy
ninstructions_progresshandler = 10000000 # about 0.4secs on Julian's laptop
resourcedir = None # filled in by dataproxy
attachauthurl = None # filled in by dataproxy
def authorizer_readonly(action_code, tname, cname, sql_location, trigger):
#logger.debug("authorizer_readonly: %s, %s, %s, %s, %s" % (action_code, tname, cname, sql_location, trigger))
readonlyops = [ sqlite3.SQLITE_SELECT, sqlite3.SQLITE_READ, sqlite3.SQLITE_DETACH, 31 ] # 31=SQLITE_FUNCTION missing from library. codes: http://www.sqlite.org/c3ref/c_alter_table.html
if action_code in readonlyops:
return sqlite3.SQLITE_OK
if action_code == sqlite3.SQLITE_PRAGMA:
if tname in ["table_info", "index_list", "index_info", "page_size"]:
return sqlite3.SQLITE_OK
# SQLite FTS (full text search) requires this permission even when reading, and
# this doesn't let ordinary queries alter sqlite_master because of PRAGMA writable_schema
if action_code == sqlite3.SQLITE_UPDATE and tname == "sqlite_master":
return sqlite3.SQLITE_OK
return sqlite3.SQLITE_DENY
def authorizer_attaching(action_code, tname, cname, sql_location, trigger):
#print "authorizer_attaching", (action_code, tname, cname, sql_location, trigger)
if action_code == sqlite3.SQLITE_ATTACH:
return sqlite3.SQLITE_OK
return authorizer_readonly(action_code, tname, cname, sql_location, trigger)
def authorizer_writemain(action_code, tname, cname, sql_location, trigger):
#logger.debug("authorizer_writemain: %s, %s, %s, %s, %s" % (action_code, tname, cname, sql_location, trigger))
if sql_location == None or sql_location == 'main':
return sqlite3.SQLITE_OK
return authorizer_readonly(action_code, tname, cname, sql_location, trigger)
#http://twistedmatrix.com/documents/current/core/howto/producers.html
class SQLiteDatabase(object):
def __init__(self, short_name, short_name_dbreadonly, attachlist):
self.Dclientnumber = -1
self.short_name = short_name
self.short_name_dbreadonly = short_name_dbreadonly
self.attachlist = attachlist # to be used to drive the attaches on setup
self.attached = { } # name => [ asname1, ... ] list
self.m_sqlitedbconn = None
self.m_sqlitedbcursor = None
self.authorizer_func = None
self.sqlitesaveinfo = { } # tablename -> info
if self.short_name:
self.scraperresourcedir = os.path.join(resourcedir, self.short_name)
self.cstate = ''
self.etimestate = time.time()
self.progressticks = 0
self.totalprogressticks = 0
self.timeout_tickslimit = 300 # about 2 minutes
self.timeout_secondslimit = 180 # real time
self.clientforresponse = None
def close(self):
logger.debug("client#%d calling close on database" % self.Dclientnumber)
try:
if self.m_sqlitedbcursor:
self.m_sqlitedbcursor.close()
if self.m_sqlitedbconn:
self.m_sqlitedbconn.close()
except Exception, e:
logger.warning("client#%d close database error: %s" % (self.Dclientnumber, str(e)))
def process(self, request):
#logger.debug("doing request %s" % str(request)[:1000])
if request["maincommand"] == 'save_sqlite':
res = self.save_sqlite(unique_keys=request["unique_keys"], data=request["data"], swdatatblname=request["swdatatblname"])
logger.debug("save_sqlite response %s" % str(res))
elif request["maincommand"] == 'clear_datastore':
res = self.clear_datastore()
elif request["maincommand"] == 'undelete_datastore':
res = self.undelete_datastore()
elif request["maincommand"] == 'sqlitecommand':
if request["command"] == "downloadsqlitefile":
res = self.downloadsqlitefile(seek=request["seek"], length=request["length"])
elif request["command"] == "datasummary":
res = self.datasummary(request.get("limit", 10))
else:
res = {"error":'Unknown command: %s' % request["command"]}
elif request["maincommand"] == "sqliteexecute":
if self.attachlist:
self.establishconnection(True)
for req in self.attachlist:
if req["asname"] not in self.attached.get(req["name"], []):
ares = self.sqliteattach(req["name"], req["asname"])
if "error" in ares:
return ares
self.cstate, self.etimestate = 'sqliteexecute', time.time()
res = self.sqliteexecute(sqlquery=request["sqlquery"], data=request["data"])
if 'error' not in res:
res["stillproducing"] = "yes"
else:
res = {"error":'Unknown maincommand: %s' % request["maincommand"]}
logger.debug(json.dumps(res))
return res
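    # Illustrative request shapes accepted by process() (not part of the
    # original module; table and column names are made-up examples):
    #
    #   {"maincommand": "save_sqlite", "swdatatblname": "swdata",
    #    "unique_keys": ["id"], "data": [{"id": 1, "name": "example"}]}
    #
    #   {"maincommand": "sqliteexecute",
    #    "sqlquery": "select * from swdata where id=?", "data": (1,)}
    #
    #   {"maincommand": "sqlitecommand", "command": "datasummary", "limit": 10}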
def undelete_datastore(self):
restore_from = os.path.join(self.scraperresourcedir, "DELETED-defaultdb.sqlite")
if os.path.isfile(restore_from):
restore_to = os.path.join(self.scraperresourcedir, "defaultdb.sqlite")
shutil.move(restore_from, restore_to)
return {"status":"good"}
def clear_datastore(self):
scrapersqlitefile = os.path.join(self.scraperresourcedir, "defaultdb.sqlite")
if os.path.isfile(scrapersqlitefile):
deletedscrapersqlitefile = os.path.join(self.scraperresourcedir, "DELETED-defaultdb.sqlite")
shutil.move(scrapersqlitefile, deletedscrapersqlitefile)
return {"status":"good"}
# To do this properly would need to ensure file doesn't change during this process
def downloadsqlitefile(self, seek, length):
self.cstate, self.etimestate = 'downloadsqlitefile', time.time()
scrapersqlitefile = os.path.join(self.scraperresourcedir, "defaultdb.sqlite")
lscrapersqlitefile = os.path.join(self.short_name, "defaultdb.sqlite")
if not os.path.isfile(scrapersqlitefile):
return {"status":"No sqlite database"}
result = { "filename":lscrapersqlitefile, "filesize": os.path.getsize(scrapersqlitefile)}
if length == 0:
return result
fin = open(scrapersqlitefile, "rb")
fin.seek(seek)
content = fin.read(length)
result["length"] = len(content)
result["content"] = base64.encodestring(content)
result['encoding'] = "base64"
fin.close()
return result
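    # Illustrative caller-side sketch (not part of the original module): ask for
    # length=0 first to learn the file size, then fetch and decode chunks:
    #
    #   info = db.downloadsqlitefile(seek=0, length=0)      # {"filename":..., "filesize":...}
    #   chunk = db.downloadsqlitefile(seek=0, length=65536)
    #   data = base64.decodestring(chunk["content"])        # chunk["length"] bytes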
def establishconnection(self, bcreate):
# seems not able to reset authorizer function after it has been set once, so have to redirect this way
def authorizer_all(action_code, tname, cname, sql_location, trigger):
#print "authorizer_all", (action_code, tname, cname, sql_location, trigger)
return self.authorizer_func(action_code, tname, cname, sql_location, trigger)
if self.short_name_dbreadonly:
self.authorizer_func = authorizer_readonly
else:
self.authorizer_func = authorizer_writemain
if not self.m_sqlitedbconn:
if self.short_name:
if not os.path.isdir(self.scraperresourcedir):
if not bcreate:
return False
os.mkdir(self.scraperresourcedir)
scrapersqlitefile = os.path.join(self.scraperresourcedir, "defaultdb.sqlite")
self.m_sqlitedbconn = sqlite3.connect(scrapersqlitefile, check_same_thread=False)
logger.debug('Connecting to %s' % (scrapersqlitefile))
else:
self.m_sqlitedbconn = sqlite3.connect(":memory:", check_same_thread=False) # draft scrapers make a local version
if not self.short_name_dbreadonly:
self.m_sqlitedbconn.isolation_level = None # autocommit!
self.m_sqlitedbconn.set_authorizer(authorizer_all)
if ninstructions_progresshandler:
self.m_sqlitedbconn.set_progress_handler(self.progress_handler, ninstructions_progresshandler)
self.m_sqlitedbcursor = self.m_sqlitedbconn.cursor()
return True
def datasummary(self, limit):
self.cstate, self.etimestate = 'datasummary', time.time()
if not self.establishconnection(False):
logger.warning( 'Failed to connect to sqlite database for summary %s' % (self.short_name or 'draft'))
return {"status":"No sqlite database"} # don't change this return string, is a structured one
logger.debug('Performing datasummary for %s' % self.short_name)
self.authorizer_func = authorizer_readonly
total_rows = 0
tables = { }
try:
for name, sql in list(self.m_sqlitedbcursor.execute("select name, sql from sqlite_master where type='table' or type='view'")):
tables[name] = {"sql":sql}
cols = []
self.m_sqlitedbcursor.execute("PRAGMA table_info(`%s`);" % name)
# We can optimise the count by doing all tables in sub-selects, but suspect it is a micro-optimisation
tables[name]["keys"] = [ r[1] for r in self.m_sqlitedbcursor]
tables[name]["count"] = list(self.m_sqlitedbcursor.execute("select count(1) from `%s`" % name))[0][0]
total_rows += int(tables[name]["count"])
except sqlite3.Error, e:
return {"error":"datasummary: sqlite3.Error: "+str(e)}
result = {"tables":tables, 'total_rows': total_rows }
if self.short_name:
scrapersqlitefile = os.path.join(self.scraperresourcedir, "defaultdb.sqlite")
if os.path.isfile(scrapersqlitefile):
result["filesize"] = os.path.getsize(scrapersqlitefile)
return result
def sqliteattach(self, name, asname):
logger.debug("attach to %s %s as %s" % (self.short_name, name, asname))
self.establishconnection(True)
if self.authorizer_func == authorizer_writemain:
self.m_sqlitedbconn.commit() # otherwise a commit will be invoked by the attaching function
logger.info("requesting permission to attach %s to %s" % (self.short_name, name))
aquery = {"command":"can_attach", "scrapername":self.short_name, "attachtoname":name, "username":"unknown"}
ares = urllib.urlopen("%s?%s" % (attachauthurl, urllib.urlencode(aquery))).read()
logger.info("permission to attach %s to %s response: %s" % (self.short_name, name, ares))
if ares == "Yes":
logger.debug('attach connection allowed')
elif ares == "DoesNotExist":
return {"error":"Does Not Exist %s" % name}
else:
return {"error":"no permission to attach to %s" % name}
attachscrapersqlitefile = os.path.join(resourcedir, name, "defaultdb.sqlite")
self.authorizer_func = authorizer_attaching
try:
self.m_sqlitedbcursor.execute('attach database ? as ?', (attachscrapersqlitefile, asname or name))
except sqlite3.Error, e:
logger.error(e)
return {"error":"sqliteattach: sqlite3.Error: "+str(e)}
logger.debug('attach complete')
if name not in self.attached:
self.attached[name] = [ ]
self.attached[name].append(asname)
return {"status":"attach succeeded"}
def progress_handler(self):
if self.cstate == 'sqliteexecute':
self.progressticks += 1
self.totalprogressticks += 1
if self.progressticks == self.timeout_tickslimit:
logger.info("client#%d tickslimit timeout" % (self.Dclientnumber))
return 1
if time.time() - self.etimestate > self.timeout_secondslimit:
logger.info("client#%d elapsed time timeout" % (self.Dclientnumber))
return 2
logger.debug("client#%d progress %d time=%.2f" % (self.Dclientnumber, self.progressticks, time.time() - self.etimestate))
# looks like should be using IBodyPushProducer for this cycle
# but prob couldn't work as we are in a deferred thread here
lclientforresponse = self.clientforresponse
if not lclientforresponse:
logger.info("client#%d terminating progress" % (self.Dclientnumber)) # as nothing to receive the result anyway
return 3
elif lclientforresponse.progress_ticks == "yes":
jtickline = json.dumps({"progresstick":self.progressticks, "timeseconds":time.time() - self.etimestate})+"\n"
# should this be using IPushProducer?
reactor.callFromThread(lclientforresponse.transport.write, jtickline)
return 0 # continue
def sqliteexecute(self, sqlquery, data):
self.establishconnection(True)
self.progressticks = 0
try:
logger.info("client#%d sqlexecute %s" % (self.Dclientnumber, str(sqlquery)[:100]))
if data:
self.m_sqlitedbcursor.execute(sqlquery, data) # handle "(?,?,?)", (val, val, val)
else:
self.m_sqlitedbcursor.execute(sqlquery)
logger.info("client#%d end-sqlexecute %f" % (self.Dclientnumber, time.time() - self.etimestate))
# take a copy of the clientforresponse as it may be disconnected by the other thread
lclientforresponse = self.clientforresponse
if not lclientforresponse:
return {"error":"client must have disconnected"}
keys = self.m_sqlitedbcursor.description and map(lambda x:x[0], self.m_sqlitedbcursor.description) or []
arg = {"keys":keys, "data":[] } # data empty for filling in in another function
except sqlite3.Error, e:
arg = {"error":"sqliteexecute: sqlite3.Error: %s" % str(e)}
except ValueError, ve:
arg = {"error":"sqliteexecute: ValueError: %s" % str(ve)}
if "error" in arg:
logger.error(arg["error"])
arg["progressticks"] = self.progressticks
arg["timeseconds"] = time.time() - self.etimestate
return arg
def save_sqlite(self, unique_keys, data, swdatatblname):
self.cstate, self.etimestate = 'save_sqlite', time.time()
res = { }
if type(data) == dict:
data = [data]
if not self.m_sqlitedbconn or swdatatblname not in self.sqlitesaveinfo:
ssinfo = SqliteSaveInfo(self, swdatatblname)
self.sqlitesaveinfo[swdatatblname] = ssinfo
if not ssinfo.rebuildinfo() and data:
ssinfo.buildinitialtable(data[0])
ssinfo.rebuildinfo()
res["tablecreated"] = swdatatblname
else:
ssinfo = self.sqlitesaveinfo[swdatatblname]
nrecords = 0
self.sqliteexecute("BEGIN TRANSACTION", None)
logger.debug("client#%d begintrans for records %d" % (self.Dclientnumber, len(data)))
for ldata in data:
newcols = ssinfo.newcolumns(ldata)
if newcols:
for i, kv in enumerate(newcols):
ssinfo.addnewcolumn(kv[0], kv[1])
res["newcolumn %d" % i] = "%s %s" % kv
ssinfo.rebuildinfo()
if nrecords == 0 and unique_keys:
idxname, idxkeys = ssinfo.findclosestindex(unique_keys)
if not idxname or idxkeys != set(unique_keys):
lres = ssinfo.makenewindex(idxname, unique_keys)
if "error" in lres:
return lres
res.update(lres)
values = [ ldata.get(k) for k in ssinfo.swdatakeys ]
lres = self.sqliteexecute(ssinfo.sqdatatemplate, values)
if "error" in lres:
return lres
nrecords += 1
logger.debug("client#%d about to endtrans" % (self.Dclientnumber))
self.sqliteexecute("END TRANSACTION", None)
logger.debug("client#%d endtrans" % (self.Dclientnumber))
#self.m_sqlitedbconn.commit()
res["nrecords"] = nrecords
res["status"] = 'Data record(s) inserted or replaced'
self.cstate = ''
return res
def FetchRows(self, nrows=-1):
rows = []
for r in self.m_sqlitedbcursor:
row = []
for c in r:
if type(c) == buffer:
row.append( unicode(c) )
else:
row.append(c)
rows.append(row)
if nrows != -1 and len(rows) == nrows:
break
return rows
class SqliteSaveInfo:
def __init__(self, database, swdatatblname):
self.database = database
self.swdatatblname = swdatatblname
self.swdatakeys = [ ]
self.swdatatypes = [ ]
self.sqdatatemplate = ""
def sqliteexecuteSS(self, sqlquery, data=None):
res = self.database.sqliteexecute(sqlquery, data)
if "error" in res:
logger.warning("%s %s" % (self.database.short_name, str(res)[:1000]))
res["data"] = self.database.FetchRows()
return res
def rebuildinfo(self):
if not self.sqliteexecuteSS("select * from main.sqlite_master where name=?", (self.swdatatblname,))['data']:
return False
tblinfo = self.sqliteexecuteSS("PRAGMA main.table_info(`%s`)" % self.swdatatblname)
# there's a bug: PRAGMA main.table_info(swdata) returns the schema for otherdatabase.swdata
# following an attach otherdatabase where otherdatabase has a swdata and main does not
self.swdatakeys = [ a[1] for a in tblinfo["data"] ]
self.swdatatypes = [ a[2] for a in tblinfo["data"] ]
self.sqdatatemplate = "insert or replace into main.`%s` values (%s)" % (self.swdatatblname, ",".join(["?"]*len(self.swdatakeys)))
return True
def buildinitialtable(self, data):
assert not self.swdatakeys
coldef = self.newcolumns(data)
assert coldef
# coldef = coldef[:1] # just put one column in; the rest could be altered -- to prove it's good
scoldef = ", ".join(["`%s` %s" % col for col in coldef])
# used to just add date_scraped in, but without it can't create an empty table
self.sqliteexecuteSS("create table main.`%s` (%s)" % (self.swdatatblname, scoldef))
def newcolumns(self, data):
newcols = [ ]
for k in data:
if k not in self.swdatakeys:
v = data[k]
if v != None:
if k[-5:] == "_blob":
vt = "blob" # coerced into affinity none
elif type(v) == int:
vt = "integer"
elif type(v) == float:
vt = "real"
else:
vt = "text"
newcols.append((k, vt))
return newcols
def addnewcolumn(self, k, vt):
self.sqliteexecuteSS("alter table main.`%s` add column `%s` %s" % (self.swdatatblname, k, vt))
def findclosestindex(self, unique_keys):
idxlist = self.sqliteexecuteSS("PRAGMA main.index_list(`%s`)" % self.swdatatblname) # [seq,name,unique]
uniqueindexes = [ ]
if 'error' in idxlist:
return None, None
for idxel in idxlist["data"]:
if idxel[2]:
idxname = idxel[1]
idxinfo = self.sqliteexecuteSS("PRAGMA main.index_info(`%s`)" % idxname) # [seqno,cid,name]
idxset = set([ a[2] for a in idxinfo["data"] ])
idxoverlap = len(idxset.intersection(unique_keys))
uniqueindexes.append((idxoverlap, idxname, idxset))
if not uniqueindexes:
return None, None
uniqueindexes.sort()
return uniqueindexes[-1][1], uniqueindexes[-1][2]
# increment to next index number every time there is a change, and add the new index before dropping the old one.
def makenewindex(self, idxname, unique_keys):
istart = 0
if idxname:
mnum = re.search("(\d+)$", idxname)
if mnum:
istart = int(mnum.group(1))
for i in range(10000):
newidxname = "%s_index%d" % (self.swdatatblname, istart+i)
if not self.sqliteexecuteSS("select name from main.sqlite_master where name=?", (newidxname,))['data']:
break
res = { "newindex": newidxname }
lres = self.sqliteexecuteSS("create unique index `%s` on `%s` (%s)" % (newidxname, self.swdatatblname, ",".join(["`%s`"%k for k in unique_keys])))
if "error" in lres:
return lres
if idxname:
lres = self.sqliteexecuteSS("drop index main.`%s`" % idxname)
if "error" in lres:
if lres["error"] != 'sqlite3.Error: index associated with UNIQUE or PRIMARY KEY constraint cannot be dropped':
return lres
logger.info("%s: %s" % (self.database.short_name, str(lres)[:1000]))
res["droppedindex"] = idxname
return res
| agpl-3.0 | -168,464,633,160,901,730 | 43.115768 | 190 | 0.588363 | false |
sebp/scikit-survival | doc/conf.py | 1 | 12316 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# scikit-survival documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from datetime import datetime
import inspect
import os
from pathlib import Path
import re
import sys
from nbconvert.preprocessors import Preprocessor
import nbsphinx
from setuptools_scm import get_version
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
# https://docs.readthedocs.io/en/latest/faq.html?highlight=environ#how-do-i-change-behavior-for-read-the-docs
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
if on_rtd:
sys.path.insert(0, os.path.abspath(os.path.pardir))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.8'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.coverage',
'sphinx.ext.linkcode',
'sphinx.ext.mathjax',
'nbsphinx',
]
autosummary_generate = True
autodoc_default_options = {
'members': None,
'inherited-members': None,
}
# Napoleon settings
napoleon_google_docstring = False
napoleon_include_init_with_doc = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'scikit-survival'
current_year = datetime.utcnow().year
copyright = f'2015-{current_year}, Sebastian Pölsterl and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
if on_rtd:
release = get_version(root='..', relative_to=__file__)
else:
import sksurv
release = sksurv.__version__
# The short X.Y.Z version.
version = '.'.join(release.split('.')[:3])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# The default language to highlight source code in.
highlight_language = 'none'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '**/README.*', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
# pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
nbsphinx_execute = 'never'
nbsphinx_prolog = r"""
{% set docname = "doc/" + env.doc2path(env.docname, base=None) %}
{% set notebook = env.doc2path(env.docname, base=None)|replace("user_guide/", "notebooks/") %}
{% set branch = 'master' if 'dev' in env.config.release else 'v{}'.format(env.config.release) %}
.. raw:: html
<div class="admonition note" style="line-height: 150%;">
This page was generated from
<a class="reference external" href="https://github.com/sebp/scikit-survival/blob/{{ branch|e }}/{{ docname|e }}">{{ docname|e }}</a>.<br/>
Interactive online version:
<span style="white-space: nowrap;"><a href="https://mybinder.org/v2/gh/sebp/scikit-survival/{{ branch|e }}?urlpath=lab/tree/{{ notebook|e }}"><img alt="Binder badge" src="https://mybinder.org/badge_logo.svg" style="vertical-align:text-bottom"></a>.</span>
</div>
"""
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'pydata_sphinx_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"github_url": "https://github.com/sebp/scikit-survival",
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "scikit-survival {0}".format(version)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
html_css_files = ['custom.css']
html_js_files = ['buttons.js']
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
def linkcode_resolve(domain, info):
"""
Determine the URL corresponding to Python object
Adapted from scipy.
"""
import sksurv
if domain != 'py':
return None
modname = info['module']
fullname = info['fullname']
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split('.'):
try:
obj = getattr(obj, part)
except AttributeError:
return None
try:
fn = inspect.getsourcefile(obj)
except TypeError:
fn = None
if fn is None and hasattr(obj, '__module__'):
fn = inspect.getsourcefile(sys.modules[obj.__module__])
if fn is None:
return None
try:
source, lineno = inspect.getsourcelines(obj)
except ValueError:
lineno = None
if lineno:
linespec = '#L%d-L%d' % (lineno, lineno + len(source) - 1)
else:
linespec = ''
startdir = Path(sksurv.__file__).parent.parent.absolute()
if not fn.startswith(str(startdir)): # not in sksurv
return None
fn = '/'.join(Path(fn).relative_to(startdir).parts)
if fn.startswith('sksurv/'):
m = re.match(r'^.*dev[0-9]+\+g([a-f0-9]+)$', release)
if m:
branch = m.group(1)
elif 'dev' in release:
branch = 'master'
else:
branch = 'v{}'.format(release)
return 'https://github.com/sebp/scikit-survival/blob/{branch}/{filename}{linespec}'.format(
branch=branch,
filename=fn,
linespec=linespec
)
else:
return None
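# For instance (illustrative), an object defined under sksurv/ resolves to a URL
# of the form
#   https://github.com/sebp/scikit-survival/blob/<branch>/sksurv/<module>.py#L<first>-L<last>
# where <branch> is the commit hash for dev builds, 'master' for other dev
# releases, and 'v<release>' for tagged releases; anything outside sksurv/
# resolves to None.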
class RTDUrlPreprocessor(Preprocessor):
"""Convert URLs to RTD in notebook to relative urls."""
URL_PATTERN = re.compile(
r'\(https://scikit-survival\.readthedocs\.io/.+?/.+?/([-._a-zA-Z0-9/]+)/(.+?)\.html.*?\)'
)
DOC_DIR = Path(__file__).parent
def preprocess_cell(self, cell, resources, index):
# path of notebook directory, relative to conf.py
nb_path = Path(resources['metadata']['path']).relative_to(self.DOC_DIR)
to_root = [os.pardir] * len(nb_path.parts)
if cell.cell_type == 'markdown':
text = cell.source
replace = []
for match in self.URL_PATTERN.finditer(text):
path = to_root[:]
path.append(match.group(1))
rel_url = "/".join(path)
filename = match.group(2)
replace.append((match.group(0), '({}/{}.rst)'.format(rel_url, filename)))
for s, r in replace:
text = text.replace(s, r)
cell.source = text
return cell, resources
return cell, resources
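# Illustrative rewrite performed by RTDUrlPreprocessor (not part of the original
# file; the class in the URL is only an example). For a notebook under
# doc/user_guide/, a markdown link such as
#   (https://scikit-survival.readthedocs.io/en/stable/api/generated/sksurv.svm.FastSurvivalSVM.html)
# becomes the relative
#   (../api/generated/sksurv.svm.FastSurvivalSVM.rst)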
def _from_notebook_node(self, nb, resources, **kwargs):
filters = [RTDUrlPreprocessor(), ]
for f in filters:
nb, resources = f.preprocess(nb, resources=resources)
return nbsphinx_from_notebook_node(self, nb, resources=resources, **kwargs)
# see https://github.com/spatialaudio/nbsphinx/issues/305#issuecomment-506748814-permalink
nbsphinx_from_notebook_node = nbsphinx.Exporter.from_notebook_node
nbsphinx.Exporter.from_notebook_node = _from_notebook_node
# ------------------------
# Mock dependencies on RTD
# ------------------------
if on_rtd:
MOCK_MODULES = [
# external dependencies
'ecos',
'joblib',
'numexpr',
'numpy',
'osqp',
'pandas',
'pandas.api.types',
'scipy',
'scipy.integrate',
'scipy.io.arff',
'scipy.linalg',
'scipy.optimize',
'scipy.sparse',
'scipy.special',
'scipy.stats',
'sklearn',
'sklearn.base',
'sklearn.dummy',
'sklearn.ensemble',
'sklearn.ensemble._base',
'sklearn.ensemble._forest',
'sklearn.ensemble._gb',
'sklearn.ensemble._gb_losses',
'sklearn.ensemble._gradient_boosting',
'sklearn.ensemble.base',
'sklearn.ensemble.forest',
'sklearn.ensemble.gradient_boosting',
'sklearn.exceptions',
'sklearn.externals.joblib',
'sklearn.linear_model',
'sklearn.metrics',
'sklearn.metrics.pairwise',
'sklearn.model_selection',
'sklearn.pipeline',
'sklearn.preprocessing',
'sklearn.svm',
'sklearn.tree',
'sklearn.tree._classes',
'sklearn.tree._splitter',
'sklearn.tree._tree',
'sklearn.tree.tree',
'sklearn.utils',
'sklearn.utils._joblib',
'sklearn.utils.extmath',
'sklearn.utils.fixes',
'sklearn.utils.metaestimators',
'sklearn.utils.validation',
# our C modules
'sksurv.bintrees._binarytrees',
'sksurv.ensemble._coxph_loss',
'sksurv.kernels._clinical_kernel',
'sksurv.linear_model._coxnet',
'sksurv.svm._minlip',
'sksurv.svm._prsvm',
'sksurv.tree._criterion']
MOCK_VERSIONS = {
'pandas': '0.25.0',
'sklearn': '0.22.0',
}
from unittest.mock import Mock
class MockModule(Mock):
"""mock imports"""
@classmethod
def __getattr__(cls, name):
if name in ('__file__', '__path__'):
return '/dev/null'
elif name[0] == name[0].upper() and name[0] != "_":
# Not very good, we assume Uppercase names are classes...
mocktype = type(name, (), {})
mocktype.__module__ = __name__
return mocktype
else:
return MockModule()
sys.modules.update((mod_name, MockModule()) for mod_name in MOCK_MODULES)
for mod, ver in MOCK_VERSIONS.items():
setattr(sys.modules[mod], '__version__', ver)
| gpl-3.0 | -5,840,506,738,319,099,000 | 30.256345 | 261 | 0.633618 | false |
Marginal/OverlayEditor | draw.py | 1 | 94208 |
import OpenGL # for __version__
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.arrays import vbo
from OpenGL.extensions import alternate, hasGLExtension
from OpenGL.GL.shaders import compileShader, compileProgram
from OpenGL.GL.ARB.occlusion_query import *
glBeginQuery = alternate(glBeginQuery, glBeginQueryARB)
glDeleteQueries = alternate(glDeleteQueries, glDeleteQueriesARB)
glEndQuery = alternate(glEndQuery, glEndQueryARB)
glGenQueries = alternate(glGenQueries, glGenQueriesARB)
glGetQueryObjectuiv = alternate(glGetQueryObjectuiv, glGetQueryObjectuivARB)
GL_ANY_SAMPLES_PASSED=0x8C2F # not in 3.0.1
from OpenGL.GL.ARB.instanced_arrays import glInitInstancedArraysARB, glVertexAttribDivisorARB
from OpenGL.GL.EXT.multi_draw_arrays import glMultiDrawArraysEXT
from OpenGL.GL.EXT.gpu_shader4 import glInitGpuShader4EXT
glMultiDrawArrays = alternate(glMultiDrawArrays, glMultiDrawArraysEXT)
import gc
from glob import glob
from math import atan2, cos, sin, floor, hypot, radians
from numpy import array, array_equal, concatenate, dot, identity, vstack, zeros, float32, float64, int32
import os
from os.path import basename, curdir, join, splitext
from struct import unpack
from sys import exc_info, exit, platform, version
from traceback import print_exc
import time
import wx
import wx.glcanvas
from apt import layoutApt
from files import VertexCache, sortfolded, glInitTextureCompressionS3TcEXT
from fixed8x13 import fixed8x13
from clutter import Clutter, Object, Polygon, Draped, DrapedImage, Facade, Network, Exclude
from clutterdef import ClutterDef, ObjectDef, AutoGenPointDef, NetworkDef, PolygonDef, COL_CURSOR, COL_SELECTED, COL_UNPAINTED, COL_DRAGBOX, COL_WHITE, fallbacktexture
from DSFLib import readDSF
from elevation import BBox, ElevationMesh, onedeg, round2res
from imagery import Imagery
from lock import Locked
from MessageBox import myMessageBox
from nodes import Node
from prefs import Prefs, prefs, gcustom, gnavdata
from version import appname
sband=16 # width of mouse scroll band around edge of window
debugapt = __debug__ and False
log_glstate= __debug__ and True
log_load = __debug__ and True
log_paint = __debug__ and False
class UndoEntry:
ADD=0
DEL=1
MODIFY=2
MOVE=3
SPLIT=4
def __init__(self, tile, kind, data):
self.tile=tile
self.kind=kind
self.data=data # [(idx, placement)]
def equals(self, other):
# ignore placement details
if self.tile!=other.tile or not (self.kind==other.kind==UndoEntry.MOVE): return False
if self.data==other.data==None: return True
if not (self.data and other.data and len(self.data)==len(other.data)):
return False
for i in range(len(self.data)):
if self.data[i][0]!=other.data[i][0]:
return False
return True
class ClickModes:
Undecided=1
DragBox=2
Drag=3
DragNode=4
Scroll=5
Move=6
# OpenGL state
class GLstate():
def __init__(self):
self.debug=__debug__ and False
self.proj = identity(4, float64) # projection matrix
self.occlusion_query=None # Will test for this later
self.queries=[]
self.multi_draw_arrays = bool(glMultiDrawArrays)
glEnableClientState(GL_VERTEX_ARRAY)
self.texture=0
glEnableClientState(GL_TEXTURE_COORD_ARRAY)
self.color=COL_UNPAINTED
glColor3f(*COL_UNPAINTED)
glDisableClientState(GL_COLOR_ARRAY)
self.cull=True
glEnable(GL_CULL_FACE)
self.depthtest=True
glDepthFunc(GL_LESS)
self.poly=False
glDisable(GL_POLYGON_OFFSET_FILL)
glDepthMask(GL_TRUE)
self.current_vbo=None
self.instance_vbo =vbo.VBO(None, GL_STATIC_DRAW, size=0) # explicit size for PyOpenGL_accelerate
self.vector_vbo =vbo.VBO(None, GL_STATIC_DRAW, size=0)
self.vector_indices_vbo =vbo.VBO(None, GL_STATIC_DRAW, GL_ELEMENT_ARRAY_BUFFER, size=0)
self.dynamic_vbo =vbo.VBO(None, GL_STATIC_DRAW, size=0)
self.selected_vbo =vbo.VBO(None, GL_STREAM_DRAW, size=0)
# Use of GL_ARB_instanced_arrays requires a shader. Just duplicate fixed pipeline shaders.
try:
# MacOS 10.5 drivers are too flakey
if platform=='darwin' and int(os.uname()[2].split('.')[0]) < 10: raise NotImplementedError
vanilla = open('Resources/vanilla.vs').read()
instanced = open('Resources/instanced.vs').read()
unlit = open('Resources/unlit.fs').read()
colorvs = open('Resources/color.vs').read()
colorfs = open('Resources/color.fs').read()
pointfs = open('Resources/point.fs').read()
self.textureshader = compileProgram(compileShader(vanilla, GL_VERTEX_SHADER),
compileShader(unlit, GL_FRAGMENT_SHADER))
if __debug__: print glGetProgramInfoLog(self.textureshader)
self.transform_pos = glGetUniformLocation(self.textureshader, 'transform')
self.colorshader = compileProgram(compileShader(colorvs, GL_VERTEX_SHADER),
compileShader(colorfs, GL_FRAGMENT_SHADER))
if __debug__: print glGetProgramInfoLog(self.colorshader)
assert glGetProgramiv(self.colorshader, GL_LINK_STATUS), glGetProgramInfoLog(self.colorshader)
if platform=='win32' and glGetString(GL_VENDOR)=='Intel' and glGetString(GL_VERSION).startswith('2.'):
# gl_PointCoord broken on Gen5 and older - http://lists.freedesktop.org/archives/mesa-commit/2012-March/036247.html
self.pointshader = None
glPointSize(5.0)
else:
self.pointshader = compileProgram(compileShader(colorvs, GL_VERTEX_SHADER),
compileShader(pointfs, GL_FRAGMENT_SHADER))
if __debug__: print glGetProgramInfoLog(self.pointshader)
glEnable(GL_POINT_SPRITE)
if glInitInstancedArraysARB():
self.instancedshader = compileProgram(compileShader(instanced, GL_VERTEX_SHADER),
compileShader(unlit, GL_FRAGMENT_SHADER))
if __debug__: print glGetProgramInfoLog(self.instancedshader)
self.instanced_transform_pos = glGetAttribLocation(self.instancedshader, 'transform')
self.instanced_selected_pos = glGetAttribLocation(self.instancedshader, 'selected')
self.instanced_arrays = True
else:
self.instancedshader = self.instanced_transform_pos = self.instanced_selected_pos = None
self.instanced_arrays = False
glUseProgram(self.textureshader)
glUniform4f(self.transform_pos, *zeros(4,float32))
self.shaders = True
except:
if __debug__: print_exc()
self.instanced_arrays = self.shaders = False
self.textureshader = self.colorshader = self.pointshader = self.instancedshader = None
def set_texture(self, id):
if self.texture!=id:
if __debug__:
if self.debug: print "set_texture", id
if id is None:
if __debug__:
if self.debug: print "set_texture disable GL_TEXTURE_COORD_ARRAY"
glDisableClientState(GL_TEXTURE_COORD_ARRAY)
if self.shaders: glUseProgram(self.colorshader)
if self.texture!=0:
glBindTexture(GL_TEXTURE_2D, 0)
else:
if self.texture is None:
if __debug__:
if self.debug: print "set_texture enable GL_TEXTURE_COORD_ARRAY"
glEnableClientState(GL_TEXTURE_COORD_ARRAY)
if self.shaders: glUseProgram(self.textureshader)
glBindTexture(GL_TEXTURE_2D, id)
self.texture=id
elif __debug__:
if self.debug: print "set_texture already", id
def set_color(self, color):
if self.color!=color:
if color is None:
# Colors from VBO
if __debug__:
if self.debug:
print "set_color None"
print "set_color enable GL_COLOR_ARRAY"
glEnableClientState(GL_COLOR_ARRAY)
else:
# Color specified explicitly
if __debug__:
if self.debug: print "set_color (%.3f, %.3f. %.3f)" % color
if self.color is None:
if __debug__:
if self.debug: print "set_color disable GL_COLOR_ARRAY"
glDisableClientState(GL_COLOR_ARRAY)
glColor3f(*color)
self.color=color
elif __debug__:
if self.debug:
if self.color is None:
print "set_color already None"
else:
print "set_color already (%.3f, %.3f. %.3f)" % color
def set_depthtest(self, depthtest):
# Occlusion query counts "the number of samples that pass the depth and stencil tests", which ATI interpret
# as meaning that the depth test must be enabled. So control depth test via DepthFunc rather than glEnable.
if self.depthtest!=depthtest:
if __debug__:
if self.debug: print "set_depthtest", depthtest
self.depthtest=depthtest
if depthtest:
glDepthFunc(GL_LESS)
else:
glDepthFunc(GL_ALWAYS)
elif __debug__:
if self.debug: print "set_depthtest already", depthtest
def set_cull(self, cull):
if self.cull!=cull:
if __debug__:
if self.debug: print "set_cull", cull
self.cull=cull
if cull:
glEnable(GL_CULL_FACE)
else:
glDisable(GL_CULL_FACE)
elif __debug__:
if self.debug: print "set_cull already", cull
def set_poly(self, poly):
if self.poly!=poly:
if __debug__:
if self.debug: print "set_poly", poly
self.poly=poly
if poly:
glEnable(GL_POLYGON_OFFSET_FILL)
glDepthMask(GL_FALSE) # offset mustn't update depth
else:
glDisable(GL_POLYGON_OFFSET_FILL)
glDepthMask(GL_TRUE)
elif __debug__:
if self.debug: print "set_poly already", poly
def set_instance(self, vertexcache):
if vertexcache.realize_instance(self.instance_vbo) or self.current_vbo!=self.instance_vbo:
if __debug__:
if self.debug: print "set_instance"
self.instance_vbo.bind()
glTexCoordPointer(2, GL_FLOAT, 20, self.instance_vbo+12)
glVertexPointer(3, GL_FLOAT, 20, self.instance_vbo)
self.current_vbo=self.instance_vbo
elif __debug__:
if self.debug: print "set_instance already instance_vbo"
def set_vector(self, vertexcache):
if vertexcache.realize_vector(self.vector_vbo, self.vector_indices_vbo) or self.current_vbo!=self.vector_vbo:
if __debug__:
if self.debug: print "set_vector"
self.vector_indices_vbo.bind()
self.vector_vbo.bind()
glColorPointer(3, GL_FLOAT, 24, self.vector_vbo+12)
glVertexPointer(3, GL_FLOAT, 24, self.vector_vbo)
self.current_vbo=self.vector_vbo
elif __debug__:
if self.debug: print "set_vector already vector_vbo"
def set_dynamic(self, vertexcache):
if vertexcache.realize_dynamic(self.dynamic_vbo) or self.current_vbo!=self.dynamic_vbo:
if __debug__:
if self.debug: print "set_dynamic"
self.dynamic_vbo.bind()
glColorPointer(3, GL_FLOAT, 24, self.dynamic_vbo+12)
glTexCoordPointer(2, GL_FLOAT, 24, self.dynamic_vbo+12)
glVertexPointer(3, GL_FLOAT, 24, self.dynamic_vbo)
self.current_vbo=self.dynamic_vbo
elif __debug__:
if self.debug: print "set_dynamic already dynamic_vbo"
def set_attrib_selected(self, pos, selectflags):
self.selected_vbo.set_array(selectflags)
self.selected_vbo.bind()
glVertexAttribPointer(pos, 1, GL_FLOAT, GL_FALSE, 4, self.selected_vbo)
def alloc_queries(self, needed):
if len(self.queries)<needed:
if len(self.queries): glDeleteQueries(len(self.queries), self.queries)
needed=(needed/256+1)*256 # round up
self.queries=glGenQueries(needed)
if __debug__:
if self.debug: print "get_queries", self.queries
def pickmatrix(self, x, y, width, height, viewx, viewy):
# like gluPickMatrix, but doesn't actually load the resultant matrix into OpenGL
width = width and float(width) or 1.0 # maths goes wrong if zero-sized box
height = height and float(height) or 1.0
sx = viewx / width
sy = viewy / height
tx = (viewx - 2 * x) / width
ty = (viewy - 2 * y) / height
m = array([[sx, 0, 0, 0], [0, sy, 0, 0], [0, 0, 1, 0], [tx, ty, 0, 1]], dtype=float64)
return dot(self.proj, m)
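    # Illustrative use (an assumption, not from the original code): a caller
    # doing picking would combine the returned matrix with the modelview
    # transform itself rather than loading it into OpenGL, e.g.
    #
    #   pickproj = glstate.pickmatrix(x, y, 5, 5, viewport_width, viewport_height)
    #
    # which restricts the projection to a 5x5-pixel box centred on window
    # position (x, y).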
# OpenGL Window
class MyGL(wx.glcanvas.GLCanvas):
def __init__(self, parent, frame):
self.parent=parent
self.frame=frame
self.movecursor=wx.StockCursor(wx.CURSOR_SIZING)
self.scrollcursor=wx.StockCursor(wx.CURSOR_HAND)
self.dragcursor=wx.StockCursor(wx.CURSOR_CROSS)
self.doneinit=False # Has glInit been called?
self.valid=False # do we have valid data for a redraw?
self.needclear=False # pending clear
self.needrefesh=False # pending refresh
self.options = 0 # display options at last goto()
self.tile=(0,999) # [lat,lon] of SW
self.centre=None # [lat,lon] of centre
self.airports={} # [runways] by code
self.runways={} # [shoulder/taxiway/runway data] by tile
self.aptdata = {} # indices into vertexcache (base, len), by layer
self.navaids=[] # (type, lat, lon, hdg)
self.navaidplacements={} # navaid placements by tile
self.codes={} # [(code, loc)] by tile
self.codeslist=0 # airport labels
self.lookup={} # virtual name -> filename (may be duplicates)
self.defs={} # loaded ClutterDefs by filename
self.placements={} # [Clutter] by tile
self.unsorted={} # [Clutter] by tile
self.background=None
self.mousenow=None # Current position (used in timer and drag)
self.locked=0 # locked object types
self.selected=set() # selected placements
self.clickmode=None
self.clickpos=None # Location of mouse down
self.clickctrl=False # Ctrl was held down
self.selectednode=None # Selected node. Only if len(self.selected)==1
self.selectedhandle=None # Selected node control handle.
self.selectedlayoutpending = False # self.clickmode==ClickModes.DragNode and we need to do a full layout
self.selections=set() # Hits for cycling picking
self.selectsaved=set() # Selection at start of ctrl drag box
self.snapnode = None # (Polygon, idx) of node we snapped to in ClickModes.DragNode mode
self.draginert=True
self.dragx=wx.SystemSettings_GetMetric(wx.SYS_DRAG_X)
self.dragy=wx.SystemSettings_GetMetric(wx.SYS_DRAG_Y)
if self.dragx<=1 or self.dragx>8 or self.dragy<=1 or self.dragy>8:
self.dragx=self.dragy=5 # Finder on Mac appears to use 5
self.clipboard = set()
self.undostack=[]
# Values during startup
self.x=0
self.y=0
self.z=0
self.h=0
self.e=45
self.d=2048.0
if __debug__: self.ei = self.ej = ElevationMesh.DIVISIONS/2-1 # for debugging elevation mesh
# Must specify min sizes for glX? - see glXChooseVisual and GLXFBConfig
try:
# Ask for a large depth buffer.
# wxGTK<=2.8 can't recover from a failure in glXChooseFBConfig so skip this - http://trac.wxwidgets.org/ticket/12479
if platform.startswith('linux'): raise AssertionError
# We're not using the stencil buffer so would prefer to specify a 32bit depth buffer, but this can cause e.g. Intel Windows drivers to fall back to 16 even though they support 24
wx.glcanvas.GLCanvas.__init__(self, parent, style=wx.FULL_REPAINT_ON_RESIZE|wx.WANTS_CHARS,
attribList=[wx.glcanvas.WX_GL_RGBA,wx.glcanvas.WX_GL_DOUBLEBUFFER,wx.glcanvas.WX_GL_DEPTH_SIZE, 24])
if wx.VERSION >= (2,9):
self.context = wx.glcanvas.GLContext(self)
except:
# Failed - try with safe 16bit depth buffer.
try:
if __debug__: print "Trying 16bit depth buffer"
# wxGTK<=2.8 has no way to discover if this fails, so will segfault later
wx.glcanvas.GLCanvas.__init__(self, parent, style=wx.FULL_REPAINT_ON_RESIZE|wx.WANTS_CHARS,
attribList=[wx.glcanvas.WX_GL_RGBA,wx.glcanvas.WX_GL_DOUBLEBUFFER,wx.glcanvas.WX_GL_DEPTH_SIZE, 16])
if wx.VERSION >= (2,9):
self.context = wx.glcanvas.GLContext(self)
except:
myMessageBox('Try updating the drivers for your graphics card.', "Can't initialise OpenGL.", wx.ICON_ERROR|wx.OK, frame)
exit(1)
# Allocate this stuff in glInit
self.glstate=None
self.vertexcache=None
wx.EVT_ERASE_BACKGROUND(self, self.OnEraseBackground)
wx.EVT_KEY_DOWN(self, self.OnKeyDown)
wx.EVT_MOUSEWHEEL(self, self.OnMouseWheel)
wx.EVT_MOTION(self, self.OnMouseMotion)
wx.EVT_LEFT_DOWN(self, self.OnLeftDown)
wx.EVT_LEFT_UP(self, self.OnLeftUp)
wx.EVT_MIDDLE_DOWN(self, self.OnMiddleDown)
wx.EVT_MIDDLE_UP(self, self.OnMiddleUp)
wx.EVT_IDLE(self, self.OnIdle)
#wx.EVT_KILL_FOCUS(self, self.OnKill) # debug
self.timer=wx.Timer(self, wx.ID_ANY)
wx.EVT_TIMER(self, self.timer.GetId(), self.OnTimer)
wx.EVT_PAINT(self, self.OnPaint)
def glInit(self):
# Setup state
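        # Creates the vertex/texture caches, GLstate and imagery handler, and
        # sets the fixed-function defaults (depth test, blending, alpha test,
        # flipped texture matrix) shared by the shader and non-shader paths.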
if platform.startswith('linux') and not self.IsShownOnScreen():
return # Under X must be called after window is shown, which is deferred under wxGTK>=3.
else:
self.doneinit = True
if wx.VERSION >= (2,9):
self.SetCurrent(self.context)
else:
self.SetCurrent()
if log_glstate: print "%s\n%s\n%s\nRGBA: %d%d%d%d, Depth: %d, Stencil: %d, Aux: %d, DoubleBuffer: %d" % (glGetString(GL_VENDOR), glGetString(GL_RENDERER), glGetString(GL_VERSION), glGetInteger(GL_RED_BITS), glGetInteger(GL_GREEN_BITS), glGetInteger(GL_BLUE_BITS), glGetInteger(GL_ALPHA_BITS), glGetInteger(GL_DEPTH_BITS), glGetInteger(GL_STENCIL_BITS), glGetInteger(GL_AUX_BUFFERS), glGetBoolean(GL_DOUBLEBUFFER))
if not vbo.get_implementation():
myMessageBox('This application requires the use of OpenGL Vertex Buffer Objects (VBOs) which are not supported by your graphics card.\nTry updating the drivers for your graphics card.',
"Can't initialise OpenGL.", wx.ICON_ERROR|wx.OK, self.frame)
exit(1)
if not glInitTextureCompressionS3TcEXT() and not __debug__:
myMessageBox('This application requires the use of DXT texture compression which is not supported by your graphics card.\nTry updating the drivers for your graphics card.',
"Can't initialise OpenGL.", wx.ICON_ERROR|wx.OK, self.frame)
exit(1)
self.vertexcache=VertexCache() # member so can free resources
self.glstate=GLstate()
self.imagery=Imagery(self)
#glClearDepth(1.0)
glClearColor(0.5, 0.5, 1.0, 0.0) # Sky
glEnable(GL_DEPTH_TEST)
glShadeModel(GL_SMOOTH)
glEnable(GL_LINE_SMOOTH)
glPointSize(7.0) # for nodes
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST) # we can hope
glFrontFace(GL_CW)
glPolygonMode(GL_FRONT, GL_FILL)
glPolygonOffset(-2, -2)
glCullFace(GL_BACK)
glPixelStorei(GL_UNPACK_ALIGNMENT,1) # byte aligned glBitmap
glPixelStorei(GL_PACK_ALIGNMENT,1) # byte aligned glReadPixels
glReadBuffer(GL_BACK) # for unproject
#glPixelStorei(GL_UNPACK_LSB_FIRST,1)
glEnable(GL_TEXTURE_2D)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_BLEND)
glAlphaFunc(GL_GREATER, 1.0/256) # discard wholly transparent
glEnable(GL_ALPHA_TEST)
glMatrixMode(GL_TEXTURE)
glTranslatef(0, 1, 0)
glScalef(1, -1, 1) # OpenGL textures are backwards
glMatrixMode(GL_PROJECTION) # We always leave the modelview matrix as identity and the active matrix mode as projection, except briefly when drawing objects via the non-shader path
if log_glstate:
try:
import OpenGL_accelerate.formathandler # for py2exe
import OpenGL_accelerate.nones_formathandler # for py2exe
import OpenGL.acceleratesupport
print 'PyOpenGL acceleration:\t\t%s' % (OpenGL.acceleratesupport.ACCELERATE_AVAILABLE and 'yes' or 'no')
except:
print 'PyOpenGL acceleration:\t\tnot supported'
print 'max_texture_size:\t\t%d' % self.vertexcache.texcache.maxtexsize
print 'texture_non_power_of_two:\t%s' % (self.vertexcache.texcache.npot and 'yes' or 'no')
print 'texture_compression:\t\t%s' % (self.vertexcache.texcache.compress and 'yes' or 'no')
print 'texture_compression_s3tc:\t%s' % (self.vertexcache.texcache.s3tc and 'yes' or 'no')
print 'bgra:\t\t\t\t%s' % (self.vertexcache.texcache.bgra and 'yes' or 'no')
print 'shaders:\t\t\t%s' % (self.glstate.shaders and 'yes' or 'no')
print 'gpu_shader4:\t\t\t%s' % (glInitGpuShader4EXT() and 'yes' or 'no')
print 'instanced_arrays:\t\t%s' % (self.glstate.instanced_arrays and 'yes' or 'no')
print 'multi_draw_arrays:\t\t%s' % (self.glstate.multi_draw_arrays and 'yes' or 'no')
def OnEraseBackground(self, event):
# Prevent flicker when resizing / painting on MSW
#if __debug__: print "OnEraseBackground"
        self.needclear=True # ATI drivers require clear after resize
def OnKeyDown(self, event):
if self.clickmode:
event.Skip()
else:
# Manually propagate
self.frame.OnKeyDown(event)
def OnMouseWheel(self, event):
# under wxMac 2.8 scroll events don't arrive in the main frame, so catch here and forward
if self.clickmode:
event.Skip()
else:
# Manually propagate
self.frame.OnMouseWheel(event)
def OnTimer(self, event):
# mouse scroll - fake up a key event and pass it up
size=self.GetClientSize()
posx=self.mousenow[0]
posy=self.mousenow[1]
keyevent=wx.KeyEvent()
keyevent.m_controlDown=wx.GetKeyState(wx.WXK_CONTROL)
keyevent.m_shiftDown=wx.GetKeyState(wx.WXK_SHIFT)
if posx<sband:
keyevent.m_keyCode=wx.WXK_LEFT
elif posy<sband:
keyevent.m_keyCode=wx.WXK_UP
elif size.x-posx<sband:
keyevent.m_keyCode=wx.WXK_RIGHT
elif size.y-posy<sband:
keyevent.m_keyCode=wx.WXK_DOWN
if keyevent.m_keyCode:
self.frame.OnKeyDown(keyevent)
def OnLeftDown(self, event):
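        # A left click near a window border starts edge scrolling; otherwise
        # select() decides whether we hit a node/handle, a placement or empty
        # ground and sets clickmode accordingly.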
if self.clickmode==ClickModes.Move: return
#event.Skip(False) # don't change focus
self.mousenow=self.clickpos=[event.GetX(),event.GetY()]
self.clickctrl=event.CmdDown()
self.frame.canvas.SetFocus() # otherwise focus goes to None under wxGTK
self.CaptureMouse()
size = self.GetClientSize()
if event.GetX()<sband or event.GetY()<sband or size.x-event.GetX()<sband or size.y-event.GetY()<sband:
# mouse scroll
self.clickmode=ClickModes.Scroll
self.timer.Start(50)
else:
self.clickmode=ClickModes.Undecided
self.select()
self.draginert=True # was (self.clickmode!=ClickModes.DragNode)
def OnLeftUp(self, event):
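        # Finish the current gesture: do any deferred node layout, split a
        # network segment if its end node was snapped onto another segment,
        # re-lay-out dragged placements, or just discard the drag box.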
#print "up", ClickModes.DragNode
if self.HasCapture(): self.ReleaseMouse()
self.timer.Stop()
if self.clickmode==ClickModes.DragNode:
assert len(self.selected)==1
if self.selectedlayoutpending:
if __debug__: clock2 = time.clock()
placement = list(self.selected)[0]
self.selectednode = placement.layout(self.tile, self.selectednode)
if __debug__: print "%6.3f time to layout %s" % (time.clock()-clock2, placement.name)
self.selectedlayoutpending = False
self.Refresh()
if self.snapnode:
# split segment at snapped-to node
(poly,idx) = self.snapnode
if idx!=0 and idx!=len(poly.nodes[0])-1:
poly2 = poly.clone()
poly2.load(self.lookup, self.defs, self.vertexcache)
placements = self.placements[self.tile]
self.undostack.append(UndoEntry(self.tile, UndoEntry.SPLIT, [(placements.index(poly), poly.clone()),
(len(placements), poly2)]))
placements.append(poly2)
poly.nodes[0] = poly.nodes[0][:idx+1]
poly.layout(self.tile, recalc=True)
poly2.nodes[0]= poly2.nodes[0][idx:]
poly2.layout(self.tile, recalc=True)
self.selected = set([poly,poly2])
self.selectednode = self.selectedhandle = None
self.Refresh()
self.snapnode = None
elif self.clickmode==ClickModes.Drag:
for placement in self.selected:
placement.layout(self.tile)
elif self.clickmode==ClickModes.DragBox:
self.Refresh() # get rid of drag box
self.clickmode=None
self.frame.ShowSel()
event.Skip()
def OnMiddleDown(self, event):
if self.clickmode: return
self.clickmode=ClickModes.Move
self.mousenow=self.clickpos=[event.GetX(),event.GetY()]
self.CaptureMouse()
self.SetCursor(self.movecursor)
def OnMiddleUp(self, event):
if self.HasCapture(): self.ReleaseMouse()
self.SetCursor(wx.NullCursor)
self.clickmode=None
event.Skip()
def OnIdle(self, event):
if self.valid: # can get Idles during reload under X
if self.clickmode==ClickModes.DragNode:
assert len(self.selected)==1
if self.selectedlayoutpending:
if __debug__: clock2 = time.clock()
placement = list(self.selected)[0]
self.selectednode = placement.layout(self.tile, self.selectednode)
if __debug__: print "%6.3f time to layout %s" % (time.clock()-clock2, placement.name)
assert self.selectednode
self.selectedlayoutpending = False
self.Refresh()
elif self.needrefresh:
# Mouse motion with a selected polygon draws to the back buffer, which Mac uses as backing store. So refresh.
self.Refresh()
def OnMouseMotion(self, event):
# Capture unreliable on Mac, so may have missed Up events. See
# https://sourceforge.net/tracker/?func=detail&atid=109863&aid=1489131&group_id=9863
#self.getworldloc(event.GetX(),event.GetY()) # debug
assert self.valid
if not self.valid: return
if self.clickmode==ClickModes.Move:
if not event.MiddleIsDown():
self.OnMiddleUp(event)
return
elif self.clickmode and not event.LeftIsDown():
self.OnLeftUp(event)
return
if self.timer.IsRunning():
# Continue mouse scroll
self.mousenow=[event.GetX(),event.GetY()] # not known in timer
return
if not self.clickmode:
size = self.GetClientSize()
# Change cursor if over a window border
if event.GetX()<sband or event.GetY()<sband or size.x-event.GetX()<sband or size.y-event.GetY()<sband:
self.SetCursor(self.scrollcursor)
return
# Change cursor if over a node
if len(self.selected)==1 and isinstance(list(self.selected)[0], Polygon):
if list(self.selected)[0].pick_nodes(self.glstate.pickmatrix(event.GetX(), size[1]-1-event.GetY(), 5,5, *size), True):
self.SetCursor(self.dragcursor) # hovering over node
return
self.SetCursor(wx.NullCursor)
return
assert (self.clickmode!=ClickModes.Undecided)
if self.clickmode==ClickModes.Move:
(oldlat,oldlon)=self.getworldloc(*self.mousenow)
self.mousenow=[event.GetX(),event.GetY()]
(lat,lon)=self.getworldloc(*self.mousenow)
self.frame.loc=(self.frame.loc[0]-lat+oldlat, self.frame.loc[1]-lon+oldlon)
self.goto(self.frame.loc)
self.frame.ShowLoc()
return
        if self.draginert and abs(event.GetX()-self.clickpos[0])<self.dragx and abs(event.GetY()-self.clickpos[1])<self.dragy:
return
else:
self.draginert=False
if self.clickmode==ClickModes.DragNode:
# Start/continue node drag
self.SetCursor(self.dragcursor)
self.snapnode = None
poly=list(self.selected)[0]
if isinstance(poly, Network) and not self.selectedhandle and self.selectednode[1] in [0,len(poly.nodes[0])-1]:
# snap end nodes to nodes in other network segments
size = self.GetClientSize()
proj = self.glstate.pickmatrix(event.GetX(), size[1]-1-event.GetY(), 13,13, *size)
for p in self.placements[self.tile]:
if isinstance(p, Network) and p!=poly:
hit = p.pick_nodes(proj, False)
if hit:
self.snapnode = (p, hit[0][1])
break
if self.snapnode:
node = self.snapnode[0].nodes[0][self.snapnode[1]]
(lon,lat) = (node.lon,node.lat) # snap to matching network node location
else:
(lat,lon)=self.getworldloc(event.GetX(), event.GetY())
lat=max(self.tile[0], min(self.tile[0]+1, lat))
lon=max(self.tile[1], min(self.tile[1]+1, lon))
if not self.frame.bkgd: # No undo for background image
newundo=UndoEntry(self.tile, UndoEntry.MOVE, [(self.placements[self.tile].index(poly), poly.clone())])
if not (self.undostack and self.undostack[-1].equals(newundo)):
self.undostack.append(newundo)
self.frame.toolbar.EnableTool(wx.ID_SAVE, True)
self.frame.toolbar.EnableTool(wx.ID_UNDO, True)
if self.frame.menubar:
self.frame.menubar.Enable(wx.ID_SAVE, True)
self.frame.menubar.Enable(wx.ID_UNDO, True)
if self.selectedhandle:
poly.updatehandle(self.selectednode, self.selectedhandle, event.CmdDown(), lat, lon, self.tile)
else:
poly.updatenode(self.selectednode, lat, lon, self.tile)
self.selectedlayoutpending = True # need to do full layout at some point
self.Refresh() # show updated node
self.frame.ShowSel()
return
elif self.clickmode==ClickModes.Drag:
# Continue move drag
(lat,lon)=self.getworldloc(event.GetX(), event.GetY())
if (lat>self.tile[0] and lat<self.tile[0]+1 and
lon>self.tile[1] and lon<self.tile[1]+1):
(oldlat,oldlon)=self.getworldloc(*self.mousenow)
self.movesel(lat-oldlat, lon-oldlon)
if not self.frame.bkgd: # No undo for background image
self.frame.toolbar.EnableTool(wx.ID_SAVE, True)
self.frame.toolbar.EnableTool(wx.ID_UNDO, True)
if self.frame.menubar:
self.frame.menubar.Enable(wx.ID_SAVE, True)
self.frame.menubar.Enable(wx.ID_UNDO, True)
elif self.clickmode==ClickModes.DragBox:
self.select()
self.mousenow=[event.GetX(),event.GetY()] # not known in paint
def OnPaint(self, event):
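        # Full redraw. Passes, in order: one-off occlusion query sanity check,
        # terrain mesh and networks (instance/vector VBOs), dynamic clutter
        # (polygons, draped, airport pavement), objects (instanced if
        # supported), overlays (selected nodes, labels, position cursor) and
        # finally 2D items (drag box, imagery attribution logo).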
if event: wx.PaintDC(self) # Tell the window system that we're on the case
if not self.doneinit:
self.glInit()
self.needrefresh=False
size = self.GetClientSize()
#print "pt", size
if size.width<=0: return # may be junk on startup
if wx.VERSION >= (2,9):
self.SetCurrent(self.context)
else:
self.SetCurrent()
if log_paint:
clock=clock2=time.clock()
glViewport(0, 0, *size)
vd=self.d*size.y/size.x
proj=array([[1.0/self.d,0,0,0], [0,1.0/vd,0,0], [0,0,(-1.0/30)/vd,0], [0,0,0,1]], float64) # glOrtho(-self.d, self.d, -vd, vd, -30*vd, 30*vd) # 30 ~= 1/sin(2), where 2 is minimal elevation angle
proj=dot(array([[1,0,0,0], [0,cos(radians(self.e)),sin(radians(self.e)),0], [0,-sin(radians(self.e)),cos(radians(self.e)),0], [0,0,0,1]], float64), proj) # glRotatef(self.e, 1.0,0.0,0.0)
proj=dot(array([[cos(radians(self.h)),0,-sin(radians(self.h)),0], [0,1,0,0], [sin(radians(self.h)),0,cos(radians(self.h)),0], [0,0,0,1]], float64), proj) # glRotatef(self.h, 0.0,1.0,0.0)
self.glstate.proj = dot(array([[1,0,0,0], [0,1,0,0], [0,0,1,0], [-self.x,-self.y,-self.z,1]], float64), proj) # glTranslatef(-self.x, -self.y, -self.z)
glLoadMatrixd(self.glstate.proj)
# Workaround for buggy ATI drivers: Check that occlusion queries actually work
if self.glstate.occlusion_query is None:
if not bool(glGenQueries):
if log_glstate: print 'occlusion_query:\t\tno'
self.glstate.occlusion_query=False
else:
self.glstate.occlusion_query=hasGLExtension('GL_ARB_occlusion_query2') and GL_ANY_SAMPLES_PASSED or GL_SAMPLES_PASSED
if self.glstate.occlusion_query:
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
self.glstate.set_texture(self.vertexcache.texcache.get(fallbacktexture)) # ATI drivers don't like 0
self.glstate.set_color(COL_WHITE) # Ensure colour indexing off
self.glstate.set_depthtest(False) # Draw even if occluded
self.glstate.set_poly(True) # Disable writing to depth buffer
self.glstate.set_cull(False)
self.glstate.alloc_queries(1)
glColorMask(GL_FALSE,GL_FALSE,GL_FALSE,GL_FALSE)
glBeginQuery(self.glstate.occlusion_query, self.glstate.queries[0])
glBegin(GL_QUADS)
glVertex3f( 100, 0, -100)
glVertex3f( 100, 0, 100)
glVertex3f(-100, 0, 100)
glVertex3f(-100, 0, -100)
glEnd()
glEndQuery(self.glstate.occlusion_query)
if not glGetQueryObjectuiv(self.glstate.queries[0], GL_QUERY_RESULT):
self.glstate.occlusion_query=False
if log_glstate: print 'occlusion_query:\t\tbroken'
else:
if log_glstate: print 'occlusion_query:\t\tyes (%s)' % (hasGLExtension('GL_ARB_occlusion_query2') and 'GL_ANY_SAMPLES_PASSED' or 'GL_SAMPLES_PASSED')
glColorMask(GL_TRUE,GL_TRUE,GL_TRUE,GL_TRUE)
# Ground terrain
if not self.valid:
# Sea
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
self.glstate.set_color((0.25, 0.25, 0.50))
if self.glstate.shaders:
self.glstate.set_texture(None) # ATI drivers don't like 0
else:
self.glstate.set_texture(0)
glBegin(GL_QUADS)
glVertex3f( onedeg*cos(radians(1+self.tile[0]))/2, 0, -onedeg/2)
glVertex3f( onedeg*cos(radians(self.tile[0]))/2, 0, onedeg/2)
glVertex3f(-onedeg*cos(radians(self.tile[0]))/2, 0, onedeg/2)
glVertex3f(-onedeg*cos(radians(1+self.tile[0]))/2, 0, -onedeg/2)
glEnd()
self.SwapBuffers()
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
self.needclear=False
self.frame.canvas.SetFocus() # under wxGTK need to manually set focus on startup
return
elif self.needclear:
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
self.needclear=False
# Map imagery & background
has_imagery = self.imagery.placements(self.d, size) # allocate into dynamic VBO
if log_paint:
print "%6.3f time to get imagery" % (time.clock()-clock2)
clock2=time.clock()
if self.background and self.background.islaidout():
has_imagery = True
if self.frame.bkgd:
self.selected=set([self.background]) # Override selection while dialog is open
elif self.background in self.selected:
self.selected=set()
elif self.frame.bkgd:
self.selected=set()
# Mesh and Nets
self.glstate.set_instance(self.vertexcache)
self.glstate.set_color(COL_UNPAINTED)
self.glstate.set_cull(True)
self.glstate.set_poly(False)
if __debug__:
elev = self.vertexcache.getElevationMesh(self.tile)
if debugapt: glPolygonMode(GL_FRONT, GL_LINE)
if debugapt and hasattr(elev, 'divwidth'):
# show elevation mesh buckets
self.glstate.set_depthtest(False)
self.glstate.set_texture(None)
self.glstate.set_color(COL_DRAGBOX)
glBegin(GL_LINES)
for i in range(-ElevationMesh.DIVISIONS/2, ElevationMesh.DIVISIONS/2+1):
glVertex3f( (ElevationMesh.DIVISIONS/2) * elev.divwidth, 0, i * elev.divheight)
glVertex3f(-(ElevationMesh.DIVISIONS/2) * elev.divwidth, 0, i * elev.divheight)
for j in range(-ElevationMesh.DIVISIONS/2, ElevationMesh.DIVISIONS/2+1):
glVertex3f(j * elev.divwidth, 0, (ElevationMesh.DIVISIONS/2) * elev.divheight)
glVertex3f(j * elev.divwidth, 0, -(ElevationMesh.DIVISIONS/2) * elev.divheight)
glEnd()
self.glstate.set_color(COL_SELECTED)
glBegin(GL_LINE_LOOP)
glVertex3f((self.ej-ElevationMesh.DIVISIONS/2) *elev.divwidth, 0, (ElevationMesh.DIVISIONS/2 -self.ei)*elev.divheight)
glVertex3f((self.ej-ElevationMesh.DIVISIONS/2+1)*elev.divwidth, 0, (ElevationMesh.DIVISIONS/2 -self.ei)*elev.divheight)
glVertex3f((self.ej-ElevationMesh.DIVISIONS/2+1)*elev.divwidth, 0, (ElevationMesh.DIVISIONS/2-1-self.ei)*elev.divheight)
glVertex3f((self.ej-ElevationMesh.DIVISIONS/2) *elev.divwidth, 0, (ElevationMesh.DIVISIONS/2-1-self.ei)*elev.divheight)
glEnd()
self.glstate.set_color(COL_UNPAINTED)
glBegin(GL_TRIANGLES)
print (self.ei,self.ej), len(elev.buckets[self.ei][self.ej]), 'tris'
for tri in elev.tris[elev.buckets[self.ei][self.ej]]:
glVertex3fv(tri['p1'])
glVertex3fv(tri['p2'])
glVertex3fv(tri['p3'])
glEnd()
self.glstate.set_depthtest(True)
self.glstate.set_texture(0) # texture shader
if self.glstate.shaders:
if not prefs.options&Prefs.ELEVATION:
glUniform4f(self.glstate.transform_pos, 0, -1, 0, 0) # Defeat elevation data
else:
glUniform4f(self.glstate.transform_pos, *zeros(4,float32))
else:
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
if not self.options&Prefs.ELEVATION:
glScalef(1,0,1) # Defeat elevation data:
(mesh, netindices) = self.vertexcache.getMesh(self.tile)
for (base,number,texno) in mesh:
self.glstate.set_texture(texno)
glDrawArrays(GL_TRIANGLES, base, number)
if not self.options&Prefs.ELEVATION:
if self.glstate.shaders:
glUniform4f(self.glstate.transform_pos, *zeros(4,float32))
else:
glLoadIdentity()
if __debug__:
if debugapt: glPolygonMode(GL_FRONT, GL_FILL)
if netindices is not None:
(base, count) = netindices
self.glstate.set_vector(self.vertexcache)
self.glstate.set_texture(None)
self.glstate.set_color(None)
self.glstate.set_depthtest(False) # Need line to appear over terrain
glDrawElements(GL_LINES, count, GL_UNSIGNED_INT, self.glstate.vector_indices_vbo + base*4)
if log_paint:
print "%6.3f time to draw mesh" % (time.clock()-clock2)
clock2=time.clock()
# Objects and Polygons
placements=self.placements[self.tile]
navaidplacements=self.navaidplacements[self.tile]
self.glstate.set_dynamic(self.vertexcache) # realize
# Draw clutter with dynamic geometry
self.vertexcache.buckets.draw(self.glstate, self.frame.bkgd and set() or self.selected, self.aptdata, has_imagery, prefs.imageryopacity) # background image is always drawn with its own opacity setting
if log_paint:
print "%6.3f time to draw dynamic" % (time.clock()-clock2)
clock2=time.clock()
# Draw clutter with static geometry (ignoring layer ordering since it doesn't really matter so much for Objects)
self.glstate.set_instance(self.vertexcache)
self.glstate.set_poly(False)
self.glstate.set_depthtest(True)
if self.glstate.instanced_arrays:
self.glstate.set_color(COL_UNPAINTED)
self.glstate.set_texture(0) # has side-effect so shader program won't be reset
glUseProgram(self.glstate.instancedshader)
glEnableVertexAttribArray(self.glstate.instanced_transform_pos)
glVertexAttribDivisorARB(self.glstate.instanced_transform_pos, 1)
assert type(self.selected)==set
selected = self.selected.copy()
if selected:
glEnableVertexAttribArray(self.glstate.instanced_selected_pos)
glVertexAttribDivisorARB(self.glstate.instanced_selected_pos, 1)
for o in self.selected: selected.update(o.placements) # include children
else:
glVertexAttrib1f(self.glstate.instanced_selected_pos, 0)
for objdef in self.defs.values(): # benefit of sorting by texture would be marginal
objdef.draw_instanced(self.glstate, selected)
glDisableVertexAttribArray(self.glstate.instanced_transform_pos)
glDisableVertexAttribArray(self.glstate.instanced_selected_pos)
else:
# Instancing not supported
self.glstate.set_texture(0) # load texture shader
selected = self.selected.copy()
if selected:
for o in self.selected: selected.update(o.placements) # include children
for objdef in self.defs.values(): # benefit of sorting by texture would be marginal
objdef.draw_instanced(self.glstate, selected)
if self.glstate.shaders:
glUniform4f(self.glstate.transform_pos, *zeros(4,float32)) # drawing Objects alters the matrix
else:
glLoadIdentity() # Drawing Objects alters the matrix
glMatrixMode(GL_PROJECTION)
if log_paint:
print "%6.3f time to draw static" % (time.clock()-clock2)
clock2=time.clock()
# Overlays
self.glstate.set_texture(None) # resets shader
self.glstate.set_depthtest(False)
self.glstate.set_poly(True)
# Selected nodes - very last so overwrites everything
if len(self.selected)==1:
# don't bother setting VBO since this is done immediate
list(self.selected)[0].draw_nodes(self.glstate, self.selectednode)
# labels
if self.codeslist and self.d>2000: # arbitrary
#if __debug__: print "labels"
glCallList(self.codeslist)
# Position centre
#if __debug__: print "cursor"
self.glstate.set_color(COL_CURSOR)
glTranslatef(self.x, self.y, self.z)
glBegin(GL_LINES)
glVertex3f(-0.5,0,0)
glVertex3f( 0.5,0,0)
glVertex3f(0,0,-0.5)
glVertex3f(0,0, 0.5)
glVertex3f(0,0,-0.5)
glVertex3f(0.125,0,-0.375)
glVertex3f(0,0,-0.5)
glVertex3f(-0.125,0,-0.375)
glEnd()
# 2D stuff
if self.clickmode==ClickModes.DragBox or self.imagery.provider_logo:
glLoadIdentity()
# drag box
if self.clickmode==ClickModes.DragBox:
if __debug__: print "drag"
self.glstate.set_color(COL_DRAGBOX)
x0=float(self.clickpos[0]*2)/size.x-1
y0=1-float(self.clickpos[1]*2)/size.y
x1=float(self.mousenow[0]*2)/size.x-1
y1=1-float(self.mousenow[1]*2)/size.y
glBegin(GL_LINE_LOOP)
glVertex3f(x0, y0, -0.9)
glVertex3f(x0, y1, -0.9)
glVertex3f(x1, y1, -0.9)
glVertex3f(x1, y0, -0.9)
glEnd()
# imagery attribution
if self.imagery.provider_logo:
(filename,width,height)=self.imagery.provider_logo
self.glstate.set_color(COL_UNPAINTED)
self.glstate.set_texture(self.vertexcache.texcache.get(filename,wrap=False,fixsize=True))
glBegin(GL_QUADS)
glTexCoord2f(0,0)
glVertex3f(-1,-1, -0.9)
glTexCoord2f(0,1)
glVertex3f(-1,-1+height*2.0/size.y, -0.9)
glTexCoord2f(1,1)
glVertex3f(-1+width*2.0/size.x,-1+height*2.0/size.y, -0.9)
glTexCoord2f(1,0)
glVertex3f(-1+width*2.0/size.x,-1, -0.9)
glEnd()
glLoadMatrixd(self.glstate.proj) # Restore state for unproject
self.glstate.set_poly(False)
if log_paint: print "%6.3f time in OnPaint" % (time.clock()-clock)
# Display
self.SwapBuffers()
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
self.needclear=False
def select(self):
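        # Work out what is under the click point or drag box:
        # - if a single Polygon is selected, try its nodes/handles first
        # - otherwise draw each candidate placement under a pick projection and
        #   test hits with occlusion queries (or GL_SELECT on older drivers)
        # - then update self.selected using the Ctrl / drag-box / click-cycling
        #   semantics below.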
if __debug__: print "sel"
#if not self.currentobjects():
# self.selections=[] # Can't remember
if __debug__: clock=time.clock()
size = self.GetClientSize()
if self.clickmode==ClickModes.DragBox:
proj = self.glstate.pickmatrix((self.clickpos[0]+self.mousenow[0])/2, size[1]-1-(self.clickpos[1]+self.mousenow[1])/2,
abs(self.clickpos[0]-self.mousenow[0]), abs(self.clickpos[1]-self.mousenow[1]), *size)
else: # at point
proj = self.glstate.pickmatrix(self.clickpos[0], size[1]-1-self.clickpos[1], 5,5, *size)
if self.frame.bkgd: # Don't allow selection of other objects while background dialog is open
if self.background and self.background.islaidout():
placements=[self.background]
else:
placements=[]
else:
placements=self.placements[self.tile]
# Select poly node?
if (self.clickmode==ClickModes.Undecided and len(self.selected)==1 and isinstance(list(self.selected)[0], Polygon)):
selected = list(self.selected)[0].pick_nodes(proj, True)
if selected:
# No need to look further if user has clicked on a node or handle within selected polygon
(self.selectednode, self.selectedhandle) = selected
self.clickmode = ClickModes.DragNode
self.selectedlayoutpending = False
if __debug__: print "%6.3f time in select" %(time.clock()-clock)
self.frame.ShowSel()
return
self.selectednode = self.selectedhandle = None
# Select placements
glLoadMatrixd(proj)
self.glstate.set_texture(self.vertexcache.texcache.get(fallbacktexture)) # textured shader throughout. ATI drivers don't like 0
self.glstate.set_color(COL_WHITE) # Ensure colour indexing off
self.glstate.set_depthtest(False) # Make selectable even if occluded
self.glstate.set_poly(True) # Disable writing to depth buffer
self.glstate.set_cull(False) # Enable selection of "invisible" faces
selections = set()
lookup = []
objdefs = (not self.locked&Locked.OBJ) and [objdef for objdef in self.defs.values() if isinstance(objdef,ObjectDef)] or []
if self.glstate.occlusion_query:
needed = sum([len(objdef.instances) for objdef in objdefs]) + len(placements) # upper limit
self.glstate.alloc_queries(needed)
glColorMask(GL_FALSE,GL_FALSE,GL_FALSE,GL_FALSE) # Don't want to update frame buffer either
else:
glSelectBuffer(len(placements)*8) # Twice as many for two-phase drawing
glRenderMode(GL_SELECT)
glInitNames()
glPushName(0)
self.glstate.set_instance(self.vertexcache)
if not self.glstate.shaders:
glMatrixMode(GL_MODELVIEW)
for objdef in objdefs:
objdef.pick_instanced(self.glstate, proj, selections, lookup)
if self.glstate.shaders:
glUniform4f(self.glstate.transform_pos, *zeros(4,float32)) # drawing Objects alters the matrix
else:
glLoadIdentity() # Drawing Objects alters the matrix
glMatrixMode(GL_PROJECTION)
if __debug__: print "%6.3f time to issue instance" %(time.clock()-clock)
self.glstate.set_dynamic(self.vertexcache)
for p in placements:
if not p.definition.type & self.locked:
p.pick_dynamic(self.glstate, lookup)
if __debug__: print "%6.3f time to issue dynamic" %(time.clock()-clock)
# Now check for selections
if self.glstate.occlusion_query:
for k in range(len(lookup)):
if glGetQueryObjectuiv(self.glstate.queries[k], GL_QUERY_RESULT):
selections.add(lookup[k])
glColorMask(GL_TRUE,GL_TRUE,GL_TRUE,GL_TRUE)
else:
try:
for min_depth, max_depth, (name,) in glRenderMode(GL_RENDER):
selections.add(lookup[int(name)])
except: # overflow
if __debug__: print_exc()
if __debug__: print "%6.3f time to check queries" %(time.clock()-clock)
# promote children and filter out navaids
selections = set([placement.parent or placement for placement in selections]).difference(self.navaidplacements[self.tile])
if self.frame.bkgd: # Don't allow selection of other objects while background dialog is open
if self.clickmode==ClickModes.Drag or self.background in selections:
self.clickmode=ClickModes.Drag
else:
self.clickmode=ClickModes.DragBox # Can't leave as ClickModes.Undecided
self.Refresh()
self.frame.ShowSel()
return
if self.clickmode==ClickModes.DragBox: # drag box - add or remove all selections
if self.clickctrl:
self.selected=self.selectsaved.copy() # reset each time
for i in selections:
if i in self.selected:
self.selected.remove(i)
else:
self.selected.add(i)
else:
self.selected=selections.copy()
else: # click - Add or remove one
if not selections:
# Start drag box
self.clickmode=ClickModes.DragBox
self.selectsaved=self.selected
else:
self.clickmode=ClickModes.Drag
if self.clickctrl:
for i in selections:
if i not in self.selected:
self.selected.add(i)
break
else: # all selected - remove one
for i in self.selected:
if i in selections:
self.selected.remove(i)
break
else:
if not selections:
self.selected=set()
elif selections==self.selections and len(self.selected)==1 and list(self.selected)[0] in self.selections:
# cycle through selections by improvising an ordering on the set
ordered=list(selections)
idx=ordered.index(list(self.selected)[0])
self.selected=set([ordered[(idx+1)%len(ordered)]])
else:
self.selected=set(list(selections)[:1])
self.selections=selections
if __debug__: print "%6.3f time in select" %(time.clock()-clock)
self.Refresh()
self.frame.ShowSel()
def latlon2m(self, lat, lon):
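        # lat/lon to local OpenGL (x,z) metres relative to the tile centre -
        # equirectangular approximation, east-west scaled by cos(lat).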
return(((lon-self.centre[1])*onedeg*cos(radians(lat)),
(self.centre[0]-lat)*onedeg))
# create the background polygon. Defer layout to goto since can be called in reload() before location is known.
def setbackground(self, prefs, loc=None, newfile=None, layoutnow=False):
if newfile is None and prefs.package in prefs.packageprops:
backgroundfile=prefs.packageprops[prefs.package][0]
else:
backgroundfile=newfile # may be '' if just cleared
if not backgroundfile:
if self.background: self.background.clearlayout()
self.background=None
prefs.packageprops.pop(prefs.package,False)
return
elif not self.background or self.background.name!=backgroundfile:
if backgroundfile[0]==curdir:
backgroundfile = join(glob(join(prefs.xplane,gcustom,prefs.package))[0], backgroundfile[2:])
try:
texture = self.vertexcache.texcache.get(backgroundfile, wrap=False, downsample=False, fixsize=True)
except:
if __debug__: print_exc()
texture=self.vertexcache.texcache.get(fallbacktexture)
if not self.background:
if prefs.package in prefs.packageprops:
p=prefs.packageprops[prefs.package][1:]
if len(p)==6:
# convert from pre 2.15 setting
(lat, lon, hdg, width, length, opacity)=p
x=width/(2*onedeg*cos(radians(lat)))
z=length/(2*onedeg)
l=hypot(x,z)
nodes=[(lon-x,lat-z),(lon+x,lat-z),(lon+x,lat+z),(lon-x,lat+z)]
if hdg:
for j in range(len(nodes)):
h=atan2(nodes[j][0]-lon, nodes[j][1]-lat)+radians(hdg)
l=hypot(nodes[j][0]-lon, nodes[j][1]-lat)
nodes[j] = Node([lon+sin(h)*l, lat+cos(h)*l])
else:
nodes = [Node([p[i], p[i+1]]) for i in range(0, len(p), 2)]
self.background=DrapedImage('*background', 65535, [nodes])
else: # new
try: # georeferenced
from osgeo import osr, gdal
ds = gdal.Open(backgroundfile.encode('utf8')) # Needs to be utf8 for old versions on Linux!
gt = ds.GetGeoTransform()
width, height = ds.RasterXSize, ds.RasterYSize
old_cs= osr.SpatialReference()
old_cs.ImportFromWkt(ds.GetProjectionRef())
new_cs = osr.SpatialReference()
new_cs.SetWellKnownGeogCS('WGS84')
tr = osr.CoordinateTransformation(old_cs,new_cs)
nodes = [Node(tr.TransformPoint(gt[0], gt[3] + width*gt[4] + height*gt[5])),
Node(tr.TransformPoint(gt[0] + width*gt[1] + height*gt[2], gt[3] + width*gt[4] + height*gt[5])),
Node(tr.TransformPoint(gt[0] + width*gt[1] + height*gt[2], gt[3] )),
Node(tr.TransformPoint(gt[0], gt[3] ))]
self.background=DrapedImage('*background', 65535, [nodes])
except:
if __debug__: print_exc()
self.background=DrapedImage('*background', 65535, loc[0], loc[1], self.d, self.h)
self.background.load(self.lookup, self.defs, self.vertexcache)
self.background.definition.layer = ClutterDef.IMAGEFILELAYER
self.background.singlewinding = self.background.fixednodes = True
self.background.canbezier = False
for i in range(len(self.background.nodes[0])):
self.background.nodes[0][i].rest = [(i+1)/2%2,i/2] # assign UVs
if self.background.name!=backgroundfile:
self.background.name=backgroundfile
self.background.definition.texture = texture
self.background.flush() # force layout with new texture
if layoutnow:
self.background.layout(self.tile)
def add(self, name, lat, lon, hdg, size):
# Add new clutter
if not name:
return False
texerr=None
ext = splitext(name)[1].lower()
try:
if not ext and not name.startswith(PolygonDef.EXCLUDE):
raise # probably a space in a library export statement
elif ext in [ObjectDef.OBJECT, AutoGenPointDef.AGP]:
placement = Object.factory(name, lat, lon, hdg)
else:
placement = Polygon.factory(name, None, lat, lon, size, hdg)
except UnicodeError:
if __debug__: print_exc()
myMessageBox('Filename "%s" uses non-ASCII characters' % name, 'Cannot add this object.', wx.ICON_ERROR|wx.OK, self.frame)
return False
except:
if __debug__: print_exc()
myMessageBox("Can't read " + name, 'Cannot add this object.', wx.ICON_ERROR|wx.OK, self.frame)
return False
if __debug__: print "add", placement
if not placement.load(self.lookup, self.defs, self.vertexcache):
myMessageBox("Can't read " + name, 'Cannot add this object.', wx.ICON_ERROR|wx.OK, self.frame)
return False
texerr=placement.definition.texerr
placement.definition.texerr=None # Don't report again
# Hack! Can only decide nature of new draped polygon once we've loaded its definition
if isinstance(placement, Draped):
if placement.definition.ortho:
placement.param = 65535
placement.singlewinding = placement.fixednodes = True
placement.canbezier = False
for i in range(4):
placement.nodes[0][i].rest = [(i+1)/2%2, i/2] # ST coords
else:
placement.canbezier = True
placement.layout(self.tile)
placements=self.placements[self.tile]
self.undostack.append(UndoEntry(self.tile, UndoEntry.ADD, [(len(placements), placement)]))
placements.append(placement)
self.selected=set([placement])
self.selectednode=None
self.Refresh()
self.frame.ShowSel()
if texerr:
myMessageBox(u"%s: %s" % texerr, "Can't read texture.", wx.ICON_INFORMATION|wx.OK, self.frame)
return True
def addnode(self, name, lat, lon, hdg, size):
# Add new node/winding
if len(self.selected)!=1 or not isinstance(list(self.selected)[0], Polygon) or self.frame.bkgd: # No additional nodes for background image
return False
placement=list(self.selected)[0]
newundo=UndoEntry(self.tile, UndoEntry.MODIFY, [(self.placements[self.tile].index(placement), placement.clone())])
if self.selectednode:
newnode=placement.addnode(self.tile, self.selectednode, lat, lon)
else:
newnode=placement.addwinding(self.tile, size, hdg)
if not newnode:
return False
self.undostack.append(newundo)
if not self.selectednode:
self.selected=set([placement])
self.selectednode=newnode
self.Refresh()
self.frame.ShowSel()
return True
def togglebezier(self):
# Add new node/winding
if len(self.selected)!=1 or not self.selectednode: return False
placement = list(self.selected)[0]
newundo = UndoEntry(self.tile, UndoEntry.MODIFY, [(self.placements[self.tile].index(placement), placement.clone())])
newnode = placement.togglebezier(self.tile, self.selectednode)
if not newnode:
return False
else:
self.selectednode = newnode
if not (self.undostack and self.undostack[-1].equals(newundo)):
self.undostack.append(newundo)
self.Refresh()
self.frame.ShowSel()
return True
def movesel(self, dlat, dlon, dhdg=0, dparam=0, loc=None):
# returns True if changed something
if not self.selected: return False
if self.selectednode:
placement=list(self.selected)[0]
if not self.frame.bkgd: # No undo for background image
newundo=UndoEntry(self.tile, UndoEntry.MOVE, [(self.placements[self.tile].index(placement), placement.clone())])
if not (self.undostack and self.undostack[-1].equals(newundo)):
self.undostack.append(newundo)
if __debug__: clock2 = time.clock()
self.selectednode=placement.movenode(self.selectednode, dlat, dlon, dparam, self.tile, False)
if __debug__: print "%6.3f time to layout %s" % (time.clock()-clock2, placement.name)
assert self.selectednode
else:
moved=[]
placements=self.placements[self.tile]
for placement in self.selected:
if not self.frame.bkgd: # No undo for background image
moved.append((placements.index(placement), placement.clone()))
if __debug__: clock2 = time.clock()
placement.move(dlat, dlon, dhdg, dparam, loc, self.tile)
if __debug__: print "%6.3f time to layout %s" % (time.clock()-clock2, placement.name)
if moved:
newundo=UndoEntry(self.tile, UndoEntry.MOVE, moved)
if not (self.undostack and self.undostack[-1].equals(newundo)):
self.undostack.append(newundo)
self.Refresh()
self.frame.ShowSel()
return True
def delsel(self, shift):
# returns True if deleted something
if not self.selected:
return False
elif self.frame.bkgd:
if self.selectednode:
return False
else:
self.frame.bkgd.OnClear(None) # Yuck!
elif self.selectednode:
# Delete node/winding
placement=list(self.selected)[0]
newundo=UndoEntry(self.tile, UndoEntry.MODIFY, [(self.placements[self.tile].index(placement), placement.clone())])
if shift:
newnode=placement.delwinding(self.tile, self.selectednode)
else:
newnode=placement.delnode(self.tile, self.selectednode)
if newnode:
self.undostack.append(newundo)
self.selectednode=newnode
assert self.selectednode
else:
deleted=[]
placements=self.placements[self.tile]
for placement in self.selected:
placement.clearlayout() # no point taking up space in vbo
i=placements.index(placement)
deleted.insert(0, (i, placement)) # LIFO
placements.pop(i)
self.undostack.append(UndoEntry(self.tile, UndoEntry.DEL, deleted))
self.selected=set()
self.Refresh()
self.frame.ShowSel()
return True
def copysel(self):
if self.selectednode or not self.selected: return None # can't copy and paste nodes
self.clipboard = set()
avlat = sum([placement.location()[0] for placement in self.selected]) / len(self.selected)
avlon = sum([placement.location()[1] for placement in self.selected]) / len(self.selected)
for placement in self.selected:
# Centre copied objects relative to average location
self.clipboard.add(placement.copy(avlat, avlon))
return (avlat,avlon)
def paste(self, lat, lon):
if not self.clipboard: return None
newplacements = []
for placement in self.clipboard:
clone = placement.clone()
clone.load(self.lookup, self.defs, self.vertexcache, True)
clone.move(lat, lon, 0, 0, None, self.tile)
newplacements.append(clone)
placements = self.placements[self.tile]
self.undostack.append(UndoEntry(self.tile, UndoEntry.ADD, [(len(placements), placement) for placement in newplacements]))
placements.extend(newplacements)
self.selected = set(newplacements)
self.selectednode = None
self.Refresh()
self.frame.ShowSel()
return (lat,lon)
def importregion(self, dsfdirs, netdefs):
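        # Import scenery into the selected exclusion rectangle: look through
        # matching DSFs (custom packages first, global scenery last) and copy
        # in placements of this Exclude's type from the first readable DSF.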
if len(self.selected)!=1 or not isinstance(list(self.selected)[0], Exclude):
return False
exc = list(self.selected)[0]
bbox = BBox()
for node in exc.nodes[0]:
bbox.include(node.lon, node.lat)
dsfs = []
for path in dsfdirs:
if not glob(path): continue
pathlen=len(glob(path)[0])+1
thisdsfs=glob(join(path, '*', gnavdata, "%+02d0%+03d0" % (int(self.tile[0]/10), int(self.tile[1]/10)), "%+03d%+04d.[dD][sS][fF]" % (self.tile[0], self.tile[1])))
# asciibetical, except global is last
thisdsfs.sort(lambda x,y: ((x[pathlen:].lower().startswith('-global ') and 1) or
(y[pathlen:].lower().startswith('-global ') and -1) or
cmp(x,y)))
dsfs += thisdsfs
gc.disable() # work round http://bugs.python.org/issue4074 on Python<2.7
for dsf in dsfs:
try:
(lat, lon, newplacements, nets, mesh) = readDSF(dsf, netdefs, {}, bbox, Exclude.TYPES[exc.name])
for placement in newplacements:
placement.load(self.lookup, self.defs, self.vertexcache, True)
placement.layout(self.tile)
if newplacements:
placements = self.placements[self.tile]
self.undostack.append(UndoEntry(self.tile, UndoEntry.ADD, [(len(placements), placement) for placement in newplacements]))
placements.extend(newplacements)
self.selected = set(newplacements)
self.selectednode = None
self.Refresh()
gc.enable()
return True
except:
if __debug__: print_exc()
gc.enable()
return False
def undo(self):
# returns new location
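        # ADD    - remove the placements that were added
        # DEL    - re-load and re-insert the placements that were deleted
        # MOVE/MODIFY - replace the current placements with the saved clones
        # SPLIT  - restore the original network segment and drop the added half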
if not self.undostack: return False # can't happen
undo=self.undostack.pop()
self.goto(undo.tile) # force assignment of placements to layers
avlat=0
avlon=0
self.selected=set()
self.selectednode=None
placements=self.placements[undo.tile]
if undo.kind==UndoEntry.ADD:
for (i, placement) in undo.data:
placement.clearlayout()
placements.pop(i) # assumes all were added at the same index
avlat+=placement.lat
avlon+=placement.lon
elif undo.kind==UndoEntry.DEL:
for (i, placement) in undo.data:
placement.load(self.lookup, self.defs, self.vertexcache, True)
placement.layout(undo.tile)
placements.insert(i, placement)
avlat+=placement.lat
avlon+=placement.lon
self.selected.add(placement)
elif undo.kind in [UndoEntry.MOVE, UndoEntry.MODIFY]:
for (i, placement) in undo.data:
placement.load(self.lookup, self.defs, self.vertexcache, True)
placement.layout(undo.tile)
placements[i].clearlayout()
placements[i]=placement
avlat+=placement.lat
avlon+=placement.lon
self.selected.add(placement)
elif undo.kind==UndoEntry.SPLIT:
# SPLIT is like a MOVE and ADD
assert len(undo.data)==2
(i, placement) = undo.data[0]
placement.load(self.lookup, self.defs, self.vertexcache, True)
placement.layout(undo.tile)
placements[i].clearlayout()
placements[i] = placement
avlat = placement.lat*2 # Hack!
avlon = placement.lon*2
self.selected.add(placement)
(i, placement) = undo.data[1]
            placement.clearlayout()
placements.pop(i)
else:
assert False, undo.kind
avlat/=len(undo.data)
avlon/=len(undo.data)
self.goto((avlat,avlon))
return (avlat,avlon)
def clearsel(self):
if self.selected:
self.Refresh()
self.selected=set()
self.selectednode=None
def allsel(self, withctrl):
# fake up mouse drag
self.clickmode=ClickModes.DragBox
self.clickpos=[0,0]
self.clickctrl=withctrl
size=self.GetClientSize()
self.mousenow=[size.x-1,size.y-1]
self.select()
self.clickmode=None
self.clickpos=None
def nextsel(self, name, withctrl, withshift):
# returns new location or None
# we have 0 or more items of the same type selected
if name.startswith(PolygonDef.EXCLUDE) or not self.lookup[name].file in self.defs:
return None # Don't have an easy way of mapping to an ExcludeDef. Placement can't exist in this tile if not loaded.
definition=self.defs[self.lookup[name].file]
placements=self.placements[self.tile]
if withctrl and withshift:
self.selected=set()
for placement in placements:
if placement.definition==definition:
self.selected.add(placement)
if not self.selected: return None
placement=list(self.selected)[0] # for position
elif withctrl:
for placement in placements:
if placement.definition==definition and placement not in self.selected:
self.selected.add(placement)
break
else:
return None
else:
start=-1
for placement in self.selected:
start=max(start,placements.index(placement))
for i in range(start+1, len(placements)+start+1):
placement=placements[i%len(placements)]
if placement.definition==definition:
self.selected=set([placement])
break
else:
return None
self.selectednode=None
self.frame.ShowSel()
return (placement.lat, placement.lon)
def getsel(self, dms=0, imp=0):
# return current selection, or average: ([names], location_string, lat, lon, object_hdg)
if not self.selected: return ([], '', None, None, None)
if self.selectednode:
placement=list(self.selected)[0]
(i,j)=self.selectednode
return ([placement.name], placement.locationstr(dms, imp, self.selectednode), placement.nodes[i][j].lat, placement.nodes[i][j].lon, None)
elif len(self.selected)==1:
placement=list(self.selected)[0]
if isinstance(placement, Polygon):
return ([placement.name], placement.locationstr(dms, imp), placement.lat, placement.lon, None)
else:
return ([placement.name], placement.locationstr(dms, imp), placement.lat, placement.lon, placement.hdg)
else:
lat=lon=0
names=[]
for placement in self.selected:
names.append(placement.name)
(tlat,tlon)=placement.location()
lat+=tlat
lon+=tlon
lat/=len(self.selected)
lon/=len(self.selected)
return (names, "%s (%d objects)" % (Clutter.latlondisp(dms, lat, lon), len(self.selected)), lat, lon, None)
def getheight(self):
# return current height
return self.y
def reload(self, prefs, airports, navaids, aptdatfile, netdefs, netfile, lookup, placements, terrain, dsfdirs):
self.valid=False
self.airports=airports # [runways] by code
self.runways={} # need to re-layout airports
self.navaids=navaids
self.navaidplacements={} # need to re-layout navaids
self.aptdatfile=aptdatfile
self.netdefs=netdefs
self.netfile=netfile # logical name of .net file used
self.codes={} # need to re-layout airports
if self.codeslist: glDeleteLists(self.codeslist, 1)
self.codeslist=0
self.lookup=lookup
self.defs={}
self.vertexcache.reset(terrain, dsfdirs)
self.imagery.reset()
self.tile=(0,999) # force reload on next goto
if not self.doneinit:
self.glInit() # would have expected an OnPaint first, but do this in case we haven't
# load networks - have to do this every reload since texcache has been reset
if netdefs:
for netdef in self.netdefs.values():
self.defs[netdef.name] = NetworkDef(netdef, self.vertexcache, self.lookup, self.defs)
if placements!=None:
self.unsorted = placements
else:
# invalidate all allocations (note: navaids just get trashed and re-loaded as required)
self.unsorted = self.placements
for placements in self.placements.values():
for placement in placements:
placement.clearlayout()
self.placements={}
self.background=None # Force reload of texture in next line
self.setbackground(prefs)
self.clipboard = set() # layers might have changed
self.undostack=[] # layers might have changed
self.selected=set() # may not have same indices in new list
self.selectednode=None
self.locked=0 # reset locked on loading new
if __debug__:
print "Frame:\t%s" % self.frame.GetId()
print "Toolb:\t%s" % self.frame.toolbar.GetId()
print "Parent:\t%s" % self.parent.GetId()
print "Split:\t%s" % self.frame.splitter.GetId()
print "MyGL:\t%s" % self.GetId()
print "Palett:\t%s" % self.frame.palette.GetId()
if 'GetChoiceCtrl' in dir(self.frame.palette):
print "Choice:\t%s" %self.frame.palette.GetChoiceCtrl().GetId()
def goto(self, latlon, hdg=None, elev=None, dist=None):
# if __debug__: print "goto", latlon
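        # Move the view. If the 1x1 degree tile changed (or the redraw
        # preference was toggled) also reload: flush VBOs, load the terrain
        # mesh, (re)lay out placements, airport pavement, navaids and labels,
        # and reposition the background image and imagery.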
if not self.valid: return # Hack: can get spurious events on Mac during startup (progress dialogs aren't truly modal)
errdsf=None
errobjs=[]
errtexs=[]
newtile=(int(floor(latlon[0])),int(floor(latlon[1]))) # (lat,lon) of SW corner
self.centre=[newtile[0]+0.5, newtile[1]+0.5]
(self.x, self.z)=self.latlon2m(latlon[0],latlon[1])
if hdg!=None: self.h=hdg
if elev!=None: self.e=elev
if dist!=None: self.d=dist
if newtile!=self.tile or prefs.options&Prefs.REDRAW!=self.options&Prefs.REDRAW:
if newtile!=self.tile:
self.selected=set()
self.selectednode=None
self.frame.ShowSel()
self.valid=False
self.tile=newtile
self.vertexcache.flush() # clear VBOs
# forget all instance VBO allocations
for Def in self.defs.values(): Def.flush()
self.selections=set()
progress=wx.ProgressDialog('Loading', 'Terrain', 16, self.frame)
progress.SetSize
self.vertexcache.loadMesh(newtile, self.netdefs)
progress.Update(1, 'Terrain textures')
try:
self.vertexcache.getMesh(newtile) # allocates into VBO
except EnvironmentError, e:
if __debug__: print_exc()
if e.filename:
errdsf=u"%s: %s" % (e.filename, e.strerror)
else:
errdsf=unicode(e)
self.vertexcache.loadFallbackMesh(newtile)
self.vertexcache.getMesh(newtile)
except:
if __debug__: print_exc()
errdsf=unicode(exc_info()[1])
self.vertexcache.loadFallbackMesh(newtile)
self.vertexcache.getMesh(newtile)
progress.Update(2, 'Mesh')
if prefs.options&Prefs.ELEVATION!=self.options&Prefs.ELEVATION:
                # Elevation preference changed - clear all layout (other than airports)
for placements in self.placements.values() + self.navaidplacements.values():
for placement in placements:
placement.clearlayout()
else:
# Just a new tile - forget all dynamic VBO allocation
for placements in self.placements.values() + self.navaidplacements.values():
for placement in placements:
placement.flush()
# load placements
progress.Update(3, 'Objects')
if newtile in self.unsorted:
if log_load: clock=time.clock() # Processor time
placements = self.placements[newtile] = self.unsorted.pop(newtile)
# Limit progress dialog to 10 updates
p=len(placements)/10+1
n=0
i=0
for i in range(len(placements)):
if i==n:
progress.Update(3+i/p, 'Objects')
n+=p
placement=placements[i]
# Silently correct virtual names' cases
if placement.name not in self.lookup:
for existing in self.lookup.keys():
if placement.name.lower()==existing.lower():
placement.name=existing
break
#if __debug__: print placement.name
if not placement.load(self.lookup, self.defs, self.vertexcache, True) and placement.name not in errobjs:
if __debug__: print "Bad", placement.name
errobjs.append(placement.name)
self.frame.palette.markbad(placement.name)
if placement.definition.texerr:
s=u"%s: %s" % placement.definition.texerr
if not s in errtexs: errtexs.append(s)
if not placement.islaidout():
if __debug__: clock2 = time.clock()
placement.layout(newtile)
if __debug__: print "%6.3f time to layout %s" % (time.clock()-clock2, placement.name)
if log_load: print "%6.3f time in load&layout" % (time.clock()-clock)
elif newtile not in self.placements:
self.placements[newtile]=[]
else:
# This tile has been previously viewed - placements are already loaded
for placement in self.placements[newtile]:
placement.layout(newtile, recalc=False) # ensure allocated
# Lay out runways
progress.Update(13, 'Airports')
key=(newtile[0],newtile[1],prefs.options&Prefs.ELEVATION)
if key not in self.runways:
if log_load: clock=time.clock() # Processor time
(self.runways[key], self.codes[newtile]) = layoutApt(newtile, self.aptdatfile, self.airports, self.vertexcache.getElevationMesh(newtile))
if log_load: print "%6.3f time in runways" % (time.clock()-clock)
(varray,shoulderlen,taxiwaylen,runwaylen,marray,mindices) = self.runways[key]
self.aptdata = {}
if shoulderlen:
self.aptdata[ClutterDef.SHOULDERLAYER] = (self.vertexcache.instance_count, shoulderlen)
if taxiwaylen:
self.aptdata[ClutterDef.TAXIWAYLAYER] = (self.vertexcache.instance_count+shoulderlen, taxiwaylen)
if runwaylen:
self.aptdata[ClutterDef.RUNWAYSLAYER] = (self.vertexcache.instance_count+shoulderlen+taxiwaylen, runwaylen)
if len(varray):
if __debug__:
for p in varray: assert p.dtype==float32 and len(p)==6, p
self.vertexcache.allocate_instance(vstack(varray)[:,:5].flatten()) # instance VBO has 5 coords
if marray:
if __debug__:
for p in marray: assert p.dtype==float32 and p.shape[1]==6, p
base = self.vertexcache.allocate_vector(vstack(marray).flatten(), mindices)
self.aptdata[ClutterDef.MARKINGSLAYER] = (base, len(mindices))
progress.Update(14, 'Navaids')
assert self.tile==newtile
if self.tile not in self.navaidplacements:
if __debug__: clock=time.clock() # Processor time
objs={2: 'lib/airport/NAVAIDS/NDB_3.obj',
3: 'lib/airport/NAVAIDS/VOR.obj',
4: 'lib/airport/NAVAIDS/ILS.obj',
5: 'lib/airport/NAVAIDS/ILS.obj',
6: 'lib/airport/NAVAIDS/glideslope.obj',
7: 'lib/airport/NAVAIDS/Marker1.obj',
8: 'lib/airport/NAVAIDS/Marker2.obj',
9: 'lib/airport/NAVAIDS/Marker2.obj',
19: '*windsock.obj',
181:'lib/airport/landscape/beacon1.obj',
182:'lib/airport/beacons/beacon_seaport.obj',
183:'lib/airport/beacons/beacon_heliport.obj',
184:'lib/airport/landscape/beacon2.obj',
185:'lib/airport/landscape/beacon1.obj',
211:'lib/airport/lights/slow/VASI.obj',
212:'lib/airport/lights/slow/PAPI.obj',
213:'lib/airport/lights/slow/PAPI.obj',
214:'lib/airport/lights/slow/PAPI.obj',
215:'lib/airport/lights/slow/VASI3.obj',
216:'lib/airport/lights/slow/rway_guard.obj',
}
placements=[]
for (i, lat, lon, hdg) in self.navaids:
if (int(floor(lat)),int(floor(lon)))==self.tile:
if i in objs:
coshdg=cos(radians(hdg))
sinhdg=sin(radians(hdg))
if i==211:
seq=[(1,75),(-1,75),(1,-75),(-1,-75)]
elif i in range(212,215):
seq=[(12,0),(4,0),(-4,0),(-12,0)]
else:
seq=[(0,0)]
for (xinc,zinc) in seq:
placement=Object(objs[i], lat, lon, hdg)
if not placement.load(self.lookup, self.defs, self.vertexcache, False):
if __debug__: print "Missing navaid %s" % objs[i]
else:
if __debug__: clock2 = time.clock()
x,z=placement.position(self.tile, lat, lon)
placement.layout(self.tile, x+xinc*coshdg-zinc*sinhdg, None, z+xinc*sinhdg+zinc*coshdg)
if __debug__: print "%6.3f time to layout %s" % (time.clock()-clock2, placement.name)
placements.append(placement)
elif __debug__: print "Missing navaid type %d" % i
self.navaidplacements[self.tile]=placements
if __debug__: print "%6.3f time in navaids" % (time.clock()-clock)
else:
# This tile has been previously viewed - placements are already loaded
for placement in self.navaidplacements[newtile]:
placement.layout(newtile, recalc=False) # ensure allocated
# labels
progress.Update(15, 'Layout')
if self.codeslist:
glDeleteLists(self.codeslist, 1)
self.codeslist=0
if __debug__ and platform=='win32':
pass # hacky workaround for https://www.virtualbox.org/ticket/8666
elif self.codes[self.tile]:
elev = self.vertexcache.getElevationMesh(self.tile)
self.codeslist=glGenLists(1)
glNewList(self.codeslist, GL_COMPILE)
glColor3f(1.0, 0.25, 0.25) # Labels are pink
glBindTexture(GL_TEXTURE_2D, 0)
for (code, (lat,lon)) in self.codes[self.tile]:
(x,z)=self.latlon2m(lat,lon)
glRasterPos3f(x, elev.height(x,z), z)
code=code.encode('latin1', 'replace')
for c in code:
glBitmap(8,13, 16,6, 8,0, fixed8x13[ord(c)])
glEndList()
# Background image - always recalc since may span multiple tiles
if self.background:
nodes=self.background.nodes[0]
for i in range(len(nodes)):
if (int(floor(nodes[i].lat)),int(floor(nodes[i].lon)))==newtile:
self.background.layout(newtile)
break
else:
self.background.clearlayout()
self.imagery.reset() # Always recalc
# Done
progress.Update(16, 'Done')
progress.Destroy()
self.valid=True
self.options = prefs.options
# cursor position
self.y = self.vertexcache.getElevationMesh(self.tile).height(self.x, self.z)
# imagery position
self.imagery.goto(latlon, self.d, self.GetClientSize())
# Redraw can happen under MessageBox, so do this last
if errdsf:
myMessageBox(errdsf, "Can't load terrain.", wx.ICON_EXCLAMATION|wx.OK, self.frame)
if errobjs:
sortfolded(errobjs)
if len(errobjs)>11: errobjs=errobjs[:10]+['and %d more objects' % (len(errobjs)-10)]
myMessageBox('\n'.join(errobjs), "Can't read one or more objects.", wx.ICON_EXCLAMATION|wx.OK, self.frame)
if errtexs:
sortfolded(errtexs)
if len(errtexs)>11: errtexs=errtexs[:10]+['and %d more textures' % (len(errtexs)-10)]
myMessageBox('\n'.join(errtexs), "Can't read one or more textures.", wx.ICON_INFORMATION|wx.OK, self.frame)
self.Refresh()
def exit(self):
# closing down
self.imagery.exit()
def getlocalloc(self, mx, my):
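        # Window coordinates to local OpenGL coordinates: draw the terrain into
        # the depth buffer only (colour writes off) and unproject the depth
        # value under the mouse.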
if not self.valid: raise Exception # MouseWheel can happen under MessageBox
if wx.VERSION >= (2,9):
self.SetCurrent(self.context)
else:
self.SetCurrent()
size = self.GetClientSize()
mx=max(0, min(size[0]-1, mx))
my=max(0, min(size[1]-1, size[1]-1-my))
self.glstate.set_instance(self.vertexcache)
self.glstate.set_texture(self.vertexcache.texcache.get(fallbacktexture)) # textured shader. ATI drivers don't like 0
self.glstate.set_depthtest(True)
self.glstate.set_poly(False) # DepthMask=True
glColorMask(GL_FALSE,GL_FALSE,GL_FALSE,GL_FALSE)
if prefs.options&Prefs.ELEVATION:
for (base,number,texno) in self.vertexcache.getMesh(self.tile)[0]:
glDrawArrays(GL_TRIANGLES, base, number)
else:
glBegin(GL_QUADS)
glVertex3f( onedeg*cos(radians(1+self.tile[0]))/2, 0, -onedeg/2)
glVertex3f( onedeg*cos(radians(self.tile[0]))/2, 0, onedeg/2)
glVertex3f(-onedeg*cos(radians(self.tile[0]))/2, 0, onedeg/2)
glVertex3f(-onedeg*cos(radians(1+self.tile[0]))/2, 0, -onedeg/2)
glEnd()
mz=glReadPixelsf(mx,my, 1,1, GL_DEPTH_COMPONENT)[0][0]
if mz==1.0: mz=0.5 # treat off the tile edge as sea level
(x,y,z)=gluUnProject(mx,my,mz, identity(4, float64), self.glstate.proj, array([0,0,size[0],size[1]], GLint))
glColorMask(GL_TRUE,GL_TRUE,GL_TRUE,GL_TRUE)
#self.SwapBuffers() # debug
glClear(GL_DEPTH_BUFFER_BIT)
#if __debug__: print "%3d %3d %.6f, %5d %5.1f %5d" % (mx,my,mz, x,y,z)
return (x,y,z)
def xz2latlon(self, x, z):
lat=round2res(self.centre[0]-z/onedeg)
lon=round2res(self.centre[1]+x/(onedeg*cos(radians(lat))))
#if __debug__: print "%11.7f %12.7f" % (lat,lon)
return (lat,lon)
def getworldloc(self, mx, my):
(x,y,z)=self.getlocalloc(mx, my)
return self.xz2latlon(x,z)
# runway tessellators
def tessvertex(vertex, data):
data.append(vertex)
def tesscombine(coords, vertex, weight):
# Linearly interp height from vertices (location, ismesh, uv)
p1=vertex[0]
p2=vertex[1]
d=hypot(p2[0][0]-p1[0][0], p2[0][2]-p1[0][2])
if not d:
return p1 # p1 and p2 are colocated
else:
ratio=hypot(coords[0]-p1[0][0], coords[2]-p1[0][2])/d
y=p1[0][1]+ratio*(p2[0][1]-p1[0][1])
return ([coords[0],y,coords[2]], False, p1[2])
def tessedge(flag):
pass # dummy
tess=gluNewTess()
gluTessNormal(tess, 0, -1, 0)
gluTessProperty(tess, GLU_TESS_WINDING_RULE, GLU_TESS_WINDING_NONZERO)
gluTessCallback(tess, GLU_TESS_VERTEX_DATA, tessvertex)
gluTessCallback(tess, GLU_TESS_COMBINE, tesscombine)
gluTessCallback(tess, GLU_TESS_EDGE_FLAG, tessedge) # no strips
def csgtvertex((location,ismesh,uv), varray):
#assert len(location)==3, location
#assert len(uv)==2, uv
varray.append(location+uv)
def csgtcombine(coords, vertex, weight):
# vertex = [(location, ismesh, uv)]
# check for just two adjacent mesh triangles
if array_equal(vertex[0][0],vertex[1][0]) and vertex[0][1]==vertex[1][1] and vertex[0][2]==vertex[1][2]:
# common case
return vertex[0]
elif vertex[0][0][0]==vertex[1][0][0] and vertex[0][0][2]==vertex[1][0][2] and vertex[0][1]:
# Height discontinuity in terrain mesh - eg LIEE - wtf!
#assert vertex[0][1]!=vertex[1][1]
assert not weight[2] and not vertex[2] and not weight[3] and not vertex[3] and vertex[1][1]
return vertex[0]
# intersection of two lines - use terrain mesh line for height
elif vertex[0][1]:
#assert weight[0] and weight[1] and weight[2] and weight[3] and vertex[1][1]
p1=vertex[0]
p2=vertex[1]
p3=vertex[2]
else:
#assert weight[0] and weight[1] and weight[2] and weight[3] # not sure why we would assert this
p1=vertex[2]
p2=vertex[3]
p3=vertex[0]
# height
d=hypot(p2[0][0]-p1[0][0], p2[0][2]-p1[0][2])
if not d:
y=p1[0][1]
else:
ratio=(hypot(coords[0]-p1[0][0], coords[2]-p1[0][2])/d)
y=p1[0][1]+ratio*(p2[0][1]-p1[0][1])
return ([coords[0],y,coords[2]], True, p3[2] or p1[2])
def csgtedge(flag):
pass # dummy
csgt = gluNewTess()
gluTessNormal(csgt, 0, -1, 0)
gluTessProperty(csgt, GLU_TESS_WINDING_RULE, GLU_TESS_WINDING_ABS_GEQ_TWO)
gluTessCallback(csgt, GLU_TESS_VERTEX_DATA, csgtvertex)
gluTessCallback(csgt, GLU_TESS_COMBINE, csgtcombine)
gluTessCallback(csgt, GLU_TESS_EDGE_FLAG, csgtedge) # no strips
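# Illustrative sketch (not part of the original file): a GLU tessellator configured
# like "tess" above is normally driven by feeding it one or more contours, and the
# GLU_TESS_VERTEX_DATA callback (tessvertex) appends the emitted vertices to the
# polygon-data object.  "contour" below is a hypothetical list of
# (location, ismesh, uv) tuples; only the calling pattern is being shown.
#
#   out = []
#   gluTessBeginPolygon(tess, out)
#   gluTessBeginContour(tess)
#   for vertex in contour:
#       gluTessVertex(tess, vertex[0], vertex)
#   gluTessEndContour(tess)
#   gluTessEndPolygon(tess)   # "out" now holds the tessellated triangle vertices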
| gpl-2.0 | 7,955,717,471,736,110,000 | 45.453649 | 421 | 0.577393 | false |
D0tR4ck3t33r/redis-test-chat | client.py | 1 | 2690 | import redis
_DEBUG = True
r = redis.Redis(
host='127.0.0.1',
port=6379)
# Call chat_client._keepAlive every time a method is used
def _keep_client_alive(fn):
# The wrapper method which will get called instead of the decorated method:
def wrapper(self, *args, **kwargs):
self._keepAlive() # call the additional method
return fn(self, *args, **kwargs) # call the decorated method
return wrapper # return the wrapper method
class chat_client(object):
def __init__(self, name):
self.name = name
self.username = 'user_' + name
r.set(self.username, 1)
if _DEBUG:
print("~SET " + self.username + " 1")
r.expire(self.username, 120)
if _DEBUG:
print("~EXPIRE " + self.username + " 120")
#Methods for the online status of the client
def checkIfAlive(self):
if _DEBUG:
print("~GET " + self.username)
status = r.get(self.username)
if _DEBUG:
print("~~" + status)
if status == "1":
return True
else:
return False
def _keepAlive(self):
r.set(self.username, 1)
r.expire(self.username, 120)
if _DEBUG:
print("~SET " + self.username + ' ' + str(1))
print("~EXPIRE " + self.username + ' ' +str(120))
#General Methods e.g. List all Users
@_keep_client_alive
def listAllUsersRaw(self):
keys = r.keys('user_*')
if _DEBUG:
print("~KEYS user_*")
print("~~"+str(keys))
return keys
def listAllUsers(self):
raw_user_list = self.listAllUsersRaw()
user_list = []
for u in raw_user_list:
user_list += [u[5:]]
return user_list
#Methods Message Releated
@_keep_client_alive
def sendMessageTo(self, name, message):
key = '$user_' + name + '$' + self.username
if _DEBUG:
print("~RPUSH " + key + " \"" + message + " \"")
r.rpush(key, message)
@_keep_client_alive
def getMessagesFrom(self, name):
key = '$' + self.username + "$" + "user_" + name
messages = r.lrange(key, 0, -1)
r.delete(key)
if _DEBUG:
print("~LRANGE " + key)
print("~~" + ' '.join(messages))
print("~DEL " + key)
return messages
def getMessagesFromAll(self):
user_list = self.listAllUsers()
message_list = []
for u in user_list:
mes = self.getMessagesFrom(u)
for m in mes:
message_list += [(u, m)]
return message_list
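# Illustrative usage sketch (not part of the original file): exercises the client
# against the Redis instance configured at the top of this module.  The user names
# "alice" and "bob" are invented for the example.
if __name__ == "__main__":
    alice = chat_client("alice")
    bob = chat_client("bob")
    alice.sendMessageTo("bob", "hello bob")
    print(bob.listAllUsers())             # e.g. ['alice', 'bob']
    print(bob.getMessagesFrom("alice"))   # e.g. ['hello bob']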
| gpl-3.0 | 4,861,558,684,750,658,000 | 27.315789 | 79 | 0.519703 | false |
PLOS/allofplos | allofplos/elements/article_elements.py | 1 | 18523 | """These functions are for parsing individual elements of the article XML tree
Eventually these functions will probably need to be a class.
"""
import datetime
import difflib
import re
import string
import lxml.etree as et
import unidecode
def parse_article_date(date_element, date_format='%d %m %Y'):
"""
For an article date element, convert XML to a datetime object.
:param date_element: An article XML element that contains a date
:param date_format: string format used to convert to datetime object
:return: datetime object
"""
day = ''
month = ''
year = ''
for item in date_element.getchildren():
if item.tag == 'day':
day = item.text
if item.tag == 'month':
month = item.text
if item.tag == 'year':
year = item.text
if day:
date = (day, month, year)
string_date = ' '.join(date)
date = datetime.datetime.strptime(string_date, date_format)
elif month:
# try both numerical & word versions of month
date = (month, year)
string_date = ' '.join(date)
try:
date = datetime.datetime.strptime(string_date, '%m %Y')
except ValueError:
date = datetime.datetime.strptime(string_date, '%B %Y')
elif year:
date = year
date = datetime.datetime.strptime(date, '%Y')
else:
print('date error')
date = ''
return date
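# Illustrative example (not part of the original module): for a typical <pub-date>
# element the function returns a datetime object; the XML snippet is invented.
#
#   elem = et.fromstring('<pub-date><day>19</day><month>6</month><year>2017</year></pub-date>')
#   parse_article_date(elem)   # -> datetime.datetime(2017, 6, 19, 0, 0)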
def get_rid_dict(contrib_element):
""" For an individual contributor, get the list of their associated rids.
More about rids: https://jats.nlm.nih.gov/archiving/tag-library/1.1/attribute/rid.html
Used in get_contrib_info().
:param contrib_element: An article XML element with the tag <contrib>
:return: dictionary matching each type of rid to its value for that contributor
"""
rid_dict = {}
contrib_elements = contrib_element.getchildren()
# get list of ref-types
rid_type_list = [el.attrib.get('ref-type', 'fn') for el in contrib_elements if el.tag == 'xref']
# make dict of ref-types to the actual ref numbers (rids)
for rid_type in set(rid_type_list):
rid_list = [el.attrib.get('rid', None) for el in contrib_elements if el.tag == 'xref' and el.attrib.get('ref-type', 'fn') == rid_type]
rid_dict[rid_type] = rid_list
return rid_dict
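# Illustrative example (not part of the original module): for a contributor whose
# <contrib> element contains <xref ref-type="aff" rid="aff001"/> and
# <xref ref-type="corresp" rid="cor001"/>, the returned dictionary would be
# {'aff': ['aff001'], 'corresp': ['cor001']}.  The rid values are hypothetical.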
def get_author_type(contrib_element):
"""Get the type of author for a single contributor from their accompanying <contrib> element.
Authors can be 'corresponding' or 'contributing'. Depending on the paper, some elements have a
    top-level "corresp" attribute equal to "yes"; otherwise, corresponding status can be inferred
from the existence of the <xref> attribute ref-type="corresp"
:param contrib_element: An article XML element with the tag <contrib>
:return: author type (corresponding, contributing, None)
"""
answer_dict = {
"yes": "corresponding",
"no": "contributing"
}
author_type = None
if contrib_element.get('contrib-type', None) == 'author':
corr = contrib_element.get('corresp', None)
if corr:
author_type = answer_dict.get(corr, None)
else:
temp = get_rid_dict(contrib_element).get('corresp', None)
if temp:
author_type = answer_dict.get("yes", None)
else:
author_type = answer_dict.get("no", None)
return author_type
def get_contrib_name(contrib_element):
"""Get the name for a single contributor from their accompanying <contrib> element.
Also constructs their initials for later matching to article-level dictionaries about
contributors, including get_aff_dict() and get_fn_dict().
Can also handle 'collab' aka group authors with a group name but no surname or given names.
:param contrib_element: An article XML element with the tag <contrib>
:return: dictionary of a single contributor's given names, surname, initials, and group name
"""
given_names = ''
surname = ''
contrib_name_element = contrib_element.find("name")
if contrib_name_element is not None:
for name_element in contrib_name_element.getchildren():
if name_element.tag == 'surname':
# for some reason, name_element.text doesn't work for this element
surname = (et.tostring(name_element,
encoding='unicode',
method='text').rstrip(' ').rstrip('\t').rstrip('\n')
or "")
elif name_element.tag == 'given-names':
given_names = name_element.text
if given_names == '':
print("given names element.text didn't work")
given_names = (et.tostring(name_element,
encoding='unicode',
method='text').rstrip(' ').rstrip('\t').rstrip('\n')
or "")
else:
pass
if given_names or surname:
# construct initials if either given or surname is present
try:
contrib_initials = ''.join([part[0].upper() for part in re.split('[-| |,|\.]+', given_names) if part]) + \
''.join([part[0] for part in re.split('[-| |,|\.]+', surname) if part[0] in string.ascii_uppercase])
except IndexError:
contrib_initials = ''
contrib_name = dict(contrib_initials=contrib_initials,
given_names=given_names,
surname=surname)
else:
# if no <name> element found, assume it's a collaboration
contrib_collab_element = contrib_element.find("collab")
group_name = et.tostring(contrib_collab_element, encoding='unicode')
group_name = re.sub('<[^>]*>', '', group_name).rstrip('\n')
if not group_name:
print("Error constructing contrib_name group element")
group_name = ''
contrib_name = dict(group_name=group_name)
return contrib_name
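# Illustrative example (not part of the original module): for an author named
# "Jane Q. Doe" the returned dictionary would look like
# {'contrib_initials': 'JQD', 'given_names': 'Jane Q.', 'surname': 'Doe'},
# while a group author yields a {'group_name': ...} dictionary instead.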
def get_contrib_ids(contrib_element):
"""Get the ids for a single contributor from their accompanying <contrib> element.
    This will mostly get ORCID IDs, and indicate whether they are authenticated.
    For more information about ORCIDs, see https://orcid.org/
:param contrib_element: An article XML element with the tag <contrib>
:return: list of dictionaries of ids for that contributor
"""
id_list = []
for item in contrib_element.getchildren():
if item.tag == 'contrib-id':
contrib_id_type = item.attrib.get('contrib-id-type', None)
contrib_id = item.text
contrib_authenticated = item.attrib.get('authenticated', None)
id_dict = dict(id_type=contrib_id_type,
id=contrib_id,
authenticated=contrib_authenticated
)
id_list.append(id_dict)
return id_list
def get_credit_taxonomy(contrib_element):
"""Get the contributor roles from the CREDiT taxonomy element when it is present.
    Right now, this is equivalent to author roles.
For more information about this data structure, see http://dictionary.casrai.org/Contributor_Roles
:param contrib_element: An article XML element with the tag <contrib>
:return: dictionary of contributor roles for an individual contributor
"""
credit_dict = {}
for item in contrib_element.getchildren():
if item.tag == 'role':
content_type = item.attrib.get('content-type', None)
if content_type == 'http://credit.casrai.org/':
content_type = 'CASRAI CREDiT taxonomy'
role = item.text
if not credit_dict.get(content_type, None):
credit_dict[content_type] = [role]
else:
credit_dict[content_type].append(role)
return credit_dict
def match_contrib_initials_to_dict(contrib_dict, special_dict, matched_keys, contrib_key):
"""For an individual contributor, match their initials to a dictionary.
    This is used both for matching contributors to email addresses and for matching credit roles,
where the keys for all dictionaries are contributor initials. In contrib_dict, these initials are
constructed from the contributor name in get_contrib_name(). For the special dicts, initials are
provided in the raw XML.
See match_contribs_to_dicts() for how this matching process is iterated across contributors.
:param contrib_dict: information about individual contributor, including their name and constructed initials
:param special_dict: usually either get_aff_dict() or get_credit_dict()
:param matched_keys: list of keys in special_dict already matched that will be excluded
:param contrib_key: The item in the contrib dictionary where the matched special_dict will be stored
:return: updated contrib_dict that includes the newly matched special_dict
"""
contributor_initials = contrib_dict.get('contrib_initials')
# special_dict keys (initials) are assumed to be uppercase
special_dict = {k.upper(): v
for k, v in special_dict.items()
if k not in matched_keys}
if contrib_dict.get('group_name', None) is None:
try:
contrib_dict[contrib_key] = special_dict[contributor_initials.upper()]
except KeyError:
# Sometimes middle initials are included or excluded, so restrict both initial sets to
# first and last initial only.
try:
contributor_abbrev_initials = ''.join([contributor_initials[0], contributor_initials[-1]])
for dict_initials, dict_value in special_dict.items():
if contributor_abbrev_initials == ''.join([dict_initials[0], dict_initials[-1]]).upper():
contrib_dict[contrib_key] = dict_value
break
except (IndexError, KeyError) as e:
pass
return contrib_dict
def get_contrib_info(contrib_element):
"""Get a dictionary of information for a single contributor from their accompanying <contrib> element.
    Don't call this function directly. Instead, use it as part of get_contributors_info().
This includes all contributor information that can be directly accessed from <contrib> element contents.
However, other contributor information is stored in article-level dictionaries that need to be matched
for each contributor using the rid_dict created here.
:param contrib_element: An article XML element with the tag <contrib>
:return: dictionary of contributor name, ids/ORCID, rid_dict, author_roles
"""
# get their name
contrib_dict = get_contrib_name(contrib_element)
# get contrib type
try:
contrib_dict['contrib_type'] = contrib_element.attrib['contrib-type']
except KeyError:
# invalid contributor field; shouldn't count as contributor
return None
# get author type
if contrib_dict.get('contrib_type') == 'author':
contrib_dict['author_type'] = get_author_type(contrib_element)
elif contrib_dict.get('contrib_type') == 'editor':
for item in contrib_element.getchildren():
if item.tag == 'Role' and item.text.lower() != 'editor':
print('new editor type: {}'.format(item.text))
contrib_dict['editor_type'] = item.text
# get ORCID, if available
contrib_dict['ids'] = get_contrib_ids(contrib_element)
# get dictionary of contributor's footnote types to footnote ids
contrib_dict['rid_dict'] = get_rid_dict(contrib_element)
# get dictionary of CREDiT taxonomy, if available
contrib_dict['author_roles'] = get_credit_taxonomy(contrib_element)
return contrib_dict
def match_author_names_to_emails(corr_author_list, email_dict):
""" Finds the best match of author names to potential matching email addresses.
Don't call directly, but use as a part of match_contribs_to_dicts().
Sometimes, the initials-matching process in match_contrib_initials_to_dict() fails. When there's
at least two ummatched corresponding authors and email addresses, this function
figures out the name/email matching with the highest matching continguous character count and matches them.
This is a 'hail Mary' that thankfully also has a high rate of accuracy.
:param corr_author_list: list of contrib_dicts for corresponding authors with no email field
:param email_dict: list of unmatched author email addresses
:return: list of updated contrib_dicts for each author, now including an email field
"""
overall_matching_dict = {}
match_values = []
# Step 1: for each author and email combination, compute longest common string
for corr_author in corr_author_list:
# make single string of author full name
seq_1 = unidecode.unidecode(''.join([corr_author.get('given_names'), corr_author.get('surname')]).lower())
matching_dict = {}
for email_initials, email_address in email_dict.items():
# make string of email address that doesn't include domain
seq_2 = unidecode.unidecode(email_address[0].lower().split('@')[0])
matcher = difflib.SequenceMatcher(a=seq_1, b=seq_2)
# construct dictionary with name, email, and matching string length for each pair
match = matcher.find_longest_match(0, len(matcher.a), 0, len(matcher.b))
matching_dict[tuple(email_address)] = match[-1]
# add length of match to list of all match lengths
match_values.append(match[-1])
overall_matching_dict[(corr_author.get('given_names'), corr_author.get('surname'))] = matching_dict
# Step 2: for the author and email combination(s) with the longest common string, match them
    # Iterate through match_values in descending order until all are matched
newly_matched_emails = []
newly_matched_authors = []
count = 0
while len(newly_matched_emails) < len(overall_matching_dict) and count < 20:
for k1, v1 in overall_matching_dict.items():
for k2, v2 in v1.items():
if v2 == max(match_values):
for corr_author in corr_author_list:
if k1 == (corr_author.get('given_names'), corr_author.get('surname')) \
and k2 not in newly_matched_emails and k1 not in newly_matched_authors:
corr_author['email'] = list(k2)
# keep track of matched email & author so they're not matched again
newly_matched_authors.append(k1)
newly_matched_emails.append(k2)
match_values.remove(v2)
count += 1
# Step 3: match the remaining author and email if there's only one remaining (most common)
# Might not be necessary with the while loop
still_unmatched_authors = [author for author in corr_author_list if not author.get('email')]
still_unmatched_emails = {k: v for k, v in email_dict.items() if tuple(v) not in newly_matched_emails}
if len(still_unmatched_authors) == len(still_unmatched_emails) <= 1:
if len(still_unmatched_authors) == len(still_unmatched_emails) == 1:
# only one remaining. it gets matched
still_unmatched_authors[0]['email'] = list(still_unmatched_emails.values())[0]
else:
# we were successful at matching all emails (likely, two pairs had the same match values)
pass
else:
# something's gone wrong. the candidate list of emails doesn't match the number of authors
# the corresponding authors printed below will have their ['email'] field unfilled
print('not calculating right', still_unmatched_authors, still_unmatched_emails)
return corr_author_list
def match_contribs_to_dicts(contrib_list, special_dict, contrib_key):
"""
:param contrib_list: list of contributors
:param special_dict: usually either get_aff_dict() or get_credit_dict()
:param contrib_key: The item in the contrib dictionary where the matched special_dict will be stored
"""
matching_error = False
matched_keys = []
for contrib_dict in contrib_list:
contrib_dict = match_contrib_initials_to_dict(contrib_dict,
special_dict,
matched_keys,
contrib_key)
if contrib_dict.get(contrib_key, None):
for k, v in special_dict.items():
if v == contrib_dict.get(contrib_key):
matched_keys.append(k)
if len(special_dict) == len(matched_keys):
# all special_dicts and contributors are matched
pass
else:
unmatched_special_dict = {k: v for k, v in special_dict.items()
if k not in matched_keys}
contrib_dict_missing_special_list = [contrib_dict for contrib_dict in contrib_list
if not contrib_dict.get(contrib_key, None)]
# if one contributor and one special_dict are unmatched, match them
if len(unmatched_special_dict) == len(contrib_dict_missing_special_list) == 1:
contrib_dict_missing_special_list[0][contrib_key] = list(unmatched_special_dict.values())[0]
elif len(unmatched_special_dict) != len(contrib_dict_missing_special_list):
# these numbers should always be the same
matching_error = True
else:
if contrib_key == 'email':
# match remaining contributor names to emails by string matching
contrib_dicts = match_author_names_to_emails(contrib_dict_missing_special_list, unmatched_special_dict)
if len([contrib for contrib in contrib_dicts if contrib_key not in contrib.keys()]) == 0:
# finally every contributor and special_dict is matched
pass
else:
# even after applying every strategy, there were unmatched contributors
matching_error = True
return contrib_list, matching_error
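# Illustrative sketch (not part of the original module) of how these helpers are
# typically chained by a caller: parse every <contrib> element, then match an
# article-level email dictionary onto the contributors.  "contrib_elements" and
# "email_dict" are hypothetical inputs built elsewhere from the article XML.
#
#   contrib_list = [get_contrib_info(el) for el in contrib_elements]
#   contrib_list = [c for c in contrib_list if c is not None]
#   contrib_list, err = match_contribs_to_dicts(contrib_list, email_dict, 'email')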
| mit | -9,046,366,052,612,875,000 | 47.362924 | 142 | 0.624791 | false |
sheerluck/andrew | seconds.py | 1 | 3114 | #-*- coding: utf-8 -*-
class Seconds(object):
def __init__(self, count):
self._seconds = count
def this(self):
return "{} seconds".format(self._seconds)
class Minute(object):
def __init__(self, count):
self._minutes = count
self._seconds = Seconds(self._minutes * 60)
def seconds(self):
return self._seconds.this()
def this(self):
return "{} minutes".format(self._minutes)
class Hour(object):
def __init__(self, count):
self._hours = count
self._minutes = Minute(self._hours * 60)
def seconds(self):
return self._minutes.seconds()
def minutes(self):
return self._minutes.this()
def this(self):
return "{} hours".format(self._hours)
class Day(object):
def __init__(self, count):
self._days = count
self._hours = Hour(self._days * 24)
def seconds(self):
return self._hours.seconds()
def minutes(self):
return self._hours.minutes()
def hours(self):
return self._hours.this()
def this(self):
return "{} days".format(self._days)
class Week(object):
def __init__(self, count):
self._weeks = count
self._days = Day(self._weeks * 7)
def seconds(self):
return self._days.seconds()
def minutes(self):
return self._days.minutes()
def hours(self):
return self._days.hours()
def days(self):
return self._days.this()
def this(self):
return "{:.1f} weeks".format(self._weeks)
class Month(object):
def __init__(self, count):
self._months = count
self._weeks = Week(self._months * 4)
def seconds(self):
return self._weeks.seconds()
def minutes(self):
return self._weeks.minutes()
def hours(self):
return self._weeks.hours()
def days(self):
return self._weeks.days()
def weeks(self):
return self._weeks.this()
def this(self):
return "{} months".format(self._months)
class Year(object):
def __init__(self, count):
self._years = count
self._months = Month(self._years * 12)
self._weeks = Week( self._years * 365/7)
self._days = Day( self._years * 365)
def seconds(self):
return self._days.seconds()
def insec(self):
return self._days._hours._minutes._seconds._seconds
def minutes(self):
return self._days.minutes()
def hours(self):
return self._days.hours()
def days(self):
return self._days.this()
def weeks(self):
return self._weeks.this()
def months(self):
return self._months.this()
def this(self):
return "{} years".format(self._years)
sec_in_year = Year(1).insec()
print(sec_in_year)
se7en_bil = 7 * 1000**3
print(se7en_bil)
years = se7en_bil / sec_in_year
print(years) # 222 years
two2two = Year(222)
print()
print(two2two.this())
print(two2two.months())
print(two2two.weeks())
print(two2two.days())
print(two2two.hours())
print(two2two.minutes())
print(two2two.seconds())
| gpl-3.0 | -4,255,883,822,636,034,000 | 19.090323 | 59 | 0.577392 | false |
aldanor/pynab | ynab/models.py | 1 | 13989 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
import toolz
import collections
from enum import Enum
from dateparser.date import DateDataParser
from . import schema
from .util import force_encode
class AccountType(Enum):
CHECKING = 'Checking'
SAVINGS = 'Savings'
CREDIT_CARD = 'CreditCard'
CASH = 'Cash'
LINE_OF_CREDIT = 'LineOfCredit'
PAYPAL = 'Paypal'
MERCHANT_ACCOUNT = 'MerchantAccount'
INVESTMENT_ACCOUNT = 'InvestmentAccount'
MORTGAGE = 'Mortgage'
OTHER_ASSET = 'OtherAsset'
OTHER_LIABILITY = 'OtherLiability'
class CategoryType(Enum):
OUTFLOW = 'OUTFLOW'
class TransactionStatus(Enum):
CLEARED = 'Cleared'
RECONCILED = 'Reconciled'
UNCLEARED = 'Uncleared'
class Model(object):
_entity_type = None
def __init__(self, ynab, entity):
self._ynab = ynab
self._entity = entity
@classmethod
@toolz.curry
def _from_flat(cls, ynab, data):
return cls(ynab, cls._entity_type(data, strict=False))
@property
def id(self):
return self._entity.entityId
@property
def is_valid(self):
return not self._entity.isTombstone
class Account(Model):
_entity_type = schema.Account
@force_encode
def __repr__(self):
return '<Account: {}>'.format(self.name)
@property
def name(self):
return self._entity.accountName
@property
def type(self):
return AccountType(self._entity.accountType)
@property
def on_budget(self):
return self._entity.onBudget
@property
def last_reconciled_date(self):
return self._entity.lastReconciledDate
@property
def last_reconciled_balance(self):
return self._entity.lastReconciledBalance
@property
def last_reconciled_check_number(self):
return self._entity.lastReconciledCheckNumber
@property
def hidden(self):
return self._entity.hidden
@property
def payees(self):
return self._ynab.payees.filter('target_account', self)
@property
def transactions(self):
return self._ynab.transactions.filter('account', self)
@property
def inbound_transactions(self):
return self._ynab.transactions.filter('target_account', self)
@property
def balance(self):
return round(sum(self.transactions.amount), self._ynab.precision)
@property
def cleared_balance(self):
return round(sum(self.transactions.filter('cleared').amount), self._ynab.precision)
@property
def reconciled_balance(self):
return round(sum(self.transactions.filter('reconciled').amount), self._ynab.precision)
@property
def note(self):
return self._entity.note
class Payee(Model):
_entity_type = schema.Payee
@force_encode
def __repr__(self):
return '<Payee: {}>'.format(self.name)
@property
def name(self):
return self._entity.name
@property
def target_account(self):
return self._ynab.accounts.by_id(self._entity.targetAccountId)
@property
def enabled(self):
return self._entity.enabled
@property
def transactions(self):
return self._ynab.transactions.filter('payee', self)
class CategoryModel(Model):
@property
def name(self):
return self._entity.name
@property
def type(self):
return CategoryType(self._entity.type)
class Category(CategoryModel):
_entity_type = schema.SubCategory
@force_encode
def __repr__(self):
return '<Category: {}>'.format(self.full_name)
@property
def cached_balance(self):
return self._entity.cachedBalance
@property
def master_category(self):
return self._ynab.master_categories.by_id(self._entity.masterCategoryId)
@property
def has_unresolved_conflicts(self):
return not self._entity.isResolvedConflict
@property
def note(self):
return self._entity.note
@property
def full_name(self):
return '{}/{}'.format(self.master_category.name, self.name)
@property
def transactions(self):
return self._ynab.transactions.filter('category', self)
class MasterCategory(CategoryModel):
_entity_type = schema.MasterCategory
def __init__(self, ynab, entity):
super(MasterCategory, self).__init__(ynab, entity)
self._categories = Categories(
Category(ynab, category) for category in self._entity.subCategories or [])
@force_encode
def __repr__(self):
return '<MasterCategory: {}>'.format(self.name)
@property
def categories(self):
return self._categories
def __iter__(self):
return iter(self._categories)
class TransactionModel(Model):
@property
def memo(self):
return self._entity.memo
@property
def amount(self):
return round(float(self._entity.amount or 0.), self._ynab.precision)
@property
def category(self):
return self._ynab.categories.by_id(self._entity.categoryId)
@property
def target_account(self):
return self._ynab.accounts.by_id(self._entity.targetAccountId)
@property
def transfer_transaction(self):
return self._ynab.transactions.by_id(self._entity.transferTransactionId)
@property
def has_unresolved_conflicts(self):
return not self._entity.isResolvedConflict
class SubTransaction(TransactionModel):
_entity_type = schema.SubTransaction
@force_encode
def __repr__(self):
return '<SubTransaction: {:.2f} ({})>'.format(
self.amount, self.category.name if self.category else 'no category')
@property
def parent(self):
return self._ynab.transactions.by_id(self._entity.parentTransactionId)
class Transaction(TransactionModel):
_entity_type = schema.Transaction
def __init__(self, ynab, entity):
super(Transaction, self).__init__(ynab, entity)
self._sub_transactions = SubTransactions(
SubTransaction(ynab, t) for t in self._entity.subTransactions or [])
@force_encode
def __repr__(self):
info = ''
if self.category:
info += ' ({})'.format(self.category.name)
if self.payee:
info += ' [{}]'.format(self.payee.name)
return '<Transaction: [{:%d/%m/%y}]: {}: {:.2f}{}>'.format(
self.date or 'no date', self.account.name if self.account else 'no account',
self.amount, info)
@property
def date(self):
return self._entity.date
@property
def status(self):
return TransactionStatus(self._entity.cleared)
@property
def cleared(self):
return self.status in (TransactionStatus.CLEARED, TransactionStatus.RECONCILED)
@property
def reconciled(self):
return self.status == TransactionStatus.RECONCILED
@property
def accepted(self):
return self._entity.accepted
@property
def account(self):
return self._ynab.accounts.by_id(self._entity.accountId)
@property
def payee(self):
return self._ynab.payees.by_id(self._entity.payeeId)
@property
def date_entered_from_schedule(self):
return self._entity.dateEnteredFromSchedule
@property
def sub_transactions(self):
return self._sub_transactions
class ModelCollection(collections.Sequence):
_model_type = None
_index_key = None
_NO_VALUE = object()
def __init__(self, elements):
self._elements = list(e for e in elements if e.is_valid)
# keep a reverse index for faster id indexing
self._index = {element.id: element for element in self._elements}
@classmethod
@toolz.curry
def _from_flat(cls, ynab, data):
return cls(map(cls._model_type._from_flat(ynab), data))
def __len__(self):
return len(self._elements)
def __getitem__(self, key):
# try behave both like a list and like a dict with string keys
if isinstance(key, six.string_types):
# _index_key defines the attribute name that will be matched
if self._index_key is not None:
for element in self:
if getattr(element, self._index_key) == key:
return element
raise KeyError(key)
else:
return self._elements[key]
def __getattr__(self, key):
# if the attribute is not found, propagate it to children
return [getattr(element, key) for element in self]
def __repr__(self):
# list(self) due to py2/py3 unicode problems
return repr(list(self))
def __str__(self):
# list(self) due to py2/py3 unicode problems
return str(list(self))
def by_id(self, id):
"""
Retrieve an element by entity ID. Returns None if not found.
Parameters
----------
id : str
"""
return self._index.get(id, None)
def sort_by(self, field):
"""
In-place sort by a specified field.
Parameters
----------
field : string
"""
self._elements = sorted(self._elements, key=lambda element: getattr(element, field))
def filter(self, field, value=_NO_VALUE):
"""
Filters the collection by field value of child elements.
Parameters
----------
field : str
Name of the attribute to be matched.
value : object (optional)
If specified, the values will be matched to this value by equality. Otherwise,
the values will be converted to booleans and matched to True.
Returns
-------
collection : ModelCollection
The return value is always of the same type as the original object.
"""
return type(self)(element for element in self
if (value is not self._NO_VALUE and getattr(element, field) == value) or
(value is self._NO_VALUE and getattr(element, field)))
class Accounts(ModelCollection):
_model_type = Account
_index_key = 'name'
class Payees(ModelCollection):
_model_type = Payee
_index_key = 'name'
class MasterCategories(ModelCollection):
_model_type = MasterCategory
_index_key = 'name'
class Categories(ModelCollection):
_model_type = Category
_index_key = 'full_name'
class Transactions(ModelCollection):
_model_type = Transaction
@property
def amount(self):
amount = [t.amount for t in self]
try:
# try to return a numpy array if possible
import numpy as np
return np.round(np.array(amount, dtype=np.float64), self._ynab.precision)
except ImportError:
# return a simple list otherwise
return amount
def _parse_date(self, string):
parser = DateDataParser()
date = parser.get_date_data(string)['date_obj']
if date is None:
raise RuntimeError('Unable to parse date: {!r}'.format(string))
return date.date()
def between(self, start=None, end=None):
"""
Select all transactions between the specified dates.
The dates may be specified as date objects, standard date strings ('2015-01-15') or
human-readable strings ('two weeks ago').
Parameters
----------
start : date or str (optional)
end : date or str (optional)
Returns
-------
transactions : Transactions
"""
transactions = list(self)
if start is not None:
transactions = [t for t in transactions if t.date >= self._parse_date(start)]
if end is not None:
transactions = [t for t in transactions if t.date <= self._parse_date(end)]
return type(self)(transactions)
def since(self, date):
"""
Select all transactions since the specified date.
The date may be specified as date object, standard date string ('2015-01-15') or
a human-readable string ('two weeks ago').
Parameters
----------
        date : date or str
Returns
-------
transactions : Transactions
"""
return self.between(start=date)
def till(self, date):
"""
Select all transactions before and including the specified date.
The date may be specified as date object, standard date string ('2015-01-15') or
a human-readable string ('two weeks ago').
Parameters
----------
        date : date or str
Returns
-------
transactions : Transactions
"""
return self.between(end=date)
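    # Illustrative usage (not part of the original file), assuming "ynab" is a
    # loaded budget object exposing a Transactions collection:
    #
    #   recent = ynab.transactions.since('two weeks ago')
    #   january = ynab.transactions.between('2015-01-01', '2015-01-31')
    #   cleared = recent.filter('cleared')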
class SubTransactions(ModelCollection):
_model_type = SubTransaction
class BudgetMetaData(Model):
_entity_type = schema.BudgetMetaData
_PRECISIONS = {
'ar_BH': 3,
'ar_EG': 3,
'ar_JO': 3,
'ar_KW': 3,
'ar_TN': 3,
'id_ID': 0,
'is_IS': 0,
'ja_JP': 0,
'ko_KR': 0,
'uz_Latn_UZ': 0,
}
@property
def is_valid(self):
# BudgetMetaData is not deleteable, so it is always valid.
return True
@property
def currency_locale(self):
return self._entity.currencyLocale
@property
def date_locale(self):
return self._entity.dateLocale
@property
def budget_type(self):
return self._entity.budgetType
@property
def is_strict(self):
# Is this used?
return self._entity.strictBudget
@property
def precision(self):
return self._PRECISIONS.get(self.currency_locale, 2)
class FileMetaData(Model):
_entity_type = schema.FileMetaData
@property
def current_knowledge(self):
return self._entity.currentKnowledge
@property
def id(self):
return None
@property
def is_valid(self):
return True
| bsd-3-clause | 871,299,582,886,563,200 | 24.527372 | 98 | 0.611766 | false |
tectronics/yarest | yarest/gui/_edit.py | 1 | 21660 | ## -*- coding: utf-8 -*-
# Copyright © 2011-2012 Mike Fled <[email protected]>
import re, wx
from configobj import ConfigObj
from wx.lib.intctrl import IntCtrl
from yarest import ConnectionProfile
from ._constants import (BLANK_LINES, PORT_NUMBER_MAX, PORT_NUMBER_MIN,
RESIZE_NO, RESIZE_OK)
from ._messages import (connectionprofile_list_added,
connectionprofile_list_cleared,
err_profile_name, err_unexpected, label_browse,
label_error, label_edit_window, label_profile_delete,
label_profile_name, label_profile_new,
label_profile_save, label_ssh_allow_agent,
label_ssh_allow_unknown, label_ssh_clientkeys,
label_ssh_compression, label_ssh_hostkeys,
label_ssh_port, label_ssh_search_keys,
label_ssh_server, label_ssh_username,
label_support_args, label_support_exec,
label_support_port, label_support_tunnel,
label_tcp_timeout, profile_added, profile_deleted,
profile_updated, profile_updated_name, profiles_saved,
prompt_delete_profile_message,
prompt_delete_profile_title,
tooltip_profile_name, tooltip_ssh_allow_agent,
tooltip_ssh_allow_unknown, tooltip_ssh_clientkeys,
tooltip_ssh_compression, tooltip_ssh_hostkeys,
tooltip_ssh_port, tooltip_ssh_search_keys,
tooltip_ssh_server, tooltip_ssh_username,
tooltip_support_args, tooltip_support_exec,
tooltip_support_port, tooltip_support_tunnel,
tooltip_tcp_timeout)
# layout variables
input_int_height = -1
input_int_width = 65
input_text_height = -1
input_text_width = 225
profiles_height = -1
profiles_style = wx.LB_SINGLE | wx.LB_HSCROLL | wx.LB_SORT
profiles_width = 200
input_flags = wx.ALIGN_LEFT
label_flags = wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL
button_padding = 15
grid_padding = 5
widget_padding = 7
# widget identifiers
ID_CLIENTKEYS = 2000
ID_HOSTKEYS = 2001
ID_SUPPORTEXEC = 2002
class EditProfilesDialog (wx.Dialog):
def __init__(self, parent, filename, encoding, logger, profiles, multi):
self._encoding = encoding
self._filename = filename
self._logger = logger
self._multi = multi
self._profile = None
self._profiles = profiles
self._updated = False
wx.Dialog.__init__(self, parent, wx.ID_ANY, label_edit_window,
style=wx.CLOSE_BOX | wx.MINIMIZE_BOX)
# init layout sizers
outersizer = wx.BoxSizer(wx.VERTICAL)
uppersizer = wx.BoxSizer(wx.HORIZONTAL)
formsizer = wx.FlexGridSizer(rows=0, cols=3,
vgap=grid_padding,
hgap=grid_padding)
lowersizer = wx.BoxSizer(wx.HORIZONTAL)
# profile name
label = wx.StaticText(self, wx.ID_ANY, label_profile_name)
self._profile_name = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._profile_name.SetToolTip(wx.ToolTip(tooltip_profile_name))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._profile_name, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh server
label = wx.StaticText(self, wx.ID_ANY, label_ssh_server)
self._ssh_server = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._ssh_server.SetToolTip(wx.ToolTip(tooltip_ssh_server))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_server, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh port
label = wx.StaticText(self, wx.ID_ANY, label_ssh_port)
self._ssh_port = IntCtrl(self, wx.ID_ANY,
value=None, limited=True,
size=(input_int_width,
input_int_height),
min=PORT_NUMBER_MIN, max=PORT_NUMBER_MAX,
allow_none=True, oob_color=wx.BLACK)
self._ssh_port.SetToolTip(wx.ToolTip(tooltip_ssh_port))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_port, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh username
label = wx.StaticText(self, wx.ID_ANY, label_ssh_username)
self._ssh_username = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._ssh_username.SetToolTip(wx.ToolTip(tooltip_ssh_username))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_username, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh clientkeys
label = wx.StaticText(self, wx.ID_ANY, label_ssh_clientkeys)
self._ssh_clientkeys = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._ssh_clientkeys.SetToolTip(wx.ToolTip(tooltip_ssh_clientkeys))
b_clientkeys = wx.Button(self, ID_CLIENTKEYS, label_browse)
wx.EVT_BUTTON(self, ID_CLIENTKEYS, self._on_browse_clientkeys)
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_clientkeys, RESIZE_OK, input_flags)
formsizer.Add(b_clientkeys, RESIZE_NO, input_flags)
# ssh search keys
label = wx.StaticText(self, wx.ID_ANY, label_ssh_search_keys)
self._ssh_search_keys = wx.CheckBox(self, wx.ID_ANY)
self._ssh_search_keys.SetToolTip(wx.ToolTip(tooltip_ssh_search_keys))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_search_keys, RESIZE_NO, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh allow agent
label = wx.StaticText(self, wx.ID_ANY, label_ssh_allow_agent)
self._ssh_allow_agent = wx.CheckBox(self, wx.ID_ANY)
self._ssh_allow_agent.SetToolTip(wx.ToolTip(tooltip_ssh_allow_agent))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_allow_agent, RESIZE_NO, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh hostkeys
label = wx.StaticText(self, wx.ID_ANY, label_ssh_hostkeys)
self._ssh_hostkeys = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._ssh_hostkeys.SetToolTip(wx.ToolTip(tooltip_ssh_hostkeys))
b_hostkeys = wx.Button(self, ID_HOSTKEYS, label_browse)
wx.EVT_BUTTON(self, ID_HOSTKEYS, self._on_browse_hostkeys)
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_hostkeys, RESIZE_OK, input_flags)
formsizer.Add(b_hostkeys, RESIZE_NO, input_flags)
# ssh allow unknown
label = wx.StaticText(self, wx.ID_ANY, label_ssh_allow_unknown)
self._ssh_allow_unknown = wx.CheckBox(self, wx.ID_ANY)
self._ssh_allow_unknown.SetToolTip(wx.ToolTip(tooltip_ssh_allow_unknown))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_allow_unknown, RESIZE_NO, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# ssh compression
label = wx.StaticText(self, wx.ID_ANY, label_ssh_compression)
self._ssh_compression = wx.CheckBox(self, wx.ID_ANY)
self._ssh_compression.SetToolTip(wx.ToolTip(tooltip_ssh_compression))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._ssh_compression, RESIZE_NO, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# tcp timeout
label = wx.StaticText(self, wx.ID_ANY, label_tcp_timeout)
self._tcp_timeout = IntCtrl(self, wx.ID_ANY, value=None,
size=(input_int_width,
input_int_height),
allow_none=True, oob_color=wx.BLACK)
self._tcp_timeout.SetToolTip(wx.ToolTip(tooltip_tcp_timeout))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._tcp_timeout, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# support exec
label = wx.StaticText(self, wx.ID_ANY, label_support_exec)
self._support_exec = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._support_exec.SetToolTip(wx.ToolTip(tooltip_support_exec))
b_supportexec = wx.Button(self, ID_SUPPORTEXEC, label_browse)
wx.EVT_BUTTON(self, ID_SUPPORTEXEC, self._on_browse_supportexec)
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._support_exec, RESIZE_OK, input_flags)
formsizer.Add(b_supportexec, RESIZE_NO, input_flags)
# support port
label = wx.StaticText(self, wx.ID_ANY, label_support_port)
self._support_port = IntCtrl(self, wx.ID_ANY,
value=None, limited=True,
size=(input_int_width,
input_int_height),
min=PORT_NUMBER_MIN, max=PORT_NUMBER_MAX,
allow_none=True, oob_color=wx.BLACK)
self._support_port.SetToolTip(wx.ToolTip(tooltip_support_port))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._support_port, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# support tunnel
label = wx.StaticText(self, wx.ID_ANY, label_support_tunnel)
self._support_tunnel = wx.ComboBox(self, wx.ID_ANY,
style=wx.CB_READONLY | wx.CB_SORT)
self._support_tunnel.Append("")
self._support_tunnel.Append("forward")
self._support_tunnel.Append("reverse")
self._support_tunnel.SetToolTip(wx.ToolTip(tooltip_support_tunnel))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._support_tunnel, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# support args
label = wx.StaticText(self, wx.ID_ANY, label_support_args)
self._support_args = wx.TextCtrl(self, wx.ID_ANY,
size=(input_text_width,
input_text_height))
self._support_args.SetToolTip(wx.ToolTip(tooltip_support_args))
formsizer.Add(label, RESIZE_NO, label_flags)
formsizer.Add(self._support_args, RESIZE_OK, input_flags)
formsizer.Add(self._create_empty_grid_cell(), RESIZE_NO)
# save profile and close buttons
b_save = wx.Button(self, wx.ID_SAVE, label_profile_save)
wx.EVT_BUTTON(self, wx.ID_SAVE, self._on_click_save)
b_close = wx.Button(self, wx.ID_CLOSE)
wx.EVT_BUTTON(self, wx.ID_CLOSE, self._on_click_close)
lowersizer.AddStretchSpacer()
if self._multi:
# setup delete and new profile buttons and add to lower sizer
b_delete = wx.Button(self, wx.ID_DELETE, label_profile_delete)
wx.EVT_BUTTON(self, wx.ID_DELETE, self._on_click_delete)
lowersizer.Add(b_delete, RESIZE_NO)
lowersizer.AddSpacer(button_padding)
b_new = wx.Button(self, wx.ID_NEW, label_profile_new)
wx.EVT_BUTTON(self, wx.ID_NEW, self._on_click_new)
lowersizer.Add(b_new, RESIZE_NO)
lowersizer.AddSpacer(button_padding)
# setup profiles list control and add to upper sizer
profilesizer = wx.BoxSizer(wx.HORIZONTAL)
self._profiles_list = wx.ListBox(self, wx.ID_ANY,
size=(profiles_width,
profiles_height),
style=profiles_style)
self._populate_profiles_list()
self._profiles_list.Bind(wx.EVT_LISTBOX, self._on_select_profile)
profilesizer.Add(self._profiles_list, RESIZE_NO, wx.EXPAND)
uppersizer.Add(profilesizer, RESIZE_NO,
wx.EXPAND | wx.ALL, widget_padding)
# add form sizer to upper sizer
uppersizer.Add(formsizer, RESIZE_OK, wx.ALL, widget_padding)
# add save profile and close buttons to lower sizer
lowersizer.Add(b_save, RESIZE_NO)
lowersizer.AddSpacer(button_padding)
lowersizer.Add(b_close, RESIZE_NO)
lowersizer.AddStretchSpacer()
# add upper and lower sizers to outer sizer
outersizer.Add(uppersizer, RESIZE_OK, wx.ALL, widget_padding)
outersizer.Add(lowersizer, RESIZE_NO,
wx.ALIGN_CENTER | wx.ALL, widget_padding)
# resize and theme window
self.SetIcon(parent.GetIcon())
self.SetSizerAndFit(outersizer)
self.SetThemeEnabled(True)
def _browse_for_and_set_file(self, widget):
dialog = wx.FileDialog(self, style=wx.OPEN)
if dialog.ShowModal() == wx.ID_OK:
widget.SetValue(dialog.GetPath())
dialog.Destroy()
def _create_empty_grid_cell(self):
return wx.StaticText(self, wx.ID_ANY)
def _display_error(self, error):
msgbox = wx.MessageDialog(self, str(error), label_error,
style=wx.OK | wx.ICON_ERROR)
msgbox.ShowModal()
msgbox.Destroy()
def _get_form_profile(self):
profile = ConnectionProfile(self._profile_name.GetValue())
profile.ssh_server = self._ssh_server.GetValue()
profile.ssh_port = self._ssh_port.GetValue()
profile.ssh_username = self._ssh_username.GetValue()
profile.ssh_clientkeys = self._ssh_clientkeys.GetValue()
profile.ssh_search_keys = self._ssh_search_keys.GetValue()
profile.ssh_allow_agent = self._ssh_allow_agent.GetValue()
profile.ssh_hostkeys = self._ssh_hostkeys.GetValue()
profile.ssh_allow_unknown = self._ssh_allow_unknown.GetValue()
profile.ssh_compression = self._ssh_compression.GetValue()
profile.tcp_timeout = self._tcp_timeout.GetValue()
profile.support_exec = self._support_exec.GetValue()
profile.support_port = self._support_port.GetValue()
profile.support_tunnel = self._support_tunnel.GetValue()
profile.support_args = self._support_args.GetValue()
return profile
def _on_browse_clientkeys(self, event):
self._browse_for_and_set_file(self._ssh_clientkeys)
def _on_browse_hostkeys(self, event):
self._browse_for_and_set_file(self._ssh_hostkeys)
def _on_browse_supportexec(self, event):
self._browse_for_and_set_file(self._support_exec)
def _on_click_close(self, event):
self.Close(True)
def _on_click_delete(self, event):
name = self._profiles_list.GetStringSelection()
if name != "":
msgbox = wx.MessageDialog(self,
prompt_delete_profile_message % (name),
prompt_delete_profile_title,
style=wx.YES_NO | wx.NO_DEFAULT |
wx.ICON_QUESTION)
retcode = msgbox.ShowModal()
msgbox.Destroy()
if retcode != wx.ID_YES: return
new_profiles = []
for profile in self._profiles:
if profile.name != name:
new_profiles.append(profile)
self._logger.info(profile_deleted % (name))
self._save_profiles(new_profiles)
def _on_click_new(self, event):
self._profiles_list.DeselectAll()
self._set_form_profile(None)
def _on_click_save(self, event):
profile = self._get_form_profile()
if not self._profile_is_valid(profile): return
new_profiles = []
if self._multi:
if self._profile is not None:
existing_name = self._profile.name
if profile.name == existing_name:
self._logger.info(profile_updated % (profile.name))
elif not self._profile_name_exists(profile.name):
self._logger.info(profile_updated_name %
(existing_name, profile.name))
else:
self._display_error(err_profile_name % (profile.name))
return
for p in self._profiles:
if p.name != existing_name:
new_profiles.append(p)
elif not self._profile_name_exists(profile.name):
self._logger.info(profile_added % (profile.name))
for p in self._profiles:
new_profiles.append(p)
else:
self._display_error(err_profile_name % (profile.name))
return
new_profiles.append(profile)
self._save_profiles(new_profiles)
def _on_select_profile(self, event):
if event.IsSelection():
index = self._profiles_list.GetSelection()
profile = self._profiles_list.GetClientData(index)
self._set_form_profile(profile)
def _populate_profiles_list(self):
self._profiles_list.Clear()
self._logger.debug(connectionprofile_list_cleared %
(self.__class__.__name__))
for profile in self._profiles:
self._profiles_list.Append(profile.name, profile)
self._logger.debug(connectionprofile_list_added %
(self.__class__.__name__, profile.name))
def _profile_is_valid(self, profile):
try:
ConnectionProfile.prevalidate(profile)
return True
except ValueError as e:
self._display_error(e)
return False
except Exception as e:
self._logger.exception(err_unexpected)
self._display_error(err_unexpected + BLANK_LINES + str(e))
return False
def _profile_name_exists(self, name):
        pattern = r"^" + re.escape(name) + r"$"  # escape regex metacharacters in profile names
for p in self._profiles:
if re.match(pattern, p.name, re.IGNORECASE) is not None:
return True
return False
def _save_profiles(self, profiles):
try:
ConnectionProfile.write_to_file(profiles,
self._filename,
self._encoding)
except Exception as e:
self._logger.exception(err_unexpected)
self._display_error(err_unexpected + BLANK_LINES + str(e))
return
self._logger.info(profiles_saved % (len(profiles), self._filename))
self._profiles = profiles
self._updated = True
if self._multi:
self._populate_profiles_list()
self._on_click_new(None)
def _set_form_profile(self, profile=None):
# we use None here for a new profile, otherwise we cache the profile
# so that we know when saving whether or not to update the existing
if profile is not None:
self._profile = profile
else:
self._profile = None
profile = ConnectionProfile("")
self._profile_name.SetValue(profile.name)
self._ssh_server.SetValue(_cstr(profile.ssh_server))
self._ssh_port.SetValue(profile.ssh_port)
self._ssh_username.SetValue(_cstr(profile.ssh_username))
self._ssh_clientkeys.SetValue(_cstr(profile.ssh_clientkeys))
self._ssh_search_keys.SetValue(_cbool(profile.ssh_search_keys))
self._ssh_allow_agent.SetValue(_cbool(profile.ssh_allow_agent))
self._ssh_hostkeys.SetValue(_cstr(profile.ssh_hostkeys))
self._ssh_allow_unknown.SetValue(_cbool(profile.ssh_allow_unknown))
self._ssh_compression.SetValue(_cbool(profile.ssh_compression))
self._tcp_timeout.SetValue(_cint(profile.tcp_timeout))
self._support_exec.SetValue(_cstr(profile.support_exec))
self._support_port.SetValue(profile.support_port)
self._support_tunnel.SetStringSelection(_cstr(profile.support_tunnel))
self._support_args.SetValue(_cstr(profile.support_args))
def GetProfiles(self):
return self._profiles
def IsUpdated(self):
return self._updated
def _cbool(value):
return value is not None and value
def _cint(value):
if value is None: return None
return int(value)
def _cstr(value):
if value is None: return ""
return str(value)
| mit | 613,162,719,687,055,700 | 44.122917 | 81 | 0.579482 | false |
Gimpneek/exclusive-raid-gym-tracker | app/features/steps/page_object_models/selectors/listing.py | 1 | 1594 | """ Selectors for list view """
from selenium.webdriver.common.by import By
TITLES = (By.CSS_SELECTOR, '.column.content > h1')
GYMS_TO_VISIT_CARDS = (
By.CSS_SELECTOR,
'#gyms_to_visit .columns > .column > .card'
)
COMPLETED_GYMS_CARDS = (
By.CSS_SELECTOR,
'#completed_gyms .columns > .column > .card'
)
CARDS = (By.CSS_SELECTOR, '.columns > .column > .card')
CARD_CONTENT = (By.CSS_SELECTOR, '.card-content')
CARD_CONTENT_TITLE = (By.CSS_SELECTOR, '.media-content > .title')
CARD_CONTENT_OSM_WAY = (
By.CSS_SELECTOR,
'.media-content > .title > i.fa.fa-tree'
)
CARD_CONTENT_EX_RAID = (
By.CSS_SELECTOR,
'.media-content > .title > i.fa.fa-ticket'
)
CARD_CONTENT_VISIT_DATE = (By.CSS_SELECTOR, '.content > p')
CARD_FOOTER = (By.CSS_SELECTOR, '.card-footer')
CARD_FOOTER_OPTION = (By.CSS_SELECTOR, '.card-footer-item')
PROGRESS_BAR = (By.CSS_SELECTOR, 'progress.progress')
PROGRESS_PERCENTAGE = (By.CSS_SELECTOR, '.hero-body h2.subtitle:last-child')
GYM_MANAGEMENT_LINK = (
By.CSS_SELECTOR,
'.hero-body h2.subtitle .is-primary.is-inverted'
)
SEARCH_BAR = (By.ID, 'gym-list-search-bar')
SEARCH_SUGGESTIONS = (
By.CSS_SELECTOR,
'#dropdown-menu .dropdown-content .dropdown-item'
)
CARD_HEADER = (By.CSS_SELECTOR, '.columns > .column > .card .raid-header')
RAID_BANNER = (By.CSS_SELECTOR, '.raid-header')
PARENT_CARD = (By.XPATH, '../../..')
def get_link_selector(button_name):
"""
Create a selector for the button with the name
:param button_name: Text on button
:return: Selector tuple
"""
return (By.LINK_TEXT, button_name)
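# Illustrative usage (not part of the original module): build a selector for a named
# link and hand it to Selenium; the link text and "driver" object are hypothetical.
#
#   driver.find_element(*get_link_selector('View Gym'))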
| gpl-3.0 | -7,723,491,772,617,503,000 | 31.530612 | 76 | 0.663112 | false |
dashesy/pandas-tsdb | influxdb/indb_pd.py | 1 | 12410 | """InfluxDB interface to Pandas dataframe
Copyright Amiigo Inc.
"""
import pandas as pd
import numpy as np
import json
from indb_io import push_indb, query_indb, EmptyInput, InvalidData
def _is_null(val, zero_null=False):
"""check if value is missing
:param val: value to check
:param zero_null: if all-zero value should be treated like missing
:return:
"""
if val is None:
return True
try:
if np.all(np.isnan(val)):
return True
except:
pass
if zero_null:
try:
if np.all(np.array(val) == 0):
return True
except:
pass
return False
def _get_namespace(sensor):
"""get sensor namespace
    :param sensor: full (dotted) sensor name, e.g. bio.bpm, activities.steps
    :return: (namespace, name) tuple; namespace is None when the name has no dots
"""
parts = sensor.split(".")
if len(parts) < 2:
return None, sensor
return ".".join(parts[0:-1]), parts[-1]
def _json_valid(val):
"""return a jason serializable value
"""
try:
json.dumps(val)
except TypeError:
if hasattr(val, 'to_json'):
return val.to_json()
if hasattr(val, 'tolist'):
return val.tolist()
if hasattr(val, 'tostring'):
return val.tostring()
# string is always good
val = str(val)
return val
class InDBJson(dict):
"""dict but my dict
"""
pass
def df_to_indb(df,
name=None,
retention_policy=None,
database=None,
tag_columns=None,
ignore_sensors=None,
labels=None,
zero_null=True,
time=None,
precision='ms',
use_iso_format=False):
"""convert dataframe to InfluxDB json body (a list of dictionaries)
convert datetime index and vectors if necessary.
    :param df: dataframe; by convention the index is time and the info axis (columns) holds the sensor names
    :param name: if provided will be used as the measurement name.
        It can also be a callable that finds the measurement and sensor name from a column name
    :param retention_policy: InfluxDB policy of data retention
    :param database: InfluxDB database to generate json for
    :param tag_columns: list of tag columns,
        or a callable that gets the name of a column and returns whether it is a tag
    :param ignore_sensors: names of sensors to ignore,
        or a callable that returns whether a column should be ignored.
    :param labels: dictionary of per-frame extra tags (user_id, sensor_id, ...)
        tags in labels take precedence over those in dataframe
    :param zero_null: if zero readings should be treated the same as missing values
    :param time: default per-frame timestamp of the entire frame (or name of a column that holds time)
    :param precision: required time precision
    :param use_iso_format: if time should be in string format
"""
if df is None or len(df) == 0:
raise EmptyInput("Empty sensor data")
df = df.copy()
if not labels:
labels = {}
# ignore all-missing rows and columns
df = df.dropna(axis=1, how='all').dropna(axis=0, how='all')
if len(df) == 0:
raise EmptyInput("Empty sensor data")
epoch = np.datetime64('1970-01-01T00:00Z')
def _get_mname(sensor):
"""get measurement and sensor name of a sensor
"""
if callable(name):
return name(sensor)
return _get_namespace(sensor)
    def _get_ts(idx, sensors):
        """find time since epoch from the index and columns
"""
try:
ts = np.array([np.datetime64(idx0) for idx0 in idx])
except:
ts = None
# try other special field for time
if ts is None:
if isinstance(time, basestring) and time in sensors:
try:
ts = sensors[time].apply(np.datetime64)
except:
ts = sensors[time].apply(np.datetime64, args=['us'])
if ts is None and time:
ts = [np.datetime64(time)]
if ts is None:
# InfluxDB backend understands if there is no time and uses now()
return ts
if use_iso_format:
# only nanosecond text is supported
return [str(np.datetime64(t, 'ns')) for t in ts]
ts = (ts - epoch) / np.timedelta64(1, precision)
ts = [int(t) for t in ts]
return ts
df['time'] = _get_ts(df.index, df)
def _is_tag(col):
if callable(tag_columns):
return tag_columns(col)
if not tag_columns:
return False
return col in tag_columns
# update labels with unique inside-df-tags that are not already in labels
for label in [tag for tag in df if _is_tag(tag) and tag not in labels]:
if len(pd.Series.unique(df[label])) == 1:
# factor out shared tag
labels[label] = df[label][0]
df.drop(label, axis=1, inplace=True)
to_precision = {
'us': 'u',
'ns': 'n',
}
points = []
data = {
'points': points,
'precision': to_precision.get(precision, precision),
}
if retention_policy:
data['retentionPolicy'] = retention_policy
if database:
data['database'] = database
if len(pd.Series.unique(df['time'])) == 1:
# factor out shared timestamp
ts = df['time'][0]
df.drop('time', axis=1, inplace=True)
if not _is_null(ts):
data['timestamp'] = ts
# common tags
if labels:
# tags key/value pairs must be both strings
labels = {str(k): str(v) for k, v in labels.iteritems()}
data['tags'] = labels
for sensors in df.itertuples():
sensors = {k: v for (k, v) in zip(df.columns, sensors[1:])}
mlabels = {}
# extract per-row tags
for sensor, val in sensors.iteritems():
sensor = str(sensor)
if _is_null(val):
continue
if _is_tag(sensor):
# tags key/value pairs must be both strings
mlabels[sensor] = str(val)
try:
ts = sensors.pop('time')
except:
ts = None
measurements = {}
for sensor, val in sensors.iteritems():
sensor = str(sensor)
if _is_null(val, zero_null=zero_null):
continue
if _is_tag(sensor):
continue
if ignore_sensors:
if callable(ignore_sensors) and ignore_sensors(sensor):
continue
if sensor in ignore_sensors:
continue
mname = name
psensor = sensor
if mname is None:
# detect measurement name based on column name
mname, psensor = _get_mname(sensor)
if not mname:
raise InvalidData('No measurement name for {sensor}'.format(sensor=sensor))
if not psensor:
raise InvalidData('No field name for {sensor}'.format(sensor=sensor))
if mname not in measurements:
measurements[mname] = {}
measurements[mname][psensor] = _json_valid(val)
for mname, fields in measurements.iteritems():
indb = {
'name': mname,
'fields': fields,
}
if not _is_null(ts):
if isinstance(ts, basestring) and np.datetime64(ts) == np.datetime64('NaT'):
raise InvalidData('Invalid NaT in time')
indb['timestamp'] = ts
if not use_iso_format:
# FIXME: not always need to specify precision for all points
indb['precision'] = to_precision.get(precision, precision)
if mlabels:
indb['tags'] = mlabels
points.append(indb)
if not points:
raise EmptyInput("Empty sensor data")
return InDBJson(data)
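# Illustrative sketch of a df_to_indb call; the column names, tag and database
# below are placeholders, not values required by this module.
def _example_df_to_indb():
    """Build an InfluxDB json body from a tiny two-row dataframe."""
    df = pd.DataFrame(
        {"bio.bpm": [62, 64], "sensor_id": ["s1", "s1"]},
        index=pd.to_datetime(["2015-01-01 10:00:00", "2015-01-01 10:01:00"]))
    # 'sensor_id' is constant here, so it is factored into the shared tags;
    # 'bio.bpm' becomes field 'bpm' of measurement 'bio' via _get_namespace.
    return df_to_indb(df, database="example_db",
                      tag_columns=["sensor_id"], precision="ms")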
def dict_to_indb(data, **kwargs):
"""convert single dictionary to indb json body
Look at df_to_indb for additional arguments
"""
df = pd.DataFrame([data])
return df_to_indb(df, **kwargs)
def list_to_indb(data, **kwargs):
"""convert a list of dictionaries to indb json
Look at df_to_indb for additional arguments
"""
df = pd.DataFrame(data)
return df_to_indb(df, **kwargs)
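# record_indb below accepts a pandas DataFrame, a list of dicts, a single
# dict, or an already-built InDBJson; the first three are converted through
# df_to_indb / list_to_indb / dict_to_indb, an InDBJson gets the database and
# retention policy re-applied, and anything else is pushed as-is.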
def record_indb(auth, data,
name=None,
retention_policy=None,
database=None,
tag_columns=None,
ignore_sensors=None,
labels=None,
zero_null=True,
time=None,
precision='ms',
use_iso_format=False,
compress=False,
):
"""convert data to InfluxDB json and push it to server
"""
def _re_apply(dct):
if database:
dct['database'] = database
if retention_policy:
dct['retentionPolicy'] = retention_policy
kwargs = dict(
name=name,
retention_policy=retention_policy,
database=database,
tag_columns=tag_columns,
ignore_sensors=ignore_sensors,
labels=labels,
zero_null=zero_null,
time=time,
precision=precision,
use_iso_format=use_iso_format,
)
if isinstance(data, pd.DataFrame):
json_body = df_to_indb(data, **kwargs)
elif isinstance(data, list):
json_body = list_to_indb(data, **kwargs)
elif isinstance(data, InDBJson):
json_body = data
_re_apply(data)
elif isinstance(data, dict):
json_body = dict_to_indb(data, **kwargs)
_re_apply(data)
else:
# if unknown let it pass and maybe it can be recorded!
json_body = data
return push_indb(auth, json_body, compress=compress)
def response_to_df(data, sensor_id='sensor_id', tz='utc'):
"""convert InfluxDB response (result of a query) to dataframe
:param data: json data from InfluxDB backend
:param sensor_id: the tag that (along with time) uniquely defines a row of one sensor
:param tz: string or pytz.timezone object
"""
if not data:
return pd.DataFrame()
if isinstance(data, basestring):
data = json.loads(data)
if isinstance(data, dict):
data = data.get('results', [])
sdfs = {} # sensor dataframes
for chunk_idx, chunk in enumerate(data):
rows = chunk.get('series')
if not rows:
continue
for row in rows:
tags = row.get('tags', {})
columns = row.get('columns')
values = row.get('values')
if not columns or not values:
continue
name = row.get('name')
def _name_of(col):
if col == 'time' or not name:
return col
return '{name}.{col}'.format(col=col, name=name)
columns = [_name_of(col) for col in columns]
df = pd.DataFrame(values, columns=columns)
if 'time' in df:
df['time'] = df['time'].apply(np.datetime64)
df.set_index('time', inplace=True)
# bring back in-dataframe-tags
for tag, val in tags.iteritems():
df[tag] = val
pk = tags.get(sensor_id, 'unknown') or 'unknown'
if pk not in sdfs:
sdfs[pk] = []
sdfs[pk].append(df)
if len(sdfs) == 0:
return pd.DataFrame()
dfs = []
for ses in sdfs.itervalues():
if len(ses) == 0:
continue
df = pd.concat(ses)
dfs.append(df)
if len(dfs) == 0:
return pd.DataFrame()
data = pd.concat(dfs).sort_index()
if tz:
try:
data = data.tz_localize(tz)
except:
pass
return data
def query_indb_df(auth, query,
database=None,
chunked=False,
sensor_id='sensor_id'):
""" construct a dataframe from sensors
Look at query_indb and response_to_df for parameters
"""
data = query_indb(auth, query, database=database, chunked=chunked)
return response_to_df(data, sensor_id=sensor_id)
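# Illustrative sketch of reading data back; the query text, database name and
# auth object are placeholders.
def _example_query_indb_df(auth):
    """Fetch a measurement into a time-indexed pandas dataframe."""
    query = "select * from bio where time > now() - 1h"
    return query_indb_df(auth, query, database="example_db")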
| bsd-3-clause | 9,087,867,320,843,652,000 | 30.025 | 93 | 0.545931 | false |
Gateswong/GatesMusicPet | music_pet/services/vgmdb.py | 1 | 5699 | # -*- coding: utf-8 -*-
import requests
import json
import re
from ..audio import AudioTrack
LANGS = {
u"jp": (u"jp", u"Japanese"),
u"en": (u"en", u"English"),
}
def _get_json_by_link(link):
url = u'''http://vgmdb.info/%s''' % link
return _get_json(url)
def _get_json(url):
resp = requests.get(url, timeout=5)
if resp.status_code != 200:
raise Exception("Failed to search from VGMdb! Error Code = %d" % resp.status_code)
d = resp.json()
return d
# Functions with Search API
def search_all(keyword):
d = _get_json_by_link(u'''search/"%s"?format=json''' % keyword)
_validate_search_result(d)
return d[u"results"]
def search_albums(keyword):
d = _get_json_by_link(u'''search/albums/"%s"?format=json''' % keyword)
_validate_search_result(d)
return d[u"results"]
def search_artists(keyword):
d = _get_json_by_link(u'''search/artists/"%s"?format=json''' % keyword)
_validate_search_result(d)
return d[u"results"]
def search_orgs(keyword):
d = _get_json_by_link(u'''search/orgs/"%s"?format=json''' % keyword)
_validate_search_result(d)
return d[u"results"]
def search_products(keyword):
d = _get_json_by_link(u'''search/products/"%s"?format=json''' % keyword)
_validate_search_result(d)
return d[u"results"]
def _validate_search_result(search_result):
for key in [u"results", u"sections"]:
if key not in search_result:
raise ValueError("Invalid search result: field '%s' required!" % key)
for key in search_result[u"sections"]:
if key not in search_result[u"results"]:
raise ValueError("Invalid search result: in field 'results', field '%s' required!"
% key)
# Functions with Information API
def get_album(id):
d = _get_json_by_link(u'''album/%s''' % id)
return d
def get_artist(id):
d = _get_json_by_link(u'''artist/%s''' % id)
return d
def get_org(id):
d = _get_json_by_link(u'''org/%s''' % id)
return d
def get_product(id):
d = _get_json_by_link(u'''product/%s''' % id)
return d
def get_event(id):
d = _get_json_by_link(u'''event/%s''' % id)
return d
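# Illustrative sketch combining the two APIs; the keyword is a placeholder and
# the result shape (a "link" field like u"album/79" on each hit) is assumed.
def _example_search_and_fetch(keyword=u"example"):
    """Search albums and fetch full info for the first hit, if any."""
    results = search_albums(keyword)
    hits = results.get(u"albums", [])
    if not hits:
        return None
    album_id = hits[0][u"link"].split(u"/")[-1]
    return get_album(album_id)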
# Utility
def disc(num):
return u"Disc %s" % num
# Functions that produce data from JSON (dict)
def album_get_cover_picture(album_info, size="full"):
if u"covers" not in album_info:
return None
for cover_dict in album_info[u"covers"]:
if cover_dict[u"name"].lower() in [u"front", u"cover", u"folder"]:
return cover_dict[size]
def album_tracks(album_info, discname, lang=u"en"):
for disc in album_info[u"discs"]:
if disc[u"name"] == discname:
tracks = []
for track in disc[u"tracks"]:
tracks.append(track[u"names"].get(
LANGS[lang][1],
track[u"names"][u"English"]
))
return tracks
def update(album_info, track, lang=u"en"):
    """Fill the track's tags from the VGMdb album info."""
    if not isinstance(track, AudioTrack):
        raise TypeError("Instance is not an AudioTrack object")
update_album_title(album_info, track, lang=lang)
# update_artist(album_info, track, lang=lang)
update_album_artist(album_info, track, lang=lang)
update_catalog(album_info, track, lang=lang)
update_category(album_info, track, lang=lang)
update_cover_picture(album_info, track, lang=lang)
update_title(album_info, track, lang=lang)
return
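# Illustrative sketch of tagging one track; the album id is a placeholder and
# the track is assumed to be an AudioTrack with TRACKNUMBER already set.
def _example_tag_track(album_id, track):
    """Fill a single AudioTrack's tags from a VGMdb album."""
    album_info = get_album(album_id)
    update(album_info, track, lang=u"en")
    return track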
def update_album_title(album_info, track, lang=u"en"):
track.ALBUM = album_info[u"names"].get(
LANGS[lang][0],
album_info[u"names"][u"en"]
)
def update_album_artist(album_info, track, lang=u"en"):
composers = u""
for composer in album_info[u"composers"]:
composers += u", %s" % composer[u"names"].get(
LANGS[lang][0],
composer[u"names"][u"en"]
)
track.ALBUMARTIST = composers[2:]
def update_catalog(album_info, track, lang=u"en"):
if u"catalog" in album_info:
track[u"CATALOG"] = album_info[u"catalog"]
def update_category(album_info, track, lang=u"en"):
if u"category" in album_info:
track[u"CATEGORY"] = album_info[u"category"]
def update_cover_picture(album_info, track, lang=u"en"):
track[u"_picture"] = album_get_cover_picture(album_info)
def update_title(album_info, track, lang=u"en"):
if track.DISCNUMBER is None:
discname = u"Disc 1"
else:
discname = disc(track.DISCNUMBER)
track_names = album_tracks(album_info, discname, lang=lang)
track.TITLE = track_names[int(track.TRACKNUMBER) - 1]
def album_detail(album_info, lang=u"English", lang_short=u"en"):
detail_string = u""
if lang_short in album_info[u"names"]:
detail_string += u"TITLE : %s\n" % album_info[u"names"][lang_short]
else:
detail_string += u"TITLE : %s\n" % album_info[u"name"]
detail_string += u"\nCOMPOSER :\n"
for composer in album_info[u"composers"]:
if lang_short in composer[u"names"]:
detail_string += u"%s\n" % composer[u"names"][lang_short]
else:
detail_string += u"%s\n" % composer[u"names"][u"en"]
for disc in album_info[u"discs"]:
detail_string += u"\nIn : %s\n" % disc[u"name"]
for track_id, track in enumerate(disc[u"tracks"]):
detail_string += u" %s : %s\n" % (
str(track_id + 1).zfill(2),
track[u"names"][lang] if lang in track[u"names"] else track[u"names"]["English"])
return detail_string
# Functions for details
def print_album_detail(album_info, lang=u"English", lang_short=u"en"):
print(album_detail(album_info, lang, lang_short))
| mit | -4,544,880,001,078,173,000 | 25.384259 | 97 | 0.602737 | false |
vishnu2kmohan/dcos-commons | frameworks/helloworld/tests/test_secrets.py | 1 | 13700 | import logging
import pytest
import sdk_cmd
import sdk_install
import sdk_marathon
import sdk_plan
import sdk_tasks
import sdk_utils
from retrying import retry
from tests import config
log = logging.getLogger(__name__)
NUM_HELLO = 2
NUM_WORLD = 3
secret_content_default = "hello-world-secret-data"
secret_content_alternative = secret_content_default + "-alternative"
secret_options = {
"service": {
"spec_file": "examples/secrets.yml"
},
"hello": {
"count": NUM_HELLO,
"secret1": "hello-world/secret1",
"secret2": "hello-world/secret2"
},
"world": {
"count": NUM_WORLD,
"secret1": "hello-world/secret1",
"secret2": "hello-world/secret2",
"secret3": "hello-world/secret3"
}
}
options_dcos_space_test = {
"service": {
"spec_file": "examples/secrets.yml"
},
"hello": {
"count": NUM_HELLO,
"secret1": "hello-world/somePath/secret1",
"secret2": "hello-world/somePath/secret2"
},
"world": {
"count": NUM_WORLD,
"secret1": "hello-world/somePath/secret1",
"secret2": "hello-world/somePath/secret2",
"secret3": "hello-world/somePath/secret3"
}
}
@pytest.fixture(scope='module', autouse=True)
def configure_package(configure_security):
try:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
sdk_cmd.run_cli("package install --cli dcos-enterprise-cli --yes")
delete_secrets_all("{}/".format(config.SERVICE_NAME))
delete_secrets_all("{}/somePath/".format(config.SERVICE_NAME))
delete_secrets_all()
yield # let the test session execute
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
delete_secrets_all("{}/".format(config.SERVICE_NAME))
delete_secrets_all("{}/somePath/".format(config.SERVICE_NAME))
delete_secrets_all()
@pytest.mark.sanity
@pytest.mark.smoke
@pytest.mark.secrets
@sdk_utils.dcos_ee_only
@pytest.mark.dcos_min_version('1.10')
def test_secrets_basic():
# 1) create Secrets
# 2) install examples/secrets.yml
# 3) if secret file is not created, tasks will fail
# 4) wait till deployment finishes
# 5) do replace operation
# 6) ensure all tasks are running
# 7) delete Secrets
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
create_secrets("{}/".format(config.SERVICE_NAME))
sdk_install.install(config.PACKAGE_NAME, config.SERVICE_NAME, NUM_HELLO + NUM_WORLD, additional_options=secret_options)
hello_tasks_0 = sdk_tasks.get_task_ids(config.SERVICE_NAME, "hello-0")
    world_tasks_0 = sdk_tasks.get_task_ids(config.SERVICE_NAME, "world-0")
# ensure that secrets work after replace
sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod replace hello-0')
sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod replace world-0')
sdk_tasks.check_tasks_updated(config.SERVICE_NAME, "hello-0", hello_tasks_0)
sdk_tasks.check_tasks_updated(config.SERVICE_NAME, 'world-0', world_tasks_0)
# tasks will fail if secret files are not created by mesos module
sdk_tasks.check_running(config.SERVICE_NAME, NUM_HELLO + NUM_WORLD)
# clean up and delete secrets
delete_secrets("{}/".format(config.SERVICE_NAME))
@pytest.mark.sanity
@pytest.mark.smoke
@pytest.mark.secrets
@sdk_utils.dcos_ee_only
@pytest.mark.dcos_min_version('1.10')
def test_secrets_verify():
# 1) create Secrets
# 2) install examples/secrets.yml
# 3) verify Secrets content
# 4) delete Secrets
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
create_secrets("{}/".format(config.SERVICE_NAME))
sdk_install.install(config.PACKAGE_NAME, config.SERVICE_NAME, NUM_HELLO + NUM_WORLD, additional_options=secret_options)
# tasks will fail if secret file is not created
sdk_tasks.check_running(config.SERVICE_NAME, NUM_HELLO + NUM_WORLD)
# Verify secret content, one from each pod type
# first secret: environment variable name is given in yaml
assert secret_content_default == read_secret("world-0", "bash -c 'echo $WORLD_SECRET1_ENV'")
# second secret: file path is given in yaml
assert secret_content_default == read_secret("world-0", "cat WORLD_SECRET2_FILE")
# third secret : no file path is given in yaml
# default file path is equal to secret path
assert secret_content_default == read_secret("world-0", "cat hello-world/secret3")
# hello tasks has container image, world tasks do not
# first secret : environment variable name is given in yaml
assert secret_content_default == read_secret("hello-0", "bash -c 'echo $HELLO_SECRET1_ENV'")
# first secret : both environment variable name and file path are given in yaml
assert secret_content_default == read_secret("hello-0", "cat HELLO_SECRET1_FILE")
# second secret : file path is given in yaml
assert secret_content_default == read_secret("hello-0", "cat HELLO_SECRET2_FILE")
# clean up and delete secrets
delete_secrets("{}/".format(config.SERVICE_NAME))
@pytest.mark.sanity
@pytest.mark.smoke
@pytest.mark.secrets
@sdk_utils.dcos_ee_only
@pytest.mark.dcos_min_version('1.10')
def test_secrets_update():
# 1) create Secrets
# 2) install examples/secrets.yml
# 3) update Secrets
# 4) restart task
# 5) verify Secrets content (updated after restart)
# 6) delete Secrets
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
create_secrets("{}/".format(config.SERVICE_NAME))
sdk_install.install(config.PACKAGE_NAME, config.SERVICE_NAME, NUM_HELLO + NUM_WORLD, additional_options=secret_options)
# tasks will fail if secret file is not created
sdk_tasks.check_running(config.SERVICE_NAME, NUM_HELLO + NUM_WORLD)
sdk_cmd.run_cli("security secrets update --value={} {}/secret1".format(secret_content_alternative, config.SERVICE_NAME))
sdk_cmd.run_cli("security secrets update --value={} {}/secret2".format(secret_content_alternative, config.SERVICE_NAME))
sdk_cmd.run_cli("security secrets update --value={} {}/secret3".format(secret_content_alternative, config.SERVICE_NAME))
# Verify with hello-0 and world-0, just check with one of the pods
hello_tasks_old = sdk_tasks.get_task_ids(config.SERVICE_NAME, "hello-0")
world_tasks_old = sdk_tasks.get_task_ids(config.SERVICE_NAME, "world-0")
# restart pods to retrieve new secret's content
sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod restart hello-0')
sdk_cmd.svc_cli(config.PACKAGE_NAME, config.SERVICE_NAME, 'pod restart world-0')
# wait pod restart to complete
sdk_tasks.check_tasks_updated(config.SERVICE_NAME, "hello-0", hello_tasks_old)
sdk_tasks.check_tasks_updated(config.SERVICE_NAME, 'world-0', world_tasks_old)
# wait till it is running
sdk_tasks.check_running(config.SERVICE_NAME, NUM_HELLO + NUM_WORLD)
# make sure content is changed
assert secret_content_alternative == read_secret("world-0", "bash -c 'echo $WORLD_SECRET1_ENV'")
assert secret_content_alternative == read_secret("world-0", "cat WORLD_SECRET2_FILE")
assert secret_content_alternative == read_secret("world-0", "cat {}/secret3".format(config.SERVICE_NAME))
# make sure content is changed
assert secret_content_alternative == read_secret("hello-0", "bash -c 'echo $HELLO_SECRET1_ENV'")
assert secret_content_alternative == read_secret("hello-0", "cat HELLO_SECRET1_FILE")
assert secret_content_alternative == read_secret("hello-0", "cat HELLO_SECRET2_FILE")
# clean up and delete secrets
delete_secrets("{}/".format(config.SERVICE_NAME))
@pytest.mark.sanity
@pytest.mark.secrets
@pytest.mark.smoke
@sdk_utils.dcos_ee_only
@pytest.mark.dcos_min_version('1.10')
def test_secrets_config_update():
# 1) install examples/secrets.yml
# 2) create new Secrets, delete old Secrets
    # 3) update configuration with new Secrets
# 4) verify secret content (using new Secrets after config update)
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
create_secrets("{}/".format(config.SERVICE_NAME))
sdk_install.install(config.PACKAGE_NAME, config.SERVICE_NAME, NUM_HELLO + NUM_WORLD, additional_options=secret_options)
# tasks will fail if secret file is not created
sdk_tasks.check_running(config.SERVICE_NAME, NUM_HELLO + NUM_WORLD)
# Verify secret content, one from each pod type
# make sure it has the default value
assert secret_content_default == read_secret("world-0", "bash -c 'echo $WORLD_SECRET1_ENV'")
assert secret_content_default == read_secret("world-0", "cat WORLD_SECRET2_FILE")
assert secret_content_default == read_secret("world-0", "cat {}/secret3".format(config.SERVICE_NAME))
# hello tasks has container image
assert secret_content_default == read_secret("hello-0", "bash -c 'echo $HELLO_SECRET1_ENV'")
assert secret_content_default == read_secret("hello-0", "cat HELLO_SECRET1_FILE")
assert secret_content_default == read_secret("hello-0", "cat HELLO_SECRET2_FILE")
# clean up and delete secrets (defaults)
delete_secrets("{}/".format(config.SERVICE_NAME))
# create new secrets with new content -- New Value
create_secrets(secret_content_arg=secret_content_alternative)
marathon_config = sdk_marathon.get_config(config.SERVICE_NAME)
marathon_config['env']['HELLO_SECRET1'] = 'secret1'
marathon_config['env']['HELLO_SECRET2'] = 'secret2'
marathon_config['env']['WORLD_SECRET1'] = 'secret1'
marathon_config['env']['WORLD_SECRET2'] = 'secret2'
marathon_config['env']['WORLD_SECRET3'] = 'secret3'
# config update
sdk_marathon.update_app(config.SERVICE_NAME, marathon_config)
# wait till plan is complete - pods are supposed to restart
sdk_plan.wait_for_completed_deployment(config.SERVICE_NAME)
# all tasks are running
sdk_tasks.check_running(config.SERVICE_NAME, NUM_HELLO + NUM_WORLD)
# Verify secret content is changed
assert secret_content_alternative == read_secret("world-0", "bash -c 'echo $WORLD_SECRET1_ENV'")
assert secret_content_alternative == read_secret("world-0", "cat WORLD_SECRET2_FILE")
assert secret_content_alternative == read_secret("world-0", "cat secret3")
assert secret_content_alternative == read_secret("hello-0", "bash -c 'echo $HELLO_SECRET1_ENV'")
assert secret_content_alternative == read_secret("hello-0", "cat HELLO_SECRET1_FILE")
assert secret_content_alternative == read_secret("hello-0", "cat HELLO_SECRET2_FILE")
# clean up and delete secrets
delete_secrets()
@pytest.mark.sanity
@pytest.mark.smoke
@pytest.mark.secrets
@sdk_utils.dcos_ee_only
@pytest.mark.dcos_min_version('1.10')
def test_secrets_dcos_space():
# 1) create secrets in hello-world/somePath, i.e. hello-world/somePath/secret1 ...
# 2) Tasks with DCOS_SPACE hello-world/somePath
# or some DCOS_SPACE path under hello-world/somePath
# (for example hello-world/somePath/anotherPath/)
# can access these Secrets
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
# cannot access these secrets because of DCOS_SPACE authorization
create_secrets("{}/somePath/".format(config.SERVICE_NAME))
try:
sdk_install.install(
config.PACKAGE_NAME,
config.SERVICE_NAME,
NUM_HELLO + NUM_WORLD,
additional_options=options_dcos_space_test,
timeout_seconds=5 * 60) # Wait for 5 minutes. We don't need to wait 15 minutes for hello-world to fail an install
assert False, "Should have failed to install"
except AssertionError as arg:
raise arg
except:
pass # expected to fail
# clean up and delete secrets
delete_secrets("{}/somePath/".format(config.SERVICE_NAME))
def create_secrets(path_prefix="", secret_content_arg=secret_content_default):
sdk_cmd.run_cli("security secrets create --value={} {}secret1".format(secret_content_arg, path_prefix))
sdk_cmd.run_cli("security secrets create --value={} {}secret2".format(secret_content_arg, path_prefix))
sdk_cmd.run_cli("security secrets create --value={} {}secret3".format(secret_content_arg, path_prefix))
def delete_secrets(path_prefix=""):
sdk_cmd.run_cli("security secrets delete {}secret1".format(path_prefix))
sdk_cmd.run_cli("security secrets delete {}secret2".format(path_prefix))
sdk_cmd.run_cli("security secrets delete {}secret3".format(path_prefix))
def delete_secrets_all(path_prefix=""):
# if there is any secret left, delete
# use in teardown_module
try:
sdk_cmd.run_cli("security secrets get {}secret1".format(path_prefix))
sdk_cmd.run_cli("security secrets delete {}secret1".format(path_prefix))
except:
pass
try:
sdk_cmd.run_cli("security secrets get {}secret2".format(path_prefix))
sdk_cmd.run_cli("security secrets delete {}secret2".format(path_prefix))
except:
pass
try:
sdk_cmd.run_cli("security secrets get {}secret3".format(path_prefix))
sdk_cmd.run_cli("security secrets delete {}secret3".format(path_prefix))
except:
pass
@retry
def read_secret(task_name, command):
cmd_str = "task exec {} {}".format(task_name, command)
lines = sdk_cmd.run_cli(cmd_str).split('\n')
log.info('dcos %s output: %s', cmd_str, lines)
for i in lines:
        # startswith also matches secret_content_alternative, which only adds
        # a suffix to the default value, so both test phases can reuse this.
        if i.strip().startswith(secret_content_default):
return i
raise Exception("Failed to read secret")
| apache-2.0 | -7,747,007,387,317,984,000 | 37.055556 | 125 | 0.685255 | false |
adviti/melange | app/soc/modules/gsoc/views/proposal_review.py | 1 | 33758 | #!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for the GSoC proposal page.
"""
from google.appengine.ext import db
from django.core.urlresolvers import resolve
from django.core.urlresolvers import reverse
from django import forms as django_forms
from django.utils.translation import ugettext
from soc.logic import cleaning
from soc.logic.exceptions import BadRequest
from soc.logic.helper import notifications
from soc.views.helper import url as url_helper
from soc.views.helper.access_checker import isSet
from soc.views.template import Template
from soc.views.toggle_button import ToggleButtonTemplate
from soc.tasks import mailer
from soc.modules.gsoc.logic import profile as profile_logic
from soc.modules.gsoc.models.comment import GSoCComment
from soc.modules.gsoc.models.proposal_duplicates import GSoCProposalDuplicate
from soc.modules.gsoc.models.profile import GSoCProfile
from soc.modules.gsoc.models.score import GSoCScore
from soc.modules.gsoc.views import assign_mentor
from soc.modules.gsoc.views.base import RequestHandler
from soc.modules.gsoc.views.forms import GSoCModelForm
from soc.modules.gsoc.views.helper import url_patterns
from soc.modules.gsoc.views.helper.url_patterns import url
class CommentForm(GSoCModelForm):
"""Django form for the comment.
"""
class Meta:
model = GSoCComment
#css_prefix = 'gsoc_comment'
fields = ['content']
def clean_content(self):
field_name = 'content'
wrapped_clean_html_content = cleaning.clean_html_content(field_name)
content = wrapped_clean_html_content(self)
if content:
return content
else:
raise django_forms.ValidationError(
ugettext('Comment content cannot be empty.'), code='invalid')
def templatePath(self):
return 'v2/modules/gsoc/proposal/_comment_form.html'
class PrivateCommentForm(CommentForm):
"""Django form for the comment.
"""
class Meta:
model = GSoCComment
fields = CommentForm.Meta.fields + ['is_private']
class Duplicate(Template):
"""Template for showing a duplicates to the org admin.
"""
def __init__(self, data, duplicate):
"""Instantiates the template for rendering duplicates for a single
proposal.
Args:
data: RequestData object
duplicate: GSoCProposalDuplicate entity to render.
"""
self.duplicate = duplicate
super(Duplicate, self).__init__(data)
def context(self):
"""The context for this template used in render().
"""
r = self.data.redirect
orgs = []
for org in db.get(self.duplicate.orgs):
q = GSoCProfile.all()
q.filter('org_admin_for', org)
q.filter('status', 'active')
admins = q.fetch(1000)
data = {'name': org.name,
'link': r.organization(org).urlOf('gsoc_org_home'),
'admins': admins}
orgs.append(data)
context = {'orgs': orgs}
return context
def templatePath(self):
return 'v2/modules/gsoc/duplicates/proposal_duplicate_review.html'
class UserActions(Template):
"""Template to render the left side user actions.
"""
DEF_ACCEPT_PROPOSAL_HELP = ugettext(
'Choosing Yes will mark this proposal as accepted. The proposal is '
'accepted when Yes is displayed in bright orange.')
DEF_IGNORE_PROPOSAL_HELP = ugettext(
      'Choosing Yes will mark this proposal as ignored. The student will '
      'be able to see that this proposal is ignored when he/she visits this '
'page. The proposal is ignored when Yes is displayed in bright orange.')
DEF_IGNORE_PROPOSAL_NOTE = ugettext(
'Please refresh this page after setting this preference.')
DEF_PROPOSAL_MODIFICATION_HELP = ugettext(
'Choosing Enabled allows the student to edit this proposal. The '
'student can edit the proposal when Enabled is displayed in bright '
'orange.')
DEF_PUBLICLY_VISIBLE_HELP = ugettext(
'Choosing Yes will make this proposal publicly visible. The proposal '
'will be visible to even those who do not have a user account on this '
'site. The proposal is publicly visible when Yes is displayed in '
'bright orange')
DEF_WISH_TO_MENTOR_HELP = ugettext(
'Choosing Yes will add your name to the list of possible mentors to '
'this proposal. You will be listed as a possible mentor when Yes is '
'displayed in bright orange.')
DEF_WITHDRAW_PROPOSAL_HELP = ugettext(
'Choosing Yes, notifies your organization that you have withdrawn '
'this proposal and no longer wish to participate in the program with '
'this proposal. The proposal is withdrawn when the button displays '
'Yes in bright orange.')
def __init__(self, data, user_role):
super(UserActions, self).__init__(data)
self.user_role = user_role
self.toggle_buttons = []
def _mentorContext(self):
"""Construct the context needed for mentor actions.
"""
r = self.data.redirect.review()
wish_to_mentor = ToggleButtonTemplate(
self.data, 'on_off', 'Wish to Mentor', 'wish-to-mentor',
r.urlOf('gsoc_proposal_wish_to_mentor'),
checked=self.data.isPossibleMentorForProposal(),
help_text=self.DEF_WISH_TO_MENTOR_HELP,
labels = {
'checked': 'Yes',
'unchecked': 'No'})
self.toggle_buttons.append(wish_to_mentor)
if self.data.timeline.afterStudentSignupEnd():
proposal_modification_button = ToggleButtonTemplate(
self.data, 'long', 'Proposal Modifications', 'proposal-modification',
r.urlOf('gsoc_proposal_modification'),
checked=self.data.proposal.is_editable_post_deadline,
help_text=self.DEF_PROPOSAL_MODIFICATION_HELP,
labels = {
'checked': 'Enabled',
'unchecked': 'Disabled'})
self.toggle_buttons.append(proposal_modification_button)
return {}
def _orgAdminContext(self):
"""Construct the context needed for org admin actions.
"""
context = {}
r = self.data.redirect.review()
ignore_button_checked = False
if self.data.proposal.status == 'ignored':
ignore_button_checked = True
if self.data.proposal.status in ['pending', 'withdrawn', 'ignored']:
ignore_proposal = ToggleButtonTemplate(
self.data, 'on_off', 'Ignore Proposal', 'proposal-ignore',
r.urlOf('gsoc_proposal_ignore'),
checked=ignore_button_checked,
help_text=self.DEF_IGNORE_PROPOSAL_HELP,
note=self.DEF_IGNORE_PROPOSAL_NOTE,
labels={
'checked': 'Yes',
'unchecked': 'No'})
self.toggle_buttons.append(ignore_proposal)
if not self.proposal_ignored:
accept_proposal = ToggleButtonTemplate(
self.data, 'on_off', 'Accept proposal', 'accept-proposal',
r.urlOf('gsoc_proposal_accept'),
checked=self.data.proposal.accept_as_project,
help_text=self.DEF_ACCEPT_PROPOSAL_HELP,
labels = {
'checked': 'Yes',
'unchecked': 'No',})
self.toggle_buttons.append(accept_proposal)
r = self.data.redirect
possible_mentors_keys = self.data.proposal.possible_mentors
all_mentors_keys = profile_logic.queryAllMentorsKeysForOrg(
self.data.proposal_org)
context['assign_mentor'] = assign_mentor.AssignMentorFields(
self.data, self.data.proposal.mentor,
r.review().urlOf('gsoc_proposal_assign_mentor'),
possible_mentors_keys, all_mentors_keys)
return context
def _proposerContext(self):
"""Construct the context needed for proposer actions.
"""
r = self.data.redirect.review()
publicly_visible = ToggleButtonTemplate(
self.data, 'on_off', 'Publicly Visible', 'publicly-visible',
r.urlOf('gsoc_proposal_publicly_visible'),
checked=self.data.proposal.is_publicly_visible,
help_text=self.DEF_PUBLICLY_VISIBLE_HELP,
labels = {
'checked': 'Yes',
'unchecked': 'No',})
self.toggle_buttons.append(publicly_visible)
if self.data.proposal.status in ['pending', 'withdrawn']:
if self.data.proposal.status == 'withdrawn':
checked=True
elif self.data.proposal.status == 'pending':
checked=False
withdraw_proposal = ToggleButtonTemplate(
self.data, 'on_off', 'Withdraw Proposal', 'withdraw-proposal',
r.urlOf('gsoc_proposal_withdraw'), checked=checked,
help_text=self.DEF_WITHDRAW_PROPOSAL_HELP,
labels = {
'checked': 'Yes',
'unchecked': 'No',})
self.toggle_buttons.append(withdraw_proposal)
return {}
def context(self):
assert isSet(self.data.proposal)
context = {
'title': 'Proposal Actions',
}
self.proposal_ignored = self.data.proposal.status == 'ignored'
if self.user_role == 'mentor' and not self.proposal_ignored:
context.update(self._mentorContext())
if self.user_role == 'org_admin':
context.update(self._orgAdminContext())
# org admin is a mentor by default so add that context and buttons
# as well.
if not self.proposal_ignored:
context.update(self._mentorContext())
if self.user_role == 'proposer':
context.update(self._proposerContext())
context['toggle_buttons'] = self.toggle_buttons
return context
def templatePath(self):
return "v2/modules/gsoc/proposal/_user_action.html"
class ReviewProposal(RequestHandler):
"""View for the Propsal Review page.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/review/%s$' % url_patterns.REVIEW,
self, name='review_gsoc_proposal'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
self.check.canAccessProposalEntity()
self.mutator.commentVisible()
def templatePath(self):
return 'v2/modules/gsoc/proposal/review.html'
def getScores(self):
"""Gets all the scores for the proposal.
"""
assert isSet(self.data.private_comments_visible)
assert isSet(self.data.proposal_org)
assert isSet(self.data.proposal)
if not self.data.private_comments_visible:
return None
total = 0
number = 0
user_score = 0
query = db.Query(GSoCScore).ancestor(self.data.proposal)
for score in query:
total += score.value
number += 1
author_key = GSoCScore.author.get_value_for_datastore(score)
if author_key == self.data.profile.key():
user_score = score.value
return {
'average': total / number if number else 0,
'number': number,
'total': total,
'user_score': user_score,
}
def getComments(self, limit=1000):
"""Gets all the comments for the proposal visible by the current user.
"""
assert isSet(self.data.private_comments_visible)
assert isSet(self.data.proposal)
public_comments = []
private_comments = []
query = db.Query(GSoCComment).ancestor(self.data.proposal)
query.order('created')
all_comments = query.fetch(limit=limit)
for comment in all_comments:
if not comment.is_private:
public_comments.append(comment)
elif self.data.private_comments_visible:
private_comments.append(comment)
return public_comments, private_comments
def sanitizePossibleMentors(self, possible_mentors):
"""Removes possible mentors that are no longer mentors
"""
changed = False
result = []
for mentor in possible_mentors:
if self.data.proposal_org.key() in mentor.mentor_for:
result.append(mentor)
continue
changed = True
self.data.proposal.possible_mentors.remove(mentor.key())
if changed:
self.data.proposal.put()
return result
def context(self):
assert isSet(self.data.public_comments_visible)
assert isSet(self.data.private_comments_visible)
assert isSet(self.data.url_profile)
assert isSet(self.data.url_user)
assert isSet(self.data.proposal)
context = {}
user_role = None
scores = self.getScores()
# TODO: check if the scoring is not disabled
score_action = reverse('score_gsoc_proposal', kwargs=self.data.kwargs)
# get all the comments for the the proposal
public_comments, private_comments = self.getComments()
# TODO: check if it is possible to post a comment
comment_action = reverse('comment_gsoc_proposal', kwargs=self.data.kwargs)
if self.data.private_comments_visible:
form = PrivateCommentForm(self.data.POST or None)
if self.data.orgAdminFor(self.data.proposal.org):
user_role = 'org_admin'
else:
user_role = 'mentor'
else:
form = CommentForm(self.data.POST or None)
comment_box = {
'action': comment_action,
'form': form,
}
# to keep the blocks as simple as possible, the if branches have
# been broken down into several if blocks
user_is_proposer = self.data.user and \
(self.data.user.key() == self.data.url_user.key())
if user_is_proposer:
user_role = 'proposer'
# we will check if the student is allowed to modify the proposal
# after the student proposal deadline
is_editable = self.data.timeline.afterStudentSignupEnd() and \
self.data.proposal.is_editable_post_deadline
if self.data.timeline.studentSignup() or is_editable:
context['update_link'] = self.data.redirect.id().urlOf(
'update_gsoc_proposal')
possible_mentors = db.get(self.data.proposal.possible_mentors)
possible_mentors = self.sanitizePossibleMentors(possible_mentors)
possible_mentors_names = ', '.join([m.name() for m in possible_mentors])
scoring_visible = self.data.private_comments_visible and (
not self.data.proposal_org.scoring_disabled)
if self.data.orgAdminFor(self.data.proposal_org):
scoring_visible = True
duplicate = None
if self.data.program.duplicates_visible and self.data.orgAdminFor(
self.data.proposal_org):
q = GSoCProposalDuplicate.all()
q.filter('duplicates', self.data.proposal)
q.filter('is_duplicate', True)
dup_entity = q.get()
duplicate = Duplicate(self.data, dup_entity) if dup_entity else None
additional_info = self.data.proposal.additional_info
if user_role:
context['user_actions'] = UserActions(self.data, user_role)
context.update({
'additional_info': url_helper.trim_url_to(additional_info, 50),
'additional_info_link': additional_info,
'comment_box': comment_box,
'duplicate': duplicate,
'max_score': self.data.proposal_org.max_score,
'mentor': self.data.proposal.mentor,
'page_name': self.data.proposal.title,
'possible_mentors': possible_mentors_names,
'private_comments': private_comments,
'private_comments_visible': self.data.private_comments_visible,
'proposal': self.data.proposal,
'public_comments': public_comments,
'public_comments_visible': self.data.public_comments_visible,
'score_action': score_action,
'scores': scores,
'scoring_visible': scoring_visible,
'student_email': self.data.url_profile.email,
'student_name': self.data.url_profile.name(),
'proposal_ignored': self.data.proposal.status == 'ignored',
'user_role': user_role,
})
return context
class PostComment(RequestHandler):
"""View which handles publishing comments.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/comment/%s$' % url_patterns.REVIEW,
self, name='comment_gsoc_proposal'),
]
def checkAccess(self):
self.check.isProgramVisible()
self.check.isProfileActive()
self.mutator.proposalFromKwargs()
self.mutator.commentVisible()
assert isSet(self.data.proposer)
assert isSet(self.data.proposal_org)
# check if the comment is given by the author of the proposal
if self.data.proposer.key() == self.data.profile.key():
self.data.public_only = True
return
self.data.public_only = False
self.check.isMentorForOrganization(self.data.proposal_org)
def createCommentFromForm(self):
"""Creates a new comment based on the data inserted in the form.
Returns:
a newly created comment entity or None
"""
assert isSet(self.data.public_only)
assert isSet(self.data.proposal)
if self.data.public_only:
comment_form = CommentForm(self.data.request.POST)
else:
# this form contains checkbox for indicating private/public comments
comment_form = PrivateCommentForm(self.data.request.POST)
if not comment_form.is_valid():
return None
if self.data.public_only:
comment_form.cleaned_data['is_private'] = False
comment_form.cleaned_data['author'] = self.data.profile
q = GSoCProfile.all().filter('mentor_for', self.data.proposal.org)
q = q.filter('status', 'active')
if comment_form.cleaned_data.get('is_private'):
q.filter('notify_private_comments', True)
else:
q.filter('notify_public_comments', True)
mentors = q.fetch(1000)
to_emails = [i.email for i in mentors \
if i.key() != self.data.profile.key()]
def create_comment_txn():
comment = comment_form.create(commit=True, parent=self.data.proposal)
context = notifications.newCommentContext(self.data, comment, to_emails)
sub_txn = mailer.getSpawnMailTaskTxn(context, parent=comment)
sub_txn()
return comment
return db.run_in_transaction(create_comment_txn)
def post(self):
assert isSet(self.data.proposer)
assert isSet(self.data.proposal)
comment = self.createCommentFromForm()
if comment:
self.redirect.program()
self.redirect.to('gsoc_dashboard')
else:
# This is an insanely and absolutely hacky solution. We definitely
# do not want any one to use this a model for writing code elsewhere
# in Melange.
# TODO (Madhu): Replace this in favor of PJAX for loading comments.
r = self.redirect.review(self.data.proposal.key().id(),
self.data.proposer.link_id)
redirect_url = r.urlOf('review_gsoc_proposal')
proposal_match = resolve(redirect_url)
proposal_view = proposal_match[0]
self.request.method = 'GET'
self.response = proposal_view(self.request, *self.args, **self.kwargs)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class PostScore(RequestHandler):
"""View which handles posting scores.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/score/%s$' % url_patterns.REVIEW,
self, name='score_gsoc_proposal'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
assert isSet(self.data.proposal_org)
org = self.data.proposal_org
if not self.data.orgAdminFor(org) and org.scoring_disabled:
raise BadRequest('Scoring is disabled for this organization')
self.check.isMentorForOrganization(org)
def createOrUpdateScore(self, value):
"""Creates a new score or updates a score if there is already one
posted by the current user.
If the value passed in is 0 then the Score of the user will be removed and
None will be returned.
Args:
value: The value of the score the user gave as an integer.
Returns:
The score entity that was created/updated or None if value is 0.
"""
assert isSet(self.data.proposal)
assert isSet(self.data.proposal_org)
max_score = self.data.proposal_org.max_score
if value < 0 or value > max_score:
raise BadRequest("Score must not be higher than %d" % max_score)
query = db.Query(GSoCScore)
query.filter('author = ', self.data.profile)
query.ancestor(self.data.proposal)
def update_score_trx():
delta = 0
# update score entity
score = query.get()
if not score:
if not value:
return
old_value = 0
score = GSoCScore(
parent=self.data.proposal,
author=self.data.profile,
value=value)
score.put()
delta = 1
else:
old_value = score.value
if not value:
delta = -1
score.delete()
else:
score.value = value
score.put()
# update total score for the proposal
proposal = db.get(self.data.proposal.key())
proposal.score += value - old_value
proposal.nr_scores += delta
proposal.put()
db.run_in_transaction(update_score_trx)
def post(self):
value_str = self.data.POST.get('value', '')
value = int(value_str) if value_str.isdigit() else None
self.createOrUpdateScore(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
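# Note on the toggle-button views below (WishToMentor, IgnoreProposal,
# ProposalModificationPostDeadline, AcceptProposal, ProposalPubliclyVisible
# and WithdrawProposal): the POSTed 'value' field carries the state shown
# before the click ('checked' or 'unchecked'); each handler validates that it
# matches the stored state and then flips it, so e.g. value == 'unchecked' in
# WishToMentor means the requesting mentor is being added to possible_mentors.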
class WishToMentor(RequestHandler):
"""View handling wishing to mentor requests.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/wish_to_mentor/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_wish_to_mentor'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
assert isSet(self.data.proposal_org)
self.check.isMentorForOrganization(self.data.proposal_org)
def addToPotentialMentors(self, value):
"""Toggles the user from the potential mentors list.
Args:
value: can be either "checked" or "unchecked".
"""
assert isSet(self.data.profile)
assert isSet(self.data.proposal)
if value != 'checked' and value != 'unchecked':
raise BadRequest("Invalid post data.")
if value == 'checked' and not self.data.isPossibleMentorForProposal():
raise BadRequest("Invalid post data.")
if value == 'unchecked' and self.data.isPossibleMentorForProposal():
raise BadRequest("Invalid post data.")
proposal_key = self.data.proposal.key()
profile_key = self.data.profile.key()
def update_possible_mentors_trx():
# transactionally get latest version of the proposal
proposal = db.get(proposal_key)
if value == 'unchecked':
# we have already been added
if profile_key in proposal.possible_mentors:
return
proposal.possible_mentors.append(profile_key)
else:
# we have already been removed
if profile_key not in proposal.possible_mentors:
return
proposal.possible_mentors.remove(profile_key)
db.put(proposal)
db.run_in_transaction(update_possible_mentors_trx)
def post(self):
value = self.data.POST.get('value')
self.addToPotentialMentors(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class AssignMentor(RequestHandler):
"""View which handles assigning mentor to a proposal.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/assign_mentor/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_assign_mentor'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
assert isSet(self.data.proposal_org)
self.check.isOrgAdminForOrganization(self.data.proposal_org)
def assignMentor(self, mentor_entity):
"""Assigns the mentor to the proposal.
Args:
mentor_entity: The entity of the mentor profile which needs to assigned
to the proposal.
"""
assert isSet(self.data.proposal)
proposal_key = self.data.proposal.key()
def assign_mentor_txn():
proposal = db.get(proposal_key)
proposal.mentor = mentor_entity
proposal.has_mentor = True
db.put(proposal)
db.run_in_transaction(assign_mentor_txn)
def unassignMentor(self):
"""Removes the mentor assigned to the proposal.
"""
assert isSet(self.data.proposal)
proposal_key = self.data.proposal.key()
def unassign_mentor_txn():
proposal = db.get(proposal_key)
proposal.mentor = None
proposal.has_mentor = False
db.put(proposal)
db.run_in_transaction(unassign_mentor_txn)
def validate(self):
mentor_key = self.data.POST.get('assign_mentor')
if mentor_key:
mentor_entity = db.get(mentor_key)
org = self.data.proposal.org
if mentor_entity and self.data.isPossibleMentorForProposal(
mentor_entity) or (org.list_all_mentors
and db.Key(mentor_key) in profile_logic.queryAllMentorsKeysForOrg(
org)):
return mentor_entity
else:
raise BadRequest("Invalid post data.")
return None
def post(self):
assert isSet(self.data.proposal)
    mentor_entity = self.validate()
if mentor_entity:
self.assignMentor(mentor_entity)
else:
self.unassignMentor()
self.data.proposer = self.data.proposal.parent()
self.redirect.review(self.data.proposal.key().id(),
self.data.proposer.link_id)
self.redirect.to('review_gsoc_proposal')
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class IgnoreProposal(RequestHandler):
"""View which allows org admins to ignore a proposal.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/ignore/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_ignore'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
assert isSet(self.data.proposal_org)
self.check.isOrgAdminForOrganization(self.data.proposal_org)
def toggleIgnoreProposal(self, value):
"""Toggles the ignore status of the proposal.
Args:
value: can be either "checked" or "unchecked".
"""
assert isSet(self.data.proposal)
if value != 'checked' and value != 'unchecked':
raise BadRequest("Invalid post data.")
if value == 'checked' and self.data.proposal.status != 'ignored':
raise BadRequest("Invalid post data.")
if value == 'unchecked' and self.data.proposal.status not in [
'pending', 'withdrawn']:
raise BadRequest("Invalid post data.")
proposal_key = self.data.proposal.key()
def update_status_txn():
# transactionally get latest version of the proposal
proposal = db.get(proposal_key)
if value == 'unchecked':
proposal.status = 'ignored'
elif value == 'checked':
proposal.status = 'pending'
db.put(proposal)
db.run_in_transaction(update_status_txn)
def post(self):
value = self.data.POST.get('value')
self.toggleIgnoreProposal(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class ProposalModificationPostDeadline(RequestHandler):
"""View allowing mentors to allow students to modify the proposal.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/modification/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_modification'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
assert isSet(self.data.proposal_org)
self.check.isMentorForOrganization(self.data.proposal_org)
def toggleModificationPermission(self, value):
"""Toggles the permission to modify the proposal after proposal deadline.
Args:
value: can be either "checked" or "unchecked".
"""
assert isSet(self.data.proposal)
if value != 'checked' and value != 'unchecked':
raise BadRequest("Invalid post data.")
if value == 'checked' and not self.data.proposal.is_editable_post_deadline:
raise BadRequest("Invalid post data.")
if (value == 'unchecked' and
self.data.proposal.is_editable_post_deadline):
raise BadRequest("Invalid post data.")
proposal_key = self.data.proposal.key()
def update_modification_perm_txn():
# transactionally get latest version of the proposal
proposal = db.get(proposal_key)
if value == 'unchecked':
proposal.is_editable_post_deadline = True
elif value == 'checked':
proposal.is_editable_post_deadline = False
db.put(proposal)
db.run_in_transaction(update_modification_perm_txn)
def post(self):
value = self.data.POST.get('value')
self.toggleModificationPermission(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class AcceptProposal(RequestHandler):
"""View allowing org admins to directly accept the proposal.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/accept/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_accept'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
assert isSet(self.data.proposal_org)
self.check.isOrgAdminForOrganization(self.data.proposal_org)
def toggleStatus(self, value):
"""Toggles the the application state between accept and pending.
Args:
value: can be either "checked" or "unchecked".
"""
assert isSet(self.data.proposal)
if value != 'checked' and value != 'unchecked':
raise BadRequest("Invalid post data.")
if value == 'checked' and not self.data.proposal.accept_as_project:
raise BadRequest("Invalid post data.")
if value == 'unchecked' and self.data.proposal.accept_as_project:
raise BadRequest("Invalid post data.")
proposal_key = self.data.proposal.key()
def update_status_txn():
# transactionally get latest version of the proposal
proposal = db.get(proposal_key)
if value == 'unchecked':
proposal.accept_as_project = True
elif value == 'checked':
proposal.accept_as_project = False
db.put(proposal)
db.run_in_transaction(update_status_txn)
def post(self):
value = self.data.POST.get('value')
self.toggleStatus(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class ProposalPubliclyVisible(RequestHandler):
"""View allowing the proposer to make the proposal publicly visible.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/publicly_visible/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_publicly_visible'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
self.check.isProposer()
def togglePublicVisibilty(self, value):
"""Toggles the the public visibility of the application.
Args:
value: can be either "checked" or "unchecked".
"""
assert isSet(self.data.proposal)
if value != 'checked' and value != 'unchecked':
raise BadRequest("Invalid post data.")
if value == 'checked' and not self.data.proposal.is_publicly_visible:
raise BadRequest("Invalid post data.")
if value == 'unchecked' and self.data.proposal.is_publicly_visible:
raise BadRequest("Invalid post data.")
proposal_key = self.data.proposal.key()
def update_publicly_visibility_txn():
# transactionally get latest version of the proposal
proposal = db.get(proposal_key)
if value == 'unchecked':
proposal.is_publicly_visible = True
elif value == 'checked':
proposal.is_publicly_visible = False
db.put(proposal)
db.run_in_transaction(update_publicly_visibility_txn)
def post(self):
value = self.data.POST.get('value')
self.togglePublicVisibilty(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
class WithdrawProposal(RequestHandler):
"""View allowing the proposer to withdraw the proposal.
"""
def djangoURLPatterns(self):
return [
url(r'proposal/withdraw/%s$' % url_patterns.REVIEW,
self, name='gsoc_proposal_withdraw'),
]
def checkAccess(self):
self.mutator.proposalFromKwargs()
self.check.isProposer()
def toggleWithdrawProposal(self, value):
"""Toggles the the application state between withdraw and pending.
Args:
value: can be either "checked" or "unchecked".
"""
assert isSet(self.data.proposal)
if value != 'checked' and value != 'unchecked':
raise BadRequest("Invalid post data.")
if value == 'checked' and not self.data.proposal.status == 'withdrawn':
raise BadRequest("Invalid post data.")
if value == 'unchecked' and self.data.proposal.status == 'withdrawn':
raise BadRequest("Invalid post data.")
proposal_key = self.data.proposal.key()
def update_withdraw_status_txn():
# transactionally get latest version of the proposal
proposal = db.get(proposal_key)
if value == 'unchecked':
proposal.status = 'withdrawn'
elif value == 'checked':
proposal.status = 'pending'
db.put(proposal)
db.run_in_transaction(update_withdraw_status_txn)
def post(self):
value = self.data.POST.get('value')
self.toggleWithdrawProposal(value)
def get(self):
"""Special Handler for HTTP GET request since this view only handles POST.
"""
self.error(405)
| apache-2.0 | -1,911,329,805,116,648,400 | 29.773017 | 79 | 0.666183 | false |
sunfall/giles | giles/games/ataxx/ataxx.py | 1 | 24616 | # Giles: ataxx.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from giles.state import State
from giles.games.seated_game import SeatedGame
from giles.games.seat import Seat
from giles.utils import demangle_move
MIN_SIZE = 5
MAX_SIZE = 26
RED = "red"
BLUE = "blue"
YELLOW = "yellow"
GREEN = "green"
PIT = "pit"
COLS = "abcdefghijklmnopqrstuvwxyz"
TAGS = ["abstract", "capture", "square", "2p", "4p"]
class Ataxx(SeatedGame):
"""An Ataxx game table implementation. Invented in 1988 by Dave Crummack
and Craig Galley.
"""
def __init__(self, server, table_name):
super(Ataxx, self).__init__(server, table_name)
self.game_display_name = "Ataxx"
self.game_name = "ataxx"
self.seats = [
Seat("Red"),
Seat("Blue"),
]
self.min_players = 2
self.max_players = 2
self.state = State("need_players")
self.prefix = "(^RAtaxx^~): "
self.log_prefix = "%s/%s: " % (self.table_display_name, self.game_display_name)
# Ataxx-specific stuff.
self.board = None
self.printable_board = None
self.sides = {}
self.size = 7
self.player_mode = 2
self.turn = None
self.last_r = None
self.last_c = None
self.init_seats()
self.init_board()
def init_board(self):
self.board = []
for r in range(self.size):
self.board.append([None] * self.size)
# Place starting pieces, depending on the number of players.
bottom_left = BLUE
bottom_right = RED
if self.player_mode == 4:
bottom_left = YELLOW
bottom_right = GREEN
self.board[0][0] = RED
self.board[0][self.size - 1] = BLUE
self.board[self.size - 1][0] = bottom_left
self.board[self.size - 1][self.size - 1] = bottom_right
self.update_printable_board()
def init_seats(self):
# If we're in 2-player mode, and there are 4 seats, delete the
# extras.
if self.player_mode == 2 and len(self.seats) == 4:
            del self.seats[2]
            del self.seats[2]  # the old seat 3 is at index 2 after the first del
self.sides = {}
# Set the sides and data for players one and two.
self.seats[0].data.side = RED
self.seats[0].data.count = 2
self.seats[0].data.resigned = False
self.seats[1].data.side = BLUE
self.seats[1].data.count = 2
self.seats[1].data.resigned = False
self.sides[RED] = self.seats[0]
self.sides[BLUE] = self.seats[1]
# If there are four players...
if self.player_mode == 4:
# ...and only two seats, create them.
if len(self.seats) == 2:
self.seats.append(Seat("Green"))
self.seats.append(Seat("Yellow"))
# Either way, set the sides and data.
self.seats[2].data.side = GREEN
self.seats[2].data.resigned = False
self.sides[GREEN] = self.seats[2]
self.seats[3].data.side = YELLOW
self.seats[3].data.resigned = False
self.sides[YELLOW] = self.seats[3]
self.seats[0].data.count = 1
self.seats[1].data.count = 1
self.seats[2].data.count = 1
self.seats[3].data.count = 1
def change_player_mode(self, count):
# Don't bother if it's the mode we're already in.
if count == self.player_mode:
return False
# Don't bother if it's not a valid option either.
if count != 2 and count != 4:
return False
# Okay. Set values...
self.player_mode = count
self.min_players = count
self.max_players = count
# ...initialize the seats...
self.init_seats()
# ...and reinitialize the board.
self.init_board()
def update_printable_board(self):
self.printable_board = []
col_str = " " + "".join([" " + COLS[i] for i in range(self.size)])
self.printable_board.append(col_str + "\n")
self.printable_board.append(" ^m.=" + "".join(["=="] * self.size) + ".^~\n")
for r in range(self.size):
this_str = "%2d ^m|^~ " % (r + 1)
for c in range(self.size):
if r == self.last_r and c == self.last_c:
this_str += "^I"
loc = self.board[r][c]
if loc == RED:
this_str += "^RR^~ "
elif loc == BLUE:
this_str += "^BB^~ "
elif loc == GREEN:
this_str += "^GG^~ "
elif loc == YELLOW:
this_str += "^YY^~ "
elif loc == PIT:
this_str += "^Ko^~ "
else:
this_str += "^M.^~ "
this_str += "^m|^~ %d" % (r + 1)
self.printable_board.append(this_str + "\n")
self.printable_board.append(" ^m`=" + "".join(["=="] * self.size) + "'^~\n")
self.printable_board.append(col_str + "\n")
def get_info_str(self):
if not self.turn:
return("The game has not yet started.\n")
if self.turn == RED:
name = self.seats[0].player_name
turn_str = "^RRed^~"
elif self.turn == BLUE:
name = self.seats[1].player_name
turn_str = "^BBlue^~"
elif self.turn == GREEN:
name = self.seats[2].player_name
turn_str = "^GGreen^~"
else:
name = self.seats[3].player_name
turn_str = "^YYellow^~"
info_str = "It is %s's turn (%s).\n" % (name, turn_str)
info_str += "^RRed^~: %d ^BBlue^~: %d" % (self.seats[0].data.count, self.seats[1].data.count)
if self.player_mode == 4:
info_str += " ^GGreen^~: %d ^YYellow^~: %d" % (self.seats[2].data.count, self.seats[3].data.count)
info_str += "\n"
return(info_str)
def show(self, player):
if not self.printable_board:
self.update_printable_board()
for line in self.printable_board:
player.tell_cc(line)
player.tell_cc(self.get_info_str())
def send_board(self):
for listener in self.channel.listeners:
self.show(listener)
def is_valid(self, row, col):
# Note that this does /not/ care about pits, just about the proper
# ranges for coordinates.
if row < 0 or row >= self.size or col < 0 or col >= self.size:
return False
return True
def piece_has_move(self, row, col):
# Returns whether or not a given piece has a potential move.
# Bail on dud data.
if not self.is_valid(row, col) or not self.board[row][col]:
return False
# Okay. A piece can potentially move anywhere in a 5x5 area centered
# on its location.
found_move = False
for r_d in range(-2, 3): # <--- why I hate range syntax.
for c_d in range(-2, 3):
if not found_move and self.is_valid(row + r_d, col + c_d) and not self.board[row + r_d][col + c_d]:
found_move = True
# Return whether we found a move or not.
return found_move
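    # (A move of at most one square in each direction is a "split"/grow move;
    # anything farther, up to two squares away, is a jump. See move() below for
    # how the two cases are handled.)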
def color_has_move(self, color):
# Returns whether or not a given side has a potential move.
# Bail immediately if green or yellow and we're in 2p mode.
if self.player_mode == 2 and (color == YELLOW or color == GREEN):
return False
# Bail if this player has resigned.
if ((color == RED and self.seats[0].data.resigned) or
(color == BLUE and self.seats[1].data.resigned) or
(color == GREEN and self.seats[2].data.resigned) or
(color == YELLOW and self.seats[3].data.resigned)):
return False
# Okay. Scan the board for pieces...
for r in range(self.size):
for c in range(self.size):
if self.board[r][c] == color and self.piece_has_move(r, c):
return True
# Found no moves. This color has no valid moves.
return False
def loc_to_str(self, row, col):
return "%s%s" % (COLS[col], row + 1)
def move(self, player, src_loc, dst_loc):
seat = self.get_seat_of_player(player)
if not seat:
player.tell_cc(self.prefix + "You can't move; you're not playing!\n")
return False
if self.turn != seat.data.side:
player.tell_cc(self.prefix + "You must wait for your turn to move.\n")
return False
if src_loc == dst_loc:
player.tell_cc(self.prefix + "You can't make a non-move move!\n")
return False
src_c, src_r = src_loc
dst_c, dst_r = dst_loc
if not self.is_valid(src_c, src_r) or not self.is_valid(dst_c, dst_r):
player.tell_cc(self.prefix + "Your move is out of bounds.\n")
return False
src_str = self.loc_to_str(src_r, src_c)
dst_str = self.loc_to_str(dst_r, dst_c)
# Do they have a piece at the source?
color = seat.data.side
if self.board[src_r][src_c] != color:
player.tell_cc(self.prefix + "You don't have a piece at ^C%s^~.\n" % src_str)
return False
# Is the destination within range?
if abs(src_r - dst_r) > 2 or abs(src_c - dst_c) > 2:
player.tell_cc(self.prefix + "That move is too far.\n")
return False
# Is the destination empty?
if self.board[dst_r][dst_c]:
player.tell_cc(self.prefix + "^C%s^~ is already occupied.\n" % dst_str)
return False
# In range, to an empty cell. It's a valid move. Mark it.
self.last_r = dst_r
self.last_c = dst_c
# Now, is it a split or a leap?
if abs(src_r - dst_r) < 2 and abs(src_c - dst_c) < 2:
# Split. Add a new piece, increase the count.
action_str = "^Mgrew^~ into"
self.board[dst_r][dst_c] = color
seat.data.count += 1
else:
# Leap. Move the piece, don't increase the count.
action_str = "^Cjumped^~ to"
self.board[src_r][src_c] = None
self.board[dst_r][dst_c] = color
# Whichever action occurred, check all cells surrounding the
# destination. If they are opponents, transform them.
change_count = 0
change_str = ""
for r_d in range(-1, 2):
for c_d in range(-1, 2):
if self.is_valid(dst_r + r_d, dst_c + c_d):
occupier = self.board[dst_r + r_d][dst_c + c_d]
if occupier and occupier != color and occupier != PIT:
# Another player. Uh oh! Flip it and decrement that
# player's count.
self.board[dst_r + r_d][dst_c + c_d] = color
seat.data.count += 1
self.sides[occupier].data.count -= 1
change_count += 1
if change_count:
change_str = ", ^!converting %d piece" % change_count
if change_count != 1:
change_str += "s"
# Tell everyone what just happened.
self.channel.broadcast_cc(self.prefix + "From ^c%s^~, %s %s ^C%s^~%s^~.\n" % (src_str, player, action_str, dst_str, change_str))
self.update_printable_board()
return True
def toggle_pits(self, player, loc_list):
# Undocumented bonus feature: handles multiple locations, but if
# any of them are invalid, it'll bail halfway through. Useful for
# prepping a particular cool layout with a single cut-and-pasted
# string, though.
for loc in loc_list:
col, row = loc
# Bail if out of bounds.
if not self.is_valid(row, col):
player.tell_cc(self.prefix + "Pit out of bounds.\n")
return
# Bail if a starting piece is there.
thing_there = self.board[row][col]
if thing_there and not (thing_there == PIT):
player.tell_cc(self.prefix + "Cannot put a pit on a starting piece.\n")
return
# Since it's a toggle, figure out what we're toggling to.
if thing_there:
new_thing = None
action_str = "^cremoved^~"
else:
new_thing = PIT
action_str = "^Cadded^~"
            # Tentatively place the thing.
self.board[row][col] = new_thing
# Does it keep red or blue (which, in a 4p game, is equivalent to
# all four players) from being able to make a move? If so, it's
# invalid. Put the board back the way it was.
if not self.color_has_move(RED) or not self.color_has_move(BLUE):
player.tell_cc(self.prefix + "Players must have a valid move.\n")
self.board[row][col] = thing_there
return
loc_list = [(row, col)]
edge = self.size - 1
# In either mode, we place another pit across the center line,
# but not if that's the same location as the one we just placed
# (on the center line on odd-sized boards).
if (edge - row) != row:
self.board[edge - row][col] = new_thing
loc_list.append((edge - row, col))
            # Handle the 4p diagonal (180-degree) reflection if necessary.
if self.player_mode == 4 and (edge - col) != col:
self.board[edge - row][edge - col] = new_thing
loc_list.append((edge - row, edge - col))
# Handle the 4p right-reflection if necessary.
if self.player_mode == 4 and (edge - col) != col:
self.board[row][edge - col] = new_thing
loc_list.append((row, edge - col))
# Generate the list of locations.
loc_str = ", ".join(["^C%s^~" % self.loc_to_str(x[0], x[1]) for x in loc_list])
# Finally, send the string detailing what just happened.
self.channel.broadcast_cc(self.prefix + "^Y%s^~ has %s a pit at: %s\n" % (player, action_str, loc_str))
self.update_printable_board()
def set_size(self, player, size_bits):
if not size_bits.isdigit():
player.tell_cc(self.prefix + "Invalid size command.\n")
return
size = int(size_bits)
if size < MIN_SIZE or size > MAX_SIZE:
player.tell_cc(self.prefix + "Size must be between %d and %d inclusive.\n" % (MIN_SIZE, MAX_SIZE))
return
# Valid!
self.size = size
self.channel.broadcast_cc(self.prefix + "^R%s^~ has set the board size to ^C%d^~.\n" % (player, size))
self.init_board()
self.update_printable_board()
def set_player_mode(self, player, mode_bits):
if not mode_bits.isdigit():
player.tell_cc(self.prefix + "Invalid player mode command.\n")
return
mode = int(mode_bits)
if mode != 2 and mode != 4:
player.tell_cc(self.prefix + "This game only supports two or four players.\n")
return
elif mode == self.player_mode:
player.tell_cc(self.prefix + "This table is already in that mode.\n")
return
else:
self.change_player_mode(mode)
self.channel.broadcast_cc(self.prefix + "^Y%s^~ has changed the game to ^C%d-player^~ mode.\n" % (player, mode))
def resign(self, player):
seat = self.get_seat_of_player(player)
if not seat:
player.tell_cc(self.prefix + "You can't resign; you're not playing!\n")
return False
if self.turn != seat.data.side:
player.tell_cc(self.prefix + "You must wait for your turn to resign.\n")
return False
if seat.data.resigned:
player.tell_cc(self.prefix + "You've already resigned.\n")
return False
# They've passed the tests and can resign.
seat.data.resigned = True
self.channel.broadcast_cc(self.prefix + "^R%s^~ is resigning from the game.\n" % player)
def tick(self):
# If all seats are full and the game is active, autostart.
active_seats = [x for x in self.seats if x.player]
if (self.state.get() == "need_players" and
len(active_seats) == self.player_mode and self.active):
self.state.set("playing")
send_str = "^RRed^~: %s; ^BBlue^~: %s" % (self.seats[0].player_name, self.seats[1].player_name)
if self.player_mode == 4:
send_str += "; ^GGreen^~: %s; ^YYellow^~: %s" % (self.seats[2].player_name, self.seats[3].player_name)
self.channel.broadcast_cc(self.prefix + send_str + "\n")
self.turn = RED
self.send_board()
def handle(self, player, command_str):
# Handle common commands.
handled = self.handle_common_commands(player, command_str)
if not handled:
state = self.state.get()
command_bits = command_str.split()
primary = command_bits[0].lower()
if state == "setup":
if primary in ("size", "sz",):
if len(command_bits) == 2:
self.set_size(player, command_bits[1])
else:
player.tell_cc(self.prefix + "Invalid size command.\n")
handled = True
elif primary in ("players", "player", "pl",):
if len(command_bits) == 2:
self.set_player_mode(player, command_bits[1])
else:
player.tell_cc(self.prefix + "Invalid player mode command.\n")
handled = True
elif primary in ("pit", "hole",):
loc_list = demangle_move(command_bits[1:])
if loc_list:
self.toggle_pits(player, loc_list)
else:
player.tell_cc(self.prefix + "Invalid pit command.\n")
handled = True
elif primary in ("ready", "done", "r", "d",):
self.channel.broadcast_cc(self.prefix + "The game is now looking for players.\n")
self.state.set("need_players")
handled = True
elif state == "need_players":
if primary in ("config", "setup", "conf",):
self.state.set("setup")
self.channel.broadcast_cc(self.prefix + "^R%s^~ has switched the game to setup mode.\n" % player)
handled = True
elif state == "playing":
made_move = False
if primary in ("move", "play", "mv", "pl",):
move_bits = demangle_move(command_bits[1:])
if move_bits and len(move_bits) == 2:
made_move = self.move(player, move_bits[0], move_bits[1])
else:
player.tell_cc(self.prefix + "Invalid move command.\n")
handled = True
elif primary in ("resign",):
self.resign(player)
made_move = True
handled = True
if made_move:
# Did someone win?
winner = self.find_winner()
if winner:
self.resolve(winner)
self.finish()
else:
# Okay, well, let's see whose turn it is. If it comes
# back around to us, the game is over anyway.
curr_turn = self.turn
done = False
while not done:
if self.turn == RED:
self.turn = BLUE
elif self.turn == BLUE:
# The only tough one; switch depending on mode.
if self.player_mode == 2:
self.turn = RED
else:
self.turn = GREEN
elif self.turn == GREEN:
self.turn = YELLOW
elif self.turn == YELLOW:
self.turn = RED
# Now see if this player even has a move.
if self.color_has_move(self.turn):
done = True
elif self.turn == curr_turn:
# If we've wrapped back around to the current
# turn, no one had a move. Bail as well.
done = True
# Check to see if we're back at the mover.
if curr_turn == self.turn:
# No one had a valid move. Game's over.
self.no_move_resolve()
self.finish()
else:
# Otherwise it's some other player's turn; game on.
self.send_board()
if not handled:
player.tell_cc(self.prefix + "Invalid command.\n")
def find_winner(self):
# Get the list of players that haven't resigned and have at least one
# piece left on the board. If that list is only one long, we have a
# winner. Otherwise, the game continues.
live_players = [x for x in self.seats if not x.data.resigned and x.data.count]
if len(live_players) == 1:
return live_players[0].player_name
else:
return None
def resolve(self, winner):
self.send_board()
self.channel.broadcast_cc(self.prefix + "^C%s^~ wins!\n" % winner)
def no_move_resolve(self):
self.send_board()
# We look at the number of pieces each player has. Highest wins.
high_count = -1
high_list = None
for seat in self.seats:
if seat.data.count > high_count:
high_count = seat.data.count
high_list = ["^C%s^~" % seat.player_name]
elif seat.data.count == high_count:
# Potential tie.
high_list.append("^C%s^~" % seat.player_name)
# If a single player has the highest count, they win; otherwise, tie.
if len(high_list) == 1:
self.channel.broadcast_cc(self.prefix + "%s wins with ^Y%d^~ pieces!\n" % (high_list[0], high_count))
else:
self.channel.broadcast_cc(self.prefix + "These players ^Rtied^~ for first with ^Y%d^~ pieces: %s\n" % (", ".join(high_list)))
def show_help(self, player):
super(Ataxx, self).show_help(player)
player.tell_cc("\nATAXX SETUP PHASE:\n\n")
player.tell_cc(" ^!setup^., ^!config^., ^!conf^. Enter setup phase.\n")
player.tell_cc(" ^!size^. <size>, ^!sz^. Set board to <size>.\n")
player.tell_cc(" ^!players^. 2|4, ^!pl^. Set number of players.\n")
player.tell_cc(" ^!pit^. <ln> Add or remove pit at <ln>.\n")
player.tell_cc(" ^!ready^., ^!done^., ^!r^., ^!d^. End setup phase.\n")
player.tell_cc("\nATAXX PLAY:\n\n")
player.tell_cc(" ^!move^. <ln> <ln2>, ^!mv^. Move from <ln> to <ln2> (letter number).\n")
player.tell_cc(" ^!resign^. Resign.\n")
| agpl-3.0 | 1,148,322,591,392,057,900 | 35.960961 | 137 | 0.510156 | false |
mwil/collision | figs/ber_contour_AsAu/plot_ber_contour_AsAu.py | 1 | 3179 | # Copyright 2013-2014 Matthias Wilhelm
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.cm as cm
mpl.rc_file("../rc/1fig-contour-rc.txt")
from style_ber_contour_AsAu import Style1col
def do_plot(mode, content, wide):
global style
style.apply(mode, content, wide)
data = np.load("data/prr_AsAu_%s%s.npz"%(content, wide))
AU, TAU = np.meshgrid(-data["Au_range_dB"], data["tau_range"])
Zu = data["PRR_U"]
Zs = data["PRR_S"]
assert TAU.shape == AU.shape == Zu.shape, "The inputs TAU, AU, PRR_U must have the same shape for plotting!"
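    # (The .npz file is expected to contain the offset axis tau_range, the power
    # ratio axis Au_range_dB, and the packet-reception-ratio grids PRR_U/PRR_S
    # sampled on that tau x Au grid.)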
plt.clf()
if mode in ("sync",):
        # Plot the inverse power ratio; the sync signal is stronger for positive ratios
CSf = plt.contourf(TAU, AU, Zs, levels=(0.0, 0.2, 0.4, 0.6, 0.8, 0.9, 1.0), colors=("1.0", "0.75", "0.5", "0.25", "0.15", "0.0"), origin="lower")
CS2 = plt.contour(CSf, colors = ("r",)*5+("w",), linewidths=(0.75,)*5+(1.0,), origin="lower", hold="on")
else:
CSf = plt.contourf(TAU, AU, Zs, levels=(0.0, 0.2, 0.4, 0.6, 0.8, 0.9, 1.0), colors=("1.0", "0.75", "0.5", "0.25", "0.15", "0.0"), origin="lower")
CS2f = plt.contour(CSf, levels=(0.0, 0.2, 0.4, 0.6, 0.8, 1.0), colors=4*("r",)+("w",), linewidths=(0.75,)*4+(1.0,), origin="lower", hold="on")
#CS2f = plt.contour(TAU, -AU, Zu, levels=(0.9, 1.0), colors=("0.0",), linewidths=(1.0,), origin="lower", hold="on")
if content in ("unif",):
CSu = plt.contourf(TAU, AU, Zu, levels=(0.2, 1.0), hatches=("////",), colors=("0.75",), origin="lower")
CS2 = plt.contour(CSu, levels=(0.2,), colors = ("r",), linewidths=(1.0,), origin="lower", hold="on")
style.annotate(mode, content, wide)
plt.axis([data["tau_range"][0], data["tau_range"][-1], -data["Au_range_dB"][-1], -data["Au_range_dB"][0]])
plt.ylabel(r"Signal power ratio ($\mathrm{SIR}$)", labelpad=2)
plt.xlabel(r"Time offset $\tau$ ($/T$)", labelpad=2)
plt.savefig("pdf/prrc2_%s_%s%s_z.pdf"%(mode, content, wide))
if __name__ == "__main__":
argp = argparse.ArgumentParser()
argp.add_argument("mode", choices=("sync", "usync"), help="Plot from the view of the synchronized or unsynchronized sender")
argp.add_argument("content", choices=("same", "unif"), help="Relation between data content in the two transmitted packets")
argp.add_argument("-w", "--wide", action="store_true", help="Wide interval of time offsets used (-4T to 4T instead of -1.5T to 1.5T)")
args = argp.parse_args()
wide = ("_wide" if args.wide else "")
style = Style1col()
do_plot(args.mode, args.content, wide)
| gpl-3.0 | 5,220,181,277,542,690,000 | 40.828947 | 148 | 0.654923 | false |
sburnett/seattle | custominstallerbuilder/common/logging.py | 1 | 2344 | """
<Program Name>
logging.py
<Started>
December 2010
<Author>
Alex Hanson
<Purpose>
Provides a log_exception() function that can be called in the except portion
of a try...except block to log a formatted traceback to sys.stderr.
For exceptions that were not explicitly caught in a try...except block,
provides an AutoLogger class that serves as Django middleware, calling
log_exception() automatically.
"""
import sys
import traceback
def _indent_string(string, num_spaces):
lines = string.strip(' \n').split('\n')
for line_num, line in enumerate(lines):
lines[line_num] = (num_spaces * ' ') + line
return '\n'.join(lines)
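# For example (illustrative): _indent_string("a\nb", 2) returns "  a\n  b".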
def log_exception(request=None):
"""
<Purpose>
Write details regarding the latest thrown exception to sys.stderr. On a web
server, this should send the information to the server error log.
<Arguments>
request:
An optional HttpRequest object, used for providing additional detail.
<Exceptions>
None. Throwing an exception here would be pretty ironic.
<Side Effects>
None.
<Returns>
None.
"""
# By default, do not indent any messages.
indentation = 0
# Log the URL if a request object was given.
if request is not None:
url = request.build_absolute_uri()
sys.stderr.write('Error while generating ' + url + '\n')
# If a URL is printed, then indent the traceback.
indentation = 2
traceback_string = _indent_string(traceback.format_exc(), indentation)
sys.stderr.write(traceback_string + '\n')
sys.stderr.flush()
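# A minimal usage sketch (illustrative only; the view and function names below
# are hypothetical, not part of this module):
#
#   from custominstallerbuilder.common.logging import log_exception
#
#   def my_view(request):
#     try:
#       do_something_risky()
#     except Exception:
#       log_exception(request)  # traceback goes to the server error log
#       raise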
class AutoLogger(object):
"""
<Purpose>
If an exception is not explicitly caught somewhere, this class will
    write the details to the error log. It is meant to be used as Django
    middleware.
By default, uncaught exceptions will generate an HTTP 500 error (and a
traceback page if in debug mode), but are seemingly not logged by the web
server. This middleware fixes that by manually printing tracebacks to
sys.stderr.
<Side Effects>
None.
<Example Use>
In the Django settings file (settings_base.py), add the following entry to
the MIDDLEWARE_CLASSES list:
'custominstallerbuilder.common.logging.AutoLogger'
"""
def process_exception(self, request, exception):
log_exception(request)
| mit | -7,317,077,929,695,061,000 | 23.673684 | 79 | 0.689846 | false |
endlessm/chromium-browser | build/android/gyp/dex.py | 1 | 19214 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import re
import shutil
import sys
import tempfile
import zipfile
from util import build_utils
from util import md5_check
from util import zipalign
sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))
import convert_dex_profile
_IGNORE_WARNINGS = (
# A play services library triggers this.
r'Type `libcore.io.Memory` was not found',
# Filter out warnings caused by our fake main dex list used to enable
# multidex on library targets.
# Warning: Application does not contain `Foo` as referenced in main-dex-list
r'does not contain `Foo`',
)
def _ParseArgs(args):
args = build_utils.ExpandFileArgs(args)
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
parser.add_argument('--output', required=True, help='Dex output path.')
parser.add_argument(
'--class-inputs',
action='append',
help='GN-list of .jars with .class files.')
parser.add_argument(
'--class-inputs-filearg',
action='append',
help='GN-list of .jars with .class files (added to depfile).')
parser.add_argument(
'--dex-inputs', action='append', help='GN-list of .jars with .dex files.')
parser.add_argument(
'--dex-inputs-filearg',
action='append',
help='GN-list of .jars with .dex files (added to depfile).')
parser.add_argument(
'--incremental-dir',
help='Path of directory to put intermediate dex files.')
parser.add_argument(
'--main-dex-list-path',
help='File containing a list of the classes to include in the main dex.')
parser.add_argument(
'--multi-dex',
action='store_true',
help='Allow multiple dex files within output.')
parser.add_argument('--r8-jar-path', required=True, help='Path to R8 jar.')
parser.add_argument('--desugar', action='store_true')
parser.add_argument(
'--bootclasspath',
action='append',
help='GN-list of bootclasspath. Needed for --desugar')
parser.add_argument(
'--desugar-jdk-libs-json', help='Path to desugar_jdk_libs.json.')
parser.add_argument(
'--classpath',
action='append',
help='GN-list of full classpath. Needed for --desugar')
parser.add_argument(
'--release',
action='store_true',
      help='Run D8 in release mode. Release mode maximises main dex and '
      'deletes non-essential line number information (vs debug, which minimizes '
      'main dex and keeps all line number information, and then some).')
parser.add_argument(
'--min-api', help='Minimum Android API level compatibility.')
group = parser.add_argument_group('Dexlayout')
group.add_argument(
'--dexlayout-profile',
help=('Text profile for dexlayout. If present, a dexlayout '
'pass will happen'))
group.add_argument(
'--profman-path',
help=('Path to ART profman binary. There should be a lib/ directory at '
'the same path with shared libraries (shared with dexlayout).'))
group.add_argument(
'--dexlayout-path',
help=('Path to ART dexlayout binary. There should be a lib/ directory at '
'the same path with shared libraries (shared with dexlayout).'))
group.add_argument('--dexdump-path', help='Path to dexdump binary.')
group.add_argument(
'--proguard-mapping-path',
help=('Path to proguard map from obfuscated symbols in the jar to '
'unobfuscated symbols present in the code. If not present, the jar '
'is assumed not to be obfuscated.'))
parser.add_argument(
'--force-enable-assertions',
action='store_true',
help='Forcefully enable javac generated assertion code.')
options = parser.parse_args(args)
if options.dexlayout_profile:
build_utils.CheckOptions(
options,
parser,
required=('profman_path', 'dexlayout_path', 'dexdump_path'))
elif options.proguard_mapping_path is not None:
parser.error('Unexpected proguard mapping without dexlayout')
if options.main_dex_list_path and not options.multi_dex:
parser.error('--main-dex-list-path is unused if multidex is not enabled')
options.class_inputs = build_utils.ParseGnList(options.class_inputs)
options.class_inputs_filearg = build_utils.ParseGnList(
options.class_inputs_filearg)
options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
options.classpath = build_utils.ParseGnList(options.classpath)
options.dex_inputs = build_utils.ParseGnList(options.dex_inputs)
options.dex_inputs_filearg = build_utils.ParseGnList(
options.dex_inputs_filearg)
return options
def _RunD8(dex_cmd, input_paths, output_path):
dex_cmd = dex_cmd + ['--output', output_path] + input_paths
def stderr_filter(output):
patterns = _IGNORE_WARNINGS
# No classpath means we are using Bazel's Desugar tool to desugar lambdas
# and interface methods, in which case we intentionally do not pass a
# classpath to D8.
# Not having a classpath makes incremental dexing much more effective.
    # D8 will still be used for backported method desugaring.
if '--classpath' not in dex_cmd:
patterns = list(patterns) + ['default or static interface methods']
combined_pattern = '|'.join(re.escape(p) for p in patterns)
output = build_utils.FilterLines(output, combined_pattern)
    # Each warning has a prefix line of the file it's from. If we've filtered
# out the warning, then also filter out the file header.
# E.g.:
# Warning in path/to/Foo.class:
# Error message #1 indented here.
# Error message #2 indented here.
output = re.sub(r'^Warning in .*?:\n(?! )', '', output, flags=re.MULTILINE)
return output
# stdout sometimes spams with things like:
# Stripped invalid locals information from 1 method.
build_utils.CheckOutput(dex_cmd, stderr_filter=stderr_filter)
def _EnvWithArtLibPath(binary_path):
"""Return an environment dictionary for ART host shared libraries.
Args:
binary_path: the path to an ART host binary.
Returns:
An environment dictionary where LD_LIBRARY_PATH has been augmented with the
shared library path for the binary. This assumes that there is a lib/
directory in the same location as the binary.
"""
lib_path = os.path.join(os.path.dirname(binary_path), 'lib')
env = os.environ.copy()
libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l]
libraries.append(lib_path)
env['LD_LIBRARY_PATH'] = ':'.join(libraries)
return env
def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir):
"""Create a binary profile for dexlayout.
Args:
text_profile: The ART text profile that will be converted to a binary
profile.
input_dex: The input dex file to layout.
profman_path: Path to the profman binary.
temp_dir: Directory to work in.
Returns:
The name of the binary profile, which will live in temp_dir.
"""
binary_profile = os.path.join(
temp_dir, 'binary_profile-for-' + os.path.basename(text_profile))
open(binary_profile, 'w').close() # Touch binary_profile.
profman_cmd = [profman_path,
'--apk=' + input_dex,
'--dex-location=' + input_dex,
'--create-profile-from=' + text_profile,
'--reference-profile-file=' + binary_profile]
build_utils.CheckOutput(
profman_cmd,
env=_EnvWithArtLibPath(profman_path),
stderr_filter=lambda output:
build_utils.FilterLines(output, '|'.join(
[r'Could not find (method_id|proto_id|name):',
r'Could not create type list'])))
return binary_profile
def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir):
"""Layout a dexfile using a profile.
Args:
binary_profile: An ART binary profile, eg output from _CreateBinaryProfile.
input_dex: The dex file used to create the binary profile.
dexlayout_path: Path to the dexlayout binary.
temp_dir: Directory to work in.
Returns:
List of output files produced by dexlayout. This will be one if the input
was a single dexfile, or multiple files if the input was a multidex
zip. These output files are located in temp_dir.
"""
dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output')
os.mkdir(dexlayout_output_dir)
dexlayout_cmd = [ dexlayout_path,
'-u', # Update checksum
'-p', binary_profile,
'-w', dexlayout_output_dir,
input_dex ]
build_utils.CheckOutput(
dexlayout_cmd,
env=_EnvWithArtLibPath(dexlayout_path),
stderr_filter=lambda output:
build_utils.FilterLines(output,
r'Can.t mmap dex file.*please zipalign'))
output_files = os.listdir(dexlayout_output_dir)
if not output_files:
raise Exception('dexlayout unexpectedly produced no output')
return sorted([os.path.join(dexlayout_output_dir, f) for f in output_files])
def _ZipMultidex(file_dir, dex_files):
"""Zip dex files into a multidex.
Args:
file_dir: The directory into which to write the output.
dex_files: The dexfiles forming the multizip. Their names must end with
classes.dex, classes2.dex, ...
Returns:
The name of the multidex file, which will live in file_dir.
"""
ordered_files = [] # List of (archive name, file name)
for f in dex_files:
if f.endswith('dex.jar'):
ordered_files.append(('classes.dex', f))
break
if not ordered_files:
raise Exception('Could not find classes.dex multidex file in %s',
dex_files)
for dex_idx in xrange(2, len(dex_files) + 1):
archive_name = 'classes%d.dex' % dex_idx
for f in dex_files:
if f.endswith(archive_name):
ordered_files.append((archive_name, f))
break
else:
raise Exception('Could not find classes%d.dex multidex file in %s',
dex_files)
if len(set(f[1] for f in ordered_files)) != len(ordered_files):
raise Exception('Unexpected clashing filenames for multidex in %s',
dex_files)
zip_name = os.path.join(file_dir, 'multidex_classes.zip')
build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name))
for archive_name, file_name in ordered_files),
zip_name)
return zip_name
def _ZipAligned(dex_files, output_path):
"""Creates a .dex.jar with 4-byte aligned files.
Args:
dex_files: List of dex files.
output_path: The output file in which to write the zip.
"""
with zipfile.ZipFile(output_path, 'w') as z:
for i, dex_file in enumerate(dex_files):
name = 'classes{}.dex'.format(i + 1 if i > 0 else '')
zipalign.AddToZipHermetic(z, name, src_path=dex_file, alignment=4)
def _PerformDexlayout(tmp_dir, tmp_dex_output, options):
if options.proguard_mapping_path is not None:
matching_profile = os.path.join(tmp_dir, 'obfuscated_profile')
convert_dex_profile.ObfuscateProfile(
options.dexlayout_profile, tmp_dex_output,
options.proguard_mapping_path, options.dexdump_path, matching_profile)
else:
logging.warning('No obfuscation for %s', options.dexlayout_profile)
matching_profile = options.dexlayout_profile
binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output,
options.profman_path, tmp_dir)
output_files = _LayoutDex(binary_profile, tmp_dex_output,
options.dexlayout_path, tmp_dir)
if len(output_files) > 1:
return _ZipMultidex(tmp_dir, output_files)
if zipfile.is_zipfile(output_files[0]):
return output_files[0]
final_output = os.path.join(tmp_dir, 'dex_classes.zip')
_ZipAligned(output_files, final_output)
return final_output
def _CreateFinalDex(d8_inputs, output, tmp_dir, dex_cmd, options=None):
tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output.zip')
if (output.endswith('.dex')
or not all(f.endswith('.dex') for f in d8_inputs)):
if options:
if options.main_dex_list_path:
dex_cmd = dex_cmd + ['--main-dex-list', options.main_dex_list_path]
elif options.multi_dex and int(options.min_api or 1) < 21:
# When dexing library targets, it doesn't matter what's in the main dex.
tmp_main_dex_list_path = os.path.join(tmp_dir, 'main_list.txt')
with open(tmp_main_dex_list_path, 'w') as f:
f.write('Foo.class\n')
dex_cmd = dex_cmd + ['--main-dex-list', tmp_main_dex_list_path]
tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir')
os.mkdir(tmp_dex_dir)
_RunD8(dex_cmd, d8_inputs, tmp_dex_dir)
logging.debug('Performed dex merging')
dex_files = [os.path.join(tmp_dex_dir, f) for f in os.listdir(tmp_dex_dir)]
if output.endswith('.dex'):
if len(dex_files) > 1:
raise Exception('%d files created, expected 1' % len(dex_files))
tmp_dex_output = dex_files[0]
else:
_ZipAligned(sorted(dex_files), tmp_dex_output)
else:
# Skip dexmerger. Just put all incrementals into the .jar individually.
_ZipAligned(sorted(d8_inputs), tmp_dex_output)
logging.debug('Quick-zipped %d files', len(d8_inputs))
if options and options.dexlayout_profile:
tmp_dex_output = _PerformDexlayout(tmp_dir, tmp_dex_output, options)
# The dex file is complete and can be moved out of tmp_dir.
shutil.move(tmp_dex_output, output)
def _IntermediateDexFilePathsFromInputJars(class_inputs, incremental_dir):
"""Returns a list of all intermediate dex file paths."""
dex_files = []
for jar in class_inputs:
with zipfile.ZipFile(jar, 'r') as z:
for subpath in z.namelist():
if subpath.endswith('.class'):
subpath = subpath[:-5] + 'dex'
dex_files.append(os.path.join(incremental_dir, subpath))
return dex_files
def _DeleteStaleIncrementalDexFiles(dex_dir, dex_files):
"""Deletes intermediate .dex files that are no longer needed."""
all_files = build_utils.FindInDirectory(dex_dir)
desired_files = set(dex_files)
for path in all_files:
if path not in desired_files:
os.unlink(path)
def _ExtractClassFiles(changes, tmp_dir, class_inputs):
classes_list = []
for jar in class_inputs:
if changes:
changed_class_list = set(changes.IterChangedSubpaths(jar))
predicate = lambda x: x in changed_class_list and x.endswith('.class')
else:
predicate = lambda x: x.endswith('.class')
classes_list.extend(
build_utils.ExtractAll(jar, path=tmp_dir, predicate=predicate))
return classes_list
def _CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd):
# Create temporary directory for classes to be extracted to.
tmp_extract_dir = os.path.join(tmp_dir, 'tmp_extract_dir')
os.mkdir(tmp_extract_dir)
# Do a full rebuild when changes are to classpath or other non-input files.
allowed_changed = set(options.class_inputs)
allowed_changed.update(options.dex_inputs)
strings_changed = changes.HasStringChanges()
non_direct_input_changed = next(
(p for p in changes.IterChangedPaths() if p not in allowed_changed), None)
if strings_changed or non_direct_input_changed:
logging.debug('Full dex required: strings_changed=%s path_changed=%s',
strings_changed, non_direct_input_changed)
changes = None
class_files = _ExtractClassFiles(changes, tmp_extract_dir,
options.class_inputs)
logging.debug('Extracted class files: %d', len(class_files))
# If the only change is deleting a file, class_files will be empty.
if class_files:
# Dex necessary classes into intermediate dex files.
dex_cmd = dex_cmd + ['--intermediate', '--file-per-class']
_RunD8(dex_cmd, class_files, options.incremental_dir)
logging.debug('Dexed class files.')
def _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd):
logging.debug('_OnStaleMd5')
with build_utils.TempDir() as tmp_dir:
if options.incremental_dir:
# Create directory for all intermediate dex files.
if not os.path.exists(options.incremental_dir):
os.makedirs(options.incremental_dir)
_DeleteStaleIncrementalDexFiles(options.incremental_dir, final_dex_inputs)
logging.debug('Stale files deleted')
_CreateIntermediateDexFiles(changes, options, tmp_dir, dex_cmd)
_CreateFinalDex(
final_dex_inputs, options.output, tmp_dir, dex_cmd, options=options)
def MergeDexForIncrementalInstall(r8_jar_path, src_paths, dest_dex_jar):
dex_cmd = [
build_utils.JAVA_PATH,
'-jar',
r8_jar_path,
'd8',
]
with build_utils.TempDir() as tmp_dir:
_CreateFinalDex(src_paths, dest_dex_jar, tmp_dir, dex_cmd)
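# Illustrative only (hypothetical paths): callers such as the incremental
# install tooling are expected to invoke the helper above roughly like
#   MergeDexForIncrementalInstall('lib/r8.jar',
#                                 ['foo.dex.jar', 'bar.dex.jar'],
#                                 'obj/merged.dex.jar')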
def main(args):
build_utils.InitLogging('DEX_DEBUG')
options = _ParseArgs(args)
options.class_inputs += options.class_inputs_filearg
options.dex_inputs += options.dex_inputs_filearg
input_paths = options.class_inputs + options.dex_inputs
if options.multi_dex and options.main_dex_list_path:
input_paths.append(options.main_dex_list_path)
input_paths.append(options.r8_jar_path)
depfile_deps = options.class_inputs_filearg + options.dex_inputs_filearg
output_paths = [options.output]
if options.incremental_dir:
final_dex_inputs = _IntermediateDexFilePathsFromInputJars(
options.class_inputs, options.incremental_dir)
output_paths += final_dex_inputs
track_subpaths_allowlist = options.class_inputs
else:
final_dex_inputs = list(options.class_inputs)
track_subpaths_allowlist = None
final_dex_inputs += options.dex_inputs
dex_cmd = [
build_utils.JAVA_PATH, '-jar', options.r8_jar_path, 'd8',
]
if options.release:
dex_cmd += ['--release']
if options.min_api:
dex_cmd += ['--min-api', options.min_api]
if not options.desugar:
dex_cmd += ['--no-desugaring']
elif options.classpath:
# Don't pass classpath when Desugar.jar is doing interface desugaring.
dex_cmd += ['--lib', build_utils.JAVA_HOME]
for path in options.bootclasspath:
dex_cmd += ['--lib', path]
for path in options.classpath:
dex_cmd += ['--classpath', path]
depfile_deps += options.classpath
depfile_deps += options.bootclasspath
input_paths += options.classpath
input_paths += options.bootclasspath
if options.desugar_jdk_libs_json:
dex_cmd += ['--desugared-lib', options.desugar_jdk_libs_json]
if options.force_enable_assertions:
dex_cmd += ['--force-enable-assertions']
md5_check.CallAndWriteDepfileIfStale(
lambda changes: _OnStaleMd5(changes, options, final_dex_inputs, dex_cmd),
options,
input_paths=input_paths,
input_strings=dex_cmd + [bool(options.incremental_dir)],
output_paths=output_paths,
pass_changes=True,
track_subpaths_allowlist=track_subpaths_allowlist,
depfile_deps=depfile_deps)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause | 7,710,404,384,167,602,000 | 35.95 | 80 | 0.67404 | false |
erigones/esdc-ce | api/authtoken/apps.py | 1 | 1738 | """
Copied+modified from rest_framework.authtoken.apps, which is licensed under the BSD license:
*******************************************************************************
Copyright (c) 2011-2016, Tom Christie
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*******************************************************************************
"""
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AuthTokenConfig(AppConfig):
name = 'api.authtoken'
verbose_name = _("Auth Token")
| apache-2.0 | 4,715,227,434,735,411,000 | 50.117647 | 92 | 0.732451 | false |
jeremyosborne/python | multiproc/mproc.py | 1 | 2729 | # Why potentially use multiprocessing over threading?
# See:
#
# http://eli.thegreenplace.net/2012/01/16/python-parallelizing-cpu-bound-tasks-with-multiprocessing/
# and
# http://www.dabeaz.com/GIL/
# Notes about multiprocessing: child processes must be able to re-import the
# __main__ module, so these examples have to live in a script file and cannot
# be typed directly into the interactive interpreter.
import multiprocessing
import time
import random
class NamedObject(object):
"""Object identifiable by a name.
"""
def __init__(self, name):
self.name = name
def speak(self):
# arbitrary minor pause.
time.sleep(random.random())
        # Processes have names. We can get the current process object from the
        # multiprocessing module's current_process() function. The name is arbitrary.
proc_name = multiprocessing.current_process().name
print 'My name is {0}, and I am in {1}!'.format(self.name, proc_name)
def worker(q):
while q.empty() is not True:
# Pull an item from the queue.
obj = q.get()
# We assume that we're only getting NamedObjects.
# If we weren't getting named objects, this would break.
obj.speak()
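# Aside (illustrative only, not used in this example): for simple CPU-bound
# fan-out work, multiprocessing.Pool is often more convenient than managing
# Process/Queue pairs by hand, e.g.:
#
#   def shout(name):
#       return "My name is %s!" % name
#
#   if __name__ == '__main__':
#       pool = multiprocessing.Pool(processes=2)
#       print pool.map(shout, ["George Foreman %s" % i for i in range(10)])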
if __name__ == '__main__':
name = "George Foreman %s"
# Queues are First In, First Out objects designed for passing data into
# a process callback.
qs = multiprocessing.Queue(), multiprocessing.Queue()
for i in range(10):
# Fill the qs
qs[i % 2].put(NamedObject(name % i))
# This spawns our secondary process.
# Should always call with keyword arguments.
# target is our callback function
# args {tuple} are passed to our target as formal arguments.
p = multiprocessing.Process(target=worker, args=(qs[0],))
p2 = multiprocessing.Process(target=worker, args=(qs[1],))
# Begin the process.
p.start()
p2.start()
# Attempt to join the background process and flush the queue into it.
# But, if we close, we don't have to purposefully join.
# (This call is not always required, it is implied when we call close and
# the process is not a daemon).
#qs[0].join_thread()
#qs[1].join_thread()
# Wait for the worker to finish
# Close indicates no more data will be added to the queue.
# We need to close the data queue before joining the thread.
qs[0].close()
qs[1].close()
# Pause until the process terminates (blocking call).
# (This call is not required).
#p.join()
#p2.join()
# With the join call, print will not execute until the queue is empty.
# Turn off the p.join() above and print will call before the other procs.
print "If join is called, I will always be last. If not...."
| mit | 5,438,134,134,401,925,000 | 33.1125 | 104 | 0.648223 | false |
pracedru/pyDesign | PyDesignData/PyDesignSerializer.py | 1 | 1563 | import json
import zlib
import base64
import PyDesignData.PyDesignDocument
__author__ = 'magnus'
def complex_handler(Obj):
if hasattr(Obj, 'jsonable'):
return Obj.jsonable()
else:
raise Exception('Object of type %s with value of %s is not JSON serializable' % (type(Obj), repr(Obj)))
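# Illustrative sketch only (not part of the original module): complex_handler
# expects objects to expose a jsonable() method that returns plain,
# JSON-serializable data. A hypothetical class following that protocol:
#
#   class Point(object):
#       def __init__(self, x, y):
#           self.x = x
#           self.y = y
#
#       def jsonable(self):
#           return {"x": self.x, "y": self.y}
#
#   json.dumps({"origin": Point(0, 0)}, default=complex_handler)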
def serialize_document_to_console(document):
print(json.dumps(document, default=complex_handler))
def serialize_document_to_disk(document):
data = json.dumps(document, default=complex_handler)
if ".json" in document.name:
text_file = open(document.path + "/" + document.name, "w")
text_file.write(data)
text_file.close()
else:
code = zlib.compress(bytes(data, 'UTF-8'))
text_file = open(document.path + "/" + document.name, "wb")
text_file.write(code)
text_file.close()
document.changed = False
def deserialize_document_from_disk(file_path):
doc = None
if ".json" in file_path:
with open(file_path) as data_file:
data = json.load(data_file)
doc = PyDesignData.PyDesignDocument.PyDesignDocument.deserialize(data)
doc.path = file_path.replace("/" + doc.name, "")
else:
with open(file_path, "rb") as data_file:
S = data_file.read()
data_string = zlib.decompress(S)
data = json.loads(data_string.decode("UTF-8"))
doc = PyDesignData.PyDesignDocument.PyDesignDocument.deserialize(data)
doc.path = file_path.replace("/" + doc.name, "")
return doc
| mit | 1,326,747,839,351,036,200 | 30.26 | 111 | 0.62572 | false |
andreashorn/lead_dbs | ext_libs/OSS-DBS/OSS_platform/Math_module_MPI_only_FEniCS.py | 1 | 25928 | # -*- coding: utf-8 -*-
"""
Created on Sun Aug 19 13:23:54 2018
@author: butenko
"""
from dolfin import *
from pandas import read_csv
from tissue_dielectrics import DielectricProperties
import numpy as np
import os
import subprocess
import pickle
import warnings
warnings.filterwarnings("ignore")
import importlib
# additions
#import CSF_refinement as CSF_imp
import time as tm
#def run_mpi(np):
# print("Code runs in parallel, {} processors.".format(np))
# subprocess.call(["mpirun", "-np", "{}".format(np), "python3", "Math_module_MPI_only_FEniCS.py"])
parameters["allow_extrapolation"]=True
parameters['linear_algebra_backend']='PETSc'
set_log_active(False) #turns off debugging info
#def norm_E(E_r,E_im):
# return sqrt(E_r[0] ** 2 + E_r[1] ** 2+ E_r[2] ** 2+E_im[0] ** 2 + E_im[1] ** 2+ E_im[2] ** 2)
#this function runs in series
#def get_solutions(EQS_form,Domains,boundaries_sol,frequency,el_order): #know where to look for the solutions in h5py files
def get_solutions(EQS_form,frequency,el_order):
start_reassamble=tm.time()
mesh_sol = Mesh()
f = HDF5File(mesh_sol.mpi_comm(),os.environ['PATIENTDIR']+"/Results_adaptive/Solution_"+str(np.round(frequency,6))+".h5",'r')
f.read(mesh_sol,"mesh_sol", False)
if EQS_form == 'EQS':
Er = FiniteElement("Lagrange", mesh_sol.ufl_cell(),el_order)
Ei = FiniteElement("Lagrange", mesh_sol.ufl_cell(),el_order)
Ec = Er * Ei
V = FunctionSpace(mesh_sol, Ec)
phi_sol=Function(V)
f.read(phi_sol,'solution_phi_full')
phi_r_sol,phi_i_sol=phi_sol.split(deepcopy=True)
if el_order>1:
W =VectorFunctionSpace(mesh_sol,'DG',el_order-1)
W_i =VectorFunctionSpace(mesh_sol,'DG',el_order-1)
V_normE=FunctionSpace(mesh_sol,"CG",el_order-1)
else:
W =VectorFunctionSpace(mesh_sol,'DG',el_order)
W_i =VectorFunctionSpace(mesh_sol,'DG',el_order)
V_normE=FunctionSpace(mesh_sol,"CG",el_order)
E_field = Function(W)
E_field_im = Function(W_i)
f.read(E_field,'solution_E_field')
f.read(E_field_im,'solution_E_field_im')
j_dens_real = Function(W)
j_dens_im = Function(W_i)
f.read(j_dens_real, "solution_j_real")
f.read(j_dens_im, "solution_j_im")
J_Vector=PETScVector(MPI.comm_world,2)
f.read(J_Vector, "J_Vector",False)
J_real,J_im=J_Vector[:]
E_norm=project(sqrt(inner(E_field,E_field)+inner(E_field_im,E_field_im)),V_normE,solver_type="cg", preconditioner_type="amg")
max_E=E_norm.vector().max()
file=File(os.environ['PATIENTDIR']+'/Results_adaptive/Phi_r_field_EQS.pvd')
file<<phi_r_sol,mesh_sol
file=File(os.environ['PATIENTDIR']+'/Results_adaptive/Phi_im_field_EQS.pvd')
file<<phi_i_sol,mesh_sol
file=File(os.environ['PATIENTDIR']+'/Results_adaptive/E_norm_EQS.pvd')
file<<E_norm,mesh_sol
elif EQS_form == 'QS':
V = FunctionSpace(mesh_sol, "Lagrange",el_order)
phi_r_sol=Function(V)
phi_i_sol=Function(V)
f.read(phi_r_sol,'solution_phi_full')
phi_i_sol.vector()[:]=0.0
if el_order>1:
W =VectorFunctionSpace(mesh_sol,'DG',el_order-1)
V_normE=FunctionSpace(mesh_sol,"CG",el_order-1)
else:
W =VectorFunctionSpace(mesh_sol,'DG',el_order)
V_normE=FunctionSpace(mesh_sol,"CG",el_order)
E_field = Function(W)
f.read(E_field,'solution_E_field')
E_field_im=Function(W)
E_field_im.vector()[:] = 0.0 #fake
j_dens_real = Function(W)
j_dens_im = Function(W)
f.read(j_dens_real, "solution_j_real")
j_dens_im.vector()[:] = 0.0 #fake
J_Vector=PETScVector(MPI.comm_world,2)
f.read(J_Vector, "J_Vector",False)
J_real,J_im=J_Vector[:]
E_norm=project(sqrt(inner(E_field,E_field)),V_normE,solver_type="cg", preconditioner_type="amg")
max_E=E_norm.vector().max()
file=File(os.environ['PATIENTDIR']+'/Results_adaptive/E_norm_QS.pvd')
file<<E_norm,mesh_sol
file=File(os.environ['PATIENTDIR']+'/Results_adaptive/Phi_r_field_QS.pvd')
file<<phi_r_sol,mesh_sol
f.close()
#if we want to get the potential magnitude on the neuron compartments
Vertices_get=read_csv(os.environ['PATIENTDIR']+'/Neuron_model_arrays/Vert_of_Neural_model_NEURON.csv', delimiter=' ', header=None)
Vertices_array=Vertices_get.values
Phi_ROI=np.zeros((Vertices_array.shape[0],4),float)
for inx in range(Vertices_array.shape[0]):
pnt=Point(Vertices_array[inx,0],Vertices_array[inx,1],Vertices_array[inx,2])
Phi_ROI[inx,0]=Vertices_array[inx,0]
Phi_ROI[inx,1]=Vertices_array[inx,1]
Phi_ROI[inx,2]=Vertices_array[inx,2]
Phi_ROI[inx,3]=np.sqrt(phi_r_sol(pnt)*phi_r_sol(pnt)+phi_i_sol(pnt)*phi_i_sol(pnt))
np.savetxt(os.environ['PATIENTDIR']+'/Results_adaptive/Phi_'+str(frequency)+'.csv', Phi_ROI, delimiter=" ")
if MPI.comm_world.rank==0:
print("Unscaled current on the contact (to check current convergence): ",J_real,J_im)
minutes=int((tm.time() - start_reassamble)/60)
secnds=int(tm.time() - start_reassamble)-minutes*60
print("--- solution reassambled in ",minutes," min ",secnds," s ---")
return phi_r_sol,phi_i_sol,E_field,E_field_im,max_E,J_real,J_im,j_dens_real,j_dens_im
def get_field_on_points(phi_r,phi_i,c_c,J_r,J_i):
#================If we check on predefined vertices===========================#
Vertices_neur_get=read_csv(os.environ['PATIENTDIR']+'/Neuron_model_arrays/Vert_of_Neural_model_NEURON.csv', delimiter=' ', header=None)
Vertices_neur=Vertices_neur_get.values
Ampl_ROI=np.zeros((Vertices_neur.shape[0],4),float)
for inx in range(Vertices_neur.shape[0]):
pnt=Point(Vertices_neur[inx,0],Vertices_neur[inx,1],Vertices_neur[inx,2])
Ampl_ROI[inx,3]=sqrt(phi_r(pnt)*phi_r(pnt)+phi_i(pnt)*phi_i(pnt))
Ampl_ROI[inx,0]=Vertices_neur[inx,0]
Ampl_ROI[inx,1]=Vertices_neur[inx,1]
Ampl_ROI[inx,2]=Vertices_neur[inx,2]
Ampl_ROI=Ampl_ROI[~np.all(Ampl_ROI==0.0,axis=1)] #deletes all zero enteries
return Ampl_ROI
def get_cond_tensor(mesh):
# Code for C++ evaluation of conductivity
conductivity_code = """
#include <pybind11/pybind11.h>
#include <pybind11/eigen.h>
namespace py = pybind11;
#include <dolfin/function/Expression.h>
#include <dolfin/mesh/MeshFunction.h>
class Conductivity : public dolfin::Expression
{
public:
// Create expression with 6 components
Conductivity() : dolfin::Expression(6) {}
// Function for evaluating expression on each cell
void eval(Eigen::Ref<Eigen::VectorXd> values, Eigen::Ref<const Eigen::VectorXd> x, const ufc::cell& cell) const override
{
const uint cell_index = cell.index;
values[0] = (*c00)[cell_index];
values[1] = (*c01)[cell_index];
values[2] = (*c02)[cell_index];
values[3] = (*c11)[cell_index];
values[4] = (*c12)[cell_index];
values[5] = (*c22)[cell_index];
}
// The data stored in mesh functions
std::shared_ptr<dolfin::MeshFunction<double>> c00;
std::shared_ptr<dolfin::MeshFunction<double>> c01;
std::shared_ptr<dolfin::MeshFunction<double>> c02;
std::shared_ptr<dolfin::MeshFunction<double>> c11;
std::shared_ptr<dolfin::MeshFunction<double>> c12;
std::shared_ptr<dolfin::MeshFunction<double>> c22;
};
PYBIND11_MODULE(SIGNATURE, m)
{
py::class_<Conductivity, std::shared_ptr<Conductivity>, dolfin::Expression>
(m, "Conductivity")
.def(py::init<>())
.def_readwrite("c00", &Conductivity::c00)
.def_readwrite("c01", &Conductivity::c01)
.def_readwrite("c02", &Conductivity::c02)
.def_readwrite("c11", &Conductivity::c11)
.def_readwrite("c12", &Conductivity::c12)
.def_readwrite("c22", &Conductivity::c22);
}
"""
# Define conductivity expression and matrix
c00 = MeshFunction("double", mesh, 3, 0.0)
c01 = MeshFunction("double", mesh, 3, 0.0)
c02 = MeshFunction("double", mesh, 3, 0.0)
c11 = MeshFunction("double", mesh, 3, 0.0)
c12 = MeshFunction("double", mesh, 3, 0.0)
c22 = MeshFunction("double", mesh, 3, 0.0)
hdf = HDF5File(mesh.mpi_comm(), os.environ['PATIENTDIR']+"/Results_adaptive/Mesh_to_solve.h5", "r")
hdf.read(c00, "/c00")
hdf.read(c01, "/c01")
hdf.read(c02, "/c02")
hdf.read(c11, "/c11")
hdf.read(c12, "/c12")
hdf.read(c22, "/c22")
hdf.close()
c = CompiledExpression(compile_cpp_code(conductivity_code).Conductivity(),
c00=c00, c01=c01, c02=c02, c11=c11, c12=c12, c22=c22, degree=0)
C = as_matrix(((c[0], c[1], c[2]), (c[1], c[3], c[4]),(c[2],c[4],c[5])))
return C
def compute_field(mesh_sol,Domains,subdomains,boundaries_sol,kappa_r,Field_calc_param,kappa_i=False):
set_log_active(False) #turns off debugging info
if MPI.comm_world.rank==1:
print("_________________________")
parameters['linear_algebra_backend']='PETSc'
if Field_calc_param.EQS_mode == 'EQS':
kappa = [kappa_r, kappa_i]
else:
kappa = [kappa_r]
if Field_calc_param.anisotropy==1:
Cond_tensor=get_cond_tensor(mesh_sol) # unlike get_scaled_cond_tensor, this function does not scale tensor with conductivity (as it was already scaled)
else:
Cond_tensor=False #just to initialize
from Math_module_hybrid import choose_solver_for_me
if Field_calc_param.Solver_type=='Default':
        Solver_type=choose_solver_for_me(Field_calc_param.EQS_mode,Domains.Float_contacts)      # chooses the solver based on the Laplace formulation and whether floating conductors are used
else:
Solver_type=Field_calc_param.Solver_type # just get the solver directly
#In case of current-controlled stimulation, Dirichlet_bc or the whole potential distribution will be scaled afterwards (due to the system's linearity)
from FEM_in_spectrum import get_solution_space_and_Dirichlet_BC
V_space,Dirichlet_bc,ground_index,facets=get_solution_space_and_Dirichlet_BC(Field_calc_param.external_grounding,Field_calc_param.c_c,mesh_sol,subdomains,boundaries_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,Domains.Contacts,Domains.fi)
#ground index refers to the ground in .med/.msh file
#facets = MeshFunction('size_t',mesh_sol,2)
#facets.set_all(0)
if Field_calc_param.external_grounding==False:
facets.array()[boundaries_sol.array()==Domains.Contacts[ground_index]]=1
dsS=Measure("ds",domain=mesh_sol,subdomain_data=facets)
Ground_surface_size=assemble(1.0*dsS(1))
dx = Measure("dx",domain=mesh_sol)
# to solve the Laplace equation div(kappa*grad(phi))=0 (variational form: a(u,v)=L(v))
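    # For reference only: a minimal sketch (assumed form, QS case) of the weak
    # formulation that define_variational_form_and_solve() is expected to build:
    #     u, v = TrialFunction(V_space), TestFunction(V_space)
    #     a = inner(kappa[0] * grad(u), grad(v)) * dx   # or Cond_tensor * grad(u) if anisotropic
    #     L = Constant(0.0) * v * dx
    #     phi = Function(V_space); solve(a == L, phi, Dirichlet_bc)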
start_math=tm.time()
from FEM_in_spectrum import define_variational_form_and_solve
phi_sol=define_variational_form_and_solve(V_space,Dirichlet_bc,kappa,Field_calc_param.EQS_mode,Cond_tensor,Solver_type)
if MPI.comm_world.rank==1:
minutes=int((tm.time() - start_math)/60)
secnds=int(tm.time() - start_math)-minutes*60
print("--- assembled and solved in ",minutes," min ",secnds," s ---")
if Field_calc_param.EQS_mode == 'EQS':
(phi_r_sol,phi_i_sol)=phi_sol.split(deepcopy=True)
else:
phi_r_sol=phi_sol
phi_i_sol=Function(V_space)
phi_i_sol.vector()[:] = 0.0
if MPI.comm_world.rank==1:
print("dofs on 2nd process: ",(max(V_space.dofmap().dofs())+1))
# get current flowing through the grounded contact and the electric field in the whole domain
from FEM_in_spectrum import get_current
J_ground,E_field,E_field_im = get_current(mesh_sol,facets,boundaries_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,Domains.Contacts,kappa,Cond_tensor,phi_r_sol,phi_i_sol,ground_index,get_E_field=True)
# If EQS, J_ground is a complex number. If QS, E_field_im is a null function
# to get current density function which is required for mesh refinement when checking current convergence
from Math_module_hybrid import get_current_density
j_dens_real,j_dens_im = get_current_density(mesh_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,kappa,Cond_tensor,E_field,E_field_im)
# If QS, j_dens_im is null function
# will be used for mesh refinement
j_dens_real_unscaled=j_dens_real.copy(deepcopy=True)
j_dens_im_unscaled=j_dens_im.copy(deepcopy=True) # null function if QS
import copy
J_real_unscaled=copy.deepcopy(np.real(J_ground))
J_im_unscaled=copy.deepcopy(np.imag(J_ground)) # 0 if QS
# to project the E-field magnitude
if Field_calc_param.element_order>1:
V_normE=FunctionSpace(mesh_sol,"CG",Field_calc_param.element_order-1)
else:
V_normE=FunctionSpace(mesh_sol,"CG",Field_calc_param.element_order)
if Field_calc_param.external_grounding==True and (Field_calc_param.c_c==1 or len(Domains.fi)==1):
V_max=max(Domains.fi[:], key=abs)
V_min=0.0
elif -1*Domains.fi[0]==Domains.fi[1]: # V_across is needed only for 2 active contact systems
V_min=-1*abs(Domains.fi[0])
V_max=abs(Domains.fi[0])
else:
V_min=min(Domains.fi[:], key=abs)
V_max=max(Domains.fi[:], key=abs)
V_across=V_max-V_min # this can be negative
### Do not probe anything when inside MPI process!
# Vertices_get=read_csv('Neuron_model_arrays/Vert_of_Neural_model_NEURON.csv', delimiter=' ', header=None)
# Vertices_array=Vertices_get.values
# Phi_ROI=np.zeros((Vertices_array.shape[0],4),float)
#
# for inx in range(Vertices_array.shape[0]):
# pnt=Point(Vertices_array[inx,0],Vertices_array[inx,1],Vertices_array[inx,2])
#
# Phi_ROI[inx,0]=Vertices_array[inx,0]
# Phi_ROI[inx,1]=Vertices_array[inx,1]
# Phi_ROI[inx,2]=Vertices_array[inx,2]
# if Field_calc_param.c_c==1:
# phi_r_sol_scaled_on_point=V_across*np.real((phi_r_sol(pnt)+1j*phi_i_sol(pnt))/(J_real+1j*J_im))
# phi_i_sol_scaled_on_point=V_across*np.imag((phi_r_sol(pnt)+1j*phi_i_sol(pnt))/(J_real+1j*J_im))
# Phi_ROI[inx,3]=np.sqrt(phi_r_sol_scaled_on_point*phi_r_sol_scaled_on_point+phi_i_sol_scaled_on_point*phi_i_sol_scaled_on_point)
# else:
# Phi_ROI[inx,3]=np.sqrt(phi_r_sol(pnt)*phi_r_sol(pnt)+phi_i_sol(pnt)*phi_i_sol(pnt))
#
# np.savetxt('Results_adaptive/Phi_'+str(Field_calc_param.frequenc)+'.csv', Phi_ROI, delimiter=" ")
#save the results
if Field_calc_param.c_c!=1 and Field_calc_param.CPE!=1:
        # Just to save the total current through the ground in FEniCS hdf5
J_Vector=Vector(MPI.comm_self,2)
J_Vector.set_local(np.array([J_real_unscaled,J_im_unscaled],dtype=np.float64))
Hdf=HDF5File(mesh_sol.mpi_comm(), os.environ['PATIENTDIR']+"/Results_adaptive/Solution_"+str(np.round(Field_calc_param.frequenc,6))+".h5", "w")
Hdf.write(mesh_sol, "mesh_sol")
Hdf.write(phi_sol, "solution_phi_full")
Hdf.write(E_field, "solution_E_field")
Hdf.write(E_field_im, "solution_E_field_im")
Hdf.write(j_dens_real_unscaled, "solution_j_real")
Hdf.write(j_dens_im_unscaled, "solution_j_im")
Hdf.write(J_Vector, "/J_Vector")
Hdf.close()
return True
### Do not probe anything when inside MPI process!
# #Probe_of_potential
# probe_z=np.zeros((100,4),float)
# for inx in range(100):
# pnt=Point(75.5,78.5,27.865+inx/10.0)
# probe_z[inx,0]=75.5
# probe_z[inx,1]=78.5
# probe_z[inx,2]=27.865+inx/10.0
# if Field_calc_param.c_c==1:
# phi_r_sol_scaled_on_point=V_across*np.real((phi_r_sol(pnt)+1j*phi_i_sol(pnt))/(J_real+1j*J_im))
# phi_i_sol_scaled_on_point=V_across*np.imag((phi_r_sol(pnt)+1j*phi_i_sol(pnt))/(J_real+1j*J_im))
# probe_z[inx,3]=np.sqrt(phi_r_sol_scaled_on_point*phi_r_sol_scaled_on_point+phi_i_sol_scaled_on_point*phi_i_sol_scaled_on_point)
# else:
# probe_z[inx,3]=np.sqrt(phi_r_sol(pnt)*phi_r_sol(pnt)+phi_i_sol(pnt)*phi_i_sol(pnt))
# np.savetxt('Results_adaptive/Phi_Zprobe'+str(Field_calc_param.frequenc)+'.csv', probe_z, delimiter=" ")
#print("Tissue impedance: ", Z_tis)
#=============================================================================#
if Field_calc_param.c_c==1 or Field_calc_param.CPE==1:
Z_tissue = V_across/J_ground # Tissue impedance
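# illustrative numbers only: a 1.0 V drop driving 5 mA through the grounded
# contact gives Z_tissue = 1.0/0.005 = 200 Ohm (complex-valued in the EQS case)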
if MPI.comm_world.rank==1:
print("Tissue impedance: ", Z_tissue)
if Field_calc_param.CPE==1:
if len(Domains.fi)>2:
print("Currently, CPE can be used only for simulations with two contacts. Please assign the rest to 'None'")
raise SystemExit
from GUI_inp_dict import d as d_cpe
CPE_param=[d_cpe["K_A"],d_cpe["beta"],d_cpe["K_A_ground"],d_cpe["beta_ground"]]
from FEM_in_spectrum import get_CPE_corrected_Dirichlet_BC #-1.0 to avoid printing
Dirichlet_bc_with_CPE,total_impedance=get_CPE_corrected_Dirichlet_BC(Field_calc_param.external_grounding,facets,boundaries_sol,CPE_param,Field_calc_param.EQS_mode,Field_calc_param.frequenc,-1.0,Domains.Contacts,Domains.fi,V_across,Z_tissue,V_space)
if MPI.comm_world.rank==1:
print("Solving for an adjusted potential on contacts to account for CPE")
start_math=tm.time()
# to solve the Laplace equation for the adjusted Dirichlet
phi_sol_CPE=define_variational_form_and_solve(V_space,Dirichlet_bc_with_CPE,kappa,Field_calc_param.EQS_mode,Cond_tensor,Solver_type)
if MPI.comm_world.rank==1:
minutes=int((tm.time() - start_math)/60)
secnds=int(tm.time() - start_math)-minutes*60
print("--- assembled and solved in ",minutes," min ",secnds," s ")
if Field_calc_param.EQS_mode=='EQS':
(phi_r_CPE,phi_i_CPE)=phi_sol_CPE.split(deepcopy=True)
else:
phi_r_CPE=phi_sol_CPE
phi_i_CPE=Function(V_space)
phi_i_CPE.vector()[:] = 0.0
# get current flowing through the grounded contact and the electric field in the whole domain
J_ground_CPE,E_field_CPE,E_field_im_CPE = get_current(mesh_sol,facets,boundaries_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,Domains.Contacts,kappa,Cond_tensor,phi_r_CPE,phi_i_CPE,ground_index,get_E_field=True)
# If EQS, J_ground is a complex number. If QS, E_field_CPE is a null function
# to get current density function which is required for mesh refinement when checking current convergence
j_dens_real_CPE,j_dens_im_CPE = get_current_density(mesh_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,kappa,Cond_tensor,E_field_CPE,E_field_im_CPE)
# If QS, j_dens_im is null function
# will be used for mesh refinement
j_dens_real_unscaled=j_dens_real_CPE.copy(deepcopy=True)
j_dens_im_unscaled=j_dens_im_CPE.copy(deepcopy=True)
J_real_unscaled=copy.deepcopy(np.real(J_ground))
J_im_unscaled=copy.deepcopy(np.imag(J_ground))
#Just to save total current through the ground in FEniCS hdf5
J_Vector=Vector(MPI.comm_self,2)
J_Vector.set_local(np.array([J_real_unscaled,J_im_unscaled],dtype=np.float64))
Hdf=HDF5File(mesh_sol.mpi_comm(), os.environ['PATIENTDIR']+"/Results_adaptive/Solution_"+str(np.round(Field_calc_param.frequenc,6))+".h5", "w")
Hdf.write(mesh_sol, "mesh_sol")
Hdf.write(phi_sol_CPE, "solution_phi_full")
Hdf.write(E_field_CPE, "solution_E_field")
Hdf.write(E_field_im_CPE, "solution_E_field_im")
Hdf.write(j_dens_real_unscaled, "solution_j_real")
Hdf.write(j_dens_im_unscaled, "solution_j_im")
Hdf.write(J_Vector, "/J_Vector")
Hdf.close()
return True
if Field_calc_param.c_c==1:
if Field_calc_param.EQS_mode=='EQS': # For EQS, we need to scale the potential on boundaries (because the error is absolute) and recompute field, etc. Maybe we can scale them also directly?
Dirichlet_bc_scaled=[]
for bc_i in range(len(Domains.Contacts)): #CPE estimation is valid only for one active and one ground contact configuration
if Field_calc_param.EQS_mode=='EQS':
if Domains.fi[bc_i]!=0.0:
Active_with_CC=V_across*V_across/J_ground #(impedance * current through the contact (V_across coincides with the assigned current magnitude))
Dirichlet_bc_scaled.append(DirichletBC(V_space.sub(0), np.real(Active_with_CC), boundaries_sol,Domains.Contacts[bc_i]))
Dirichlet_bc_scaled.append(DirichletBC(V_space.sub(1), np.imag(Active_with_CC), boundaries_sol,Domains.Contacts[bc_i]))
else:
Dirichlet_bc_scaled.append(DirichletBC(V_space.sub(0), Constant(0.0), boundaries_sol,Domains.Contacts[bc_i]))
Dirichlet_bc_scaled.append(DirichletBC(V_space.sub(1), Constant(0.0), boundaries_sol,Domains.Contacts[bc_i]))
if Field_calc_param.external_grounding==True:
if Field_calc_param.EQS_mode == 'EQS':
Dirichlet_bc_scaled.append(DirichletBC(V_space.sub(0),0.0,facets,1))
Dirichlet_bc_scaled.append(DirichletBC(V_space.sub(1),0.0,facets,1))
else:
Dirichlet_bc_scaled.append(DirichletBC(V_space,0.0,facets,1))
print("Solving for a scaled potential on contacts (to match the desired current)")
start_math=tm.time()
# to solve the Laplace equation for the adjusted Dirichlet
phi_sol_scaled=define_variational_form_and_solve(V_space,Dirichlet_bc_scaled,kappa,Field_calc_param.EQS_mode,Cond_tensor,Solver_type)
minutes=int((tm.time() - start_math)/60)
secnds=int(tm.time() - start_math)-minutes*60
print("--- assembled and solved in ",minutes," min ",secnds," s ---")
(phi_r_sol_scaled,phi_i_sol_scaled)=phi_sol_scaled.split(deepcopy=True)
# get current flowing through the grounded contact and the electric field in the whole domain
J_ground_scaled,E_field_scaled,E_field_im_scaled = get_current(mesh_sol,facets,boundaries_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,Domains.Contacts,kappa,Cond_tensor,phi_r_sol_scaled,phi_i_sol_scaled,ground_index,get_E_field=True)
# If EQS, J_ground is a complex number. If QS, E_field_im is 0
else: # here we can simply scale the potential in the domain and recompute the E-field
phi_r_sol_scaled=Function(V_space)
phi_i_sol_scaled=Function(V_space)
phi_i_sol_scaled.vector()[:] = 0.0
phi_r_sol_scaled.vector()[:]=V_across*phi_r_sol.vector()[:]/J_ground
phi_sol_scaled=phi_r_sol_scaled
J_ground_scaled,E_field_scaled,E_field_im_scaled = get_current(mesh_sol,facets,boundaries_sol,Field_calc_param.element_order,Field_calc_param.EQS_mode,Domains.Contacts,kappa,Cond_tensor,phi_r_sol_scaled,phi_i_sol_scaled,ground_index,get_E_field=True)
#E_field_im_scale is a null function
#Just to save total current through the ground in FEniCS hdf5 (we save unscaled currents!)
J_Vector=Vector(MPI.comm_self,2)
J_Vector.set_local(np.array([J_real_unscaled,J_im_unscaled],dtype=np.float64))
Hdf=HDF5File(mesh_sol.mpi_comm(), os.environ['PATIENTDIR']+"/Results_adaptive/Solution_"+str(np.round(Field_calc_param.frequenc,6))+".h5", "w")
Hdf.write(mesh_sol, "mesh_sol")
Hdf.write(phi_sol_scaled, "solution_phi_full")
Hdf.write(E_field_scaled, "solution_E_field")
Hdf.write(E_field_im_scaled, "solution_E_field_im")
Hdf.write(j_dens_real_unscaled, "solution_j_real")
Hdf.write(j_dens_im_unscaled, "solution_j_im")
Hdf.write(J_Vector, "/J_Vector")
Hdf.close()
return True
return True
if __name__ == '__main__':
with open(os.environ['PATIENTDIR']+'/Meshes/Mesh_ind.file', "rb") as f:
Domains = pickle.load(f)
with open(os.environ['PATIENTDIR']+'/Results_adaptive/Field_calc_param.file', "rb") as f:
Field_calc_param = pickle.load(f)
mesh = Mesh()
hdf = HDF5File(mesh.mpi_comm(), os.environ['PATIENTDIR']+"/Results_adaptive/Mesh_to_solve.h5", "r")
hdf.read(mesh, "/mesh", False)
subdomains = MeshFunction("size_t", mesh, 3)
hdf.read(subdomains, "/subdomains")
boundaries = MeshFunction("size_t", mesh, 2)
hdf.read(boundaries, "/boundaries")
V0_r=FunctionSpace(mesh,'DG',0)
V0_i=FunctionSpace(mesh,'DG',0)
kappa_r=Function(V0_r)
kappa_i=Function(V0_i)
hdf.read(kappa_r, "/kappa_r")
if Field_calc_param.EQS_mode=='EQS':
hdf.read(kappa_i, "/kappa_i")
#anisotropy will be read in at the site
hdf.close()
if Field_calc_param.EQS_mode=='EQS':
compute_field(mesh,Domains,subdomains,boundaries,kappa_r,Field_calc_param,kappa_i)
elif Field_calc_param.EQS_mode=='QS':
compute_field(mesh,Domains,subdomains,boundaries,kappa_r,Field_calc_param)
| gpl-3.0 | 2,766,105,423,233,284,600 | 45.382826 | 266 | 0.627314 | false |
avinassh/haxor | hackernews/__init__.py | 1 | 13996 | #!/usr/bin/env python
"""
haxor
Unofficial Python wrapper for official Hacker News API
@author avinash sajjanshetty
@email [email protected]
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import asyncio
import datetime
import json
import sys
from urllib.parse import urljoin
import requests
import aiohttp
from .settings import supported_api_versions
__all__ = [
'User',
'Item',
'HackerNews',
'HackerNewsError',
'InvalidAPIVersion',
'InvalidItemID',
'InvalidUserID']
class HackerNewsError(Exception):
pass
class InvalidItemID(HackerNewsError):
pass
class InvalidUserID(HackerNewsError):
pass
class InvalidAPIVersion(HackerNewsError):
pass
class HTTPError(HackerNewsError):
pass
class HackerNews(object):
def __init__(self, version='v0'):
"""
Args:
version (string): specifies Hacker News API version.
Default is `v0`.
Raises:
InvalidAPIVersion: If Hacker News version is not supported.
"""
try:
self.base_url = supported_api_versions[version]
except KeyError:
raise InvalidAPIVersion
self.item_url = urljoin(self.base_url, 'item/')
self.user_url = urljoin(self.base_url, 'user/')
self.session = requests.Session()
def _get_sync(self, url):
"""Internal method used for GET requests
Args:
url (str): URL to fetch
Returns:
Individual URL request's response
Raises:
HTTPError: If HTTP request failed.
"""
response = self.session.get(url)
if response.status_code == requests.codes.ok:
return response.json()
else:
raise HTTPError
async def _get_async(self, url, session):
"""Asynchronous internal method used for GET requests
Args:
url (str): URL to fetch
session (obj): aiohttp client session for async loop
Returns:
data (obj): Individual URL request's response corountine
"""
data = None
async with session.get(url) as resp:
if resp.status == 200:
data = await resp.json()
return data
async def _async_loop(self, urls):
"""Asynchronous internal method used to request multiple URLs
Args:
urls (list): URLs to fetch
Returns:
responses (obj): All URL requests' response coroutines
"""
results = []
async with aiohttp.ClientSession(
connector=aiohttp.TCPConnector(ssl=False)
) as session:
for url in urls:
result = asyncio.ensure_future(self._get_async(url, session))
results.append(result)
responses = await asyncio.gather(*results)
return responses
def _run_async(self, urls):
"""Asynchronous event loop execution
Args:
urls (list): URLs to fetch
Returns:
results (obj): All URL requests' responses
"""
loop = asyncio.get_event_loop()
results = loop.run_until_complete(self._async_loop(urls))
return results
def _get_stories(self, page, limit):
"""
Hacker News has different categories (i.e. stories) like
'topstories', 'newstories', 'askstories', 'showstories', 'jobstories'.
This method, first fetches the relevant story ids of that category
The URL is: https://hacker-news.firebaseio.com/v0/<story_name>.json
e.g. https://hacker-news.firebaseio.com/v0/topstories.json
Then, asynchronously it fetches each story and returns the Item objects
The URL for individual story is:
https://hacker-news.firebaseio.com/v0/item/<item_id>.json
e.g. https://hacker-news.firebaseio.com/v0/item/69696969.json
"""
url = urljoin(self.base_url, F"{page}.json")
story_ids = self._get_sync(url)[:limit]
return self.get_items_by_ids(item_ids=story_ids)
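# Illustrative use of the category helpers built on this method (assumes live
# access to the Hacker News API; the limit value is arbitrary):
#   hn = HackerNews()
#   for story in hn.top_stories(limit=5):
#       print(story.title)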
def get_item(self, item_id, expand=False):
"""Returns Hacker News `Item` object.
Fetches the data from url:
https://hacker-news.firebaseio.com/v0/item/<item_id>.json
e.g. https://hacker-news.firebaseio.com/v0/item/69696969.json
Args:
item_id (int or string): Unique item id of Hacker News story,
comment etc.
expand (bool): Flag to indicate whether to
transform all IDs into objects.
Returns:
`Item` object representing Hacker News item.
Raises:
InvalidItemID: If corresponding Hacker News story does not exist.
"""
url = urljoin(self.item_url, F"{item_id}.json")
response = self._get_sync(url)
if not response:
raise InvalidItemID
item = Item(response)
if expand:
item.by = self.get_user(item.by)
item.kids = self.get_items_by_ids(item.kids) if item.kids else None
item.parent = self.get_item(item.parent) if item.parent else None
item.poll = self.get_item(item.poll) if item.poll else None
item.parts = (
self.get_items_by_ids(item.parts) if item.parts else None
)
return item
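# Illustrative call (item id 8863 is only an example; assumes hn = HackerNews()):
#   item = hn.get_item(8863, expand=True)
# with expand=True, item.by is a User object and item.kids a list of Item objects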
def get_items_by_ids(self, item_ids, item_type=None):
"""Given a list of item ids, return all the Item objects
Args:
item_ids (obj): List of item IDs to query
item_type (str): (optional) Item type to filter results with
Returns:
List of `Item` objects for given item IDs and given item type
"""
urls = [urljoin(self.item_url, F"{i}.json") for i in item_ids]
result = self._run_async(urls=urls)
items = [Item(r) for r in result if r]
if item_type:
return [item for item in items if item.item_type == item_type]
else:
return items
def get_user(self, user_id, expand=False):
"""Returns Hacker News `User` object.
Fetches data from the url:
https://hacker-news.firebaseio.com/v0/user/<user_id>.json
e.g. https://hacker-news.firebaseio.com/v0/user/pg.json
Args:
user_id (string): unique user id of a Hacker News user.
expand (bool): Flag to indicate whether to
transform all IDs into objects.
Returns:
`User` object representing a user on Hacker News.
Raises:
InvalidUserID: If no such user exists on Hacker News.
"""
url = urljoin(self.user_url, F"{user_id}.json")
response = self._get_sync(url)
if not response:
raise InvalidUserID
user = User(response)
if expand and user.submitted:
items = self.get_items_by_ids(user.submitted)
user_opt = {
'stories': 'story',
'comments': 'comment',
'jobs': 'job',
'polls': 'poll',
'pollopts': 'pollopt'
}
for key, value in user_opt.items():
setattr(
user,
key,
[i for i in items if i.item_type == value]
)
return user
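# Illustrative call (assumes hn = HackerNews(); 'pg' is the user from the URL example above):
#   user = hn.get_user('pg', expand=True)
# with expand=True, user.stories, user.comments etc. hold lists of Item objects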
def get_users_by_ids(self, user_ids):
"""
Given a list of user ids, return all the User objects
"""
urls = [urljoin(self.user_url, F"{i}.json") for i in user_ids]
result = self._run_async(urls=urls)
return [User(r) for r in result if r]
def top_stories(self, raw=False, limit=None):
"""Returns the current top stories.
Args:
limit (int): specifies the number of stories to be returned.
raw (bool): Flag to indicate whether to represent all
objects in raw json.
Returns:
`list` of `Item` objects (raw json strings if `raw` is True).
"""
top_stories = self._get_stories('topstories', limit)
if raw:
top_stories = [story.raw for story in top_stories]
return top_stories
def new_stories(self, raw=False, limit=None):
"""Returns the current new stories.
Args:
limit (int): specifies the number of stories to be returned.
raw (bool): Flag to indicate whether to transform all
objects into raw json.
Returns:
`list` of `Item` objects (raw json strings if `raw` is True).
"""
new_stories = self._get_stories('newstories', limit)
if raw:
new_stories = [story.raw for story in new_stories]
return new_stories
def ask_stories(self, raw=False, limit=None):
"""Returns the latest Ask HN stories.
Args:
limit (int): specifies the number of stories to be returned.
raw (bool): Flag to indicate whether to transform all
objects into raw json.
Returns:
`list` of `Item` objects (raw json strings if `raw` is True).
"""
ask_stories = self._get_stories('askstories', limit)
if raw:
ask_stories = [story.raw for story in ask_stories]
return ask_stories
def show_stories(self, raw=False, limit=None):
"""Returns the latest Show HN stories.
Args:
limit (int): specifies the number of stories to be returned.
raw (bool): Flag to indicate whether to transform all
objects into raw json.
Returns:
`list` of `Item` objects (raw json strings if `raw` is True).
"""
show_stories = self._get_stories('showstories', limit)
if raw:
show_stories = [story.raw for story in show_stories]
return show_stories
def job_stories(self, raw=False, limit=None):
"""Returns the latest Job stories.
Args:
limit (int): specifies the number of stories to be returned.
raw (bool): Flag to indicate whether to transform all
objects into raw json.
Returns:
`list` of `Item` objects (raw json strings if `raw` is True).
"""
job_stories = self._get_stories('jobstories', limit)
if raw:
job_stories = [story.raw for story in job_stories]
return job_stories
def updates(self):
"""Returns list of item ids and user ids that have been
changed/updated recently.
Fetches data from URL:
https://hacker-news.firebaseio.com/v0/updates.json
Returns:
`dict` with two keys whose values are `list` objects
"""
url = urljoin(self.base_url, 'updates.json')
response = self._get_sync(url)
return {
'items': self.get_items_by_ids(item_ids=response['items']),
'profiles': self.get_users_by_ids(user_ids=response['profiles'])
}
def get_max_item(self, expand=False):
"""The current largest item id
Fetches data from URL:
https://hacker-news.firebaseio.com/v0/maxitem.json
Args:
expand (bool): Flag to indicate whether to transform all
IDs into objects.
Returns:
`int` if successful.
"""
url = urljoin(self.base_url, 'maxitem.json')
response = self._get_sync(url)
if expand:
return self.get_item(response)
else:
return response
def get_all(self):
"""Returns ENTIRE Hacker News!
Downloads all the HN articles and returns them as Item objects
Returns:
`list` of `Item` objects for every HN item.
"""
max_item = self.get_max_item()
return self.get_last(num=max_item)
def get_last(self, num=10):
"""Returns the last `num` HN items.
Downloads the most recent `num` HN items and returns them as Item objects
Returns:
`list` of `Item` objects.
"""
max_item = self.get_max_item()
urls = [urljoin(self.item_url, F"{i}.json") for i in range(
max_item - num + 1, max_item + 1)]
result = self._run_async(urls=urls)
return [Item(r) for r in result if r]
class Item(object):
"""
Represents stories, comments, jobs, Ask HNs and polls
"""
def __init__(self, data):
self.item_id = data.get('id')
self.deleted = data.get('deleted')
self.item_type = data.get('type')
self.by = data.get('by')
self.submission_time = datetime.datetime.fromtimestamp(
data.get('time', 0))
self.text = data.get('text')
self.dead = data.get('dead')
self.parent = data.get('parent')
self.poll = data.get('poll')
self.kids = data.get('kids')
self.url = data.get('url')
self.score = data.get('score')
self.title = data.get('title')
self.parts = data.get('parts')
self.descendants = data.get('descendants')
self.time = datetime.datetime.fromtimestamp(data.get('time'))
self.raw = json.dumps(data)
def __repr__(self):
retval = '<hackernews.Item: {0} - {1}>'.format(
self.item_id, self.title)
return retval
class User(object):
"""
Represents a hacker i.e. a user on Hacker News
"""
def __init__(self, data):
self.user_id = data.get('id')
self.delay = data.get('delay')
self.created = datetime.datetime.fromtimestamp(data.get('created', 0))
self.karma = data.get('karma')
self.about = data.get('about')
self.submitted = data.get('submitted')
self.raw = json.dumps(data)
def __repr__(self):
retval = '<hackernews.User: {0}>'.format(self.user_id)
return retval
| mit | 7,675,574,859,662,946,000 | 28.341719 | 79 | 0.57345 | false |
geobricks/geobricks_modis | geobricks_modis_test/rest/test_modis_rest.py | 1 | 2136 | import json
import unittest
from flask import Flask
from geobricks_modis.rest.modis_rest import modis
class GeobricksModisRestTest(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.app.register_blueprint(modis, url_prefix='/modis')
self.tester = self.app.test_client(self)
def test_discovery(self):
response = self.tester.get('/modis/discovery/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(out['title'], 'MODIS')
self.assertEquals(out['properties']['service_type']['default'], 'DATASOURCE')
def test_list_products_service(self):
response = self.tester.get('/modis/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 68)
def test_list_years_service(self):
response = self.tester.get('/modis/MOD13A2/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 16)
def test_list_days_service(self):
response = self.tester.get('/modis/MOD13A2/2014/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 23)
def test_list_layers_service(self):
response = self.tester.get('/modis/MOD13A2/2014/001/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 286)
def test_list_layers_subset_service(self):
response = self.tester.get('/modis/MOD13A2/2014/001/5/7/3/9/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 5)
def test_list_layers_countries_subset_service(self):
response = self.tester.get('/modis/MOD13A2/2014/001/8,IT,fra/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 12)
def test_list_countries(self):
response = self.tester.get('/modis/countries/', content_type='application/json')
out = json.loads(response.data)
self.assertEquals(len(out), 277)
| gpl-2.0 | -8,529,286,641,541,722,000 | 39.301887 | 104 | 0.661985 | false |
combatopera/pym2149 | pym2149/dac.py | 1 | 3553 | # Copyright 2014, 2018, 2019, 2020 Andrzej Cichocki
# This file is part of pym2149.
#
# pym2149 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pym2149 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pym2149. If not, see <http://www.gnu.org/licenses/>.
from .buf import BufType
from .nod import BufNode
from .shapes import level4to5, level5toamp, level4tosinus5shape, level4totone5shape
from diapyr.util import singleton
import numpy as np
class Level(BufNode):
def __init__(self, levelmodereg, fixedreg, env, signal, rtone, timereffectreg):
super().__init__(BufType.signal) # Must be suitable for use as index downstream.
self.levelmodereg = levelmodereg
self.fixedreg = fixedreg
self.env = env
self.signal = signal
self.rtone = rtone
self.timereffectreg = timereffectreg
def callimpl(self):
self.timereffectreg.value.putlevel5(self)
@singleton
class NullEffect:
'All registers are non-virtual and write directly to chip, the timer does not interfere.'
def putlevel5(self, node):
node.blockbuf.copybuf(node.chain(node.signal))
if node.levelmodereg.value:
node.blockbuf.mulbuf(node.chain(node.env))
else:
# According to block diagram, the level is already 5-bit when combining with binary signal:
node.blockbuf.mul(level4to5(node.fixedreg.value))
class FixedLevelEffect:
'Registers levelmodereg and fixedreg are virtual, of which levelmodereg is ignored.'
def __init__(self):
self.wavelength, = {shape.wavelength() for shape in self.level4toshape}
def getshape(self, fixedreg):
return self.level4toshape[fixedreg.value]
def putlevel5(self, node):
node.blockbuf.copybuf(node.chain(node.signal))
node.blockbuf.mulbuf(node.chain(node.rtone))
@singleton
class PWMEffect(FixedLevelEffect):
'Interrupt setup/routine alternates fixed level between fixedreg and zero.'
level4toshape = level4totone5shape
@singleton
class SinusEffect(FixedLevelEffect):
'Interrupt setup/routine sets fixed level to sample value as scaled by fixedreg.'
level4toshape = level4tosinus5shape
class DigiDrumEffect:
'Like SinusEffect but in addition toneflagreg and noiseflagreg are virtual and we ignore them.'
def __init__(self, sample5shape):
self.sample5shape = sample5shape
def getshape(self, fixedreg):
return self.sample5shape
def putlevel5(self, node):
node.blockbuf.copybuf(node.chain(node.rtone))
class Dac(BufNode):
def __init__(self, level, log2maxpeaktopeak, ampshare):
buftype = BufType.float
super().__init__(buftype)
# We take off .5 so that the peak amplitude is about -3 dB:
maxpeaktopeak = (2 ** (log2maxpeaktopeak - .5)) / ampshare
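# e.g. with log2maxpeaktopeak=16 and ampshare=4 (illustrative values):
# maxpeaktopeak = 2**15.5 / 4 ~= 11585.2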
# Lookup of ideal amplitudes:
self.leveltopeaktopeak = np.fromiter((level5toamp(v) * maxpeaktopeak for v in range(32)), buftype.dtype)
self.level = level
def callimpl(self):
self.blockbuf.mapbuf(self.chain(self.level), self.leveltopeaktopeak)
| gpl-3.0 | 8,010,350,940,594,868,000 | 34.888889 | 112 | 0.710667 | false |
Pexego/PXGO_00019_2014_BU | project-addons/sale_shipping_except/__openerp__.py | 1 | 1210 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Pexego All Rights Reserved
# $Jesús Ventosinos Mayor <[email protected]>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Manage sale shipping exception editable",
'version': '1.0',
'category': '',
'description': """""",
'author': 'Pexego',
'website': '',
"depends": ['sale'],
"data": [],
"installable": True
}
| agpl-3.0 | 7,370,873,289,079,417,000 | 36.78125 | 78 | 0.578991 | false |
Lucas-Wong/ToolsProject | WeChat/AutoWeChat.py | 1 | 1366 | #! /usr/bin/env python
#_*_coding:utf-8_*_
import itchat, time
from itchat.content import *
@itchat.msg_register([TEXT, MAP, CARD, NOTE, SHARING])
def text_reply(msg):
#itchat.send('%s: %s' % (msg['Type'], msg['Text']), msg['FromUserName'])
itchat.send_msg('Message received, the content is: %s' % msg['Text'], toUserName=msg['FromUserName'])
return "Text received: %s" % msg["Text"] # the returned string is sent back to the sender; msg["Text"] is the message content
@itchat.msg_register([PICTURE, RECORDING, ATTACHMENT, VIDEO])
def download_files(msg):
msg['Text'](msg['FileName'])
return '@%s@%s' % ({'Picture': 'img', 'Video': 'vid'}.get(msg['Type'], 'fil'), msg['FileName'])
@itchat.msg_register(FRIENDS)
def add_friend(msg):
itchat.add_friend(**msg['Text']) # this automatically registers the new friend, no need to reload the contact list
itchat.send_msg('Nice to meet you!', msg['RecommendInfo']['UserName'])
@itchat.msg_register(TEXT, isGroupChat=True)
def text_reply(msg):
if msg['isAt']:
#itchat.send(u'@%s\u2005I received: %s' % (msg['ActualNickName'], msg['Content']), msg['FromUserName'])
itchat.send_msg("I have received the message from {0}, the actual content is {1}".format(msg['ActualNickName'], msg['Text']),
toUserName=msg['FromUserName'])
itchat.auto_login(True, hotReload=True)
itchat.run(True) | gpl-3.0 | -4,828,230,460,930,350,000 | 38.516129 | 111 | 0.647059 | false |
NiclasEriksen/py-towerwars | src/test.py | 1 | 5993 | import unittest
# import main
import functions
tcase = unittest.TestCase
class TestColorCodeConverter(tcase):
def test_white(self):
self.assertEqual(functions.get_color(255, 255, 255, 255), (1, 1, 1, 1))
def test_red(self):
self.assertEqual(functions.get_color(255, 0, 0, 255), (1, 0, 0, 1))
def test_green(self):
self.assertEqual(functions.get_color(0, 255, 0, 255), (0, 1, 0, 1))
def test_blue(self):
self.assertEqual(functions.get_color(0, 0, 255, 255), (0, 0, 1, 1))
def test_transparent(self):
self.assertEqual(
functions.get_color(127, 84, 221, 70),
(
0.4980392156862745,
0.32941176470588235,
0.8666666666666667,
0.27450980392156865
)
)
class TestCreateRectangle(tcase):
def test_square(self):
self.assertEqual(
functions.create_rectangle(
48, 48, 32, 32, centered=False
),
[16, 48, 16, 16, 48, 16, 48, 48]
)
def test_rectangle(self):
self.assertEqual(
functions.create_rectangle(
48, 48, 64, 32, centered=False
),
[-16, 48, -16, 16, 48, 16, 48, 48]
)
def test_square_centered(self):
self.assertEqual(
functions.create_rectangle(
48, 48, 32, 32, centered=True
),
[32, 32, 32, 64, 64, 64, 64, 32],
)
def test_rectangle_centered(self):
self.assertEqual(
functions.create_rectangle(
48, 48, 64, 32, centered=True
),
[16, 32, 16, 64, 80, 64, 80, 32]
)
class TestCheckPointInRectangle(tcase):
def test_inside(self):
rect = [32, 32, 32, 64, 64, 64, 64, 32]
self.assertTrue(functions.check_point_rectangle(32, 32, rect))
def test_outside(self):
rect = [32, 32, 32, 64, 64, 64, 64, 32]
self.assertFalse(functions.check_point_rectangle(63, 65, rect))
class Mob:
"""The main mob constructor, for testing"""
def __init__(self, game):
self.move_type = "normal"
self.g = game
self.hp = 14.0
self.spd = 1.0
self.debug = game.debug
self.id = 0
s = game.grid.start
self.x = 0
self.y = 0
self.rx = 0
self.ry = 0 # Real position, which is used in game logic
self.state = "alive"
self.slow_cd = None
self.lastpoint = None
self.stall_timer = None
self.debuff_list = []
self.currentpoint = s
self.point = 0
self.path = False
if self.move_type == "flying":
self.path = game.grid.getPath(self.currentpoint, flying=True)
if not self.path:
self.path = game.grid.path
try:
self.targetpoint = self.path[1]
except IndexError:
self.targetpoint = (0, 0)
def make_game_object(self):
from grid import Grid
self.debug = False
self.grid_dim = (8, 8)
self.spawn = (0, 0)
self.goal = (8, 8)
self.tiles_no_walk = [(4, 4), (4, 5), (5, 5), (5, 6), (6, 6)]
self.tiles_no_build = [(4, 4), (4, 5), (5, 5), (5, 6), (6, 6)]
self.flightgrid = []
self.towers = []
self.grid = Grid(self)
self.grid.update("update")
self.m = Mob(self)
class TestPathChecking(tcase):
def test_update(self):
make_game_object(self)
self.assertTrue(
functions.check_path(self.m, self.grid, (4, 4))
)
def test_not_update(self):
make_game_object(self)
self.assertFalse(
functions.check_path(self.m, self.grid, (4, 5))
)
class TestCheckDiagonal(tcase):
def test_w_grid(self):
make_game_object(self)
self.assertEqual(
functions.get_diagonal(self.grid.w_grid, 3, 3),
[(4, 2), (2, 4), (2, 2)]
)
def test_t_grid(self):
make_game_object(self)
self.assertEqual(
functions.get_diagonal(self.grid.t_grid, 3, 3),
[(4, 2), (2, 4), (2, 2)]
)
def test_full_grid(self):
make_game_object(self)
self.assertEqual(
functions.get_diagonal(self.grid.fullgrid, 3, 3),
[(4, 4), (4, 2), (2, 4), (2, 2)]
)
class TestGetNeighbors(tcase):
def test_w_grid(self):
make_game_object(self)
self.assertEqual(
functions.get_neighbors(self.grid.w_grid, 3, 3),
[(4, 3), (3, 4), (2, 3), (3, 2)]
)
def test_t_grid(self):
make_game_object(self)
self.assertEqual(
functions.get_neighbors(self.grid.t_grid, 3, 3),
[(4, 3), (3, 4), (2, 3), (3, 2)]
)
def test_full_grid(self):
make_game_object(self)
self.assertEqual(
functions.get_neighbors(self.grid.fullgrid, 3, 3),
[(4, 3), (3, 4), (2, 3), (3, 2)]
)
class TestExpandPath(tcase):
def test(self):
make_game_object(self)
print functions.expandPath(self.grid.w_grid, self.grid.path)
def expandPath(grid, path):
newpath = []
for p in path:
newpath.append(p)
neighbors = get_neighbors(grid, p[0], p[1])
diagonals = get_diagonal(grid, p[0], p[1])
for d in diagonals:
if not d in newpath:
newpath.append(d)
for n in neighbors:
if not n in newpath:
newpath.append(n)
for n in newpath:
if n[0] == 8:
print(n)
return newpath
def get_dist(x1, y1, x2, y2): # Returns distance between to points
x = (x1 - x2) * (x1 - x2)
y = (y1 - y2) * (y1 - y2)
dist = math.sqrt(x + y)
return dist
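# e.g. get_dist(0, 0, 3, 4) == 5.0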
def get_angle(x1, y1, x2, y2):
dx = x2 - x1
dy = y2 - y1
rads = math.atan2(-dy, dx)
rads %= 2*math.pi
return rads
if __name__ == '__main__':
unittest.main()
| cc0-1.0 | 8,512,238,743,295,567,000 | 24.611111 | 79 | 0.519439 | false |
dvdme/forecastiopy | tests/test_ForecastIO.py | 1 | 1494 | import os
import sys
import json
import unittest
from forecastiopy import *
class TestForecastIO(unittest.TestCase):
def setUp(self):
with open('tests/resources/dummy_apikey.txt') as f:
apikey = f.read()
self.fio = ForecastIO.ForecastIO(apikey)
with open('tests/resources/response.json') as f:
rsp = f.read()
self.fio.get_forecast_fromstr(rsp)
self.response = json.loads(rsp)
def test_get_url(self):
with open('tests/resources/dummy_apikey.txt') as f:
apikey = f.read()
fio = ForecastIO.ForecastIO(apikey)
with self.assertRaises(TypeError):
fio.get_url()
def test_invalid_apikey(self):
with self.assertRaises(ValueError):
self.fio = ForecastIO.ForecastIO('123')
def test_get_latitude(self):
self.assertEqual(self.fio.latitude, self.response['latitude'])
def test_get_longitude(self):
self.assertEqual(self.fio.longitude, self.response['longitude'])
def test_get_timezone(self):
self.assertEqual(self.fio.timezone, self.response['timezone'])
def test_get_offset(self):
self.assertEqual(self.fio.offset, self.response['offset'])
if __name__ == '__main__':
# append module root directory to sys.path
sys.path.append(
os.path.dirname(
os.path.dirname(
os.path.abspath(__file__)
)
)
)
unittest.main() | epl-1.0 | 843,940,500,443,580,300 | 27.75 | 72 | 0.605756 | false |
wbarfuss/pyunicorn | pyunicorn/climate/eventsynchronization_climatenetwork.py | 1 | 5600 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of pyunicorn.
# Copyright (C) 2008--2015 Jonathan F. Donges and pyunicorn authors
# URL: <http://www.pik-potsdam.de/members/donges/software>
# License: BSD (3-clause)
"""
Provides class for the analysis of dynamical systems and time series based
on event synchronization
"""
# array object and fast numerics
import numpy as np
from ..funcnet import EventSynchronization
from .climate_network import ClimateNetwork
from .climate_data import ClimateData
from .. import Data
#
# Class definitions
#
class EventSynchronizationClimateNetwork(EventSynchronization, ClimateNetwork):
"""
Class EventSynchronizationClimateNetwork for generating and quantitatively
analyzing event synchronization networks.
References: [Boers2014]_.
"""
#
# Internal methods
#
def __init__(self, data, quantile_threshold, taumax,
eventsynctype="directedES", non_local=False,
node_weight_type="surface", silence_level=0):
"""
Initialize an instance of EventSynchronizationClimateNetwork.
For other applications of event synchronization networks please use
the event synchronization class directly.
:type data: :class:`..climate.ClimateData`
:arg data: The climate data used for network construction.
:type quantile_threshold: float between 0 and 1
:arg quantile_threshold: values above will be treated as events
:arg int taumax: Maximum dynamical delay
:type eventsynctype: str
:arg eventsynctype: one of "directedES", "symmetricES" or
"antisymmetricES" [default: "directed"]
:arg bool non_local: Determines whether links between spatially close
nodes should be suppressed.
:arg str node_weight_type: The type of geographical node weight to be
used.
:arg int silence_level: The inverse level of verbosity of the object.
"""
etypes = ["directedES", "symmetricES", "antisymmetricES"]
if eventsynctype not in etypes:
raise IOError("wrong eventsynctype...\n" +
"Available options: '%s', '%s' or '%s'" %
(etypes[0], etypes[1], etypes[2]))
self.__eventsynctype = eventsynctype
self.directed = self.__eventsynctype != "symmetricES"
eventmatrix = data.observable() > np.percentile(data.observable(),
quantile_threshold*100,
axis=0)
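# e.g. quantile_threshold=0.8 marks values above the 80th percentile
# (roughly the top 20% of time steps at each node) as events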
EventSynchronization.__init__(self, eventmatrix.astype(int), taumax)
eventsyncmatrix = getattr(self, self.__eventsynctype)()
ClimateNetwork.__init__(self, grid=data.grid,
similarity_measure=eventsyncmatrix,
threshold=0,
non_local=non_local,
directed=self.directed,
node_weight_type=node_weight_type,
silence_level=silence_level)
def __str__(self):
"""
Return a string representation of TsonisClimateNetwork.
**Example:**
>>> data = EventSynchronizationClimateNetwork.SmallTestData()
>>> print EventSynchronizationClimateNetwork(data, 0.8, 16)
Extracting network adjacency matrix by thresholding...
Setting area weights according to type surface...
Setting area weights according to type surface...
EventSynchronizationClimateNetwork:
EventSynchronization: 6 variables, 10 timesteps, taumax: 16
ClimateNetwork:
GeoNetwork:
Network: directed, 6 nodes, 0 links, link density 0.000.
Geographical boundaries:
time lat lon
min 0.0 0.00 2.50
max 9.0 25.00 15.00
Threshold: 0
Local connections filtered out: False
Type of event synchronization to construct the network: directedES
"""
text = ("EventSynchronizationClimateNetwork: \n%s\n%s\n" +
"Type of event synchronization to construct the network: %s")
return text % (EventSynchronization.__str__(self),
ClimateNetwork.__str__(self), self.__eventsynctype)
@staticmethod
def SmallTestData():
"""
Return test data set of 6 time series with 10 sampling points each.
**Example:**
>>> r(Data.SmallTestData().observable())
array([[ 0. , 1. , 0. , -1. , -0. , 1. ],
[ 0.309 , 0.9511, -0.309 , -0.9511, 0.309 , 0.9511],
[ 0.5878, 0.809 , -0.5878, -0.809 , 0.5878, 0.809 ],
[ 0.809 , 0.5878, -0.809 , -0.5878, 0.809 , 0.5878],
[ 0.9511, 0.309 , -0.9511, -0.309 , 0.9511, 0.309 ],
[ 1. , 0. , -1. , -0. , 1. , 0. ],
[ 0.9511, -0.309 , -0.9511, 0.309 , 0.9511, -0.309 ],
[ 0.809 , -0.5878, -0.809 , 0.5878, 0.809 , -0.5878],
[ 0.5878, -0.809 , -0.5878, 0.809 , 0.5878, -0.809 ],
[ 0.309 , -0.9511, -0.309 , 0.9511, 0.309 , -0.9511]])
:rtype: ClimateData instance
:return: a ClimateData instance for testing purposes.
"""
data = Data.SmallTestData()
return ClimateData(observable=data.observable(), grid=data.grid,
time_cycle=5, silence_level=2)
| bsd-3-clause | -5,787,141,346,296,175,000 | 39.28777 | 79 | 0.572857 | false |
kristerhedfors/xnet | xnet/io/serialize.py | 1 | 6892 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright(c) 2013 Krister Hedfors
#
import unittest
import StringIO
import cPickle
import hashlib
import hmac
import struct
class NutException(Exception):
pass
class InvalidMagic(NutException):
pass
class InvalidLength(NutException):
pass
class InvalidSignature(NutException):
pass
class ReadCoreFailed(NutException):
pass
class Nut(object):
'''
A Nut is slightly harder than a pickle.
Nut is a serialized and signed object which produces
a length-overhead of 20 bytes compared to pickle.
BEWARE that the Nut serialization's use of keys and signatures
does nothing to provide security or integrity. These
mechanisms are only used as the equivalent of CRC checksums
to identify corrupt packets in the stream.
Byte layout:
4 magic
8 signature (of the two following fields)
4 flags
4 serialized_length
X core (pickled object)
'''
PICKLER = cPickle # or pickle
KEY = 'mAqVPwP2Ka7JElJelK5wnJfAWLVJ9NVooZYBSTOWlwc='.decode('base64')
MAGIC_NEXT = 'xNXT' # marks beginning of Nut
MAGIC_DONE = 'xDNE' # marks end of stream of Nuts
MAX_LENGTH = 65536 # of pickle; can be increased to 2**32-1
HASH_FN = hashlib.sha256
HASH_LENGTH = 8
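# Illustrative round-trip, mirroring Test_Nut below (default key and pickler):
#   buf = Nut({'answer': 42}).serialize() + Nut.MAGIC_DONE
#   for nut in Nut.generate_from_stream(StringIO.StringIO(buf)):
#       print nut.deserialize()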
class _Nothing:
pass
def __init__(self, obj=_Nothing, flags=0, pickler=PICKLER, key=KEY, max_length=MAX_LENGTH,
hash_fn=HASH_FN, hash_length=HASH_LENGTH):
self._flags = flags
self._pickler = pickler
self._key = key
self._max_length = max_length
self._hash_fn = hash_fn
self._hash_length = hash_length
if not obj is self._Nothing:
self.load_object(obj)
def _sign(self, msg):
digest = hmac.new(self._key, msg, digestmod=self.HASH_FN).digest()
digest = digest[:self.HASH_LENGTH]
return digest
def _serialize_length(self, s):
return struct.pack('I', len(s))
def _serialize_flags(self, f):
return struct.pack('I', f)
@classmethod
def _deserialize_length(self, sl):
tup = struct.unpack('I', sl)
assert len(tup) == 1
return tup[0]
@classmethod
def _deserialize_flags(self, sf):
tup = struct.unpack('I', sf)
assert len(tup) == 1
return tup[0]
def _signature(self):
msg = self._serialize_flags(self._flags)
msg += self._serialized_length
msg += self._core
return self._sign(msg)
def __str__(self):
'''
This returns the serialized representation.
'''
s = self.MAGIC_NEXT
s += self._signature()
s += self._serialize_flags(self._flags)
s += self._serialized_length
s += self._core
return s
def _get_flags(self):
return self._flags
def _set_flags(self, flags):
self._flags = flags
flags = property(_get_flags, _set_flags)
def _load(self, core):
self._core = core
self._serialized_length = self._serialize_length(self._core)
def load_object(self, obj, flags=None):
core = self._pickler.dumps(obj)
self._load(core)
if not flags is None:
self._set_flags(flags)
return self
def serialize(self):
return self.__str__()
def deserialize(self):
return self._pickler.loads(self._core)
@classmethod
def generate_from_stream(cls, stream, **nut_kwargs):
'''
stream must implement read().
'''
while True:
#
# read magic (4)
#
magic = stream.read(4)
if magic == cls.MAGIC_DONE:
break
if not magic == cls.MAGIC_NEXT:
raise InvalidMagic(magic)
nut = Nut(**nut_kwargs)
#
# read signature (8)
#
signature = stream.read(8)
#
# read flags (4)
#
_sflags = stream.read(4)
flags = cls._deserialize_flags(_sflags)
#
# read length (4)
#
_slength = stream.read(4)
length = cls._deserialize_length(_slength)
if not 0 < length <= nut._max_length:
raise InvalidLength(length)
#
# read core (length)
#
core = stream.read(length)
if not len(core) == length:
errmsg = '{0} < {1}'.format(len(core), length)
raise ReadCoreFailed(errmsg)
#
# validate signature
#
msg = _sflags + _slength + core
if not signature == nut._sign(msg):
errmsg = '{0} != {1}'.format(signature, nut._sign(msg))
raise InvalidSignature(errmsg)
#
# Validity of pickled object 'core' is not verified;
# pickle.loads() may throw.
#
nut._flags = flags
nut._load(core)
yield nut
class NutWriteWrapper(object):
def __init__(self, fobj, **nut_kwargs):
self._fobj = fobj
self._nut_kwargs = nut_kwargs
def write(self, buf):
nut = Nut(buf, **self._nut_kwargs)
buf = nut.serialize()
self._fobj.write(buf)
def close(self):
self._fobj.write(Nut.MAGIC_DONE)
self._fobj.close()
class Test_Nut(unittest.TestCase):
def setUp(self):
stream = StringIO.StringIO()
stream.write(Nut('asd', flags=0))
stream.write(Nut(123, flags=1))
stream.write(Nut([1, 3.14, None], flags=2))
stream.write(Nut({'asd': 123, 'qwe': 'rty'}, flags=3))
#stream.write(Nut(dict(foo=Nut('bar'))))
stream.write(Nut.MAGIC_DONE)
stream.seek(0)
self._stream = stream
def test1(self):
a1 = Nut(1, key='a')
a2 = Nut(2, key='a')
b1 = Nut(1, key='b')
b2 = Nut(2, key='b')
assert 1 == a1.deserialize() == b1.deserialize()
assert 2 == a2.deserialize() == b2.deserialize()
assert str(a1) != str(a2) != str(b1) != str(b2)
def test2(self):
stream = self._stream
stream.seek(0)
for (i, nut) in enumerate(Nut.generate_from_stream(stream)):
obj = nut.deserialize()
assert nut.flags == i
print obj
def test3(self):
import sys
out = NutWriteWrapper(sys.stdout)
out.write('test3 123123')
out.write('test3 QWEQWE')
out.write('test3 foo')
out.write('test3 111111111111')
out.close()
def _test():
import doctest
import unittest
print(doctest.testmod())
unittest.main()
if __name__ == "__main__":
_test()
| bsd-3-clause | 241,163,241,223,117,340 | 25.205323 | 94 | 0.542078 | false |
gangeli/NaturalLI | pub/acl2015/pr_plot.py | 1 | 1168 | #!/usr/bin/env python
#
import sys
import matplotlib.pyplot as plt
#if len(sys.argv) < 4:
# print "Usage: File xlimit ylimit"
def pr(f):
precisions = []
recalls = []
with open(f, 'r') as f:
for line in f:
p, r = line.split("\t")
precisions.append(float(p))
recalls.append(float(r))
return [precisions, recalls]
plt.clf()
ax = plt.gca()
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
[precisions, recalls] = pr('final_results/ollie.plot');
plt.plot(recalls, precisions, label="Ollie", linewidth=3.0, linestyle=':', color='red')
[precisions, recalls] = pr('final_results/our_system_nonominals.plot');
plt.plot(recalls, precisions, label="Our System (without nominals)", linewidth=3.0, linestyle='-', color='green')
#[precisions, recalls] = pr('final_results/our_system_names_websites.plot');
#plt.plot(recalls, precisions, label="Our System + Alt. Name + Website")
plt.ylabel("Precision")
plt.xlabel("Recall")
plt.ylim([0.0, 1.0])
plt.xlim([0.0, 0.15])
plt.legend() #loc="lower left")
plt.savefig('pr_curve.pdf')
| mit | 6,420,919,372,658,777,000 | 25.545455 | 113 | 0.663527 | false |
AlexandreBiguet/NJLlikeModels | PythonStuffs/pnjlA.py | 1 | 3199 | #!/opt/local/bin/python
# coding: utf8
from scipy import optimize
from scipy import integrate
from pathlib import Path
import numpy as np
import sys
import njlA
import utils
class MpnjlA ( njlA.MnjlA ):
def __init__ ( self ):
njlA.MnjlA.__init__(self)
self.Phi = 0.0
self.Phi_min = 0.0
self.Phi_max = 0.999
def evalGp (self):
pass
def evalMFE ( self, s, Phi, T, mu ):
pass
def minGp ( self, min_method = "L-BFGS-B" ):
def Gp ( s, Phi ):
self.s = s
self.Phi = Phi
return self.evalGp()
GpFunc = lambda x: Gp ( x[0], x[1] )
s = self.s
Phi = self.Phi
xguess = [ s, Phi ]
bnds = ((self.s_min, self.s_max), (self.Phi_min, self.Phi_max))
res = optimize.minimize( GpFunc, xguess, method=min_method, bounds=bnds)
self.s = res.x[0]
self.Phi = res.x[1]
return res
def solMFE (self):
pass
def solMFE_s_Phi (self):
F = lambda x: self.evalMFE( x[0], x[1], self.T, self.mu )
xg = [ self.s, self.Phi ]
sol = optimize.root( F, xg, method='hybr' )
self.s = sol.x[0]
self.Phi = sol.x[1]
return sol
def solMFE_s_T (self):
F = lambda x: self.evalMFE( x[0], self.Phi, x[1], self.mu )
xg = [ self.s, self.T ]
sol = optimize.root( F, xg, method='hybr' )
self.s = sol.x[0]
self.T = sol.x[1]
return sol
def solMFE_s_mu (self):
F = lambda x: self.evalMFE( x[0], self.Phi, self.T, x[1])
xg = [ self.s, self.mu ]
sol = optimize.root( F, xg, method='hybr' )
self.s = sol.x[0]
self.mu = sol.x[1]
return sol
def solMFE_Phi_T (self):
F = lambda x: self.evalMFE( self.s, x[0], x[1], self.mu )
xg = [ self.Phi, self.T ]
sol = optimize.root( F, xg, method='hybr' )
self.Phi = sol.x[0]
self.T = sol.x[1]
return sol
def solMFE_Phi_mu (self):
F = lambda x: self.evalMFE( self.s, x[0], self.T, x[1])
xg = [ self.Phi, self.mu ]
sol = optimize.root( F, xg, method='hybr' )
self.Phi = sol.x[0]
self.mu = sol.x[1]
return sol
def set_sol ( self, sol=["s", "Phi"] ):
if utils.isContained( sol, ["s", "Phi" ]) :
self.solMFE = self.solMFE_s_Phi
elif utils.isContained (sol , ["s", "T"] ):
self.solMFE = self.solMFE_s_T
elif utils.isContained (sol , ["s", "mu"]):
self.solMFE = self.solMFE_s_mu
elif utils.isContained (sol , ["Phi", "T"] ):
self.solMFE = self.solMFE_Phi_T
elif utils.isContained (sol , ["Phi", "mu"]):
self.solMFE = self.solMFE_Phi_mu
else:
print ("\n", sol, ": ")
sys.exit("Unknown sol variables" )
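# Illustrative call sequence (evalGp/evalMFE are stubs here and must be
# implemented by a subclass):
#   M = MpnjlA()
#   M.set_sol(["s", "Phi"])
#   sol = M.solMFE()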
if __name__ == "__main__":
from scipy import optimize
from scipy import integrate
from pathlib import Path
import numpy as np
import sys
import njlA
import utils
M = MpnjlA ()
print ( M.s_min, " ", M.Utype )
M.set_sol ( ["mu", "s"] )
| gpl-3.0 | 5,631,456,100,910,675,000 | 27.061404 | 80 | 0.500156 | false |
Elian-0x/practice-python | tutos/How to Think Like a Computer Scientist/chap7.py | 1 | 3387 | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
# Learning python like a computer scientist chap7
# Elian
# Strings
'''
Strings are immutable, so it is not possible to modify an existing
string without creating a new one
'''
import string
nom="Elian"
def ecrire_inverse(s):
length=len(s)
while length>=1:
print s[length-1]
length=length-1
# Returns the index of a character (first occurrence)
def findindex(s,car):
index=0
while index<len(s):
if s[index]==car:
return index
index=index+1
return -1 # error code
# Returns the index of a character starting from a given index
def findindexpos(s,car,pos):
index=pos # index de départ
while index<len(s):
if s[index]==car:
return index
index=index+1
return -1
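# e.g. findindexpos("banana", 'a', 2) returns 3 (first 'a' at or after index 2)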
# Counts the number of occurrences of a character
def nbOcc(s,car):
nb=0
for carac in s:
if carac==car:
nb=nb+1
return nb
# Counts the number of occurrences of a character
# starting from a given index
def nbOccPos(s,car,pos):
nb=0
index=pos
while index<len(s):
if s[index]==car:
nb=nb+1
index=index+1
return nb
# Determines whether a character is lowercase
def isLower(ch):
return string.find(string.lowercase,ch) != -1
# equivalent: return ch in string.lowercase
# equivalent: return 'a' <= ch <= 'z'
#---=] Main [=-------------------------------------------------------
print nom[0], # first character, indexing starts at 0
print nom[1]
print "Length:", len(nom)
length=len(nom)
last_car=nom[length-1] # last character
dernier_car=nom[-1] # same thing with a negative index
print "last character =", last_car, dernier_car
# processing the characters one by one
index=0
while index<len(nom):
print nom[index],
index=index+1
print "\nBackwards, one letter per line"
ecrire_inverse(nom)
# Traverse each character of a string
for car in nom:
print car,
print "\n"
# Concatenation
prefixes="JKLMNOPQ"
suffixe="ack"
for lettres in prefixes:
if lettres == "O" or lettres=="Q":
print lettres+"u"+suffixe
else:
print lettres+suffixe
# Substrings
noms="Joe, Steve, me"
print noms[0:3], noms[5:10], noms[12:14] # index of the last char + 1
print noms[:10] # from the beginning
print noms[5:] # from an index to the end
print noms[:] # the whole string
# String comparisons
ch1="abcd"
ch2="abcde"
if ch1==ch2:
print "the 2 strings are equal"
else:
print "the 2 strings are different"
ch1="Elian"
ch2="me"
# Sorting in alphabetical order
# Beware: uppercase letters come before lowercase ones, so for
# a sort, convert everything to one of the two cases first
if ch1<ch2:
print ch1, ch2
else:
print ch2,ch1
# Index of a character
print "index of 1st occurrence of i =",findindex(nom,'i')
print "index starting from a position =",findindexpos(nom,'i',2)
# Number of occurrences of a character
print "Nb occurrences of i =",nbOcc(nom,'i')
nom="ElianElian"
print "Nb occurrences from pos =",nbOccPos(nom,'i',4)
# Functions of the string module
chaine="Elianpacmand"
print "index d=",string.find(chaine,'d')
print "index from pos=",string.find(chaine,'i',2)
print "index of substring nd=",string.find(chaine,'nd')
print "index between positions 6 and 10 =",string.find(chaine,'i',6,10)
print string.lowercase
print string.uppercase
print string.digits
print "i is lower?",isLower('i')
print "J is lower?",isLower('J')
print string.whitespace,"whitespace" # space, \t, \n
| gpl-3.0 | -8,941,041,229,237,536,000 | 18.660819 | 69 | 0.692445 | false |
Facerafter/starcitizen-tools | extensions/EventLogging/server/tests/test_load.py | 1 | 7720 | # -*- coding: utf-8 -*-
"""
This script generates a stream of requests to beta-labs
(bits.beta.wmflabs.org) to test EventLogging load limitations.
Thanks to Emilio Monti for the Worker and ThreadPool codes!
https://code.activestate.com/recipes/577187-python-thread-pool/
"""
import json
import time
import string
import random
import urllib
import httplib
import argparse
from threading import Thread
from Queue import Queue
POOL_SIZE = 100
SCHEMA_HOST = 'meta.wikimedia.org'
SCHEMA_URL = ('/w/api.php?action=query&prop=revisions&format=json'
'&rvprop=content&titles=Schema:%s&revid=%s')
CAPSULE_REVISION = 10981547
EL_URL = '/event.gif?%s;'
class Worker(Thread):
"""Thread executing tasks from a given tasks queue"""
def __init__(self, tasks):
Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
def run(self):
while True:
func, args, kargs = self.tasks.get()
try:
func(*args, **kargs)
except Exception, e:
print 'Worker error: %s.' % e
self.tasks.task_done()
class ThreadPool(object):
"""Pool of threads consuming tasks from a queue"""
def __init__(self, num_threads):
self.tasks = Queue(num_threads)
for _ in range(num_threads):
Worker(self.tasks)
def add_task(self, func, *args, **kargs):
"""Add a task to the queue"""
self.tasks.put((func, args, kargs))
def wait_completion(self):
"""Wait for completion of all the tasks in the queue"""
self.tasks.join()
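# Illustrative use (this is how main() below drives the pool):
#   pool = ThreadPool(POOL_SIZE)
#   pool.add_task(send_event, event, endpoint)
#   pool.wait_completion()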
class EventGenerator(object):
"""Generates events for a given schema."""
def __init__(self, schema_name, schema_revision):
self.schema_name = schema_name
self.schema_revision = schema_revision
try:
self.schema = get_schema(schema_name, schema_revision)
except Exception:
raise RuntimeError(
'Could not retrieve schema information: %s.' % schema_name)
def generate(self, capsule_schema, optional_values):
event = self.instantiate(capsule_schema, optional_values)
event['schema'] = self.schema_name
event['revision'] = self.schema_revision
event['timestamp'] = int(time.time())
event['event'] = self.instantiate(self.schema, optional_values)
return event
def instantiate(self, schema, optional_values):
event = {}
for name, prop in schema['properties'].iteritems():
# Decide if the property should be instantiated
if (prop.get('required', None) or optional_values == 'always' or
(optional_values == 'sometimes' and random.random() < .2)):
# Instantiate depending on kind of property
if 'enum' in prop:
value = random.choice(prop['enum'])
else:
prop_type = prop['type']
if prop_type in ['integer', 'number']:
value = random.randint(0, 99)
elif prop_type == 'boolean':
value = random.random() < 0.5
elif prop_type == 'string':
value = self.random_string(2)
elif prop_type == 'object':
pass # only event capsule has that
else:
raise ValueError(
'Unexpected property type: %s' % prop_type)
event[name] = value
return event
def random_string(self, length):
alphabet = (string.ascii_uppercase + string.digits +
string.ascii_lowercase)
return ''.join(random.choice(alphabet) for _ in range(length))
def get_schema(schema_name, schema_revision):
conn = httplib.HTTPSConnection(SCHEMA_HOST)
conn.request("GET", SCHEMA_URL % (schema_name, schema_revision))
data = json.loads(conn.getresponse().read())
pages = data['query']['pages']
page_id = pages.keys()[0]
schema_str = pages[page_id]['revisions'][0]['*']
return json.loads(schema_str)
def send_event(event, endpoint):
query_string = urllib.quote(json.dumps(event))
conn = httplib.HTTPConnection(endpoint)
conn.request("GET", EL_URL % query_string)
def get_arguments():
# Get argparse params.
ap = argparse.ArgumentParser(
description='EventLogging load tester',
fromfile_prefix_chars='@')
ap.add_argument(
'events_per_second',
help='Number of total of events per second that will be sent.',
default='100')
ap.add_argument(
'-s', '--schema',
help=('Format: "SchemaName:Revision:Share". Example: '
'"Edit:11448630:0.35". SchemaName and Revision indicate a '
'schema for which events will be sent. Share indicates the '
'proportion of events for that schema (Integer or float).'),
action='append')
ap.add_argument(
'--optional-values',
help=('Indicates when to instantiate optional event fields. '
'Possible values: "never", "sometimes" and "always".'),
default='sometimes')
ap.add_argument(
'--endpoint',
help=('Hostname where events should be sent. '
'E.g. bits.wikimedia.org'),
default='bits.beta.wmflabs.org')
args = ap.parse_args()
# Check and build sleep interval param.
try:
events_per_second = int(args.events_per_second)
sleep_interval = 1.0 / events_per_second
except ValueError:
raise ValueError('Invalid parameter events_per_second: %s.' %
args.events_per_second)
# Check and build generators param.
generators = []
if args.schema:
for schema in args.schema:
try:
schema_name, schema_revision, schema_share = schema.split(':')
schema_revision = int(schema_revision)
schema_share = float(schema_share)
except ValueError:
raise ValueError('Invalid parameter -s/--schema: %s.' % schema)
generator = EventGenerator(schema_name, schema_revision)
generators.append((generator, schema_share))
# Check and build optional values param.
optional_values = 'sometimes'
if args.optional_values:
if args.optional_values in ['never', 'sometimes', 'always']:
optional_values = args.optional_values
else:
raise ValueError('Invalid parameter --optional-values: %s.' %
args.optional_values)
return sleep_interval, generators, optional_values, args.endpoint
def weighted_choice(choices):
total = sum(w for c, w in choices)
r = random.uniform(0, total)
upto = 0
for c, w in choices:
if upto + w > r:
return c
upto += w
def main():
print 'Initializing...'
sleep_interval, generators, optional_values, endpoint = get_arguments()
capsule_schema = get_schema('EventCapsule', CAPSULE_REVISION)
pool = ThreadPool(POOL_SIZE)
print 'Sending events...'
count = 0
try:
while True:
t1 = time.time()
generator = weighted_choice(generators)
event = generator.generate(capsule_schema, optional_values)
pool.add_task(send_event, event, endpoint)
t2 = time.time()
count += 1
time_to_sleep = max(sleep_interval - (t2 - t1), 0)
time.sleep(time_to_sleep)
except KeyboardInterrupt:
print '\n%d events sent, exiting.' % count
if __name__ == '__main__':
main()
| gpl-3.0 | -1,500,287,337,348,135,200 | 33.774775 | 79 | 0.585363 | false |
lbdreyer/nc-time-axis | nc_time_axis/__init__.py | 1 | 10445 | """
Support for netcdftime axis in matplotlib.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
from collections import namedtuple
import matplotlib.dates as mdates
import matplotlib.ticker as mticker
import matplotlib.transforms as mtransforms
import matplotlib.units as munits
import netcdftime
import numpy as np
# Define __version__ based on versioneer's interpretation.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
# Lower and upper are in number of days.
FormatOption = namedtuple('FormatOption', ['lower', 'upper', 'format_string'])
class CalendarDateTime(object):
"""
Container for :class:`netcdftime.datetime` object and calendar.
"""
def __init__(self, datetime, calendar):
self.datetime = datetime
self.calendar = calendar
def __eq__(self, other):
return (isinstance(other, self.__class__) and
self.datetime == other.datetime and
self.calendar == other.calendar)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
msg = '<{}: datetime={}, calendar={}>'
return msg.format(type(self).__name__, self.datetime, self.calendar)
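# Illustrative usage sketch (comments only; the variable names and the
# '360_day' calendar below are invented for the example). Wrapping raw
# netcdftime.datetime values in CalendarDateTime lets matplotlib pick up the
# converter registered at the bottom of this module:
#
#   import matplotlib.pyplot as plt
#   times = [CalendarDateTime(netcdftime.datetime(2000, m, 1), '360_day')
#            for m in range(1, 13)]
#   plt.plot(times, range(12))
#   plt.show()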
class NetCDFTimeDateFormatter(mticker.Formatter):
"""
Formatter for netcdftime.datetime data.
"""
# Some magic numbers. These seem to work pretty well.
format_options = [FormatOption(0.0, 0.2, '%H:%M:%S'),
FormatOption(0.2, 0.8, '%H:%M'),
FormatOption(0.8, 15, '%Y-%m-%d %H:%M'),
FormatOption(15, 90, '%Y-%m-%d'),
FormatOption(90, 900, '%Y-%m'),
FormatOption(900, 6000000, '%Y')]
def __init__(self, locator, calendar, time_units):
#: The locator associated with this formatter. This is used to get hold
#: of the scaling information.
self.locator = locator
self.calendar = calendar
self.time_units = time_units
def pick_format(self, ndays):
"""
Returns a format string for an interval of the given number of days.
"""
for option in self.format_options:
if option.lower < ndays <= option.upper:
return option.format_string
else:
msg = 'No formatter found for an interval of {} days.'
raise ValueError(msg.format(ndays))
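    # Worked example (numbers chosen for illustration): with the table above,
    # an axis span of 0.5 days falls in the (0.2, 0.8] bucket and yields
    # '%H:%M', while a span of 50 days falls in the (15, 90] bucket and
    # yields '%Y-%m-%d'.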
def __call__(self, x, pos=0):
format_string = self.pick_format(ndays=self.locator.ndays)
dt = netcdftime.utime(self.time_units, self.calendar).num2date(x)
return dt.strftime(format_string)
class NetCDFTimeDateLocator(mticker.Locator):
"""
Determines tick locations when plotting netcdftime.datetime data.
"""
def __init__(self, max_n_ticks, calendar, date_unit, min_n_ticks=3):
# The date unit must be in the form of days since ...
self.max_n_ticks = max_n_ticks
self.min_n_ticks = min_n_ticks
self._max_n_locator = mticker.MaxNLocator(max_n_ticks, integer=True)
self._max_n_locator_days = mticker.MaxNLocator(
max_n_ticks, integer=True, steps=[1, 2, 4, 7, 14])
self.calendar = calendar
self.date_unit = date_unit
if not self.date_unit.lower().startswith('days since'):
msg = 'The date unit must be days since for a NetCDF time locator.'
raise ValueError(msg)
self._cached_resolution = {}
def compute_resolution(self, num1, num2, date1, date2):
"""
Returns the resolution of the dates (hourly, minutely, yearly), and
an **approximate** number of those units.
"""
num_days = float(np.abs(num1 - num2))
resolution = 'SECONDLY'
n = mdates.SEC_PER_DAY
if num_days * mdates.MINUTES_PER_DAY > self.max_n_ticks:
resolution = 'MINUTELY'
n = int(num_days / mdates.MINUTES_PER_DAY)
if num_days * mdates.HOURS_PER_DAY > self.max_n_ticks:
resolution = 'HOURLY'
n = int(num_days / mdates.HOURS_PER_DAY)
if num_days > self.max_n_ticks:
resolution = 'DAILY'
n = int(num_days)
if num_days > 30 * self.max_n_ticks:
resolution = 'MONTHLY'
n = num_days // 30
if num_days > 365 * self.max_n_ticks:
resolution = 'YEARLY'
n = abs(date1.year - date2.year)
return resolution, n
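    # Worked example (numbers chosen for illustration): with max_n_ticks=4, a
    # span of roughly two years (~730 days) exceeds 30 * 4 but not 365 * 4,
    # so compute_resolution() returns ('MONTHLY', 24); a span of a century
    # returns ('YEARLY', <difference in years>).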
def __call__(self):
vmin, vmax = self.axis.get_view_interval()
return self.tick_values(vmin, vmax)
def tick_values(self, vmin, vmax):
vmin, vmax = mtransforms.nonsingular(vmin, vmax, expander=1e-7,
tiny=1e-13)
self.ndays = float(abs(vmax - vmin))
utime = netcdftime.utime(self.date_unit, self.calendar)
lower = utime.num2date(vmin)
upper = utime.num2date(vmax)
resolution, n = self.compute_resolution(vmin, vmax, lower, upper)
if resolution == 'YEARLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENNIUM as
# appropriate.
years = self._max_n_locator.tick_values(lower.year, upper.year)
ticks = [netcdftime.datetime(int(year), 1, 1) for year in years]
elif resolution == 'MONTHLY':
            # TODO START AT THE BEGINNING OF A DECADE/CENTURY/MILLENNIUM as
# appropriate.
months_offset = self._max_n_locator.tick_values(0, n)
ticks = []
for offset in months_offset:
year = lower.year + np.floor((lower.month + offset) / 12)
month = ((lower.month + offset) % 12) + 1
ticks.append(netcdftime.datetime(int(year), int(month), 1))
elif resolution == 'DAILY':
# TODO: It would be great if this favoured multiples of 7.
days = self._max_n_locator_days.tick_values(vmin, vmax)
ticks = [utime.num2date(dt) for dt in days]
elif resolution == 'HOURLY':
hour_unit = 'hours since 2000-01-01'
hour_utime = netcdftime.utime(hour_unit, self.calendar)
in_hours = hour_utime.date2num([lower, upper])
hours = self._max_n_locator.tick_values(in_hours[0], in_hours[1])
ticks = [hour_utime.num2date(dt) for dt in hours]
elif resolution == 'MINUTELY':
minute_unit = 'minutes since 2000-01-01'
minute_utime = netcdftime.utime(minute_unit, self.calendar)
in_minutes = minute_utime.date2num([lower, upper])
minutes = self._max_n_locator.tick_values(in_minutes[0],
in_minutes[1])
ticks = [minute_utime.num2date(dt) for dt in minutes]
elif resolution == 'SECONDLY':
second_unit = 'seconds since 2000-01-01'
second_utime = netcdftime.utime(second_unit, self.calendar)
in_seconds = second_utime.date2num([lower, upper])
seconds = self._max_n_locator.tick_values(in_seconds[0],
in_seconds[1])
ticks = [second_utime.num2date(dt) for dt in seconds]
else:
msg = 'Resolution {} not implemented yet.'.format(resolution)
raise ValueError(msg)
return utime.date2num(ticks)
class NetCDFTimeConverter(mdates.DateConverter):
"""
Converter for netcdftime.datetime data.
"""
standard_unit = 'days since 2000-01-01'
@staticmethod
def axisinfo(unit, axis):
"""
Returns the :class:`~matplotlib.units.AxisInfo` for *unit*.
*unit* is a tzinfo instance or None.
The *axis* argument is required but not used.
"""
calendar, date_unit = unit
majloc = NetCDFTimeDateLocator(4, calendar=calendar,
date_unit=date_unit)
majfmt = NetCDFTimeDateFormatter(majloc, calendar=calendar,
time_units=date_unit)
datemin = CalendarDateTime(netcdftime.datetime(2000, 1, 1), calendar)
datemax = CalendarDateTime(netcdftime.datetime(2010, 1, 1), calendar)
return munits.AxisInfo(majloc=majloc, majfmt=majfmt, label='',
default_limits=(datemin, datemax))
@classmethod
def default_units(cls, sample_point, axis):
"""
Computes some units for the given data point.
"""
try:
# Try getting the first item. Otherwise we just use this item.
sample_point = sample_point[0]
except (TypeError, IndexError):
pass
if not hasattr(sample_point, 'calendar'):
msg = 'Expecting netcdftimes with an extra "calendar" attribute.'
raise ValueError(msg)
return sample_point.calendar, cls.standard_unit
@classmethod
def convert(cls, value, unit, axis):
"""
Converts value, if it is not already a number or sequence of numbers,
with :func:`netcdftime.utime().date2num`.
"""
if isinstance(value, np.ndarray):
# Don't do anything with numeric types.
if value.dtype != np.object:
return value
first_value = value[0]
else:
# Don't do anything with numeric types.
if munits.ConversionInterface.is_numlike(value):
return value
first_value = value
if not isinstance(first_value, CalendarDateTime):
raise ValueError('The values must be numbers or instances of '
'"nc_time_axis.CalendarDateTime".')
if not isinstance(first_value.datetime, netcdftime.datetime):
raise ValueError('The datetime attribute of the CalendarDateTime '
'object must be of type `netcdftime.datetime`.')
ut = netcdftime.utime(cls.standard_unit, calendar=first_value.calendar)
if isinstance(value, CalendarDateTime):
value = [value]
return ut.date2num([v.datetime for v in value])
# Automatically register NetCDFTimeConverter with matplotlib.unit's converter
# dictionary.
if CalendarDateTime not in munits.registry:
munits.registry[CalendarDateTime] = NetCDFTimeConverter()
| bsd-3-clause | 7,312,765,437,910,256,000 | 36.437276 | 79 | 0.589086 | false |
nave91/dbt | test/integration/012_profile_config_tests/test_profile_config.py | 1 | 1413 | from nose.plugins.attrib import attr
from test.integration.base import DBTIntegrationTest
import dbt.deprecations
class TestNoRunTargetDeprecation(DBTIntegrationTest):
def setUp(self):
DBTIntegrationTest.setUp(self)
self.run_sql_file("test/integration/012_profile_config_tests/seed.sql")
dbt.deprecations.reset_deprecations()
@property
def schema(self):
return "profile_config_012"
@property
def models(self):
return "test/integration/012_profile_config_tests/models"
@property
def profile_config(self):
return {
'test': {
'outputs': {
'my-target': {
'type': 'postgres',
'threads': 1,
'host': 'database',
'port': 5432,
'user': 'root',
'pass': 'password',
'dbname': 'dbt',
'schema': self.unique_schema()
}
},
'target': 'my-target'
}
}
@attr(type='postgres')
def test_deprecated_run_target_config(self):
results = self.run_dbt()
self.assertEqual(len(results), 1)
self.assertTablesEqual("seed","view_model")
self.assertFalse('run-target' in dbt.deprecations.active_deprecations)
| apache-2.0 | -2,097,655,783,617,538,300 | 28.4375 | 79 | 0.519462 | false |
uts-magic-lab/pyride_nao | scripts/py_main.py | 1 | 2931 | import PyNAO
import math
import constants
import tinstate
import messenger
import tininfo
import time
from timers import timermanager
myMessenger = None
msgTryTimer = -1
calTryTimer = -1
cktime = time.time()
def userLogon( name ):
PyNAO.say( '%s has logged on.' % name )
tinstate.updateStatus( constants.USER_PRESENT, False )
def userLogoff( name ):
PyNAO.say( '%s has logged off.' % name )
tinstate.updateStatus( constants.USER_PRESENT, len(PyNAO.listCurrentUsers()) == 0)
def bumperActions( id ):
global cktime
if time.time() - cktime <= 60:
return #ignore
if id == 'right':
users = PyNAO.listCurrentUsers()
if len(users) == 0:
PyNAO.say( 'No one is telepresent.' )
else:
PyNAO.say( 'Following users are telepresent:' )
for i in users:
PyNAO.say( '\pau=1000\ %s' % i )
elif id == 'left':
if len(PyNAO.listCurrentUsers()) > 0:
PyNAO.say( 'I will notify the telepresent members to tune in.' )
PyNAO.updateOperationalStatus( constants.CUSTOM_STATE, 'Need your attention' )
cktime = time.time()
def remoteCommandActions( cmd, arg ):
pass
def timerLapsedActions( id ):
timermanager.onTimerLapsed( id )
def timerActions( id ):
global myMessenger, msgTryTimer
if msgTryTimer == id and myMessenger.checkin():
PyNAO.removeTimer( msgTryTimer )
msgTryTimer = -1
else:
timermanager.onTimerCall( id )
def chestBtnActions( id ):
global myMessenger, purgeuser
if id == 1:
myMessenger.announce()
#do report messages
elif id == 2:
myMessenger.purgearchive()
elif id == 3:
PyNAO.say( constants.INTRO_TEXT )
def powerPlugChangeActions( isplugged ):
global myMessenger
text = ""
if isplugged:
text = "I'm on main power."
else:
text = "I'm on battery power."
PyNAO.say( text )
if myMessenger:
myMessenger.updatestatus( text )
def batteryChargeChangeActions( batpc, isdischarge ):
global myMessenger
if batpc < 20 and isdischarge:
PyNAO.say( "I'm low on battery, please put me back on main power." )
if myMessenger:
myMessenger.updatestatus( "I have only %d percent battery power left!" % batpc )
def systemShutdownActions():
global myMessenger
myMessenger.checkout()
PyNAO.say( "I am going off line. Goodbye." )
def main():
global myMessenger, msgTryTimer
PyNAO.onUserLogOn = userLogon
PyNAO.onUserLogOff = userLogoff
PyNAO.onBumperPressed = bumperActions
PyNAO.onTimer = timerActions
PyNAO.onTimerLapsed = timerLapsedActions
PyNAO.onRemoteCommand = remoteCommandActions
PyNAO.onChestButtonPressed = chestBtnActions
PyNAO.onSystemShutdown = systemShutdownActions
PyNAO.onPowerPluggedChange = powerPlugChangeActions
PyNAO.onBatteryChargeChange = batteryChargeChangeActions
PyNAO.say( constants.INTRO_TEXT )
myMessenger = messenger.Messenger()
if not myMessenger.checkin():
msgTryTimer = PyNAO.addTimer( 10*60, -1, 10*60 )
| gpl-3.0 | 5,249,737,204,137,457,000 | 23.838983 | 86 | 0.707608 | false |
KonradBreitsprecher/espresso | testsuite/dawaanr-and-bh-gpu.py | 1 | 5810 | from __future__ import print_function
import math
import unittest as ut
import numpy as np
from numpy import linalg as la
from numpy.random import random, seed
import espressomd
from espressomd.interactions import *
from espressomd.magnetostatics import *
from espressomd.analyze import *
from tests_common import *
from espressomd import assert_features, has_features, missing_features
@ut.skipIf(not has_features(["DIPOLAR_BARNES_HUT"]),
"Features not available, skipping test!")
class BHGPUTest(ut.TestCase):
longMessage = True
# Handle for espresso system
system = espressomd.System(box_l=[1,1,1])
def vectorsTheSame(self,a,b):
tol = 5E-2
vec_len = la.norm(a - b)
rel = 2 * vec_len / (la.norm(a) + la.norm(b))
if rel <= tol:
return True
else:
return False
def stopAll(self):
for i in range(len(self.system.part)):
self.system.part[i].v = np.array([0.0,0.0,0.0])
self.system.part[i].omega_body = np.array([0.0,0.0,0.0])
def run_test_case(self):
seed(1)
pf_bh_gpu = 2.34
pf_dawaanr = 3.524
ratio_dawaanr_bh_gpu = pf_dawaanr / pf_bh_gpu
l = 15
self.system.box_l = [l, l, l]
self.system.periodicity = [0, 0, 0]
self.system.time_step = 1E-4
self.system.cell_system.skin = 0.1
part_dip = np.zeros((3))
for n in [ 110, 111, 540, 541, 5946 ]:
dipole_modulus = 1.3
# scale the box for a large number of particles:
if n > 1000:
l *= (n / 541) ** (1 / 3.0)
for i in range(n):
part_pos = np.array(random(3)) * l
costheta = 2 * random() - 1
sintheta = np.sin(np.arcsin(costheta))
phi = 2 * np.pi * random()
part_dip[0] = sintheta * np.cos(phi) * dipole_modulus
part_dip[1] = sintheta * np.sin(phi) * dipole_modulus
part_dip[2] = costheta * dipole_modulus
self.system.part.add(id = i, type = 0, pos = part_pos, dip = part_dip, v = np.array([0,0,0]), omega_body = np.array([0,0,0]))
self.system.non_bonded_inter[0, 0].lennard_jones.set_params(
epsilon=10.0, sigma=0.5,
cutoff=0.55, shift="auto")
self.system.thermostat.set_langevin(kT=0.0, gamma=10.0)
self.stopAll()
self.system.integrator.set_vv()
self.system.non_bonded_inter[0, 0].lennard_jones.set_params(
epsilon=0.0, sigma=0.0,
cutoff=-1, shift=0.0)
self.system.cell_system.skin = 0.0
self.system.time_step = 0.01
self.system.thermostat.turn_off()
# gamma should be zero in order to avoid the noise term in force and torque
self.system.thermostat.set_langevin(kT=1.297, gamma=0.0)
dds_cpu = DipolarDirectSumCpu(prefactor = pf_dawaanr)
self.system.actors.add(dds_cpu)
self.system.integrator.run(steps = 0,recalc_forces = True)
dawaanr_f = []
dawaanr_t = []
for i in range(n):
dawaanr_f.append(self.system.part[i].f)
dawaanr_t.append(self.system.part[i].torque_lab)
dawaanr_e = Analysis(self.system).energy()["total"]
del dds_cpu
for i in range(len(self.system.actors.active_actors)):
self.system.actors.remove(self.system.actors.active_actors[i])
self.system.integrator.run(steps = 0,recalc_forces = True)
bh_gpu = DipolarBarnesHutGpu(prefactor = pf_bh_gpu, epssq = 200.0, itolsq = 8.0)
self.system.actors.add(bh_gpu)
self.system.integrator.run(steps = 0,recalc_forces = True)
bhgpu_f = []
bhgpu_t = []
for i in range(n):
bhgpu_f.append(self.system.part[i].f)
bhgpu_t.append(self.system.part[i].torque_lab)
bhgpu_e = Analysis(self.system).energy()["total"]
# compare
for i in range(n):
self.assertTrue(self.vectorsTheSame(np.array(dawaanr_t[i]),ratio_dawaanr_bh_gpu * np.array(bhgpu_t[i])), \
msg = 'Torques on particle do not match. i={0} dawaanr_t={1} ratio_dawaanr_bh_gpu*bhgpu_t={2}'.format(i,np.array(dawaanr_t[i]), ratio_dawaanr_bh_gpu * np.array(bhgpu_t[i])))
self.assertTrue(self.vectorsTheSame(np.array(dawaanr_f[i]),ratio_dawaanr_bh_gpu * np.array(bhgpu_f[i])), \
msg = 'Forces on particle do not match: i={0} dawaanr_f={1} ratio_dawaanr_bh_gpu*bhgpu_f={2}'.format(i,np.array(dawaanr_f[i]), ratio_dawaanr_bh_gpu * np.array(bhgpu_f[i])))
self.assertTrue(abs(dawaanr_e - bhgpu_e * ratio_dawaanr_bh_gpu) <= abs(1E-3 * dawaanr_e), \
msg = 'Energies for dawaanr {0} and bh_gpu {1} do not match.'.format(dawaanr_e,ratio_dawaanr_bh_gpu * bhgpu_e))
self.system.integrator.run(steps = 0,recalc_forces = True)
del bh_gpu
for i in range(len(self.system.actors.active_actors)):
self.system.actors.remove(self.system.actors.active_actors[i])
self.system.part.clear()
def test(self):
if (self.system.cell_system.get_state()["n_nodes"] > 1):
print("NOTE: Ignoring testcase for n_nodes > 1")
else:
self.run_test_case()
if __name__ == '__main__':
print("Features: ", espressomd.features())
ut.main()
| gpl-3.0 | 3,505,035,319,132,454,000 | 41.720588 | 205 | 0.543373 | false |
richtermondt/ankiqt | ankiqt/ui/getshared.py | 1 | 8977 | # -*- coding: utf-8 -*-
# Copyright: Damien Elmes <[email protected]>
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import ankiqt, simplejson, time, cStringIO, zipfile, tempfile, os, re
import traceback, urllib2, socket, cgi
from ankiqt.ui.utils import saveGeom, restoreGeom, showInfo
from anki.utils import fmtTimeSpan
R_ID = 0
R_USERNAME = 1
R_TITLE = 2
R_DESCRIPTION = 3
R_TAGS = 4
R_VERSION = 5
R_FACTS = 6
R_SIZE = 7
R_COUNT = 8
R_MODIFIED = 9
R_FNAME = 10
class GetShared(QDialog):
def __init__(self, parent, type):
QDialog.__init__(self, parent, Qt.Window)
self.parent = parent
self.form = ankiqt.forms.getshared.Ui_Dialog()
self.form.setupUi(self)
self.ok = True
self.conErrMsg = _("""\
<b>Unable to connect to the server.<br><br>
Please check your network connection or try again in a few minutes.</b><br>
<br>
Error was:<pre>%s</pre>""")
restoreGeom(self, "getshared")
self.setupTable()
self.onChangeType(type)
if type == 0:
self.setWindowTitle(_("Download Shared Deck"))
else:
self.setWindowTitle(_("Download Shared Plugin"))
if self.ok:
self.exec_()
def setupTable(self):
self.connect(
self.form.table, SIGNAL("currentCellChanged(int,int,int,int)"),
self.onCellChanged)
self.form.table.verticalHeader().setDefaultSectionSize(
self.parent.config['editLineSize'])
self.connect(self.form.search, SIGNAL("textChanged(QString)"),
self.limit)
def fetchData(self):
self.parent.setProgressParent(None)
self.parent.startProgress()
self.parent.updateProgress()
try:
socket.setdefaulttimeout(30)
try:
sock = urllib2.urlopen(
"http://anki.ichi2.net/file/search?t=%d" % self.type)
self.allList = simplejson.loads(unicode(sock.read()))
except:
showInfo(self.conErrMsg % cgi.escape(unicode(
traceback.format_exc(), "utf-8", "replace")))
self.close()
self.ok = False
return
finally:
self.parent.finishProgress()
socket.setdefaulttimeout(None)
self.form.search.setFocus()
self.typeChanged()
self.limit()
def limit(self, txt=""):
if not txt:
self.curList = self.allList
else:
txt = unicode(txt).lower()
self.curList = [
l for l in self.allList
if (txt in l[R_TITLE].lower() or
txt in l[R_DESCRIPTION].lower() or
txt in l[R_TAGS].lower())]
self.redraw()
def redraw(self):
self.form.table.setSortingEnabled(False)
self.form.table.setRowCount(len(self.curList))
self.items = {}
if self.type == 0:
cols = (R_TITLE, R_FACTS, R_COUNT, R_MODIFIED)
else:
cols = (R_TITLE, R_COUNT, R_MODIFIED)
for rc, r in enumerate(self.curList):
for cc, c in enumerate(cols):
if c == R_FACTS or c == R_COUNT:
txt = unicode("%15d" % r[c])
elif c == R_MODIFIED:
days = int(((time.time() - r[c])/(24*60*60)))
txt = ngettext("%6d day ago", "%6d days ago", days) % days
else:
txt = unicode(r[c])
item = QTableWidgetItem(txt)
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
self.items[item] = r
self.form.table.setItem(rc, cc, item)
self.form.table.setSortingEnabled(True)
if self.type == 0:
self.form.table.sortItems(2, Qt.DescendingOrder)
else:
self.form.table.sortItems(1, Qt.DescendingOrder)
self.form.table.selectRow(0)
self.onCellChanged(None, None, None, None)
def onCellChanged(self, row, col, x, y):
ci = self.form.table.currentItem()
if not ci:
self.form.bottomLabel.setText(_("Nothing selected."))
return
r = self.items[ci]
self.curRow = r
self.form.bottomLabel.setText(_("""\
<b>Title</b>: %(title)s<br>
<b>Tags</b>: %(tags)s<br>
<b>Size</b>: %(size)0.2fKB<br>
<b>Uploader</b>: %(author)s<br>
<b>Downloads</b>: %(count)s<br>
<b>Modified</b>: %(mod)s ago<br>
<br>%(description)s""") % {
'title': r[R_TITLE],
'tags': r[R_TAGS],
'size': r[R_SIZE] / 1024.0,
'author': r[R_USERNAME],
'count': r[R_COUNT],
'mod': fmtTimeSpan(time.time() - r[R_MODIFIED]),
'description': r[R_DESCRIPTION].replace("\n", "<br>"),
})
self.form.scrollAreaWidgetContents.adjustSize()
self.form.scrollArea.setWidget(self.form.scrollAreaWidgetContents)
def onChangeType(self, type):
self.type = type
self.fetchData()
def typeChanged(self):
self.form.table.clear()
if self.type == 0:
self.form.table.setColumnCount(4)
self.form.table.setHorizontalHeaderLabels([
_("Title"), _("Facts"), _("Downloads"), _("Modified")])
else:
self.form.table.setColumnCount(3)
self.form.table.setHorizontalHeaderLabels([
_("Title"), _("Downloads"), _("Modified")])
self.form.table.horizontalHeader().setResizeMode(
0, QHeaderView.Stretch)
self.form.table.verticalHeader().hide()
def accept(self):
if self.type == 0:
if not self.parent.saveAndClose(hideWelcome=True, parent=self):
return QDialog.accept(self)
(fd, tmpname) = tempfile.mkstemp(prefix="anki")
tmpfile = os.fdopen(fd, "w+b")
cnt = 0
try:
socket.setdefaulttimeout(30)
self.parent.setProgressParent(self)
self.parent.startProgress()
self.parent.updateProgress()
try:
sock = urllib2.urlopen(
"http://anki.ichi2.net/file/get?id=%d" %
self.curRow[R_ID])
while 1:
data = sock.read(32768)
if not data:
break
cnt += len(data)
tmpfile.write(data)
self.parent.updateProgress(
label=_("Downloaded %dKB") % (cnt/1024.0))
except:
showInfo(self.conErrMsg % cgi.escape(unicode(
traceback.format_exc(), "utf-8", "replace")))
self.close()
return
finally:
socket.setdefaulttimeout(None)
self.parent.setProgressParent(None)
self.parent.finishProgress()
QDialog.accept(self)
# file is fetched
tmpfile.seek(0)
self.handleFile(tmpfile)
QDialog.accept(self)
def handleFile(self, file):
ext = os.path.splitext(self.curRow[R_FNAME])[1]
if ext == ".zip":
z = zipfile.ZipFile(file)
else:
z = None
tit = self.curRow[R_TITLE]
tit = re.sub("[^][A-Za-z0-9 ()\-]", "", tit)
tit = tit[0:40]
if self.type == 0:
# deck
dd = self.parent.documentDir
p = os.path.join(dd, tit + ".anki")
if os.path.exists(p):
tit += "%d" % time.time()
for l in z.namelist():
if l == "shared.anki":
dpath = os.path.join(dd, tit + ".anki")
open(dpath, "wb").write(z.read(l))
elif l.startswith("shared.media/"):
try:
os.mkdir(os.path.join(dd, tit + ".media"))
except OSError:
pass
open(os.path.join(dd, tit + ".media",
os.path.basename(l)),"wb").write(z.read(l))
self.parent.loadDeck(dpath)
else:
pd = self.parent.pluginsFolder()
if z:
for l in z.infolist():
try:
os.makedirs(os.path.join(
pd, os.path.dirname(l.filename)))
except OSError:
pass
if l.filename.endswith("/"):
# directory
continue
path = os.path.join(pd, l.filename)
open(path, "wb").write(z.read(l.filename))
else:
open(os.path.join(pd, tit + ext), "wb").write(file.read())
showInfo(_("Plugin downloaded. Please restart Anki."),
parent=self)
| gpl-3.0 | 2,402,456,029,569,720,300 | 35.197581 | 81 | 0.508745 | false |
captiosus/treadmill | treadmill/supervisor/__init__.py | 1 | 16553 | """Supervisor definition anc control.
Linux:
Manages daemontools-like services inside the container.
For each application container there may be multiple services defined, which
are controlled by skarnet.org s6 supervision suite.
Application container is started in chrooted environment, and the root
directory structure::
    /
    /services/
        foo/
        bar/
Application container is started with the supervisor monitoring the services
directory using 'svscan /services'. The svscan becomes the container 'init' -
parent to all processes inside the container.
Treadmill will put svscan inside relevant cgroup hierarchy and subsystems.
Once started, services are added by created subdirectory for each service.
The following files are created in each directory:
- run
- app.sh
The run file is executed by s6-supervise. The run file will perform the
following actions:
- setuidgid - change execution context to the proid
- softlimit - part of the suite, set process limits
- setlock ../../<app.name> - this will create a lock monitored by Treadmill,
so that Treadmill is notified when the app exits.
- exec app.sh
All services will be started by Treadmill runtime using 's6-svc' utility. Each
service will be started with 'svc -o' (run once) option, and Treadmill will
be responsible for restart and maintaining restart count.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import os
import logging
import time
import enum
import jinja2
import six
if six.PY2 and os.name == 'posix':
import subprocess32 as subprocess # pylint: disable=import-error
else:
import subprocess # pylint: disable=wrong-import-order
from treadmill import fs
from treadmill import utils
from treadmill import subproc
from . import _service_base
from . import _utils as supervisor_utils
if os.name == 'nt':
from . import winss as sup_impl
_PREFIX = 'winss'
else:
# Disable C0411: standard import "import pwd" comes before "import enum"
import pwd # pylint: disable=C0411
from . import s6 as sup_impl
_PREFIX = 's6'
_LOGGER = logging.getLogger(__name__)
JINJA2_ENV = jinja2.Environment(loader=jinja2.PackageLoader(__name__))
# svc exits 111 if it cannot send a command.
ERR_COMMAND = 111
# svc exits 100 if no supervise process is running on servicedir.
ERR_NO_SUP = 100
POLICY_JSON = 'policy.json'
TRACE_FILE = 'trace'
def open_service(service_dir, existing=True):
"""Open a service object from a service directory.
:param ``str`` service_dir:
Location of the service to open.
:param ``bool`` existing:
Whether the service must already exist
:returns ``_service_base.Service``:
Instance of a service
"""
if not isinstance(service_dir, _service_base.Service):
svc_data = _service_base.Service.read_dir(
service_dir
)
if svc_data is None:
if existing:
raise ValueError('Invalid Service directory: %r' % service_dir)
else:
svc_type = _service_base.ServiceType.LongRun
svc_basedir = os.path.dirname(service_dir)
svc_name = os.path.basename(service_dir)
else:
svc_type, svc_basedir, svc_name = svc_data
return sup_impl.create_service(
svc_basedir=svc_basedir,
svc_name=svc_name,
svc_type=svc_type
)
return service_dir
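# Example (path invented): open_service('/services/sshd') returns a service
# object built by the s6/winss implementation; with existing=True (the
# default), a ValueError is raised if the directory is not a valid service
# directory.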
def _create_scan_dir_s6(scan_dir, finish_timeout, monitor_service=None):
"""Create a scan directory.
:param ``str`` scan_dir:
Location of the scan directory.
:param ``int`` finish_timeout:
The finish script timeout.
:param ``str`` monitor_service:
Service monitoring other services in this scan directory.
:returns ``_service_dir_base.ServiceDirBase``:
Instance of a service dir
"""
if not isinstance(scan_dir, sup_impl.ScanDir):
scan_dir = sup_impl.ScanDir(scan_dir)
svscan_finish_script = utils.generate_template(
's6.svscan.finish',
timeout=finish_timeout,
_alias=subproc.get_aliases()
)
scan_dir.finish = svscan_finish_script
svscan_sigterm_script = utils.generate_template(
's6.svscan.sigterm',
monitor_service=monitor_service,
_alias=subproc.get_aliases()
)
scan_dir.sigterm = svscan_sigterm_script
svscan_sighup_script = utils.generate_template(
's6.svscan.sighup',
monitor_service=monitor_service,
_alias=subproc.get_aliases()
)
scan_dir.sighup = svscan_sighup_script
svscan_sigint_script = utils.generate_template(
's6.svscan.sigint',
monitor_service=monitor_service,
_alias=subproc.get_aliases()
)
scan_dir.sigint = svscan_sigint_script
svscan_sigquit_script = utils.generate_template(
's6.svscan.sigquit',
monitor_service=monitor_service,
_alias=subproc.get_aliases()
)
scan_dir.sigquit = svscan_sigquit_script
return scan_dir
def _create_scan_dir_winss(scan_dir, finish_timeout):
"""Create a scan directory.
:param ``str`` scan_dir:
Location of the scan directory.
:param ``int`` finish_timeout:
The finish script timeout.
:returns ``_service_dir_base.ServiceDirBase``:
Instance of a service dir
"""
if not isinstance(scan_dir, sup_impl.ScanDir):
scan_dir = sup_impl.ScanDir(scan_dir)
svscan_finish_script = utils.generate_template(
'winss.svscan.finish',
timeout=finish_timeout,
scan_dir=scan_dir.directory,
_alias=subproc.get_aliases()
)
scan_dir.finish = svscan_finish_script
return scan_dir
# Disable C0103: Invalid constant name "create_service"
# pylint: disable=C0103
if _PREFIX == 'winss':
create_scan_dir = _create_scan_dir_winss
else:
create_scan_dir = _create_scan_dir_s6
def create_environ_dir(env_dir, env, update=False):
"""Create/update environment directory for the supervisor.
"""
fs.mkdir_safe(env_dir)
supervisor_utils.environ_dir_write(
env_dir, env,
update=update
)
# Disable W0613: Unused argument 'kwargs' (for s6/winss compatibility)
# pylint: disable=W0613
def _create_service_s6(base_dir,
name,
app_run_script,
userid='root',
downed=False,
environ_dir=None,
environ=None,
environment='prod',
monitor_policy=None,
trace=None,
timeout_finish=None,
run_script='s6.run',
log_run_script='s6.logger.run',
finish_script='s6.finish',
**kwargs):
"""Initializes service directory.
Creates run, finish scripts as well as log directory with appropriate
run script.
"""
try:
user_pw = pwd.getpwnam(userid)
except KeyError:
# Check the identity we are going to run as. It needs to exists on the
# host or we will fail later on as we try to seteuid.
_LOGGER.exception('Unable to find userid %r in passwd database.',
userid)
raise
if isinstance(base_dir, sup_impl.ScanDir):
# We are given a scandir as base, use it.
svc = base_dir.add_service(name, _service_base.ServiceType.LongRun)
else:
svc = LongrunService(base_dir, name)
# Setup the environ
if environ is None:
svc_environ = {}
else:
svc_environ = environ.copy()
svc_environ['HOME'] = user_pw.pw_dir
svc.environ = svc_environ
if environment == 'prod':
ionice_prio = 5
else:
ionice_prio = 6
monitored = (monitor_policy is not None)
# Setup the run script
svc.run_script = utils.generate_template(
run_script,
user=userid,
shell=user_pw.pw_shell,
environ_dir=environ_dir,
monitored=monitored,
ionice_prio=ionice_prio,
_alias=subproc.get_aliases()
)
# Setup the finish script
svc.finish_script = utils.generate_template(
finish_script,
_alias=subproc.get_aliases()
)
# Setup the log run script
svc.log_run_script = utils.generate_template(
log_run_script,
logdir=os.path.relpath(
os.path.join(svc.data_dir, 'log'),
svc.logger_dir
),
_alias=subproc.get_aliases()
)
svc.default_down = bool(downed)
if monitored:
svc.timeout_finish = 0
else:
svc.timeout_finish = timeout_finish
svc.write()
# Write the app_start script
supervisor_utils.script_write(
os.path.join(svc.data_dir, 'app_start'),
app_run_script
)
# Optionally write a monitor policy file
_LOGGER.info('monitor_policy, %r', monitor_policy)
if monitor_policy is not None:
supervisor_utils.data_write(
os.path.join(svc.data_dir, POLICY_JSON),
json.dumps(monitor_policy)
)
# Optionally write trace information file
if trace is not None:
supervisor_utils.data_write(
os.path.join(svc.data_dir, TRACE_FILE),
json.dumps(trace)
)
return svc
# Disable W0613: Unused argument 'kwargs' (for s6/winss compatibility)
# pylint: disable=W0613
def _create_service_winss(base_dir,
name,
app_run_script,
downed=False,
environ=None,
monitor_policy=None,
timeout_finish=None,
run_script='winss.run',
log_run_script='winss.logger.run',
finish_script='winss.finish',
**kwargs):
"""Initializes service directory.
Creates run, finish scripts as well as log directory with appropriate
run script.
"""
if isinstance(base_dir, sup_impl.ScanDir):
# We are given a scandir as base, use it.
svc = base_dir.add_service(name, _service_base.ServiceType.LongRun)
else:
svc = LongrunService(base_dir, name)
# Setup the environ
if environ is None:
svc_environ = {}
else:
svc_environ = environ.copy()
svc.environ = svc_environ
monitored = (monitor_policy is not None)
# Setup the run script
svc.run_script = utils.generate_template(
run_script,
app_run_script=app_run_script,
_alias=subproc.get_aliases()
)
# Setup the finish script
svc.finish_script = utils.generate_template(
finish_script,
_alias=subproc.get_aliases()
)
logdir = os.path.join(svc.data_dir, 'log')
fs.mkdir_safe(logdir)
# Setup the log run script
svc.log_run_script = utils.generate_template(
log_run_script,
logdir=os.path.relpath(
logdir,
svc.logger_dir
),
_alias=subproc.get_aliases()
)
svc.default_down = bool(downed)
if monitored:
svc.timeout_finish = 0
else:
svc.timeout_finish = timeout_finish
svc.write()
# Optionally write a monitor policy file
if monitor_policy is not None:
supervisor_utils.data_write(
os.path.join(svc.data_dir, POLICY_JSON),
json.dumps(monitor_policy)
)
return svc
# Disable C0103: Invalid constant name "create_service"
# pylint: disable=C0103
if _PREFIX == 'winss':
create_service = _create_service_winss
else:
create_service = _create_service_s6
class ServiceWaitAction(enum.Enum):
"""Enumeration of wait actions."""
# pylint complains: Invalid class attribute name "up"
up = 'u' # pylint: disable=C0103
down = 'd'
really_up = 'U'
really_down = 'D'
class ServiceControlAction(enum.Enum):
"""Enumeration of control actions."""
kill = 'k'
once = 'o'
once_at_most = 'O'
down = 'd'
# pylint complains: Invalid class attribute name "up"
up = 'u' # pylint: disable=C0103
exit = 'x'
class SvscanControlAction(enum.Enum):
"""Enumeration of control actions."""
alarm = 'a'
abort = 'b'
nuke = 'n'
quit = 'q'
exit = 'x'
def _get_cmd(cmd):
return _PREFIX + '_' + cmd
def _get_wait_action(action):
if os.name == 'nt' and action == ServiceWaitAction.really_up:
action = ServiceWaitAction.up
return action
def is_supervised(service_dir):
"""Checks if the supervisor is running."""
try:
subproc.check_call([_get_cmd('svok'), service_dir])
return True
except subprocess.CalledProcessError as err:
# svok returns 1 when the service directory is not supervised.
if err.returncode == 1:
return False
else:
raise
def control_service(service_dir, actions, wait=None, timeout=0):
"""Sends a control signal to the supervised process."""
cmd = [_get_cmd('svc')]
if wait:
cmd.append('-w' + _get_wait_action(wait).value)
if timeout > 0:
cmd.extend(['-T{}'.format(timeout)])
action_str = '-'
for action in utils.get_iterable(actions):
action_str += action.value
cmd.append(action_str)
cmd.append(service_dir)
try:
subproc.check_call(cmd)
return True
except subprocess.CalledProcessError as err:
# svc returns 1 on timeout.
if err.returncode == 1:
return False
else:
raise
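# Example (path invented; the timeout value is passed to the underlying -T
# option): bring a service down and wait until it is really down, giving up
# after 30 seconds -
#
#   control_service('/services/sshd', ServiceControlAction.down,
#                   wait=ServiceWaitAction.really_down, timeout=30000)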
def control_svscan(scan_dir, actions):
"""Sends a control signal to a svscan instance."""
action_str = '-'
for action in utils.get_iterable(actions):
action_str += action.value
subproc.check_call([_get_cmd('svscanctl'), action_str, scan_dir])
def wait_service(service_dirs, action, all_services=True, timeout=0):
"""Performs a wait task on the given list of service directories."""
cmd = [_get_cmd('svwait')]
if timeout > 0:
cmd.extend(['-t{}'.format(timeout)])
if not all_services:
cmd.append('-o')
cmd.append('-' + _get_wait_action(action).value)
cmd.extend(utils.get_iterable(service_dirs))
try:
subproc.check_call(cmd)
return True
except subprocess.CalledProcessError as err:
# old svwait returns 1 and new svwait returns 99 on timeout.
if err.returncode in (1, 99):
return False
else:
raise
def ensure_not_supervised(service_dir):
"""Waits for the service and log service to not be supervised."""
service_dirs = []
if is_supervised(service_dir):
service_dirs.append(service_dir)
log_dir = os.path.join(service_dir, 'log')
if os.path.exists(log_dir) and is_supervised(log_dir):
service_dirs.append(log_dir)
for service in service_dirs:
try:
# Kill and close supervised process as it should have already
# been told to go down
control_service(service, (ServiceControlAction.kill,
ServiceControlAction.exit),
ServiceWaitAction.really_down,
timeout=1000)
except subprocess.CalledProcessError:
# Ignore this as supervisor may be down
pass
count = 0
while is_supervised(service):
count += 1
if count == 50:
raise Exception(
'Service dir {0} failed to stop in a reasonable time.'
.format(service)
)
time.sleep(0.1)
ScanDir = sup_impl.ScanDir
LongrunService = sup_impl.LongrunService
ServiceType = _service_base.ServiceType
__all__ = [
'ScanDir',
'LongrunService',
'ServiceType',
'ERR_COMMAND',
'ERR_NO_SUP',
'POLICY_JSON',
'TRACE_FILE',
'create_environ_dir',
'ServiceWaitAction',
'ServiceControlAction',
'SvscanControlAction',
'open_service',
'create_scan_dir',
'create_service',
'is_supervised',
'control_service',
'control_svscan',
'wait_service',
]
if _PREFIX == 's6':
BundleService = sup_impl.BundleService
OneshotService = sup_impl.OneshotService
__all__ += [
'BundleService',
'OneshotService',
]
| apache-2.0 | -1,955,057,397,741,377,800 | 27.103565 | 79 | 0.61137 | false |
warp1337/opencv_facerecognizer | src/ocvfacerec/facerec/operators.py | 1 | 5486 | # Copyright (c) 2015.
# Philipp Wagner <bytefish[at]gmx[dot]de> and
# Florian Lier <flier[at]techfak.uni-bielefeld.de> and
# Norman Koester <nkoester[at]techfak.uni-bielefeld.de>
#
#
# Released to public domain under terms of the BSD Simplified license.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the organization nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# See <http://www.opensource.org/licenses/bsd-license>
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numpy as np
from ocvfacerec.facerec.feature import AbstractFeature
class FeatureOperator(AbstractFeature):
"""
A FeatureOperator operates on two feature models.
Args:
model1 [AbstractFeature]
model2 [AbstractFeature]
"""
def __init__(self, model1, model2):
if (not isinstance(model1, AbstractFeature)) or (not isinstance(model2, AbstractFeature)):
raise Exception("A FeatureOperator only works on classes implementing an AbstractFeature!")
self.model1 = model1
self.model2 = model2
def __repr__(self):
return "FeatureOperator(" + repr(self.model1) + "," + repr(self.model2) + ")"
class ChainOperator(FeatureOperator):
"""
The ChainOperator chains two feature extraction modules:
model2.compute(model1.compute(X,y),y)
Where X can be generic input data.
Args:
model1 [AbstractFeature]
model2 [AbstractFeature]
"""
def __init__(self, model1, model2):
FeatureOperator.__init__(self, model1, model2)
def compute(self, X, y):
X = self.model1.compute(X, y)
return self.model2.compute(X, y)
def extract(self, X):
X = self.model1.extract(X)
return self.model2.extract(X)
def __repr__(self):
return "ChainOperator(" + repr(self.model1) + "," + repr(self.model2) + ")"
class CombineOperator(FeatureOperator):
"""
The CombineOperator combines the output of two feature extraction modules as:
(model1.compute(X,y),model2.compute(X,y))
, where the output of each feature is a [1xN] or [Nx1] feature vector.
Args:
model1 [AbstractFeature]
model2 [AbstractFeature]
"""
def __init__(self, model1, model2):
FeatureOperator.__init__(self, model1, model2)
def compute(self, X, y):
A = self.model1.compute(X, y)
B = self.model2.compute(X, y)
C = []
for i in range(0, len(A)):
ai = np.asarray(A[i]).reshape(1, -1)
bi = np.asarray(B[i]).reshape(1, -1)
C.append(np.hstack((ai, bi)))
return C
def extract(self, X):
ai = self.model1.extract(X)
bi = self.model2.extract(X)
ai = np.asarray(ai).reshape(1, -1)
bi = np.asarray(bi).reshape(1, -1)
return np.hstack((ai, bi))
def __repr__(self):
return "CombineOperator(" + repr(self.model1) + "," + repr(self.model2) + ")"
class CombineOperatorND(FeatureOperator):
"""
The CombineOperator combines the output of two multidimensional feature extraction modules.
(model1.compute(X,y),model2.compute(X,y))
Args:
model1 [AbstractFeature]
model2 [AbstractFeature]
hstack [bool] stacks data horizontally if True and vertically if False
"""
def __init__(self, model1, model2, hstack=True):
FeatureOperator.__init__(self, model1, model2)
self._hstack = hstack
def compute(self, X, y):
A = self.model1.compute(X, y)
B = self.model2.compute(X, y)
C = []
for i in range(0, len(A)):
if self._hstack:
C.append(np.hstack((A[i], B[i])))
else:
C.append(np.vstack((A[i], B[i])))
return C
def extract(self, X):
ai = self.model1.extract(X)
bi = self.model2.extract(X)
if self._hstack:
return np.hstack((ai, bi))
return np.vstack((ai, bi))
def __repr__(self):
return "CombineOperatorND(" + repr(self.model1) + "," + repr(self.model2) + ", hstack=" + str(
self._hstack) + ")"
| bsd-3-clause | 1,871,205,741,122,541,000 | 33.942675 | 103 | 0.642362 | false |
AstroTech/workshop-python | _book/conf.py | 1 | 3091 | author = 'Matt Harasymczuk'
email = '[email protected]'
project = 'Python 3: from None to Machine Learning'
description = "Matt Harasymczuk's Python 3: from None to Machine Learning"
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.imgmath',
# 'sphinx.ext.autosectionlabel',
# 'sphinx.ext.viewcode',
# 'sphinx.ext.coverage',
# 'sphinx.ext.githubpages',
# 'sphinx.ext.autodoc',
# 'sphinx.ext.intersphinx',
# 'sphinx.ext.graphviz',
'nbsphinx',
'sphinxcontrib.bibtex',
# 'sphinxjp.themes.revealjs',
'IPython.sphinxext.ipython_console_highlighting',
]
todo_emit_warnings = False
todo_include_todos = True
html_theme = 'sphinx_rtd_theme'
language = 'en'
numfig_format = {
'section': 'Sect. %s.',
'figure': 'Fig. %s.',
'table': 'Tab. %s.',
'code-block': 'Code Listing %s.',
}
# -----------------------------------------------------------------------------
# Standard book config
# -----------------------------------------------------------------------------
import datetime
import os
import re
import subprocess
import sys
source_directory = 'book'
highlight_language = 'python3'
pygments_style = 'borland'
numfig = True
templates_path = ['_templates']
master_doc = 'index'
source_suffix = ['.rst']
imgmath_image_format = 'svg'
today_fmt = '%Y-%m-%d'
project_slug = re.sub(r'[\W]+', '', project)
sha1 = subprocess.Popen('git log -1 --format="%h"', stdout=subprocess.PIPE, shell=True).stdout.read().decode().replace('\n', '')
version = '#{sha1}, {date:%Y-%m-%d}'.format(sha1=sha1, date=datetime.date.today())
release = '#{sha1}, {date:%Y-%m-%d}'.format(sha1=sha1, date=datetime.date.today())
copyright = '{year}, {author} <{email}>'.format(
year=datetime.date.today().year,
author=author,
email=email,
)
exclude_patterns = [
'about/assignment/*.rst',
'_build', '.build',
'_tmp',
'**/contrib',
'_themes',
'**.ipynb_checkpoints',
'_static',
'*/_template.rst',
'.*', 'venv*', 'virtualenv*',
'index-*.rst',
]
extensions_dir = os.path.join(os.path.dirname(__file__), '', '_extensions')
sys.path.append(extensions_dir)
html_theme_path = ['_themes']
html_static_path = ['_static']
html_favicon = '_static/favicon.png'
html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']}
html_show_sphinx = False
htmlhelp_basename = project
latex_documents = [(master_doc, '{0}.tex'.format(project_slug), project, author, 'manual')]
latex_elements = {
'papersize': 'a4paper',
'pointsize': '10pt',
'figure_align': 'htbp',
# Fix for: LaTeX Backend Fails with Citations In Figure Captions
'preamble': r"""
\usepackage{etoolbox}
\AtBeginEnvironment{figure}{\renewcommand{\phantomsection}{}}
"""
}
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
epub_exclude_files = ['search.html']
man_pages = [
(master_doc, project_slug, project, [author], 1)
]
texinfo_documents = [
(master_doc, project_slug, project, author, project, '', 'Miscellaneous'),
]
| mit | 2,576,808,613,306,298,400 | 26.598214 | 128 | 0.612423 | false |
TheCoSMoCompany/biopredyn | Prototype/src/libsbml-5.10.0/examples/python/validateSBML.py | 1 | 6406 | #!/usr/bin/env python
##
## @file validateSBML.py
## @brief Validates one or more SBML files
## @author Akiya Jouraku (translated from libSBML C++ examples)
## @author Ben Bornstein
## @author Michael Hucka
##
## <!--------------------------------------------------------------------------
## This sample program is distributed under a different license than the rest
## of libSBML. This program uses the open-source MIT license, as follows:
##
## Copyright (c) 2013-2014 by the California Institute of Technology
## (California, USA), the European Bioinformatics Institute (EMBL-EBI, UK)
## and the University of Heidelberg (Germany), with support from the National
## Institutes of Health (USA) under grant R01GM070923. All rights reserved.
##
## Permission is hereby granted, free of charge, to any person obtaining a
## copy of this software and associated documentation files (the "Software"),
## to deal in the Software without restriction, including without limitation
## the rights to use, copy, modify, merge, publish, distribute, sublicense,
## and/or sell copies of the Software, and to permit persons to whom the
## Software is furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in
## all copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
## THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
## FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
## DEALINGS IN THE SOFTWARE.
##
## Neither the name of the California Institute of Technology (Caltech), nor
## of the European Bioinformatics Institute (EMBL-EBI), nor of the University
## of Heidelberg, nor the names of any contributors, may be used to endorse
## or promote products derived from this software without specific prior
## written permission.
## ------------------------------------------------------------------------ -->
import sys
import os.path
import time
import libsbml
class validateSBML:
def __init__(self, ucheck):
self.reader = libsbml.SBMLReader()
self.ucheck = ucheck
self.numinvalid = 0
def validate(self, file):
if not os.path.exists(file):
print("[Error] %s : No such file." % (infile))
self.numinvalid += 1
return
start = time.time()
sbmlDoc = libsbml.readSBML(file)
stop = time.time()
timeRead = (stop - start)*1000
errors = sbmlDoc.getNumErrors()
seriousErrors = False
numReadErr = 0
numReadWarn = 0
errMsgRead = ""
if errors > 0:
for i in range(errors):
severity = sbmlDoc.getError(i).getSeverity()
if (severity == libsbml.LIBSBML_SEV_ERROR) or (severity == libsbml.LIBSBML_SEV_FATAL):
seriousErrors = True
numReadErr += 1
else:
numReadWarn += 1
errMsgRead = sbmlDoc.getErrorLog().toString()
# If serious errors are encountered while reading an SBML document, it
# does not make sense to go on and do full consistency checking because
# the model may be nonsense in the first place.
numCCErr = 0
numCCWarn = 0
errMsgCC = ""
skipCC = False;
timeCC = 0.0
if seriousErrors:
skipCC = True;
errMsgRead += "Further consistency checking and validation aborted."
self.numinvalid += 1;
else:
sbmlDoc.setConsistencyChecks(libsbml.LIBSBML_CAT_UNITS_CONSISTENCY, self.ucheck)
start = time.time()
failures = sbmlDoc.checkConsistency()
stop = time.time()
timeCC = (stop - start)*1000
if failures > 0:
isinvalid = False;
for i in range(failures):
severity = sbmlDoc.getError(i).getSeverity()
if (severity == libsbml.LIBSBML_SEV_ERROR) or (severity == libsbml.LIBSBML_SEV_FATAL):
numCCErr += 1
isinvalid = True;
else:
numCCWarn += 1
if isinvalid:
self.numinvalid += 1;
errMsgCC = sbmlDoc.getErrorLog().toString()
#
# print results
#
print(" filename : %s" % (file))
print(" file size (byte) : %d" % (os.path.getsize(file)))
print(" read time (ms) : %f" % (timeRead))
if not skipCC :
print( " c-check time (ms) : %f" % (timeCC))
else:
print( " c-check time (ms) : skipped")
print( " validation error(s) : %d" % (numReadErr + numCCErr))
if not skipCC :
print( " (consistency error(s)): %d" % (numCCErr))
else:
print( " (consistency error(s)): skipped")
print( " validation warning(s) : %d" % (numReadWarn + numCCWarn))
if not skipCC :
print( " (consistency warning(s)): %d" % (numCCWarn))
else:
print( " (consistency warning(s)): skipped")
if errMsgRead or errMsgCC:
print()
print( "===== validation error/warning messages =====\n")
if errMsgRead :
print( errMsgRead)
if errMsgCC :
print( "*** consistency check ***\n")
print( errMsgCC)
def main (args):
"""usage: validateSBML.py [-u] inputfile1 [inputfile2 ...]
-u skips unit consistency check
"""
if len(args) < 2:
print( main.__doc__)
sys.exit(1)
elif (len(args) == 1) and (args[1] == "-u"):
print( main.__doc__)
sys.exit(1)
enableUnitCCheck = True
if args[1] == "-u":
enableUnitCCheck = False
validator = validateSBML(enableUnitCCheck)
fnum = 0
for i in range(1,len(args)):
if args[i] == "-u":
continue
print( "---------------------------------------------------------------------------")
validator.validate(args[i])
fnum += 1
numinvalid = validator.numinvalid
print( "---------------------------------------------------------------------------")
print( "Validated %d files, %d valid files, %d invalid files" % (fnum, fnum - numinvalid, numinvalid))
if not enableUnitCCheck:
print( "(Unit consistency checks skipped)")
if numinvalid > 0:
sys.exit(1)
if __name__ == '__main__':
main(sys.argv)
| bsd-3-clause | -5,608,578,717,612,800,000 | 31.683673 | 104 | 0.604277 | false |
huaxz1986/git_book | chapters/Kaggle/grid_search.py | 1 | 1955 | import scipy
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import classification_report
from data_clean import current_time
from sklearn.model_selection import train_test_split
from data_preprocess import Data_Preprocesser,Data_Cleaner
def grid_search(tuned_parameters,data,train_size,seed):
'''
    Hyper-parameter optimization via grid search.
    :param tuned_parameters: dict of parameters to be optimized
    :param data: the data set
    :param train_size: size of the training set
    :param seed: seed for the random number generator
:return:
'''
print("----- Begin run grid_search at %s -------"%current_time())
X=data[:,:-1]
y=data[:,-1]
X_train,X_test,y_train,y_test=train_test_split(X,y,train_size=train_size,stratify=data[:,-1],random_state=seed)
clf=GridSearchCV(GradientBoostingClassifier(),tuned_parameters,cv=10,scoring="roc_auc")
clf.fit(X_train,y_train)
print("Best parameters set found:",clf.best_params_)
print("Randomized Grid scores:")
for params, mean_score, scores in clf.grid_scores_:
print("\t%0.3f (+/-%0.03f) for %s" % (mean_score, scores.std() * 2, params))
print("Optimized Score:",clf.score(X_test,y_test))
print("Detailed classification report:")
y_true, y_pred = y_test, clf.predict(X_test)
print(classification_report(y_true, y_pred))
print("----- End run grid_search at %s -------"%current_time())
if __name__=='__main__':
clearner=Data_Cleaner("./data/people.csv",'./data/act_train.csv','./data/act_test.csv')
result=clearner.load_data()
preprocessor=Data_Preprocesser(*result)
train_datas,test_datas=preprocessor.load_data()
    tuned_parameters={'subsample':[0.3,0.35,0.4,0.45,0.5,0.55,0.6],
                      'n_estimators':[30,35,50,100,150,200],
                      'max_depth':[2,4,8,16,32]}
grid_search(tuned_parameters,train_datas['type 7'],train_size=0.75,seed=0) | gpl-3.0 | -9,070,638,557,172,177,000 | 41.177778 | 115 | 0.659989 | false |
thousandparsec-obsolete/libmng-py | mngdisplay.py | 1 | 1428 | """\
mngdisplay.py <options> <filename>
Tries to display a MNG using different options.
-t sdl 	Use pygame (ctypes or C version)
-t pygame Alias of the above
-t pygameC Use the C version of pygame
-t pygameT Use the ctypes version of pygame
-t wx Use wx
"""
def main(argv):
import sys
type = None
for i in range(0, len(argv)):
arg = argv[i]
if arg == "-t":
type = argv[i+1]
break
while type == None:
# Try for pygame
try:
import pygame
type = "pygame"
break
except ImportError, e:
pass
# O well, maybe wx?
try:
import wx
type = "wx"
break
except ImportError, e:
pass
break
if type == "sdl":
type = "pygame"
print type
if type == "pygame":
import pygame
screen = pygame.display.set_mode((600, 600), 0, 16)
from mng.pygame import MNG
def input(events):
for event in events:
if event.type == pygame.QUIT:
sys.exit(0)
else:
print event
s = pygame.Surface((1,1)).convert_alpha()
a = MNG(argv[-1], s)
a.play()
while True:
screen.fill((0,255,0))
delay, image = a.nextframe()
screen.blit(image, (0,0))
pygame.display.flip()
input(pygame.event.get())
import sys
if __name__ == "__main__":
argv = list(sys.argv)
if argv[1] == "-p":
del argv[1]
print "Profile..."
import hotshot
prof = hotshot.Profile("hotshot_stats")
prof.runcall(main, argv)
prof.close()
else:
main(argv)
| lgpl-2.1 | 7,213,422,322,001,718,000 | 16 | 54 | 0.611345 | false |
BarraQDA/nvivotools | nvpn2nvp.py | 1 | 6639 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2020 Jonathan Schultz
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from argrecord import ArgumentHelper, ArgumentRecorder
import argparse
import NVivo
from mssqlTools import mssqlAPI
import os
import sys
import shutil
import subprocess
import tempfile
def DenormaliseNVP(arglist):
    parser = ArgumentRecorder(description='Create an NVivo for Windows file from a normalised SQLite file.')
parser.add_argument('-nv', '--nvivoversion', choices=["10", "11"], default="10",
help='NVivo version (10 or 11)')
parser.add_argument('-S', '--server', type=str,
help="IP address/name of Microsoft SQL Server")
parser.add_argument('-P', '--port', type=int,
help="Port of Microsoft SQL Server")
parser.add_argument('-i', '--instance', type=str,
help="Microsoft SQL Server instance")
parser.add_argument('-U', '--sshuser', type=str,
help="User name for ssh connections to server")
parser.add_argument('-u', '--users', choices=["skip", "merge", "overwrite", "replace"], default="merge",
help='User action.')
parser.add_argument('-p', '--project', choices=["skip", "overwrite"], default="overwrite",
help='Project action.')
parser.add_argument('-nc', '--node-categories', choices=["skip", "merge", "overwrite"], default="merge",
help='Node category action.')
parser.add_argument('-n', '--nodes', choices=["skip", "merge"], default="merge",
help='Node action.')
parser.add_argument('-na', '--node-attributes', choices=["skip", "merge", "overwrite"], default="merge",
help='Node attribute table action.')
parser.add_argument('-sc', '--source-categories', choices=["skip", "merge", "overwrite"], default="merge",
help='Source category action.')
parser.add_argument('--sources', choices=["skip", "merge", "overwrite"], default="merge",
help='Source action.')
parser.add_argument('-sa', '--source-attributes', choices=["skip", "merge", "overwrite"], default="merge",
help='Source attribute action.')
parser.add_argument('-t', '--taggings', choices=["skip", "merge"], default="merge",
help='Tagging action.')
parser.add_argument('-a', '--annotations', choices=["skip", "merge"], default="merge",
help='Annotation action.')
parser.add_argument('-b', '--base', dest='basefile', type=argparse.FileType('rb'), nargs='?',
help="Base NVP file to insert into")
parser.add_argument('-v', '--verbosity', type=int, default=1, private=True)
parser.add_argument('--logfile', type=str, help="Logfile, default is <outfile>.log",
private=True)
parser.add_argument('--no-logfile', action='store_true', help='Do not output a logfile')
parser.add_argument('infile', type=str, input=True,
help="Input normalised SQLite (.nvpn) file")
parser.add_argument('outfile', type=str, nargs='?', output=True,
help="Output NVivo for Windows (.nvp) file or directory; default is <infile>.nvp")
args = parser.parse_args(arglist)
# Function to execute a command either locally or remotely
def executecommand(command):
if not args.server: # ie server is on same machine as this script
return subprocess.check_output(command, text=True).strip()
else:
print(['ssh', ((args.sshuser + '@') if args.sshuser else '') + args.server] + [('"' + word + '"') if ' ' in word else word for word in command])
# This quoting of arguments is a bit of a hack but seems to work
return subprocess.check_output(['ssh', ((args.sshuser + '@') if args.sshuser else '') + args.server] + [('"' + word + '"') if ' ' in word else word for word in command], text=True).strip()
if args.outfile is None:
args.outfile = args.infile.rsplit('.',1)[0] + '.nvp'
elif os.path.isdir(args.outfile):
args.outfile = os.path.join(args.outfile,
                                    os.path.basename(args.infile.rsplit('.',1)[0] + '.nvp'))
if not args.no_logfile:
logfilename = args.outfile.rsplit('.',1)[0] + '.log'
incomments = ArgumentHelper.read_comments(logfilename) or ArgumentHelper.separator()
logfile = open(logfilename, 'w')
parser.write_comments(args, logfile, incomments=incomments)
logfile.close()
# Fill in extra arguments that NVivo module expects
args.mac = False
args.windows = True
if args.basefile is None:
args.basefile = os.path.dirname(os.path.realpath(__file__)) + os.path.sep + ('emptyNVivo10Win.nvp' if args.nvivoversion == '10' else 'emptyNVivo11Win.nvp')
if args.server is None:
if os.name != 'nt':
raise RuntimeError("This does not appear to be a Windows machine so --server must be specified.")
mssqlapi = mssqlAPI(args.server,
user=args.sshuser,
port=args.port,
instance=args.instance,
version = ('MSSQL12' if args.nvivoversion == '11' else 'MSSQL10_50'),
verbosity = args.verbosity)
# Get reasonably distinct yet recognisable DB name
dbname = 'nvivo' + str(os.getpid())
mssqlapi.attach(args.basefile, dbname)
try:
args.indb = 'sqlite:///' + args.infile
args.outdb = 'mssql+pymssql://nvivotools:nvivotools@' + (args.server or 'localhost') + ((':' + str(args.port)) if args.port else '') + '/' + dbname
NVivo.Denormalise(args)
mssqlapi.save(args.outfile, dbname)
except:
raise
finally:
mssqlapi.drop(dbname)
if __name__ == '__main__':
DenormaliseNVP(None)
| gpl-3.0 | 6,203,106,739,478,604,000 | 46.76259 | 200 | 0.602802 | false |
yoon-gu/Mozart | mozart/mesh/triangle.py | 1 | 5353 | import numpy as np
def compute_n4s(n4e):
"""
	Get a matrix in which each row contains the end points of the corresponding side (or edge)
	Parameters
- ``n4e`` (``int32 array``) : nodes for elements
Returns
- ``n4s`` (``int32 array``) : nodes for sides
Example
>>> n4e = np.array([[1, 3, 0], [3, 1, 2]])
>>> n4s = compute_n4s(n4e)
>>> n4s
array([[1, 3],
[3, 0],
[1, 2],
[0, 1],
[2, 3]])
"""
allSides = np.vstack((np.vstack((n4e[:,[0,1]], n4e[:,[1,2]])),n4e[:,[2,0]]))
tmp=np.sort(allSides)
x, y = tmp.T
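	# Encode each sorted node pair (x, y) as the complex number x + y*1j so that
	# np.unique can pick out the distinct sides in a single pass.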
_, ind = np.unique(x + y*1.0j, return_index=True)
n4sInd = np.sort(ind)
n4s = allSides[n4sInd,:]
return n4s
def compute_s4e(n4e):
"""
	Get a matrix in which each row contains the three side numbers of the corresponding element
	Parameters
- ``n4e`` (``int32 array``) : nodes for elements
Returns
- ``s4e`` (``int32 array``) : sides for elements
Example
>>> n4e = np.array([[1, 3, 0], [3, 1, 2]])
>>> s4e = compute_s4e(n4e)
>>> s4e
array([[0, 1, 3],
[0, 2, 4]])
"""
allSides = np.vstack((np.vstack((n4e[:,[0,1]], n4e[:,[1,2]])),n4e[:,[2,0]]))
tmp=np.sort(allSides)
x, y = tmp.T
_, ind, back = np.unique(x + y*1.0j, return_index=True, return_inverse=True)
sortInd = ind.argsort()
sideNr = np.zeros(ind.size, dtype = int)
sideNr[sortInd] = np.arange(0,ind.size)
s4e = sideNr[back].reshape(3,-1).transpose().astype('int')
return s4e
def compute_e4s(n4e):
"""
	Get a matrix in which each row contains the two elements sharing the corresponding side
	If the second column is -1, the corresponding side is on the boundary
	Parameters
- ``n4e`` (``int32 array``) : nodes for elements
Returns
- ``e4s`` (``int32 array``) : elements for sides
Example
>>> n4e = np.array([[1, 3, 0], [3, 1, 2]])
>>> e4s = compute_e4s(n4e)
>>> e4s
array([[ 0, 1],
[ 0, -1],
[ 1, -1],
[ 0, -1],
[ 1, -1]])
"""
allSides = np.vstack((np.vstack((n4e[:,[0,1]], n4e[:,[1,2]])),n4e[:,[2,0]]))
tmp=np.sort(allSides)
x, y = tmp.T
_, ind, back = np.unique(x + y*1.0j, return_index=True, return_inverse=True)
n4sInd = np.sort(ind)
nrElems = n4e.shape[0]
elemNumbers = np.hstack((np.hstack((np.arange(0,nrElems),np.arange(0,nrElems))),np.arange(0,nrElems)))
e4s=np.zeros((ind.size,2),int)
e4s[:,0]=elemNumbers[n4sInd] + 1
allElems4s=np.zeros(allSides.shape[0],int)
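	# For each unique side, bincount sums the 1-based numbers of all elements containing
	# it; subtracting the first element below leaves the second element, or 0 for a
	# boundary side (which becomes -1 after the final shift).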
tmp2 = np.bincount((back + 1),weights = (elemNumbers + 1))
allElems4s[ind]=tmp2[1::]
e4s[:,1] = allElems4s[n4sInd] - e4s[:,0]
e4s=e4s-1
return e4s
def refineUniformRed(c4n, n4e, n4Db, n4Nb):
"""
	Refine a given mesh uniformly using red refinement
	Parameters
	- ``c4n`` (``float64 array``) : coordinates for nodes
- ``n4e`` (``int32 array``) : nodes for elements
	- ``n4Db`` (``int32 array``) : nodes for Dirichlet boundary
- ``n4Nb`` (``int32 array``) : nodes for Neumann boundary
Returns
	- ``c4nNew`` (``float64 array``) : coordinates for nodes obtained from red refinement
- ``n4eNew`` (``int32 array``) : nodes for element obtained from red refinement
- ``n4DbNew`` (``int32 array``) : nodes for Dirichlet boundary obtained from red refinement
- ``n4NbNew`` (``int32 array``) : nodes for Neumann boundary obtained from red refinement
Example
>>> c4n = np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])
>>> n4e = np.array([[1, 3, 0], [3, 1, 2]])
>>> n4Db = np.array([[0, 1], [1, 2]])
>>> n4Nb = np.array([[2, 3],[3, 0]])
>>> c4nNew, n4eNew, n4DbNew, n4NbNew = refineUniformRed(c4n, n4e, n4Db, n4Nb)
>>> c4nNew
array([[ 0. , 0. ],
[ 1. , 0. ],
[ 1. , 1. ],
[ 0. , 1. ],
[ 0.5, 0.5],
[ 0. , 0.5],
[ 1. , 0.5],
[ 0.5, 0. ],
[ 0.5, 1. ]])
>>> n4eNew
array([[1, 4, 7],
[4, 3, 5],
[5, 7, 4],
[7, 5, 0],
[3, 4, 8],
[4, 1, 6],
[6, 8, 4],
[8, 6, 2]])
>>> n4DbNew
array([[0, 7],
[7, 1],
[1, 6],
[6, 2]])
	>>> n4NbNew
array([[2, 8],
[8, 3],
[3, 5],
[5, 0]])
"""
nrNodes = c4n.shape[0]
nrElems = n4e.shape[0]
n4s = compute_n4s(n4e)
nrSides = n4s.shape[0]
from scipy.sparse import coo_matrix
newNodes4s = coo_matrix((np.arange(0,nrSides)+nrNodes, (n4s[:,0], n4s[:,1])), shape=(nrNodes, nrNodes))
newNodes4s = newNodes4s.tocsr()
newNodes4s = newNodes4s + newNodes4s.transpose()
mid4s = (c4n[n4s[:,0],:] + c4n[n4s[:,1],:]) * 0.5
c4nNew = np.vstack((c4n, mid4s))
n4eNew = np.zeros((4 * nrElems, 3), dtype=int)
for elem in range(0,nrElems):
nodes = n4e[elem,:]
newNodes = np.array([newNodes4s[nodes[0],nodes[1]], newNodes4s[nodes[1],nodes[2]], newNodes4s[nodes[2],nodes[0]]])
n4eNew[4*elem + np.arange(0,4),:] = np.array([[nodes[0], newNodes[0], newNodes[2]],
[newNodes[0], nodes[1], newNodes[1]], [newNodes[1], newNodes[2], newNodes[0]],
[newNodes[2], newNodes[1], nodes[2]]])
n4DbNew = np.zeros((2 * n4Db.shape[0], 2), dtype = int)
for side in range(0, n4Db.shape[0]):
nodes = n4Db[side,:]
newNodes = newNodes4s[nodes[0], nodes[1]]
n4DbNew[2*side + np.arange(0,2),:] = np.array([[nodes[0], newNodes], [newNodes, nodes[1]]])
n4NbNew = np.zeros((2 * n4Nb.shape[0], 2), dtype = int)
for side in range(0, n4Nb.shape[0]):
nodes = n4Nb[side,:]
newNodes = newNodes4s[nodes[0], nodes[1]]
n4NbNew[2*side + np.arange(0,2),:] = np.array([[nodes[0], newNodes], [newNodes, nodes[1]]])
return (c4nNew, n4eNew, n4DbNew, n4NbNew) | mit | -4,737,356,614,649,700,000 | 28.58011 | 116 | 0.57706 | false |
googlei18n/glyphsLib | Lib/glyphsLib/builder/masters.py | 1 | 4706 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division, absolute_import, unicode_literals
import os
from .axes import font_uses_new_axes, get_axis_definitions
from .constants import GLYPHS_PREFIX, GLYPHLIB_PREFIX
MASTER_ID_LIB_KEY = GLYPHS_PREFIX + "fontMasterID"
UFO_FILENAME_KEY = GLYPHLIB_PREFIX + "ufoFilename"
UFO_YEAR_KEY = GLYPHLIB_PREFIX + "ufoYear"
UFO_NOTE_KEY = GLYPHLIB_PREFIX + "ufoNote"
def to_ufo_master_attributes(self, source, master):
ufo = source.font
ufo.info.ascender = master.ascender
ufo.info.capHeight = master.capHeight
ufo.info.descender = master.descender
ufo.info.xHeight = master.xHeight
horizontal_stems = master.horizontalStems
vertical_stems = master.verticalStems
italic_angle = -master.italicAngle
if horizontal_stems:
ufo.info.postscriptStemSnapH = horizontal_stems
if vertical_stems:
ufo.info.postscriptStemSnapV = vertical_stems
if italic_angle is not None:
ufo.info.italicAngle = italic_angle
year = master.userData[UFO_YEAR_KEY]
if year is not None:
ufo.info.year = year
note = master.userData[UFO_NOTE_KEY]
if note is not None:
ufo.info.note = note
# All of this will go into the designspace as well
# "Native" designspace fonts will only have the designspace info
# FIXME: (jany) maybe we should not duplicate the information and only
# write it in the designspace?
widthValue = master.widthValue
weightValue = master.weightValue
if weightValue is not None:
ufo.lib[GLYPHS_PREFIX + "weightValue"] = weightValue
if widthValue:
ufo.lib[GLYPHS_PREFIX + "widthValue"] = widthValue
for number in ("", "1", "2", "3"):
custom_value = getattr(master, "customValue" + number)
if custom_value:
ufo.lib[GLYPHS_PREFIX + "customValue" + number] = custom_value
if font_uses_new_axes(self.font):
		# Set the OS/2 weightClass and widthClass according to this master's
# user location ("Axis Location" parameter)
for axis in get_axis_definitions(self.font):
if axis.tag in ("wght", "wdth"):
user_loc = axis.get_user_loc(master)
axis.set_ufo_user_loc(ufo, user_loc)
self.to_ufo_blue_values(ufo, master)
self.to_ufo_guidelines(ufo, master)
self.to_ufo_master_user_data(ufo, master)
self.to_ufo_custom_params(ufo, master)
master_id = master.id
if self.minimize_glyphs_diffs:
ufo.lib[MASTER_ID_LIB_KEY] = master_id
def to_glyphs_master_attributes(self, source, master):
ufo = source.font
# Glyphs ensures that the master ID is unique by simply making up a new one when
# finding a duplicate.
ufo_master_id_lib_key = ufo.lib.get(MASTER_ID_LIB_KEY)
if ufo_master_id_lib_key and not self.font.masters[ufo_master_id_lib_key]:
master.id = ufo_master_id_lib_key
if source.filename is not None and self.minimize_ufo_diffs:
master.userData[UFO_FILENAME_KEY] = source.filename
elif ufo.path and self.minimize_ufo_diffs:
master.userData[UFO_FILENAME_KEY] = os.path.basename(ufo.path)
master.ascender = ufo.info.ascender
master.capHeight = ufo.info.capHeight
master.descender = ufo.info.descender
master.xHeight = ufo.info.xHeight
horizontal_stems = ufo.info.postscriptStemSnapH
vertical_stems = ufo.info.postscriptStemSnapV
italic_angle = 0
if ufo.info.italicAngle:
italic_angle = -ufo.info.italicAngle
if horizontal_stems:
master.horizontalStems = horizontal_stems
if vertical_stems:
master.verticalStems = vertical_stems
if italic_angle:
master.italicAngle = italic_angle
if ufo.info.year is not None:
master.userData[UFO_YEAR_KEY] = ufo.info.year
if ufo.info.note is not None:
master.userData[UFO_NOTE_KEY] = ufo.info.note
self.to_glyphs_blue_values(ufo, master)
self.to_glyphs_master_names(ufo, master)
self.to_glyphs_master_user_data(ufo, master)
self.to_glyphs_guidelines(ufo, master)
self.to_glyphs_custom_params(ufo, master)
| apache-2.0 | 2,800,870,986,591,150,000 | 36.648 | 84 | 0.694858 | false |
nschloe/quadpy | src/quadpy/tn/_stroud_1969.py | 1 | 1840 | import ndim
import numpy as np
from sympy import Rational as frac
from sympy import sqrt
from ..helpers import article, rd, untangle
from ._helpers import TnScheme
source = article(
authors=["A.H. Stroud"],
title="A Fifth Degree Integration Formula for the n-Simplex",
journal="SIAM J. Numer. Anal.",
volume="6",
number="1",
pages="90–98",
url="https://doi.org/10.1137/0706009",
)
def stroud_1969(n):
assert n >= 3
degree = 5
sqrt15 = sqrt(15)
t = frac(1, n + 1)
r1, r2 = [(n + 4 - pm * sqrt15) / (n ** 2 + 8 * n + 1) for pm in [+1, -1]]
s1, s2 = [(4 * n + 1 + pm * n * sqrt15) / (n ** 2 + 8 * n + 1) for pm in [+1, -1]]
u1, u2 = [(n + 7 + pm * 2 * sqrt15) / (n ** 2 + 14 * n - 11) for pm in [+1, -1]]
v1, v2 = [
(4 * n - 2 - pm * (n - 1) * sqrt15) / (n ** 2 + 14 * n - 11) for pm in [+1, -1]
]
# Solve linear equation system for x^k, k={0, 2, 3, 4, 5}, for the
# weights (the same is done in Stroud's article).
pts = [
np.full((1, n + 1), t),
rd(n + 1, [(r1, n), (s1, 1)]),
rd(n + 1, [(r2, n), (s2, 1)]),
rd(n + 1, [(u1, n - 1), (v1, 2)]),
]
k_range = [0, 2, 3, 4]
if n > 3:
pts.append(rd(n + 1, [(u2, n - 1), (v2, 2)]))
k_range.append(5)
b0 = ndim.nsimplex.integrate_monomial(n * [0], symbolic=True)
b = [
ndim.nsimplex.integrate_monomial(np.array([k] + (n - 1) * [0]), symbolic=True)
/ b0
for k in k_range
]
A = [[sum(p[:, 0] ** k) for p in pts] for k in k_range]
flt = np.vectorize(float)
w = np.linalg.solve(flt(A), flt(b))
data = [(w[i], pts[i]) for i in range(len(w))]
points, weights = untangle(data)
points = np.ascontiguousarray(points.T)
return TnScheme("Stroud 1969", n, weights, points, degree, source)
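# Example (sketch): stroud_1969(3) builds the degree-5 scheme on the 3-simplex;
# by the k=0 moment equation above, its weights sum to 1.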
| mit | 6,444,330,403,827,270,000 | 27.276923 | 87 | 0.507617 | false |
juliakreutzer/wtfrnn | transformTrees.py | 1 | 2645 | import codecs
import sys
from fromTextToFullTree import *
#transform parsed trees into full binary trees
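# Input: one parenthesised sentiment tree per line, e.g. "(3 (2 Good) (2 movie))",
# with single-digit labels. Phrase-level labels are discarded; sentences shorter
# than `limit` words are rebuilt as full binary trees via buildTree() and written
# to the *.full.<limit>.txt files.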
if __name__=="__main__":
limit=15
dataDir = "trees"
trainFile = codecs.open(dataDir+"/train.txt", "r", "utf8")
testFile = codecs.open(dataDir+"/test.txt", "r", "utf8")
devFile = codecs.open(dataDir+"/dev.txt", "r", "utf8")
trainOutFile = codecs.open(dataDir+"/train.full.%d.txt" % limit, "w", "utf8")
testOutFile = codecs.open(dataDir+"/test.full.%d.txt" % limit, "w", "utf8")
devOutFile = codecs.open(dataDir+"/dev.full.%d.txt" % limit, "w", "utf8")
inFiles = [trainFile, testFile, devFile]
outFiles = [trainOutFile, testOutFile, devOutFile]
for f, inFile in enumerate(inFiles):
for i,line in enumerate(inFile):
print "line", i
#first filter words from trees
if not line.startswith("("):
print("Not a valid format.")
sys.exit(-1)
wordLabels = list()
words = list()
sentenceLabel = -1
sentenceLevel = True
lastLabel = -1
word = ""
for j in xrange(len(line)):
char = line[j]
#print "char", char
if char == "(":
continue
elif char == ")":
if len(word)>0:
words.append(word)
#print "new word", word, lastLabel
wordLabels.append(lastLabel)
word = ""
elif char.isspace():
continue
else:
if char.isdigit(): #label
if sentenceLevel:
sentenceLabel = char
#print "sent label", sentenceLabel
sentenceLevel = False
else:
lastLabel = char #save for later
else: #word
word += char
assert len(words) == len(wordLabels)
if len(words)<limit:
#now transform to full tree
#remove phrase-level annotations, keep word-level and sentence-level
treeString = buildTree(words, wordLabels, sentenceLabel)
outFiles[f].write(treeString+"\n")
outFiles[f].flush()
#print sentenceLabel
#print wordLabels
trainFile.close()
testFile.close()
devFile.close()
trainOutFile.close()
testOutFile.close()
devOutFile.close()
| mit | -6,286,931,452,027,079,000 | 31.654321 | 84 | 0.487335 | false |
kamidox/weixin_producthunt | manage.py | 1 | 2537 | """
productporter.manage
~~~~~~~~~~~~~~~~~~~~
This script provides some easy to use commands for
creating the database with or without some sample content.
You can also run the development server with it.
Just type `python manage.py` to see the full list of commands.
:copyright: (c) 2014 by the ProductPorter Team.
:license: BSD, see LICENSE for more details.
"""
import os
import json
from flask import current_app
from flask.ext.script import (Manager, Shell, Server)
from flask.ext.migrate import MigrateCommand
from productporter.app import create_app
from productporter.product.models import Product
from productporter.extensions import db
from productporter.utils.helper import pull_and_save_posts, create_default_groups, \
create_admin_user, create_default_tags
from tests.fixtures.sampledata import SAMPLE_DATA
# Use the most specific configuration available: production if present,
# otherwise development, otherwise the packaged default.
try:
    from productporter.configs.production import ProductionConfig as Config
except ImportError:
    try:
        from productporter.configs.development import DevelopmentConfig as Config
    except ImportError:
        from productporter.configs.default import DefaultConfig as Config
app = create_app(Config)
manager = Manager(app)
# Run local server
manager.add_command("runserver", Server("localhost", port=5000))
# Migration commands
manager.add_command('db', MigrateCommand)
# Add interactive project shell
def make_shell_context():
return dict(app=current_app, db=db)
manager.add_command("shell", Shell(make_context=make_shell_context))
@manager.command
def initdb():
"""Creates the database."""
db.create_all()
@manager.command
def dropdb():
"""Deletes the database"""
db.drop_all()
@manager.command
def createall():
"""Creates the database."""
print("create database in %s" % (Config.SQLALCHEMY_DATABASE_URI))
db.drop_all()
db.create_all()
create_default_groups()
create_default_tags()
create_admin_user('admin', 'admin', '[email protected]')
@manager.command
def createdefault():
print("create default data for %s" % (Config.SQLALCHEMY_DATABASE_URI))
create_default_groups()
create_default_tags()
create_admin_user('admin', 'admin', '[email protected]')
@manager.command
def pullsample():
"""pull sample data"""
jsondata = json.loads(SAMPLE_DATA)
some_posts = jsondata['posts']
for p in some_posts:
pi = Product.from_json(p)
pi.save()
print('pull %d posts' % (len(some_posts)))
if __name__ == "__main__":
manager.run()
| bsd-2-clause | -6,277,587,297,717,733,000 | 27.829545 | 84 | 0.715018 | false |
tchaly-bethmaure/Emotes | models/emotesV8-1/config_related_save/csv_to_png/csv_to_png.py | 1 | 3825 |
import png
import os
def string_rgbhex_to_lst_rgbdec(strRGBHEX):
# strRGB ex : FFFFFF
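	# e.g. "FF0000" -> [255, 0, 0]; only pure FF / 00 channel values are handled
	# by give_dec_code() below.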
if len(strRGBHEX) == 6:
return [give_dec_code(strRGBHEX[0] + strRGBHEX[1]),
give_dec_code(strRGBHEX[2] + strRGBHEX[3]),
give_dec_code(strRGBHEX[4] + strRGBHEX[5])]
else:
return [0, 0, 0]
def give_dec_code(str_as_hex):
#str_as_hex ex: FF or 00
if str_as_hex == "FF":
return 255
if str_as_hex == "00":
return 0
def give_grey_code(str_as_hex):
	# str_as_hex ex: FF0000 (red) or 00FF00 (green); returns the grey level as a string
if str_as_hex == "FF0000":
return "0"
if str_as_hex == "00FF00":
return "1"
def convert_grey_with_only_RG():
# Conversion Hex to Dec in grey depth 2
zoom = 8;
dir = "/home/tchaly/EspaceDeTravail/Gama/works/EMOTES/models/emotesV8/config_related_save"
for file in os.listdir(dir):
if file.endswith(".csv"):
with open(dir+"/"+file,'r') as fichier:
print("Parsing : "+file+" ...")
line = ""
pixel_col = []
# Read each line of the file
for ligne in fichier.readlines():
pixel_line = ""
# line of pixels separated by a ';' ex : FF00FF;FFFFFF;FF0000; ... etc
line = ligne.strip("\t\r\n")
pixelHEX_tab = line.split(";")
					# Each pixel is a 6-character hex colour, e.g. FF0000
for pixelHex in pixelHEX_tab:
if pixelHex != "":
for i in range(zoom):
pixel_line += str(give_grey_code(pixelHex))
for i in range(zoom):
pixel_col.append(pixel_line)
print("OK.")
s = map(lambda x: map(int, x), pixel_col)
# Creating Png file
filename = file.replace(".csv",".png")
fichier.close()
print("Writing : "+filename+" ...")
f = open("png/"+filename, 'wb')
w = png.Writer(len(s[0]), len(s), greyscale=True, bitdepth=2)
w.write(f, s)
f.close()
print("OK.")
print("Done.")
def convert_to_RGB():
	# Conversion Hex to Dec, writing full 24-bit RGB output
zoom = 8;
dir = "/home/tchaly/EspaceDeTravail/Gama/works/EMOTES/models/emotesV8/config_related_save"
for file in os.listdir(dir):
if file.endswith(".csv"):
with open(dir+"/"+file,'r') as fichier:
print("Parsing : "+file+" ...")
line = ""
pixel_col = []
# Read each line of the file
for ligne in fichier.readlines():
pixel_line = []
# line of pixels separated by a ';' ex : FF00FF;FFFFFF;FF0000; ... etc
line = ligne.strip("\t\r\n")
pixelHEX_tab = line.split(";")
					# Each pixel is a 6-character hex colour, e.g. FF00FF
for pixelHex in pixelHEX_tab:
if pixelHex != "":
for i in range(zoom):
pixel_line.extend(string_rgbhex_to_lst_rgbdec(pixelHex))
for i in range(zoom):
pixel_col.append(pixel_line)
print("OK.")
# Creating Png file
filename = file.replace(".csv",".png")
fichier.close()
print("Writing : "+filename+" ...")
f = open(dir+"/png/"+filename, 'wb')
w = png.Writer(len(pixel_col), len(pixel_col))
w.write(f, pixel_col)
f.close()
print("OK.")
print("Done.")
convert_to_RGB() | gpl-2.0 | -876,502,132,345,458,700 | 35.788462 | 94 | 0.463791 | false |
vivsh/django-ginger | ginger/views/mixins.py | 1 | 1640 |
from ginger.exceptions import LoginRequired, PermissionRequired
from django.core.exceptions import PermissionDenied
__all__ = ['PermissionRequiredMixin', 'PrivilegeRequiredMixin', 'LoginRequiredMixin',
'StaffRequiredMixin', 'SuperUserRequiredMixin', 'OwnerRequiredMixin']
class PermissionRequiredMixin(object):
	def get_permission_url(self):
raise NotImplementedError
def get_user(self):
user = super(PermissionRequiredMixin, self).get_user()
url = self.get_permission_url()
if url is not None:
raise PermissionRequired(url)
return user
class LoginRequiredMixin(object):
def get_user(self):
user = super(LoginRequiredMixin, self).get_user()
if not user.is_authenticated():
raise LoginRequired
return user
class OwnerRequiredMixin(LoginRequiredMixin):
def get_target(self):
obj = super(OwnerRequiredMixin, self).get_target()
if not self.user.is_superuser and obj.owner != self.user:
raise PermissionDenied
return obj
class PrivilegeRequiredMixin(LoginRequiredMixin):
def get_user(self):
user = super(PrivilegeRequiredMixin, self).get_user()
if not self.has_privileges(user):
raise PermissionDenied
return user
def has_privileges(self, user):
raise NotImplementedError
class StaffRequiredMixin(PrivilegeRequiredMixin):
def has_privileges(self, user):
return user.is_staff
class SuperUserRequiredMixin(PrivilegeRequiredMixin):
def has_privileges(self, user):
return user.is_superuser
| mit | 8,859,729,103,791,103,000 | 25.031746 | 85 | 0.688415 | false |