column        type           range
commit        stringlengths  40 - 40
old_file      stringlengths  7 - 86
new_file      stringlengths  7 - 86
old_contents  stringlengths  63 - 1.64k
new_contents  stringlengths  243 - 2.71k
subject       stringlengths  24 - 138
message       stringlengths  25 - 394
lang          stringclasses  1 value
license       stringclasses  7 values
repos         stringlengths  12 - 20k
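The schema above maps directly onto the row-per-commit records listed below. As a minimal usage sketch (the dataset identifier is a placeholder, and loading through the Hugging Face `datasets` library is an assumption about how this dump is packaged):

```python
# Hypothetical loading sketch: the field names come from the schema above,
# but the dataset path below is a placeholder, not a real identifier.
from datasets import load_dataset

ds = load_dataset("path/to/this-commit-dataset", split="train")  # placeholder id

for row in ds:
    # Each record pairs a file's contents before and after one commit.
    print(row["commit"], row["old_file"], "->", row["new_file"])
    print(row["subject"])  # short commit subject (24-138 chars per the stats)
    old, new = row["old_contents"], row["new_contents"]  # old may be null
```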
commit: 70d2f182c09583802da2860994a99fd2bc9e39d5
old_file: kimochiconsumer/views.py
new_file: kimochiconsumer/views.py
old_contents:

```python
from pyramid.view import view_config
from pyramid.httpexceptions import (
    HTTPNotFound,
)


@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
    if 'page_id' in request.matchdict:
        data = request.kimochi.page(request.matchdict['page_id'])
    else:
        data = request.kimochi.page('1')

    return data


@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
    data = request.kimochi.gallery(request.matchdict['gallery_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data


@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
    data = request.kimochi.gallery_image(request.matchdict['gallery_id'],
                                         request.matchdict['image_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data
```

new_contents:

```python
from pyramid.view import view_config
from pyramid.httpexceptions import (
    HTTPNotFound,
)


@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
    if 'page_id' in request.matchdict:
        data = request.kimochi.page(request.matchdict['page_id'])
    else:
        data = request.kimochi.page('index')

    return data


@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
    data = request.kimochi.gallery(request.matchdict['gallery_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data


@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
    data = request.kimochi.gallery_image(request.matchdict['gallery_id'],
                                         request.matchdict['image_id'])

    if 'gallery' not in data or not data['gallery']:
        raise HTTPNotFound

    return data
```

subject: Use 'index' as the default page alias for lookups
message: Use 'index' as the default page alias for lookups
lang: Python
license: mit
repos:
matslindh/kimochi-consumer
commit: 5d61b4904057acbe235b74fc1122d09aa365bdeb
old_file: edx_data_research/monitor/monitor_tracking.py
new_file: edx_data_research/monitor/monitor_tracking.py
old_contents:

```python
import sys
import time

from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler


class TrackingEventHandler(FileSystemEventHandler):

    def on_created(self, event):
        pass

    def on_moved(self, event):
        pass


if __name__ == "__main__":
    if len(sys.argv) > 1:
        args = sys.argv[1]
    else:
        raise ValueError('Missing path to directory to monitor!!!')
    event_handler = TrackingEventHandler()
    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
```

new_contents:

```python
import sys
import time

from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler


class TrackingLogHandler(PatternMatchingEventHandler):

    def on_created(self, event):
        print event.__repr__()
        print event.event_type, event.is_directory, event.src_path


if __name__ == "__main__":
    if len(sys.argv) > 1:
        path = sys.argv[1]
    else:
        raise ValueError('Missing path to directory to monitor!!!')
    event_handler = TrackingLogHandler(['*.log'], ['*.log-errors'],
                                       case_sensitive=True)
    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
```

subject: Define handler for tracking log files
message: Define handler for tracking log files
lang: Python
license: mit
repos:
McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
commit: 91853432d2e57bd7c01403c943fff4c2dad1cf5a
old_file: openquake/__init__.py
new_file: openquake/__init__.py
old_contents:

```python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

__import__('pkg_resources').declare_namespace(__name__)
```

new_contents:

```python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

# Make the namespace compatible with old setuptools, like the one
# provided by QGIS 2.1x on Windows
try:
    __import__('pkg_resources').declare_namespace(__name__)
except ImportError:
    __path__ = __import__('pkgutil').extend_path(__path__, __name__)
```

subject: Make the openquake namespace compatible with old setuptools
message: Make the openquake namespace compatible with old setuptools
  Former-commit-id: b1323f4831645a19d5e927fc342abe4b319a76bb [formerly 529c98ec0a7c5a3fefa4da6cdf2f6a58b5487ebc] [formerly 529c98ec0a7c5a3fefa4da6cdf2f6a58b5487ebc [formerly e5f4dc01e94694bf9bfcae3ecd6eca34a33a24eb]]
  Former-commit-id: e01df405c03f37a89cdf889c45de410cb1ca9b00
  Former-commit-id: f8d3b5d4c1d3d81dee1c22a4e2563e6b8d116c74
lang: Python
license: agpl-3.0
repos:
gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine
commit: ca2789ad15cba31449e4946494122ab271a83c92
old_file: inspirationforge/settings/production.py
new_file: inspirationforge/settings/production.py
old_contents:

```python
from .base import *

DEBUG = False

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# TODO: Add MEDIA_ROOT setting.
#MEDIA_ROOT = get_secret("MEDIA_ROOT")

# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'

# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
```

new_contents:

```python
from .base import *

DEBUG = False

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')

# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'

# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
```

subject: Set MEDIA_ROOT setting for Production.
message: Set MEDIA_ROOT setting for Production.
lang: Python
license: mit
repos:
FarmCodeGary/InspirationForge,FarmCodeGary/InspirationForge,FarmCodeGary/InspirationForge
commit: 2eba1fc80263c11a1b2b5ee1707b19e98a7b2980
old_file: apps/submission/tests/test_models.py
new_file: apps/submission/tests/test_models.py
old_contents: null
new_contents:

```python
from django.test import TestCase
from django.urls import reverse

from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from .. import models


class SubmissionProcessTestCase(TestCase):

    def test_can_create_submission_process(self):

        label = 'Candida datasest 0001'
        qs = models.SubmissionProcess.objects.all()
        self.assertEqual(qs.count(), 0)

        process = models.SubmissionProcess.objects.create(
            label=label,
        )
        self.assertEqual(process.label, label)
        self.assertEqual(qs.count(), 1)

    def test_archive_upload_to(self):

        # Create process and activate tasks
        self.user = PixelerFactory(
            is_active=True,
            is_staff=True,
            is_superuser=True,
        )
        self.client.login(
            username=self.user.username,
            password=PIXELER_PASSWORD,
        )
        self.client.post(
            reverse('submission:start'),
            data={
                'label': 'Candida datasest 0001',
                '_viewflow_activation-started': '2000-01-01',
            },
            follow=True,
        )
        process = models.SubmissionProcess.objects.get()

        filename = 'archive.zip'
        upload_path = models.SubmissionProcess.archive_upload_to(
            process,
            filename
        )
        expected = '{}/submissions/{}/{}'.format(
            process.created_by.id,
            process.id,
            filename
        )
        self.assertEqual(upload_path, expected)
```

subject: Add tests for the SubmissionProcess model
message: Add tests for the SubmissionProcess model
lang: Python
license: bsd-3-clause
repos:
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
commit: 347038c528b07f2553f09daab6915828ab2a6113
old_file: tests/__init__.py
new_file: tests/__init__.py
old_contents:

```python
# -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import test_language, test_services, test_subliminal
import unittest


suite = unittest.TestSuite([test_language.suite(), test_services.suite(),
                            test_subliminal.suite()])


if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
```

new_contents:

```python
# -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from . import test_language, test_services, test_subliminal, test_videos
import unittest


suite = unittest.TestSuite([test_language.suite(), test_services.suite(),
                            test_subliminal.suite(), test_videos.suite()])


if __name__ == '__main__':
    unittest.TextTestRunner().run(suite)
```

subject: Add test_videos to the main test suite
message: Add test_videos to the main test suite
lang: Python
license: mit
repos:
oxan/subliminal,t4lwh/subliminal,h3llrais3r/subliminal,getzze/subliminal,bogdal/subliminal,hpsbranco/subliminal,ravselj/subliminal,nvbn/subliminal,fernandog/subliminal,goll/subliminal,ratoaq2/subliminal,Elettronik/subliminal,neo1691/subliminal,juanmhidalgo/subliminal,SickRage/subliminal,Diaoul/subliminal,kbkailashbagaria/subliminal,ofir123/subliminal,pums974/subliminal
commit: 4c84dafeca9977543824653e354f113b5142d259
old_file: jsonsempai.py
new_file: jsonsempai.py
old_contents:

```python
import imp
import json
import os
import sys


class Dot(dict):
    def __init__(self, d):
        super(dict, self).__init__()
        for k, v in d.iteritems():
            if isinstance(v, dict):
                self[k] = Dot(v)
            else:
                self[k] = v

    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError("'{}'".format(attr))

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__


class SempaiLoader(object):
    def find_module(self, name, path=None):
        for d in sys.path:
            self.json_path = os.path.join(d, '{}.json'.format(name))
            if os.path.isfile(self.json_path):
                return self
        return None

    def load_module(self, name):
        mod = imp.new_module(name)
        mod.__file__ = self.json_path
        mod.__loader__ = self

        try:
            with open(self.json_path) as f:
                d = json.load(f)
        except ValueError:
            raise ImportError(
                '"{}" does not contain valid json.'.format(self.json_path))
        except:
            raise ImportError(
                'Could not open "{}".'.format(self.json_path))

        mod.__dict__.update(d)
        for k, i in mod.__dict__.items():
            if isinstance(i, dict):
                mod.__dict__[k] = Dot(i)

        return mod

sys.meta_path.append(SempaiLoader())
```

new_contents:

```python
import imp
import json
import os
import sys


class Dot(dict):
    def __init__(self, d):
        super(dict, self).__init__()
        for k, v in iter(d.items()):
            if isinstance(v, dict):
                self[k] = Dot(v)
            else:
                self[k] = v

    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError("'{}'".format(attr))

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__


class SempaiLoader(object):
    def find_module(self, name, path=None):
        for d in sys.path:
            self.json_path = os.path.join(d, '{}.json'.format(name))
            if os.path.isfile(self.json_path):
                return self
        return None

    def load_module(self, name):
        mod = imp.new_module(name)
        mod.__file__ = self.json_path
        mod.__loader__ = self

        try:
            with open(self.json_path) as f:
                d = json.load(f)
        except ValueError:
            raise ImportError(
                '"{}" does not contain valid json.'.format(self.json_path))
        except:
            raise ImportError(
                'Could not open "{}".'.format(self.json_path))

        mod.__dict__.update(d)
        for k, i in mod.__dict__.items():
            if isinstance(i, dict):
                mod.__dict__[k] = Dot(i)

        return mod

sys.meta_path.append(SempaiLoader())
```

subject: Fix python 3 use of iteritems
message: Fix python 3 use of iteritems
lang: Python
license: mit
repos:
kragniz/json-sempai
commit: 743f4affcd89aa3d9fd37774e2e5f8e05525cb04
old_file: api/sync_wallet.py
new_file: api/sync_wallet.py
old_contents:

```python
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *

data_dir_root = os.environ.get('DATADIR')

def sync_wallet_response(request_dict):
    if not request_dict.has_key('type'):
        return (None, 'No field type in response dict '+str(request_dict))

    req_type = request_dict['type'][0].upper()
    if req_type == "SYNCWALLET":
        response_data = syncWallets(request_dict['masterWallets'][0])
    else:
        return (None, req_type + ' is not supported')

    response = { 'status': 'OK', 'data': response_data }
    return (json.dumps(response), None)

def syncWallets(master_wallets_json):
    master_wallets = json.loads(master_wallets_json)

    for wallet in master_wallets:
        uuid = wallet['uuid']
        filename = data_dir_root + '/wallets/' + uuid + '.json'
        with open(filename, 'w') as f:
            json.dump(wallet, f)

    return "OK"

def sync_wallet_handler(environ, start_response):
    return general_handler(environ, start_response, sync_wallet_response)
```

new_contents:

```python
import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *

data_dir_root = os.environ.get('DATADIR')

def sync_wallet_response(request_dict):
    if not request_dict.has_key('type'):
        return (None, 'No field type in response dict '+str(request_dict))

    req_type = request_dict['type'][0].upper()
    if req_type == "SYNCWALLET":
        syncWallets(request_dict['masterWallets'][0])
    else:
        return (None, req_type + ' is not supported')

    response = { 'status': 'OK' }
    return (json.dumps(response), None)

def syncWallets(master_wallets_json):
    master_wallets = json.loads(master_wallets_json)

    for wallet in master_wallets:
        uuid = wallet['uuid']
        filename = data_dir_root + '/wallets/' + uuid + '.json'
        with open(filename, 'w') as f:
            json.dump(wallet, f)

    return "OK"

def sync_wallet_handler(environ, start_response):
    return general_handler(environ, start_response, sync_wallet_response)
```

subject: Clean up return value for API
message: Clean up return value for API
lang: Python
license: agpl-3.0
repos:
ripper234/omniwallet,maran/omniwallet,maran/omniwallet,Nevtep/omniwallet,FuzzyBearBTC/omniwallet,FuzzyBearBTC/omniwallet,achamely/omniwallet,curtislacy/omniwallet,habibmasuro/omniwallet,OmniLayer/omniwallet,ripper234/omniwallet,habibmasuro/omniwallet,ripper234/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,curtislacy/omniwallet,OmniLayer/omniwallet,dexX7/omniwallet,arowser/omniwallet,habibmasuro/omniwallet,dexX7/omniwallet,Nevtep/omniwallet,VukDukic/omniwallet,arowser/omniwallet,achamely/omniwallet,FuzzyBearBTC/omniwallet,maran/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,achamely/omniwallet,arowser/omniwallet,VukDukic/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,dexX7/omniwallet,curtislacy/omniwallet
commit: c145b2cc08b3bbf0d2506afb58116e1a0c2dc4fc
old_file: tests/core_tests.py
new_file: tests/core_tests.py
old_contents: null
new_contents:

```python
from graffiti import core
from graffiti import util

def test_schema():
    assert "fn" in core.schema(1)

    fn = lambda x: 1
    assert core.schema(fn) == util.fninfo(fn)

    def t():
        return 1
    t._schema = { "schema": 1 }
    assert core.schema(t) == { "schema": 1 }

def test_dependencies():
    g = {
        "a": util.fninfo(lambda x: 1),
        "b": util.fninfo(lambda y, z: 2),
        "c": util.fninfo(lambda: 3),
        "d": util.fninfo(lambda o=1: o)
    }

    assert core.dependencies(g) == {
        "a": {"x"},
        "b": {"y", "z"},
        "c": set(),
        "d": set()
    }

def test_transitive():
    g = {
        "a": {"b"},
        "b": {"c"},
        "c": {"d"}
    }

    assert core.transitive(g) == {
        "a": {"b", "c", "d"},
        "b": {"c", "d"},
        "c": {"d"}
    }

def test_topological():
    g = {
        "a": {"b", "c", "d"},
        "b": {"c", "d"},
        "c": {"d"},
        "d": {}
    }

    res = core.topological(g)
    assert res.index("d") > res.index("c")
    assert res.index("c") > res.index("b")
    assert res.index("b") > res.index("a")
```

subject: Add tests for core graph functions
message: Add tests for core graph functions
lang: Python
license: mit
repos:
SegFaultAX/graffiti
commit: 08a6dddb866ec53ff45a302d7c163d041bbefe71
old_file: protoplot-test/test_options_resolving.py
new_file: protoplot-test/test_options_resolving.py
old_contents: null
new_contents:

```python
import unittest

from protoplot.engine.item import Item
from protoplot.engine.item_container import ItemContainer

class Series(Item):
    pass

Series.options.register("color", True)
Series.options.register("lineWidth", False)
Series.options.register("lineStyle", False)

class TestOptionsResolving(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testOptionsResolving(self):
        pass

if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
```

subject: Add stub unit test for options resolving
message: Add stub unit test for options resolving
lang: Python
license: agpl-3.0
repos:
deffi/protoplot
commit: 516bebe37212e72362b416bd1d9c87a83726fa5f
old_file: changes/api/cluster_nodes.py
new_file: changes/api/cluster_nodes.py
old_contents:

```python
from __future__ import absolute_import

from datetime import datetime, timedelta
from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
        )

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)
```

new_contents:

```python
from __future__ import absolute_import

from datetime import datetime, timedelta
from flask.ext.restful import reqparse

from changes.api.base import APIView
from changes.models import Cluster, JobStep, Node


class ClusterNodesAPIView(APIView):
    parser = reqparse.RequestParser()
    parser.add_argument('since', type=int, location='args')

    def get(self, cluster_id):
        cluster = Cluster.query.get(cluster_id)
        if cluster is None:
            return '', 404

        queryset = Node.query.filter(
            Node.clusters.contains(cluster),
        ).order_by(Node.label.asc())

        args = self.parser.parse_args()
        if args.since:
            cutoff = datetime.utcnow() - timedelta(days=args.since)
            queryset = queryset.join(
                JobStep, JobStep.node_id == Node.id,
            ).filter(
                JobStep.date_created > cutoff,
            ).group_by(Node)

        return self.paginate(queryset)
```

subject: Enforce ordering on cluster nodes endpoint
message: Enforce ordering on cluster nodes endpoint
lang: Python
license: apache-2.0
repos:
bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes
c5a2167a63516c23390263408fcd2c9a4f654fc8
webcomix/tests/test_comic_spider.py
webcomix/tests/test_comic_spider.py
null
from webcomix.comic_spider import ComicSpider def test_parse_yields_good_page(mocker): mock_response = mocker.patch('scrapy.http.Response') mock_response.urljoin.return_value = "http://xkcd.com/3/" mock_response.url = "http://xkcd.com/2/" mock_selector = mocker.patch('scrapy.selector.SelectorList') mock_response.xpath.return_value = mock_selector mock_selector.extract_first.side_effect = [ '//imgs.xkcd.com/comics/tree_cropped_(1).jpg', 'xkcd.com/3/' ] spider = ComicSpider() result = spider.parse(mock_response) results = list(result) assert len(results) == 2 assert results[0].get( 'url') == "http://imgs.xkcd.com/comics/tree_cropped_(1).jpg" assert results[1].url == "http://xkcd.com/3/" def test_parse_yields_bad_page(mocker): mock_response = mocker.patch('scrapy.http.Response') mock_response.urljoin.return_value = "http://xkcd.com/3/" mock_response.url = "http://xkcd.com/2/" mock_selector = mocker.patch('scrapy.selector.SelectorList') mock_response.xpath.return_value = mock_selector mock_selector.extract_first.side_effect = [None, 'xkcd.com/3/'] spider = ComicSpider() result = spider.parse(mock_response) results = list(result) assert len(results) == 1 assert results[0].url == "http://xkcd.com/3/"
Add tests for the parse method of the spider
Add tests for the parse method of the spider
Python
mit
J-CPelletier/webcomix,J-CPelletier/webcomix,J-CPelletier/WebComicToCBZ
commit: 7127489fc85537722c6216ea6af0005604214bdc
old_file: test/automl/test_models.py
new_file: test/automl/test_models.py
old_contents: null
new_contents:

```python
# -*- encoding: utf-8 -*-
from __future__ import print_function
import unittest

import mock

from autosklearn.automl import AutoML
from autosklearn.util.backend import Backend


class AutoMLStub(object):
    def __init__(self):
        self.__class__ = AutoML


class AutoMlModelsTest(unittest.TestCase):

    def setUp(self):
        self.automl = AutoMLStub()
        self.automl._shared_mode = False
        self.automl._seed = 42
        self.automl._backend = mock.Mock(spec=Backend)
        self.automl._delete_output_directories = lambda: 0

    def test_only_loads_ensemble_models(self):
        identifiers = [(1, 2), (3, 4)]

        models = [ 42 ]
        self.automl._backend.load_ensemble.return_value.identifiers_ \
            = identifiers
        self.automl._backend.load_models_by_identifiers.side_effect \
            = lambda ids: models if ids is identifiers else None

        self.automl._load_models()

        self.assertEqual(models, self.automl.models_)

    def test_loads_all_models_if_no_ensemble(self):
        models = [ 42 ]

        self.automl._backend.load_ensemble.return_value = None
        self.automl._backend.load_all_models.return_value = models

        self.automl._load_models()

        self.assertEqual(models, self.automl.models_)

    def test_raises_if_no_models(self):
        self.automl._backend.load_ensemble.return_value = None
        self.automl._backend.load_all_models.return_value = []

        self.assertRaises(ValueError, self.automl._load_models)
```

subject: Test AutoML usage of Backend to load models
message: Test AutoML usage of Backend to load models
lang: Python
license: bsd-3-clause
repos:
automl/auto-sklearn,automl/auto-sklearn
7127489fc85537722c6216ea6af0005604214bdc
txircd/modules/umode_i.py
txircd/modules/umode_i.py
from txircd.modbase import Mode class InvisibleMode(Mode): def namesListEntry(self, recipient, channel, user, representation): if channel not in recipient.channels and "i" in user.mode: return "" return representation class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): return { "modes": { "uni": InvisibleMode() } } def cleanup(self): self.ircd.removeMode("uni")
from txircd.modbase import Mode class InvisibleMode(Mode): def namesListEntry(self, recipient, channel, user, representation): if channel.name not in recipient.channels and "i" in user.mode: return "" return representation class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): return { "modes": { "uni": InvisibleMode() } } def cleanup(self): self.ircd.removeMode("uni")
Fix interpretation of parameters for names list modification
Fix interpretation of parameters for names list modification
Python
bsd-3-clause
DesertBus/txircd,ElementalAlchemist/txircd,Heufneutje/txircd
commit: 8803f6058255237dff39549426ca6a513a25193c
old_file: website_product_supplier/__openerp__.py
new_file: website_product_supplier/__openerp__.py
old_contents:

```python
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
    'name': "Website Product Supplier",
    'category': 'Website',
    'version': '8.0.1.0.0',
    'depends': [
        'website_sale',
        'website_portal_purchase',
    ],
    'data': [
        'security/ir.model.access.csv',
        'views/product_supplier_view.xml',
        'views/website_portal.xml',
        'views/assets.xml',
        'security/website_product_supplier_security.xml',
    ],
    'qweb': ['static/src/xml/*.xml'],
    'author': 'Antiun Ingeniería S.L., '
              'Incaser Informatica S.L., '
              'Odoo Community Association (OCA)',
    'website': 'http://www.antiun.com',
    'license': 'AGPL-3',
    'installable': True,
}
```

new_contents:

```python
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
    'name': "Website Product Supplier",
    'category': 'Website',
    'version': '8.0.1.0.0',
    'depends': [
        'website_sale',
        'website_portal_purchase',
    ],
    'data': [
        'security/ir.model.access.csv',
        'views/product_supplier_view.xml',
        'views/website_portal.xml',
        'views/assets.xml',
        'security/website_product_supplier_security.xml',
    ],
    'images': [],
    'qweb': ['static/src/xml/*.xml'],
    'author': 'Antiun Ingeniería S.L., '
              'Incaser Informatica S.L., '
              'Odoo Community Association (OCA)',
    'website': 'http://www.antiun.com',
    'license': 'AGPL-3',
    'installable': True,
}
```

subject: Add images key in manifest file
message: [FIX] website_product_supplier: Add images key in manifest file
lang: Python
license: agpl-3.0
repos:
nuobit/website,open-synergy/website,gfcapalbo/website,LasLabs/website,acsone/website,nuobit/website,LasLabs/website,Yajo/website,LasLabs/website,gfcapalbo/website,kaerdsar/website,Yajo/website,nuobit/website,nuobit/website,Yajo/website,gfcapalbo/website,acsone/website,kaerdsar/website,LasLabs/website,open-synergy/website,acsone/website,acsone/website,gfcapalbo/website,open-synergy/website,brain-tec/website,brain-tec/website,open-synergy/website,brain-tec/website,kaerdsar/website,brain-tec/website,Yajo/website
commit: 323d52928fa58196299e1dde0c5b3ef0d1451d9f
old_file: setup.py
new_file: setup.py
old_contents:

```python
from setuptools import find_packages, setup
import sys


if 'install' in sys.argv:
    import webbrowser
    webbrowser.open('https://www.youtube.com/watch?v=NMZcwXh7HDA', new=2,
                    autoraise=True)


setup(
    name='rdalal',
    version='1.0',
    description='Install some sweet Rehan',
    author='Will Kahn-Greene',
    author_email='[email protected]',
    url='https://github.com/willkg/rdalal',
    zip_safe=True,
    packages=find_packages(),
    entry_points="""
        [console_scripts]
        rdalal=rdalal.cmdline:run
    """,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'License :: OSI Approved :: BSD License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX',
        'Operating System :: POSIX :: Linux',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
)
```

new_contents:

```python
from setuptools import find_packages, setup
import sys


if 'install' in sys.argv:
    import webbrowser
    webbrowser.open('https://www.youtube.com/watch?v=NMZcwXh7HDA', new=2,
                    autoraise=True)


setup(
    name='rdalal',
    version='1.1',
    description='Install some sweet Rehan',
    author='Will Kahn-Greene',
    author_email='[email protected]',
    url='https://github.com/willkg/rdalal',
    zip_safe=True,
    packages=find_packages(),
    entry_points="""
        [console_scripts]
        rdalal=rdalal.cmdline:run
    """,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'License :: OSI Approved :: BSD License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX',
        'Operating System :: POSIX :: Linux',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
)
```

subject: Set to 1.1 for new release
message: Set to 1.1 for new release
lang: Python
license: bsd-3-clause
repos:
willkg/rdalal
commit: 6e9e6c0fbba6b1f6e97c40181ec58c55e4980995
old_file: pyipmi/fw.py
new_file: pyipmi/fw.py
old_contents:

```python
"""FW records
"""

class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type,
                                           self.offset, self.size, self.flags)

class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None

class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass

class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass

class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass

class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass

class FWStatus(object):
    """Object to hold firmware operation status"""
    pass

class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass

class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass

class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass
```

new_contents:

```python
"""FW records
"""

class FWInfo(object):
    """Object to hold device-reported SPI flash table"""
    def __str__(self):
        return "%s | %s | %s | %s | %s" % (self.slot, self.type,
                                           self.offset, self.size, self.flags)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (self.slot == other.slot and \
                    self.type == other.type and \
                    self.offset == other.offset and \
                    self.size == other.size and \
                    self.flags == other.flags)
        else:
            return False

class FWDownloadResult(object):
    """Object to hold firmware update results"""
    start_fw_download_failed = None

class FWUploadResult(object):
    """Object to hold firmware retrieve results"""
    pass

class FWActivateResult(object):
    """Object to hold firmware activate results"""
    pass

class FWDeactivateResult(object):
    """Object to hold firmware deactivate results"""
    pass

class FWFlagsResult(object):
    """Object to hold firmware flag command results"""
    pass

class FWStatus(object):
    """Object to hold firmware operation status"""
    pass

class FWCancelResult(object):
    """Object to hold firmware operation cancelation results"""
    pass

class FWCheckResult(object):
    """Object to hold firmware CRC check results"""
    pass

class FWBlowResult(object):
    """Object to hold firmware blow results"""
    pass
```

subject: Add equality operator to FWInfo
message: Add equality operator to FWInfo
lang: Python
license: bsd-3-clause
repos:
Cynerva/pyipmi,emaadmanzoor/pyipmi
commit: 56471d264671b652b4b40619f709dc6b8e02eac1
old_file: dragonflow/db/models/host_route.py
new_file: dragonflow/db/models/host_route.py
old_contents:

```python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import dragonflow.db.field_types as df_fields
import dragonflow.db.model_framework as mf


@mf.construct_nb_db_model
class HostRoute(mf.ModelBase):
    id = None

    destination = df_fields.IpNetworkField(required=True)
    nexthop = df_fields.IpAddressField(required=True)
```

new_contents:

```python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from jsonmodels import models

import dragonflow.db.field_types as df_fields


class HostRoute(models.Base):
    destination = df_fields.IpNetworkField(required=True)
    nexthop = df_fields.IpAddressField(required=True)
```

subject: Change HostRoute to a plain model
message: Change HostRoute to a plain model
  Since HostRoute doesn't have id, store it as a plain db model.
  Change-Id: I3dbb9e5ffa42bf48f47b7010ee6baf470b55e85e
  Partially-Implements: bp refactor-nb-api
lang: Python
license: apache-2.0
repos:
openstack/dragonflow,openstack/dragonflow,openstack/dragonflow
commit: 2dbc4e2aec98aba8a0e307f951b412464db8b078
old_file: della/user_manager/urls.py
new_file: della/user_manager/urls.py
old_contents:

```python
from django.conf.urls import url
from django.contrib.auth import views as auth_views

from .views import (SignupView, UserProfileUpdateView, DrawNamesView,
                    UserProfileDetailView, ActivateView)

urlpatterns = [
    url(r'^login/$', auth_views.login, name='login',
        kwargs={'template_name': 'user_manager/login.html'}),
    url(r'^logout/$', auth_views.logout, name='logout',
        kwargs={'next_page': '/'}),
    url(r'^create/$', SignupView.as_view()),
    url(r'^update/$', UserProfileUpdateView.as_view(), name='update'),
    url(r'^activate/(?P<username>[0-9A-Za-z_]+)-(?P<code>[0-9A-Za-z_:-]+)/$',
        ActivateView.as_view()),
    url(r'^draw-names/$', DrawNamesView.as_view()),
    url(r'^@(?P<username>[a-zA-Z0-9_]+)/$', UserProfileDetailView.as_view(),
        name='user-detail'),
]
```

new_contents:

```python
from django.conf.urls import url
from django.contrib.auth import views as auth_views

from .views import (SignupView, UserProfileUpdateView, DrawNamesView,
                    UserProfileDetailView, ActivateView)

urlpatterns = [
    url(r'^login/$', auth_views.login, name='login',
        kwargs={'template_name': 'user_manager/login.html'}),
    url(r'^logout/$', auth_views.logout, name='logout',
        kwargs={'next_page': '/'}),
    url(r'^create/$', SignupView.as_view()),
    url(r'^update/$', UserProfileUpdateView.as_view(), name='update'),
    url(r'^activate/(?P<username>[0-9A-Za-z_]+)-(?P<code>[0-9A-Za-z_:-]+)/$',
        ActivateView.as_view(), name='activate-user'),
    url(r'^draw-names/$', DrawNamesView.as_view()),
    url(r'^@(?P<username>[a-zA-Z0-9_]+)/$', UserProfileDetailView.as_view(),
        name='user-detail'),
]
```

subject: Add name for url config of `ActivateView`
message: Add name for url config of `ActivateView`
lang: Python
license: mit
repos:
avinassh/della,avinassh/della,avinassh/della
commit: 3279d68859d947f2e618e2770a9fd1b7ce3f26c9
old_file: tests/test_cardxml.py
new_file: tests/test_cardxml.py
old_contents: null
new_contents:

```python
from hearthstone.enums import GameTag, Rarity

import utils


def test_all_tags_known():
    """
    Iterate through the card database and check that all specified GameTags
    are known in hearthstone.enums.GameTag
    """
    unknown_tags = set()
    known_tags = list(GameTag)
    known_rarities = list(Rarity)

    # Check the db loaded correctly
    assert utils.fireplace.cards.db

    for card in utils.fireplace.cards.db.values():
        card_tags = [int(e.attrib["enumID"]) for e in card.xml.findall("./Tag")]
        for tag in card_tags:
            # We have fake tags in fireplace.enums which are always negative
            if tag not in known_tags and tag > 0:
                unknown_tags.add(tag)

        # Test rarities as well (cf. TB_BlingBrawl_Blade1e in 10956...)
        assert card.rarity in known_rarities

    assert not unknown_tags
```

subject: Add a test to verify that all GameTags are known
message: Add a test to verify that all GameTags are known
lang: Python
license: agpl-3.0
repos:
Ragowit/fireplace,smallnamespace/fireplace,jleclanche/fireplace,beheh/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,NightKev/fireplace,Ragowit/fireplace
commit: dcfa7bfa11bea86d831959a217b558d704ece078
old_file: ensemble/ctf/tests/test_manager.py
new_file: ensemble/ctf/tests/test_manager.py
old_contents: null
new_contents:

```python
from contextlib import contextmanager
from os.path import isfile, join
import shutil
import tempfile

from numpy.testing import assert_allclose

from ensemble.ctf.editor import ALPHA_DEFAULT, COLOR_DEFAULT, create_function
from ensemble.ctf.manager import CTF_EXTENSION, CtfManager


@contextmanager
def temp_directory():
    tempdir = tempfile.mkdtemp(suffix='', prefix='tmp', dir=None)
    try:
        yield tempdir
    finally:
        shutil.rmtree(tempdir)


def sample_function_parts():
    return create_function(COLOR_DEFAULT), create_function(ALPHA_DEFAULT)


def test_ctf_manager_add():
    name = 'test_function'
    color_func, alpha_func = sample_function_parts()
    with temp_directory() as root_dir:
        manager = CtfManager.from_directory(root_dir)
        manager.add(name, color_func, alpha_func)
        assert isfile(join(root_dir, name + CTF_EXTENSION))


def test_ctf_manager_get():
    color_func, alpha_func = sample_function_parts()
    with temp_directory() as root_dir:
        manager = CtfManager.from_directory(root_dir)
        manager.add('test', color_func, alpha_func)
        ret_color, ret_alpha = manager.get('test')
        assert_allclose(ret_color.values(), COLOR_DEFAULT)
        assert_allclose(ret_alpha.values(), ALPHA_DEFAULT)


def test_ctf_manager_load():
    color_func, alpha_func = sample_function_parts()
    with temp_directory() as root_dir:
        manager = CtfManager.from_directory(root_dir)
        manager.add('test', color_func, alpha_func)
        del manager
        manager = CtfManager.from_directory(root_dir)
        manager.get('test')
```

subject: Add unit tests for CtfManager.
message: Add unit tests for CtfManager.
lang: Python
license: bsd-3-clause
repos:
dmsurti/ensemble
commit: 330bdcb3bc50acf4c15299a15fb7fdf8b58e152c
old_file: setup.py
new_file: setup.py
old_contents:

```python
#!/usr/bin/env python

import setuptools

setuptools.setup(
    name='bumper',
    version='0.1.1',
    author='Max Zheng',
    author_email='maxzheng.os @t gmail.com',
    description=open('README.rst').read(),
    entry_points={
        'console_scripts': [
            'bump = bumper:bump',
        ],
    },
    install_requires=open('requirements.txt').read(),
    license='MIT',
    setup_requires=['setuptools-git'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Development Utilities',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='bump pin requirements requirements.txt pinned.txt',
)
```

new_contents:

```python
#!/usr/bin/env python

import setuptools

setuptools.setup(
    name='bumper',
    version='0.1.1',
    author='Max Zheng',
    author_email='maxzheng.os @t gmail.com',
    description='Bump (pin/manage) your dependency requirements with ease',
    long_description=open('README.rst').read(),
    url='https://github.com/maxzheng/bumper',
    entry_points={
        'console_scripts': [
            'bump = bumper:bump',
        ],
    },
    install_requires=open('requirements.txt').read(),
    license='MIT',
    setup_requires=['setuptools-git'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='bump pin requirements requirements.txt pinned.txt',
)
```

subject: Add long description / url
message: Add long description / url
lang: Python
license: mit
repos:
maxzheng/bumper
commit: 02c74c5235b8ad821786213a3bcf5f824162454d
old_file: flax/linen/combinators.py
new_file: flax/linen/combinators.py
old_contents:

```python
"""Combinators of modules, such as a Sequential."""

from typing import Callable, Sequence

from flax.linen.module import Module


class Sequential(Module):
  """Applies a linear chain of Modules.

  Meant to be used only for the simple case of fusing together callables where
  the input of a particular module/op is the output of the previous one.

  Modules will be applied in the order that they are passed in the
  constructor.

  The apply() method of Sequential accepts any input and forwards it to the
  first module it contains. It chains the output sequentially to the input of
  the next module and returns the output of the final module.

  Example usage::

    class Foo(nn.Module):
      feature_sizes: Sequence[int]

      @nn.compact
      def __call__(self, x):
        return nn.Sequential([nn.Dense(layer_size, name=f'layers_{idx}')
                              for idx, layer_size
                              in enumerate(self.feature_sizes)])(x)
  """
  layers: Sequence[Callable]

  def __call__(self, *args, **kwargs):
    if not self.layers:
      raise ValueError(f'Empty Sequential module {self.name}.')

    outputs = self.layers[0](*args, **kwargs)
    for layer in self.layers[1:]:
      outputs = layer(outputs)
    return outputs
```

new_contents:

```python
"""Combinators of modules, such as a Sequential."""

from typing import Callable, Sequence

from flax.linen.module import Module


class Sequential(Module):
  """Applies a linear chain of Modules.

  Meant to be used only for the simple case of fusing together callables where
  the input of a particular module/op is the output of the previous one.

  Modules will be applied in the order that they are passed in the
  constructor.

  The apply() method of Sequential accepts any input and forwards it to the
  first module it contains. It chains the output sequentially to the input of
  the next module and returns the output of the final module.

  Example usage::

    class Foo(nn.Module):
      feature_sizes: Sequence[int]

      @nn.compact
      def __call__(self, x):
        return nn.Sequential([nn.Dense(4),
                              nn.relu,
                              nn.Dense(2),
                              nn.log_softmax])(x)
  """
  layers: Sequence[Callable]

  def __call__(self, *args, **kwargs):
    if not self.layers:
      raise ValueError(f'Empty Sequential module {self.name}.')

    outputs = self.layers[0](*args, **kwargs)
    for layer in self.layers[1:]:
      outputs = layer(outputs)
    return outputs
```

subject: Include activations in Sequential example.
message: Include activations in Sequential example.
lang: Python
license: apache-2.0
repos:
google/flax,google/flax
commit: ff618ea57b8f3d71772bcef5f7fecf9eceae4e3d
old_file: scripts/upsrv_schema.py
new_file: scripts/upsrv_schema.py
old_contents:

```python
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved

import sys
import os
import pwd

from conary.server import schema
from conary.lib import cfgtypes
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore

cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()

try:
    cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
    print "Error reading %s" % cnrPath
    sys.exit(1)

db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)

if cfg.repositoryDB[0] == 'sqlite':
    os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
             pwd.getpwnam('apache')[3])
```

new_contents:

```python
#!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved

import sys
import os
import pwd

from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore

cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()

tracelog.initLog(filename='stdout', level=2)

try:
    cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
    print "Error reading %s" % cnrPath
    sys.exit(1)

db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)

if cfg.repositoryDB[0] == 'sqlite':
    os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
             pwd.getpwnam('apache')[3])
```

subject: Set log level to 2 when migrating so there is some indication it is running
message: Set log level to 2 when migrating so there is some indication it is running
lang: Python
license: apache-2.0
repos:
sassoftware/rbm,sassoftware/rbm,sassoftware/rbm
commit: 4f6fde8329b0873f3568ce7153dc64017f5bc0cb
old_file: boto/beanstalk/__init__.py
new_file: boto/beanstalk/__init__.py
old_contents: null
new_contents:

```python
# Copyright (c) 2013 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from layer1 import Layer1
from boto.regioninfo import RegionInfo


RegionData = {
    'us-east-1': 'elasticbeanstalk.us-east-1.amazonaws.com',
    'us-west-1': 'elasticbeanstalk.us-west-1.amazonaws.com',
    'us-west-2': 'elasticbeanstalk.us-west-2.amazonaws.com',
    'sa-east-1': 'elasticbeanstalk.sa-east-1.amazonaws.com',
    'eu-west-1': 'elasticbeanstalk.eu-west-1.amazonaws.com',
    'ap-northeast-1': 'elasticbeanstalk.ap-northeast-1.amazonaws.com',
    'ap-southeast-1': 'elasticbeanstalk.ap-southeast-1.amazonaws.com',
    'ap-southeast-2': 'elasticbeanstalk.ap-southeast-2.amazonaws.com',
}


def regions():
    """
    Get all available regions for the Elastic Beanstalk service.

    :rtype: list
    :return: A list of :class:`boto.RegionInfo` instances
    """
    regions = []
    for region_name in RegionData:
        region = RegionInfo(name=region_name,
                            endpoint=RegionData[region_name],
                            connection_cls=Layer1)
        regions.append(region)
    return regions


def connect_to_region(region_name, **kw_params):
    """
    Given a valid region name, return a
    :class:`boto.beanstalk.Layer1`.

    :param str region_name: The name of the region to connect to.

    :rtype: :class:`boto.beanstalk.Layer1` or ``None``
    :return: A connection to the given region, or None if an invalid
             region name is given
    """
    for region in regions():
        if region.name == region_name:
            return region.connect(**kw_params)
    return None
```

subject: Add connect_to_region to beanstalk module.
message: Add connect_to_region to beanstalk module.
lang: Python
license: mit
repos:
jamesls/boto,appneta/boto,rjschwei/boto,SaranyaKarthikeyan/boto,drbild/boto,lra/boto,weebygames/boto,revmischa/boto,s0enke/boto,pfhayes/boto,drbild/boto,khagler/boto,dimdung/boto,Timus1712/boto,trademob/boto,alex/boto,nishigori/boto,jindongh/boto,janslow/boto,garnaat/boto,alfredodeza/boto,jameslegg/boto,disruptek/boto,dablak/boto,bleib1dj/boto,israelbenatar/boto,kouk/boto,nikhilraog/boto,dablak/boto,darjus-amzn/boto,shaunbrady/boto,elainexmas/boto,TiVoMaker/boto,lochiiconnectivity/boto,shipci/boto,bryx-inc/boto,weka-io/boto,rjschwei/boto,lochiiconnectivity/boto,j-carl/boto,ryansb/boto,alex/boto,serviceagility/boto,stevenbrichards/boto,appneta/boto,FATruden/boto,jamesls/boto,yangchaogit/boto,ddzialak/boto,cyclecomputing/boto,campenberger/boto,tpodowd/boto,vishnugonela/boto,nexusz99/boto,Asana/boto,awatts/boto,varunarya10/boto,jotes/boto,vijaylbais/boto,andresriancho/boto,abridgett/boto,andresriancho/boto,ric03uec/boto,Pretio/boto,ramitsurana/boto,acourtney2015/boto,tpodowd/boto,rosmo/boto,jameslegg/boto,disruptek/boto,podhmo/boto,ekalosak/boto,ocadotechnology/boto,felix-d/boto,kouk/boto,zachmullen/boto,rayluo/boto,clouddocx/boto,zzzirk/boto
commit: b9b095a2a66f79e36bbad1affaeb57b38e20803b
old_file: cwod_site/cwod/models.py
new_file: cwod_site/cwod/models.py
old_contents:

```python
from django.db import models

# Create your models here.

class CongressionalRecordVolume(models.Model):
    congress = models.IntegerField(db_index=True)
    session = models.CharField(max_length=10, db_index=True)
    volume = models.IntegerField()
```

new_contents:

```python
from django.db import models

# Create your models here.

class CongressionalRecordVolume(models.Model):
    congress = models.IntegerField(db_index=True)
    session = models.CharField(max_length=10, db_index=True)
    volume = models.IntegerField()


class NgramDateCount(models.Model):
    """Storing the total number of ngrams per date allows us to show
    the percentage of a given ngram on a given date, mainly for
    graphing purposes.
    """
    n = models.IntegerField(db_index=True)
    date = models.DateField(db_index=True)
    count = models.IntegerField()

    class Meta:
        unique_together = (('n', 'date', ), )
```

subject: Create model for storing total n-gram counts by date
message: Create model for storing total n-gram counts by date
lang: Python
license: bsd-3-clause
repos:
sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,propublica/Capitol-Words
commit: 9e900eb16e92027cfe990a07c5703a6adbb41a09
old_file: drivers/python/wappalyzer.py
new_file: drivers/python/wappalyzer.py
old_contents: null
new_contents:

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys
import PyV8
import urllib
from urlparse import urlparse

try:
    import json
except ImportError:
    import simplejson as json


class Wappalyzer(object):
    def __init__(self, url):
        self.file_dir = os.path.dirname(__file__)

        f = open(os.path.join(self.file_dir, '../../share/apps.json'))
        data = json.loads(f.read())
        f.close()

        self.categories = data['categories']
        self.apps = data['apps']
        self.url = url

    def analyze(self):
        ctxt = PyV8.JSContext()
        ctxt.enter()

        f1 = open(os.path.join(self.file_dir, '../php/js/wappalyzer.js'))
        f2 = open(os.path.join(self.file_dir, '../php/js/driver.js'))
        ctxt.eval(f1.read())
        ctxt.eval(f2.read())
        f1.close()
        f2.close()

        host = urlparse(self.url).hostname
        html = urllib.urlopen(self.url).read()
        data = {'host': host, 'url': self.url, 'html': html, 'headers': {}}
        apps = json.dumps(self.apps)
        categories = json.dumps(self.categories)

        return ctxt.eval("w.apps = %s; w.categories = %s; w.driver.data = %s; w.driver.init();" % (apps, categories, json.dumps(data)))


if __name__ == '__main__':
    try:
        w = Wappalyzer(sys.argv[1])
        print w.analyze()
    except IndexError:
        print ('Usage: python %s <url>' % sys.argv[0])
```

subject: Add python driver (depend on PyV8)
message: Add python driver (depend on PyV8)
lang: Python
license: mit
repos:
WPO-Foundation/Wappalyzer,WPO-Foundation/Wappalyzer,WPO-Foundation/Wappalyzer,AliasIO/wappalyzer,AliasIO/wappalyzer
commit: b5477239d7b1ee9e73265b023355e8e83826ec49
old_file: scrapy_rss/items.py
new_file: scrapy_rss/items.py
old_contents:

```python
# -*- coding: utf-8 -*-

import scrapy
from scrapy.item import BaseItem

from scrapy_rss.elements import *
from scrapy_rss import meta

import six


@six.add_metaclass(meta.ItemMeta)
class RssItem:
    title = TitleElement()
    link = LinkElement()
    description = DescriptionElement()
    author = AuthorElement()
    category = meta.MultipleElements(CategoryElement)
    comments = CommentsElement()
    enclosure = EnclosureElement()
    guid = GuidElement()
    pubDate = PubDateElement()
    source = SourceElement()


class ExtendableItem(scrapy.Item):
    def __setattr__(self, name, value):
        if name in self.fields:
            raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value))
        super(BaseItem, self).__setattr__(name, value)


class RssedItem(ExtendableItem):
    def __init__(self, **kwargs):
        super(RssedItem, self).__init__(**kwargs)
        self.rss = RssItem()
```

new_contents:

```python
# -*- coding: utf-8 -*-

import scrapy
from scrapy.item import BaseItem

from scrapy_rss.elements import *
from scrapy_rss import meta

import six


@six.add_metaclass(meta.ItemMeta)
class RssItem(BaseItem):
    title = TitleElement()
    link = LinkElement()
    description = DescriptionElement()
    author = AuthorElement()
    category = meta.MultipleElements(CategoryElement)
    comments = CommentsElement()
    enclosure = EnclosureElement()
    guid = GuidElement()
    pubDate = PubDateElement()
    source = SourceElement()


class ExtendableItem(scrapy.Item):
    def __setattr__(self, name, value):
        if name in self.fields:
            raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value))
        super(BaseItem, self).__setattr__(name, value)


class RssedItem(ExtendableItem):
    def __init__(self, **kwargs):
        super(RssedItem, self).__init__(**kwargs)
        self.rss = RssItem()
```

subject: Fix RssItem when each scraped item is instance of RssItem
message: Fix RssItem when each scraped item is instance of RssItem
lang: Python
license: bsd-3-clause
repos:
woxcab/scrapy_rss
commit: 01732179f923394bee6e46ae1bbee0f316df8297
old_file: amostra/tests/test_revert.py
new_file: amostra/tests/test_revert.py
old_contents: null
new_contents:

```python
from hypothesis import given, strategies as st
from hypothesis.strategies import text
from hypothesis import settings
import random

alphabet_list = ''
for i in range(26):
    alphabet_list = alphabet_list + chr(97 + i)


@given(names = st.lists(st.text(alphabet=alphabet_list, min_size=1,
                                max_size=4),
                        min_size=3, max_size=4, unique=True))
@settings(max_examples = 10, deadline = 1000)
def test_revert(client_conf, names):
    client, mongo_client = client_conf()
    n = len(names)
    s = client.samples.new(name = names[0])
    for name in names[1:]:
        s.name = name
    num = random.randint(0, n-2)
    revert_target_cursor = mongo_client['tests-amostra'].samples_revisions.find({'revision': num})
    s.revert(num)
    target = next(revert_target_cursor)
    for name, trait in s.traits().items():
        if name is 'revision':
            continue
        else:
            assert getattr(s, name) == target[name]
```

subject: Add a revert function test
message: TST: Add a revert function test
lang: Python
license: bsd-3-clause
repos:
NSLS-II/amostra
commit: ab4983e577b9831b91290976be00917edb9fad6f
old_file: mlox/modules/resources.py
new_file: mlox/modules/resources.py
old_contents:

```python
"""Handle program wide resources (files, images, etc...)"""

import os
import sys
import base64
import tempfile


def unpack_resource(data):
    """Convert base64 encoded data into a file handle, and a temporary file name to access the data"""
    file_handle = tempfile.NamedTemporaryFile()
    file_handle.write(base64.b64decode(data))
    file_handle.seek(0)
    return (file_handle,file_handle.name)

#Paths to resource files
program_path = os.path.realpath(sys.path[0])
resources_path = os.path.join(program_path,"Resources")
translation_file = os.path.join(resources_path,"mlox.msg")
gif_file = os.path.join(resources_path,"mlox.gif")
base_file = os.path.join(program_path,"mlox_base.txt")
user_file = os.path.join(program_path,"mlox_user.txt")

#For the updater
UPDATE_BASE = "mlox-data.7z"
update_file = os.path.join(program_path,UPDATE_BASE)
UPDATE_URL = 'https://sourceforge.net/projects/mlox/files/mlox/' + UPDATE_BASE
```

new_contents:

```python
"""Handle program wide resources (files, images, etc...)"""

import os
import sys
import base64
import tempfile


def unpack_resource(data):
    """Convert base64 encoded data into a file handle, and a temporary file name to access the data"""
    file_handle = tempfile.NamedTemporaryFile()
    file_handle.write(base64.b64decode(data))
    file_handle.seek(0)
    return (file_handle,file_handle.name)

#Paths to resource files
program_path = os.path.realpath(sys.path[0])
resources_path = os.path.join(program_path,"Resources")
translation_file = os.path.join(resources_path,"mlox.msg")
gif_file = os.path.join(resources_path,"mlox.gif")
base_file = os.path.join(program_path,"mlox_base.txt")
user_file = os.path.join(program_path,"mlox_user.txt")

#For the updater
UPDATE_BASE = "mlox-data.7z"
update_file = os.path.join(program_path,UPDATE_BASE)
UPDATE_URL = 'https://svn.code.sf.net/p/mlox/code/trunk/downloads/' + UPDATE_BASE
```

subject: Switch back to using the old SVN update location.
message: Switch back to using the old SVN update location.
  While changing the download location would be nice, this keeps the option
  of putting a final data file that would force users to update.
lang: Python
license: mit
repos:
EmperorArthur/mlox,EmperorArthur/mlox,EmperorArthur/mlox
commit: f18cf3c17e450eb6f8db5288ecf146eff0968a47
old_file: xmt/select.py
new_file: xmt/select.py
old_contents:

```python
from itertools import groupby

from nltk.translate.gleu_score import sentence_gleu as gleu
from nltk.tokenize.toktok import ToktokTokenizer

_tokenize = ToktokTokenizer().tokenize


def select_first(p):
    """
    Return (hypothesis, reference) translation pairs using the first
    realization result per item.
    """
    pairs = []
    rows = p.join('item', 'g-result')
    for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']):
        row = next(group)
        pairs.append((row['g-result:surface'], row['item:i-translation']))
    return pairs


def select_oracle(p):
    """
    Return (hypothesis, reference) translation pairs using the
    realization result per item with the highest GLEU score.
    """
    pairs = []
    rows = p.join('item', 'g-result')
    for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']):
        hrs = ((r['g-result:surface'], r['item:i-translation']) for r in group)
        ranked = [(gleu(_tokenize(r), _tokenize(h)), h, r) for h, r in hrs]
        _, hyp, ref = sorted(ranked, key=lambda r: r[0])[-1]
        pairs.append((hyp, ref))
    return pairs
```

new_contents:

```python
from itertools import groupby

from nltk.translate import bleu_score
from nltk.tokenize.toktok import ToktokTokenizer

_tokenize = ToktokTokenizer().tokenize
_smoother = bleu_score.SmoothingFunction().method3

bleu = bleu_score.sentence_bleu


def select_first(p):
    """
    Return (hypothesis, reference) translation pairs using the first
    realization result per item.
    """
    pairs = []
    rows = p.join('item', 'g-result')
    for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']):
        row = next(group)
        pairs.append((row['g-result:surface'], row['item:i-translation']))
    return pairs


def select_oracle(p):
    """
    Return (hypothesis, reference) translation pairs using the
    realization result per item with the highest GLEU score.
    """
    pairs = []
    rows = p.join('item', 'g-result')
    for i_id, group in groupby(rows, key=lambda row: row['g-result:i-id']):
        scored = []
        for res in group:
            ref = res['item:i-translation']
            hyp = res['g-result:surface']
            scored.append(
                (bleu([_tokenize(ref)], _tokenize(hyp),
                      smoothing_function=_smoother),
                 hyp,
                 ref)
            )
        _, hyp, ref = sorted(scored, key=lambda r: r[0])[-1]
        pairs.append((hyp, ref))
    return pairs
```

subject: Use NIST-BLEU instead of GLEU for oracle.
message: Use NIST-BLEU instead of GLEU for oracle.
lang: Python
license: mit
repos:
goodmami/xmt,goodmami/xmt
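
The oracle in this record ranks candidate realizations with NLTK's smoothed sentence-level BLEU. A minimal, self-contained sketch of that scoring call, with toy sentences (the exact scores do not matter, only their ordering):

from nltk.translate import bleu_score

smoother = bleu_score.SmoothingFunction().method3

reference = "the cat sat on the mat".split()
close = "the cat sat on a mat".split()
far = "mat the on".split()

# sentence_bleu takes a list of reference token lists plus one hypothesis;
# smoothing keeps short or partial matches from collapsing to zero.
print(bleu_score.sentence_bleu([reference], close, smoothing_function=smoother))
print(bleu_score.sentence_bleu([reference], far, smoothing_function=smoother))
# The first score comes out higher, which is all the oracle's max() relies on.
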
01163ce7fc43b4ab2e5b9ab1c5f94556d0509004
examples/tornado/auth_demo.py
examples/tornado/auth_demo.py
from mongrel2.config import * main = Server( uuid="f400bf85-4538-4f7a-8908-67e313d515c2", access_log="/logs/access.log", error_log="/logs/error.log", chroot="./", default_host="localhost", name="test", pid_file="/run/mongrel2.pid", port=6767, hosts = [ Host(name="localhost", routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) }) ] ) commit([main])
from mongrel2.config import * main = Server( uuid="f400bf85-4538-4f7a-8908-67e313d515c2", access_log="/logs/access.log", error_log="/logs/error.log", chroot="./", default_host="localhost", name="test", pid_file="/run/mongrel2.pid", port=6767, hosts = [ Host(name="localhost", routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) }) ] ) commit([main], settings={'limits.buffer_size': 4 * 1024})
Add the settings to the authdemo.
Add the settings to the authdemo.
Python
bsd-3-clause
niedbalski/mongrel2,jdesgats/mongrel2,winks/mongrel2,griffordson/mongrel2,cpick/mongrel2,duaneg/mongrel2,jagguli/mongrel2,mongrel2/mongrel2,issuu/mongrel2,metadave/mongrel2,steamraven/mongrel2,cpick/mongrel2,niedbalski/mongrel2,AustinWise/mongrel2,nmandery/mongrel2,markokr/mongrel2,reshefm/mongrel2,sshirokov/mongrel2,fanout/mongrel2,nickdesaulniers/mongrel2,ralphbean/mongrel2,aidenkeating/mongrel2,moai/mongrel2,jablkopp/mongrel2,jubarajborgohain/mongrel2,dermoth/mongrel2,bashi-bazouk/mongrel2,ameuret/mongrel2,metadave/mongrel2,markokr/mongrel2,AvdN/mongrel2,elo80ka/mongrel2,nickdesaulniers/mongrel2,wayneeseguin/mongrel2,mongrel2/mongrel2,steamraven/mongrel2,rpeterson/mongrel2,reshefm/mongrel2,ralphbean/mongrel2,winks/mongrel2,AlexVPopov/mongrel2,jiffyjeff/mongrel2,msteinert/mongrel2,AlexVPopov/mongrel2,duaneg/mongrel2,xrl/mongrel2,dermoth/mongrel2,bashi-bazouk/mongrel2,apjanke/mongrel2,krakensden/mongrel2,fanout/mongrel2,musl/mongrel2,jasom/mongrel2,Gibheer/mongrel2,jubarajborgohain/mongrel2,jiffyjeff/mongrel2,musl/mongrel2,wayneeseguin/mongrel2,jasom/mongrel2,pjkundert/mongrel2,issuu/mongrel2,bashi-bazouk/mongrel2,cpick/mongrel2,pjkundert/mongrel2,sshirokov/mongrel2,jiffyjeff/mongrel2,jasom/mongrel2,pjkundert/mongrel2,cpick/mongrel2,steamraven/mongrel2,minrk/mongrel2,cpick/mongrel2,jiffyjeff/mongrel2,issuu/mongrel2,yoink00/mongrel2,AlexVPopov/mongrel2,xrl/mongrel2,AlexVPopov/mongrel2,Gibheer/mongrel2,aidenkeating/mongrel2,apjanke/mongrel2,markokr/mongrel2,ameuret/mongrel2,mbj/mongrel2,griffordson/mongrel2,AvdN/mongrel2,AvdN/mongrel2,nmandery/mongrel2,ameuret/mongrel2,aidenkeating/mongrel2,chickenkiller/mongrel2,chickenkiller/mongrel2,fanout/mongrel2,jubarajborgohain/mongrel2,duaneg/mongrel2,winks/mongrel2,mongrel2/mongrel2,msteinert/mongrel2,niedbalski/mongrel2,jiffyjeff/mongrel2,chickenkiller/mongrel2,issuu/mongrel2,chickenkiller/mongrel2,reshefm/mongrel2,chickenkiller/mongrel2,AustinWise/mongrel2
967cf8774a5033f310ca69e7ad86fc79b2628882
infrastructure/aws/trigger-provision.py
infrastructure/aws/trigger-provision.py
# trigger-provision.py <indexer-provision.sh | web-server-provision.sh> import boto3 from datetime import datetime, timedelta import sys import os.path provisioners = sys.argv[1:] ec2 = boto3.resource('ec2') client = boto3.client('ec2') script = '' for provisioner in provisioners: script += open(provisioner).read() + '\n' user_data = '''#!/usr/bin/env bash cat > ~ubuntu/provision.sh <<"FINAL" {script} FINAL chmod +x ~ubuntu/provision.sh sudo -i -u ubuntu ~ubuntu/provision.sh '''.format(script=script) # ubuntu/images/hvm-ssd/ubuntu-xenial-16.04-amd64-server-20160815 (ami-f701cb97) image_id = 'ami-f701cb97' launch_spec = { 'ImageId': image_id, 'KeyName': 'Main Key Pair', 'SecurityGroups': ['indexer'], 'UserData': user_data, 'InstanceType': 'c3.2xlarge', 'BlockDeviceMappings': [] } client.run_instances(MinCount=1, MaxCount=1, **launch_spec)
# trigger-provision.py <indexer-provision.sh | web-server-provision.sh> import boto3 from datetime import datetime, timedelta import sys import os.path provisioners = sys.argv[1:] ec2 = boto3.resource('ec2') client = boto3.client('ec2') script = '' for provisioner in provisioners: script += open(provisioner).read() + '\n' user_data = '''#!/usr/bin/env bash cat > ~ubuntu/provision.sh <<"FINAL" {script} FINAL chmod +x ~ubuntu/provision.sh sudo -i -u ubuntu ~ubuntu/provision.sh '''.format(script=script) # ubuntu/images/hvm-ssd/ubuntu-xenial-16.04-amd64-server-20160815 (ami-f701cb97) image_id = 'ami-f701cb97' launch_spec = { 'ImageId': image_id, 'KeyName': 'Main Key Pair', 'SecurityGroups': ['indexer'], 'UserData': user_data, 'InstanceType': 'c3.2xlarge', 'BlockDeviceMappings': [], 'TagSpecifications': [{ 'ResourceType': 'instance', 'Tags': [{ 'Key': 'provisioner', 'Value': sys.argv[1], }], }], } client.run_instances(MinCount=1, MaxCount=1, **launch_spec)
Tag provisioning instances to make them easier to identify
Tag provisioning instances to make them easier to identify
Python
mpl-2.0
bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox
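
One payoff of the TagSpecifications added in this commit is server-side filtering. A sketch using the standard boto3 describe_instances filter syntax; the tag value here assumes the trigger script was invoked with indexer-provision.sh:

import boto3

client = boto3.client('ec2')

# List instances whose 'provisioner' tag matches a given provisioning script.
response = client.describe_instances(
    Filters=[{'Name': 'tag:provisioner', 'Values': ['indexer-provision.sh']}]
)
for reservation in response['Reservations']:
    for instance in reservation['Instances']:
        print(instance['InstanceId'], instance['State']['Name'])
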
32e70ee06be67cb9058b2da7dc1a714272c6a07a
pyQuantuccia/setup.py
pyQuantuccia/setup.py
import setuptools qu_ext = setuptools.Extension( 'quantuccia', include_dirs=['src/Quantuccia'], sources=['src/pyQuantuccia.cpp'] ) setuptools.setup( name='pyQuantuccia', author='Jack Grahl', author_email='[email protected]', version='0.1.0', packages=['pyQuantuccia'], ext_modules=[qu_ext] )
import setuptools qu_ext = setuptools.Extension( 'quantuccia', include_dirs=['src/Quantuccia'], sources=['src/pyQuantuccia.cpp'] ) setuptools.setup( name='pyQuantuccia', author='Jack Grahl', author_email='[email protected]', version='0.1.0', packages=['pyQuantuccia'], test_suite='tests', ext_modules=[qu_ext] )
Add the location of tests.
Add the location of tests.
Python
bsd-3-clause
jwg4/pyQuantuccia,jwg4/pyQuantuccia
324ce82f25c78bce7f92af52952f036ba48e72e7
astrobin_apps_notifications/utils.py
astrobin_apps_notifications/utils.py
# Python import simplejson import urllib2 # Django from django.apps import apps from django.conf import settings from django.core.exceptions import ImproperlyConfigured # Third party from notification import models as notification from persistent_messages.models import Message def push_notification(recipients, notice_type, data): data.update({'notices_url': settings.ASTROBIN_BASE_URL}) notification.send(recipients, notice_type, data) def get_recent_notifications(user, n = 10): if not user.is_authenticated(): return None notifications = Message.objects.filter(user = user).order_by('-created') if n >= 0: notifications = notifications[:n] return notifications def get_unseen_notifications(user, n = 10): if not user.is_authenticated(): return None notifications =\ Message.objects.filter(user = user, read = False).order_by('-created') if n >= 0: notifications = notifications[:n] return notifications def get_seen_notifications(user, n = 10): if not user.is_authenticated(): return None notifications =\ Message.objects.filter(user = user, read = True).order_by('-created') if n >= 0: notifications = notifications[:n] return notifications
# Python import simplejson import urllib2 # Django from django.apps import apps from django.conf import settings from django.core.exceptions import ImproperlyConfigured # Third party from notification import models as notification from persistent_messages.models import Message def push_notification(recipients, notice_type, data): data.update({'notices_url': settings.ASTROBIN_BASE_URL + '/'}) notification.send(recipients, notice_type, data) def get_recent_notifications(user, n = 10): if not user.is_authenticated(): return None notifications = Message.objects.filter(user = user).order_by('-created') if n >= 0: notifications = notifications[:n] return notifications def get_unseen_notifications(user, n = 10): if not user.is_authenticated(): return None notifications =\ Message.objects.filter(user = user, read = False).order_by('-created') if n >= 0: notifications = notifications[:n] return notifications def get_seen_notifications(user, n = 10): if not user.is_authenticated(): return None notifications =\ Message.objects.filter(user = user, read = True).order_by('-created') if n >= 0: notifications = notifications[:n] return notifications
Revert "Drop extra trailing slash from notices_url"
Revert "Drop extra trailing slash from notices_url" This reverts commit 1eb4d00e005f22ae452ce9d36b9fce69fa9b96f7.
Python
agpl-3.0
astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin
0cfe6707cf02bab74741433dbe7a91b8c5c57f38
cinder/tests/unit/test_fixtures.py
cinder/tests/unit/test_fixtures.py
null
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures as fx from oslo_log import log as logging import testtools from cinder.tests import fixtures class TestLogging(testtools.TestCase): def test_default_logging(self): stdlog = self.useFixture(fixtures.StandardLogging()) root = logging.getLogger() # there should be a null handler as well at DEBUG self.assertEqual(2, len(root.handlers), root.handlers) log = logging.getLogger(__name__) log.info("at info") log.debug("at debug") self.assertIn("at info", stdlog.logger.output) self.assertNotIn("at debug", stdlog.logger.output) # broken debug messages should still explode, even though we # aren't logging them in the regular handler self.assertRaises(TypeError, log.debug, "this is broken %s %s", "foo") # and, ensure that one of the terrible log messages isn't # output at info warn_log = logging.getLogger('migrate.versioning.api') warn_log.info("warn_log at info, should be skipped") warn_log.error("warn_log at error") self.assertIn("warn_log at error", stdlog.logger.output) self.assertNotIn("warn_log at info", stdlog.logger.output) def test_debug_logging(self): self.useFixture(fx.EnvironmentVariable('OS_DEBUG', '1')) stdlog = self.useFixture(fixtures.StandardLogging()) root = logging.getLogger() # there should no longer be a null handler self.assertEqual(1, len(root.handlers), root.handlers) log = logging.getLogger(__name__) log.info("at info") log.debug("at debug") self.assertIn("at info", stdlog.logger.output) self.assertIn("at debug", stdlog.logger.output)
Copy unit tests for StandardLogging fixture from Nova
Copy unit tests for StandardLogging fixture from Nova This comes from commit f96ec4411ce89606cf52211061003c14306dcfa1 in Nova by Sean Dague <[email protected]>. The StandardLogging fixture was already merged into Cinder, this adds the unit tests that were missed when copying over the fixture. Change-Id: I2fbe25ec71138e4b96ff175af72a2a56c1c8f52a Related-Bug: #1551325
Python
apache-2.0
Nexenta/cinder,bswartz/cinder,NetApp/cinder,mahak/cinder,Nexenta/cinder,phenoxim/cinder,Datera/cinder,openstack/cinder,cloudbase/cinder,Hybrid-Cloud/cinder,phenoxim/cinder,NetApp/cinder,Datera/cinder,cloudbase/cinder,j-griffith/cinder,openstack/cinder,mahak/cinder,ge0rgi/cinder,Hybrid-Cloud/cinder,bswartz/cinder,j-griffith/cinder,eharney/cinder,eharney/cinder
9116776bd62e6b7ae7a018fa2c2c0b3964c3fa7d
py/maximum-binary-tree.py
py/maximum-binary-tree.py
null
# Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution(object): def findMax(self, start, end): bit_length = (end - start).bit_length() - 1 d = 1 << bit_length return max(self.SparseTable[bit_length][start], self.SparseTable[bit_length][end - d]) def do_constructMaximumBinaryTree(self, start, end): if start == end: return None v, i = self.findMax(start, end) ret = TreeNode(v) ret.left = self.do_constructMaximumBinaryTree(start, i) ret.right = self.do_constructMaximumBinaryTree(i + 1, end) return ret def constructMaximumBinaryTree(self, nums): """ :type nums: List[int] :rtype: TreeNode """ self.SparseTable = [[(v, i) for i, v in enumerate(nums)]] l = len(nums) t = 1 while t * 2 <= l: prevTable = self.SparseTable[-1] self.SparseTable.append([max(prevTable[i], prevTable[i + t]) for i in xrange(l - t * 2 + 1)]) t *= 2 return self.do_constructMaximumBinaryTree(0, l)
Add py solution for 654. Maximum Binary Tree
Add py solution for 654. Maximum Binary Tree 654. Maximum Binary Tree: https://leetcode.com/problems/maximum-binary-tree/
Python
apache-2.0
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
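
The solution above answers range-maximum queries with a sparse table. A standalone sketch of the table layout and of the two-window query that findMax performs:

# Level k stores, for each start index i, the maximum over a window of
# length 2**k. A range [start, end) of length n is covered by two
# (possibly overlapping) windows of length d = 2**(n.bit_length() - 1).
nums = [3, 2, 1, 6, 0, 5]
level0 = [(v, i) for i, v in enumerate(nums)]               # windows of length 1
level1 = [max(level0[i], level0[i + 1]) for i in range(5)]  # windows of length 2
level2 = [max(level1[i], level1[i + 2]) for i in range(3)]  # windows of length 4

# Maximum over [0, 6): n = 6, d = 4, so combine windows [0, 4) and [2, 6).
print(max(level2[0], level2[2]))  # (6, 3): value 6 at index 3
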
cf52a7c83e1479a99e95ab2125958a67febfccf5
dataviews/__init__.py
dataviews/__init__.py
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plots", "sheetcoords" ]
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__) from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plots", "sheetcoords" ]
Set __version__ using param.Version (commit tagged as 'v0.7')
Set __version__ using param.Version (commit tagged as 'v0.7')
Python
bsd-3-clause
mjabri/holoviews,basnijholt/holoviews,ioam/holoviews,mjabri/holoviews,ioam/holoviews,vascotenner/holoviews,vascotenner/holoviews,ioam/holoviews,basnijholt/holoviews,basnijholt/holoviews,vascotenner/holoviews,mjabri/holoviews
608dc0db688be1dabe3c6ba7647807f6697fcefe
tools/misc/python/test-data-in-out.py
tools/misc/python/test-data-in-out.py
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.) # INPUT input TYPE GENERIC # OUTPUT output # OUTPUT OPTIONAL missing_output.txt import shutil shutil.copyfile('input', 'output')
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.) # INPUT input TYPE GENERIC # OUTPUT output # OUTPUT OPTIONAL missing_output.txt # IMAGE chipster-tools-python import shutil shutil.copyfile('input', 'output')
Test image definition in SADL
Test image definition in SADL
Python
mit
chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools
da3599ac6ed29e750d28834a8c0c0f39e9b57702
src/acquisition/covid_hosp/state_daily/network.py
src/acquisition/covid_hosp/state_daily/network.py
# first party from delphi.epidata.acquisition.covid_hosp.common.network import Network as BaseNetwork class Network(BaseNetwork): DATASET_ID = '823dd0e-c8c4-4206-953e-c6d2f451d6ed' def fetch_metadata(*args, **kwags): """Download and return metadata. See `fetch_metadata_for_dataset`. """ return Network.fetch_metadata_for_dataset( *args, **kwags, dataset_id=Network.DATASET_ID)
# first party from delphi.epidata.acquisition.covid_hosp.common.network import Network as BaseNetwork class Network(BaseNetwork): DATASET_ID = '7823dd0e-c8c4-4206-953e-c6d2f451d6ed' def fetch_metadata(*args, **kwags): """Download and return metadata. See `fetch_metadata_for_dataset`. """ return Network.fetch_metadata_for_dataset( *args, **kwags, dataset_id=Network.DATASET_ID)
Update state daily dataset ID
Update state daily dataset ID
Python
mit
cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata,cmu-delphi/delphi-epidata
0043fe9c8de4d8341afbcea388f472a50017de2c
jiradoc/__main__.py
jiradoc/__main__.py
# ------------------------------------------------------------ # __main__.py # # The main program which expects a jiradoc formatted file to # be passed in as a cmdline option. It reads the file and # parses its content to Story objects. # ------------------------------------------------------------ import argparse import pkg_resources from jiradoc.parser.parser import parser def main(args=None): argparser = argparse.ArgumentParser(description='The JIRAdoc parser') test_file = pkg_resources.resource_filename(__name__, 'data/test.jiradoc') argparser.add_argument('-f', dest='file', default=test_file, help='The jiradoc formatted file') args = argparser.parse_args() with open(args.file) as f: content = f.read() stories = parser.parse(content) for story in stories: print story if __name__ == "__main__": main()
# ------------------------------------------------------------ # __main__.py # # The main program # ------------------------------------------------------------ import argparse import os import pkg_resources import sys from jiradoc.parser.parser import parser as jiradoc_parser def main(args=None): parser = argparse.ArgumentParser(description='A tool that parses a JIRAdoc formatted file and returns a list of ' 'story objects') test_file = pkg_resources.resource_filename(__name__, 'data/test.jiradoc') parser.add_argument('-f', dest='file', default=test_file, help='A .jiradoc file containing sub-tasks to JIRA stories') args = parser.parse_args() filename, ext = os.path.splitext(args.file) if ext != '.jiradoc': print 'Invalid file extension: ' + ext print 'The only valid extension is .jiradoc' sys.exit(1) with open(args.file) as f: content = f.read() stories = jiradoc_parser.parse(content) for story in stories: print story if __name__ == "__main__": main()
Validate that the input file ends with .jiradoc
Validate that the input file ends with .jiradoc
Python
mit
lucianovdveekens/jiradoc
e91079ecab2d324ae5428239bcf792ab149faa4a
jacquard/storage/tests/test_dummy.py
jacquard/storage/tests/test_dummy.py
null
import unittest from jacquard.storage.dummy import DummyStore from jacquard.storage.testing_utils import StorageGauntlet class DummyGauntletTest(StorageGauntlet, unittest.TestCase): def open_storage(self): return DummyStore('')
Cover the dummy storage engine with tests
Cover the dummy storage engine with tests
Python
mit
prophile/jacquard,prophile/jacquard
3ff6b8a2e8eecf48bfe74d5a0b0972e29ace15fd
imagetagger/imagetagger/annotations/admin.py
imagetagger/imagetagger/annotations/admin.py
from django.contrib import admin from .models import Annotation, AnnotationType, Export, Verification, ExportFormat admin.site.register(Annotation) admin.site.register(AnnotationType) admin.site.register(Export) admin.site.register(Verification) admin.site.register(ExportFormat)
from django.contrib import admin from .models import Annotation, AnnotationType, Export, Verification, ExportFormat @admin.register(Annotation) class AnnotationAdmin(admin.ModelAdmin): raw_id_fields = ( 'image', ) @admin.register(Verification) class VerificationAdmin(admin.ModelAdmin): raw_id_fields = ( 'annotation', ) admin.site.register(AnnotationType) admin.site.register(Export) admin.site.register(ExportFormat)
Use raw id fields for annotation and verification foreign keys
Use raw id fields for annotation and verification foreign keys
Python
mit
bit-bots/imagetagger,bit-bots/imagetagger,bit-bots/imagetagger,bit-bots/imagetagger
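
For context on the change above: raw_id_fields swaps Django's default select widget, which preloads every related row, for a plain ID input with a lookup popup. A generic sketch with hypothetical models:

from django.contrib import admin

from myapp.models import Comment  # hypothetical app and model

@admin.register(Comment)
class CommentAdmin(admin.ModelAdmin):
    # Render an ID box plus a search popup instead of loading every
    # related object into a dropdown, which matters for large tables.
    raw_id_fields = ('post',)
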
24cbbd24e6398aa11956ac48282bd907806284c3
genderbot.py
genderbot.py
import re from twitterbot import TwitterBot import wikipedia class Genderbot(TwitterBot): boring_article_regex = (r"municipality|village|town|football|genus|family|" "administrative|district|community|region|hamlet|" "school|actor|mountain|basketball|city|species|film|" "county|located|politician|professional|settlement") def tweet(self): article = self.__random_wikipedia_article() match = re.search(r"\bis [^.?]+", article.content, re.UNICODE) if match: status = self.__format_status(match.group(0), article.url) if self.__is_interesting(status): self.post_tweet(status) def __format_status(self, is_phrase, url): status = 'gender %s' % (is_phrase) if len(status) > 114: status = status[0:113] + '...' return status + ' %s' % (url) def __is_interesting(self, status): boring_match = re.search(Genderbot.boring_article_regex, status, re.UNICODE) return boring_match is None def __random_wikipedia_article(self): random_title = wikipedia.random(pages=1) return wikipedia.page(title=random_title) if __name__ == "__main__": try: Genderbot("CustomGender").tweet() except: pass
import re from twitterbot import TwitterBot import wikipedia class Genderbot(TwitterBot): boring_regex = (r"municipality|village|town|football|genus|family|" "administrative|district|community|region|hamlet|" "school|actor|mountain|basketball|city|species|film|" "county|located|politician|professional|settlement|" "river|lake|province|replaced|origin|band|park|song|" "approximately|north|south|east|west|business") def tweet(self): article = self.__random_wikipedia_article() match = re.search(r"\bis [^.?]+", article.content, re.UNICODE) if match: status = self.__format_status(match.group(0), article.url) if self.__is_interesting(status): self.post_tweet(status) def __format_status(self, is_phrase, url): status = 'gender %s' % (is_phrase) if len(status) > 114: status = status[0:113] + '...' return status + ' %s' % (url) def __is_interesting(self, status): flags = re.UNICODE | re.IGNORECASE boring = re.search(Genderbot.boring_regex, status, flags) return boring is None def __random_wikipedia_article(self): random_title = wikipedia.random(pages=1) return wikipedia.page(title=random_title) if __name__ == "__main__": try: Genderbot("CustomGender").tweet() except: pass
Tweak boring regex to exclude more terms
Tweak boring regex to exclude more terms
Python
mit
DanielleSucher/genderbot
f24fe32329625ec037a9afc8d3bdeed5f41e69a0
scripts/diff_incar.py
scripts/diff_incar.py
null
#!/usr/bin/env python ''' Created on Nov 12, 2011 ''' __author__="Shyue Ping Ong" __copyright__ = "Copyright 2011, The Materials Project" __version__ = "0.1" __maintainer__ = "Shyue Ping Ong" __email__ = "[email protected]" __date__ = "Nov 12, 2011" import sys import itertools from pymatgen.io.vaspio import Incar from pymatgen.util.string_utils import str_aligned filepath1 = sys.argv[1] filepath2 = sys.argv[2] incar1 = Incar.from_file(filepath1) incar2 = Incar.from_file(filepath2) def format_lists(v): if isinstance(v, (tuple, list)): return " ".join([str(i) + "*" + str(len(tuple(group))) for (i,group) in itertools.groupby(v)]) return v d = incar1.diff(incar2) output = [['SAME PARAMS','', '']] output.append(['---------------','', '']) output.extend([(k,format_lists(v),format_lists(v)) for k,v in d['Same'].items() if k != "SYSTEM"]) output.append(['','', '']) output.append(['DIFFERENT PARAM','', '']) output.append(['---------------','', '']) output.extend([(k,format_lists(v['INCAR1']),format_lists(v['INCAR2'])) for k, v in d['Different'].items() if k != "SYSTEM"]) print str_aligned(output, ['', filepath1, filepath2])
Add a script for easy diffing of two Incars.
Add a script for easy diffing of two Incars. Former-commit-id: 998a47c0b96b3024abd82b196f431926cc50847d [formerly 927396038d147b633bee31988cf1e016258c5320] Former-commit-id: 4a8c6bb9cfef4a3a3f6cc211b7ef558a06f523c3
Python
mit
gpetretto/pymatgen,aykol/pymatgen,Bismarrck/pymatgen,czhengsci/pymatgen,johnson1228/pymatgen,Bismarrck/pymatgen,gVallverdu/pymatgen,richardtran415/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,dongsenfo/pymatgen,montoyjh/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,nisse3000/pymatgen,nisse3000/pymatgen,blondegeek/pymatgen,nisse3000/pymatgen,richardtran415/pymatgen,tschaume/pymatgen,fraricci/pymatgen,matk86/pymatgen,mbkumar/pymatgen,czhengsci/pymatgen,dongsenfo/pymatgen,tallakahath/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,Bismarrck/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,vorwerkc/pymatgen,gpetretto/pymatgen,johnson1228/pymatgen,vorwerkc/pymatgen,montoyjh/pymatgen,xhqu1981/pymatgen,mbkumar/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,matk86/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,tschaume/pymatgen,matk86/pymatgen,gpetretto/pymatgen,setten/pymatgen,richardtran415/pymatgen,setten/pymatgen,tallakahath/pymatgen,ndardenne/pymatgen,gVallverdu/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,montoyjh/pymatgen,mbkumar/pymatgen,gmatteo/pymatgen,vorwerkc/pymatgen,richardtran415/pymatgen,xhqu1981/pymatgen,montoyjh/pymatgen,davidwaroquiers/pymatgen,matk86/pymatgen,nisse3000/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,tallakahath/pymatgen,tschaume/pymatgen,gpetretto/pymatgen,xhqu1981/pymatgen,ndardenne/pymatgen,setten/pymatgen,gmatteo/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,setten/pymatgen,aykol/pymatgen,dongsenfo/pymatgen,mbkumar/pymatgen,blondegeek/pymatgen,aykol/pymatgen,johnson1228/pymatgen,tschaume/pymatgen
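
The format_lists helper in this record compresses runs of repeated values with itertools.groupby. A standalone copy with a worked example:

import itertools

def format_lists(v):
    if isinstance(v, (tuple, list)):
        return " ".join(str(i) + "*" + str(len(tuple(group)))
                        for i, group in itertools.groupby(v))
    return v

# Runs of equal values collapse into "value*count" tokens:
print(format_lists([2, 2, 2, 4, 4, 6]))  # 2*3 4*2 6*1
print(format_lists("400"))               # non-sequences pass through unchanged
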
eae949e483e1d30e8c11b662bb07e9d30dcf39c5
lc0049_group_anagrams.py
lc0049_group_anagrams.py
"""Leetcode 49. Group Anagrams Medium URL: https://leetcode.com/problems/group-anagrams/ Given an array of strings, group anagrams together. Example: Input: ["eat", "tea", "tan", "ate", "nat", "bat"], Output: [ ["ate","eat","tea"], ["nat","tan"], ["bat"] ] Note: - All inputs will be in lowercase. - The order of your output does not matter. """ class SolutionSortedDict(object): def groupAnagrams(self, strs): """ :type strs: List[str] :rtype: List[List[str]] Output Limit Exceede. Time complexity: O(n*klogk), where - n is the length of strs, - k is the lenght of the longest string. Space complexity: O(n). """ from collections import defaultdict # Store in a dict with sorted string->string list. anagrams_d = defaultdict(list) for s in strs: # Use sorted string as dict key. k = ''.join(sorted(s)) anagrams_d[k].append(s) return anagrams_d.values() def main(): # Output: # [ # ["ate","eat","tea"], # ["nat","tan"], # ["bat"] # ] strs = ["eat", "tea", "tan", "ate", "nat", "bat"] print SolutionSortedDict().groupAnagrams(strs) if __name__ == '__main__': main()
"""Leetcode 49. Group Anagrams Medium URL: https://leetcode.com/problems/group-anagrams/ Given an array of strings, group anagrams together. Example: Input: ["eat", "tea", "tan", "ate", "nat", "bat"], Output: [ ["ate","eat","tea"], ["nat","tan"], ["bat"] ] Note: - All inputs will be in lowercase. - The order of your output does not matter. """ class SolutionSortedAnagramDict(object): def groupAnagrams(self, strs): """ :type strs: List[str] :rtype: List[List[str]] Output Limit Exceede. Time complexity: O(n*klogk), where - n is the length of strs, - k is the lenght of the longest string. Space complexity: O(n). """ from collections import defaultdict # Store in a dict with sorted string->string list. anagram_lists = defaultdict(list) for s in strs: # Use sorted string as dict key. k = ''.join(sorted(s)) anagram_lists[k].append(s) return anagram_lists.values() def main(): # Output: # [ # ["ate","eat","tea"], # ["nat","tan"], # ["bat"] # ] strs = ["eat", "tea", "tan", "ate", "nat", "bat"] print SolutionSortedAnagramDict().groupAnagrams(strs) if __name__ == '__main__': main()
Revise to anagram_lists and rename to sorted anagram dict class
Revise to anagram_lists and rename to sorted anagram dict class
Python
bsd-2-clause
bowen0701/algorithms_data_structures
e76ca364ab979e309d34ff458ef2629145a52ce2
magnum/db/sqlalchemy/alembic/versions/a1136d335540_add_docker_storage_driver_column.py
magnum/db/sqlalchemy/alembic/versions/a1136d335540_add_docker_storage_driver_column.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add docker storage driver column Revision ID: a1136d335540 Revises: d072f58ab240 Create Date: 2016-03-07 19:00:28.738486 """ # revision identifiers, used by Alembic. revision = 'a1136d335540' down_revision = 'd072f58ab240' from alembic import op import sqlalchemy as sa docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay', name='docker_storage_driver') def upgrade(): op.add_column('baymodel', sa.Column('docker_storage_driver', docker_storage_driver_enum, nullable=True))
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add docker storage driver column Revision ID: a1136d335540 Revises: d072f58ab240 Create Date: 2016-03-07 19:00:28.738486 """ # revision identifiers, used by Alembic. revision = 'a1136d335540' down_revision = 'd072f58ab240' from alembic import op import sqlalchemy as sa docker_storage_driver_enum = sa.Enum('devicemapper', 'overlay', name='docker_storage_driver') def upgrade(): docker_storage_driver_enum.create(op.get_bind(), checkfirst=True) op.add_column('baymodel', sa.Column('docker_storage_driver', docker_storage_driver_enum, nullable=True))
Fix for enum type docker_storage_driver
Fix for enum type docker_storage_driver Create enum type "docker_storage_driver" for migration This is fixing oslo_db.exception.DBError: (psycopg2.ProgrammingError) type "docker_storage_driver" does not exist Closes-Bug: #1609776 Change-Id: I92d427e90bd73b4114d8688d3761cabac450fc9d
Python
apache-2.0
openstack/magnum,openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum
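
The fix works because PostgreSQL requires a standalone enum type to exist before a column can use it, and SQLAlchemy only auto-creates it in some code paths. A symmetric downgrade, which the original migration does not define, might look like this sketch:

def downgrade():
    op.drop_column('baymodel', 'docker_storage_driver')
    # Mirror the explicit create: remove the standalone enum type as well.
    docker_storage_driver_enum.drop(op.get_bind(), checkfirst=True)
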
379e99a672537776ac0e160999967b5efce29305
tweepy/media.py
tweepy/media.py
# Tweepy # Copyright 2009-2021 Joshua Roesslein # See LICENSE for details. from tweepy.mixins import DataMapping class Media(DataMapping): __slots__ = ( "data", "media_key", "type", "duration_ms", "height", "non_public_metrics", "organic_metrics", "preview_image_url", "promoted_metrics", "public_metrics", "width" ) def __init__(self, data): self.data = data self.media_key = data["media_key"] self.type = data["type"] self.duration_ms = data.get("duration_ms") self.height = data.get("height") self.non_public_metrics = data.get("non_public_metrics") self.organic_metrics = data.get("organic_metrics") self.preview_image_url = data.get("preview_image_url") self.promoted_metrics = data.get("promoted_metrics") self.public_metrics = data.get("public_metrics") self.width = data.get("width") def __eq__(self, other): if isinstance(other, self.__class__): return self.media_key == other.media_key return NotImplemented def __hash__(self): return hash(self.media_key) def __repr__(self): return f"<Media media_key={self.media_key} type={self.type}>"
# Tweepy # Copyright 2009-2021 Joshua Roesslein # See LICENSE for details. from tweepy.mixins import DataMapping class Media(DataMapping): __slots__ = ( "data", "media_key", "type", "duration_ms", "height", "non_public_metrics", "organic_metrics", "preview_image_url", "promoted_metrics", "public_metrics", "width", "alt_text" ) def __init__(self, data): self.data = data self.media_key = data["media_key"] self.type = data["type"] self.duration_ms = data.get("duration_ms") self.height = data.get("height") self.non_public_metrics = data.get("non_public_metrics") self.organic_metrics = data.get("organic_metrics") self.preview_image_url = data.get("preview_image_url") self.promoted_metrics = data.get("promoted_metrics") self.public_metrics = data.get("public_metrics") self.width = data.get("width") self.alt_text = data.get("alt_text") def __eq__(self, other): if isinstance(other, self.__class__): return self.media_key == other.media_key return NotImplemented def __hash__(self): return hash(self.media_key) def __repr__(self): return f"<Media media_key={self.media_key} type={self.type}>"
Add alt_text field for Media
Add alt_text field for Media
Python
mit
svven/tweepy,tweepy/tweepy
052392da7980c4f4e2e86cd8eb65da5b91d3547b
CodeFights/differentSymbolsNaive.py
CodeFights/differentSymbolsNaive.py
null
#!/usr/local/bin/python # Code Fights Different Symbols Naive Problem from collections import Counter def differentSymbolsNaive(s): return len(Counter(s)) def main(): tests = [ ["cabca", 3], ["aba", 2] ] for t in tests: res = differentSymbolsNaive(t[0]) ans = t[1] if ans == res: print("PASSED: differentSymbolsNaive({}) returned {}" .format(t[0], res)) else: print(("FAILED: differentSymbolsNaive({}) returned {}," "answer: {}").format(t[0], res, ans)) if __name__ == '__main__': main()
Solve Code Fights different symbols naive problem
Solve Code Fights different symbols naive problem
Python
mit
HKuz/Test_Code
921d95a432316b2b76aadabcc3fc90fbf90376aa
simphony/testing/abc_container_check.py
simphony/testing/abc_container_check.py
null
import abc from functools import partial from simphony.testing.utils import ( create_data_container, compare_data_containers) from simphony.core.data_container import DataContainer from simphony.core.cuba import CUBA class ContainerCheck(object): __metaclass__ = abc.ABCMeta def setUp(self): self.addTypeEqualityFunc( DataContainer, partial(compare_data_containers, testcase=self)) self.container = self.container_factory(u'foo') self.data = create_data_container() self.container.data = DataContainer(self.data) @abc.abstractmethod def container_factory(self, name): """ Create and return the container object """ def test_name(self): self.assertEqual(self.container.name, u'foo') def test_rename(self): container = self.container container.name = u'bar' self.assertEqual(container.name, u'bar') def test_data(self): self.assertEqual(self.container.data, self.data) self.assertIsNot(self.container.data, self.data) def test_update_data(self): container = self.container data = container.data data[CUBA.TEMPERATURE] = 23.4 self.assertNotEqual(container.data, data) self.assertEqual(container.data, self.data) container.data = data self.assertEqual(container.data, data) self.assertIsNot(container.data, data)
Add a basic test template for high level CUDS containers
Add a basic test template for high level CUDS containers
Python
bsd-2-clause
simphony/simphony-common
90d1a40175675b2950cb41b85434b522d6e21c4d
mass/cli.py
mass/cli.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() def init(): utils.register_domain() utils.register_workflow_type() utils.register_activity_type() @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python: # built-in modules # 3rd-party modules import click # local modules from mass.monitor.app import app from mass.scheduler.swf import utils from mass.scheduler.swf import SWFWorker @click.group() def cli(): pass @cli.command() @click.option('-d', '--domain', help='Amazon SWF Domain.') @click.option('-r', '--region', help='Amazon Region.') def init(domain, region): utils.register_domain(domain, region) utils.register_workflow_type(domain, region) utils.register_activity_type(domain, region) @cli.group() def worker(): pass @cli.group() def job(): pass @cli.group() def monitor(): pass @worker.command('start') def worker_start(): worker = SWFWorker() worker.start() @job.command('submit') @click.option('-j', '--json', help='Job Description in JSON.') @click.option('-a', '--alfscript', help='Job Description in alfscript.') def job_submit(json_script, alf_script): pass @monitor.command('start') def monitor_start(): monitor = app.run(debug=True) cli.add_command(init) cli.add_command(worker) cli.add_command(job) cli.add_command(monitor)
Add arguments --domain and --region to mass init.
Add arguments --domain and --region to mass init.
Python
apache-2.0
badboy99tw/mass,KKBOX/mass,badboy99tw/mass,badboy99tw/mass,KKBOX/mass,KKBOX/mass
fee11dbff232216726516eea6c8bf7645fdef1a7
pyxif/__init__.py
pyxif/__init__.py
from ._remove import remove from ._load_and_dump import load, dump, ZerothIFD, ExifIFD, GPSIFD from ._transplant import transplant from ._insert import insert try: from ._thumbnail import thumbnail except ImportError: print("'thumbnail' function depends on PIL or Pillow.") VERSION = '0.4.7'
from ._remove import remove from ._load_and_dump import load, dump, ZerothIFD, ExifIFD, GPSIFD from ._transplant import transplant from ._insert import insert try: from ._thumbnail import thumbnail except ImportError: print("'thumbnail' function depends on PIL or Pillow.") VERSION = '0.4.6'
Revert "up version to 0.4.7."
Revert "up version to 0.4.7." This reverts commit 9b1177d4a56070092faa89778911d11c70efdc54.
Python
mit
hMatoba/Piexif
8e065cb164b4ec1a9aafd7f448555707a875fab9
aggregator/espn_fc.py
aggregator/espn_fc.py
null
import datetime from aggregator import exceptions from aggregator.base import Aggregator, Article, InvalidArticle, make_soup EXCLUDE_IF_IN_TITLE = ['LIVE:', 'WATCH:', 'LISTEN:'] class ESPNFC(Aggregator): base_url = 'http://www.espnfc.com/?country-view=www&lang-view=en' source = 'ESPN FC' def extract(self): soup = make_soup(ESPNFC.base_url) divs = soup.find('div', {'alt': ' TOP STORIES '}) divs = iter(divs.find_all('div', {'class': 'grid-item-content'})) articles = (self.crawl(div) for div in divs) return list(article for article in articles if article is not None) def crawl(self, tag): try: anchor = tag.find('a', {'class': 'common-link'}) url = self.get_url(anchor) title = self.get_title(anchor) if any(exclude in title for exclude in EXCLUDE_IF_IN_TITLE): return None date_published = self.get_date_published(tag) author = self.get_author(tag) return Article(ESPNFC.source, title, url, author, date_published) except exceptions.WebCrawlException as e: return InvalidArticle(ESPNFC.source, e) def get_author(self, tag): try: author = tag.find('span', {'class': 'author byline'}) return author.text.strip() except AttributeError as e: raise exceptions.AuthorNotFoundException def get_date_published(self, tag): try: date_published = tag.find('time')['datetime'] date_published = date_published.split('T')[0] date_published = datetime.datetime.strptime(date_published, '%Y-%m-%d').date() return date_published except (IndexError, AttributeError, ValueError, TypeError): raise exceptions.DatePublishedNotFoundException def get_title(self, tag): try: return tag.text.strip() except AttributeError as e: raise exceptions.TitleNotFoundException def get_url(self, tag): try: url = tag['href'] url = url.replace('.us', '.com') return url except (KeyError, AttributeError, TypeError): raise exceptions.UrlNotFoundException if __name__ == '__main__': espn_fc = ESPNFC() print(espn_fc.extract())
Implement web scraping functionality for ESPN FC
Implement web scraping functionality for ESPN FC
Python
apache-2.0
footynews/fn_backend
ed9a02a769a64edae6aa0e4a06acf7a8fe31cd89
scripts/create_windows_virtenv.py
scripts/create_windows_virtenv.py
null
#!/usr/bin/env python import platform import shutil import subprocess import sys from os.path import dirname, isdir, join as join_path file_path = dirname(__file__) requirements_path = '/'.join(file_path.split('/')[:-1]) + "/requirements" root_path = file_path.split('/')[:-2] root_path = '/'.join(root_path) class VirtualEnvironmentBuilder(object): def __init__(self, virt_env_name): self.virt_env_name = virt_env_name @property def virt_env_path(self): return join_path(root_path, self.virt_env_name) def clean_build(self): self.delete_env() self.build() def build(self): # Create a fresh virtual environment if it doesn't exist self.create_venv() try: print(requirements_path) self.run_in_venv('pip', ['install', '-r', requirements_path]) except Exception: print("Error installing requirements!") self.delete_env() def create_venv(self): if isdir(self.virt_env_path): return print(self.virt_env_path) try: subprocess.check_call([sys.executable, '-m', 'virtualenv', self.virt_env_path, '--no-site-packages']) except Exception: print("Something is wrong!") self.delete_env() if isdir(self.virt_env_name): print("Environment {} created".format(self.virt_env_path)) def delete_env(self): print("Deleting env!") try: if isdir(self.virt_env_path): shutil.rmtree(self.virt_env_path) except Exception: print("Could not delete environment!") def run_in_venv(self, cmd, args): virtual_env_bin_path = self.virt_env_path if platform.system() == 'Windows': cmd += '.exe' virtual_env_bin_path += r'\Scripts' else: virtual_env_bin_path += r'/bin' full_cmd = join_path(virtual_env_bin_path, cmd) print(full_cmd) subprocess.check_call([full_cmd] + args) if __name__ == '__main__': builder = VirtualEnvironmentBuilder('hack33-virtenv') builder.build()
Create virtual builder for windows OS
Create virtual builder for windows OS
Python
mit
Rositsazz/hack33,Rositsazz/hack33,Rositsazz/hack33,Rositsazz/hack33
522edf619f1bebf855f24da6f84c90b10f866745
slack_to_habitica/views.py
slack_to_habitica/views.py
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '<%s says> %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '[%s says] %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
Use square brackets instead of angle brackets in messages, as the latter are getting stripped out
Use square brackets instead of angle brackets in messages, as the latter are getting stripped out
Python
mit
niteshpatel/habitica-slack
d52f59911929eda6b8c0c42837ae9c19b9e133e4
twokenize_py/align.py
twokenize_py/align.py
"""Aligner for texts and their segmentations. """ from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals __all__ = ['AlignmentFailed', 'Aligner'] class AlignmentFailed(Exception): pass class Aligner(object): """Align a text with its tokenization. """ def align(self, text, tokens): """Align text with its tokeniation. Parameters ---------- text : str Text. tokens : list of str Tokenization of ``text``. Returns ------- spans : list of tuple List of (``onset``, ``offset``) pairs, where ``spans[i]`` gives the onseta and offset in characters of ``tokens[i]`` relative to the beginning of ``text`` (0-indexed). """ spans = [] bi = 0 for token in tokens: try: token_len = len(token) token_bi = bi + txt[bi:].index(token) token_ei = token_bi + token_len - 1 spans.append([token_bi, token_ei]) bi = token_ei + 1 except ValueError: raise AlignmentFailed(token) return spans
"""Aligner for texts and their segmentations. """ from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals __all__ = ['AlignmentFailed', 'Aligner'] class AlignmentFailed(Exception): pass class Aligner(object): """Align a text with its tokenization. """ def align(self, text, tokens): """Align text with its tokeniation. Parameters ---------- text : str Text. tokens : list of str Tokenization of ``text``. Returns ------- spans : list of tuple List of (``onset``, ``offset``) pairs, where ``spans[i]`` gives the onseta and offset in characters of ``tokens[i]`` relative to the beginning of ``text`` (0-indexed). """ spans = [] bi = 0 for token in tokens: try: token_len = len(token) token_bi = bi + text[bi:].index(token) token_ei = token_bi + token_len - 1 spans.append([token_bi, token_ei]) bi = token_ei + 1 except ValueError: raise AlignmentFailed(token) return spans
Fix typo in variable name.
BUG: Fix typo in variable name.
Python
apache-2.0
nryant/twokenize_py
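
A worked example of the span arithmetic in Aligner.align, assuming the corrected class from the new version above is in scope:

aligner = Aligner()
print(aligner.align("the cat sat", ["the", "cat", "sat"]))
# [[0, 2], [4, 6], [8, 10]]: inclusive (onset, offset) character spans

# A token that never occurs in the text raises AlignmentFailed:
try:
    aligner.align("the cat sat", ["dog"])
except AlignmentFailed as exc:
    print("no alignment for", exc)
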
00cbac852e83eb1f3ddc03ed70ad32494f16fdbf
caslogging.py
caslogging.py
""" file: caslogging.py author: Ben Grawi <[email protected]> date: October 2013 description: Sets up the logging information for the CAS Reader """ from config import config import logging as root_logging # Set up the logger logger = root_logging.getLogger() logger.setLevel(root_logging.INFO) logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s') logging_file_handler = root_logging.FileHandler(config['logging']['filename']) logging_file_handler.setLevel(root_logging.INFO) logging_file_handler.setFormatter(logger_format) logger.addHandler(logging_file_handler) logging_stream_handler = root_logging.StreamHandler() logging_stream_handler.setLevel(root_logging.INFO) logging_stream_handler.setFormatter(logger_format) logger.addHandler(logging_stream_handler) logging = root_logging
""" file: caslogging.py author: Ben Grawi <[email protected]> date: October 2013 description: Sets up the logging information for the CAS Reader """ from config import config import logging as root_logging # Set up the logger logger = root_logging.getLogger() logger.setLevel(root_logging.INFO) logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S') logging_file_handler = root_logging.FileHandler(config['logging_system']['filename']) logging_file_handler.setLevel(root_logging.INFO) logging_file_handler.setFormatter(logger_format) logger.addHandler(logging_file_handler) logging_stream_handler = root_logging.StreamHandler() logging_stream_handler.setLevel(root_logging.INFO) logging_stream_handler.setFormatter(logger_format) logger.addHandler(logging_stream_handler) logging = root_logging
Fix of the logging system exception
Fix of the logging system exception Added a format to the date for the logging system. '%Y-%m-%d %H:%M:%S’. Fixed an exception opening the logging file because the variable name was not written correctly.
Python
mit
bumper-app/bumper-bianca,bumper-app/bumper-bianca
3df4cc086bf6c85eebc12094cc3ca459bd2bcd3d
project/members/tests/test_application.py
project/members/tests/test_application.py
null
# -*- coding: utf-8 -*- import pytest from members.tests.fixtures.memberlikes import MembershipApplicationFactory from members.tests.fixtures.types import MemberTypeFactory from members.models import Member @pytest.mark.django_db def test_application_approve(): mtypes = [MemberTypeFactory(label='Normal member')] application = MembershipApplicationFactory() email = application.email application.approve(set_mtypes=mtypes) Member.objects.get(email=email)
Add unit test for programmatic application and approval
Add unit test for programmatic application and approval
Python
mit
HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,jautero/asylum,rambo/asylum,jautero/asylum,hacklab-fi/asylum,rambo/asylum,HelsinkiHacklab/asylum,rambo/asylum,jautero/asylum,jautero/asylum,hacklab-fi/asylum,hacklab-fi/asylum,rambo/asylum,HelsinkiHacklab/asylum
41fd6e8aae4044520a2e44d590c005dd71150c0c
web/attempts/migrations/0008_add_submission_date.py
web/attempts/migrations/0008_add_submission_date.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = subquery.submission_date FROM ( SELECT user_id, part_id, max(history_date) AS submission_date FROM attempts_historicalattempt GROUP BY user_id, part_id ) AS subquery WHERE attempts_attempt.user_id = subquery.user_id AND attempts_attempt.part_id = subquery.part_id ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.5 on 2017-05-09 09:24 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('attempts', '0007_auto_20161004_0927'), ] operations = [ migrations.AddField( model_name='attempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.AddField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(null=True), ), migrations.RunSQL( 'UPDATE attempts_historicalattempt SET submission_date = history_date' ), migrations.RunSQL( '''UPDATE attempts_attempt SET submission_date = ( SELECT max(history_date) FROM attempts_historicalattempt WHERE attempts_attempt.user_id = user_id AND attempts_attempt.part_id = part_id ) ''' ), migrations.AlterField( model_name='attempt', name='submission_date', field=models.DateTimeField(auto_now=True), ), migrations.AlterField( model_name='historicalattempt', name='submission_date', field=models.DateTimeField(blank=True, editable=False), ), ]
Revert "Revert "Make migration SQLite compatible""
Revert "Revert "Make migration SQLite compatible"" This reverts commit b16016994f20945a8a2bbb63b9cb920d856ab66f.
Python
agpl-3.0
ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo
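
The rewritten RunSQL works on SQLite because it replaces PostgreSQL's UPDATE ... FROM join syntax with a correlated subquery, which both databases accept. A runnable miniature of the same pattern, with toy table names:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE attempt (id INTEGER PRIMARY KEY, submission_date TEXT);
    CREATE TABLE history (attempt_id INTEGER, history_date TEXT);
    INSERT INTO attempt VALUES (1, NULL);
    INSERT INTO history VALUES (1, '2017-01-01'), (1, '2017-05-09');
""")
# One correlated subquery per target row instead of UPDATE ... FROM:
conn.execute("""
    UPDATE attempt
    SET submission_date = (
        SELECT max(history_date) FROM history
        WHERE history.attempt_id = attempt.id
    )
""")
print(conn.execute("SELECT submission_date FROM attempt").fetchone())
# ('2017-05-09',)
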
15cb279724a646368066591e81467e1b26d61938
examples/charts/file/steps.py
examples/charts/file/steps.py
from bokeh.charts import Step, show, output_file # build a dataset where multiple columns measure the same thing data = dict(python=[2, 3, 7, 5, 26, 221, 44, 233, 254, 265, 266, 267, 120, 111], pypy=[12, 33, 47, 15, 126, 121, 144, 233, 254, 225, 226, 267, 110, 130], jython=[22, 43, 10, 25, 26, 101, 114, 203, 194, 215, 201, 227, 139, 160], test=['foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar'] ) # create a line chart where each column of measures receives a unique color and dash style line = Step(data, y=['python', 'pypy', 'jython'], dash=['python', 'pypy', 'jython'], color=['python', 'pypy', 'jython'], title="Interpreter Sample Data", ylabel='Duration', legend=True) output_file("steps.html") show(line)
""" This example uses the U.S. postage rate per ounce for stamps and postcards. Source: https://en.wikipedia.org/wiki/History_of_United_States_postage_rates """ from bokeh.charts import Step, show, output_file # build a dataset where multiple columns measure the same thing data = dict(stamp=[ .33, .33, .34, .37, .37, .37, .37, .39, .41, .42, .44, .44, .44, .45, .46, .49, .49], postcard=[ .20, .20, .21, .23, .23, .23, .23, .24, .26, .27, .28, .28, .29, .32, .33, .34, .35], ) # create a line chart where each column of measures receives a unique color and dash style line = Step(data, y=['stamp', 'postcard'], dash=['stamp', 'postcard'], color=['stamp', 'postcard'], title="U.S. Postage Rates (1999-2015)", ylabel='Rate per ounce', legend=True) output_file("steps.html") show(line)
Change step example to plot US postage rates
Change step example to plot US postage rates
Python
bsd-3-clause
ptitjano/bokeh,timsnyder/bokeh,draperjames/bokeh,percyfal/bokeh,justacec/bokeh,clairetang6/bokeh,philippjfr/bokeh,ericmjl/bokeh,rs2/bokeh,azjps/bokeh,DuCorey/bokeh,clairetang6/bokeh,draperjames/bokeh,clairetang6/bokeh,DuCorey/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,bokeh/bokeh,aiguofer/bokeh,rs2/bokeh,aavanian/bokeh,stonebig/bokeh,schoolie/bokeh,msarahan/bokeh,msarahan/bokeh,jakirkham/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,stonebig/bokeh,azjps/bokeh,aavanian/bokeh,jakirkham/bokeh,ptitjano/bokeh,bokeh/bokeh,azjps/bokeh,ptitjano/bokeh,msarahan/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,phobson/bokeh,schoolie/bokeh,timsnyder/bokeh,azjps/bokeh,quasiben/bokeh,percyfal/bokeh,ericmjl/bokeh,jakirkham/bokeh,timsnyder/bokeh,phobson/bokeh,justacec/bokeh,mindriot101/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,ericmjl/bokeh,quasiben/bokeh,dennisobrien/bokeh,philippjfr/bokeh,clairetang6/bokeh,ptitjano/bokeh,ericmjl/bokeh,quasiben/bokeh,aiguofer/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,dennisobrien/bokeh,phobson/bokeh,dennisobrien/bokeh,aavanian/bokeh,stonebig/bokeh,percyfal/bokeh,aiguofer/bokeh,aiguofer/bokeh,jakirkham/bokeh,rs2/bokeh,draperjames/bokeh,ericmjl/bokeh,bokeh/bokeh,dennisobrien/bokeh,schoolie/bokeh,bokeh/bokeh,KasperPRasmussen/bokeh,percyfal/bokeh,aavanian/bokeh,phobson/bokeh,justacec/bokeh,mindriot101/bokeh,DuCorey/bokeh,stonebig/bokeh,bokeh/bokeh,philippjfr/bokeh,philippjfr/bokeh,DuCorey/bokeh,rs2/bokeh,mindriot101/bokeh,timsnyder/bokeh,DuCorey/bokeh,dennisobrien/bokeh,ptitjano/bokeh,percyfal/bokeh,msarahan/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,mindriot101/bokeh,aiguofer/bokeh,philippjfr/bokeh
7ac7583d714b15bb3e24bba1c5972bd1d0c235f2
python/receive.py
python/receive.py
null
#!/usr/bin/env python

import pika

# Host in which RabbitMQ is running.
HOST = 'localhost'

# Name of the queue.
QUEUE = 'pages'


# Specifies what to do after consuming messages from RabbitMQ.
def handler(channel, method, properties, body):
    print '-> Handled: [%s]' % body


# Getting the connection using pika.
# Creating the channel.
connection = pika.BlockingConnection(pika.ConnectionParameters(host=HOST))
channel = connection.channel()

print '* Handling messages...'

# The consumption is defined.
# Notice the 'handler' as first argument.
channel.basic_consume(handler, queue=QUEUE, no_ack=True)

# Starting the consumption.
channel.start_consuming()
Add consumer written in Python
Add consumer written in Python
Python
apache-2.0
jovannypcg/rabbitmq_usage,jovannypcg/rabbitmq_usage
6fc0c3884c38448956273d99a57e0c758ecbc658
crmapp/marketing/views.py
crmapp/marketing/views.py
from django.shortcuts import render

# Create your views here.
from django.views.generic.base import TemplateView


class HomePage(TemplateView):
    """
    Because our needs are so simple, all we have to do is assign one
    value; template_name. The home.html file will be created in the
    next lesson.
    """
    template_name = 'marketing/home.html'
Create the Home Page > Create the Home Page View
Create the Home Page > Create the Home Page View
Python
mit
tabdon/crmeasyapp,tabdon/crmeasyapp,deenaariff/Django
a0f030cd03d28d97924a3277722d7a51cf3a3e92
cms/test_utils/project/extensionapp/models.py
cms/test_utils/project/extensionapp/models.py
# -*- coding: utf-8 -*-
from cms.extensions import PageExtension, TitleExtension
from cms.extensions.extension_pool import extension_pool
from django.db import models


class MyPageExtension(PageExtension):
    extra = models.CharField(blank=True, default='', max_length=255)

extension_pool.register(MyPageExtension)


class MyTitleExtension(TitleExtension):
    extra_title = models.CharField(blank=True, default='', max_length=255)

extension_pool.register(MyTitleExtension)
# -*- coding: utf-8 -*-
from cms.extensions import PageExtension, TitleExtension
from cms.extensions.extension_pool import extension_pool
from django.contrib.auth.models import User
from django.db import models


class MyPageExtension(PageExtension):
    extra = models.CharField(blank=True, default='', max_length=255)
    favorite_users = models.ManyToManyField(User, blank=True, null=True)

    def copy_relations(self, other, language):
        for favorite_user in other.favorite_users.all():
            favorite_user.pk = None
            favorite_user.mypageextension = self
            favorite_user.save()

extension_pool.register(MyPageExtension)


class MyTitleExtension(TitleExtension):
    extra_title = models.CharField(blank=True, default='', max_length=255)

extension_pool.register(MyTitleExtension)
Update extension app to include an M2M
Update extension app to include an M2M
Python
bsd-3-clause
kk9599/django-cms,jrclaramunt/django-cms,farhaadila/django-cms,FinalAngel/django-cms,leture/django-cms,yakky/django-cms,wuzhihui1123/django-cms,czpython/django-cms,jproffitt/django-cms,astagi/django-cms,DylannCordel/django-cms,evildmp/django-cms,jrclaramunt/django-cms,SachaMPS/django-cms,netzkolchose/django-cms,donce/django-cms,bittner/django-cms,jeffreylu9/django-cms,cyberintruder/django-cms,takeshineshiro/django-cms,Vegasvikk/django-cms,nostalgiaz/django-cms,kk9599/django-cms,rryan/django-cms,rscnt/django-cms,SmithsonianEnterprises/django-cms,donce/django-cms,sephii/django-cms,jsma/django-cms,SmithsonianEnterprises/django-cms,farhaadila/django-cms,wuzhihui1123/django-cms,czpython/django-cms,jproffitt/django-cms,FinalAngel/django-cms,jeffreylu9/django-cms,cyberintruder/django-cms,SachaMPS/django-cms,Vegasvikk/django-cms,nostalgiaz/django-cms,netzkolchose/django-cms,jeffreylu9/django-cms,jainayush975/zulip,bluesea/zulip
6854f889e38f565acb80c52a74df09730e0f7e45
uitools/notifications/linux.py
uitools/notifications/linux.py
from gi.repository import GLib, Notify as LibNotify


class Notification(object):

    def __init__(self, title, message, subtitle=None, sticky=False):
        self.title = title
        self.subtitle = subtitle
        self.message = message
        self.sticky = sticky
        self._sent = False

    def send(self):

        # see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
        # see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html

        # Can check LibNotify.get_server_caps() for a list of capabilities.
        print 'capabilities', LibNotify.get_server_caps()

        self._notification = notification = LibNotify.Notification.new(
            self.title,
            self.message,
            'folder-new'
        )

        # If this is "default", then it is the default action for clicking the notification.
        notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
        notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
        notification.connect('closed', _on_action)

        notification.set_timeout(5000) # 5s
        notification.show()
        # NOTE: This object NEEDS to be held onto for the callback to work.
from gi.repository import GLib, Notify as LibNotify

DEV = False


class Notification(object):

    def __init__(self, title, message, subtitle=None, sticky=False):
        self.title = title
        self.subtitle = subtitle
        self.message = message
        self.sticky = sticky
        self._sent = False

    def send(self):

        # see: https://developer.gnome.org/libnotify/0.7/libnotify-notify.html
        # see: https://developer.gnome.org/libnotify/0.7/NotifyNotification.html

        LibNotify.init('com.westernx.uitools.notifications')

        self._notification = notification = LibNotify.Notification.new(
            self.title,
            self.message,
            'dialog-information'
        )

        if DEV:
            # Can check LibNotify.get_server_caps() for a list of capabilities.
            print 'capabilities', LibNotify.get_server_caps()

            # If this is "default", then it is the default action for clicking the notification.
            notification.add_action('default', 'Default Action', _on_action, 'on_action_payload')
            notification.add_action('not_default', 'Another Action', _on_action, 'another_payload')
            notification.connect('closed', _on_action)

        notification.set_timeout(5000) # 5s
        notification.show()
        # NOTE: This object NEEDS to be held onto for the callback to work.
Clean up for production Linux use
Clean up for production Linux use
Python
bsd-3-clause
westernx/uitools
cb5591228408ccd22cfb3df9106d4ecdeac295a9
nose2/plugins/collect.py
nose2/plugins/collect.py
""" This plugin implements :func:`startTestRun`, setting a test executor (``event.executeTests``) that just collects tests without executing them. To do so it calls result.startTest, result.addSuccess and result.stopTest for ech test, without calling the test itself. """ from nose2.events import Plugin from nose2.compat import unittest __unittest = True class CollectOnly(Plugin): """Collect but don't run tests""" configSection = 'collect-only' commandLineSwitch = (None, 'collect-only', 'Collect and output test names; do not run any tests') _mpmode = False def registerInSubprocess(self, event): event.pluginClasses.append(self.__class__) self._mpmode = True def startTestRun(self, event): """Replace ``event.executeTests``""" if self._mpmode: return event.executeTests = self.collectTests def startSubprocess(self, event): event.executeTests = self.collectTests def collectTests(self, suite, result): """Collect tests, but don't run them""" for test in suite: if isinstance(test, unittest.BaseTestSuite): self.collectTests(test, result) continue result.startTest(test) result.addSuccess(test) result.stopTest(test)
""" This plugin implements :func:`startTestRun`, setting a test executor (``event.executeTests``) that just collects tests without executing them. To do so it calls result.startTest, result.addSuccess and result.stopTest for each test, without calling the test itself. """ from nose2.events import Plugin from nose2.compat import unittest __unittest = True class CollectOnly(Plugin): """Collect but don't run tests""" configSection = 'collect-only' commandLineSwitch = (None, 'collect-only', 'Collect and output test names; do not run any tests') _mpmode = False def registerInSubprocess(self, event): event.pluginClasses.append(self.__class__) self._mpmode = True def startTestRun(self, event): """Replace ``event.executeTests``""" if self._mpmode: return event.executeTests = self.collectTests def startSubprocess(self, event): event.executeTests = self.collectTests def collectTests(self, suite, result): """Collect tests, but don't run them""" for test in suite: if isinstance(test, unittest.BaseTestSuite): self.collectTests(test, result) continue result.startTest(test) result.addSuccess(test) result.stopTest(test)
Fix typo "ech" -> "each"
Fix typo "ech" -> "each"
Python
bsd-2-clause
ptthiem/nose2,ptthiem/nose2
cc89c5222ec7f6d6f95b5efdce3958b3ca33814e
mica/archive/tests/test_aca_dark_cal.py
mica/archive/tests/test_aca_dark_cal.py
null
""" Basic functionality and regression tests for ACA dark cal module. """ import numpy as np from ..aca_dark import dark_cal def test_date_to_dark_id(): assert dark_cal.date_to_dark_id('2011-01-15T12:00:00') == '2011015' def test_dark_id_to_date(): assert dark_cal.dark_id_to_date('2011015') == '2011:015' def test_dark_temp_scale(): scale = dark_cal.dark_temp_scale(-10., -14) assert np.allclose(scale, 0.70) def test_get_dark_cal_id(): assert dark_cal.get_dark_cal_id('2007:008', 'nearest') == '2007006' assert dark_cal.get_dark_cal_id('2007:008', 'before') == '2007006' assert dark_cal.get_dark_cal_id('2007:008', 'after') == '2007069' def test_get_dark_cal_image(): image = dark_cal.get_dark_cal_image('2007:008') assert image.shape == (1024, 1024) def test_get_dark_cal_props(): props = dark_cal.get_dark_cal_props('2007:008') assert len(props['replicas']) == 5 assert props['start'] == '2007:006:01:56:46.817' props = dark_cal.get_dark_cal_props('2007:008', include_image=True) assert len(props['replicas']) == 5 assert props['start'] == '2007:006:01:56:46.817' assert props['image'].shape == (1024, 1024) def test_get_dark_cal_props_table(): props = dark_cal.get_dark_cal_props_table('2007:001', '2008:001') assert np.allclose(props['eb'], [24.6, 25.89, 51.13, 1.9])
Add basic functionality and regression tests for ACA dark cal module
Add basic functionality and regression tests for ACA dark cal module
Python
bsd-3-clause
sot/mica,sot/mica
eaed2b0b37c042f1dbaf5c60163022e2e2605486
setup.py
setup.py
#!/usr/bin/env python
# encoding: utf-8
"""
setup.py

Setup the Keyring Lib for Python.
"""

import sys
from distutils.core import setup, Extension
from extensions import get_extensions

setup(name = 'keyring',
      version = "0.5.1",
      description = "Store and access your passwords safely.",
      url = "http://home.python-keyring.org/",
      keywords = "keyring Keychain GnomeKeyring Kwallet password storage",
      maintainer = "Kang Zhang",
      maintainer_email = "[email protected]",
      license="PSF",
      long_description = open('README.txt').read() + open('CHANGES.txt').read(),
      platforms = ["Many"],
      packages = ['keyring', 'keyring.tests', 'keyring.util', 'keyring.backends'],
      ext_modules = get_extensions()
      )
#!/usr/bin/env python
# encoding: utf-8
"""
setup.py

Setup the Keyring Lib for Python.
"""

import sys
from distutils.core import setup, Extension
from extensions import get_extensions

setup(name = 'keyring',
      version = "0.5.1",
      description = "Store and access your passwords safely.",
      url = "http://home.python-keyring.org/",
      keywords = "keyring Keychain GnomeKeyring Kwallet password storage",
      maintainer = "Kang Zhang",
      maintainer_email = "[email protected]",
      license="PSF",
      long_description = open('README').read() + open('CHANGES.txt').read(),
      platforms = ["Many"],
      packages = ['keyring', 'keyring.tests', 'keyring.util', 'keyring.backends'],
      ext_modules = get_extensions()
      )
Use the new README file, which has been renamed in 4ec717c11604
Use the new README file, which has been renamed in 4ec717c11604
Python
mit
jaraco/keyring
3fb4d7b630fb7a4b34dcc4e1b72947e61f73a80f
TestData/download_test_data.py
TestData/download_test_data.py
null
def set_test_db():
    from sys import path
    path.insert(0, "..")
    from MyEdgarDb import get_list_sec_filings, get_cik_ticker_lookup_db, lookup_cik_ticker

    get_list_sec_filings (7, 'test_idx.db')
    get_cik_ticker_lookup_db ('test_idx.db')


def download_test_data():
    import sqlite3
    from datetime import datetime
    import pandas as pd

    testDir = "..\\TestData\\"
    testTickers = {
        "AAPL": [datetime(2014, 8, 1), datetime(2018, 8, 1)],
        "ACLS": [datetime(2014, 8, 31), datetime(2018, 8, 31)],
        "ADSK": [datetime(2014, 4, 15), datetime(2018, 4, 15)],
        "ALEX": [datetime(2015, 12, 31), datetime(2019, 12, 31)],
        "MMM": [datetime(2015, 7, 1), datetime(2019, 7, 1)],
        "NRP": [datetime(2015, 12, 31), datetime(2019, 12, 31)],
        "NVDA": [datetime(2015, 12, 31), datetime(2019, 12, 31)]
    }

    conn3 = sqlite3.connect('test_idx.db')
    cursor = conn3.cursor()
    for ticker in testTickers:
        #cursor.execute('''SELECT * FROM idx WHERE Symbol=?;''', ("ABBV",))
        cursor.execute('''SELECT * FROM cik_ticker_name WHERE ticker=?;''',(ticker,))
        res = cursor.fetchall()
        print(res)
        cursor.execute('''SELECT * FROM idx WHERE cik=?;''', (res[0][0],))
        recs = cursor.fetchall()
        print(len(recs))
        names = list(map(lambda x: x[0], cursor.description))
        #print(names)
        df = pd.DataFrame(data=recs, columns=names)
        df['date'] = pd.to_datetime(df['date'])
        beginDate = testTickers[ticker][0]
        endDate = testTickers[ticker][1]
        df1 = df[(df.date >= beginDate) & (df.date <= endDate)]
        ## Sort by date in descending order (most recent is first)
        df1.sort_values(by=['date'], inplace=True, ascending=False)
        df1[df1.type == "10-Q"].to_csv(testDir+ticker.lower()+"_all_10qs.csv", index=None)
        df1[df1.type == "10-K"].to_csv(testDir+ticker.lower()+"_all_10ks.csv", index=None)
    conn3.close()


if __name__ == "__main__":
    #set_test_db()
    download_test_data()
Create script to download requisite test URLs.
Create script to download requisite test URLs.
Python
agpl-3.0
cielling/jupyternbs
ca97a29dded7278b40785fe88b5e8c9ceb542d86
urllib3/util/wait.py
urllib3/util/wait.py
from .selectors import (
    HAS_SELECT,
    DefaultSelector,
    EVENT_READ,
    EVENT_WRITE
)


def _wait_for_io_events(socks, events, timeout=None):
    """ Waits for IO events to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be interacted with immediately. """
    if not HAS_SELECT:
        raise ValueError('Platform does not have a selector')
    if not isinstance(socks, list):
        # Probably just a single socket.
        if hasattr(socks, "fileno"):
            socks = [socks]
        # Otherwise it might be a non-list iterable.
        else:
            socks = list(socks)
    selector = DefaultSelector()
    for sock in socks:
        selector.register(sock, events)
    return [key[0].fileobj for key in
            selector.select(timeout) if key[1] & events]


def wait_for_read(socks, timeout=None):
    """ Waits for reading to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be read from immediately. """
    return _wait_for_io_events(socks, EVENT_READ, timeout)


def wait_for_write(socks, timeout=None):
    """ Waits for writing to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be written to immediately. """
    return _wait_for_io_events(socks, EVENT_WRITE, timeout)
from .selectors import (
    HAS_SELECT,
    DefaultSelector,
    EVENT_READ,
    EVENT_WRITE
)


def _wait_for_io_events(socks, events, timeout=None):
    """ Waits for IO events to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be interacted with immediately. """
    if not HAS_SELECT:
        raise ValueError('Platform does not have a selector')
    if not isinstance(socks, list):
        # Probably just a single socket.
        if hasattr(socks, "fileno"):
            socks = [socks]
        # Otherwise it might be a non-list iterable.
        else:
            socks = list(socks)
    with DefaultSelector() as selector:
        for sock in socks:
            selector.register(sock, events)
        return [key[0].fileobj for key in
                selector.select(timeout) if key[1] & events]


def wait_for_read(socks, timeout=None):
    """ Waits for reading to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be read from immediately. """
    return _wait_for_io_events(socks, EVENT_READ, timeout)


def wait_for_write(socks, timeout=None):
    """ Waits for writing to be available from a list of sockets
    or optionally a single socket if passed in. Returns a list of
    sockets that can be written to immediately. """
    return _wait_for_io_events(socks, EVENT_WRITE, timeout)
Use DefaultSelector as context manager.
Use DefaultSelector as context manager.
Python
mit
sigmavirus24/urllib3,Lukasa/urllib3,Disassem/urllib3,Lukasa/urllib3,Disassem/urllib3,urllib3/urllib3,urllib3/urllib3,sigmavirus24/urllib3
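The context-manager form in the record above matters because a selector owns file descriptors: the with-block guarantees close() runs even if register() or select() raises. A minimal, runnable sketch of the same pattern using the stdlib selectors module (the socketpair is purely illustrative):

import selectors
import socket


def wait_readable(sock, timeout=1.0):
    # __exit__ calls selector.close(), so the underlying epoll/kqueue fd
    # is released even when register() or select() raises.
    with selectors.DefaultSelector() as selector:
        selector.register(sock, selectors.EVENT_READ)
        return [key.fileobj for key, events in selector.select(timeout)]


a, b = socket.socketpair()
b.send(b'x')
print(wait_readable(a))  # [<socket ...>] -- a has data waiting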
5bb90727efb62525995caad3b52fd588d8b08298
pregnancy/urls.py
pregnancy/urls.py
from django.conf.urls import patterns, include, url

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

import contractions.views

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'pregnancy.views.home', name='home'),
    # url(r'^pregnancy/', include('pregnancy.foo.urls')),
    url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
    url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
    url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
    url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
    url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
    url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
)
from django.conf.urls import patterns, include, url

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

import contractions.views

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'pregnancy.views.home', name='home'),
    # url(r'^pregnancy/', include('pregnancy.foo.urls')),
    url(r'^$', contractions.views.ContractionList.as_view(), name='ContractionList'),
    url(r'^contractions/$', contractions.views.ContractionList.as_view(), name='ContractionList'),
    url(r'^update_intensity/(?P<pk>\d+)/$', contractions.views.UpdateIntensity.as_view(), name='UpdateIntensity'),
    url(r'^update_intensity2/(?P<pk>\d+)/$', contractions.views.UpdateIntensity2.as_view(), name='UpdateIntensity2'),
    url(r'^ContractionListTable/$', contractions.views.ContractionListTable.as_view(), name='ContractionListTable'),
    url(r'^StartContraction/$', contractions.views.StartContraction.as_view(), name='StartContraction'),
    url(r'^StopContraction/(?P<pk>\d+)/$', contractions.views.StopContraction.as_view(), name='StopContraction'),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
)
Update url to point / to the contractions app
Update url to point / to the contractions app
Python
bsd-2-clause
dreinhold/pregnancy,dreinhold/pregnancy,dreinhold/pregnancy
20b0e705fe6eedb05a94a3e9cb978b65a525fe91
conanfile.py
conanfile.py
from conans import ConanFile
from conans.tools import download, unzip
import os

VERSION = "0.0.2"


class SanitizeTargetCMakeConan(ConanFile):
    name = "sanitize-target-cmake"
    version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
    generators = "cmake"
    requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
                "cmake-multi-targets/master@smspillaz/cmake-multi-targets",
                "tooling-cmake-util/master@smspillaz/tooling-cmake-util",
                "cmake-unit/master@smspillaz/cmake-unit",
                "sanitizers-cmake/0.0.1@smspillaz/sanitizers-cmake")
    url = "http://github.com/polysquare/sanitize-target-cmake"
    license = "MIT"

    def source(self):
        zip_name = "sanitize-target-cmake.zip"
        download("https://github.com/polysquare/"
                 "sanitize-target-cmake/archive/{version}.zip"
                 "".format(version="v" + VERSION),
                 zip_name)
        unzip(zip_name)
        os.unlink(zip_name)

    def package(self):
        self.copy(pattern="*.cmake",
                  dst="cmake/sanitize-target-cmake",
                  src="sanitize-target-cmake-" + VERSION,
                  keep_path=True)
from conans import ConanFile
from conans.tools import download, unzip
import os

VERSION = "0.0.3"


class SanitizeTargetCMakeConan(ConanFile):
    name = "sanitize-target-cmake"
    version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
    generators = "cmake"
    requires = ("cmake-include-guard/master@smspillaz/cmake-include-guard",
                "cmake-multi-targets/master@smspillaz/cmake-multi-targets",
                "tooling-cmake-util/master@smspillaz/tooling-cmake-util",
                "cmake-unit/master@smspillaz/cmake-unit",
                "sanitizers-cmake/0.0.1@smspillaz/sanitizers-cmake")
    url = "http://github.com/polysquare/sanitize-target-cmake"
    license = "MIT"

    def source(self):
        zip_name = "sanitize-target-cmake.zip"
        download("https://github.com/polysquare/"
                 "sanitize-target-cmake/archive/{version}.zip"
                 "".format(version="v" + VERSION),
                 zip_name)
        unzip(zip_name)
        os.unlink(zip_name)

    def package(self):
        self.copy(pattern="*.cmake",
                  dst="cmake/sanitize-target-cmake",
                  src="sanitize-target-cmake-" + VERSION,
                  keep_path=True)
Bump version: 0.0.2 -> 0.0.3
Bump version: 0.0.2 -> 0.0.3 [ci skip]
Python
mit
polysquare/sanitize-target-cmake
4130c082ae3008365c854ad65c4510cb04dfbf27
webcrawler.py
webcrawler.py
import requests

from bs4 import BeautifulSoup


def parse_html(html, **kwargs):
    parsed_html = BeautifulSoup(html, 'lxml')
    headline = parsed_html.body.find('h1')
    paragraph = None

    # Parse Paragraph
    content_container = parsed_html.body.find(
        'div', attrs={'id': 'bodyContent'}
    )
    for p in content_container.findAll('p'):
        if not p.findParents('table'):
            paragraph = p
            break

    # Parse Image
    infobox = parsed_html.body.find('table', attrs={'class': 'infobox'})
    image = infobox.find('img') if infobox else None

    return {
        'headline': headline.text.strip() if headline else '',
        'paragraph': paragraph.text.strip() if paragraph else '',
        'image_url': image.attrs.get('src') if image else '',
    }


def crawl(url, **kwargs):
    response = requests.get(url)
    response.raise_for_status()

    data = parse_html(response.content, **kwargs)
    # TODOs: Persist data
    return data
import re

import requests

from bs4 import BeautifulSoup


def parse_html(html, **kwargs):
    is_wikipedia_page = kwargs.get('is_wikipedia_page')
    parsed_html = BeautifulSoup(html, 'html.parser')
    headline = parsed_html.body.find('h1')
    paragraph = None

    if is_wikipedia_page:
        # Parse Paragraph
        content_container = parsed_html.body.find(
            'div', attrs={'id': 'bodyContent'}
        )
        for p in content_container.findAll('p'):
            if not p.findParents('table'):
                paragraph = p
                break

        # Parse Image
        infobox = parsed_html.body.find('table', attrs={'class': 'infobox'})
        image = infobox.find('img') if infobox else None
    else:
        content_container_class = kwargs.get('content_container_class')
        image_container_class = kwargs.get('image_container_class')
        if not all([
            content_container_class,
            image_container_class
        ]):
            return

        content_container = parsed_html.body.find('div', attrs={'class': content_container_class})
        paragraph = content_container.find('p')

        image_container = parsed_html.body.find('div', attrs={'class': image_container_class})
        image = image_container.find('img')

    return {
        'headline': headline.text.strip() if headline else '',
        'paragraph': paragraph.text.strip() if paragraph else '',
        'image_url': image.attrs.get('src') if image else '',
    }


def crawl(url, **kwargs):
    response = requests.get(url)
    response.raise_for_status()

    is_wikipedia_page = re.compile(r'.*(wikipedia.org)').match(url) is not None
    if is_wikipedia_page:
        kwargs.update({
            'is_wikipedia_page': is_wikipedia_page
        })

    data = parse_html(response.content, **kwargs)
    # TODOs: Persist data
    return data
Add support for generic pages
Add support for generic pages
Python
mit
alamasfu10/webcrawler
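A hypothetical usage sketch for the module in the record above, assuming it is saved as webcrawler.py; the generic-page URL and CSS class names are made up for illustration:

from webcrawler import crawl

# Wikipedia pages need no extra hints; the URL match in crawl() flips
# them into the bodyContent/infobox parsing branch.
wiki = crawl('https://en.wikipedia.org/wiki/Web_crawler')
print(wiki['headline'])

# Generic pages must name the containers to look in.
generic = crawl(
    'https://example.com/article',            # placeholder URL
    content_container_class='article-body',   # assumed class name
    image_container_class='article-image',    # assumed class name
)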
41e0ea623baaff22ed5f436ad563edf52b762bcc
Main.py
Main.py
"""Main Module of PDF Splitter""" import argparse import os from PyPDF2 import PdfFileWriter from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages parser = \ argparse.ArgumentParser( description='Split all the pages of multiple PDF files in a directory by document number' ) parser.add_argument( 'directory', metavar='PATH', type=str, help='path to a directory' ) def width_greater_than_height(page): box = page.mediaBox return box.getWidth() > box.getHeight() if __name__ == '__main__': args = parser.parse_args() all_pdf_files = all_pdf_files_in_directory(args.directory) opened_files = map(lambda path: open(path, 'rb'), all_pdf_files) all_pages = concat_pdf_pages(opened_files) for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1): pdf_writer = PdfFileWriter() map(pdf_writer.addPage, pages) output_filename = '{0:05}.pdf'.format(idx) with open(output_filename, 'wb') as output_file: pdf_writer.write(output_file) output_file.flush() os.fsync(output_file.fileno()) map(lambda f: f.close, opened_files)
"""Main Module of PDF Splitter""" import argparse import os from PyPDF2 import PdfFileWriter from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages parser = \ argparse.ArgumentParser( description='Split all the pages of multiple PDF files in a directory by document number' ) parser.add_argument( 'directory', metavar='PATH', type=str, help='path to a directory' ) def width_greater_than_height(page): box = page.mediaBox return box.getWidth() > box.getHeight() if __name__ == '__main__': args = parser.parse_args() directory = args.directory all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)] opened_files = map(lambda path: open(path, 'rb'), all_pdf_files) all_pages = concat_pdf_pages(opened_files) for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1): pdf_writer = PdfFileWriter() map(pdf_writer.addPage, pages) output_filename = '{0:05}.pdf'.format(idx) with open(output_filename, 'wb') as output_file: pdf_writer.write(output_file) output_file.flush() os.fsync(output_file.fileno()) map(lambda f: f.close, opened_files)
Fix bug where only PDF files in current directory can be found
Fix bug where only PDF files in current directory can be found
Python
mit
shunghsiyu/pdf-processor
2ee763ae1e4564a57692cb7161f99daab4ae77b7
cookiecutter/main.py
cookiecutter/main.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
cookiecutter.main
-----------------

Main entry point for the `cookiecutter` command.

The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""

import argparse
import os

from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone


def main():
    """ Entry point for the package, as defined in setup.py. """

    # Get command line input/output arguments
    parser = argparse.ArgumentParser(
        description='Create a project from a Cookiecutter project template.'
    )
    parser.add_argument(
        'input_dir',
        help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
    )
    args = parser.parse_args()

    # If it's a git repo, clone and prompt
    if args.input_dir.endswith('.git'):
        repo_dir = git_clone(args.input_dir)
        project_template = find_template(repo_dir)
        os.chdir(repo_dir)
    else:
        project_template = args.input_dir

    # Create project from local context and project template.
    context = generate_context()
    generate_files(
        input_dir=project_template,
        context=context
    )


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
cookiecutter.main
-----------------

Main entry point for the `cookiecutter` command.

The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""

import argparse
import os

from .cleanup import remove_repo
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone


def main():
    """ Entry point for the package, as defined in setup.py. """

    # Get command line input/output arguments
    parser = argparse.ArgumentParser(
        description='Create a project from a Cookiecutter project template.'
    )
    parser.add_argument(
        'input_dir',
        help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
    )
    args = parser.parse_args()

    # If it's a git repo, clone and prompt
    if args.input_dir.endswith('.git'):
        got_repo_arg = True
        repo_dir = git_clone(args.input_dir)
        project_template = find_template(repo_dir)
        os.chdir(repo_dir)
    else:
        project_template = args.input_dir

    # Create project from local context and project template.
    context = generate_context()
    generate_files(
        input_dir=project_template,
        context=context
    )

    # Remove repo if Cookiecutter cloned it in the first place.
    # Here the user just wants a project, not a project template.
    if got_repo_arg:
        generated_project = context['project']['repo_name']
        remove_repo(repo_dir, generated_project)


if __name__ == '__main__':
    main()
Clean up after cloned repo if needed. (partial checkin)
Clean up after cloned repo if needed. (partial checkin)
Python
bsd-3-clause
atlassian/cookiecutter,dajose/cookiecutter,luzfcb/cookiecutter,foodszhang/cookiecutter,Springerle/cookiecutter,willingc/cookiecutter,utek/cookiecutter,takeflight/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,stevepiercy/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,0k/cookiecutter,nhomar/cookiecutter,janusnic/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,foodszhang/cookiecutter,kkujawinski/cookiecutter,ionelmc/cookiecutter,michaeljoseph/cookiecutter,sp1rs/cookiecutter,jhermann/cookiecutter,lucius-feng/cookiecutter,agconti/cookiecutter,michaeljoseph/cookiecutter,ramiroluz/cookiecutter,letolab/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,sp1rs/cookiecutter,kkujawinski/cookiecutter,vincentbernat/cookiecutter,agconti/cookiecutter,lucius-feng/cookiecutter,drgarcia1986/cookiecutter,drgarcia1986/cookiecutter,atlassian/cookiecutter,ionelmc/cookiecutter,lgp171188/cookiecutter,audreyr/cookiecutter,nhomar/cookiecutter,alex/cookiecutter,cichm/cookiecutter,moi65/cookiecutter,lgp171188/cookiecutter,hackebrot/cookiecutter,tylerdave/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter,janusnic/cookiecutter,utek/cookiecutter,moi65/cookiecutter,christabor/cookiecutter,vintasoftware/cookiecutter,alex/cookiecutter,cichm/cookiecutter,tylerdave/cookiecutter,letolab/cookiecutter,Vauxoo/cookiecutter,Springerle/cookiecutter,jhermann/cookiecutter,vintasoftware/cookiecutter,cguardia/cookiecutter,Vauxoo/cookiecutter,benthomasson/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,christabor/cookiecutter,venumech/cookiecutter,luzfcb/cookiecutter,terryjbates/cookiecutter,pjbull/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter
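One caveat in the new_contents above, consistent with its "(partial checkin)" note: got_repo_arg is only bound inside the .git branch, so the final `if got_repo_arg:` raises NameError for local templates. A minimal standalone sketch of the obvious guard (illustrative only, not the project's actual follow-up commit; the placeholder string stands in for git_clone()):

def main(input_dir):
    got_repo_arg = False                   # bound on every path
    if input_dir.endswith('.git'):
        got_repo_arg = True
        project_template = 'cloned-repo/'  # stand-in for git_clone()
    else:
        project_template = input_dir
    if got_repo_arg:                       # now safe for local templates too
        print('would remove cloned repo after generation')
    return project_template


print(main('./mytemplate'))  # no NameError on the local path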
a5ddab3208992ca6ab655ddef9a4155d5fc6bc55
tests/grammar_test.py
tests/grammar_test.py
import nose

from parser_tool import get_parser, parse

sentences = (
    # N[s] V[i]
    "Brad drives",
    # N[s] V[t] N[p]
    "Angela drives cars",
    # N[s] V[t] Det N[s]
    "Brad buys the house",
    # Det[s] N[s] V[i]
    "a dog walks"
)

grammar = get_parser("grammars/feat1.fcfg", trace=0)


def test_grammar():
    global sentences, parser
    for sent in sentences:
        print "Testing: %s" % sent
        trees = parse(grammar, sent)
        assert len(trees) > 0


if __name__=="__main__":
    nose.main()
import nose

from parser_tool import get_parser, parse

sentences = (
    # PN V[i]
    "Brad drives",
    # PN V[t] N[p]
    "Angela drives cars",
    # PN V[t] Det N[s]
    "Brad buys the house",
    # Det[s] N[s] V[i]
    "a dog walks",
    # Det[p] N[p] V[i]
    "these dogs walk",
    # Det[p] N[p] V[t] Det N[s]
    "the cars enter the house",
    # A N[p] V[t] Det N[s]
    "red cars enter the house",
    # Det A N[s] V[t] Det N[s]
    "a red car enters the house",
    # PN V[t] Det A N[s]
    "Brad buys a red car",
)

grammar = get_parser("grammars/feat1.fcfg", trace=0)


def test_grammar():
    global sentences, parser
    for sent in sentences:
        print "Testing: %s" % sent
        trees = parse(grammar, sent)
        assert len(trees) > 0


if __name__=="__main__":
    nose.main()
Increase testing coverage of grammar
Increase testing coverage of grammar * added some sample sentences with adjectives
Python
mit
caninemwenja/marker,kmwenja/marker
2759119ea8a2afe6c47575825aef9ae59c1ce921
python/misc/oo/CSStudent.py
python/misc/oo/CSStudent.py
null
# Code from https://www.geeksforgeeks.org/g-fact-34-class-or-static-variables-in-python/
# Copy - Paste here to test it directly

# Python program to show that the variables with a value
# assigned in class declaration, are class variables

# Class for Computer Science Student
class CSStudent:
    stream = 'cse'            # Class Variable
    def __init__(self,name,roll):
        self.name = name      # Instance Variable
        self.roll = roll      # Instance Variable

# Objects of CSStudent class
a = CSStudent('Geek', 1)
b = CSStudent('Nerd', 2)

print(a.stream)  # prints "cse"
print(b.stream)  # prints "cse"
print(a.name)    # prints "Geek"
print(b.name)    # prints "Nerd"
print(a.roll)    # prints "1"
print(b.roll)    # prints "2"

# Class variables can be accessed using class
# name also
print(CSStudent.stream)  # prints "cse"

# Now if we change the stream for just a it won't be changed for b
a.stream = 'ece'
print(a.stream)  # prints 'ece'
print(b.stream)  # prints 'cse'

# To change the stream for all instances of the class we can change it
# directly from the class
CSStudent.stream = 'mech'

print(a.stream)  # prints 'mech' <-- error here, it prints ece
print(b.stream)  # prints 'mech'
Test of a code snippet from GeeksForGeeks
Test of a code snippet from GeeksForGeeks
Python
mit
TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts,TGITS/programming-workouts
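The "<-- error here" comment in the record above follows from Python's attribute lookup: `a.stream = 'ece'` creates an instance attribute that shadows the class variable, so later rebinding `CSStudent.stream` is invisible through `a`. A minimal sketch of the rule:

class C(object):
    x = 'class'


a, b = C(), C()
a.x = 'instance'   # creates an attribute on the instance a only
C.x = 'changed'    # rebinds the class attribute

print(a.x)  # 'instance' -- shadowed by the instance attribute
print(b.x)  # 'changed'  -- b still resolves to the class
del a.x     # removing the shadow restores class-level lookup
print(a.x)  # 'changed'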
9437b7fa2ef7f581968d6628561940dcb1e3f4ad
test_tws/__init__.py
test_tws/__init__.py
'''Unit test package for package "tws".'''

__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"

import socket
from tws import EWrapper


def test_import():
    '''Verify successful import of top-level "tws" package'''
    import tws
    assert tws


class mock_wrapper(EWrapper):

    def __init__(self):
        self.errors = []

    def error(self, id, code, text):
        self.errors.append((id, code, text))


class mock_socket(object):

    def __init__(self):
        self._peer = ()

    def connect(self, peer, error=False):
        if error: raise socket.error()
        self._peer = peer

    def getpeername(self):
        if not self._peer: raise socket.error()
        return self._peer

    def makefile(self, mode):
        return StringIO()
'''Unit test package for package "tws".'''

__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"

import socket
from tws import EWrapper


def test_import():
    '''Verify successful import of top-level "tws" package'''
    import tws
    assert tws


class mock_wrapper(EWrapper):

    def __init__(self):
        self.calldata = []
        self.errors = []

    def error(self, id, code, text):
        self.errors.append((id, code, text))

    def __getattr__(self, name):
        # Any arbitrary unknown attribute is mapped to a function call which is
        # recorded into self.calldata.
        return lambda *args, **kwds: self.calldata.append((name, args, kwds))


class mock_socket(object):

    def __init__(self):
        self._peer = ()

    def connect(self, peer, error=False):
        if error: raise socket.error()
        self._peer = peer

    def getpeername(self):
        if not self._peer: raise socket.error()
        return self._peer

    def makefile(self, mode):
        return StringIO()
Implement a __getattr__() for mock_wrapper that just returns a lambda that records whatever call was attempted along with the call params.
Implement a __getattr__() for mock_wrapper that just returns a lambda that records whatever call was attempted along with the call params.
Python
bsd-3-clause
kbluck/pytws,kbluck/pytws
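The __getattr__ trick above works because Python only calls __getattr__ for names that normal lookup misses, so calldata itself resolves without recursion while every unknown method turns into a recording lambda. A standalone sketch of the same idea (the EWrapper base class is omitted; tickPrice is just an example method name):

class RecordingMock(object):
    def __init__(self):
        self.calldata = []

    def __getattr__(self, name):
        # Reached only when normal attribute lookup fails.
        return lambda *args, **kwds: self.calldata.append((name, args, kwds))


m = RecordingMock()
m.tickPrice(1, 4, 12.5)
m.connectionClosed()
print(m.calldata)
# [('tickPrice', (1, 4, 12.5), {}), ('connectionClosed', (), {})]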
36716fe51800a19567c49e734d320b38d441054e
zerver/migrations/0003_custom_indexes.py
zerver/migrations/0003_custom_indexes.py
null
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('zerver', '0002_django_1_8'),
    ]

    operations = [
        migrations.RunSQL("CREATE INDEX upper_subject_idx ON zerver_message ((upper(subject)));",
                          reverse_sql="DROP INDEX upper_subject_idx;"),
        migrations.RunSQL("CREATE INDEX upper_stream_name_idx ON zerver_stream ((upper(name)));",
                          reverse_sql="DROP INDEX upper_stream_name_idx;")
    ]
Add remaining custom indexes that were created by South migrations
Add remaining custom indexes that were created by South migrations (imported from commit 9798afa8161af4ae6b3fa0c5f4894a3211b77cd4)
Python
apache-2.0
punchagan/zulip,yocome/zulip,bluesea/zulip,armooo/zulip,zacps/zulip,vabs22/zulip,Juanvulcano/zulip,hj3938/zulip,zwily/zulip,grave-w-grave/zulip,dxq-git/zulip,LeeRisk/zulip,so0k/zulip,jerryge/zulip,technicalpickles/zulip,guiquanz/zulip,alliejones/zulip,firstblade/zulip,Drooids/zulip,xuanhan863/zulip,praveenaki/zulip,ApsOps/zulip,thomasboyt/zulip,vakila/zulip,mdavid/zulip,sonali0901/zulip,hustlzp/zulip,LeeRisk/zulip,andersk/zulip,vikas-parashar/zulip,zacps/zulip,jainayush975/zulip,umkay/zulip,ahmadassaf/zulip,wavelets/zulip,Juanvulcano/zulip,SmartPeople/zulip,joshisa/zulip,ericzhou2008/zulip,nicholasbs/zulip,LeeRisk/zulip,wavelets/zulip,shaunstanislaus/zulip,xuxiao/zulip,lfranchi/zulip,jessedhillon/zulip,ahmadassaf/zulip,esander91/zulip,stamhe/zulip,johnny9/zulip,Diptanshu8/zulip,akuseru/zulip,Vallher/zulip,eeshangarg/zulip,joyhchen/zulip,karamcnair/zulip,JanzTam/zulip,dawran6/zulip,m1ssou/zulip,Jianchun1/zulip,wangdeshui/zulip,Batterfii/zulip,udxxabp/zulip,hengqujushi/zulip,bluesea/zulip,dawran6/zulip,rishig/zulip,themass/zulip,AZtheAsian/zulip,hayderimran7/zulip,schatt/zulip,atomic-labs/zulip,praveenaki/zulip,seapasulli/zulip,firstblade/zulip,jessedhillon/zulip,Frouk/zulip,rht/zulip,MariaFaBella85/zulip,Suninus/zulip,voidException/zulip,JanzTam/zulip,zulip/zulip,zofuthan/zulip,udxxabp/zulip,lfranchi/zulip,amyliu345/zulip,kokoar/zulip,peguin40/zulip,wangdeshui/zulip,noroot/zulip,Drooids/zulip,glovebx/zulip,wavelets/zulip,AZtheAsian/zulip,bitemyapp/zulip,punchagan/zulip,bowlofstew/zulip,he15his/zulip,TigorC/zulip,codeKonami/zulip,rishig/zulip,ipernet/zulip,amallia/zulip,tdr130/zulip,luyifan/zulip,amyliu345/zulip,johnnygaddarr/zulip,bssrdf/zulip,Jianchun1/zulip,dwrpayne/zulip,krtkmj/zulip,aakash-cr7/zulip,Cheppers/zulip,yuvipanda/zulip,PhilSk/zulip,johnnygaddarr/zulip,aliceriot/zulip,isht3/zulip,DazWorrall/zulip,bastianh/zulip,DazWorrall/zulip,jonesgithub/zulip,souravbadami/zulip,kokoar/zulip,jphilipsen05/zulip,Vallher/zulip,amanharitsh123/zulip,Juanvulcano/zulip,armooo/zulip,he15his/zulip,dxq-git/zulip,Gabriel0402/zulip,KingxBanana/zulip,ikasumiwt/zulip,EasonYi/zulip,dnmfarrell/zulip,zachallaun/zulip,jonesgithub/zulip,hayderimran7/zulip,mohsenSy/zulip,huangkebo/zulip,adnanh/zulip,swinghu/zulip,so0k/zulip,hackerkid/zulip,akuseru/zulip,jerryge/zulip,PhilSk/zulip,sharmaeklavya2/zulip,hackerkid/zulip,saitodisse/zulip,dxq-git/zulip,jrowan/zulip,brainwane/zulip,brockwhittaker/zulip,Suninus/zulip,kaiyuanheshang/zulip,udxxabp/zulip,amallia/zulip,adnanh/zulip,armooo/zulip,AZtheAsian/zulip,verma-varsha/zulip,zwily/zulip,nicholasbs/zulip,susansls/zulip,xuxiao/zulip,huangkebo/zulip,rht/zulip,JanzTam/zulip,dhcrzf/zulip,ipernet/zulip,mahim97/zulip,he15his/zulip,rishig/zulip,jackrzhang/zulip,atomic-labs/zulip,akuseru/zulip,so0k/zulip,zhaoweigg/zulip,avastu/zulip,timabbott/zulip,jimmy54/zulip,technicalpickles/zulip,johnny9/zulip,eastlhu/zulip,ipernet/zulip,adnanh/zulip,calvinleenyc/zulip,wdaher/zulip,dawran6/zulip,vaidap/zulip,MariaFaBella85/zulip,glovebx/zulip,tdr130/zulip,showell/zulip,hayderimran7/zulip,gkotian/zulip,RobotCaleb/zulip,babbage/zulip,bluesea/zulip,ashwinirudrappa/zulip,mohsenSy/zulip,christi3k/zulip,souravbadami/zulip,technicalpickles/zulip,mdavid/zulip,joshisa/zulip,wavelets/zulip,jimmy54/zulip,shubhamdhama/zulip,shaunstanislaus/zulip,thomasboyt/zulip,stamhe/zulip,itnihao/zulip,vabs22/zulip,andersk/zulip,jerryge/zulip,sup95/zulip,MayB/zulip,bastianh/zulip,esander91/zulip,saitodisse/zulip,aakash-cr7/zulip,calvinleenyc/zulip,kaiyuanheshang/zulip,shubhamdhama/zulip,souravbadami/zulip,susansls/
zulip,seapasulli/zulip,arpitpanwar/zulip,firstblade/zulip,eastlhu/zulip,Suninus/zulip,kaiyuanheshang/zulip,tiansiyuan/zulip,sup95/zulip,dnmfarrell/zulip,noroot/zulip,RobotCaleb/zulip,sharmaeklavya2/zulip,bitemyapp/zulip,mahim97/zulip,technicalpickles/zulip,zhaoweigg/zulip,armooo/zulip,Cheppers/zulip,xuxiao/zulip,vikas-parashar/zulip,saitodisse/zulip,thomasboyt/zulip,jonesgithub/zulip,sonali0901/zulip,kaiyuanheshang/zulip,noroot/zulip,nicholasbs/zulip,Diptanshu8/zulip,vakila/zulip,dxq-git/zulip,shubhamdhama/zulip,ufosky-server/zulip,dnmfarrell/zulip,hackerkid/zulip,arpitpanwar/zulip,AZtheAsian/zulip,JPJPJPOPOP/zulip,Suninus/zulip,christi3k/zulip,cosmicAsymmetry/zulip,christi3k/zulip,pradiptad/zulip,KingxBanana/zulip,proliming/zulip,adnanh/zulip,vabs22/zulip,hustlzp/zulip,praveenaki/zulip,ericzhou2008/zulip,zorojean/zulip,mansilladev/zulip,fw1121/zulip,willingc/zulip,KingxBanana/zulip,bowlofstew/zulip,avastu/zulip,dhcrzf/zulip,vikas-parashar/zulip,LeeRisk/zulip,themass/zulip,kou/zulip,developerfm/zulip,sharmaeklavya2/zulip,dxq-git/zulip,tdr130/zulip,zachallaun/zulip,adnanh/zulip,bitemyapp/zulip,rht/zulip,moria/zulip,aliceriot/zulip,jphilipsen05/zulip,moria/zulip,dotcool/zulip,ryansnowboarder/zulip,esander91/zulip,sup95/zulip,grave-w-grave/zulip,eeshangarg/zulip,gigawhitlocks/zulip,KJin99/zulip,synicalsyntax/zulip,akuseru/zulip,tbutter/zulip,Cheppers/zulip,Diptanshu8/zulip,punchagan/zulip,dotcool/zulip,samatdav/zulip,timabbott/zulip,Galexrt/zulip,fw1121/zulip,zorojean/zulip,thomasboyt/zulip,zhaoweigg/zulip,littledogboy/zulip,EasonYi/zulip,jackrzhang/zulip,hafeez3000/zulip,easyfmxu/zulip,showell/zulip,mdavid/zulip,hackerkid/zulip,amallia/zulip,dattatreya303/zulip,mohsenSy/zulip,jeffcao/zulip,suxinde2009/zulip,tiansiyuan/zulip,christi3k/zulip,gkotian/zulip,susansls/zulip,Juanvulcano/zulip,tiansiyuan/zulip,wweiradio/zulip,LAndreas/zulip,hustlzp/zulip,andersk/zulip,brainwane/zulip,dhcrzf/zulip,JPJPJPOPOP/zulip,krtkmj/zulip,ufosky-server/zulip,pradiptad/zulip,lfranchi/zulip,amanharitsh123/zulip,niftynei/zulip,technicalpickles/zulip,paxapy/zulip,kou/zulip,PaulPetring/zulip,cosmicAsymmetry/zulip,zulip/zulip,umkay/zulip,calvinleenyc/zulip,fw1121/zulip,sup95/zulip,joyhchen/zulip,glovebx/zulip,yocome/zulip,Cheppers/zulip,ericzhou2008/zulip,cosmicAsymmetry/zulip,fw1121/zulip,Frouk/zulip,sonali0901/zulip,vakila/zulip,rishig/zulip,nicholasbs/zulip,j831/zulip,ufosky-server/zulip,shrikrishnaholla/zulip,yocome/zulip,wangdeshui/zulip,aliceriot/zulip,proliming/zulip,jainayush975/zulip,LAndreas/zulip,punchagan/zulip,ryanbackman/zulip,synicalsyntax/zulip,hustlzp/zulip,Qgap/zulip,bssrdf/zulip,levixie/zulip,easyfmxu/zulip,natanovia/zulip,peguin40/zulip,amanharitsh123/zulip,so0k/zulip,brockwhittaker/zulip,TigorC/zulip,huangkebo/zulip,MayB/zulip,glovebx/zulip,timabbott/zulip,tommyip/zulip,zofuthan/zulip,ahmadassaf/zulip,samatdav/zulip,peguin40/zulip,armooo/zulip,nicholasbs/zulip,nicholasbs/zulip,levixie/zulip,karamcnair/zulip,shaunstanislaus/zulip,zofuthan/zulip,TigorC/zulip,tbutter/zulip,paxapy/zulip,yuvipanda/zulip,zorojean/zulip,showell/zulip,kokoar/zulip,LAndreas/zulip,tommyip/zulip,littledogboy/zulip,verma-varsha/zulip,jessedhillon/zulip,alliejones/zulip,amyliu345/zulip,ericzhou2008/zulip,voidException/zulip,willingc/zulip,swinghu/zulip,jessedhillon/zulip,bssrdf/zulip,pradiptad/zulip,showell/zulip,dotcool/zulip,umkay/zulip,sharmaeklavya2/zulip,deer-hope/zulip,shrikrishnaholla/zulip,KJin99/zulip,gigawhitlocks/zulip,peiwei/zulip,tbutter/zulip,xuanhan863/zulip,jackrzhang/zulip,vikas-parashar/zulip,bluesea/zulip,jos
hisa/zulip,jerryge/zulip,qq1012803704/zulip,shaunstanislaus/zulip,bluesea/zulip,vikas-parashar/zulip,showell/zulip,ashwinirudrappa/zulip,natanovia/zulip,kou/zulip,vaidap/zulip,niftynei/zulip,brainwane/zulip,dnmfarrell/zulip,stamhe/zulip,tommyip/zulip,itnihao/zulip,ryansnowboarder/zulip,amallia/zulip,tommyip/zulip,hayderimran7/zulip,Cheppers/zulip,AZtheAsian/zulip,firstblade/zulip,christi3k/zulip,firstblade/zulip,atomic-labs/zulip,Jianchun1/zulip,armooo/zulip,Galexrt/zulip,dwrpayne/zulip,thomasboyt/zulip,Gabriel0402/zulip,ufosky-server/zulip,zofuthan/zulip,codeKonami/zulip,wweiradio/zulip,JPJPJPOPOP/zulip,dotcool/zulip,schatt/zulip,tommyip/zulip,reyha/zulip,moria/zulip,avastu/zulip,SmartPeople/zulip,udxxabp/zulip,tiansiyuan/zulip,SmartPeople/zulip,themass/zulip,littledogboy/zulip,dattatreya303/zulip,eastlhu/zulip,Gabriel0402/zulip,gigawhitlocks/zulip,seapasulli/zulip,Jianchun1/zulip,fw1121/zulip,ericzhou2008/zulip,codeKonami/zulip,easyfmxu/zulip,zwily/zulip,verma-varsha/zulip,niftynei/zulip,itnihao/zulip,jerryge/zulip,alliejones/zulip,hj3938/zulip,isht3/zulip,Jianchun1/zulip,Drooids/zulip,natanovia/zulip,rht/zulip,Frouk/zulip,susansls/zulip,arpith/zulip,ryansnowboarder/zulip,Jianchun1/zulip,armooo/zulip,Batterfii/zulip,sup95/zulip,hengqujushi/zulip,eastlhu/zulip,Frouk/zulip,Gabriel0402/zulip,brockwhittaker/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,deer-hope/zulip,Qgap/zulip,ahmadassaf/zulip,jphilipsen05/zulip,jonesgithub/zulip,bssrdf/zulip,kaiyuanheshang/zulip,bluesea/zulip,dawran6/zulip,firstblade/zulip,mahim97/zulip,wdaher/zulip,esander91/zulip,peiwei/zulip,amyliu345/zulip,EasonYi/zulip,hayderimran7/zulip,alliejones/zulip,guiquanz/zulip,lfranchi/zulip,MariaFaBella85/zulip,thomasboyt/zulip,zwily/zulip,JanzTam/zulip,guiquanz/zulip,LAndreas/zulip,grave-w-grave/zulip,levixie/zulip,deer-hope/zulip,jessedhillon/zulip,shrikrishnaholla/zulip,easyfmxu/zulip,eastlhu/zulip,jimmy54/zulip,peiwei/zulip,dattatreya303/zulip,luyifan/zulip,alliejones/zulip,aps-sids/zulip,jimmy54/zulip,jackrzhang/zulip,pradiptad/zulip,ericzhou2008/zulip,vaidap/zulip,synicalsyntax/zulip,karamcnair/zulip,blaze225/zulip,xuanhan863/zulip,eastlhu/zulip,brainwane/zulip,ashwinirudrappa/zulip,isht3/zulip,JanzTam/zulip,bitemyapp/zulip,littledogboy/zulip,mohsenSy/zulip,kou/zulip,LeeRisk/zulip,Vallher/zulip,DazWorrall/zulip,proliming/zulip,eeshangarg/zulip,moria/zulip,aps-sids/zulip,joshisa/zulip,ericzhou2008/zulip,atomic-labs/zulip,tiansiyuan/zulip,Drooids/zulip,ufosky-server/zulip,MayB/zulip,umkay/zulip,developerfm/zulip,atomic-labs/zulip,hayderimran7/zulip,pradiptad/zulip,easyfmxu/zulip,m1ssou/zulip,EasonYi/zulip,schatt/zulip,ipernet/zulip,brockwhittaker/zulip,dotcool/zulip,ikasumiwt/zulip,qq1012803704/zulip,esander91/zulip,ApsOps/zulip,sonali0901/zulip,calvinleenyc/zulip,kokoar/zulip,DazWorrall/zulip,EasonYi/zulip,arpith/zulip,tommyip/zulip,ryansnowboarder/zulip,zacps/zulip,qq1012803704/zulip,vakila/zulip,easyfmxu/zulip,rishig/zulip,Suninus/zulip,amanharitsh123/zulip,deer-hope/zulip,johnnygaddarr/zulip,ApsOps/zulip,stamhe/zulip,dnmfarrell/zulip,developerfm/zulip,ryanbackman/zulip,swinghu/zulip,vakila/zulip,thomasboyt/zulip,rht/zulip,bssrdf/zulip,eeshangarg/zulip,fw1121/zulip,kokoar/zulip,MayB/zulip,brainwane/zulip,littledogboy/zulip,noroot/zulip,johnny9/zulip,esander91/zulip,timabbott/zulip,seapasulli/zulip,bitemyapp/zulip,shubhamdhama/zulip,rishig/zulip,amyliu345/zulip,SmartPeople/zulip,Frouk/zulip,Gabriel0402/zulip,zorojean/zulip,ryansnowboarder/zulip,wweiradio/zulip,hafeez3000/zulip,babbage/zulip,levixie/zulip,Qgap/zulip,shaunstan
islaus/zulip,krtkmj/zulip,babbage/zulip,seapasulli/zulip,wweiradio/zulip,hengqujushi/zulip,KJin99/zulip,Diptanshu8/zulip,hafeez3000/zulip,LeeRisk/zulip,samatdav/zulip,littledogboy/zulip,udxxabp/zulip,zofuthan/zulip,zulip/zulip,AZtheAsian/zulip,saitodisse/zulip,swinghu/zulip,esander91/zulip,luyifan/zulip,johnnygaddarr/zulip,vabs22/zulip,johnny9/zulip,atomic-labs/zulip,johnnygaddarr/zulip,karamcnair/zulip,zulip/zulip,shrikrishnaholla/zulip,brainwane/zulip,dattatreya303/zulip,eeshangarg/zulip,amallia/zulip,adnanh/zulip,Suninus/zulip,moria/zulip,zacps/zulip,Qgap/zulip,noroot/zulip,amallia/zulip,qq1012803704/zulip,j831/zulip,j831/zulip,jeffcao/zulip,vikas-parashar/zulip,kaiyuanheshang/zulip,KJin99/zulip,ikasumiwt/zulip,codeKonami/zulip,ApsOps/zulip,dotcool/zulip,jainayush975/zulip,bluesea/zulip,ashwinirudrappa/zulip,yocome/zulip,jrowan/zulip,jerryge/zulip,MayB/zulip,KJin99/zulip,LAndreas/zulip,wangdeshui/zulip,jimmy54/zulip,dotcool/zulip,peiwei/zulip,mdavid/zulip,ryansnowboarder/zulip,udxxabp/zulip,glovebx/zulip,bastianh/zulip,hackerkid/zulip,isht3/zulip,KJin99/zulip,itnihao/zulip,arpith/zulip,paxapy/zulip,hengqujushi/zulip,punchagan/zulip,deer-hope/zulip,souravbadami/zulip,TigorC/zulip,stamhe/zulip,amanharitsh123/zulip,levixie/zulip,yocome/zulip,zachallaun/zulip,glovebx/zulip,aps-sids/zulip,hj3938/zulip,zhaoweigg/zulip,hafeez3000/zulip,samatdav/zulip,zulip/zulip,brockwhittaker/zulip,KJin99/zulip,akuseru/zulip,DazWorrall/zulip,aliceriot/zulip,ashwinirudrappa/zulip,arpith/zulip,peiwei/zulip,hj3938/zulip,xuanhan863/zulip,andersk/zulip,dnmfarrell/zulip,swinghu/zulip,xuxiao/zulip,calvinleenyc/zulip,kokoar/zulip,Galexrt/zulip,Gabriel0402/zulip,bssrdf/zulip,joshisa/zulip,niftynei/zulip,mahim97/zulip,karamcnair/zulip,ryanbackman/zulip,paxapy/zulip,qq1012803704/zulip,zwily/zulip,karamcnair/zulip,tommyip/zulip,grave-w-grave/zulip,itnihao/zulip,susansls/zulip,krtkmj/zulip,hj3938/zulip,joshisa/zulip,aliceriot/zulip,MayB/zulip,MariaFaBella85/zulip,andersk/zulip,avastu/zulip,rishig/zulip,vaidap/zulip,luyifan/zulip,peguin40/zulip,jphilipsen05/zulip,mansilladev/zulip,jeffcao/zulip,babbage/zulip,gigawhitlocks/zulip,kokoar/zulip,RobotCaleb/zulip,developerfm/zulip,dhcrzf/zulip,PaulPetring/zulip,arpith/zulip,wdaher/zulip,yocome/zulip,vakila/zulip,ApsOps/zulip,dattatreya303/zulip,so0k/zulip,dwrpayne/zulip,Cheppers/zulip,zacps/zulip,eastlhu/zulip,RobotCaleb/zulip,aps-sids/zulip,natanovia/zulip,hafeez3000/zulip,RobotCaleb/zulip,shrikrishnaholla/zulip,tbutter/zulip,he15his/zulip,guiquanz/zulip,Qgap/zulip,voidException/zulip,adnanh/zulip,natanovia/zulip,wangdeshui/zulip,bowlofstew/zulip,codeKonami/zulip,Batterfii/zulip,ufosky-server/zulip,shubhamdhama/zulip,wweiradio/zulip,wdaher/zulip,jackrzhang/zulip,yuvipanda/zulip,arpitpanwar/zulip,joshisa/zulip,mdavid/zulip,firstblade/zulip,huangkebo/zulip,avastu/zulip,umkay/zulip,andersk/zulip,tbutter/zulip,tiansiyuan/zulip,verma-varsha/zulip,ryanbackman/zulip,arpitpanwar/zulip,tdr130/zulip,xuanhan863/zulip,themass/zulip,samatdav/zulip,verma-varsha/zulip,niftynei/zulip,deer-hope/zulip,jainayush975/zulip,samatdav/zulip,Batterfii/zulip,synicalsyntax/zulip,aakash-cr7/zulip,JPJPJPOPOP/zulip,cosmicAsymmetry/zulip,shubhamdhama/zulip,PaulPetring/zulip,bssrdf/zulip,johnny9/zulip,akuseru/zulip,bastianh/zulip,tdr130/zulip,itnihao/zulip,guiquanz/zulip,yuvipanda/zulip,MariaFaBella85/zulip,Juanvulcano/zulip,shrikrishnaholla/zulip,ahmadassaf/zulip,Batterfii/zulip,zwily/zulip,JPJPJPOPOP/zulip,Drooids/zulip,zorojean/zulip,mohsenSy/zulip,he15his/zulip,babbage/zulip,suxinde2009/zulip,gkotian/zulip
,fw1121/zulip,seapasulli/zulip,schatt/zulip,lfranchi/zulip,RobotCaleb/zulip,JanzTam/zulip,souravbadami/zulip,shrikrishnaholla/zulip,gigawhitlocks/zulip,hengqujushi/zulip,KingxBanana/zulip,m1ssou/zulip,JanzTam/zulip,synicalsyntax/zulip,xuanhan863/zulip,hj3938/zulip,praveenaki/zulip,punchagan/zulip,Vallher/zulip,jimmy54/zulip,hayderimran7/zulip,gigawhitlocks/zulip,brainwane/zulip,gigawhitlocks/zulip,tbutter/zulip,sup95/zulip,ikasumiwt/zulip,Vallher/zulip,Vallher/zulip,christi3k/zulip,itnihao/zulip,alliejones/zulip,PaulPetring/zulip,MariaFaBella85/zulip,reyha/zulip,jerryge/zulip,noroot/zulip,developerfm/zulip,glovebx/zulip,dawran6/zulip,susansls/zulip,hustlzp/zulip,dxq-git/zulip,voidException/zulip,ipernet/zulip,avastu/zulip,mdavid/zulip,xuanhan863/zulip,atomic-labs/zulip,johnny9/zulip,joyhchen/zulip,jonesgithub/zulip,akuseru/zulip,willingc/zulip,bowlofstew/zulip,peguin40/zulip,jonesgithub/zulip,johnny9/zulip,hackerkid/zulip,rht/zulip,MariaFaBella85/zulip,PhilSk/zulip,showell/zulip,developerfm/zulip,PhilSk/zulip,jphilipsen05/zulip,m1ssou/zulip,bastianh/zulip,KingxBanana/zulip,Juanvulcano/zulip,levixie/zulip,jeffcao/zulip,SmartPeople/zulip,arpitpanwar/zulip,PaulPetring/zulip,swinghu/zulip,m1ssou/zulip,sharmaeklavya2/zulip,jrowan/zulip,jonesgithub/zulip,sonali0901/zulip,pradiptad/zulip,zhaoweigg/zulip,themass/zulip,ryanbackman/zulip,peiwei/zulip,ashwinirudrappa/zulip,swinghu/zulip,xuxiao/zulip,blaze225/zulip,zachallaun/zulip,proliming/zulip,willingc/zulip,jeffcao/zulip,eeshangarg/zulip,amallia/zulip,reyha/zulip,kaiyuanheshang/zulip,TigorC/zulip,xuxiao/zulip,stamhe/zulip,dxq-git/zulip,zofuthan/zulip,hafeez3000/zulip,isht3/zulip,krtkmj/zulip,wweiradio/zulip,codeKonami/zulip,LAndreas/zulip,showell/zulip,luyifan/zulip,jrowan/zulip,easyfmxu/zulip,mohsenSy/zulip,so0k/zulip,dwrpayne/zulip,wdaher/zulip,schatt/zulip,zacps/zulip,aakash-cr7/zulip,mansilladev/zulip,zhaoweigg/zulip,blaze225/zulip,alliejones/zulip,zwily/zulip,zachallaun/zulip,zulip/zulip,j831/zulip,yuvipanda/zulip,johnnygaddarr/zulip,ufosky-server/zulip,he15his/zulip,dwrpayne/zulip,hengqujushi/zulip,technicalpickles/zulip,andersk/zulip,grave-w-grave/zulip,praveenaki/zulip,aps-sids/zulip,hengqujushi/zulip,ikasumiwt/zulip,wangdeshui/zulip,Suninus/zulip,Drooids/zulip,bowlofstew/zulip,peiwei/zulip,mansilladev/zulip,zhaoweigg/zulip,johnnygaddarr/zulip,jrowan/zulip,dhcrzf/zulip,seapasulli/zulip,bitemyapp/zulip,willingc/zulip,hafeez3000/zulip,voidException/zulip,themass/zulip,krtkmj/zulip,praveenaki/zulip,wangdeshui/zulip,Frouk/zulip,hackerkid/zulip,calvinleenyc/zulip,voidException/zulip,blaze225/zulip,stamhe/zulip,willingc/zulip,technicalpickles/zulip,Diptanshu8/zulip,jeffcao/zulip,peguin40/zulip,joyhchen/zulip,aps-sids/zulip,zorojean/zulip,tiansiyuan/zulip,PhilSk/zulip,yuvipanda/zulip,qq1012803704/zulip,yocome/zulip,paxapy/zulip,vabs22/zulip,EasonYi/zulip,Gabriel0402/zulip,wavelets/zulip,bitemyapp/zulip,pradiptad/zulip,reyha/zulip,tdr130/zulip,karamcnair/zulip,wavelets/zulip,m1ssou/zulip,krtkmj/zulip,timabbott/zulip,Vallher/zulip,RobotCaleb/zulip,timabbott/zulip,suxinde2009/zulip,zachallaun/zulip,grave-w-grave/zulip,jainayush975/zulip,voidException/zulip,yuvipanda/zulip,synicalsyntax/zulip,souravbadami/zulip,dhcrzf/zulip,sharmaeklavya2/zulip,dhcrzf/zulip,blaze225/zulip,zulip/zulip,rht/zulip,PhilSk/zulip,mansilladev/zulip,ahmadassaf/zulip,nicholasbs/zulip,jrowan/zulip,reyha/zulip,ApsOps/zulip,tbutter/zulip,noroot/zulip,KingxBanana/zulip,verma-varsha/zulip,kou/zulip,blaze225/zulip,m1ssou/zulip,saitodisse/zulip,jessedhillon/zulip,j831/zulip,kou/zulip
,ryansnowboarder/zulip,dwrpayne/zulip,levixie/zulip,PaulPetring/zulip,wweiradio/zulip,moria/zulip,dattatreya303/zulip,amanharitsh123/zulip,jainayush975/zulip,luyifan/zulip,lfranchi/zulip,shaunstanislaus/zulip,saitodisse/zulip,tdr130/zulip,Cheppers/zulip,gkotian/zulip,deer-hope/zulip,jackrzhang/zulip,he15his/zulip,aakash-cr7/zulip,suxinde2009/zulip,zofuthan/zulip,suxinde2009/zulip,ikasumiwt/zulip,punchagan/zulip,codeKonami/zulip,jeffcao/zulip,MayB/zulip,umkay/zulip,isht3/zulip,saitodisse/zulip,cosmicAsymmetry/zulip,themass/zulip,vaidap/zulip,DazWorrall/zulip,paxapy/zulip,dnmfarrell/zulip,willingc/zulip,developerfm/zulip,LeeRisk/zulip,proliming/zulip,EasonYi/zulip,joyhchen/zulip,bastianh/zulip,schatt/zulip,sonali0901/zulip,niftynei/zulip,qq1012803704/zulip,mansilladev/zulip,brockwhittaker/zulip,jessedhillon/zulip,ikasumiwt/zulip,so0k/zulip,natanovia/zulip,synicalsyntax/zulip,dwrpayne/zulip,amyliu345/zulip,guiquanz/zulip,mahim97/zulip,natanovia/zulip,PaulPetring/zulip,timabbott/zulip,jphilipsen05/zulip,Galexrt/zulip,ApsOps/zulip,schatt/zulip,moria/zulip,shubhamdhama/zulip,littledogboy/zulip,jackrzhang/zulip,Batterfii/zulip,j831/zulip,jimmy54/zulip,Galexrt/zulip,shaunstanislaus/zulip,mahim97/zulip,wdaher/zulip,aliceriot/zulip,umkay/zulip,kou/zulip,aps-sids/zulip,hustlzp/zulip,vaidap/zulip,bowlofstew/zulip,arpitpanwar/zulip,wdaher/zulip,arpitpanwar/zulip,mansilladev/zulip,gkotian/zulip,eeshangarg/zulip,LAndreas/zulip,DazWorrall/zulip,proliming/zulip,hustlzp/zulip,SmartPeople/zulip,joyhchen/zulip,Qgap/zulip,babbage/zulip,ipernet/zulip,huangkebo/zulip,praveenaki/zulip,lfranchi/zulip,hj3938/zulip,guiquanz/zulip,suxinde2009/zulip,Drooids/zulip,bastianh/zulip,ashwinirudrappa/zulip,suxinde2009/zulip,Galexrt/zulip,gkotian/zulip,babbage/zulip,vakila/zulip,cosmicAsymmetry/zulip,proliming/zulip,Galexrt/zulip,aliceriot/zulip,ipernet/zulip,xuxiao/zulip,Qgap/zulip,vabs22/zulip,luyifan/zulip,ryanbackman/zulip,Diptanshu8/zulip,udxxabp/zulip,wavelets/zulip,gkotian/zulip,reyha/zulip,zorojean/zulip,dawran6/zulip,Frouk/zulip,huangkebo/zulip,TigorC/zulip,bowlofstew/zulip,zachallaun/zulip,avastu/zulip,aakash-cr7/zulip,huangkebo/zulip,mdavid/zulip,arpith/zulip,Batterfii/zulip
5edc89f4ba516ba497f2b171bba865487116bbe0
camoco/__init__.py
camoco/__init__.py
""" Camoco Library - CoAnalysis of Molecular Components CacheMoneyCorn """ __license__ = """ Creative Commons Non-Commercial 4.0 Generic http://creativecommons.org/licenses/by-nc/4.0/ """ __version__ = '0.3.0' import sys import os import numpy import pyximport pyximport.install(setup_args={ "include_dirs":numpy.get_include() }) import matplotlib matplotlib.use('Agg') from .Config import cf from .Camoco import Camoco from .Expr import Expr from .COB import COB from .RefGen import RefGen from .RefGenDist import * from .PCCUP import * from .Ontology import Ontology,Term from .GWAS import GWAS from .HapMap import HapMap from .Locus import Locus from .Tools import available_datasets,del_dataset from .Tools import mv_dataset,redescribe_dataset from .GEO import Family from .GOnt import GOnt from .Annotation import GWASData # Create yourself Camoco.create('Camoco','Mother Database')
""" Camoco Library - CoAnalysis of Molecular Components CacheMoneyCorn """ __license__ = """ Creative Commons Non-Commercial 4.0 Generic http://creativecommons.org/licenses/by-nc/4.0/ """ __version__ = '0.3.0-dev' import sys import os import numpy import pyximport pyximport.install(setup_args={ "include_dirs":numpy.get_include() }) import matplotlib matplotlib.use('Agg') from .Config import cf from .Camoco import Camoco from .Expr import Expr from .COB import COB from .RefGen import RefGen from .RefGenDist import * from .PCCUP import * from .Ontology import Ontology,Term from .GWAS import GWAS from .HapMap import HapMap from .Locus import Locus from .Tools import available_datasets,del_dataset from .Tools import mv_dataset,redescribe_dataset from .GEO import Family from .GOnt import GOnt from .Annotation import GWASData # Create yourself Camoco.create('Camoco','Mother Database')
Add dev option to version string
Add dev option to version string
Python
mit
schae234/Camoco,schae234/Camoco
4f404a71cb7ee912bca8184fe94c97d6cfba1186
preprocessing_tools/solid_rotation_y.py
preprocessing_tools/solid_rotation_y.py
null
'''
Rotates the protein by a solid angle on the plane xz
'''

import numpy
import os
from argparse import ArgumentParser

from move_prot_helper import (read_vertex, read_pqr, rotate_y,
                              modify_pqr)

def read_inputs():
    """
    Parse command-line arguments to run move_protein.

    User should provide:
    -inMesh : str, mesh file you want to rotate.
    -inpqr  : str, pqr of the object you want to rotate.
    -alpha_y: float [degrees], rotation angle, about the dipole moment.
    -name   : str, output file name.
    """

    parser = ArgumentParser(description='Manage solid_rotation_y command line arguments')

    parser.add_argument('-im', '--inMesh', dest='im', type=str, default=None,
                        help="mesh file you want to rotate")

    parser.add_argument('-ip', '--inpqr', dest='ip', type=str, default=None,
                        help="pqr of the object you want to rotate")

    parser.add_argument('-angy', '--angle_y', dest='angy', type=float,
                        default=None,
                        help="rotation angle in the plane xz")

    parser.add_argument('-n', '--name', dest='name', type=str, default='',
                        help="output file name")

    return parser.parse_args()

args = read_inputs()

inMesh = args.im
inpqr = args.ip
angle_y = float(args.angy)*numpy.pi/180.
name = args.name

outMesh = inMesh + name
outpqr = inpqr + name

#Read mesh and pqr
#vert = read_vertex(inMesh+'.vert', float)
vert = numpy.loadtxt(inMesh+'.vert', dtype=float)

xq, q, Nq = read_pqr(inpqr+'.pqr', float)

xq_new = rotate_y(xq, angle_y)
vert_new = rotate_y(vert, angle_y)

ctr = numpy.average(vert_new, axis=0)

r_min_last = numpy.min(numpy.linalg.norm(vert_new, axis=1))
idx_rmin_last = numpy.argmin(numpy.linalg.norm(vert_new, axis=1))

print ('Desired configuration:')
print ('\tProtein is centered, {}'.format(ctr))
print ('\tProtein r minimum is {}, located at {}'.format(r_min_last,
       vert_new[idx_rmin_last, :]))

#### Save to file
numpy.savetxt(outMesh+'.vert', vert_new)

cmd = 'cp '+inMesh+'.face '+outMesh+'.face'
os.system(cmd)

modify_pqr(inpqr+'.pqr', outpqr+'.pqr', xq_new)

print ('\nWritten to '+outMesh+'.vert(.face) and '+outpqr+'.pqr')
Add script to rotate a solid angle in the xz plane
Add script to rotate a solid angle in the xz plane
Python
bsd-3-clause
barbagroup/pygbe,barbagroup/pygbe,barbagroup/pygbe
0d6d1e735e3c149f6adec370832949a81b930a56
tests/test_driller.py
tests/test_driller.py
import nose
import driller

import logging
l = logging.getLogger("driller.tests.test_driller")

import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))

def test_drilling_cgc():
    '''
    test drilling on the cgc binary, palindrome.
    '''

    binary = "cgc_scored_event_1/cgc/0b32aa01_01"

    # fuzzbitmap says every transition is worth satisfying
    d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")

    new_inputs = d.drill()

    nose.tools.assert_equal(len(new_inputs), 7)

    # make sure driller produced a new input which hits the easter egg
    nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))

def run_all():
    functions = globals()
    all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
    for f in sorted(all_functions.keys()):
        if hasattr(all_functions[f], '__call__'):
            all_functions[f]()

if __name__ == "__main__":
    run_all()
import nose
import driller

import logging
l = logging.getLogger("driller.tests.test_driller")

import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries-private'))

def test_drilling_cgc():
    '''
    test drilling on the cgc binary, palindrome.
    '''

    binary = "cgc_scored_event_1/cgc/0b32aa01_01"

    # fuzzbitmap says every transition is worth satisfying
    d = driller.Driller(os.path.join(bin_location, binary), "AAAA", "\xff"*65535, "whatever~")

    new_inputs = d.drill()

    nose.tools.assert_equal(len(new_inputs), 7)

    # make sure driller produced a new input which hits the easter egg
    nose.tools.assert_true(any(filter(lambda x: x[1].startswith('^'), new_inputs)))

def run_all():
    functions = globals()
    all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
    for f in sorted(all_functions.keys()):
        if hasattr(all_functions[f], '__call__'):
            all_functions[f]()

if __name__ == "__main__":
    run_all()
Update binaries path with private repo
Update binaries path with private repo
Python
bsd-2-clause
shellphish/driller
0082f48347b5f75263687e59b8c000b66cad8b77
salt/_modules/caasp_orch.py
salt/_modules/caasp_orch.py
from __future__ import absolute_import


def __virtual__():
    return "caasp_orch"


def sync_all():
    '''
    Syncronize everything before starting a new orchestration
    '''
    __utils__['caasp_log.debug']('orch: refreshing all')
    __salt__['saltutil.sync_all'](refresh=True)

    __utils__['caasp_log.debug']('orch: synchronizing the mine')
    __salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
from __future__ import absolute_import


def __virtual__():
    return "caasp_orch"


def sync_all():
    '''
    Syncronize everything before starting a new orchestration
    '''
    __utils__['caasp_log.debug']('orch: refreshing all')
    __salt__['saltutil.sync_all'](refresh=True)

    # make sure we refresh modules synchronously
    __salt__['saltutil.refresh_modules'](async=False)  # noqa: W606

    __utils__['caasp_log.debug']('orch: synchronizing the mine')
    __salt__['saltutil.runner']('mine.update', tgt='*', clear=True)
Make sure we refresh modules synchronously
Make sure we refresh modules synchronously

bsc#1124784

Signed-off-by: Alvaro Saurin <[email protected]>
Python
apache-2.0
kubic-project/salt,kubic-project/salt,kubic-project/salt
f430bfecc2a4022967260ff4cb80e3cb9ff84790
setup.py
setup.py
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import gettext
import os
import subprocess

import setuptools

setuptools.setup(
    name='heat_jeos',
    version='1',
    description='The heat-jeos project provides services for creating '
                '(J)ust (E)nough (O)perating (S)ystem images',
    license='Apache License (2.0)',
    author='Heat API Developers',
    author_email='[email protected]',
    url='http://heat-api.org.org/',
    packages=setuptools.find_packages(exclude=['bin']),
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.6',
        'Environment :: No Input/Output (Daemon)',
    ],
    scripts=['bin/heat-jeos'],
    py_modules=[])
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import gettext
import os
import subprocess

import setuptools

setuptools.setup(
    name='heat-jeos',
    version='1',
    description='The heat-jeos project provides services for creating '
                '(J)ust (E)nough (O)perating (S)ystem images',
    license='Apache License (2.0)',
    author='Heat API Developers',
    author_email='[email protected]',
    url='http://heat-api.org.org/',
    packages=setuptools.find_packages(exclude=['bin']),
    include_package_data=True,
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.6',
        'Environment :: No Input/Output (Daemon)',
    ],
    scripts=['bin/heat-jeos'],
    py_modules=[])
Change name from heat_jeos to heat-jeos
Change name from heat_jeos to heat-jeos

Signed-off-by: Jeff Peeler <[email protected]>
Python
apache-2.0
sdake/heat-jeos,steveb/heat-cfntools,openstack/heat-cfntools,bbandaru/heat-cfntools,steveb/heat-cfntools
5abe9a29ae586907304649fe6682e3e8997da310
app/views.py
app/views.py
from index import app
from flask import render_template, request
from config import BASE_URL
from query import get_callout, get_billboard

SHEET_ID = 'tzE2PsqJoWRpENlMr-ZlS8A'
#SHEET_ID = 'tIk5itVcfOHUmakkmpjCcxw' # Demo sheet


#@app.route('/')
#def index():
#    page_url = BASE_URL + request.path
#    page_title = 'Audio Player'
#    stream_name = "My Place"
#
#    social = {
#        'title': "VPR Audio Player",
#        'subtitle': "",
#        'img': "static/img/logo/vpr-logo-share.jpg",
#        'description': "Listen to the live streams of VPR News, VPR Classical, the BBC, Jazz24 and My Place.",
#        'twitter_text': "News, Classical, the BBC and more. The VPR Audio Player:",
#        'twitter_hashtag': ""
#    }
#
#    return render_template('content.html',
#        page_title=page_title,
#        social=social,
#        stream_name=stream_name,
#        page_url=page_url)
#

@app.route('/billboard')
def billboard():
    billboard = get_billboard(SHEET_ID)
    return render_template('billboard.html', billboard=billboard)


@app.route('/callout')
def callout():
    callout = get_callout(SHEET_ID)
    return render_template('callout.html', callout=callout)
from index import app
from flask import render_template, request
from config import BASE_URL
from query import get_callout, get_billboard

SHEET_ID = 'tzE2PsqJoWRpENlMr-ZlS8A'
#SHEET_ID = 'tIk5itVcfOHUmakkmpjCcxw' # Demo sheet


@app.route('/')
def index():
    page_url = BASE_URL + request.path
    page_title = 'Audio Player'
    stream_name = "Replay"

    social = {
        'title': "VPR Audio Player",
        'subtitle': "",
        'img': "static/img/logo/vpr-logo-share.jpg",
        'description': "Listen to the live streams of VPR News, VPR Classical, the BBC, Jazz24 and My Place.",
        'twitter_text': "News, Classical, the BBC and more. The VPR Audio Player:",
        'twitter_hashtag': ""
    }

    return render_template('content.html',
        page_title=page_title,
        social=social,
        stream_name=stream_name,
        page_url=page_url)


@app.route('/billboard')
def billboard():
    billboard = get_billboard(SHEET_ID)
    return render_template('billboard.html', billboard=billboard)


@app.route('/callout')
def callout():
    callout = get_callout(SHEET_ID)
    return render_template('callout.html', callout=callout)
Update stream name to Replay
Update stream name to Replay
Python
apache-2.0
vprnet/audio-player,vprnet/audio-player,vprnet/audio-player
4336a5d3eaf5500a6f3041b30c7887361dea5737
tests/test_formatting.py
tests/test_formatting.py
# -*- coding: utf-8 -*-
import click


def test_basic_functionality(runner):
    @click.command()
    def cli():
        """First paragraph.

        This is a very long second
        paragraph and not correctly
        wrapped but it will be rewrapped.

        \b
        This is
        a paragraph
        without rewrapping.

        \b
        1
        2
        3

        And this is a paragraph
        that will be rewrapped again.
        """

    result = runner.invoke(cli, ['--help'], terminal_width=60)
    assert not result.exception
    assert result.output.splitlines() == [
        'Usage: cli [OPTIONS]',
        '',
        '  First paragraph.',
        '',
        '  This is a very long second paragraph and not correctly',
        '  wrapped but it will be rewrapped.',
        '',
        '  This is',
        '  a paragraph',
        '  without rewrapping.',
        '',
        '  1',
        '  2',
        '  3',
        '',
        '  And this is a paragraph that will be rewrapped again.',
        '',
        'Options:',
        '  --help  Show this message and exit.',
    ]
# -*- coding: utf-8 -*-
import click


def test_basic_functionality(runner):
    @click.command()
    def cli():
        """First paragraph.

        This is a very long second
        paragraph and not correctly
        wrapped but it will be rewrapped.

        \b
        This is
        a paragraph
        without rewrapping.

        \b
        1
        2
        3

        And this is a paragraph
        that will be rewrapped again.
        """

    result = runner.invoke(cli, ['--help'], terminal_width=60)
    assert not result.exception
    assert result.output.splitlines() == [
        'Usage: cli [OPTIONS]',
        '',
        '  First paragraph.',
        '',
        '  This is a very long second paragraph and not correctly',
        '  wrapped but it will be rewrapped.',
        '',
        '  This is',
        '  a paragraph',
        '  without rewrapping.',
        '',
        '  1',
        '  2',
        '  3',
        '',
        '  And this is a paragraph that will be rewrapped again.',
        '',
        'Options:',
        '  --help  Show this message and exit.',
    ]


def test_wrapping_long_options_strings(runner):
    @click.group()
    def cli():
        """Top level command
        """

    @cli.group()
    def a_very_long():
        """Second level
        """

    @a_very_long.command()
    @click.argument('first')
    @click.argument('second')
    @click.argument('third')
    @click.argument('fourth')
    def command():
        """A command.
        """

    result = runner.invoke(cli, ['a_very_long', 'command', '--help'],
                           terminal_width=54)
    assert not result.exception
    assert result.output.splitlines() == [
        'Usage: cli a_very_long command [OPTIONS] FIRST SECOND',
        '                               THIRD FOURTH',
        '',
        '  A command.',
        '',
        'Options:',
        '  --help  Show this message and exit.',
    ]
Add failing test for formatting
Add failing test for formatting
Python
bsd-3-clause
her0e1c1/click,MakerDAO/click,Akasurde/click,scalp42/click,khwilson/click,polinom/click,amjith/click,hellodk/click,jvrsantacruz/click,naoyat/click,lucius-feng/click,dastergon/click,TomRegan/click,hackebrot/click,cbandera/click,oss6/click,GeoffColburn/click,willingc/click,pallets/click,pgkelley4/click,glorizen/click,mitsuhiko/click,ternus/click,andela-ooladayo/click,hugopeixoto/click,gambogi/click,ma-ric/click,pombredanne/click,Nudies/click,nchammas/click
33282c65743c86cbad38160b801e7155ab16c60f
tests/data_checks/test_gwas_catalog_coverage.py
tests/data_checks/test_gwas_catalog_coverage.py
null
# ------------------------------------------------
# built-ins
import unittest

# local
from utils.base import TestPostgapBase
# ------------------------------------------------


class TestGWASCatalogCoverage(TestPostgapBase):

    def test_each_gwas_efo_covered(self):
        self.skipTest('EACH GWAS EFO ID COVERED IN POSTGAP OUTPUT')

    def test_each_gwas_snp_covered(self):
        self.skipTest('EACH GWAS SNP ID COVERED IN POSTGAP OUTPUT')


if __name__ == '__main__':
    unittest.main()
Add placeholder for gwas catalog coverage
Add placeholder for gwas catalog coverage
Python
apache-2.0
Ensembl/cttv024,Ensembl/cttv024
b4d97079b6a74e9a2001b50c66b9eee6bc57ba66
swiftclient/__init__.py
swiftclient/__init__.py
# -*- encoding: utf-8 -*-
# Copyright (c) 2012 Rackspace
# flake8: noqa
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""""
OpenStack Swift Python client binding.
"""
from .client import *

# At setup.py time, we haven't installed anything yet, so there
# is nothing that is able to set this version property. Squelching
# that exception here should be fine- if there are problems with
# pkg_resources in a real install, that will manifest itself as
# an error still
try:
    from swiftclient import version
    __version__ = version.version_string
except Exception:
    pass
# -*- encoding: utf-8 -*-
# Copyright (c) 2012 Rackspace
# flake8: noqa
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OpenStack Swift Python client binding.
"""
from .client import *

# At setup.py time, we haven't installed anything yet, so there
# is nothing that is able to set this version property. Squelching
# that exception here should be fine- if there are problems with
# pkg_resources in a real install, that will manifest itself as
# an error still
try:
    from swiftclient import version
    __version__ = version.version_string
except Exception:
    pass
Remove extra double quote from docstring
Remove extra double quote from docstring

The extra " was visible on
http://docs.openstack.org/developer/python-swiftclient/swiftclient.html

Change-Id: I7d61c8259a4f13464c11ae7e3fa28eb3a58e4baa
Python
apache-2.0
krnflake/python-hubicclient,varunarya10/python-swiftclient,jeseem/python-swiftclient,JioCloud/python-swiftclient,sohonetlabs/python-swiftclient,pratikmallya/python-swiftclient,iostackproject/IO-Bandwidth-Differentiation-Client,JioCloud/python-swiftclient,openstack/python-swiftclient,pratikmallya/python-swiftclient,varunarya10/python-swiftclient,sohonetlabs/python-swiftclient,ironsmile/python-swiftclient,VyacheslavHashov/python-swiftclient,iostackproject/IO-Bandwidth-Differentiation-Client,jeseem/python-swiftclient,VyacheslavHashov/python-swiftclient,openstack/python-swiftclient,ironsmile/python-swiftclient
a62dc18745f952b3fcb05ddf4768758e25883698
accelerator/migrations/0058_grant_staff_clearance_for_existing_staff_members.py
accelerator/migrations/0058_grant_staff_clearance_for_existing_staff_members.py
null
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-06-12 19:38
from __future__ import unicode_literals

from django.db import migrations

STAFF = "Staff"  # don't import from models in migrations.


def grant_staff_clearances_for_role_grantees(apps, program_role):
    Clearance = apps.get_model('accelerator', 'Clearance')
    program_family = program_role.program.program_family
    user_ids = program_role.programrolegrant_set.values_list(
        "person_id", flat=True)
    for user_id in user_ids:
        Clearance.objects.get_or_create(
            user_id=user_id,
            program_family=program_family,
            defaults={"level": STAFF})


def grant_clearances_for_mc_staff_users(apps, schema_editor):
    ProgramRole = apps.get_model('accelerator', "ProgramRole")
    for program_role in ProgramRole.objects.filter(
            user_role__name=STAFF):
        grant_staff_clearances_for_role_grantees(apps, program_role)


def revoke_staff_clearances(apps, schema_editor):
    Clearance = apps.get_model("accelerator", "Clearance")
    Clearance.objects.filter(level=STAFF).delete()


class Migration(migrations.Migration):
    dependencies = [
        ('accelerator', '0057_add_clearance_level_staff'),
    ]

    operations = [
        migrations.RunPython(
            grant_clearances_for_mc_staff_users,
            revoke_staff_clearances)
    ]
Add datamigration to create staff clearances
[AC-6516] Add datamigration to create staff clearances
Python
mit
masschallenge/django-accelerator,masschallenge/django-accelerator
a02739cc7b1384e51f44d86a05af5a9845469fca
pygame/__init__.py
pygame/__init__.py
""" XXX: fish """ from pygame.color import Color from pygame.rect import Rect from pygame.surface import Surface from pygame.constants import * from pygame import ( display, color, surface, time, event, constants, sprite, mouse, locals, image, transform, pkgdata, font, mixer, cursors, key, draw ) from pygame.base import ( init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder, register_quit ) from pygame._error import get_error, set_error, SDLError # map our exceptions on pygame's default error = SDLError
""" XXX: fish """ from pygame.color import Color from pygame.rect import Rect from pygame.surface import Surface from pygame.constants import * from pygame import ( display, color, surface, time, event, constants, sprite, mouse, locals, image, transform, pkgdata, font, mixer, cursors, key, draw ) from pygame.base import ( init, quit, HAVE_NEWBUF, get_sdl_version, get_sdl_byteorder, register_quit ) from pygame._error import get_error, set_error, SDLError from pygame.mask import Mask # map our exceptions on pygame's default error = SDLError
Add Mask to toplevel pygame namespace
Add Mask to toplevel pygame namespace
Python
lgpl-2.1
CTPUG/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,caseyc37/pygame_cffi,CTPUG/pygame_cffi
8377bbca8b49a7a973d5521795d6df238774db8b
codenamegenerator/__init__.py
codenamegenerator/__init__.py
from typing import List

import csv
import os
import random

DICT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "dicts")


def dictionary_sample(name: str, sample: int = 1) -> List[str]:
    # TODO: Cache counting, and use file.seek to speed file reading.
    fname = os.path.join(DICT_DIR, f"{name}.csv")
    if not os.path.exists(fname):
        raise ValueError(f"{name} dictionary does not exists.")
    with open(fname, "rt") as csvfile:
        csvreader = csv.DictReader(
            csvfile, fieldnames=["NAME"], delimiter=",", quotechar='"'
        )
        names = [row["NAME"] for row in csvreader]
    return random.sample(names, sample)


def generate_codenames(
    prefix: str = "adjectives",
    suffix: str = "mobi_notable_scientists_and_hackers",
    num: int = 1,
) -> List[str]:
    prefixes = dictionary_sample(prefix, num)
    suffixes = dictionary_sample(suffix, num)
    return [f"{prefix} {suffix}" for prefix, suffix in zip(prefixes, suffixes)]
from typing import List

import csv
import os
import random

DICT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "dicts")


def dictionary_sample(name: str, sample: int = 1) -> List[str]:
    # TODO: Cache counting, and use file.seek to speed file reading.
    fname = os.path.join(DICT_DIR, f"{name}.csv")
    if not os.path.exists(fname):
        raise ValueError(f"{name} dictionary does not exists.")
    with open(fname, "rt") as csvfile:
        csvreader = csv.DictReader(
            csvfile, fieldnames=["NAME"], delimiter=",", quotechar='"'
        )
        names = [row["NAME"] for row in csvreader if row["NAME"].strip() != ""]
    return random.sample(names, sample)


def generate_codenames(
    prefix: str = "adjectives",
    suffix: str = "mobi_notable_scientists_and_hackers",
    num: int = 1,
) -> List[str]:
    prefixes = dictionary_sample(prefix, num)
    suffixes = dictionary_sample(suffix, num)
    return [f"{prefix} {suffix}" for prefix, suffix in zip(prefixes, suffixes)]
Fix for empty lines in data sets
Fix for empty lines in data sets
Python
mit
mariocesar/namegenerator
afcb007c7c8b68bc4cace5ff6d634330f70603ec
channels/management/commands/runworker.py
channels/management/commands/runworker.py
from django.core.management import BaseCommand, CommandError

from channels import DEFAULT_CHANNEL_LAYER
from channels.layers import get_channel_layer
from channels.log import setup_logger
from channels.routing import get_default_application
from channels.worker import Worker


class Command(BaseCommand):

    leave_locale_alone = True

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            "--layer",
            action="store",
            dest="layer",
            default=DEFAULT_CHANNEL_LAYER,
            help="Channel layer alias to use, if not the default.",
        )
        parser.add_argument(
            "channels",
            nargs="+",
            help="Channels to listen on."
        )

    def handle(self, *args, **options):
        # Get the backend to use
        self.verbosity = options.get("verbosity", 1)
        # Get the channel layer they asked for (or see if one isn't configured)
        if "layer" in options:
            self.channel_layer = get_channel_layer(options["layer"])
        else:
            self.channel_layer = get_channel_layer()
        if self.channel_layer is None:
            raise CommandError("You do not have any CHANNEL_LAYERS configured.")
        # Run the worker
        self.logger = setup_logger("django.channels", self.verbosity)
        self.logger.info("Running worker for channels %s", options["channels"])
        worker = Worker(
            application=get_default_application(),
            channels=options["channels"],
            channel_layer=self.channel_layer,
        )
        worker.run()
from django.core.management import BaseCommand, CommandError

from channels import DEFAULT_CHANNEL_LAYER
from channels.layers import get_channel_layer
from channels.log import setup_logger
from channels.routing import get_default_application
from channels.worker import Worker


class Command(BaseCommand):

    leave_locale_alone = True
    worker_class = Worker

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            "--layer",
            action="store",
            dest="layer",
            default=DEFAULT_CHANNEL_LAYER,
            help="Channel layer alias to use, if not the default.",
        )
        parser.add_argument(
            "channels",
            nargs="+",
            help="Channels to listen on."
        )

    def handle(self, *args, **options):
        # Get the backend to use
        self.verbosity = options.get("verbosity", 1)
        # Get the channel layer they asked for (or see if one isn't configured)
        if "layer" in options:
            self.channel_layer = get_channel_layer(options["layer"])
        else:
            self.channel_layer = get_channel_layer()
        if self.channel_layer is None:
            raise CommandError("You do not have any CHANNEL_LAYERS configured.")
        # Run the worker
        self.logger = setup_logger("django.channels", self.verbosity)
        self.logger.info("Running worker for channels %s", options["channels"])
        worker = self.worker_class(
            application=get_default_application(),
            channels=options["channels"],
            channel_layer=self.channel_layer,
        )
        worker.run()
Allow subclasses to customise the worker class
Allow subclasses to customise the worker class
Python
bsd-3-clause
django/channels,andrewgodwin/django-channels,andrewgodwin/channels
60352e8a3c41ec804ac1bd6b9f3af4bf611edc0b
profiles/views.py
profiles/views.py
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.views.generic import FormView, TemplateView
from django.utils.datastructures import MultiValueDictKeyError

from incuna.utils import get_class_from_path

from profiles.models import Profile
from profiles.utils import class_view_decorator

try:
    ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
    from forms import ProfileForm


@class_view_decorator(login_required)
class ProfileView(TemplateView):
    template_name = 'profiles/profile.html'


@class_view_decorator(login_required)
class ProfileEdit(FormView):
    form_class = ProfileForm
    template_name = 'profiles/profile_form.html'

    def form_valid(self, form):
        instance = super(ProfileEdit, self).form_valid(form)
        self.request.user.message_set.create(message='Your profile has been updated.')
        return instance

    def get_context_data(self, **kwargs):
        context = super(ProfileEdit, self).get_context_data(**kwargs)
        context['site'] = Site.objects.get_current()
        return context

    def get_object(self):
        if isinstance(self.request.user, Profile):
            return self.request.user
        return self.request.user.profile

    def get_success_url(self):
        try:
            return self.request.GET['next']
        except MultiValueDictKeyError:
            return reverse('profile')
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.utils.datastructures import MultiValueDictKeyError
from django.views.generic import TemplateView, UpdateView

from incuna.utils import get_class_from_path

from profiles.models import Profile
from profiles.utils import class_view_decorator

try:
    ProfileForm = get_class_from_path(settings.PROFILE_FORM_CLASS)
except AttributeError:
    from forms import ProfileForm


@class_view_decorator(login_required)
class ProfileView(TemplateView):
    template_name = 'profiles/profile.html'


@class_view_decorator(login_required)
class ProfileEdit(UpdateView):
    form_class = ProfileForm
    template_name = 'profiles/profile_form.html'

    def form_valid(self, form):
        instance = super(ProfileEdit, self).form_valid(form)
        self.request.user.message_set.create(message='Your profile has been updated.')
        return instance

    def get_context_data(self, **kwargs):
        context = super(ProfileEdit, self).get_context_data(**kwargs)
        context['site'] = Site.objects.get_current()
        return context

    def get_object(self):
        if isinstance(self.request.user, Profile):
            return self.request.user
        return self.request.user.profile

    def get_success_url(self):
        try:
            return self.request.GET['next']
        except MultiValueDictKeyError:
            return reverse('profile')
Use an update view instead of form view
Use an update view instead of form view
Python
bsd-2-clause
incuna/django-extensible-profiles
cb6fa6b54ca3e1908037a1b1a3399d8bd4b1be58
djoser/compat.py
djoser/compat.py
from djoser.conf import settings

try:
    from django.contrib.auth.password_validation import validate_password
except ImportError:
    from password_validation import validate_password

__all__ = ['settings', 'validate_password']


def get_user_email(user):
    email_field_name = get_user_email_field_name(user)
    return getattr(user, email_field_name, None)


def get_user_email_field_name(user):
    try:  # Assume we are Django >= 1.11
        return user.get_email_field_name()
    except AttributeError:  # we are using Django < 1.11
        return settings.USER_EMAIL_FIELD_NAME
from djoser.conf import settings

try:
    from django.contrib.auth.password_validation import validate_password
except ImportError:  # pragma: no cover
    from password_validation import validate_password

__all__ = ['settings', 'validate_password']


def get_user_email(user):
    email_field_name = get_user_email_field_name(user)
    return getattr(user, email_field_name, None)


def get_user_email_field_name(user):
    try:  # Assume we are Django >= 1.11
        return user.get_email_field_name()
    except AttributeError:  # we are using Django < 1.11
        return settings.USER_EMAIL_FIELD_NAME
Fix invalid fallback leading to circular calls
Fix invalid fallback leading to circular calls

Remove redundant finally
Python
mit
sunscrapers/djoser,akalipetis/djoser,sunscrapers/djoser,sunscrapers/djoser,akalipetis/djoser
a015eea9dfe94fe10ee44159f92b9ce196c14f2f
hooks/settings/settings_gunicorn.py
hooks/settings/settings_gunicorn.py
# -*- coding: utf-8 -*-
"""
Related article:
https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/

Parameters you might want to override:
    GUNICORN_BIND="0.0.0.0:8005"
"""
import os

workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 10

# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
    if k.startswith("GUNICORN_"):
        key = k.split('_', 1)[1].lower()
        locals()[key] = v
# -*- coding: utf-8 -*-
"""
Related article:
https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/

Parameters you might want to override:
    GUNICORN_BIND="0.0.0.0:8005"
"""
import os

workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 100

# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
    if k.startswith("GUNICORN_"):
        key = k.split('_', 1)[1].lower()
        locals()[key] = v
Increase Gunicorn worker eventlet connections, 10 -> 100.
Increase Gunicorn worker eventlet connections, 10 -> 100.
Python
mit
business-factory/captain-hook
03340917e96b7076ca420bea4e121f89c05935f6
censusreporter/config/prod/settings.py
censusreporter/config/prod/settings.py
from censusreporter.config.base.settings import *

import os

DEBUG = False

ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"

ALLOWED_HOSTS = [
    'censusreporter.org',
    'www.censusreporter.org',
    'censusreporter.dokku.censusreporter.org',
]

CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': os.environ.get('REDIS_URL', ''),
    }
}
from censusreporter.config.base.settings import *

import os

DEBUG = False

ROOT_URLCONF = 'censusreporter.config.prod.urls'
WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application"

ALLOWED_HOSTS = ['*']

CACHES = {
    'default': {
        'BACKEND': 'redis_cache.RedisCache',
        'LOCATION': os.environ.get('REDIS_URL', ''),
    }
}
Allow all hosts to support Dokku's healthcheck
Allow all hosts to support Dokku's healthcheck
Python
mit
censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter
f8eb93f1845a7776c61a59bafc6fdeb689712aff
examples/comp/ask_user_dialog.py
examples/comp/ask_user_dialog.py
"""Example showing the Ask User dialog controls and overall usage.""" import fusionless as fu dialog = fu.AskUserDialog() dialog.add_text("text", default="Default text value") dialog.add_position("position", default=(0.2, 0.8)) dialog.add_slider("slider", default=0.5, min=-10, max=10) dialog.add_screw("screw") dialog.add_file_browse("file", default="C:/path/to/foo") dialog.add_path_browse("path") dialog.add_clip_browse("clip") dialog.add_checkbox("checkbox", name="Do not check this!") dialog.add_dropdown("dropdown", options=["A", "B", "C"]) dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"]) result = dialog.show() if result is None: # Dialog was cancelled pass else: checked = result['checkbox'] if checked: print("You sure are living on the edge!") import pprint pprint.pprint(result)
"""Example showing the Ask User dialog controls and overall usage.""" import fusionless as fu dialog = fu.AskUserDialog("Example Ask User Dialog") dialog.add_text("text", default="Default text value") dialog.add_position("position", default=(0.2, 0.8)) dialog.add_slider("slider", default=0.5, min=-10, max=10) dialog.add_screw("screw") dialog.add_file_browse("file", default="C:/path/to/foo") dialog.add_path_browse("path") dialog.add_clip_browse("clip") dialog.add_checkbox("checkbox", name="Do not check this!") dialog.add_dropdown("dropdown", options=["A", "B", "C"]) dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"]) result = dialog.show() if result is None: # Dialog was cancelled pass else: checked = result['checkbox'] if checked: print("You sure are living on the edge!") import pprint pprint.pprint(result)
Add dialog title to example
Add dialog title to example
Python
bsd-3-clause
BigRoy/fusionless,BigRoy/fusionscript
a8497dcbeaa9d39a98402c6e46c86cd10c7d4de3
tests/unit/client_regression.py
tests/unit/client_regression.py
null
"""Unit tests for the client library. TODO: we have plans to move ./client.py to integration/, since those are really integration tests. Once that's done we should move this to ./client.py; it's here now to avoid name collisions/conflicts. """ import flask import pytest from schema import Schema from hil import config, rest from hil.client.base import FailedAPICallException from hil.client.client import Client from hil.test_common import HybridHTTPClient, fail_on_log_warnings, \ fresh_database, server_init, config_testsuite fail_on_log_warnings = pytest.fixture(fail_on_log_warnings) fresh_database = pytest.fixture(fresh_database) server_init = pytest.fixture(server_init) @pytest.fixture() def configure(): """Fixture to load the HIL config.""" config_testsuite() config.load_extensions() pytestmark = pytest.mark.usefixtures('fail_on_log_warnings', 'configure', 'fresh_database', 'server_init') def test_non_json_response(): """The client library should raise an error when the response body is unexpectedly not JSON. """ # Endpoint is arbitrary: endpoint = 'http:/127.0.0.1:9933' client = Client(endpoint, HybridHTTPClient(endpoint)) # Override one of the API calls with a different implementation: # pylint: disable=unused-variable @rest.rest_call('GET', '/nodes/free', Schema({})) def list_free_nodes(): """Mock API call for testing; always raises an error.""" flask.abort(500) try: client.node.list('free') assert False, 'Client library did not report an error!' except FailedAPICallException as e: # Make sure it's the right error: assert e.error_type == 'unknown', 'Wrong error type.'
Add a regression test for the error-silencing issue.
Add a regression test for the error-silencing issue.
Python
apache-2.0
CCI-MOC/haas
411175d40b449a793528920c3745ca831f6f55e0
debug_toolbar/panels/version.py
debug_toolbar/panels/version.py
import sys

import django
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

from debug_toolbar.panels import DebugPanel


class VersionDebugPanel(DebugPanel):
    """
    Panel that displays the Django version.
    """
    name = 'Version'
    template = 'debug_toolbar/panels/versions.html'
    has_content = True

    def nav_title(self):
        return _('Versions')

    def nav_subtitle(self):
        return 'Django %s' % django.get_version()

    def url(self):
        return ''

    def title(self):
        return _('Versions')

    def process_response(self, request, response):
        versions = {}
        versions['Python'] = '%d.%d.%d' % sys.version_info[:3]
        for app in settings.INSTALLED_APPS + ['django']:
            name = app.split('.')[-1].replace('_', ' ').capitalize()
            __import__(app)
            app = sys.modules[app]
            if hasattr(app, 'get_version'):
                get_version = app.get_version
                if callable(get_version):
                    version = get_version()
                else:
                    version = get_version
            elif hasattr(app, 'VERSION'):
                version = app.VERSION
            elif hasattr(app, '__version__'):
                version = app.__version__
            else:
                continue
            if isinstance(version, (list, tuple)):
                version = '.'.join(str(o) for o in version)
            versions[name] = version

        self.record_stats({
            'versions': versions,
            'paths': sys.path,
        })
import sys

import django
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

from debug_toolbar.panels import DebugPanel


class VersionDebugPanel(DebugPanel):
    """
    Panel that displays the Django version.
    """
    name = 'Version'
    template = 'debug_toolbar/panels/versions.html'
    has_content = True

    def nav_title(self):
        return _('Versions')

    def nav_subtitle(self):
        return 'Django %s' % django.get_version()

    def url(self):
        return ''

    def title(self):
        return _('Versions')

    def process_response(self, request, response):
        versions = {}
        versions['Python'] = '%d.%d.%d' % sys.version_info[:3]
        for app in list(settings.INSTALLED_APPS) + ['django']:
            name = app.split('.')[-1].replace('_', ' ').capitalize()
            __import__(app)
            app = sys.modules[app]
            if hasattr(app, 'get_version'):
                get_version = app.get_version
                if callable(get_version):
                    version = get_version()
                else:
                    version = get_version
            elif hasattr(app, 'VERSION'):
                version = app.VERSION
            elif hasattr(app, '__version__'):
                version = app.__version__
            else:
                continue
            if isinstance(version, (list, tuple)):
                version = '.'.join(str(o) for o in version)
            versions[name] = version

        self.record_stats({
            'versions': versions,
            'paths': sys.path,
        })
Convert settings.INSTALLED_APPS to list before concatenating django.
Convert settings.INSTALLED_APPS to list before concatenating django.

According to the Django documentation settings.INSTALLED_APPS is a tuple.
To go for sure that only list + list are concatenated,
settings.INSTALLED_APPS is converted to list type before adding ['django'].
Python
bsd-3-clause
stored/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,megcunningham/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,spookylukey/django-debug-toolbar,Endika/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,sidja/django-debug-toolbar,peap/django-debug-toolbar,ivelum/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,sidja/django-debug-toolbar,peap/django-debug-toolbar,guilhermetavares/django-debug-toolbar,stored/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,peap/django-debug-toolbar,Endika/django-debug-toolbar,barseghyanartur/django-debug-toolbar,ivelum/django-debug-toolbar,barseghyanartur/django-debug-toolbar,calvinpy/django-debug-toolbar,tim-schilling/django-debug-toolbar,jazzband/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,guilhermetavares/django-debug-toolbar,tim-schilling/django-debug-toolbar,barseghyanartur/django-debug-toolbar,tim-schilling/django-debug-toolbar,guilhermetavares/django-debug-toolbar,sidja/django-debug-toolbar,megcunningham/django-debug-toolbar,calvinpy/django-debug-toolbar,jazzband/django-debug-toolbar,seperman/django-debug-toolbar,jazzband/django-debug-toolbar,seperman/django-debug-toolbar,calvinpy/django-debug-toolbar,pevzi/django-debug-toolbar,stored/django-debug-toolbar,pevzi/django-debug-toolbar,pevzi/django-debug-toolbar,ivelum/django-debug-toolbar,spookylukey/django-debug-toolbar,seperman/django-debug-toolbar,spookylukey/django-debug-toolbar,Endika/django-debug-toolbar,megcunningham/django-debug-toolbar
06a052c7f60fd413f39b8e313e44bfeea970896a
work/admin.py
work/admin.py
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _

from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin

from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights


class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
    model = Highlights
    extra = 1
    max_num = 10
    verbose_name = _(u'Highlight')
    verbose_name_plural = _(u'Highlights')


@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
    inlines = [HighlightsInline, ]
    list_filter = ('is_active', 'categories',)

    def get_fieldsets(self, request, obj=None):
        fieldsets = (
            (None, {
                'fields': (
                    'is_active',
                    'title',
                    'slug',
                    'lead',
                    'preview_image',
                ),
            }),
        )
        fieldsets += self.get_base_fieldsets()
        return fieldsets

    def formfield_for_dbfield(self, db_field, **kwargs):
        if db_field.name == 'lead':
            kwargs['widget'] = forms.Textarea
        return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms

from parler.admin import TranslatableTabularInline
from adminsortable.admin import SortableTabularInline
from cms.admin.placeholderadmin import PlaceholderAdminMixin

from allink_core.allink_base.admin import AllinkBaseAdminSortable
from allink_apps.work.models import Work, Highlights


class HighlightsInline(SortableTabularInline, TranslatableTabularInline):
    model = Highlights
    extra = 1
    max_num = 10
    verbose_name = _(u'Highlight')
    verbose_name_plural = _(u'Highlights')


@admin.register(Work)
class WorkAdmin(PlaceholderAdminMixin, AllinkBaseAdminSortable):
    inlines = [HighlightsInline, ]
    list_filter = ('is_active', 'categories',)

    def get_fieldsets(self, request, obj=None):
        fieldsets = (
            (None, {
                'fields': (
                    'is_active',
                    'title',
                    'slug',
                    'lead',
                    'preview_image',
                ),
            }),
        )
        fieldsets += self.get_base_fieldsets()
        return fieldsets

    def formfield_for_dbfield(self, db_field, **kwargs):
        if db_field.name == 'lead':
            kwargs['widget'] = forms.Textarea
        return super(WorkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
TEST allink_apps subtree - pulling
TEST allink_apps subtree - pulling
Python
bsd-3-clause
allink/allink-apps,allink/allink-apps
73a9889f0e43d2b1dc94e2235a94cb888e0eda89
zeus/utils/sentry.py
zeus/utils/sentry.py
from functools import wraps

from sentry_sdk import Hub


def span(op, desc_or_func=None):
    def inner(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            if callable(desc_or_func):
                description = desc_or_func(*args, **kwargs)
            else:
                description = desc_or_func
            with Hub.current.start_span(op=op, description=description):
                return func(*args, **kwargs)

        return wrapped

    return inner
import asyncio
from contextlib import contextmanager
from functools import wraps

from sentry_sdk import Hub


# https://stackoverflow.com/questions/44169998/how-to-create-a-python-decorator-that-can-wrap-either-coroutine-or-function
def span(op, desc_or_func=None):
    def inner(func):
        @contextmanager
        def wrap_with_span(args, kwargs):
            if callable(desc_or_func):
                description = desc_or_func(*args, **kwargs)
            else:
                description = desc_or_func
            with Hub.current.start_span(op=op, description=description):
                yield

        @wraps(func)
        def wrapper(*args, **kwargs):
            if not asyncio.iscoroutinefunction(func):
                with wrap_with_span(args, kwargs):
                    return func(*args, **kwargs)
            else:
                async def tmp():
                    with wrap_with_span(args, kwargs):
                        return await func(*args, **kwargs)

                return tmp()

        return wrapper

    return inner
Fix span decorator to work with asyncio
Fix span decorator to work with asyncio
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus