Dataset columns and string statistics:

| column | type | stats |
| --- | --- | --- |
| commit | string | length 40 to 40 |
| old_file | string | length 4 to 118 |
| new_file | string | length 4 to 118 |
| old_contents | string | length 10 to 2.94k |
| new_contents | string | length 21 to 3.18k |
| subject | string | length 16 to 444 |
| message | string | length 17 to 2.63k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | length 5 to 43k |
| ndiff | string | length 52 to 3.32k |
| instruction | string | length 16 to 444 |
| content | string | length 133 to 4.32k |
| fuzzy_diff | string | length 16 to 3.18k |
5ac176fafd35bfa675e1718b74a8c6ef4dc74629
skoleintra/pgWeekplans.py
skoleintra/pgWeekplans.py
import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('class'): continue if not [c for c in line['class'].split() if c.startswith('linje')]: continue links = line.findAll('a') assert(len(links) >= 1) # find week date title = links[0].text # find url url = links[0]['href'] url = URL_PREFIX + urllib.quote(url.encode('iso-8859-1'), safe=':/?=&%') bs = surllib.skoleGetURL(url, True) msg = semail.Message('weekplans', bs) msg.setTitle(u'%s' % title) msg.updatePersonDate() msg.maybeSend() def skoleWeekplans(): global bs # surllib.skoleLogin() config.log(u'Kigger efter nye ugeplaner') # read the initial page bs = surllib.skoleGetURL(URL_MAIN, True, True) docFindWeekplans(bs) if __name__ == '__main__': # test skoleWeekplans()
import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('class'): continue if not [c for c in line['class'].split() if c.startswith('linje')]: continue links = line.findAll('a') assert(len(links) >= 1) # find week date title = links[0].text # find url url = links[0]['href'] url = url.encode('iso-8859-1') url = URL_PREFIX + urllib.quote(url, safe=':/?=&%') bs = surllib.skoleGetURL(url, True) msg = semail.Message('weekplans', bs) msg.setTitle(u'%s' % title) msg.updatePersonDate() msg.maybeSend() def skoleWeekplans(): global bs # surllib.skoleLogin() config.log(u'Kigger efter nye ugeplaner') # read the initial page bs = surllib.skoleGetURL(URL_MAIN, True, True) docFindWeekplans(bs) if __name__ == '__main__': # test skoleWeekplans()
Make code comply to PEP8
Make code comply to PEP8
Python
bsd-2-clause
bennyslbs/fskintra
import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' + def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('class'): continue if not [c for c in line['class'].split() if c.startswith('linje')]: continue links = line.findAll('a') assert(len(links) >= 1) # find week date title = links[0].text # find url url = links[0]['href'] + url = url.encode('iso-8859-1') - url = URL_PREFIX + urllib.quote(url.encode('iso-8859-1'), safe=':/?=&%') + url = URL_PREFIX + urllib.quote(url, safe=':/?=&%') bs = surllib.skoleGetURL(url, True) msg = semail.Message('weekplans', bs) msg.setTitle(u'%s' % title) msg.updatePersonDate() msg.maybeSend() def skoleWeekplans(): global bs # surllib.skoleLogin() config.log(u'Kigger efter nye ugeplaner') # read the initial page bs = surllib.skoleGetURL(URL_MAIN, True, True) docFindWeekplans(bs) if __name__ == '__main__': # test skoleWeekplans()
Make code comply to PEP8
## Code Before: import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('class'): continue if not [c for c in line['class'].split() if c.startswith('linje')]: continue links = line.findAll('a') assert(len(links) >= 1) # find week date title = links[0].text # find url url = links[0]['href'] url = URL_PREFIX + urllib.quote(url.encode('iso-8859-1'), safe=':/?=&%') bs = surllib.skoleGetURL(url, True) msg = semail.Message('weekplans', bs) msg.setTitle(u'%s' % title) msg.updatePersonDate() msg.maybeSend() def skoleWeekplans(): global bs # surllib.skoleLogin() config.log(u'Kigger efter nye ugeplaner') # read the initial page bs = surllib.skoleGetURL(URL_MAIN, True, True) docFindWeekplans(bs) if __name__ == '__main__': # test skoleWeekplans() ## Instruction: Make code comply to PEP8 ## Code After: import re import config import surllib import semail import datetime import urllib URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME URL_MAIN = URL_PREFIX + 'Ugeplaner.asp' def docFindWeekplans(bs): trs = bs.findAll('tr') for line in trs: if not line.has_key('class'): continue if not [c for c in line['class'].split() if c.startswith('linje')]: continue links = line.findAll('a') assert(len(links) >= 1) # find week date title = links[0].text # find url url = links[0]['href'] url = url.encode('iso-8859-1') url = URL_PREFIX + urllib.quote(url, safe=':/?=&%') bs = surllib.skoleGetURL(url, True) msg = semail.Message('weekplans', bs) msg.setTitle(u'%s' % title) msg.updatePersonDate() msg.maybeSend() def skoleWeekplans(): global bs # surllib.skoleLogin() config.log(u'Kigger efter nye ugeplaner') # read the initial page bs = surllib.skoleGetURL(URL_MAIN, True, True) docFindWeekplans(bs) if __name__ == '__main__': # test skoleWeekplans()
# ... existing code ...
URL_MAIN = URL_PREFIX + 'Ugeplaner.asp'
# ... modified code ...
        url = links[0]['href']
        url = url.encode('iso-8859-1')
        url = URL_PREFIX + urllib.quote(url, safe=':/?=&%')
# ... rest of the code ...
3ca7c667cbf37499dc959b336b9ff0e88f5d4275
dbarray/tests/run.py
dbarray/tests/run.py
"""From http://stackoverflow.com/a/12260597/400691""" import sys from django.conf import settings settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'dbarray', 'HOST': 'localhost' } }, INSTALLED_APPS=( 'dbarray.tests', # 'django.contrib.auth', # 'django.contrib.contenttypes', # 'django.contrib.sessions', # 'django.contrib.admin', ), ) try: from django.test.runner import DiscoverRunner except ImportError: # Fallback for django < 1.6 from discover_runner import DiscoverRunner test_runner = DiscoverRunner(verbosity=1) failures = test_runner.run_tests(['dbarray']) if failures: sys.exit(1)
"""From http://stackoverflow.com/a/12260597/400691""" import sys from django.conf import settings settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'dbarray', 'HOST': 'localhost' } }, INSTALLED_APPS=('dbarray.tests',), ) try: from django.test.runner import DiscoverRunner except ImportError: # Fallback for django < 1.6 from discover_runner import DiscoverRunner test_runner = DiscoverRunner(verbosity=1) failures = test_runner.run_tests(['dbarray']) if failures: sys.exit(1)
Remove commented code .
Remove commented code [ci skip].
Python
bsd-3-clause
ecometrica/django-dbarray
"""From http://stackoverflow.com/a/12260597/400691""" import sys from django.conf import settings settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'dbarray', 'HOST': 'localhost' } }, + INSTALLED_APPS=('dbarray.tests',), - INSTALLED_APPS=( - 'dbarray.tests', - # 'django.contrib.auth', - # 'django.contrib.contenttypes', - # 'django.contrib.sessions', - # 'django.contrib.admin', - ), ) + try: from django.test.runner import DiscoverRunner except ImportError: # Fallback for django < 1.6 from discover_runner import DiscoverRunner test_runner = DiscoverRunner(verbosity=1) failures = test_runner.run_tests(['dbarray']) if failures: sys.exit(1)
Remove commented code .
## Code Before: """From http://stackoverflow.com/a/12260597/400691""" import sys from django.conf import settings settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'dbarray', 'HOST': 'localhost' } }, INSTALLED_APPS=( 'dbarray.tests', # 'django.contrib.auth', # 'django.contrib.contenttypes', # 'django.contrib.sessions', # 'django.contrib.admin', ), ) try: from django.test.runner import DiscoverRunner except ImportError: # Fallback for django < 1.6 from discover_runner import DiscoverRunner test_runner = DiscoverRunner(verbosity=1) failures = test_runner.run_tests(['dbarray']) if failures: sys.exit(1) ## Instruction: Remove commented code . ## Code After: """From http://stackoverflow.com/a/12260597/400691""" import sys from django.conf import settings settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'dbarray', 'HOST': 'localhost' } }, INSTALLED_APPS=('dbarray.tests',), ) try: from django.test.runner import DiscoverRunner except ImportError: # Fallback for django < 1.6 from discover_runner import DiscoverRunner test_runner = DiscoverRunner(verbosity=1) failures = test_runner.run_tests(['dbarray']) if failures: sys.exit(1)
# ... existing code ...
    },
    INSTALLED_APPS=('dbarray.tests',),
)
try:
# ... rest of the code ...
deb70e977ad59a84fbafe2251f60f2da1d4abf20
astral/api/app.py
astral/api/app.py
import threading import tornado.httpserver import tornado.ioloop import tornado.web from astral.conf import settings from urls import url_patterns class NodeWebAPI(tornado.web.Application): def __init__(self): tornado.web.Application.__init__(self, url_patterns, **settings.TORNADO_SETTINGS) def run(): from astral.api.handlers.events import queue_listener event_thread = threading.Thread(target=queue_listener) event_thread.daemon = True event_thread.start() app = NodeWebAPI() http_server = tornado.httpserver.HTTPServer(app) http_server.listen(settings.TORNADO_SETTINGS['port']) tornado.ioloop.IOLoop.instance().start() if __name__ == "__main__": run()
import threading import tornado.httpserver import tornado.ioloop import tornado.web from astral.conf import settings from urls import url_patterns class NodeWebAPI(tornado.web.Application): def __init__(self): tornado.web.Application.__init__(self, url_patterns, **settings.TORNADO_SETTINGS) def run(): from astral.api.handlers.events import queue_listener event_thread = threading.Thread(target=queue_listener) event_thread.daemon = True event_thread.start() app = NodeWebAPI() app.listen(settings.TORNADO_SETTINGS['port']) tornado.ioloop.IOLoop.instance().start() if __name__ == "__main__": run()
Use shortcut listen() for starting the HTTPServer.
Use shortcut listen() for starting the HTTPServer.
Python
mit
peplin/astral
import threading import tornado.httpserver import tornado.ioloop import tornado.web from astral.conf import settings from urls import url_patterns class NodeWebAPI(tornado.web.Application): def __init__(self): tornado.web.Application.__init__(self, url_patterns, **settings.TORNADO_SETTINGS) def run(): from astral.api.handlers.events import queue_listener event_thread = threading.Thread(target=queue_listener) event_thread.daemon = True event_thread.start() app = NodeWebAPI() - http_server = tornado.httpserver.HTTPServer(app) - http_server.listen(settings.TORNADO_SETTINGS['port']) + app.listen(settings.TORNADO_SETTINGS['port']) tornado.ioloop.IOLoop.instance().start() if __name__ == "__main__": run()
Use shortcut listen() for starting the HTTPServer.
## Code Before: import threading import tornado.httpserver import tornado.ioloop import tornado.web from astral.conf import settings from urls import url_patterns class NodeWebAPI(tornado.web.Application): def __init__(self): tornado.web.Application.__init__(self, url_patterns, **settings.TORNADO_SETTINGS) def run(): from astral.api.handlers.events import queue_listener event_thread = threading.Thread(target=queue_listener) event_thread.daemon = True event_thread.start() app = NodeWebAPI() http_server = tornado.httpserver.HTTPServer(app) http_server.listen(settings.TORNADO_SETTINGS['port']) tornado.ioloop.IOLoop.instance().start() if __name__ == "__main__": run() ## Instruction: Use shortcut listen() for starting the HTTPServer. ## Code After: import threading import tornado.httpserver import tornado.ioloop import tornado.web from astral.conf import settings from urls import url_patterns class NodeWebAPI(tornado.web.Application): def __init__(self): tornado.web.Application.__init__(self, url_patterns, **settings.TORNADO_SETTINGS) def run(): from astral.api.handlers.events import queue_listener event_thread = threading.Thread(target=queue_listener) event_thread.daemon = True event_thread.start() app = NodeWebAPI() app.listen(settings.TORNADO_SETTINGS['port']) tornado.ioloop.IOLoop.instance().start() if __name__ == "__main__": run()
// ... existing code ...
    app = NodeWebAPI()
    app.listen(settings.TORNADO_SETTINGS['port'])
    tornado.ioloop.IOLoop.instance().start()
// ... rest of the code ...
8866de1785cc6961d2111f1e0f55b781a7de660d
_markerlib/__init__.py
_markerlib/__init__.py
"""Used by pkg_resources to interpret PEP 345 environment markers.""" from _markerlib.markers import default_environment, compile, interpret, as_function
"""Used by pkg_resources to interpret PEP 345 environment markers.""" from _markerlib.markers import default_environment, compile, interpret
Remove missing import (since b62968cd2666)
Remove missing import (since b62968cd2666) --HG-- branch : distribute extra : rebase_source : d1190f895d794dfcb838f7eb40a60ab07b8b309e
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
"""Used by pkg_resources to interpret PEP 345 environment markers.""" - from _markerlib.markers import default_environment, compile, interpret, as_function + from _markerlib.markers import default_environment, compile, interpret
Remove missing import (since b62968cd2666)
## Code Before: """Used by pkg_resources to interpret PEP 345 environment markers.""" from _markerlib.markers import default_environment, compile, interpret, as_function ## Instruction: Remove missing import (since b62968cd2666) ## Code After: """Used by pkg_resources to interpret PEP 345 environment markers.""" from _markerlib.markers import default_environment, compile, interpret
# ... existing code ...
"""Used by pkg_resources to interpret PEP 345 environment markers."""
from _markerlib.markers import default_environment, compile, interpret
# ... rest of the code ...
f996755665c9e55af5139a473b859aa0eb507515
back2back/wsgi.py
back2back/wsgi.py
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings") from django.core.wsgi import get_wsgi_application from dj_static import Cling, MediaCling application = Cling(MediaCling(get_wsgi_application()))
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings") from django.core.wsgi import get_wsgi_application from dj_static import Cling application = Cling(get_wsgi_application())
Remove MediaCling as there isn't any.
Remove MediaCling as there isn't any.
Python
bsd-2-clause
mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings") from django.core.wsgi import get_wsgi_application - from dj_static import Cling, MediaCling + from dj_static import Cling - application = Cling(MediaCling(get_wsgi_application())) + application = Cling(get_wsgi_application())
Remove MediaCling as there isn't any.
## Code Before: import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings") from django.core.wsgi import get_wsgi_application from dj_static import Cling, MediaCling application = Cling(MediaCling(get_wsgi_application())) ## Instruction: Remove MediaCling as there isn't any. ## Code After: import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings") from django.core.wsgi import get_wsgi_application from dj_static import Cling application = Cling(get_wsgi_application())
...
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
...
6a827bee5263c9bb5d34d6ac971581c62e827e7d
pinax/comments/models.py
pinax/comments/models.py
from datetime import datetime from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Comment(models.Model): author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE) name = models.CharField(max_length=100) email = models.CharField(max_length=255, blank=True) website = models.CharField(max_length=255, blank=True) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.IntegerField() content_object = GenericForeignKey() comment = models.TextField() submit_date = models.DateTimeField(default=datetime.now) ip_address = models.GenericIPAddressField(null=True) public = models.BooleanField(default=True) @property def data(self): return { "pk": self.pk, "comment": self.comment, "author": self.author.username if self.author else "", "name": self.name, "email": self.email, "website": self.website, "submit_date": str(self.submit_date) } def __str__(self): return "pk=%d" % self.pk # pragma: no cover
from datetime import datetime from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models class Comment(models.Model): author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE) name = models.CharField(max_length=100) email = models.CharField(max_length=255, blank=True) website = models.CharField(max_length=255, blank=True) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.IntegerField() content_object = GenericForeignKey() comment = models.TextField() submit_date = models.DateTimeField(default=datetime.now) ip_address = models.GenericIPAddressField(null=True) public = models.BooleanField(default=True) @property def data(self): return { "pk": self.pk, "comment": self.comment, "author": self.author.username if self.author else "", "name": self.name, "email": self.email, "website": self.website, "submit_date": str(self.submit_date) } def __str__(self): return "pk=%d" % self.pk # pragma: no cover
Change syntax to drop support
Change syntax to drop support
Python
mit
pinax/pinax-comments,pinax/pinax-comments,eldarion/dialogos
from datetime import datetime from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models - from django.utils.encoding import python_2_unicode_compatible - @python_2_unicode_compatible class Comment(models.Model): author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE) name = models.CharField(max_length=100) email = models.CharField(max_length=255, blank=True) website = models.CharField(max_length=255, blank=True) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.IntegerField() content_object = GenericForeignKey() comment = models.TextField() submit_date = models.DateTimeField(default=datetime.now) ip_address = models.GenericIPAddressField(null=True) public = models.BooleanField(default=True) @property def data(self): return { "pk": self.pk, "comment": self.comment, "author": self.author.username if self.author else "", "name": self.name, "email": self.email, "website": self.website, "submit_date": str(self.submit_date) } def __str__(self): return "pk=%d" % self.pk # pragma: no cover
Change syntax to drop support
## Code Before: from datetime import datetime from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Comment(models.Model): author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE) name = models.CharField(max_length=100) email = models.CharField(max_length=255, blank=True) website = models.CharField(max_length=255, blank=True) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.IntegerField() content_object = GenericForeignKey() comment = models.TextField() submit_date = models.DateTimeField(default=datetime.now) ip_address = models.GenericIPAddressField(null=True) public = models.BooleanField(default=True) @property def data(self): return { "pk": self.pk, "comment": self.comment, "author": self.author.username if self.author else "", "name": self.name, "email": self.email, "website": self.website, "submit_date": str(self.submit_date) } def __str__(self): return "pk=%d" % self.pk # pragma: no cover ## Instruction: Change syntax to drop support ## Code After: from datetime import datetime from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models class Comment(models.Model): author = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, related_name="comments", on_delete=models.CASCADE) name = models.CharField(max_length=100) email = models.CharField(max_length=255, blank=True) website = models.CharField(max_length=255, blank=True) content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.IntegerField() content_object = GenericForeignKey() comment = models.TextField() submit_date = models.DateTimeField(default=datetime.now) ip_address = models.GenericIPAddressField(null=True) public = models.BooleanField(default=True) @property def data(self): return { "pk": self.pk, "comment": self.comment, "author": self.author.username if self.author else "", "name": self.name, "email": self.email, "website": self.website, "submit_date": str(self.submit_date) } def __str__(self): return "pk=%d" % self.pk # pragma: no cover
# ... existing code ...
from django.db import models
# ... modified code ...
class Comment(models.Model):
# ... rest of the code ...
82796dfb24c3e65b669d4336948e76e2a64cf73f
LR/lr/model/node_service.py
LR/lr/model/node_service.py
''' Created on Mar 17, 2011 Base model class for learning registry data model @author: jpoyau ''' from base_model import createBaseModel, ModelParser, defaultCouchServer, appConfig from pylons import * import datetime, logging log = logging.getLogger(__name__) SPEC_SERVICE_DESCRIPTION= appConfig['spec.models.node_service_description'] DB_NODE = appConfig['couchdb.db.node'] class NodeServiceModel(createBaseModel(SPEC_SERVICE_DESCRIPTION, DB_NODE)): PUBLISH='publish' ACCESS = 'access' BROKER = 'broker' ADMINISTRATIVE='administrative' def __init__(self, data=None): super(NodeServiceModel,self).__init__(data)
''' Created on Mar 17, 2011 Base model class for learning registry data model @author: jpoyau ''' from base_model import createBaseModel, ModelParser, defaultCouchServer, appConfig from pylons import * import datetime, logging log = logging.getLogger(__name__) SPEC_SERVICE_DESCRIPTION= appConfig['spec.models.node_service_description'] DB_NODE = appConfig['couchdb.db.node'] class NodeServiceModel(createBaseModel(SPEC_SERVICE_DESCRIPTION, DB_NODE)): PUBLISH='publish' ACCESS = 'access' BROKER = 'broker' DISTRIBUTE = 'distribute' ADMINISTRATIVE='administrative' def __init__(self, data=None): super(NodeServiceModel,self).__init__(data)
Add 'distribute' to the node services
Add 'distribute' to the node services
Python
apache-2.0
jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry
''' Created on Mar 17, 2011 Base model class for learning registry data model @author: jpoyau ''' from base_model import createBaseModel, ModelParser, defaultCouchServer, appConfig from pylons import * import datetime, logging log = logging.getLogger(__name__) SPEC_SERVICE_DESCRIPTION= appConfig['spec.models.node_service_description'] DB_NODE = appConfig['couchdb.db.node'] class NodeServiceModel(createBaseModel(SPEC_SERVICE_DESCRIPTION, DB_NODE)): PUBLISH='publish' ACCESS = 'access' BROKER = 'broker' + DISTRIBUTE = 'distribute' ADMINISTRATIVE='administrative' def __init__(self, data=None): super(NodeServiceModel,self).__init__(data)
Add 'distribute' to the node services
## Code Before: ''' Created on Mar 17, 2011 Base model class for learning registry data model @author: jpoyau ''' from base_model import createBaseModel, ModelParser, defaultCouchServer, appConfig from pylons import * import datetime, logging log = logging.getLogger(__name__) SPEC_SERVICE_DESCRIPTION= appConfig['spec.models.node_service_description'] DB_NODE = appConfig['couchdb.db.node'] class NodeServiceModel(createBaseModel(SPEC_SERVICE_DESCRIPTION, DB_NODE)): PUBLISH='publish' ACCESS = 'access' BROKER = 'broker' ADMINISTRATIVE='administrative' def __init__(self, data=None): super(NodeServiceModel,self).__init__(data) ## Instruction: Add 'distribute' to the node services ## Code After: ''' Created on Mar 17, 2011 Base model class for learning registry data model @author: jpoyau ''' from base_model import createBaseModel, ModelParser, defaultCouchServer, appConfig from pylons import * import datetime, logging log = logging.getLogger(__name__) SPEC_SERVICE_DESCRIPTION= appConfig['spec.models.node_service_description'] DB_NODE = appConfig['couchdb.db.node'] class NodeServiceModel(createBaseModel(SPEC_SERVICE_DESCRIPTION, DB_NODE)): PUBLISH='publish' ACCESS = 'access' BROKER = 'broker' DISTRIBUTE = 'distribute' ADMINISTRATIVE='administrative' def __init__(self, data=None): super(NodeServiceModel,self).__init__(data)
# ... existing code ...
    BROKER = 'broker'
    DISTRIBUTE = 'distribute'
    ADMINISTRATIVE='administrative'
# ... rest of the code ...
c8cc85f0d10093ae9cd42ee4cc7dabef46718645
ood/controllers/simple.py
ood/controllers/simple.py
import socket from ood.minecraft import Client from ood.models import SimpleServerState class SimpleServerController(object): def __init__(self, ood_instance): self.state = SimpleServerState.objects.get(ood=ood_instance) self.mcc = Client(ood_instance) def start(self): self.mcc.reset_player_info() return self._send_cmd('start') def stop(self): return self._send_cmd('stop') def running(self): response = self._send_cmd('running').lower() return response == 'true' def _send_cmd(self, cmd): buf = '' s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((self.state.ip_address, self.state.port)) s.sendall('%s\n' % cmd) while '\n' not in buf: buf += s.recv(1024) response, nl, buf = buf.partition('\n') s.close() return response
import socket from ood.minecraft import Client from ood.models import SimpleServerState class SimpleServerController(object): def __init__(self, ood_instance): self.state, _ = SimpleServerState.objects.get_or_create( ood=ood_instance) self.mcc = Client(ood_instance) def start(self): self.mcc.reset_player_info() return self._send_cmd('start') def stop(self): return self._send_cmd('stop') def running(self): response = self._send_cmd('running').lower() return response == 'true' def _send_cmd(self, cmd): buf = '' s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((self.state.ip_address, self.state.port)) s.sendall('%s\n' % cmd) while '\n' not in buf: buf += s.recv(1024) response, nl, buf = buf.partition('\n') s.close() return response
Create SimpleServerState object if it doesn't exist.
Create SimpleServerState object if it doesn't exist.
Python
mit
markrcote/ood,markrcote/ood,markrcote/ood,markrcote/ood
import socket from ood.minecraft import Client from ood.models import SimpleServerState class SimpleServerController(object): def __init__(self, ood_instance): - self.state = SimpleServerState.objects.get(ood=ood_instance) + self.state, _ = SimpleServerState.objects.get_or_create( + ood=ood_instance) self.mcc = Client(ood_instance) def start(self): self.mcc.reset_player_info() return self._send_cmd('start') def stop(self): return self._send_cmd('stop') def running(self): response = self._send_cmd('running').lower() return response == 'true' def _send_cmd(self, cmd): buf = '' s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((self.state.ip_address, self.state.port)) s.sendall('%s\n' % cmd) while '\n' not in buf: buf += s.recv(1024) response, nl, buf = buf.partition('\n') s.close() return response
Create SimpleServerState object if it doesn't exist.
## Code Before: import socket from ood.minecraft import Client from ood.models import SimpleServerState class SimpleServerController(object): def __init__(self, ood_instance): self.state = SimpleServerState.objects.get(ood=ood_instance) self.mcc = Client(ood_instance) def start(self): self.mcc.reset_player_info() return self._send_cmd('start') def stop(self): return self._send_cmd('stop') def running(self): response = self._send_cmd('running').lower() return response == 'true' def _send_cmd(self, cmd): buf = '' s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((self.state.ip_address, self.state.port)) s.sendall('%s\n' % cmd) while '\n' not in buf: buf += s.recv(1024) response, nl, buf = buf.partition('\n') s.close() return response ## Instruction: Create SimpleServerState object if it doesn't exist. ## Code After: import socket from ood.minecraft import Client from ood.models import SimpleServerState class SimpleServerController(object): def __init__(self, ood_instance): self.state, _ = SimpleServerState.objects.get_or_create( ood=ood_instance) self.mcc = Client(ood_instance) def start(self): self.mcc.reset_player_info() return self._send_cmd('start') def stop(self): return self._send_cmd('stop') def running(self): response = self._send_cmd('running').lower() return response == 'true' def _send_cmd(self, cmd): buf = '' s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((self.state.ip_address, self.state.port)) s.sendall('%s\n' % cmd) while '\n' not in buf: buf += s.recv(1024) response, nl, buf = buf.partition('\n') s.close() return response
// ... existing code ...
    def __init__(self, ood_instance):
        self.state, _ = SimpleServerState.objects.get_or_create(
            ood=ood_instance)
        self.mcc = Client(ood_instance)
// ... rest of the code ...
9698e531ffd528b6b56e285f5cf8087aa06d4a02
test/conftest.py
test/conftest.py
import pytest @pytest.fixture def namespaces(): import class_namespaces return class_namespaces @pytest.fixture def compat(): import class_namespaces.compat return class_namespaces.compat @pytest.fixture def abc(): import class_namespaces.compat.abc return class_namespaces.compat.abc
import pytest @pytest.fixture def namespaces(): import class_namespaces return class_namespaces @pytest.fixture def namespace(namespaces): return namespaces.Namespace @pytest.fixture def compat(): import class_namespaces.compat return class_namespaces.compat @pytest.fixture def abc(): import class_namespaces.compat.abc return class_namespaces.compat.abc
Add fixture for Namespace specifically.
Add fixture for Namespace specifically.
Python
mit
mwchase/class-namespaces,mwchase/class-namespaces
import pytest @pytest.fixture def namespaces(): import class_namespaces return class_namespaces + + + @pytest.fixture + def namespace(namespaces): + return namespaces.Namespace @pytest.fixture def compat(): import class_namespaces.compat return class_namespaces.compat @pytest.fixture def abc(): import class_namespaces.compat.abc return class_namespaces.compat.abc
Add fixture for Namespace specifically.
## Code Before: import pytest @pytest.fixture def namespaces(): import class_namespaces return class_namespaces @pytest.fixture def compat(): import class_namespaces.compat return class_namespaces.compat @pytest.fixture def abc(): import class_namespaces.compat.abc return class_namespaces.compat.abc ## Instruction: Add fixture for Namespace specifically. ## Code After: import pytest @pytest.fixture def namespaces(): import class_namespaces return class_namespaces @pytest.fixture def namespace(namespaces): return namespaces.Namespace @pytest.fixture def compat(): import class_namespaces.compat return class_namespaces.compat @pytest.fixture def abc(): import class_namespaces.compat.abc return class_namespaces.compat.abc
# ... existing code ...
    return class_namespaces


@pytest.fixture
def namespace(namespaces):
    return namespaces.Namespace
# ... rest of the code ...
93effb501a50d8265afd37826fbcab4dd4a87611
qa_app/views.py
qa_app/views.py
from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query(email=session['email']).first() attempts = Attempts.query(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query.filter_by(email=session['email']).first() attempts = Attempts.query.filter_by(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
Fix user and attempts sqlalchemia request.
Fix user and attempts sqlalchemia request.
Python
apache-2.0
molecul/qa_app_flask,molecul/qa_app_flask,molecul/qa_app_flask
from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): - user = Users.query(email=session['email']).first() + user = Users.query.filter_by(email=session['email']).first() - attempts = Attempts.query(user_id=user.id).all() + attempts = Attempts.query.filter_by(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
Fix user and attempts sqlalchemia request.
## Code Before: from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query(email=session['email']).first() attempts = Attempts.query(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts) ## Instruction: Fix user and attempts sqlalchemia request. ## Code After: from flask import Blueprint, render_template, request, session from flask_login import login_required from qa_app.models import Users, Attempts views = Blueprint('views', __name__) @views.before_request def redirect_setup(): if request.path.startswith("/static"): return @views.route('/') def index(): return render_template("index.html", page="Home") @views.route('/profile') @login_required def profile(): user = Users.query.filter_by(email=session['email']).first() attempts = Attempts.query.filter_by(user_id=user.id).all() return render_template("profile.html", page="Profile", user=user, attempts=attempts)
// ... existing code ...
def profile():
    user = Users.query.filter_by(email=session['email']).first()
    attempts = Attempts.query.filter_by(user_id=user.id).all()
    return render_template("profile.html", page="Profile", user=user, attempts=attempts)
// ... rest of the code ...
593d152bd6eec64bc8ee504020ba0e5e2345966c
wafer/pages/views.py
wafer/pages/views.py
from django.http import Http404 from django.core.exceptions import PermissionDenied from django.views.generic import DetailView, TemplateView, UpdateView from wafer.pages.models import Page from wafer.pages.forms import PageForm class ShowPage(DetailView): template_name = 'wafer.pages/page.html' model = Page class EditPage(UpdateView): template_name = 'wafer.pages/page_form.html' model = Page form_class = PageForm fields = ['name', 'content'] def slug(request, url): """Look up a page by url (which is a tree of slugs)""" page = None for slug in url.split('/'): if not slug: continue try: page = Page.objects.get(slug=slug, parent=page) except Page.DoesNotExist: raise Http404 if page is None: try: page = Page.objects.get(slug='index') except Page.DoesNotExist: return TemplateView.as_view( template_name='wafer/index.html')(request) if 'edit' in request.GET.keys(): if not request.user.has_perm('pages.change_page'): raise PermissionDenied return EditPage.as_view()(request, pk=page.id) return ShowPage.as_view()(request, pk=page.id)
from django.http import Http404 from django.core.exceptions import PermissionDenied from django.views.generic import DetailView, TemplateView, UpdateView from wafer.pages.models import Page from wafer.pages.forms import PageForm class ShowPage(DetailView): template_name = 'wafer.pages/page.html' model = Page class EditPage(UpdateView): template_name = 'wafer.pages/page_form.html' model = Page form_class = PageForm def slug(request, url): """Look up a page by url (which is a tree of slugs)""" page = None for slug in url.split('/'): if not slug: continue try: page = Page.objects.get(slug=slug, parent=page) except Page.DoesNotExist: raise Http404 if page is None: try: page = Page.objects.get(slug='index') except Page.DoesNotExist: return TemplateView.as_view( template_name='wafer/index.html')(request) if 'edit' in request.GET.keys(): if not request.user.has_perm('pages.change_page'): raise PermissionDenied return EditPage.as_view()(request, pk=page.id) return ShowPage.as_view()(request, pk=page.id)
Remove unneeded field specifier from EditPage form to make Django 1.8 happy
Remove unneeded field specifier from EditPage form to make Django 1.8 happy
Python
isc
CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer
from django.http import Http404 from django.core.exceptions import PermissionDenied from django.views.generic import DetailView, TemplateView, UpdateView from wafer.pages.models import Page from wafer.pages.forms import PageForm class ShowPage(DetailView): template_name = 'wafer.pages/page.html' model = Page class EditPage(UpdateView): template_name = 'wafer.pages/page_form.html' model = Page form_class = PageForm - fields = ['name', 'content'] def slug(request, url): """Look up a page by url (which is a tree of slugs)""" page = None for slug in url.split('/'): if not slug: continue try: page = Page.objects.get(slug=slug, parent=page) except Page.DoesNotExist: raise Http404 if page is None: try: page = Page.objects.get(slug='index') except Page.DoesNotExist: return TemplateView.as_view( template_name='wafer/index.html')(request) if 'edit' in request.GET.keys(): if not request.user.has_perm('pages.change_page'): raise PermissionDenied return EditPage.as_view()(request, pk=page.id) return ShowPage.as_view()(request, pk=page.id)
Remove unneeded field specifier from EditPage form to make Django 1.8 happy
## Code Before: from django.http import Http404 from django.core.exceptions import PermissionDenied from django.views.generic import DetailView, TemplateView, UpdateView from wafer.pages.models import Page from wafer.pages.forms import PageForm class ShowPage(DetailView): template_name = 'wafer.pages/page.html' model = Page class EditPage(UpdateView): template_name = 'wafer.pages/page_form.html' model = Page form_class = PageForm fields = ['name', 'content'] def slug(request, url): """Look up a page by url (which is a tree of slugs)""" page = None for slug in url.split('/'): if not slug: continue try: page = Page.objects.get(slug=slug, parent=page) except Page.DoesNotExist: raise Http404 if page is None: try: page = Page.objects.get(slug='index') except Page.DoesNotExist: return TemplateView.as_view( template_name='wafer/index.html')(request) if 'edit' in request.GET.keys(): if not request.user.has_perm('pages.change_page'): raise PermissionDenied return EditPage.as_view()(request, pk=page.id) return ShowPage.as_view()(request, pk=page.id) ## Instruction: Remove unneeded field specifier from EditPage form to make Django 1.8 happy ## Code After: from django.http import Http404 from django.core.exceptions import PermissionDenied from django.views.generic import DetailView, TemplateView, UpdateView from wafer.pages.models import Page from wafer.pages.forms import PageForm class ShowPage(DetailView): template_name = 'wafer.pages/page.html' model = Page class EditPage(UpdateView): template_name = 'wafer.pages/page_form.html' model = Page form_class = PageForm def slug(request, url): """Look up a page by url (which is a tree of slugs)""" page = None for slug in url.split('/'): if not slug: continue try: page = Page.objects.get(slug=slug, parent=page) except Page.DoesNotExist: raise Http404 if page is None: try: page = Page.objects.get(slug='index') except Page.DoesNotExist: return TemplateView.as_view( template_name='wafer/index.html')(request) if 'edit' in request.GET.keys(): if not request.user.has_perm('pages.change_page'): raise PermissionDenied return EditPage.as_view()(request, pk=page.id) return ShowPage.as_view()(request, pk=page.id)
// ... existing code ...
    form_class = PageForm
// ... rest of the code ...
fcf3511a586b5efe4a86674ccd4c80c67ec2ed14
tracker/src/main/tracker/util/connection.py
tracker/src/main/tracker/util/connection.py
import os from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from sqlalchemy.orm.scoping import scoped_session DB_URL = os.environ['DB_URL'] Base = automap_base() engine = create_engine(DB_URL) Base.prepare(engine, reflect=True) session_factory = sessionmaker(bind=engine, expire_on_commit=False) Session = scoped_session(session_factory)
import os from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from sqlalchemy.orm.scoping import scoped_session DB_URL = os.environ['DB_URL'] if not DB_URL: raise ValueError("DB_URL not present in the environment") Base = automap_base() engine = create_engine(DB_URL) Base.prepare(engine, reflect=True) session_factory = sessionmaker(bind=engine, expire_on_commit=False) Session = scoped_session(session_factory)
Test for DB_URL being present in environment.
Test for DB_URL being present in environment.
Python
mit
llevar/germline-regenotyper,llevar/germline-regenotyper
import os from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from sqlalchemy.orm.scoping import scoped_session DB_URL = os.environ['DB_URL'] + + if not DB_URL: + raise ValueError("DB_URL not present in the environment") + Base = automap_base() engine = create_engine(DB_URL) Base.prepare(engine, reflect=True) session_factory = sessionmaker(bind=engine, expire_on_commit=False) Session = scoped_session(session_factory)
Test for DB_URL being present in environment.
## Code Before: import os from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from sqlalchemy.orm.scoping import scoped_session DB_URL = os.environ['DB_URL'] Base = automap_base() engine = create_engine(DB_URL) Base.prepare(engine, reflect=True) session_factory = sessionmaker(bind=engine, expire_on_commit=False) Session = scoped_session(session_factory) ## Instruction: Test for DB_URL being present in environment. ## Code After: import os from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import sessionmaker from sqlalchemy import create_engine from sqlalchemy.orm.scoping import scoped_session DB_URL = os.environ['DB_URL'] if not DB_URL: raise ValueError("DB_URL not present in the environment") Base = automap_base() engine = create_engine(DB_URL) Base.prepare(engine, reflect=True) session_factory = sessionmaker(bind=engine, expire_on_commit=False) Session = scoped_session(session_factory)
// ... existing code ...
DB_URL = os.environ['DB_URL']

if not DB_URL:
    raise ValueError("DB_URL not present in the environment")

Base = automap_base()
// ... rest of the code ...
550106fbff26c16cdf2269dc0778814c05ed1e3b
nap/apps.py
nap/apps.py
from django.apps import AppConfig from django.utils.module_loading import autodiscover_modules class NapConfig(AppConfig): '''App Config that performs auto-discover on ready.''' def ready(self): super(NapConfig, self).ready() autodiscover_modules('publishers')
from django.apps import AppConfig from django.utils.module_loading import autodiscover_modules class NapConfig(AppConfig): '''App Config that performs auto-discover on ready.''' name = 'nap' def ready(self): super(NapConfig, self).ready() autodiscover_modules('publishers')
Fix to include mandatory name attribute
Fix to include mandatory name attribute
Python
bsd-3-clause
MarkusH/django-nap,limbera/django-nap
from django.apps import AppConfig from django.utils.module_loading import autodiscover_modules class NapConfig(AppConfig): '''App Config that performs auto-discover on ready.''' + + name = 'nap' def ready(self): super(NapConfig, self).ready() autodiscover_modules('publishers')
Fix to include mandatory name attribute
## Code Before: from django.apps import AppConfig from django.utils.module_loading import autodiscover_modules class NapConfig(AppConfig): '''App Config that performs auto-discover on ready.''' def ready(self): super(NapConfig, self).ready() autodiscover_modules('publishers') ## Instruction: Fix to include mandatory name attribute ## Code After: from django.apps import AppConfig from django.utils.module_loading import autodiscover_modules class NapConfig(AppConfig): '''App Config that performs auto-discover on ready.''' name = 'nap' def ready(self): super(NapConfig, self).ready() autodiscover_modules('publishers')
// ... existing code ...
    '''App Config that performs auto-discover on ready.'''

    name = 'nap'
// ... rest of the code ...
3c30166378d37c812cecb505a3d9023b079d24be
app/__init__.py
app/__init__.py
from gevent import monkey monkey.patch_all() # Imports import os from flask import Flask, render_template from flask_socketio import SocketIO import boto3 # Configure app socketio = SocketIO() app = Flask(__name__) app.config.from_object(os.environ["APP_SETTINGS"]) import nltk try: nltk.data.find('tokenizers/punkt') except LookupError: nltk.download("punkt") # DB db = boto3.resource("dynamodb", region_name=app.config["DYNAMO_REGION"], endpoint_url=app.config["DYNAMO_DATABASE_URI"]) s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"]) from app import models models.initialize() # Initialize the controllers from app import controllers # Initialize app w/SocketIO socketio.init_app(app) # HTTP error handling @app.errorhandler(404) def not_found(error): return render_template("404.html"), 404
from gevent import monkey monkey.patch_all() # Imports import os from flask import Flask, render_template from flask_socketio import SocketIO import boto3 # Configure app socketio = SocketIO() app = Flask(__name__) app.config.from_object(os.environ["APP_SETTINGS"]) import nltk nltk.download("punkt") # DB db = boto3.resource("dynamodb", region_name=app.config["DYNAMO_REGION"], endpoint_url=app.config["DYNAMO_DATABASE_URI"]) s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"]) from app import models models.initialize() # Initialize the controllers from app import controllers # Initialize app w/SocketIO socketio.init_app(app) # HTTP error handling @app.errorhandler(404) def not_found(error): return render_template("404.html"), 404
Fix stupid nltk data download thing
Fix stupid nltk data download thing
Python
mit
PapaCharlie/SteamyReviews,PapaCharlie/SteamyReviews,PapaCharlie/SteamyReviews,PapaCharlie/SteamyReviews
from gevent import monkey monkey.patch_all() # Imports import os from flask import Flask, render_template from flask_socketio import SocketIO import boto3 # Configure app socketio = SocketIO() app = Flask(__name__) app.config.from_object(os.environ["APP_SETTINGS"]) import nltk - try: - nltk.data.find('tokenizers/punkt') - except LookupError: - nltk.download("punkt") + nltk.download("punkt") # DB db = boto3.resource("dynamodb", region_name=app.config["DYNAMO_REGION"], endpoint_url=app.config["DYNAMO_DATABASE_URI"]) s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"]) from app import models models.initialize() # Initialize the controllers from app import controllers # Initialize app w/SocketIO socketio.init_app(app) # HTTP error handling @app.errorhandler(404) def not_found(error): return render_template("404.html"), 404
Fix stupid nltk data download thing
## Code Before: from gevent import monkey monkey.patch_all() # Imports import os from flask import Flask, render_template from flask_socketio import SocketIO import boto3 # Configure app socketio = SocketIO() app = Flask(__name__) app.config.from_object(os.environ["APP_SETTINGS"]) import nltk try: nltk.data.find('tokenizers/punkt') except LookupError: nltk.download("punkt") # DB db = boto3.resource("dynamodb", region_name=app.config["DYNAMO_REGION"], endpoint_url=app.config["DYNAMO_DATABASE_URI"]) s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"]) from app import models models.initialize() # Initialize the controllers from app import controllers # Initialize app w/SocketIO socketio.init_app(app) # HTTP error handling @app.errorhandler(404) def not_found(error): return render_template("404.html"), 404 ## Instruction: Fix stupid nltk data download thing ## Code After: from gevent import monkey monkey.patch_all() # Imports import os from flask import Flask, render_template from flask_socketio import SocketIO import boto3 # Configure app socketio = SocketIO() app = Flask(__name__) app.config.from_object(os.environ["APP_SETTINGS"]) import nltk nltk.download("punkt") # DB db = boto3.resource("dynamodb", region_name=app.config["DYNAMO_REGION"], endpoint_url=app.config["DYNAMO_DATABASE_URI"]) s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"]) from app import models models.initialize() # Initialize the controllers from app import controllers # Initialize app w/SocketIO socketio.init_app(app) # HTTP error handling @app.errorhandler(404) def not_found(error): return render_template("404.html"), 404
// ... existing code ...
import nltk
nltk.download("punkt")
// ... rest of the code ...
f672d140987614c5e4e80114cf28f2f6350be233
pyBattleship.py
pyBattleship.py
import boards def main(): playerDead = False enemyDead = False enemyBoard = boards.makeEnemyBoard() enemyLocations = boards.setupEnemyBoard() playerBoard = boards.makePlayerBoard() print("----BATTLESHIP----") boards.printBoards(enemyBoard, playerBoard) while not playerDead and not enemyDead: row = int(input("Guess row: ")) col = int(input("Guess coloumn: ")) #Make true for testing purposes enemyDead = True if enemyDead: print("You win!") else: print("You lose!") main()
import boards def main(): MAX_HITS = 17 enemyDead = False playerDead = False hitsOnEnemy = 0 hitsOnPlayer = 0 turn = 1 enemyBoard = boards.makeEnemyBoard() enemyLocations = boards.setupEnemyBoard() playerBoard = boards.makePlayerBoard() print("----BATTLESHIP----") while not playerDead and not enemyDead: boards.printBoards(enemyBoard, playerBoard) print("Turn " + str(turn)) row = int(input("Guess row: ")) - 1 col = int(input("Guess coloumn: ")) - 1 #Player choice evaluated if enemyLocations[row][col] == "o": #It's a hit! enemyBoard[row][col] = "*" hitsOnEnemy += 1 else: if(row < 0 or row > boards.BOARD_SIZE - 1) or (col < 0 or col > boards.BOARD_SIZE - 1): print("Oops, that's not even in the ocean.") elif(enemyBoard[row][col] == "X" or enemyBoard[row][col] == "*"): print("You guessed that one already.") else: #It's a miss enemyBoard[row][col] = "X" #Check if either player is dead if hitsOnEnemy == MAX_HITS: enemyDead = True elif hitsOnPlayer == MAX_HITS: playerDead = True turn += 1 #Make true for testing purposes enemyDead = True if enemyDead: print("YOU WIN!") else: print("YOU LOSE!") main()
Add player input evaluation, determines hit/miss
Add player input evaluation, determines hit/miss
Python
apache-2.0
awhittle3/pyBattleship
import boards def main(): + MAX_HITS = 17 + enemyDead = False playerDead = False - enemyDead = False + hitsOnEnemy = 0 + hitsOnPlayer = 0 + turn = 1 enemyBoard = boards.makeEnemyBoard() enemyLocations = boards.setupEnemyBoard() playerBoard = boards.makePlayerBoard() print("----BATTLESHIP----") + - boards.printBoards(enemyBoard, playerBoard) - while not playerDead and not enemyDead: + boards.printBoards(enemyBoard, playerBoard) + print("Turn " + str(turn)) - row = int(input("Guess row: ")) + row = int(input("Guess row: ")) - 1 - col = int(input("Guess coloumn: ")) + col = int(input("Guess coloumn: ")) - 1 + + #Player choice evaluated + if enemyLocations[row][col] == "o": + #It's a hit! + enemyBoard[row][col] = "*" + hitsOnEnemy += 1 + + else: + if(row < 0 or row > boards.BOARD_SIZE - 1) or (col < 0 or col > boards.BOARD_SIZE - 1): + print("Oops, that's not even in the ocean.") + elif(enemyBoard[row][col] == "X" or enemyBoard[row][col] == "*"): + print("You guessed that one already.") + else: + #It's a miss + enemyBoard[row][col] = "X" + + #Check if either player is dead + if hitsOnEnemy == MAX_HITS: + enemyDead = True + elif hitsOnPlayer == MAX_HITS: + playerDead = True + + turn += 1 #Make true for testing purposes enemyDead = True if enemyDead: - print("You win!") + print("YOU WIN!") else: - print("You lose!") + print("YOU LOSE!") main()
Add player input evaluation, determines hit/miss
## Code Before: import boards def main(): playerDead = False enemyDead = False enemyBoard = boards.makeEnemyBoard() enemyLocations = boards.setupEnemyBoard() playerBoard = boards.makePlayerBoard() print("----BATTLESHIP----") boards.printBoards(enemyBoard, playerBoard) while not playerDead and not enemyDead: row = int(input("Guess row: ")) col = int(input("Guess coloumn: ")) #Make true for testing purposes enemyDead = True if enemyDead: print("You win!") else: print("You lose!") main() ## Instruction: Add player input evaluation, determines hit/miss ## Code After: import boards def main(): MAX_HITS = 17 enemyDead = False playerDead = False hitsOnEnemy = 0 hitsOnPlayer = 0 turn = 1 enemyBoard = boards.makeEnemyBoard() enemyLocations = boards.setupEnemyBoard() playerBoard = boards.makePlayerBoard() print("----BATTLESHIP----") while not playerDead and not enemyDead: boards.printBoards(enemyBoard, playerBoard) print("Turn " + str(turn)) row = int(input("Guess row: ")) - 1 col = int(input("Guess coloumn: ")) - 1 #Player choice evaluated if enemyLocations[row][col] == "o": #It's a hit! enemyBoard[row][col] = "*" hitsOnEnemy += 1 else: if(row < 0 or row > boards.BOARD_SIZE - 1) or (col < 0 or col > boards.BOARD_SIZE - 1): print("Oops, that's not even in the ocean.") elif(enemyBoard[row][col] == "X" or enemyBoard[row][col] == "*"): print("You guessed that one already.") else: #It's a miss enemyBoard[row][col] = "X" #Check if either player is dead if hitsOnEnemy == MAX_HITS: enemyDead = True elif hitsOnPlayer == MAX_HITS: playerDead = True turn += 1 #Make true for testing purposes enemyDead = True if enemyDead: print("YOU WIN!") else: print("YOU LOSE!") main()
// ... existing code ...
def main():
    MAX_HITS = 17
    enemyDead = False
    playerDead = False
    hitsOnEnemy = 0
    hitsOnPlayer = 0
    turn = 1
// ... modified code ...
    print("----BATTLESHIP----")
    while not playerDead and not enemyDead:
        boards.printBoards(enemyBoard, playerBoard)
        print("Turn " + str(turn))
        row = int(input("Guess row: ")) - 1
        col = int(input("Guess coloumn: ")) - 1
        #Player choice evaluated
        if enemyLocations[row][col] == "o":
            #It's a hit!
            enemyBoard[row][col] = "*"
            hitsOnEnemy += 1
        else:
            if(row < 0 or row > boards.BOARD_SIZE - 1) or (col < 0 or col > boards.BOARD_SIZE - 1):
                print("Oops, that's not even in the ocean.")
            elif(enemyBoard[row][col] == "X" or enemyBoard[row][col] == "*"):
                print("You guessed that one already.")
            else:
                #It's a miss
                enemyBoard[row][col] = "X"
        #Check if either player is dead
        if hitsOnEnemy == MAX_HITS:
            enemyDead = True
        elif hitsOnPlayer == MAX_HITS:
            playerDead = True
        turn += 1
...
    if enemyDead:
        print("YOU WIN!")
    else:
        print("YOU LOSE!")
// ... rest of the code ...
ed0b33dc0866100c2bede3579711af761d5cb159
plumeria/util/__init__.py
plumeria/util/__init__.py
MIME_TYPES = { '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif', } def to_mimetype(ext): if ext.lower() in MIME_TYPES: return MIME_TYPES[ext.lower()]
MIME_TYPES = { '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif', '.txt': 'text/plain', } def to_mimetype(ext): if ext.lower() in MIME_TYPES: return MIME_TYPES[ext.lower()] else: return "application/octet-stream"
Fix mimetype detection to handle more types.
Fix mimetype detection to handle more types.
Python
mit
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
MIME_TYPES = { '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif', + '.txt': 'text/plain', } def to_mimetype(ext): if ext.lower() in MIME_TYPES: return MIME_TYPES[ext.lower()] + else: + return "application/octet-stream"
Fix mimetype detection to handle more types.
## Code Before: MIME_TYPES = { '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif', } def to_mimetype(ext): if ext.lower() in MIME_TYPES: return MIME_TYPES[ext.lower()] ## Instruction: Fix mimetype detection to handle more types. ## Code After: MIME_TYPES = { '.png': 'image/png', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif', '.txt': 'text/plain', } def to_mimetype(ext): if ext.lower() in MIME_TYPES: return MIME_TYPES[ext.lower()] else: return "application/octet-stream"
# ... existing code ... '.gif': 'image/gif', '.txt': 'text/plain', } # ... modified code ... return MIME_TYPES[ext.lower()] else: return "application/octet-stream" # ... rest of the code ...
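The commit in this record grows a hand-rolled extension-to-MIME map and adds an application/octet-stream fallback. Purely for comparison, and not something the commit itself does, Python's standard mimetypes module already knows these mappings. A hedged sketch of the same function built on it follows; the function name is reused only to mirror the record, and the "file" prefix is just a trick so a bare extension can be fed to guess_type.

```python
import mimetypes

def to_mimetype(ext, default="application/octet-stream"):
    """Guess a MIME type from a file extension, falling back to a default."""
    # guess_type expects a filename or URL, so prefix the bare extension
    guessed, _encoding = mimetypes.guess_type("file" + ext.lower())
    return guessed or default

print(to_mimetype(".PNG"))  # image/png
print(to_mimetype(".txt"))  # text/plain
print(to_mimetype(".xyz"))  # application/octet-stream
```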
8f31a87ace324c519eac8d883cf0327d08f48df0
lib/ansiblelint/rules/VariableHasSpacesRule.py
lib/ansiblelint/rules/VariableHasSpacesRule.py
from ansiblelint import AnsibleLintRule import re class VariableHasSpacesRule(AnsibleLintRule): id = '206' shortdesc = 'Variables should have spaces before and after: {{ var_name }}' description = 'Variables should have spaces before and after: ``{{ var_name }}``' severity = 'LOW' tags = ['formatting'] version_added = 'v4.0.0' variable_syntax = re.compile(r"{{.*}}") bracket_regex = re.compile(r"{{[^{' -]|[^ '}-]}}") def match(self, file, line): if not self.variable_syntax.search(line): return return self.bracket_regex.search(line)
from ansiblelint import AnsibleLintRule import re class VariableHasSpacesRule(AnsibleLintRule): id = '206' shortdesc = 'Variables should have spaces before and after: {{ var_name }}' description = 'Variables should have spaces before and after: ``{{ var_name }}``' severity = 'LOW' tags = ['formatting'] version_added = 'v4.0.0' variable_syntax = re.compile(r"{{.*}}") bracket_regex = re.compile(r"{{[^{' -]|[^ '}-]}}") def match(self, file, line): if not self.variable_syntax.search(line): return line_exclude_json = re.sub(r"[^{]{'\w+': ?[^{]{.*?}}", "", line) return self.bracket_regex.search(line_exclude_json)
Fix nested JSON obj false positive
var-space-rule: Fix nested JSON obj false positive When a compact-form nested JSON object is used within a Jinja2 context, as in the following example: set_fact:"{{ {'test': {'subtest': variable}} }}", the trailing 'variable}}' raises a false-positive [206] error. This commit adds an intermediate step to the 206 rule (VariableHasSpacesRule.py) that excludes nested JSON objects before matching for an actual error. Fixes: #665 Signed-off-by: Simon Kheng <765fd267c62104898c4dfafd2f027edd838d8b13@gmail.com>
Python
mit
willthames/ansible-lint
from ansiblelint import AnsibleLintRule import re class VariableHasSpacesRule(AnsibleLintRule): id = '206' shortdesc = 'Variables should have spaces before and after: {{ var_name }}' description = 'Variables should have spaces before and after: ``{{ var_name }}``' severity = 'LOW' tags = ['formatting'] version_added = 'v4.0.0' variable_syntax = re.compile(r"{{.*}}") bracket_regex = re.compile(r"{{[^{' -]|[^ '}-]}}") def match(self, file, line): if not self.variable_syntax.search(line): return + line_exclude_json = re.sub(r"[^{]{'\w+': ?[^{]{.*?}}", "", line) - return self.bracket_regex.search(line) + return self.bracket_regex.search(line_exclude_json)
Fix nested JSON obj false positive
## Code Before: from ansiblelint import AnsibleLintRule import re class VariableHasSpacesRule(AnsibleLintRule): id = '206' shortdesc = 'Variables should have spaces before and after: {{ var_name }}' description = 'Variables should have spaces before and after: ``{{ var_name }}``' severity = 'LOW' tags = ['formatting'] version_added = 'v4.0.0' variable_syntax = re.compile(r"{{.*}}") bracket_regex = re.compile(r"{{[^{' -]|[^ '}-]}}") def match(self, file, line): if not self.variable_syntax.search(line): return return self.bracket_regex.search(line) ## Instruction: Fix nested JSON obj false positive ## Code After: from ansiblelint import AnsibleLintRule import re class VariableHasSpacesRule(AnsibleLintRule): id = '206' shortdesc = 'Variables should have spaces before and after: {{ var_name }}' description = 'Variables should have spaces before and after: ``{{ var_name }}``' severity = 'LOW' tags = ['formatting'] version_added = 'v4.0.0' variable_syntax = re.compile(r"{{.*}}") bracket_regex = re.compile(r"{{[^{' -]|[^ '}-]}}") def match(self, file, line): if not self.variable_syntax.search(line): return line_exclude_json = re.sub(r"[^{]{'\w+': ?[^{]{.*?}}", "", line) return self.bracket_regex.search(line_exclude_json)
// ... existing code ... return line_exclude_json = re.sub(r"[^{]{'\w+': ?[^{]{.*?}}", "", line) return self.bracket_regex.search(line_exclude_json) // ... rest of the code ...
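To make the new intermediate step in the 206 rule concrete, here is a small standalone demo of the two regular expressions from this record, run against the task line given in the commit message. Only the demo's variable names are invented; the patterns and the sample line come from the record itself.

```python
import re

variable_syntax = re.compile(r"{{.*}}")
bracket_regex = re.compile(r"{{[^{' -]|[^ '}-]}}")

line = "set_fact:\"{{ {'test': {'subtest': variable}} }}\""

assert variable_syntax.search(line)                   # the line does use Jinja2
print(bool(bracket_regex.search(line)))               # True: 'variable}}' looks like a violation

# The fix strips compact nested JSON objects before the real check runs.
line_exclude_json = re.sub(r"[^{]{'\w+': ?[^{]{.*?}}", "", line)
print(repr(line_exclude_json))                        # 'set_fact:"{{ }}"'
print(bool(bracket_regex.search(line_exclude_json)))  # False: no more false positive
```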
b28dd26792be9125d2fd3d5657431bc6ee7a5470
lobster/cmssw/actions.py
lobster/cmssw/actions.py
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid)) self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
Add message in log with plotting process id.
Add message in log with plotting process id.
Python
mit
matz-e/lobster,matz-e/lobster,matz-e/lobster
import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() + logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid)) self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
Add message in log with plotting process id.
## Code Before: import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now ## Instruction: Add message in log with plotting process id. ## Code After: import datetime import multiprocessing from lobster.cmssw.plotting import Plotter logger = multiprocessing.get_logger() class DummyPlotter(object): def make_plots(*args, **kwargs): pass class Actions(object): def __init__(self, config): if 'plotdir' in config: logger.info('plots in {0} will be updated automatically'.format(config['plotdir'])) if 'foremen logs' in config: logger.info('foremen logs will be included from: {0}'.format(', '.join(config['foremen logs']))) plotter = Plotter(config['filename'], config['plotdir']) else: plotter = DummyPlotter() def plotf(q): while q.get() not in ('stop', None): plotter.make_plots(foremen=config.get('foremen logs')) self.plotq = multiprocessing.Queue() self.plotp = multiprocessing.Process(target=plotf, args=(self.plotq,)) self.plotp.start() logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid)) self.__last = datetime.datetime.now() def __del__(self): self.plotq.put('stop') def take(self): now = datetime.datetime.now() if (now - self.__last).seconds > 15 * 60: self.plotq.put('plot') self.__last = now
# ... existing code ... self.plotp.start() logger.info('spawning process for automatic plotting with pid {0}'.format(self.plotp.pid)) # ... rest of the code ...
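The change in this record only adds a log line with the worker pid, but the surrounding pattern (a background multiprocessing.Process fed through a Queue, throttled by a timestamp) is a reusable shape. Below is a stripped-down sketch of that shape with the plotting call replaced by a print; the class and method names are illustrative, not the project's API.

```python
import datetime
import multiprocessing

def worker(queue):
    """Consume commands until 'stop' (or a closed queue) arrives."""
    while queue.get() not in ("stop", None):
        print("doing one unit of background work")  # stand-in for plotting

class ThrottledActions:
    def __init__(self, min_interval_secs=900):
        self.queue = multiprocessing.Queue()
        self.process = multiprocessing.Process(target=worker, args=(self.queue,))
        self.process.start()
        print("spawned worker with pid", self.process.pid)
        self.min_interval = datetime.timedelta(seconds=min_interval_secs)
        self.last = datetime.datetime.now()

    def take(self):
        now = datetime.datetime.now()
        if now - self.last >= self.min_interval:
            self.queue.put("work")
            self.last = now

    def stop(self):
        self.queue.put("stop")
        self.process.join()

if __name__ == "__main__":
    actions = ThrottledActions(min_interval_secs=0)  # no throttle, for the demo
    actions.take()
    actions.stop()
```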
43dce889a79b77445eebe0d0e15532b64e7728d5
tests/test_upbeatbot.py
tests/test_upbeatbot.py
import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
Use more descriptive unit test names
Use more descriptive unit test names
Python
mit
nickdibari/UpBeatBot
import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() - def test_chosen_animal_returned(self): + def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') - def test_random_animal_returned_with_text(self): + def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) - def test_random_returned_no_text(self): + def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
Use more descriptive unit test names
## Code Before: import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal) ## Instruction: Use more descriptive unit test names ## Code After: import unittest from libs.upbeatbot import UpBeatBot class TestUpbeatBot(unittest.TestCase): @classmethod def setUpClass(cls): cls.upbeat_bot = UpBeatBot() def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' animal = self.upbeat_bot._get_animal_from_message(tweet) self.assertEqual(animal, 'dog') def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' animal = self.upbeat_bot._get_animal_from_message(tweet) # Not really a test, just ensuring *something* is returned self.assertTrue(animal) def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string animal = self.upbeat_bot._get_animal_from_message(tweet) # Ditto as above self.assertTrue(animal)
// ... existing code ... def test_get_animal_from_message_chosen_animal_returned(self): tweet = 'Hey @upbeatbot send me a dog!' // ... modified code ... def test__get_animal_from_message_random_animal_returned_with_text(self): tweet = 'Hey @upbeatbot send me a pic!' ... def test__get_animal_from_message_random_returned_no_text(self): tweet = '@upbeatbot' # Minimum viable string // ... rest of the code ...
528c10b3988a93668c6a0d4c0b8a7de2667204b1
frontend/ligscore/results_page.py
frontend/ligscore/results_page.py
from flask import request import saliweb.frontend import collections Transform = collections.namedtuple('Transform', ['number', 'score']) def show_results_page(job): show_from = get_int('from', 1) show_to = get_int('to', 20) with open(job.get_path('input.txt')) as fh: receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ') num_transforms = 0 transforms = [] with open(job.get_path('score.list')) as fh: for line in fh: spl = line.rstrip('\r\n').split() if len(spl) > 0: num_transforms += 1 if num_transforms >= show_from and num_transforms <= show_to: transforms.append(Transform(number=num_transforms, score="%.2f" % float(spl[-1]))) return saliweb.frontend.render_results_template( "results_ok.html", receptor=receptor, ligand=ligand, scoretype=scoretype, transforms=transforms, show_from=show_from, show_to=show_to, num_transforms=num_transforms, job=job) def get_int(name, default): try: return int(request.args.get(name, "")) except ValueError: return default
from flask import request import saliweb.frontend import collections Transform = collections.namedtuple('Transform', ['number', 'score']) def show_results_page(job): show_from = request.args.get('from', 1, type=int) show_to = request.args.get('to', 20, type=int) with open(job.get_path('input.txt')) as fh: receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ') num_transforms = 0 transforms = [] with open(job.get_path('score.list')) as fh: for line in fh: spl = line.rstrip('\r\n').split() if len(spl) > 0: num_transforms += 1 if num_transforms >= show_from and num_transforms <= show_to: transforms.append(Transform(number=num_transforms, score="%.2f" % float(spl[-1]))) return saliweb.frontend.render_results_template( "results_ok.html", receptor=receptor, ligand=ligand, scoretype=scoretype, transforms=transforms, show_from=show_from, show_to=show_to, num_transforms=num_transforms, job=job)
Drop our own get_int() function
Drop our own get_int() function We don't need a custom function to get an int parameter; flask/werkzeug already handles this.
Python
lgpl-2.1
salilab/ligscore,salilab/ligscore
from flask import request import saliweb.frontend import collections Transform = collections.namedtuple('Transform', ['number', 'score']) def show_results_page(job): - show_from = get_int('from', 1) - show_to = get_int('to', 20) + show_from = request.args.get('from', 1, type=int) + show_to = request.args.get('to', 20, type=int) with open(job.get_path('input.txt')) as fh: receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ') num_transforms = 0 transforms = [] with open(job.get_path('score.list')) as fh: for line in fh: spl = line.rstrip('\r\n').split() if len(spl) > 0: num_transforms += 1 if num_transforms >= show_from and num_transforms <= show_to: transforms.append(Transform(number=num_transforms, score="%.2f" % float(spl[-1]))) return saliweb.frontend.render_results_template( "results_ok.html", receptor=receptor, ligand=ligand, scoretype=scoretype, transforms=transforms, show_from=show_from, show_to=show_to, num_transforms=num_transforms, job=job) - - def get_int(name, default): - try: - return int(request.args.get(name, "")) - except ValueError: - return default -
Drop our own get_int() function
## Code Before: from flask import request import saliweb.frontend import collections Transform = collections.namedtuple('Transform', ['number', 'score']) def show_results_page(job): show_from = get_int('from', 1) show_to = get_int('to', 20) with open(job.get_path('input.txt')) as fh: receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ') num_transforms = 0 transforms = [] with open(job.get_path('score.list')) as fh: for line in fh: spl = line.rstrip('\r\n').split() if len(spl) > 0: num_transforms += 1 if num_transforms >= show_from and num_transforms <= show_to: transforms.append(Transform(number=num_transforms, score="%.2f" % float(spl[-1]))) return saliweb.frontend.render_results_template( "results_ok.html", receptor=receptor, ligand=ligand, scoretype=scoretype, transforms=transforms, show_from=show_from, show_to=show_to, num_transforms=num_transforms, job=job) def get_int(name, default): try: return int(request.args.get(name, "")) except ValueError: return default ## Instruction: Drop our own get_int() function ## Code After: from flask import request import saliweb.frontend import collections Transform = collections.namedtuple('Transform', ['number', 'score']) def show_results_page(job): show_from = request.args.get('from', 1, type=int) show_to = request.args.get('to', 20, type=int) with open(job.get_path('input.txt')) as fh: receptor, ligand, scoretype = fh.readline().rstrip('\r\n').split(' ') num_transforms = 0 transforms = [] with open(job.get_path('score.list')) as fh: for line in fh: spl = line.rstrip('\r\n').split() if len(spl) > 0: num_transforms += 1 if num_transforms >= show_from and num_transforms <= show_to: transforms.append(Transform(number=num_transforms, score="%.2f" % float(spl[-1]))) return saliweb.frontend.render_results_template( "results_ok.html", receptor=receptor, ligand=ligand, scoretype=scoretype, transforms=transforms, show_from=show_from, show_to=show_to, num_transforms=num_transforms, job=job)
// ... existing code ... def show_results_page(job): show_from = request.args.get('from', 1, type=int) show_to = request.args.get('to', 20, type=int) with open(job.get_path('input.txt')) as fh: // ... modified code ... num_transforms=num_transforms, job=job) // ... rest of the code ...
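The reason the custom get_int() helper can be dropped in this record is that Werkzeug's MultiDict.get (which backs request.args) already does type coercion with a default: if the parameter is missing, or the callable passed as type raises ValueError, the default is returned. A small Flask-only sketch of that behaviour follows; the route and module are hypothetical, not the web service's code.

```python
from flask import Flask, request

app = Flask(__name__)

@app.route("/results")
def results():
    # returns 1 and 20 when the parameters are missing or not valid integers
    show_from = request.args.get("from", 1, type=int)
    show_to = request.args.get("to", 20, type=int)
    return {"from": show_from, "to": show_to}

if __name__ == "__main__":
    with app.test_request_context("/results?from=5&to=oops"):
        print(request.args.get("from", 1, type=int))  # 5
        print(request.args.get("to", 20, type=int))   # 20 (coercion failed)
```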
697833caade1323ddb9a0b4e51031f1d494262cd
201705/migonzalvar/biggest_set.py
201705/migonzalvar/biggest_set.py
from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic print(f'Duration: {elapsed} seconds') if elapsed >= secs: print('Limit reached. Stopping.') raise SystemExit(0) def do(): for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300): result = has_subset_sum_zero(source) print(f'Result: {result}') print('Continue...') print() if __name__ == '__main__': do()
from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case def do(): strategies = [nosolution_case, negative_worst_case, positive_worst_case] for strategy in strategies: print(f'## Using {strategy.__name__}') print() for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300) as duration: result = has_subset_sum_zero(source) print(f'Result: {result}') print(f'Duration: {duration.elapsed} seconds') if duration.elapsed >= secs: print('Limit reached. Stopping.') break print('Continue searching...') print() if __name__ == '__main__': do()
Use several strategies for performance
Use several strategies for performance
Python
bsd-3-clause
VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto
from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic - print(f'Duration: {elapsed} seconds') - if elapsed >= secs: - print('Limit reached. Stopping.') - raise SystemExit(0) + duration.elapsed = elapsed + + + def nosolution_case(N): + return range(1, N + 1) + + + def negative_worst_case(N): + case = list(range(-N + 1, 0)) + case += [abs(sum(case))] + return case + + + def positive_worst_case(N): + case = list(range(1, N)) + case.insert(0, - sum(case)) + return case def do(): + strategies = [nosolution_case, negative_worst_case, positive_worst_case] + for strategy in strategies: + print(f'## Using {strategy.__name__}') - for n in range(1, 100, 10): - source = range(1, n) - print(f'Length: {n} items') - with less_than(300): - result = has_subset_sum_zero(source) - print(f'Result: {result}') - print('Continue...') print() + for n in range(1, 100, 10): + source = range(1, n) + print(f'Length: {n} items') + with less_than(300) as duration: + result = has_subset_sum_zero(source) + print(f'Result: {result}') + print(f'Duration: {duration.elapsed} seconds') + if duration.elapsed >= secs: + print('Limit reached. Stopping.') + break + print('Continue searching...') + print() if __name__ == '__main__': do()
Use several strategies for performance
## Code Before: from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic print(f'Duration: {elapsed} seconds') if elapsed >= secs: print('Limit reached. Stopping.') raise SystemExit(0) def do(): for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300): result = has_subset_sum_zero(source) print(f'Result: {result}') print('Continue...') print() if __name__ == '__main__': do() ## Instruction: Use several strategies for performance ## Code After: from contextlib import contextmanager import time from main import has_subset_sum_zero class Duration: def __init__(self, elapsed=None): self.elapsed = elapsed @contextmanager def less_than(secs): duration = Duration() tic = time.time() yield duration elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case def do(): strategies = [nosolution_case, negative_worst_case, positive_worst_case] for strategy in strategies: print(f'## Using {strategy.__name__}') print() for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300) as duration: result = has_subset_sum_zero(source) print(f'Result: {result}') print(f'Duration: {duration.elapsed} seconds') if duration.elapsed >= secs: print('Limit reached. Stopping.') break print('Continue searching...') print() if __name__ == '__main__': do()
// ... existing code ... elapsed = time.time() - tic duration.elapsed = elapsed def nosolution_case(N): return range(1, N + 1) def negative_worst_case(N): case = list(range(-N + 1, 0)) case += [abs(sum(case))] return case def positive_worst_case(N): case = list(range(1, N)) case.insert(0, - sum(case)) return case // ... modified code ... def do(): strategies = [nosolution_case, negative_worst_case, positive_worst_case] for strategy in strategies: print(f'## Using {strategy.__name__}') print() for n in range(1, 100, 10): source = range(1, n) print(f'Length: {n} items') with less_than(300) as duration: result = has_subset_sum_zero(source) print(f'Result: {result}') print(f'Duration: {duration.elapsed} seconds') if duration.elapsed >= secs: print('Limit reached. Stopping.') break print('Continue searching...') print() // ... rest of the code ...
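One detail in the "Code After" above: inside do() the loop compares duration.elapsed >= secs, but secs is only a parameter of less_than() and is not defined in do(), so reaching that line raises a NameError; the loop also builds source from a bare range instead of the chosen strategy. Below is a hedged re-sketch of just the timing loop with those two points addressed. The subset-sum function is stubbed out here, and the names are illustrative.

```python
import time
from contextlib import contextmanager

class Duration:
    def __init__(self, elapsed=None):
        self.elapsed = elapsed

@contextmanager
def less_than(secs):
    duration = Duration()
    tic = time.time()
    yield duration
    duration.elapsed = time.time() - tic

def has_subset_sum_zero(source):      # stand-in for the real implementation
    return any(x == 0 for x in source)

def nosolution_case(n):
    return range(1, n + 1)

LIMIT_SECS = 300  # keep the limit in one place so the comparison can use it

def do(strategy=nosolution_case):
    for n in range(1, 100, 10):
        source = strategy(n)          # use the strategy, not a bare range
        with less_than(LIMIT_SECS) as duration:
            result = has_subset_sum_zero(source)
        print(f"n={n} result={result} took {duration.elapsed:.6f}s")
        if duration.elapsed >= LIMIT_SECS:
            print("Limit reached. Stopping.")
            break

if __name__ == "__main__":
    do()
```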
edfd2edc5496cb412477b7409f43aa53acf7dea9
tests/test_loadproblem.py
tests/test_loadproblem.py
import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') problem = loadproblem.load_file_as_module(problem_dir,file_name) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') self.assertRaises(Exception, loadproblem. \ load_file_as_module(problem_dir,file_name)) if __name__ == '__main__': unittest.main()
import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) self.assertRaises(Exception, loadproblem.load_file_as_module(file)) if __name__ == '__main__': unittest.main()
Fix parameter values for load function
Fix parameter values for load function
Python
apache-2.0
patrickspencer/mathdeck,patrickspencer/mathdeck
import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' - problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), + file = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'fixtures','loadproblem') + 'fixtures','loadproblem', file_name) - problem = loadproblem.load_file_as_module(problem_dir,file_name) + problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' - problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), + file = os.path.join(os.path.dirname(os.path.realpath(__file__)), - 'fixtures','loadproblem') + 'fixtures','loadproblem', file_name) - self.assertRaises(Exception, loadproblem. \ + self.assertRaises(Exception, loadproblem.load_file_as_module(file)) - load_file_as_module(problem_dir,file_name)) if __name__ == '__main__': unittest.main()
Fix parameter values for load function
## Code Before: import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') problem = loadproblem.load_file_as_module(problem_dir,file_name) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem') self.assertRaises(Exception, loadproblem. \ load_file_as_module(problem_dir,file_name)) if __name__ == '__main__': unittest.main() ## Instruction: Fix parameter values for load function ## Code After: import unittest import os from mathdeck import loadproblem class TestMathdeckLoadProblem(unittest.TestCase): def test_loadproblem_has_answers_attribute(self): file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) def test_loadproblem_has_no_answers_attribute(self): file_name = 'has_no_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) self.assertRaises(Exception, loadproblem.load_file_as_module(file)) if __name__ == '__main__': unittest.main()
// ... existing code ... file_name = 'has_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) problem = loadproblem.load_file_as_module(file) self.assertTrue(hasattr(problem,'answers')) // ... modified code ... file_name = 'has_no_answers_attribute.py' file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures','loadproblem', file_name) self.assertRaises(Exception, loadproblem.load_file_as_module(file)) // ... rest of the code ...
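A side note on the tests shown above: self.assertRaises(Exception, loadproblem.load_file_as_module(file)) calls the function before assertRaises ever runs, so the assertion never actually guards the call. When the intent is to assert that loading raises, the callable and its arguments are usually passed separately, or the context-manager form is used. A minimal illustration with a throwaway function (not the project's loader) follows.

```python
import unittest

def load_file_as_module(path):
    raise ValueError(f"no answers attribute in {path}")

class Example(unittest.TestCase):
    def test_raises_positional_form(self):
        # assertRaises receives the callable plus its arguments and calls it itself
        self.assertRaises(ValueError, load_file_as_module, "fixture.py")

    def test_raises_context_manager_form(self):
        with self.assertRaises(ValueError):
            load_file_as_module("fixture.py")

if __name__ == "__main__":
    unittest.main()
```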
e6e6918b54d691803c48f217f0074d5bcdd9df50
endpoint/csp.py
endpoint/csp.py
import falcon, util, json, sys, traceback # Content-security policy reports of frontend # Every CSP report is forwarded to ksi-admin@fi.muni.cz. # This is testing solution, if a lot of spam occurs, some intelligence should # be added to this endpoint. class CSP(object): def on_post(self, req, resp): data = json.loads(req.stream.read()) text = "<p>" + util.config.ksi_web() + \ "<br><pre>" + json.dumps(data, indent=4) + "</pre></p>" + \ util.mail.easteregg() try: util.mail.send("ksi-admin@fi.muni.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr) req.context['result'] = {} resp.status = falcon.HTTP_200
import falcon, util, json, sys, traceback # Content-security policy reports of frontend # Every CSP report is forwarded to ksi-admin@fi.muni.cz. # This is testing solution, if a lot of spam occurs, some intelligence should # be added to this endpoint. class CSP(object): def on_post(self, req, resp): data = json.loads(req.stream.read()) text = "<p>" + util.config.ksi_web() + \ "<br><pre>" + json.dumps(data, indent=4) + "</pre></p>" + \ util.mail.easteregg() try: util.mail.send("me@apophis.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr) req.context['result'] = {} resp.status = falcon.HTTP_200
Send CSP reports right to apophis.
Send CSP reports right to apophis.
Python
mit
fi-ksi/web-backend,fi-ksi/web-backend
import falcon, util, json, sys, traceback # Content-security policy reports of frontend # Every CSP report is forwarded to ksi-admin@fi.muni.cz. # This is testing solution, if a lot of spam occurs, some intelligence should # be added to this endpoint. class CSP(object): def on_post(self, req, resp): data = json.loads(req.stream.read()) text = "<p>" + util.config.ksi_web() + \ "<br><pre>" + json.dumps(data, indent=4) + "</pre></p>" + \ util.mail.easteregg() try: - util.mail.send("ksi-admin@fi.muni.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) + util.mail.send("me@apophis.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr) req.context['result'] = {} resp.status = falcon.HTTP_200
Send CSP reports right to apophis.
## Code Before: import falcon, util, json, sys, traceback # Content-security policy reports of frontend # Every CSP report is forwarded to ksi-admin@fi.muni.cz. # This is testing solution, if a lot of spam occurs, some intelligence should # be added to this endpoint. class CSP(object): def on_post(self, req, resp): data = json.loads(req.stream.read()) text = "<p>" + util.config.ksi_web() + \ "<br><pre>" + json.dumps(data, indent=4) + "</pre></p>" + \ util.mail.easteregg() try: util.mail.send("ksi-admin@fi.muni.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr) req.context['result'] = {} resp.status = falcon.HTTP_200 ## Instruction: Send CSP reports right to apophis. ## Code After: import falcon, util, json, sys, traceback # Content-security policy reports of frontend # Every CSP report is forwarded to ksi-admin@fi.muni.cz. # This is testing solution, if a lot of spam occurs, some intelligence should # be added to this endpoint. class CSP(object): def on_post(self, req, resp): data = json.loads(req.stream.read()) text = "<p>" + util.config.ksi_web() + \ "<br><pre>" + json.dumps(data, indent=4) + "</pre></p>" + \ util.mail.easteregg() try: util.mail.send("me@apophis.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) except: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr) req.context['result'] = {} resp.status = falcon.HTTP_200
# ... existing code ... try: util.mail.send("me@apophis.cz", "[KSI-WEB] CSP report", text.decode('utf-8'), ) except: # ... rest of the code ...
d5cd1eddf1ecf0c463a90d0e69413aadd311977a
lots/urls.py
lots/urls.py
from django.conf.urls import patterns, include, url from django.conf import settings from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), ) urlpatterns += patterns('', url(r'^media/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, }), url(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.STATIC_ROOT, }),)
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), )
Revert "Picture access from admin console"
Revert "Picture access from admin console" This reverts commit 324fa160fb629f6c4537ca15212c0822e8ac436d.
Python
mit
opencleveland/large-lots,skorasaurus/large-lots,opencleveland/large-lots,skorasaurus/large-lots,skorasaurus/large-lots,skorasaurus/large-lots,opencleveland/large-lots,opencleveland/large-lots
from django.conf.urls import patterns, include, url - from django.conf import settings from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), + url(r'^django-admin/', include(admin.site.urls)), ) - urlpatterns += patterns('', - url(r'^media/(?P<path>.*)$', 'django.views.static.serve', { - 'document_root': settings.MEDIA_ROOT, - }), - url(r'^static/(?P<path>.*)$', 'django.views.static.serve', { - 'document_root': settings.STATIC_ROOT, - }),) -
Revert "Picture access from admin console"
## Code Before: from django.conf.urls import patterns, include, url from django.conf import settings from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), ) urlpatterns += patterns('', url(r'^media/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.MEDIA_ROOT, }), url(r'^static/(?P<path>.*)$', 'django.views.static.serve', { 'document_root': settings.STATIC_ROOT, }),) ## Instruction: Revert "Picture access from admin console" ## Code After: from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: url(r'^$', 'lots_client.views.home', name='home'), url(r'^status/$', 'lots_client.views.status', name='status'), url(r'^apply/$', 'lots_client.views.apply', name='apply'), url(r'^apply-confirm/(?P<tracking_id>\S+)/$', 'lots_client.views.apply_confirm', name='apply_confirm'), url(r'^faq/$', 'lots_client.views.faq', name='faq'), url(r'^about/$', 'lots_client.views.about', name='about'), url(r'^lots-admin/$', 'lots_admin.views.lots_admin', name='lots_admin'), url(r'^lots-admin-map/$', 'lots_admin.views.lots_admin_map', name='lots_admin_map'), url(r'^csv-dump/$', 'lots_admin.views.csv_dump', name='csv_dump'), url(r'^lots-login/$', 'lots_admin.views.lots_login', name='lots_login'), url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), )
// ... existing code ... from django.conf.urls import patterns, include, url // ... modified code ... url(r'^logout/$', 'lots_admin.views.lots_logout', name='logout'), url(r'^django-admin/', include(admin.site.urls)), ... ) // ... rest of the code ...
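For context on the block being reverted above: routing ^media/ and ^static/ through django.views.static.serve is normally a development-only convenience, and it is usually guarded so it never runs in production. The snippet below is a hedged illustration of the common guard using Django's static() helper and list-style urlpatterns; it is an assumption about style, not this project's configuration.

```python
from django.conf import settings
from django.conf.urls.static import static

urlpatterns = [
    # ... the application's real routes go here ...
]

if settings.DEBUG:
    # static() already returns [] when DEBUG is False, but the guard keeps the intent explicit
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
```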
76ca06c26d74aaad1f0773321fdd382b12addcdc
src/django_easyfilters/utils.py
src/django_easyfilters/utils.py
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject import six def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not six.PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: try: rel = opts.get_field_by_name(name)[0] except FieldDoesNotExist: return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject from six import PY3 def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: rel = opts.get_field_by_name(name)[0] if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
Fix error handling in get_model_field (passthrough).
Fix error handling in get_model_field (passthrough).
Python
mit
ionelmc/django-easyfilters,ionelmc/django-easyfilters
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject - import six + from six import PY3 + def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ - if not six.PY3: + if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: - try: - rel = opts.get_field_by_name(name)[0] + rel = opts.get_field_by_name(name)[0] - except FieldDoesNotExist: - return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
Fix error handling in get_model_field (passthrough).
## Code Before: try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject import six def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not six.PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: try: rel = opts.get_field_by_name(name)[0] except FieldDoesNotExist: return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m ## Instruction: Fix error handling in get_model_field (passthrough). ## Code After: try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject from six import PY3 def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: rel = opts.get_field_by_name(name)[0] if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
... from django.db.models.related import RelatedObject from six import PY3 ... """ if not PY3: klass.__unicode__ = klass.__str__ ... for name in parts[:-1]: rel = opts.get_field_by_name(name)[0] if isinstance(rel, RelatedObject): ...
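The "passthrough" in this record is easy to miss: the removed except clause referenced FieldDoesNotExist, which is not imported anywhere in the module shown, so a bad field name would have surfaced as a NameError instead of a clean None; after the change, whatever get_field_by_name raises simply propagates to the caller. The toy sketch below contrasts the two shapes without any Django dependency; everything in it is made up for illustration.

```python
class FieldDoesNotExist(LookupError):
    pass

FIELDS = {"author": "ForeignKey", "title": "CharField"}  # toy stand-in for model meta

def get_field(name):
    try:
        return FIELDS[name]
    except KeyError:
        raise FieldDoesNotExist(name)

def lookup_swallowing(name):
    """Old shape: hide the failure behind None."""
    try:
        return get_field(name)
    except FieldDoesNotExist:
        return None

def lookup_passthrough(name):
    """New shape: let the caller see exactly which lookup failed."""
    return get_field(name)

print(lookup_swallowing("missing"))          # None, the failure is silent
try:
    lookup_passthrough("missing")
except FieldDoesNotExist as exc:
    print("caller sees the failure:", exc)   # caller sees the failure: missing
```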
c65d11f82b6d33d4940cdfd7b4d6b81e083c6e34
setup.py
setup.py
from distutils.core import setup setup( name='django_autologin', version='0.1', packages=['django_autologin', 'django_autologin.templatetags'], install_requires=['django>=1.0'], description='Token generator and processor to provide automatic login links for users' )
from distutils.core import setup setup( name='django_autologin', version='0.1', packages=['django_autologin', 'django_autologin.templatetags'], install_requires=['django>=1.0'], description='Token generator and processor to provide automatic login links for users' )
Use 4 spaces for indentation.
Use 4 spaces for indentation.
Python
bsd-3-clause
playfire/django-autologin
from distutils.core import setup setup( - name='django_autologin', + name='django_autologin', - version='0.1', + version='0.1', - packages=['django_autologin', 'django_autologin.templatetags'], + packages=['django_autologin', 'django_autologin.templatetags'], - install_requires=['django>=1.0'], + install_requires=['django>=1.0'], - description='Token generator and processor to provide automatic login links for users' + description='Token generator and processor to provide automatic login links for users' )
Use 4 spaces for indentation.
## Code Before: from distutils.core import setup setup( name='django_autologin', version='0.1', packages=['django_autologin', 'django_autologin.templatetags'], install_requires=['django>=1.0'], description='Token generator and processor to provide automatic login links for users' ) ## Instruction: Use 4 spaces for indentation. ## Code After: from distutils.core import setup setup( name='django_autologin', version='0.1', packages=['django_autologin', 'django_autologin.templatetags'], install_requires=['django>=1.0'], description='Token generator and processor to provide automatic login links for users' )
# ... existing code ... setup( name='django_autologin', version='0.1', packages=['django_autologin', 'django_autologin.templatetags'], install_requires=['django>=1.0'], description='Token generator and processor to provide automatic login links for users' ) # ... rest of the code ...
6ecada90e944ee976197e0ee79baf1d711a20803
cla_public/apps/base/forms.py
cla_public/apps/base/forms.py
"Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM class FeedbackForm(Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
"Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM from cla_public.apps.checker.honeypot import Honeypot class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
Add honeypot field to feedback form
Add honeypot field to feedback form
Python
mit
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
"Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM + from cla_public.apps.checker.honeypot import Honeypot + - class FeedbackForm(Form): + class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
Add honeypot field to feedback form
## Code Before: "Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM class FeedbackForm(Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM) ## Instruction: Add honeypot field to feedback form ## Code After: "Base forms" from flask_wtf import Form from wtforms import StringField, TextAreaField from cla_public.apps.base.fields import MultiRadioField from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \ HELP_FILLING_IN_FORM from cla_public.apps.checker.honeypot import Honeypot class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ideas = TextAreaField(u'Do you have any ideas for how it could be improved?') feel_about_service = MultiRadioField( u'Overall, how did you feel about the service you received today?', choices=FEEL_ABOUT_SERVICE) help_filling_in_form = MultiRadioField( u'Did you have any help filling in this form?', choices=HELP_FILLING_IN_FORM)
... HELP_FILLING_IN_FORM from cla_public.apps.checker.honeypot import Honeypot class FeedbackForm(Honeypot, Form): difficulty = TextAreaField(u'Did you have any difficulty with this service?') ...
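The Honeypot class imported from cla_public.apps.checker.honeypot is not shown in this record, so its real behaviour is unknown here. Purely as an illustration of the general pattern, a honeypot mixin usually adds a field that humans leave blank and fails validation when it arrives filled in. The sketch below is hypothetical: the field name, the error message, and the use of plain wtforms.Form instead of flask_wtf are all assumptions made so the snippet can run without a Flask app context.

```python
from wtforms import Form, StringField, TextAreaField, ValidationError

class Honeypot(object):
    # Hidden via CSS on the page; real users leave it blank, naive bots fill it.
    website_url = StringField("Leave this field empty")

    def validate_website_url(self, field):
        if field.data:
            raise ValidationError("Submission rejected.")

class FeedbackForm(Honeypot, Form):
    difficulty = TextAreaField("Did you have any difficulty with this service?")

form = FeedbackForm(difficulty="None at all", website_url="http://spam.example")
print(form.validate())   # False: the honeypot tripped
print(form.errors)       # {'website_url': ['Submission rejected.']}
```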
abebc8a1153a9529a0f805207492cf2f5edece62
cbor2/__init__.py
cbor2/__init__.py
from .decoder import load, loads, CBORDecoder, CBORDecodeError # noqa from .encoder import dump, dumps, CBOREncoder, CBOREncodeError, shareable_encoder # noqa from .types import CBORTag, CBORSimpleValue, undefined # noqa
from .decoder import load, loads, CBORDecoder # noqa from .encoder import dump, dumps, CBOREncoder, shareable_encoder # noqa from .types import ( # noqa CBORError, CBOREncodeError, CBORDecodeError, CBORTag, CBORSimpleValue, undefined ) try: from _cbor2 import * # noqa except ImportError: # Couldn't import the optimized C version; ignore the failure and leave the # pure Python implementations in place. pass else: # The pure Python implementations are replaced with the optimized C # variants, but we still need to create the encoder dictionaries for the C # variant here (this is much simpler than doing so in C, and doesn't affect # overall performance as it's a one-off initialization cost). def _init_cbor2(): from collections import OrderedDict from .encoder import default_encoders, canonical_encoders from .types import CBORTag, CBORSimpleValue, undefined # noqa import _cbor2 _cbor2.default_encoders = OrderedDict([ (( _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else _cbor2.CBORTag if type_ is CBORTag else type(_cbor2.undefined) if type_ is type(undefined) else type_ ), getattr(_cbor2.CBOREncoder, method.__name__)) for type_, method in default_encoders.items() ]) _cbor2.canonical_encoders = OrderedDict([ (( _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else _cbor2.CBORTag if type_ is CBORTag else type(_cbor2.undefined) if type_ is type(undefined) else type_ ), getattr(_cbor2.CBOREncoder, method.__name__)) for type_, method in canonical_encoders.items() ]) _init_cbor2() del _init_cbor2
Make the package import both variants
Make the package import both variants Favouring the C variant where it successfully imports. This commit also handles generating the encoding dictionaries for the C variant from those defined for the Python variant (this is much simpler than doing this in C).
Python
mit
agronholm/cbor2,agronholm/cbor2,agronholm/cbor2
- from .decoder import load, loads, CBORDecoder, CBORDecodeError # noqa + from .decoder import load, loads, CBORDecoder # noqa - from .encoder import dump, dumps, CBOREncoder, CBOREncodeError, shareable_encoder # noqa + from .encoder import dump, dumps, CBOREncoder, shareable_encoder # noqa - from .types import CBORTag, CBORSimpleValue, undefined # noqa + from .types import ( # noqa + CBORError, + CBOREncodeError, + CBORDecodeError, + CBORTag, + CBORSimpleValue, + undefined + ) + try: + from _cbor2 import * # noqa + except ImportError: + # Couldn't import the optimized C version; ignore the failure and leave the + # pure Python implementations in place. + pass + else: + # The pure Python implementations are replaced with the optimized C + # variants, but we still need to create the encoder dictionaries for the C + # variant here (this is much simpler than doing so in C, and doesn't affect + # overall performance as it's a one-off initialization cost). + def _init_cbor2(): + from collections import OrderedDict + from .encoder import default_encoders, canonical_encoders + from .types import CBORTag, CBORSimpleValue, undefined # noqa + import _cbor2 + _cbor2.default_encoders = OrderedDict([ + (( + _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else + _cbor2.CBORTag if type_ is CBORTag else + type(_cbor2.undefined) if type_ is type(undefined) else + type_ + ), getattr(_cbor2.CBOREncoder, method.__name__)) + for type_, method in default_encoders.items() + ]) + _cbor2.canonical_encoders = OrderedDict([ + (( + _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else + _cbor2.CBORTag if type_ is CBORTag else + type(_cbor2.undefined) if type_ is type(undefined) else + type_ + ), getattr(_cbor2.CBOREncoder, method.__name__)) + for type_, method in canonical_encoders.items() + ]) + _init_cbor2() + del _init_cbor2 +
Make the package import both variants
## Code Before: from .decoder import load, loads, CBORDecoder, CBORDecodeError # noqa from .encoder import dump, dumps, CBOREncoder, CBOREncodeError, shareable_encoder # noqa from .types import CBORTag, CBORSimpleValue, undefined # noqa ## Instruction: Make the package import both variants ## Code After: from .decoder import load, loads, CBORDecoder # noqa from .encoder import dump, dumps, CBOREncoder, shareable_encoder # noqa from .types import ( # noqa CBORError, CBOREncodeError, CBORDecodeError, CBORTag, CBORSimpleValue, undefined ) try: from _cbor2 import * # noqa except ImportError: # Couldn't import the optimized C version; ignore the failure and leave the # pure Python implementations in place. pass else: # The pure Python implementations are replaced with the optimized C # variants, but we still need to create the encoder dictionaries for the C # variant here (this is much simpler than doing so in C, and doesn't affect # overall performance as it's a one-off initialization cost). def _init_cbor2(): from collections import OrderedDict from .encoder import default_encoders, canonical_encoders from .types import CBORTag, CBORSimpleValue, undefined # noqa import _cbor2 _cbor2.default_encoders = OrderedDict([ (( _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else _cbor2.CBORTag if type_ is CBORTag else type(_cbor2.undefined) if type_ is type(undefined) else type_ ), getattr(_cbor2.CBOREncoder, method.__name__)) for type_, method in default_encoders.items() ]) _cbor2.canonical_encoders = OrderedDict([ (( _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else _cbor2.CBORTag if type_ is CBORTag else type(_cbor2.undefined) if type_ is type(undefined) else type_ ), getattr(_cbor2.CBOREncoder, method.__name__)) for type_, method in canonical_encoders.items() ]) _init_cbor2() del _init_cbor2
... from .decoder import load, loads, CBORDecoder # noqa from .encoder import dump, dumps, CBOREncoder, shareable_encoder # noqa from .types import ( # noqa CBORError, CBOREncodeError, CBORDecodeError, CBORTag, CBORSimpleValue, undefined ) try: from _cbor2 import * # noqa except ImportError: # Couldn't import the optimized C version; ignore the failure and leave the # pure Python implementations in place. pass else: # The pure Python implementations are replaced with the optimized C # variants, but we still need to create the encoder dictionaries for the C # variant here (this is much simpler than doing so in C, and doesn't affect # overall performance as it's a one-off initialization cost). def _init_cbor2(): from collections import OrderedDict from .encoder import default_encoders, canonical_encoders from .types import CBORTag, CBORSimpleValue, undefined # noqa import _cbor2 _cbor2.default_encoders = OrderedDict([ (( _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else _cbor2.CBORTag if type_ is CBORTag else type(_cbor2.undefined) if type_ is type(undefined) else type_ ), getattr(_cbor2.CBOREncoder, method.__name__)) for type_, method in default_encoders.items() ]) _cbor2.canonical_encoders = OrderedDict([ (( _cbor2.CBORSimpleValue if type_ is CBORSimpleValue else _cbor2.CBORTag if type_ is CBORTag else type(_cbor2.undefined) if type_ is type(undefined) else type_ ), getattr(_cbor2.CBOREncoder, method.__name__)) for type_, method in canonical_encoders.items() ]) _init_cbor2() del _init_cbor2 ...
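The commit message in the cbor2 record above describes a common packaging idiom: use an optimized C extension when it imports, otherwise keep the pure-Python implementations. A minimal self-contained sketch of that idiom follows; the package, module and function names here are invented and are not cbor2's.

# mypkg/__init__.py: hypothetical package preferring a compiled variant
from .pure import encode, decode            # pure-Python implementations

try:
    from _mypkg_speedups import encode, decode  # noqa: overrides the pure versions
except ImportError:
    # The extension was not built for this interpreter/platform;
    # the pure-Python functions imported above stay in place.
    pass

Callers simply write from mypkg import encode and transparently get the fastest implementation available.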
13c968f9f345f58775750f1f83ca7881cee2755a
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
import pandas as pd import sys df = pd.read_csv(sys.argv[1]) df.columns = [c.lower() for c in df.columns] from sqlalchemy import create_engine engine = create_engine('postgresql://pcawg_admin:pcawg@localhost:5432/germline_genotype_tracking') df.to_sql("pcawg_samples", engine)
import pandas as pd import sys df = pd.read_csv(sys.argv[1]) df.columns = [c.lower() for c in df.columns] from sqlalchemy import create_engine engine = create_engine('postgresql://pcawg_admin:pcawg@run-tracking-db.service.consul:5432/germline_genotype_tracking') df.to_sql("pcawg_samples", engine)
Use Tracking DB Service URL rather than localhost in the DB connection string.
Use Tracking DB Service URL rather than localhost in the DB connection string.
Python
mit
llevar/germline-regenotyper,llevar/germline-regenotyper
import pandas as pd import sys df = pd.read_csv(sys.argv[1]) df.columns = [c.lower() for c in df.columns] from sqlalchemy import create_engine - engine = create_engine('postgresql://pcawg_admin:pcawg@localhost:5432/germline_genotype_tracking') + engine = create_engine('postgresql://pcawg_admin:pcawg@run-tracking-db.service.consul:5432/germline_genotype_tracking') df.to_sql("pcawg_samples", engine)
Use Tracking DB Service URL rather than localhost in the DB connection string.
## Code Before: import pandas as pd import sys df = pd.read_csv(sys.argv[1]) df.columns = [c.lower() for c in df.columns] from sqlalchemy import create_engine engine = create_engine('postgresql://pcawg_admin:pcawg@localhost:5432/germline_genotype_tracking') df.to_sql("pcawg_samples", engine) ## Instruction: Use Tracking DB Service URL rather than localhost in the DB connection string. ## Code After: import pandas as pd import sys df = pd.read_csv(sys.argv[1]) df.columns = [c.lower() for c in df.columns] from sqlalchemy import create_engine engine = create_engine('postgresql://pcawg_admin:pcawg@run-tracking-db.service.consul:5432/germline_genotype_tracking') df.to_sql("pcawg_samples", engine)
... from sqlalchemy import create_engine engine = create_engine('postgresql://pcawg_admin:pcawg@run-tracking-db.service.consul:5432/germline_genotype_tracking') ...
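The fix in the record above only swaps the hard-coded host inside the SQLAlchemy URL. One hypothetical variation is to read the host from the environment so neither value is baked into the script; the PCAWG_DB_HOST variable name and the samples.csv path below are assumptions made for this sketch, not part of the original script.

import os
import pandas as pd
from sqlalchemy import create_engine

# postgresql://<user>:<password>@<host>:<port>/<database>
db_host = os.environ.get('PCAWG_DB_HOST', 'run-tracking-db.service.consul')
engine = create_engine(
    'postgresql://pcawg_admin:pcawg@%s:5432/germline_genotype_tracking' % db_host)

df = pd.read_csv('samples.csv')
df.columns = [c.lower() for c in df.columns]   # normalise column names
df.to_sql('pcawg_samples', engine)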
43662a6417a9d589bac2ab49e5b9b5441adf1115
atomic/__init__.py
atomic/__init__.py
from .atomic_data import AtomicData from .collisional_radiative import CollRadEquilibrium from .time_dependent_rates import RateEquations, RateEquationsWithDiffusion from .radiation import Radiation from .electron_cooling import ElectronCooling element = AtomicData.from_element
import os, sys sys.path.append(os.path.join(os.path.dirname(__file__))) from .atomic_data import AtomicData from .collisional_radiative import CollRadEquilibrium from .time_dependent_rates import RateEquations, RateEquationsWithDiffusion from .radiation import Radiation from .electron_cooling import ElectronCooling element = AtomicData.from_element
Set path to find _xxdata.so files
Set path to find _xxdata.so files
Python
mit
cfe316/atomic
+ import os, sys + sys.path.append(os.path.join(os.path.dirname(__file__))) from .atomic_data import AtomicData from .collisional_radiative import CollRadEquilibrium from .time_dependent_rates import RateEquations, RateEquationsWithDiffusion from .radiation import Radiation from .electron_cooling import ElectronCooling element = AtomicData.from_element
Set path to find _xxdata.so files
## Code Before: from .atomic_data import AtomicData from .collisional_radiative import CollRadEquilibrium from .time_dependent_rates import RateEquations, RateEquationsWithDiffusion from .radiation import Radiation from .electron_cooling import ElectronCooling element = AtomicData.from_element ## Instruction: Set path to find _xxdata.so files ## Code After: import os, sys sys.path.append(os.path.join(os.path.dirname(__file__))) from .atomic_data import AtomicData from .collisional_radiative import CollRadEquilibrium from .time_dependent_rates import RateEquations, RateEquationsWithDiffusion from .radiation import Radiation from .electron_cooling import ElectronCooling element = AtomicData.from_element
# ... existing code ... import os, sys sys.path.append(os.path.join(os.path.dirname(__file__))) from .atomic_data import AtomicData # ... rest of the code ...
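For context on the two lines added in the record above: appending the package directory to sys.path lets the compiled _xxdata*.so modules that sit next to __init__.py be imported by their bare names. A slightly more defensive spelling of the same idea is shown below; the abspath call and the duplicate check are additions of this sketch, not part of the original commit.

import os
import sys

# Directory holding this __init__.py and the compiled _xxdata*.so modules.
_pkg_dir = os.path.abspath(os.path.dirname(__file__))
if _pkg_dir not in sys.path:        # avoid growing sys.path on repeated imports
    sys.path.append(_pkg_dir)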
6ee8ee2467d9c61b03a268a6b8d8ea9bc3cfe9e0
Lib/defcon/tools/fuzzyNumber.py
Lib/defcon/tools/fuzzyNumber.py
class FuzzyNumber(object): def __init__(self, value, threshold): self.value = value self.threshold = threshold def __repr__(self): return '[%d %d]' % (self.value, self.threshold) def __cmp__(self, other): if abs(self.value - other.value) < self.threshold: return 0 else: return cmp(self.value, other.value)
class FuzzyNumber(object): def __init__(self, value, threshold): self.value = value self.threshold = threshold def __repr__(self): return "[%f %f]" % (self.value, self.threshold) def __cmp__(self, other): if hasattr(other, "value"): if abs(self.value - other.value) < self.threshold: return 0 else: return cmp(self.value, other.value) return cmp(self.value, other)
Allow for comparing to objects other than FuzzyNumber objects.
Allow for comparing to objects other than FuzzyNumber objects.
Python
mit
anthrotype/defcon,moyogo/defcon,typemytype/defcon,typesupply/defcon,adrientetar/defcon
class FuzzyNumber(object): def __init__(self, value, threshold): self.value = value self.threshold = threshold def __repr__(self): - return '[%d %d]' % (self.value, self.threshold) + return "[%f %f]" % (self.value, self.threshold) def __cmp__(self, other): + if hasattr(other, "value"): - if abs(self.value - other.value) < self.threshold: + if abs(self.value - other.value) < self.threshold: - return 0 + return 0 - else: + else: - return cmp(self.value, other.value) + return cmp(self.value, other.value) + return cmp(self.value, other)
Allow for comparing to objects other than FuzzyNumber objects.
## Code Before: class FuzzyNumber(object): def __init__(self, value, threshold): self.value = value self.threshold = threshold def __repr__(self): return '[%d %d]' % (self.value, self.threshold) def __cmp__(self, other): if abs(self.value - other.value) < self.threshold: return 0 else: return cmp(self.value, other.value) ## Instruction: Allow for comparing to objects other than FuzzyNumber objects. ## Code After: class FuzzyNumber(object): def __init__(self, value, threshold): self.value = value self.threshold = threshold def __repr__(self): return "[%f %f]" % (self.value, self.threshold) def __cmp__(self, other): if hasattr(other, "value"): if abs(self.value - other.value) < self.threshold: return 0 else: return cmp(self.value, other.value) return cmp(self.value, other)
// ... existing code ... def __repr__(self): return "[%f %f]" % (self.value, self.threshold) // ... modified code ... def __cmp__(self, other): if hasattr(other, "value"): if abs(self.value - other.value) < self.threshold: return 0 else: return cmp(self.value, other.value) return cmp(self.value, other) // ... rest of the code ...
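One caveat about the record above: the __cmp__ hook it relies on exists only on Python 2. A rough Python 3 equivalent of the same fuzzy comparison, written as a standalone sketch rather than as code from defcon, would use rich comparison methods together with the same hasattr duck-typing:

class FuzzyNumber(object):
    """A number that compares equal to anything within `threshold` (Py3 sketch)."""

    def __init__(self, value, threshold):
        self.value = value
        self.threshold = threshold

    def _value_of(self, other):
        # Accept either another FuzzyNumber or a plain number.
        return other.value if hasattr(other, 'value') else other

    def __eq__(self, other):
        return abs(self.value - self._value_of(other)) < self.threshold

    def __lt__(self, other):
        return not self == other and self.value < self._value_of(other)

    def __repr__(self):
        return '[%f %f]' % (self.value, self.threshold)

functools.total_ordering can derive the remaining comparison operators from __eq__ and __lt__ if they are needed.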
b776a05c8bb57d63259263c985883422f56298c7
pyvac/helpers/calendar.py
pyvac/helpers/calendar.py
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
import urllib import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url url_obj = urllib.quote(url_obj, safe='/:') return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
Fix bug with ics url format with latest vobject version
Fix bug with ics url format with latest vobject version
Python
bsd-3-clause
sayoun/pyvac,sayoun/pyvac,sayoun/pyvac
+ import urllib import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url + url_obj = urllib.quote(url_obj, safe='/:') return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
Fix bug with ics url format with latest vobject version
## Code Before: import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True ## Instruction: Fix bug with ics url format with latest vobject version ## Code After: import urllib import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url url_obj = urllib.quote(url_obj, safe='/:') return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
# ... existing code ... import urllib import logging # ... modified code ... url_obj = event.url url_obj = urllib.quote(url_obj, safe='/:') return str(url_obj) # ... rest of the code ...
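The essence of the fix in the record above is percent-encoding the event URL returned by the CalDAV client while keeping the URL's structural characters intact. A tiny standalone illustration of urllib.quote with the same safe set (Python 2 spelling, matching the record; the host and path are made up):

import urllib

# An illustrative CalDAV event URL containing latin-1 text and a space.
raw = u'http://cal.example.com/dav/J\xe9r\xf4me/event 1.ics'.encode('iso-8859-1')

# Keep ':' '/' '?' '=' '&' '%' readable and percent-encode everything else.
quoted = urllib.quote(raw, safe=':/?=&%')
# quoted == 'http://cal.example.com/dav/J%E9r%F4me/event%201.ics'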
a626e97bdb8816ed46760c55ad402b64e391538a
revenue/admin.py
revenue/admin.py
from django.contrib import admin from django.core.exceptions import ValidationError from django.forms import BaseInlineFormSet, ModelForm from django.utils.translation import ugettext_lazy as _ from revenue.models import Receipt, FeeLine class FeeLinesInlineFormSet(BaseInlineFormSet): def clean(self): super(FeeLinesInlineFormSet, self).clean() total = 0 for form in self.forms: if not form.is_valid() or form.cleaned_data.get('DELETE'): return # there are other errors in the form or the item was deleted total += form.cleaned_data.get('amount', 0) self.instance.total_amount = total print(self.instance) class FeeLineForm(ModelForm): def clean(self): if self.cleaned_data['date_start'] > self.cleaned_data['date_end']: raise ValidationError(_("Date start must be before date end")) class FeeLinesInline(admin.TabularInline): form = FeeLineForm model = FeeLine formset = FeeLinesInlineFormSet extra = 1 def get_extra (self, request, obj=None, **kwargs): # Don't add any extra forms if the related object already exists. if obj: return 0 return self.extra class ReceiptAdmin(admin.ModelAdmin): readonly_fields = ['total_amount'] inlines = [FeeLinesInline] admin.site.register(Receipt, ReceiptAdmin)
from django.contrib import admin from django.core.exceptions import ValidationError from django import forms from django.utils.translation import ugettext_lazy as _ from revenue.models import Receipt, FeeLine class FeeLinesInlineFormSet(forms.BaseInlineFormSet): def clean(self): super(FeeLinesInlineFormSet, self).clean() total = 0 for form in self.forms: if not form.is_valid() or form.cleaned_data.get('DELETE'): continue # there are other errors in the form or the item was deleted total += form.cleaned_data.get('amount', 0) self.instance.total_amount = total class FeeLineForm(forms.ModelForm): def clean(self): if self.cleaned_data['date_start'] > self.cleaned_data['date_end']: raise ValidationError(_("Date start must be before date end")) class FeeLinesInline(admin.TabularInline): form = FeeLineForm model = FeeLine formset = FeeLinesInlineFormSet extra = 1 def get_extra (self, request, obj=None, **kwargs): # Don't add any extra forms if the related object already exists. if obj: return 0 return self.extra class ReceiptAdmin(admin.ModelAdmin): readonly_fields = ['total_amount'] inlines = [FeeLinesInline] admin.site.register(Receipt, ReceiptAdmin)
Fix how we calculate total to really account for deleted objects
Fix how we calculate total to really account for deleted objects
Python
mpl-2.0
jackbravo/condorest-django,jackbravo/condorest-django,jackbravo/condorest-django
from django.contrib import admin from django.core.exceptions import ValidationError - from django.forms import BaseInlineFormSet, ModelForm + from django import forms from django.utils.translation import ugettext_lazy as _ from revenue.models import Receipt, FeeLine - class FeeLinesInlineFormSet(BaseInlineFormSet): + class FeeLinesInlineFormSet(forms.BaseInlineFormSet): def clean(self): super(FeeLinesInlineFormSet, self).clean() total = 0 for form in self.forms: if not form.is_valid() or form.cleaned_data.get('DELETE'): - return # there are other errors in the form or the item was deleted + continue # there are other errors in the form or the item was deleted total += form.cleaned_data.get('amount', 0) self.instance.total_amount = total - print(self.instance) - class FeeLineForm(ModelForm): + class FeeLineForm(forms.ModelForm): def clean(self): if self.cleaned_data['date_start'] > self.cleaned_data['date_end']: raise ValidationError(_("Date start must be before date end")) class FeeLinesInline(admin.TabularInline): form = FeeLineForm model = FeeLine formset = FeeLinesInlineFormSet extra = 1 def get_extra (self, request, obj=None, **kwargs): # Don't add any extra forms if the related object already exists. if obj: return 0 return self.extra class ReceiptAdmin(admin.ModelAdmin): readonly_fields = ['total_amount'] inlines = [FeeLinesInline] admin.site.register(Receipt, ReceiptAdmin)
Fix how we calculate total to really account for deleted objects
## Code Before: from django.contrib import admin from django.core.exceptions import ValidationError from django.forms import BaseInlineFormSet, ModelForm from django.utils.translation import ugettext_lazy as _ from revenue.models import Receipt, FeeLine class FeeLinesInlineFormSet(BaseInlineFormSet): def clean(self): super(FeeLinesInlineFormSet, self).clean() total = 0 for form in self.forms: if not form.is_valid() or form.cleaned_data.get('DELETE'): return # there are other errors in the form or the item was deleted total += form.cleaned_data.get('amount', 0) self.instance.total_amount = total print(self.instance) class FeeLineForm(ModelForm): def clean(self): if self.cleaned_data['date_start'] > self.cleaned_data['date_end']: raise ValidationError(_("Date start must be before date end")) class FeeLinesInline(admin.TabularInline): form = FeeLineForm model = FeeLine formset = FeeLinesInlineFormSet extra = 1 def get_extra (self, request, obj=None, **kwargs): # Don't add any extra forms if the related object already exists. if obj: return 0 return self.extra class ReceiptAdmin(admin.ModelAdmin): readonly_fields = ['total_amount'] inlines = [FeeLinesInline] admin.site.register(Receipt, ReceiptAdmin) ## Instruction: Fix how we calculate total to really account for deleted objects ## Code After: from django.contrib import admin from django.core.exceptions import ValidationError from django import forms from django.utils.translation import ugettext_lazy as _ from revenue.models import Receipt, FeeLine class FeeLinesInlineFormSet(forms.BaseInlineFormSet): def clean(self): super(FeeLinesInlineFormSet, self).clean() total = 0 for form in self.forms: if not form.is_valid() or form.cleaned_data.get('DELETE'): continue # there are other errors in the form or the item was deleted total += form.cleaned_data.get('amount', 0) self.instance.total_amount = total class FeeLineForm(forms.ModelForm): def clean(self): if self.cleaned_data['date_start'] > self.cleaned_data['date_end']: raise ValidationError(_("Date start must be before date end")) class FeeLinesInline(admin.TabularInline): form = FeeLineForm model = FeeLine formset = FeeLinesInlineFormSet extra = 1 def get_extra (self, request, obj=None, **kwargs): # Don't add any extra forms if the related object already exists. if obj: return 0 return self.extra class ReceiptAdmin(admin.ModelAdmin): readonly_fields = ['total_amount'] inlines = [FeeLinesInline] admin.site.register(Receipt, ReceiptAdmin)
# ... existing code ... from django.core.exceptions import ValidationError from django import forms from django.utils.translation import ugettext_lazy as _ # ... modified code ... class FeeLinesInlineFormSet(forms.BaseInlineFormSet): def clean(self): ... if not form.is_valid() or form.cleaned_data.get('DELETE'): continue # there are other errors in the form or the item was deleted total += form.cleaned_data.get('amount', 0) ... self.instance.total_amount = total ... class FeeLineForm(forms.ModelForm): def clean(self): # ... rest of the code ...
5516b125bb00b928d85a044d3df777e1b0004d03
ovp_organizations/migrations/0008_auto_20161207_1941.py
ovp_organizations/migrations/0008_auto_20161207_1941.py
from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
Add ".only" restriction to query on migration 0008
Add ".only" restriction to query on migration 0008
Python
agpl-3.0
OpenVolunteeringPlatform/django-ovp-organizations,OpenVolunteeringPlatform/django-ovp-organizations
from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): - for organization in Organization.objects.all(): + for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): - for organization in Organization.objects.all(): + for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
Add ".only" restriction to query on migration 0008
## Code Before: from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ] ## Instruction: Add ".only" restriction to query on migration 0008 ## Code After: from __future__ import unicode_literals from django.db import migrations from ovp_organizations.models import Organization def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) def remove_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() class Migration(migrations.Migration): dependencies = [ ('ovp_organizations', '0007_organization_members'), ] operations = [ migrations.RunPython(add_members, reverse_code=remove_members) ]
// ... existing code ... def add_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.add(organization.owner) // ... modified code ... def remove_members(apps, schema_editor): for organization in Organization.objects.only('pk', 'members').all(): organization.members.clear() // ... rest of the code ...
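Background on the .only() call added in the record above: it narrows the SELECT to the named columns, which helps when a data migration loops over every row. The generic sketch below is illustrative only; the 'myapp' label, the apps.get_model() lookup and the .iterator() call are choices of this sketch rather than part of the original migration.

def add_members(apps, schema_editor):
    # Look the model up through the migration state instead of importing it.
    Organization = apps.get_model('myapp', 'Organization')
    # only() narrows the SELECT to the columns this loop touches;
    # iterator() streams rows instead of caching the whole queryset.
    for organization in Organization.objects.only('pk', 'owner').iterator():
        organization.members.add(organization.owner)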
e582a8632409cdf5625b51978e742ca9282c3d6f
show_vmbstereocamera.py
show_vmbstereocamera.py
import sys from PySide import QtGui import VisionToolkit as vt # # Main application # if __name__ == '__main__' : application = QtGui.QApplication( sys.argv ) widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) widget.show() sys.exit( application.exec_() )
import sys import cv2 import numpy as np #from PySide import QtGui import VisionToolkit as vt # # Image callback function # def Callback( frame_left, frame_right ) : # Put images side by side stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 ) # Resize image for display stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 ) # Display the stereo image cv2.imshow( 'StereoVision', stereo_image ) cv2.waitKey( 1 ) # # Main application # if __name__ == '__main__' : # application = QtGui.QApplication( sys.argv ) # widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) # widget.show() # sys.exit( application.exec_() ) # Initialize the Vimba driver vt.VmbStartup() # Initialize the stereo cameras camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' ) # Connect the cameras camera.Open() # Start image acquisition camera.StartCapture( Callback ) # Wait for user key press raw_input( 'Press enter to stop the capture...' ) # Stop image acquisition camera.StopCapture() # Disconnect the camera camera.Close() # Shutdown Vimba vt.VmbShutdown() # Cleanup OpenCV cv2.destroyAllWindows()
Add OpenCV display for debug.
Add OpenCV display for debug.
Python
mit
microy/PyStereoVisionToolkit,microy/VisionToolkit,microy/StereoVision,microy/VisionToolkit,microy/StereoVision,microy/PyStereoVisionToolkit
import sys + import cv2 + import numpy as np - from PySide import QtGui + #from PySide import QtGui import VisionToolkit as vt + + + # + # Image callback function + # + def Callback( frame_left, frame_right ) : + # Put images side by side + stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 ) + # Resize image for display + stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 ) + # Display the stereo image + cv2.imshow( 'StereoVision', stereo_image ) + cv2.waitKey( 1 ) # # Main application # if __name__ == '__main__' : - application = QtGui.QApplication( sys.argv ) + # application = QtGui.QApplication( sys.argv ) - widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) + # widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) - widget.show() + # widget.show() - sys.exit( application.exec_() ) + # sys.exit( application.exec_() ) + # Initialize the Vimba driver + vt.VmbStartup() + # Initialize the stereo cameras + camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' ) + # Connect the cameras + camera.Open() + # Start image acquisition + camera.StartCapture( Callback ) + # Wait for user key press + raw_input( 'Press enter to stop the capture...' ) + # Stop image acquisition + camera.StopCapture() + # Disconnect the camera + camera.Close() + # Shutdown Vimba + vt.VmbShutdown() + # Cleanup OpenCV + cv2.destroyAllWindows() +
Add OpenCV display for debug.
## Code Before: import sys from PySide import QtGui import VisionToolkit as vt # # Main application # if __name__ == '__main__' : application = QtGui.QApplication( sys.argv ) widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) widget.show() sys.exit( application.exec_() ) ## Instruction: Add OpenCV display for debug. ## Code After: import sys import cv2 import numpy as np #from PySide import QtGui import VisionToolkit as vt # # Image callback function # def Callback( frame_left, frame_right ) : # Put images side by side stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 ) # Resize image for display stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 ) # Display the stereo image cv2.imshow( 'StereoVision', stereo_image ) cv2.waitKey( 1 ) # # Main application # if __name__ == '__main__' : # application = QtGui.QApplication( sys.argv ) # widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) # widget.show() # sys.exit( application.exec_() ) # Initialize the Vimba driver vt.VmbStartup() # Initialize the stereo cameras camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' ) # Connect the cameras camera.Open() # Start image acquisition camera.StartCapture( Callback ) # Wait for user key press raw_input( 'Press enter to stop the capture...' ) # Stop image acquisition camera.StopCapture() # Disconnect the camera camera.Close() # Shutdown Vimba vt.VmbShutdown() # Cleanup OpenCV cv2.destroyAllWindows()
// ... existing code ... import sys import cv2 import numpy as np #from PySide import QtGui import VisionToolkit as vt # # Image callback function # def Callback( frame_left, frame_right ) : # Put images side by side stereo_image = np.concatenate( ( frame_left.image, frame_right.image ), axis = 1 ) # Resize image for display stereo_image = cv2.resize( stereo_image, None, fx=0.4, fy=0.4 ) # Display the stereo image cv2.imshow( 'StereoVision', stereo_image ) cv2.waitKey( 1 ) // ... modified code ... # application = QtGui.QApplication( sys.argv ) # widget = vt.VmbStereoCameraWidget( '50-0503326223', '50-0503323406' ) # widget.show() # sys.exit( application.exec_() ) # Initialize the Vimba driver vt.VmbStartup() # Initialize the stereo cameras camera = vt.VmbStereoCamera( '50-0503326223', '50-0503323406' ) # Connect the cameras camera.Open() # Start image acquisition camera.StartCapture( Callback ) # Wait for user key press raw_input( 'Press enter to stop the capture...' ) # Stop image acquisition camera.StopCapture() # Disconnect the camera camera.Close() # Shutdown Vimba vt.VmbShutdown() # Cleanup OpenCV cv2.destroyAllWindows() // ... rest of the code ...
1b9aa9909b284489c9f8a5d38b1c5520d5916dc7
feature_extraction/measurements/__init__.py
feature_extraction/measurements/__init__.py
from collections import defaultdict from feature_extraction.util import DefaultAttributeDict class Measurement(object): """ A generic feature measurement. Attributes ---------- default_options Can be set by subclasses to set default option values """ default_options = {} def __init__(self, options=None): """ When initializing this measurement, options can be passed. These are exposed to internal algorithms as `self.options`. Parameters ---------- options : dict A dict of options for this measurement. """ self.options = DefaultAttributeDict() self.options.update(self.default_options or {}) self.options.update(options or {}) from .pixelaverage import PixelAverage from .texture_haralick import HaralickTexture
from collections import defaultdict from feature_extraction.util import AttributeDict class Measurement(object): """ A generic feature measurement. Attributes ---------- default_options Can be set by subclasses to set default option values """ default_options = {} def __init__(self, options=None): """ When initializing this measurement, options can be passed. These are exposed to internal algorithms as `self.options`. Parameters ---------- options : dict A dict of options for this measurement. """ self.options = AttributeDict() self.options.update(self.default_options or {}) self.options.update(options or {}) from .pixelaverage import PixelAverage from .texture_haralick import HaralickTexture
Switch back to AttributeDict for measurement options
Switch back to AttributeDict for measurement options
Python
apache-2.0
widoptimization-willett/feature-extraction
from collections import defaultdict - from feature_extraction.util import DefaultAttributeDict + from feature_extraction.util import AttributeDict class Measurement(object): """ A generic feature measurement. Attributes ---------- default_options Can be set by subclasses to set default option values """ default_options = {} def __init__(self, options=None): """ When initializing this measurement, options can be passed. These are exposed to internal algorithms as `self.options`. Parameters ---------- options : dict A dict of options for this measurement. """ - self.options = DefaultAttributeDict() + self.options = AttributeDict() self.options.update(self.default_options or {}) self.options.update(options or {}) from .pixelaverage import PixelAverage from .texture_haralick import HaralickTexture
Switch back to AttributeDict for measurement options
## Code Before: from collections import defaultdict from feature_extraction.util import DefaultAttributeDict class Measurement(object): """ A generic feature measurement. Attributes ---------- default_options Can be set by subclasses to set default option values """ default_options = {} def __init__(self, options=None): """ When initializing this measurement, options can be passed. These are exposed to internal algorithms as `self.options`. Parameters ---------- options : dict A dict of options for this measurement. """ self.options = DefaultAttributeDict() self.options.update(self.default_options or {}) self.options.update(options or {}) from .pixelaverage import PixelAverage from .texture_haralick import HaralickTexture ## Instruction: Switch back to AttributeDict for measurement options ## Code After: from collections import defaultdict from feature_extraction.util import AttributeDict class Measurement(object): """ A generic feature measurement. Attributes ---------- default_options Can be set by subclasses to set default option values """ default_options = {} def __init__(self, options=None): """ When initializing this measurement, options can be passed. These are exposed to internal algorithms as `self.options`. Parameters ---------- options : dict A dict of options for this measurement. """ self.options = AttributeDict() self.options.update(self.default_options or {}) self.options.update(options or {}) from .pixelaverage import PixelAverage from .texture_haralick import HaralickTexture
... from collections import defaultdict from feature_extraction.util import AttributeDict ... """ self.options = AttributeDict() self.options.update(self.default_options or {}) ...
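The record above only changes which helper class backs self.options, so for readers unfamiliar with the name: an AttributeDict is conventionally a dict whose keys can also be accessed as attributes. The few lines below are a generic sketch of such a class, not the actual implementation in feature_extraction.util.

class AttributeDict(dict):
    """A dict whose keys can also be read and written as attributes."""

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value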
cc3d89d4357099ba2df1628e9d91e48c743bd471
api/common/views.py
api/common/views.py
import subprocess from django.conf import settings from django.http import JsonResponse, HttpResponseBadRequest from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt from rest_framework.authtoken.models import Token @csrf_exempt def deploy(request): deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY') # branch = request.POST.get('BRANCH') commit = request.POST.get('COMMIT') if deploy_secret_key != settings.SECRET_KEY: return HttpResponseBadRequest('Incorrect key.') subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE) return JsonResponse({'result': 'deploy started'}) def social_redirect(request): token, _ = Token.objects.get_or_create(user=request.user) return redirect('http://localhost:3000/finish-steam/{}'.format(token.key))
import subprocess from django.conf import settings from django.http import JsonResponse, HttpResponseBadRequest from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt from rest_framework.authtoken.models import Token @csrf_exempt def deploy(request): deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY') # branch = request.POST.get('BRANCH') commit = request.POST.get('COMMIT') if deploy_secret_key != settings.SECRET_KEY: return HttpResponseBadRequest('Incorrect key.') subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE) return JsonResponse({'result': 'deploy started'}) def social_redirect(request): token, _ = Token.objects.get_or_create(user=request.user) return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
Fix incorrect social redirect link
Fix incorrect social redirect link
Python
apache-2.0
prattl/teamfinder,prattl/teamfinder,prattl/teamfinder,prattl/teamfinder
import subprocess from django.conf import settings from django.http import JsonResponse, HttpResponseBadRequest from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt from rest_framework.authtoken.models import Token @csrf_exempt def deploy(request): deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY') # branch = request.POST.get('BRANCH') commit = request.POST.get('COMMIT') if deploy_secret_key != settings.SECRET_KEY: return HttpResponseBadRequest('Incorrect key.') subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE) return JsonResponse({'result': 'deploy started'}) def social_redirect(request): token, _ = Token.objects.get_or_create(user=request.user) - return redirect('http://localhost:3000/finish-steam/{}'.format(token.key)) + return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
Fix incorrect social redirect link
## Code Before: import subprocess from django.conf import settings from django.http import JsonResponse, HttpResponseBadRequest from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt from rest_framework.authtoken.models import Token @csrf_exempt def deploy(request): deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY') # branch = request.POST.get('BRANCH') commit = request.POST.get('COMMIT') if deploy_secret_key != settings.SECRET_KEY: return HttpResponseBadRequest('Incorrect key.') subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE) return JsonResponse({'result': 'deploy started'}) def social_redirect(request): token, _ = Token.objects.get_or_create(user=request.user) return redirect('http://localhost:3000/finish-steam/{}'.format(token.key)) ## Instruction: Fix incorrect social redirect link ## Code After: import subprocess from django.conf import settings from django.http import JsonResponse, HttpResponseBadRequest from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt from rest_framework.authtoken.models import Token @csrf_exempt def deploy(request): deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY') # branch = request.POST.get('BRANCH') commit = request.POST.get('COMMIT') if deploy_secret_key != settings.SECRET_KEY: return HttpResponseBadRequest('Incorrect key.') subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE) return JsonResponse({'result': 'deploy started'}) def social_redirect(request): token, _ = Token.objects.get_or_create(user=request.user) return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key))
... token, _ = Token.objects.get_or_create(user=request.user) return redirect('http://dotateamfinder.com/finish-steam/{}'.format(token.key)) ...
d4a0a85673b5d61b82c65e77efcd6518da719952
pmxbot/__init__.py
pmxbot/__init__.py
import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname='pmxbot', database='sqlite:pmxbot.sqlite', server_host='localhost', server_port=6667, use_ssl=False, password=None, nickserv_password=None, silent_bot=False, log_channels=[], other_channels=[], places=['London', 'Tokyo', 'New York'], librarypaste='http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname='pmxbot', database='sqlite:pmxbot.sqlite', server_host='localhost', server_port=6667, use_ssl=False, password=None, nickserv_password=None, silent_bot=False, log_channels=[], other_channels=[], librarypaste='http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
Remove places default config. It doesn't appear to be used anywhere.
Remove places default config. It doesn't appear to be used anywhere.
Python
mit
yougov/pmxbot,yougov/pmxbot,yougov/pmxbot
import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname='pmxbot', database='sqlite:pmxbot.sqlite', server_host='localhost', server_port=6667, use_ssl=False, password=None, nickserv_password=None, silent_bot=False, log_channels=[], other_channels=[], - places=['London', 'Tokyo', 'New York'], librarypaste='http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
Remove places default config. It doesn't appear to be used anywhere.
## Code Before: import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname='pmxbot', database='sqlite:pmxbot.sqlite', server_host='localhost', server_port=6667, use_ssl=False, password=None, nickserv_password=None, silent_bot=False, log_channels=[], other_channels=[], places=['London', 'Tokyo', 'New York'], librarypaste='http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object" ## Instruction: Remove places default config. It doesn't appear to be used anywhere. ## Code After: import socket import logging as _logging from .dictlib import ConfigDict config = ConfigDict( bot_nickname='pmxbot', database='sqlite:pmxbot.sqlite', server_host='localhost', server_port=6667, use_ssl=False, password=None, nickserv_password=None, silent_bot=False, log_channels=[], other_channels=[], librarypaste='http://paste.jaraco.com', ) config['logs URL'] = 'http://' + socket.getfqdn() config['log level'] = _logging.INFO "The config object"
# ... existing code ... other_channels=[], librarypaste='http://paste.jaraco.com', # ... rest of the code ...
aa092529bb643eabae45ae051ecd99d6bebb88ea
src/som/vmobjects/abstract_object.py
src/som/vmobjects/abstract_object.py
class AbstractObject(object): def __init__(self): pass def send(self, frame, selector_string, arguments, universe): # Turn the selector string into a selector selector = universe.symbol_for(selector_string) invokable = self.get_class(universe).lookup_invokable(selector) return invokable.invoke(frame, self, arguments) def send_does_not_understand(self, frame, selector, arguments, universe): # Compute the number of arguments number_of_arguments = selector.get_number_of_signature_arguments() arguments_array = universe.new_array_with_length(number_of_arguments) for i in range(0, number_of_arguments): arguments_array.set_indexable_field(i, arguments[i]) args = [selector, arguments_array] self.send(frame, "doesNotUnderstand:arguments:", args, universe) def send_unknown_global(self, frame, global_name, universe): arguments = [global_name] self.send(frame, "unknownGlobal:", arguments, universe) def send_escaped_block(self, frame, block, universe): arguments = [block] self.send(frame, "escapedBlock:", arguments, universe) def get_class(self, universe): raise NotImplementedError("Subclasses need to implement get_class(universe).") def is_invokable(self): return False def __str__(self): from som.vm.universe import get_current return "a " + self.get_class(get_current()).get_name().get_string()
class AbstractObject(object): def __init__(self): pass def send(self, frame, selector_string, arguments, universe): # Turn the selector string into a selector selector = universe.symbol_for(selector_string) invokable = self.get_class(universe).lookup_invokable(selector) return invokable.invoke(frame, self, arguments) def send_does_not_understand(self, frame, selector, arguments, universe): # Compute the number of arguments number_of_arguments = selector.get_number_of_signature_arguments() arguments_array = universe.new_array_with_length(number_of_arguments) for i in range(0, number_of_arguments): arguments_array.set_indexable_field(i, arguments[i]) args = [selector, arguments_array] return self.send(frame, "doesNotUnderstand:arguments:", args, universe) def send_unknown_global(self, frame, global_name, universe): arguments = [global_name] return self.send(frame, "unknownGlobal:", arguments, universe) def send_escaped_block(self, frame, block, universe): arguments = [block] return self.send(frame, "escapedBlock:", arguments, universe) def get_class(self, universe): raise NotImplementedError("Subclasses need to implement get_class(universe).") def is_invokable(self): return False def __str__(self): from som.vm.universe import get_current return "a " + self.get_class(get_current()).get_name().get_string()
Send methods need to return the result
Send methods need to return the result Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
Python
mit
smarr/RTruffleSOM,SOM-st/PySOM,SOM-st/RPySOM,smarr/PySOM,SOM-st/RTruffleSOM,SOM-st/RPySOM,SOM-st/RTruffleSOM,SOM-st/PySOM,smarr/RTruffleSOM,smarr/PySOM
class AbstractObject(object): def __init__(self): pass def send(self, frame, selector_string, arguments, universe): # Turn the selector string into a selector selector = universe.symbol_for(selector_string) invokable = self.get_class(universe).lookup_invokable(selector) return invokable.invoke(frame, self, arguments) def send_does_not_understand(self, frame, selector, arguments, universe): # Compute the number of arguments number_of_arguments = selector.get_number_of_signature_arguments() arguments_array = universe.new_array_with_length(number_of_arguments) for i in range(0, number_of_arguments): arguments_array.set_indexable_field(i, arguments[i]) args = [selector, arguments_array] - self.send(frame, "doesNotUnderstand:arguments:", args, universe) + return self.send(frame, "doesNotUnderstand:arguments:", args, universe) def send_unknown_global(self, frame, global_name, universe): arguments = [global_name] - self.send(frame, "unknownGlobal:", arguments, universe) + return self.send(frame, "unknownGlobal:", arguments, universe) def send_escaped_block(self, frame, block, universe): arguments = [block] - self.send(frame, "escapedBlock:", arguments, universe) + return self.send(frame, "escapedBlock:", arguments, universe) def get_class(self, universe): raise NotImplementedError("Subclasses need to implement get_class(universe).") def is_invokable(self): return False def __str__(self): from som.vm.universe import get_current return "a " + self.get_class(get_current()).get_name().get_string()
Send methods need to return the result
## Code Before: class AbstractObject(object): def __init__(self): pass def send(self, frame, selector_string, arguments, universe): # Turn the selector string into a selector selector = universe.symbol_for(selector_string) invokable = self.get_class(universe).lookup_invokable(selector) return invokable.invoke(frame, self, arguments) def send_does_not_understand(self, frame, selector, arguments, universe): # Compute the number of arguments number_of_arguments = selector.get_number_of_signature_arguments() arguments_array = universe.new_array_with_length(number_of_arguments) for i in range(0, number_of_arguments): arguments_array.set_indexable_field(i, arguments[i]) args = [selector, arguments_array] self.send(frame, "doesNotUnderstand:arguments:", args, universe) def send_unknown_global(self, frame, global_name, universe): arguments = [global_name] self.send(frame, "unknownGlobal:", arguments, universe) def send_escaped_block(self, frame, block, universe): arguments = [block] self.send(frame, "escapedBlock:", arguments, universe) def get_class(self, universe): raise NotImplementedError("Subclasses need to implement get_class(universe).") def is_invokable(self): return False def __str__(self): from som.vm.universe import get_current return "a " + self.get_class(get_current()).get_name().get_string() ## Instruction: Send methods need to return the result ## Code After: class AbstractObject(object): def __init__(self): pass def send(self, frame, selector_string, arguments, universe): # Turn the selector string into a selector selector = universe.symbol_for(selector_string) invokable = self.get_class(universe).lookup_invokable(selector) return invokable.invoke(frame, self, arguments) def send_does_not_understand(self, frame, selector, arguments, universe): # Compute the number of arguments number_of_arguments = selector.get_number_of_signature_arguments() arguments_array = universe.new_array_with_length(number_of_arguments) for i in range(0, number_of_arguments): arguments_array.set_indexable_field(i, arguments[i]) args = [selector, arguments_array] return self.send(frame, "doesNotUnderstand:arguments:", args, universe) def send_unknown_global(self, frame, global_name, universe): arguments = [global_name] return self.send(frame, "unknownGlobal:", arguments, universe) def send_escaped_block(self, frame, block, universe): arguments = [block] return self.send(frame, "escapedBlock:", arguments, universe) def get_class(self, universe): raise NotImplementedError("Subclasses need to implement get_class(universe).") def is_invokable(self): return False def __str__(self): from som.vm.universe import get_current return "a " + self.get_class(get_current()).get_name().get_string()
# ... existing code ... args = [selector, arguments_array] return self.send(frame, "doesNotUnderstand:arguments:", args, universe) # ... modified code ... arguments = [global_name] return self.send(frame, "unknownGlobal:", arguments, universe) ... arguments = [block] return self.send(frame, "escapedBlock:", arguments, universe) # ... rest of the code ...
f8ae44cb19584a2b7d08b08dc4f32651acfe90f9
core/templatetags/tags.py
core/templatetags/tags.py
from core.models import Comment, Tag from django import template register = template.Library() #May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template @register.simple_tag def recent_comments(): comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3] output = '<ul id="recent">' for comment in comments: if not comment.name: comment.name = "Anonymous" if comment.website: output += '<li><a href="' + comment.website + '">' + comment.name + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' elif comment.user: output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' else: output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' output += '</ul>' return output @register.simple_tag def tags(): tags = Tag.objects.order_by('?')[:10] return tags
from core.models import Comment, Tag from django import template register = template.Library() #May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template @register.simple_tag def recent_comments(): comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3] output = '<ul id="recent">' for comment in comments: if not comment.name: comment.name = "Anonymous" elif comment.user: output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' else: output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' output += '</ul>' return output @register.simple_tag def tags(): tags = Tag.objects.order_by('?')[:10] return tags
Remove website from recent comments
Remove website from recent comments
Python
bsd-2-clause
mburst/burstolio,mburst/burstolio,mburst/burstolio
from core.models import Comment, Tag from django import template register = template.Library() #May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template @register.simple_tag def recent_comments(): comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3] output = '<ul id="recent">' for comment in comments: if not comment.name: comment.name = "Anonymous" - if comment.website: - output += '<li><a href="' + comment.website + '">' + comment.name + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' elif comment.user: output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' else: output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' output += '</ul>' return output @register.simple_tag def tags(): tags = Tag.objects.order_by('?')[:10] return tags
Remove website from recent comments
## Code Before: from core.models import Comment, Tag from django import template register = template.Library() #May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template @register.simple_tag def recent_comments(): comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3] output = '<ul id="recent">' for comment in comments: if not comment.name: comment.name = "Anonymous" if comment.website: output += '<li><a href="' + comment.website + '">' + comment.name + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' elif comment.user: output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' else: output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' output += '</ul>' return output @register.simple_tag def tags(): tags = Tag.objects.order_by('?')[:10] return tags ## Instruction: Remove website from recent comments ## Code After: from core.models import Comment, Tag from django import template register = template.Library() #May want to ditch this for a middleware that passes in the comments object so that I can do the manipulations in the actual template @register.simple_tag def recent_comments(): comments = Comment.objects.select_related('entry').filter(deleted=False, spam=False).order_by('-id')[:3] output = '<ul id="recent">' for comment in comments: if not comment.name: comment.name = "Anonymous" elif comment.user: output += '<li><a href="http://www.github.com/mburst">' + comment.user.get_full_name() + '</a> - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' else: output += '<li>' + comment.name + ' - <a href="' + comment.entry.get_absolute_url() + '">' + comment.entry.title + '</a></li>' output += '</ul>' return output @register.simple_tag def tags(): tags = Tag.objects.order_by('?')[:10] return tags
... comment.name = "Anonymous" elif comment.user: ...
8c90485e5cab6294a38cfc9332eda6fe8ca15483
project/config.py
project/config.py
import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000']
import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
Add two new domains to whitelist for CORS.
Add two new domains to whitelist for CORS.
Python
apache-2.0
AustinStoneProjects/Founderati-Server,AustinStoneProjects/Founderati-Server
import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 - config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000'] + config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
Add two new domains to whitelist for CORS.
## Code Before: import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000'] ## Instruction: Add two new domains to whitelist for CORS. ## Code After: import os config = {} system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR') system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR') config['HOST'] = '' config['PORT'] = 5000 config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost' config['MONGODB_PORT'] = 27017 config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost' config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
... config['ELASTIC_PORT'] = 9200 config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000'] ...
ae8f9c39cd75d837a4cb5a4cea4d3d11fd1cabed
tests/test_comments.py
tests/test_comments.py
from hypothesis_auto import auto_pytest_magic from isort import comments auto_pytest_magic(comments.parse) auto_pytest_magic(comments.add_to_line)
from hypothesis_auto import auto_pytest_magic from isort import comments auto_pytest_magic(comments.parse) auto_pytest_magic(comments.add_to_line) def test_add_to_line(): assert comments.add_to_line([], "import os # comment", removed=True).strip() == "import os"
Add additional test case for comments
Add additional test case for comments
Python
mit
PyCQA/isort,PyCQA/isort
from hypothesis_auto import auto_pytest_magic from isort import comments auto_pytest_magic(comments.parse) auto_pytest_magic(comments.add_to_line) + + def test_add_to_line(): + assert comments.add_to_line([], "import os # comment", removed=True).strip() == "import os" +
Add additional test case for comments
## Code Before: from hypothesis_auto import auto_pytest_magic from isort import comments auto_pytest_magic(comments.parse) auto_pytest_magic(comments.add_to_line) ## Instruction: Add additional test case for comments ## Code After: from hypothesis_auto import auto_pytest_magic from isort import comments auto_pytest_magic(comments.parse) auto_pytest_magic(comments.add_to_line) def test_add_to_line(): assert comments.add_to_line([], "import os # comment", removed=True).strip() == "import os"
// ... existing code ... auto_pytest_magic(comments.add_to_line) def test_add_to_line(): assert comments.add_to_line([], "import os # comment", removed=True).strip() == "import os" // ... rest of the code ...
d48ae791364a0d29d60636adfde1f143858794cd
api/identifiers/serializers.py
api/identifiers/serializers.py
from rest_framework import serializers as ser from api.base.utils import absolute_reverse from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) filterable_fields = frozenset(['category']) value = ser.CharField(read_only=True) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) id = IDField(source='_id', read_only=True) links = LinksField({'self': 'self_url'}) class Meta: type_ = 'identifiers' def get_absolute_url(self, obj): return obj.absolute_api_v2_url def get_id(self, obj): return obj._id def get_detail_url(self, obj): import ipdb; ipdb.set_trace() return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id) def self_url(self, obj): return absolute_reverse('identifiers:identifier-detail', kwargs={ 'identifier_id': obj._id, })
from rest_framework import serializers as ser from api.base.utils import absolute_reverse from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) filterable_fields = frozenset(['category']) value = ser.CharField(read_only=True) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) id = IDField(source='_id', read_only=True) links = LinksField({'self': 'self_url'}) class Meta: type_ = 'identifiers' def get_absolute_url(self, obj): return obj.absolute_api_v2_url def get_id(self, obj): return obj._id def get_detail_url(self, obj): return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id) def self_url(self, obj): return absolute_reverse('identifiers:identifier-detail', kwargs={ 'identifier_id': obj._id, })
Remove rogue debugger how embarrassing
Remove rogue debugger how embarrassing
Python
apache-2.0
rdhyee/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,acshi/osf.io,abought/osf.io,amyshi188/osf.io,erinspace/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,leb2dg/osf.io,mattclark/osf.io,samchrisinger/osf.io,alexschiller/osf.io,mluke93/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,rdhyee/osf.io,caneruguz/osf.io,laurenrevere/osf.io,amyshi188/osf.io,acshi/osf.io,saradbowman/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,aaxelb/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,mfraezz/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,mattclark/osf.io,SSJohns/osf.io,acshi/osf.io,mluke93/osf.io,cslzchen/osf.io,wearpants/osf.io,hmoco/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,wearpants/osf.io,abought/osf.io,felliott/osf.io,wearpants/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,jnayak1/osf.io,TomBaxter/osf.io,abought/osf.io,pattisdr/osf.io,aaxelb/osf.io,Nesiehr/osf.io,jnayak1/osf.io,crcresearch/osf.io,alexschiller/osf.io,baylee-d/osf.io,baylee-d/osf.io,chennan47/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,emetsger/osf.io,samchrisinger/osf.io,adlius/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,chrisseto/osf.io,jnayak1/osf.io,binoculars/osf.io,erinspace/osf.io,adlius/osf.io,cwisecarver/osf.io,emetsger/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,kwierman/osf.io,wearpants/osf.io,mluo613/osf.io,leb2dg/osf.io,zamattiac/osf.io,adlius/osf.io,cslzchen/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,erinspace/osf.io,monikagrabowska/osf.io,SSJohns/osf.io,chennan47/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,pattisdr/osf.io,TomBaxter/osf.io,sloria/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,emetsger/osf.io,binoculars/osf.io,Nesiehr/osf.io,zamattiac/osf.io,kwierman/osf.io,mluo613/osf.io,icereval/osf.io,hmoco/osf.io,saradbowman/osf.io,caseyrollins/osf.io,mfraezz/osf.io,caneruguz/osf.io,felliott/osf.io,mluo613/osf.io,rdhyee/osf.io,emetsger/osf.io,Nesiehr/osf.io,alexschiller/osf.io,cslzchen/osf.io,hmoco/osf.io,Nesiehr/osf.io,kwierman/osf.io,chrisseto/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,felliott/osf.io,acshi/osf.io,sloria/osf.io,amyshi188/osf.io,mfraezz/osf.io,adlius/osf.io,HalcyonChimera/osf.io,felliott/osf.io,laurenrevere/osf.io,mluo613/osf.io,caseyrollins/osf.io,aaxelb/osf.io,kwierman/osf.io,monikagrabowska/osf.io,mluke93/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,icereval/osf.io,mattclark/osf.io,cslzchen/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,abought/osf.io,icereval/osf.io,pattisdr/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,acshi/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,mluke93/osf.io,Johnetordoff/osf.io
from rest_framework import serializers as ser from api.base.utils import absolute_reverse from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) filterable_fields = frozenset(['category']) value = ser.CharField(read_only=True) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) id = IDField(source='_id', read_only=True) links = LinksField({'self': 'self_url'}) class Meta: type_ = 'identifiers' def get_absolute_url(self, obj): return obj.absolute_api_v2_url def get_id(self, obj): return obj._id def get_detail_url(self, obj): - import ipdb; ipdb.set_trace() return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id) def self_url(self, obj): return absolute_reverse('identifiers:identifier-detail', kwargs={ 'identifier_id': obj._id, })
Remove rogue debugger how embarrassing
## Code Before: from rest_framework import serializers as ser from api.base.utils import absolute_reverse from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) filterable_fields = frozenset(['category']) value = ser.CharField(read_only=True) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) id = IDField(source='_id', read_only=True) links = LinksField({'self': 'self_url'}) class Meta: type_ = 'identifiers' def get_absolute_url(self, obj): return obj.absolute_api_v2_url def get_id(self, obj): return obj._id def get_detail_url(self, obj): import ipdb; ipdb.set_trace() return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id) def self_url(self, obj): return absolute_reverse('identifiers:identifier-detail', kwargs={ 'identifier_id': obj._id, }) ## Instruction: Remove rogue debugger how embarassing ## Code After: from rest_framework import serializers as ser from api.base.utils import absolute_reverse from api.base.serializers import JSONAPISerializer, RelationshipField, IDField, LinksField class IdentifierSerializer(JSONAPISerializer): category = ser.CharField(read_only=True) filterable_fields = frozenset(['category']) value = ser.CharField(read_only=True) referent = RelationshipField( related_view='registrations:registration-detail', related_view_kwargs={'node_id': '<referent._id>'}, ) id = IDField(source='_id', read_only=True) links = LinksField({'self': 'self_url'}) class Meta: type_ = 'identifiers' def get_absolute_url(self, obj): return obj.absolute_api_v2_url def get_id(self, obj): return obj._id def get_detail_url(self, obj): return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id) def self_url(self, obj): return absolute_reverse('identifiers:identifier-detail', kwargs={ 'identifier_id': obj._id, })
# ... existing code ... def get_detail_url(self, obj): return '{}/identifiers/{}'.format(obj.absolute_api_v2_url, obj._id) # ... rest of the code ...
3518e9088ecbbc273f922ba418d2962d6af2dda5
feature_extraction/measurements/texture_haralick.py
feature_extraction/measurements/texture_haralick.py
from . import Measurement import feature_extraction.util.cleanup as cleanup class HaralickTexture(Measurement): def compute(self, image): return []
from . import Measurement import feature_extraction.util.cleanup as cleanup from skimage.morphology import binary_erosion, disk class HaralickTexture(Measurement): default_options = { 'clip_cell_borders': True, 'erode_cell': False, 'erode_cell_amount': False, } def __init__(self, options=None): super(HaralickTexture, self).__init__(options) def compute(self, image): # -- preprocessing if self.options.clip_cell_borders: # get the cell boundary mask mask = cleanup.cell_boundary_mask(image) # if we're told to, erode the mask with a disk by some amount if self.options.erode_cell: mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount)) # mask the image image = image[mask] # -- haralick setup and run return []
Add cell-boundary preprocessing to HaralickTexture measurement
Add cell-boundary preprocessing to HaralickTexture measurement
Python
apache-2.0
widoptimization-willett/feature-extraction
from . import Measurement import feature_extraction.util.cleanup as cleanup + from skimage.morphology import binary_erosion, disk class HaralickTexture(Measurement): + default_options = { + 'clip_cell_borders': True, + 'erode_cell': False, + 'erode_cell_amount': False, + } + def __init__(self, options=None): + super(HaralickTexture, self).__init__(options) + def compute(self, image): + # -- preprocessing + if self.options.clip_cell_borders: + # get the cell boundary mask + mask = cleanup.cell_boundary_mask(image) + + # if we're told to, erode the mask with a disk by some amount + if self.options.erode_cell: + mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount)) + + # mask the image + image = image[mask] + + # -- haralick setup and run + return []
Add cell-boundary preprocessing to HaralickTexture measurement
## Code Before: from . import Measurement import feature_extraction.util.cleanup as cleanup class HaralickTexture(Measurement): def compute(self, image): return [] ## Instruction: Add cell-boundary preprocessing to HaralickTexture measurement ## Code After: from . import Measurement import feature_extraction.util.cleanup as cleanup from skimage.morphology import binary_erosion, disk class HaralickTexture(Measurement): default_options = { 'clip_cell_borders': True, 'erode_cell': False, 'erode_cell_amount': False, } def __init__(self, options=None): super(HaralickTexture, self).__init__(options) def compute(self, image): # -- preprocessing if self.options.clip_cell_borders: # get the cell boundary mask mask = cleanup.cell_boundary_mask(image) # if we're told to, erode the mask with a disk by some amount if self.options.erode_cell: mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount)) # mask the image image = image[mask] # -- haralick setup and run return []
... import feature_extraction.util.cleanup as cleanup from skimage.morphology import binary_erosion, disk ... class HaralickTexture(Measurement): default_options = { 'clip_cell_borders': True, 'erode_cell': False, 'erode_cell_amount': False, } def __init__(self, options=None): super(HaralickTexture, self).__init__(options) def compute(self, image): # -- preprocessing if self.options.clip_cell_borders: # get the cell boundary mask mask = cleanup.cell_boundary_mask(image) # if we're told to, erode the mask with a disk by some amount if self.options.erode_cell: mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount)) # mask the image image = image[mask] # -- haralick setup and run return [] ...
f22fa6d0c1b7e3bde95554f87af7254c2c381c41
django_app_lti/urls.py
django_app_lti/urls.py
from django.urls import path from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view urlpatterns = [ path('', LTILaunchView.as_view(), name='index'), path('launch', LTILaunchView.as_view(), name='launch'), path('config', LTIToolConfigView.as_view(), name='config'), path('logout', logout_view, name="logout"), path('logged-out', logged_out_view, name="logged-out"), ]
from django.urls import path from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view app_name = 'lti' urlpatterns = [ path('', LTILaunchView.as_view(), name='index'), path('launch', LTILaunchView.as_view(), name='launch'), path('config', LTIToolConfigView.as_view(), name='config'), path('logout', logout_view, name="logout"), path('logged-out', logged_out_view, name="logged-out"), ]
Add app_name to url module
Add app_name to url module
Python
bsd-3-clause
Harvard-ATG/django-app-lti
from django.urls import path from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view + app_name = 'lti' urlpatterns = [ path('', LTILaunchView.as_view(), name='index'), path('launch', LTILaunchView.as_view(), name='launch'), path('config', LTIToolConfigView.as_view(), name='config'), path('logout', logout_view, name="logout"), path('logged-out', logged_out_view, name="logged-out"), ]
Add app_name to url module
## Code Before: from django.urls import path from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view urlpatterns = [ path('', LTILaunchView.as_view(), name='index'), path('launch', LTILaunchView.as_view(), name='launch'), path('config', LTIToolConfigView.as_view(), name='config'), path('logout', logout_view, name="logout"), path('logged-out', logged_out_view, name="logged-out"), ] ## Instruction: Add app_name to url module ## Code After: from django.urls import path from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view app_name = 'lti' urlpatterns = [ path('', LTILaunchView.as_view(), name='index'), path('launch', LTILaunchView.as_view(), name='launch'), path('config', LTIToolConfigView.as_view(), name='config'), path('logout', logout_view, name="logout"), path('logged-out', logged_out_view, name="logged-out"), ]
// ... existing code ... app_name = 'lti' urlpatterns = [ // ... rest of the code ...
4e30a58386afb5b34bd83c8115c55e5d09b8f631
common/views.py
common/views.py
from django.shortcuts import render from common.models.Furniture import Furniture from common.models.Plan import Plan def overlay(request, floor=1): edit_rooms = False if request.method == 'POST': if 'floor' in request.POST: floor = request.POST['floor'] if 'edit_rooms' in request.POST: edit_rooms = True rooms = Plan.objects.filter(floor=floor) furnitures = Furniture.objects.filter(floor=floor) radious=10 return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
from django.shortcuts import render from common.models.Furniture import Furniture from common.models.Plan import Plan def overlay(request, floor=1): edit_rooms = False if request.method == 'POST': if 'floor' in request.POST: floor = request.POST['floor'] if 'edit_rooms' in request.POST: edit_rooms = True rooms = Plan.objects.select_related('room__id').filter(floor=floor) furnitures = Furniture.objects.select_related('device').filter(floor=floor) radious=10 return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
Improve performance by prefetching where needed
Improve performance by prefetching where needed
Python
agpl-3.0
Pajn/RAXA-Django,Pajn/RAXA-Django
from django.shortcuts import render from common.models.Furniture import Furniture from common.models.Plan import Plan def overlay(request, floor=1): edit_rooms = False if request.method == 'POST': if 'floor' in request.POST: floor = request.POST['floor'] if 'edit_rooms' in request.POST: edit_rooms = True - rooms = Plan.objects.filter(floor=floor) + rooms = Plan.objects.select_related('room__id').filter(floor=floor) - furnitures = Furniture.objects.filter(floor=floor) + furnitures = Furniture.objects.select_related('device').filter(floor=floor) radious=10 return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
Improve performance by prefetching where needed
## Code Before: from django.shortcuts import render from common.models.Furniture import Furniture from common.models.Plan import Plan def overlay(request, floor=1): edit_rooms = False if request.method == 'POST': if 'floor' in request.POST: floor = request.POST['floor'] if 'edit_rooms' in request.POST: edit_rooms = True rooms = Plan.objects.filter(floor=floor) furnitures = Furniture.objects.filter(floor=floor) radious=10 return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms}) ## Instruction: Improve performance by prefetching where needed ## Code After: from django.shortcuts import render from common.models.Furniture import Furniture from common.models.Plan import Plan def overlay(request, floor=1): edit_rooms = False if request.method == 'POST': if 'floor' in request.POST: floor = request.POST['floor'] if 'edit_rooms' in request.POST: edit_rooms = True rooms = Plan.objects.select_related('room__id').filter(floor=floor) furnitures = Furniture.objects.select_related('device').filter(floor=floor) radious=10 return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
# ... existing code ... rooms = Plan.objects.select_related('room__id').filter(floor=floor) furnitures = Furniture.objects.select_related('device').filter(floor=floor) radious=10 # ... rest of the code ...
c8921cf12418762c17d0b858ea2e134f292b2838
fireplace/cards/wog/neutral_epic.py
fireplace/cards/wog/neutral_epic.py
from ..utils import * ## # Minions class OG_271: "Scaled Nightmare" events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e")) class OG_271e: atk = lambda self, i: i * 2
from ..utils import * ## # Minions class OG_271: "Scaled Nightmare" events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e")) class OG_271e: atk = lambda self, i: i * 2 class OG_272: "Twilight Summoner" deathrattle = Summon(CONTROLLER, "OG_272t") class OG_337: "Cyclopian Horror" play = Buff(SELF, "OG_337e") * Count(ENEMY_MINIONS) OG_337e = buff(health=1)
Implement Twilight Summoner and Cyclopian Horror
Implement Twilight Summoner and Cyclopian Horror
Python
agpl-3.0
beheh/fireplace,NightKev/fireplace,jleclanche/fireplace
from ..utils import * ## # Minions class OG_271: "Scaled Nightmare" events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e")) class OG_271e: atk = lambda self, i: i * 2 + + class OG_272: + "Twilight Summoner" + deathrattle = Summon(CONTROLLER, "OG_272t") + + + class OG_337: + "Cyclopian Horror" + play = Buff(SELF, "OG_337e") * Count(ENEMY_MINIONS) + + OG_337e = buff(health=1) +
Implement Twilight Summoner and Cyclopian Horror
## Code Before: from ..utils import * ## # Minions class OG_271: "Scaled Nightmare" events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e")) class OG_271e: atk = lambda self, i: i * 2 ## Instruction: Implement Twilight Summoner and Cyclopian Horror ## Code After: from ..utils import * ## # Minions class OG_271: "Scaled Nightmare" events = OWN_TURN_BEGIN.on(Buff(SELF, "OG_271e")) class OG_271e: atk = lambda self, i: i * 2 class OG_272: "Twilight Summoner" deathrattle = Summon(CONTROLLER, "OG_272t") class OG_337: "Cyclopian Horror" play = Buff(SELF, "OG_337e") * Count(ENEMY_MINIONS) OG_337e = buff(health=1)
// ... existing code ... atk = lambda self, i: i * 2 class OG_272: "Twilight Summoner" deathrattle = Summon(CONTROLLER, "OG_272t") class OG_337: "Cyclopian Horror" play = Buff(SELF, "OG_337e") * Count(ENEMY_MINIONS) OG_337e = buff(health=1) // ... rest of the code ...
5c97b9911a2dafde5fd1e4c40cda4e84974eb855
assembla/lib.py
assembla/lib.py
from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __setitem__(self, key, value): self.data[key] = value def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
Python
mit
markfinger/assembla
from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] + def __setitem__(self, key, value): + self.data[key] = value + def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) + + def __repr__(self): + if 'name' in self.data: + return "<%s: %s>" % (type(self).__name__, self.data['name']) + + if ('number' in self.data) and ('summary' in self.data): + return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) + + return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
## Code Before: from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper ## Instruction: Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets. ## Code After: from functools import wraps class AssemblaObject(object): """ Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`. """ def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __setitem__(self, key, value): self.data[key] = value def keys(self): return self.data.keys() def values(self): return self.data.values() def get(self, *args, **kwargs): return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() def assembla_filter(func): """ Filters :data for the objects in it which possess attributes equal in name/value to a key/value in kwargs. Each key/value combination in kwargs is compared against the object, so multiple keyword arguments can be passed in to constrain the filtering. """ @wraps(func) def wrapper(class_instance, **kwargs): results = func(class_instance) if not kwargs: return results else: return filter( # Find the objects who have an equal number of matching attr/value # combinations as `len(kwargs)` lambda obj: len(kwargs) == len( filter( lambda boolean: boolean, [obj.get(attr_name) == value for attr_name, value in kwargs.iteritems()] ) ), results ) return wrapper
... def __setitem__(self, key, value): self.data[key] = value def keys(self): ... return self.data.get(*args, **kwargs) def __repr__(self): if 'name' in self.data: return "<%s: %s>" % (type(self).__name__, self.data['name']) if ('number' in self.data) and ('summary' in self.data): return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary']) return super(AssemblaObject, self).__repr__() ...
0a5e4194fe06b20b4eaacaa9452403f70076ccd3
base_solver.py
base_solver.py
from datetime import datetime class BaseSolver(object): task = None best_solution = None best_distance = float('inf') search_time = None cycles = 0 def __init__(self, task): self.task = task def run(self): start_time = datetime.now() self.best_solution, self.best_distance, self.cycles = self.run_search() finish_time = datetime.now() self.search_time = finish_time - start_time def run_search(self): # dummy - this is where one should implement the algorithm pass def get_summary(self): if self.best_solution is None: return u'Run the solver first' txt = ( '========== {solver_name} ==========\n' 'run {cycles} cycles for: {search_time}\n' 'best found solution: {best_solution}\n' 'distance: {distance}\n' ) return txt.format( solver_name=str(self.__class__), cycles=self.cycles, search_time=self.search_time, best_solution=self.best_solution, distance=self.best_distance )
from datetime import datetime class RunSolverFirst(Exception): pass class BaseSolver(object): task = None best_solution = None best_distance = float('inf') search_time = None cycles = 0 def __init__(self, task): self.task = task def run(self): start_time = datetime.now() self.best_solution, self.best_distance, self.cycles = self.run_search() finish_time = datetime.now() self.search_time = finish_time - start_time def run_search(self): # dummy - this is where one should implement the algorithm pass def get_summary(self): if self.best_solution is None: raise RunSolverFirst(u'Run the solver first') txt = ( '========== {solver_name} ==========\n' 'run {cycles} cycles for: {search_time}\n' 'best found solution: {best_solution}\n' 'distance: {distance}\n' ) return txt.format( solver_name=str(self.__class__), cycles=self.cycles, search_time=self.search_time, best_solution=self.best_solution, distance=self.best_distance )
Add run solver first exception type
Add run solver first exception type
Python
mit
Cosiek/KombiVojager
from datetime import datetime + + class RunSolverFirst(Exception): + pass + class BaseSolver(object): task = None best_solution = None best_distance = float('inf') search_time = None cycles = 0 def __init__(self, task): self.task = task def run(self): start_time = datetime.now() self.best_solution, self.best_distance, self.cycles = self.run_search() finish_time = datetime.now() self.search_time = finish_time - start_time def run_search(self): # dummy - this is where one should implement the algorithm pass def get_summary(self): if self.best_solution is None: - return u'Run the solver first' + raise RunSolverFirst(u'Run the solver first') txt = ( '========== {solver_name} ==========\n' 'run {cycles} cycles for: {search_time}\n' 'best found solution: {best_solution}\n' 'distance: {distance}\n' ) return txt.format( solver_name=str(self.__class__), cycles=self.cycles, search_time=self.search_time, best_solution=self.best_solution, distance=self.best_distance )
Add run solver first exception type
## Code Before: from datetime import datetime class BaseSolver(object): task = None best_solution = None best_distance = float('inf') search_time = None cycles = 0 def __init__(self, task): self.task = task def run(self): start_time = datetime.now() self.best_solution, self.best_distance, self.cycles = self.run_search() finish_time = datetime.now() self.search_time = finish_time - start_time def run_search(self): # dummy - this is where one should implement the algorithm pass def get_summary(self): if self.best_solution is None: return u'Run the solver first' txt = ( '========== {solver_name} ==========\n' 'run {cycles} cycles for: {search_time}\n' 'best found solution: {best_solution}\n' 'distance: {distance}\n' ) return txt.format( solver_name=str(self.__class__), cycles=self.cycles, search_time=self.search_time, best_solution=self.best_solution, distance=self.best_distance ) ## Instruction: Add run solver first exception type ## Code After: from datetime import datetime class RunSolverFirst(Exception): pass class BaseSolver(object): task = None best_solution = None best_distance = float('inf') search_time = None cycles = 0 def __init__(self, task): self.task = task def run(self): start_time = datetime.now() self.best_solution, self.best_distance, self.cycles = self.run_search() finish_time = datetime.now() self.search_time = finish_time - start_time def run_search(self): # dummy - this is where one should implement the algorithm pass def get_summary(self): if self.best_solution is None: raise RunSolverFirst(u'Run the solver first') txt = ( '========== {solver_name} ==========\n' 'run {cycles} cycles for: {search_time}\n' 'best found solution: {best_solution}\n' 'distance: {distance}\n' ) return txt.format( solver_name=str(self.__class__), cycles=self.cycles, search_time=self.search_time, best_solution=self.best_solution, distance=self.best_distance )
# ... existing code ... from datetime import datetime class RunSolverFirst(Exception): pass # ... modified code ... if self.best_solution is None: raise RunSolverFirst(u'Run the solver first') # ... rest of the code ...
cd70e1150d3822a0f158e06c382ad8841760040e
mla_game/apps/transcript/management/commands/update_stats_for_all_eligible_transcripts.py
mla_game/apps/transcript/management/commands/update_stats_for_all_eligible_transcripts.py
from django.core.management.base import BaseCommand from django.db.models import Prefetch from mla_game.apps.transcript.tasks import update_transcript_stats from ...models import ( Transcript, TranscriptPhraseVote, TranscriptPhraseCorrection, ) class Command(BaseCommand): help = '''Find all Transcripts with votes or corrections and update stats for that transcript''' def handle(self, *args, **options): eligible_transcripts = set() transcript_qs = Transcript.objects.only('pk') downvotes = TranscriptPhraseVote.objects.filter( upvote=False ).prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) upvotes = TranscriptPhraseCorrection.objects.all().prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in downvotes] ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in upvotes] ) transcripts_to_process = Transcript.objects.filter( pk__in=eligible_transcripts).only('pk') for transcript in transcripts_to_process: update_transcript_stats(transcript)
from django.core.management.base import BaseCommand from django.db.models import Prefetch from mla_game.apps.transcript.tasks import update_transcript_stats from ...models import ( Transcript, TranscriptPhraseVote, TranscriptPhraseCorrection, ) class Command(BaseCommand): help = '''Find all Transcripts with votes or corrections and update stats for that transcript''' def handle(self, *args, **options): eligible_transcripts = set() transcript_qs = Transcript.objects.only('pk') votes = TranscriptPhraseVote.objects.filter( upvote__in=[True, False] ).prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) corrections = TranscriptPhraseCorrection.objects.all().prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in votes] ) eligible_transcripts.update( [correction.transcript_phrase.transcript.pk for correction in corrections] ) transcripts_to_process = Transcript.objects.filter( pk__in=eligible_transcripts).only('pk') for transcript in transcripts_to_process: update_transcript_stats(transcript)
Update management command for various changes
Update management command for various changes
Python
mit
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
from django.core.management.base import BaseCommand from django.db.models import Prefetch from mla_game.apps.transcript.tasks import update_transcript_stats from ...models import ( Transcript, TranscriptPhraseVote, TranscriptPhraseCorrection, ) class Command(BaseCommand): help = '''Find all Transcripts with votes or corrections and update stats for that transcript''' def handle(self, *args, **options): eligible_transcripts = set() transcript_qs = Transcript.objects.only('pk') - downvotes = TranscriptPhraseVote.objects.filter( + votes = TranscriptPhraseVote.objects.filter( - upvote=False + upvote__in=[True, False] ).prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) - - upvotes = TranscriptPhraseCorrection.objects.all().prefetch_related( + corrections = TranscriptPhraseCorrection.objects.all().prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) + eligible_transcripts.update( - [vote.transcript_phrase.transcript.pk for vote in downvotes] + [vote.transcript_phrase.transcript.pk for vote in votes] ) eligible_transcripts.update( - [vote.transcript_phrase.transcript.pk for vote in upvotes] + [correction.transcript_phrase.transcript.pk for correction in corrections] ) transcripts_to_process = Transcript.objects.filter( pk__in=eligible_transcripts).only('pk') for transcript in transcripts_to_process: update_transcript_stats(transcript)
Update management command for various changes
## Code Before: from django.core.management.base import BaseCommand from django.db.models import Prefetch from mla_game.apps.transcript.tasks import update_transcript_stats from ...models import ( Transcript, TranscriptPhraseVote, TranscriptPhraseCorrection, ) class Command(BaseCommand): help = '''Find all Transcripts with votes or corrections and update stats for that transcript''' def handle(self, *args, **options): eligible_transcripts = set() transcript_qs = Transcript.objects.only('pk') downvotes = TranscriptPhraseVote.objects.filter( upvote=False ).prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) upvotes = TranscriptPhraseCorrection.objects.all().prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in downvotes] ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in upvotes] ) transcripts_to_process = Transcript.objects.filter( pk__in=eligible_transcripts).only('pk') for transcript in transcripts_to_process: update_transcript_stats(transcript) ## Instruction: Update management command for various changes ## Code After: from django.core.management.base import BaseCommand from django.db.models import Prefetch from mla_game.apps.transcript.tasks import update_transcript_stats from ...models import ( Transcript, TranscriptPhraseVote, TranscriptPhraseCorrection, ) class Command(BaseCommand): help = '''Find all Transcripts with votes or corrections and update stats for that transcript''' def handle(self, *args, **options): eligible_transcripts = set() transcript_qs = Transcript.objects.only('pk') votes = TranscriptPhraseVote.objects.filter( upvote__in=[True, False] ).prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) corrections = TranscriptPhraseCorrection.objects.all().prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in votes] ) eligible_transcripts.update( [correction.transcript_phrase.transcript.pk for correction in corrections] ) transcripts_to_process = Transcript.objects.filter( pk__in=eligible_transcripts).only('pk') for transcript in transcripts_to_process: update_transcript_stats(transcript)
... transcript_qs = Transcript.objects.only('pk') votes = TranscriptPhraseVote.objects.filter( upvote__in=[True, False] ).prefetch_related( ... ) corrections = TranscriptPhraseCorrection.objects.all().prefetch_related( Prefetch('transcript_phrase__transcript', queryset=transcript_qs) ... ) eligible_transcripts.update( [vote.transcript_phrase.transcript.pk for vote in votes] ) ... eligible_transcripts.update( [correction.transcript_phrase.transcript.pk for correction in corrections] ) ...
15cb312fd7acbb7fae67cb3953537a95274f9d40
saleor/search/forms.py
saleor/search/forms.py
from __future__ import unicode_literals from django import forms from django.utils.translation import pgettext from .backends import elasticsearch class SearchForm(forms.Form): q = forms.CharField( label=pgettext('Search form label', 'Query'), required=True) def search(self): return elasticsearch.search(self.cleaned_data['q'])
from __future__ import unicode_literals from django import forms from django.utils.translation import pgettext from .backends import picker class SearchForm(forms.Form): q = forms.CharField( label=pgettext('Search form label', 'Query'), required=True) def search(self): search = picker.pick_backend() return search(self.cleaned_data['q'])
Use backend picker in storefront search form
Use backend picker in storefront search form
Python
bsd-3-clause
UITools/saleor,UITools/saleor,maferelo/saleor,maferelo/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,mociepka/saleor,UITools/saleor,maferelo/saleor
from __future__ import unicode_literals from django import forms from django.utils.translation import pgettext - from .backends import elasticsearch + from .backends import picker class SearchForm(forms.Form): q = forms.CharField( label=pgettext('Search form label', 'Query'), required=True) def search(self): + search = picker.pick_backend() - return elasticsearch.search(self.cleaned_data['q']) + return search(self.cleaned_data['q'])
Use backend picker in storefront search form
## Code Before: from __future__ import unicode_literals from django import forms from django.utils.translation import pgettext from .backends import elasticsearch class SearchForm(forms.Form): q = forms.CharField( label=pgettext('Search form label', 'Query'), required=True) def search(self): return elasticsearch.search(self.cleaned_data['q']) ## Instruction: Use backend picker in storefront search form ## Code After: from __future__ import unicode_literals from django import forms from django.utils.translation import pgettext from .backends import picker class SearchForm(forms.Form): q = forms.CharField( label=pgettext('Search form label', 'Query'), required=True) def search(self): search = picker.pick_backend() return search(self.cleaned_data['q'])
... from django.utils.translation import pgettext from .backends import picker ... def search(self): search = picker.pick_backend() return search(self.cleaned_data['q']) ...
7a1a90cbaba73da44efeaf385865519cfa078a6c
astropy/vo/samp/tests/test_hub_script.py
astropy/vo/samp/tests/test_hub_script.py
import sys from ..hub_script import hub_script from ..utils import ALLOW_INTERNET def setup_module(module): ALLOW_INTERNET.set(False) def test_hub_script(): sys.argv.append('-m') # run in multiple mode sys.argv.append('-w') # disable web profile hub_script(timeout=3)
import sys from ..hub_script import hub_script from ..utils import ALLOW_INTERNET def setup_module(module): ALLOW_INTERNET.set(False) def setup_function(function): function.sys_argv_orig = sys.argv sys.argv = ["samp_hub"] def teardown_function(function): sys.argv = function.sys_argv_orig def test_hub_script(): sys.argv.append('-m') # run in multiple mode sys.argv.append('-w') # disable web profile hub_script(timeout=3)
Fix isolation of SAMP hub script test.
Fix isolation of SAMP hub script test.
Python
bsd-3-clause
StuartLittlefair/astropy,dhomeier/astropy,saimn/astropy,funbaker/astropy,larrybradley/astropy,MSeifert04/astropy,kelle/astropy,lpsinger/astropy,bsipocz/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,lpsinger/astropy,lpsinger/astropy,astropy/astropy,funbaker/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,tbabej/astropy,lpsinger/astropy,pllim/astropy,tbabej/astropy,stargaser/astropy,StuartLittlefair/astropy,joergdietrich/astropy,astropy/astropy,dhomeier/astropy,stargaser/astropy,AustereCuriosity/astropy,larrybradley/astropy,bsipocz/astropy,tbabej/astropy,StuartLittlefair/astropy,kelle/astropy,astropy/astropy,larrybradley/astropy,joergdietrich/astropy,mhvk/astropy,tbabej/astropy,funbaker/astropy,mhvk/astropy,stargaser/astropy,astropy/astropy,DougBurke/astropy,DougBurke/astropy,saimn/astropy,mhvk/astropy,funbaker/astropy,astropy/astropy,MSeifert04/astropy,pllim/astropy,joergdietrich/astropy,bsipocz/astropy,larrybradley/astropy,AustereCuriosity/astropy,stargaser/astropy,saimn/astropy,bsipocz/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,pllim/astropy,MSeifert04/astropy,saimn/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,DougBurke/astropy,tbabej/astropy,saimn/astropy,kelle/astropy,kelle/astropy,pllim/astropy,larrybradley/astropy,mhvk/astropy,pllim/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,mhvk/astropy,joergdietrich/astropy,kelle/astropy
import sys from ..hub_script import hub_script from ..utils import ALLOW_INTERNET + def setup_module(module): ALLOW_INTERNET.set(False) + + + def setup_function(function): + function.sys_argv_orig = sys.argv + sys.argv = ["samp_hub"] + + + def teardown_function(function): + sys.argv = function.sys_argv_orig def test_hub_script(): sys.argv.append('-m') # run in multiple mode sys.argv.append('-w') # disable web profile hub_script(timeout=3)
Fix isolation of SAMP hub script test.
## Code Before: import sys from ..hub_script import hub_script from ..utils import ALLOW_INTERNET def setup_module(module): ALLOW_INTERNET.set(False) def test_hub_script(): sys.argv.append('-m') # run in multiple mode sys.argv.append('-w') # disable web profile hub_script(timeout=3) ## Instruction: Fix isolation of SAMP hub script test. ## Code After: import sys from ..hub_script import hub_script from ..utils import ALLOW_INTERNET def setup_module(module): ALLOW_INTERNET.set(False) def setup_function(function): function.sys_argv_orig = sys.argv sys.argv = ["samp_hub"] def teardown_function(function): sys.argv = function.sys_argv_orig def test_hub_script(): sys.argv.append('-m') # run in multiple mode sys.argv.append('-w') # disable web profile hub_script(timeout=3)
# ... existing code ... def setup_module(module): # ... modified code ... ALLOW_INTERNET.set(False) def setup_function(function): function.sys_argv_orig = sys.argv sys.argv = ["samp_hub"] def teardown_function(function): sys.argv = function.sys_argv_orig # ... rest of the code ...
44d6af63406e2f825c44238fd5bde0c49dde0620
nexus/conf.py
nexus/conf.py
from django.conf import settings MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/')
from django.conf import settings MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/') if getattr(settings, 'NEXUS_USE_DJANGO_MEDIA_URL', False): MEDIA_PREFIX = getattr(settings, 'MEDIA_URL', MEDIA_PREFIX)
Add a setting NEXUS_USE_DJANGO_MEDIA_URL to easily use Django's MEDIA_URL for the nexus MEDIA_PREFIX.
Add a setting NEXUS_USE_DJANGO_MEDIA_URL to easily use Django's MEDIA_URL for the nexus MEDIA_PREFIX. If you want to make custom modifications to the nexus media it makes sense to have it under your own app's media folder and the NEXUS_USE_DJANGO_MEDIA_URL allows the MEDIA_URL to be DRY. This repetition would be a hassle if you have multiple settings files with their own MEDIA_URLs and having to then repeat the NEXUS_MEDIA_PREFIX in each settings file.
Python
apache-2.0
Raekkeri/nexus,graingert/nexus,graingert/nexus,disqus/nexus,YPlan/nexus,disqus/nexus,graingert/nexus,YPlan/nexus,brilliant-org/nexus,YPlan/nexus,roverdotcom/nexus,roverdotcom/nexus,disqus/nexus,brilliant-org/nexus,Raekkeri/nexus,blueprinthealth/nexus,roverdotcom/nexus,blueprinthealth/nexus,blueprinthealth/nexus,brilliant-org/nexus
from django.conf import settings MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/') + if getattr(settings, 'NEXUS_USE_DJANGO_MEDIA_URL', False): + MEDIA_PREFIX = getattr(settings, 'MEDIA_URL', MEDIA_PREFIX) +
Add a setting NEXUS_USE_DJANGO_MEDIA_URL to easily use Django's MEDIA_URL for the nexus MEDIA_PREFIX.
## Code Before: from django.conf import settings MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/') ## Instruction: Add a setting NEXUS_USE_DJANGO_MEDIA_URL to easily use Django's MEDIA_URL for the nexus MEDIA_PREFIX. ## Code After: from django.conf import settings MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/') if getattr(settings, 'NEXUS_USE_DJANGO_MEDIA_URL', False): MEDIA_PREFIX = getattr(settings, 'MEDIA_URL', MEDIA_PREFIX)
// ... existing code ... MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/') if getattr(settings, 'NEXUS_USE_DJANGO_MEDIA_URL', False): MEDIA_PREFIX = getattr(settings, 'MEDIA_URL', MEDIA_PREFIX) // ... rest of the code ...
54add3fa95ab450e5afcbbf7fe8a3205bfc5889c
indra/tests/test_reading_scripts_aws.py
indra/tests/test_reading_scripts_aws.py
import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return
import boto3
from os import path, chdir
from subprocess import check_call
from nose.plugins.attrib import attr
from indra.tools.reading import submit_reading_pipeline as srp
from indra.sources import sparser

s3 = boto3.client('s3')

HERE = path.dirname(path.abspath(__file__))


@attr('nonpublic')
def test_normal_pmid_reading_call():
    chdir(path.expanduser('~'))
    # Put an id file on s3
    basename = 'local_pmid_test_run'
    s3_prefix = 'reading_results/%s/' % basename
    s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids',
                  Body='\n'.join(['PMID000test%d' % n for n in range(4)]))

    # Call the reading tool
    sub = srp.PmidSubmitter(basename, ['sparser'])
    job_name, cmd = sub._make_command(0, 2)
    check_call(cmd)

    # Remove garbage on s3
    res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix)
    for entry in res['Contents']:
        print("Removing %s..." % entry['Key'])
        s3.delete_object(Bucket='bigmech', Key=entry['Key'])
    return


@attr('nonpublic')
def test_bad_sparser():
    txt = ('Disruption of the AP-1 binding site reversed the transcriptional '
           'responses seen with Fos and Jun.')
    sp = sparser.process_text(txt, timeout=1)
    assert sp is None, "Reading succeeded unexpectedly."
Add test with currently known-stall sentance.
Add test with currently known-stall sentance.
Python
bsd-2-clause
bgyori/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra
import boto3
from os import path, chdir
from subprocess import check_call
from nose.plugins.attrib import attr
from indra.tools.reading import submit_reading_pipeline as srp
+ from indra.sources import sparser

s3 = boto3.client('s3')

HERE = path.dirname(path.abspath(__file__))


@attr('nonpublic')
def test_normal_pmid_reading_call():
    chdir(path.expanduser('~'))
    # Put an id file on s3
    basename = 'local_pmid_test_run'
    s3_prefix = 'reading_results/%s/' % basename
    s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids',
                  Body='\n'.join(['PMID000test%d' % n for n in range(4)]))

    # Call the reading tool
    sub = srp.PmidSubmitter(basename, ['sparser'])
    job_name, cmd = sub._make_command(0, 2)
    check_call(cmd)

    # Remove garbage on s3
    res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix)
    for entry in res['Contents']:
        print("Removing %s..." % entry['Key'])
        s3.delete_object(Bucket='bigmech', Key=entry['Key'])
    return
+
+
+ @attr('nonpublic')
+ def test_bad_sparser():
+     txt = ('Disruption of the AP-1 binding site reversed the transcriptional '
+            'responses seen with Fos and Jun.')
+     sp = sparser.process_text(txt, timeout=1)
+     assert sp is None, "Reading succeeded unexpectedly."
+
Add test with currently known-stall sentance.
## Code Before: import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return ## Instruction: Add test with currently known-stall sentance. ## Code After: import boto3 from os import path, chdir from subprocess import check_call from nose.plugins.attrib import attr from indra.tools.reading import submit_reading_pipeline as srp from indra.sources import sparser s3 = boto3.client('s3') HERE = path.dirname(path.abspath(__file__)) @attr('nonpublic') def test_normal_pmid_reading_call(): chdir(path.expanduser('~')) # Put an id file on s3 basename = 'local_pmid_test_run' s3_prefix = 'reading_results/%s/' % basename s3.put_object(Bucket='bigmech', Key=s3_prefix + 'pmids', Body='\n'.join(['PMID000test%d' % n for n in range(4)])) # Call the reading tool sub = srp.PmidSubmitter(basename, ['sparser']) job_name, cmd = sub._make_command(0, 2) check_call(cmd) # Remove garbage on s3 res = s3.list_objects(Bucket='bigmech', Prefix=s3_prefix) for entry in res['Contents']: print("Removing %s..." % entry['Key']) s3.delete_object(Bucket='bigmech', Key=entry['Key']) return @attr('nonpublic') def test_bad_sparser(): txt = ('Disruption of the AP-1 binding site reversed the transcriptional ' 'responses seen with Fos and Jun.') sp = sparser.process_text(txt, timeout=1) assert sp is None, "Reading succeeded unexpectedly."
// ... existing code ...
from indra.tools.reading import submit_reading_pipeline as srp
from indra.sources import sparser

// ... modified code ...
    return


@attr('nonpublic')
def test_bad_sparser():
    txt = ('Disruption of the AP-1 binding site reversed the transcriptional '
           'responses seen with Fos and Jun.')
    sp = sparser.process_text(txt, timeout=1)
    assert sp is None, "Reading succeeded unexpectedly."
// ... rest of the code ...
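
For context, a hedged sketch of how a caller might guard a Sparser read the same way the new test does; the example sentence and the 60-second timeout are illustrative and not taken from the commit, and the .statements attribute is assumed to hold the extracted results:

from indra.sources import sparser

sp = sparser.process_text('MEK phosphorylates ERK.', timeout=60)
if sp is None:
    print('Sparser stalled or failed; no statements extracted.')
else:
    print(sp.statements)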
e0f296e776e2aaed2536eeebfb4900a23973aaf5
tests/test_json.py
tests/test_json.py
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
            '*.json'
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)
Add '*.json' file extensions to test pattern list.
Add '*.json' file extensions to test pattern list.
Python
mit
jonlabelle/SublimeJsPrettier,jonlabelle/SublimeJsPrettier
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
+             '*.json'
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)
Add '*.json' file extensions to test pattern list.
## Code Before: from __future__ import absolute_import import fnmatch import os import unittest from . import validate_json_format class TestSettings(unittest.TestCase): def _get_json_files(self, file_pattern, folder='.'): for root, dirnames, filenames in os.walk(folder): for filename in fnmatch.filter(filenames, file_pattern): yield os.path.join(root, filename) for dirname in [d for d in dirnames if d not in ('.git', '.tox')]: for f in self._get_json_files( file_pattern, os.path.join(root, dirname)): yield f def test_json_settings(self): """Test each JSON file.""" file_patterns = ( '*.sublime-settings', '*.sublime-commands', '*.sublime-menu', ) for file_pattern in file_patterns: for f in self._get_json_files(file_pattern): print(f) self.assertFalse( validate_json_format.CheckJsonFormat( False, True).check_format(f), "%s does not comform to expected format!" % f) ## Instruction: Add '*.json' file extensions to test pattern list. ## Code After: from __future__ import absolute_import import fnmatch import os import unittest from . import validate_json_format class TestSettings(unittest.TestCase): def _get_json_files(self, file_pattern, folder='.'): for root, dirnames, filenames in os.walk(folder): for filename in fnmatch.filter(filenames, file_pattern): yield os.path.join(root, filename) for dirname in [d for d in dirnames if d not in ('.git', '.tox')]: for f in self._get_json_files( file_pattern, os.path.join(root, dirname)): yield f def test_json_settings(self): """Test each JSON file.""" file_patterns = ( '*.sublime-settings', '*.sublime-commands', '*.sublime-menu', '*.json' ) for file_pattern in file_patterns: for f in self._get_json_files(file_pattern): print(f) self.assertFalse( validate_json_format.CheckJsonFormat( False, True).check_format(f), "%s does not comform to expected format!" % f)
# ... existing code ...
            '*.sublime-menu',
            '*.json'
        )
# ... rest of the code ...
0774eea1027bc9bb88b5289854aa26109f258712
great_expectations/exceptions.py
great_expectations/exceptions.py
class GreatExpectationsError(Exception):
    pass


class ExpectationsConfigNotFoundError(GreatExpectationsError):
    def __init__(self, data_asset_name):
        self.data_asset_name = data_asset_name
        self.message = "No expectations config found for data_asset_name %s" % data_asset_name


class BatchKwargsError(GreatExpectationsError):
    def __init__(self, message, batch_kwargs):
        self.message = message
class GreatExpectationsError(Exception):
    pass


class ExpectationsConfigNotFoundError(GreatExpectationsError):
    def __init__(self, data_asset_name):
        self.data_asset_name = data_asset_name
        self.message = "No expectations config found for data_asset_name %s" % data_asset_name


class BatchKwargsError(GreatExpectationsError):
    def __init__(self, message, batch_kwargs):
        self.message = message
        self.batch_kwargs = batch_kwargs
Add batch_kwargs to custom error
Add batch_kwargs to custom error
Python
apache-2.0
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
class GreatExpectationsError(Exception):
    pass


class ExpectationsConfigNotFoundError(GreatExpectationsError):
    def __init__(self, data_asset_name):
        self.data_asset_name = data_asset_name
        self.message = "No expectations config found for data_asset_name %s" % data_asset_name


class BatchKwargsError(GreatExpectationsError):
    def __init__(self, message, batch_kwargs):
        self.message = message
+         self.batch_kwargs = batch_kwargs
Add batch_kwargs to custom error
## Code Before: class GreatExpectationsError(Exception): pass class ExpectationsConfigNotFoundError(GreatExpectationsError): def __init__(self, data_asset_name): self.data_asset_name = data_asset_name self.message = "No expectations config found for data_asset_name %s" % data_asset_name class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message ## Instruction: Add batch_kwargs to custom error ## Code After: class GreatExpectationsError(Exception): pass class ExpectationsConfigNotFoundError(GreatExpectationsError): def __init__(self, data_asset_name): self.data_asset_name = data_asset_name self.message = "No expectations config found for data_asset_name %s" % data_asset_name class BatchKwargsError(GreatExpectationsError): def __init__(self, message, batch_kwargs): self.message = message self.batch_kwargs = batch_kwargs
# ... existing code ...
        self.message = message
        self.batch_kwargs = batch_kwargs
# ... rest of the code ...
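
A short, hypothetical illustration of what the added attribute enables for callers; the message text and the kwargs dictionary are invented for the example:

from great_expectations.exceptions import BatchKwargsError

try:
    raise BatchKwargsError("unrecognized batch_kwargs key", {"path": "data.csv"})
except BatchKwargsError as err:
    print(err.message)
    print(err.batch_kwargs)  # now carried on the exception instance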
7d24695c7e94e787b5d66854db7cc6dc1abcbf10
polyaxon/tracker/publish_tracker.py
polyaxon/tracker/publish_tracker.py
import analytics

from django.db import InterfaceError, OperationalError, ProgrammingError

from tracker.service import TrackerService


class PublishTrackerService(TrackerService):
    def __init__(self, key=''):
        self.cluster_id = None
        self.analytics = analytics
        self.analytics.write_key = key

    def get_cluster_id(self):
        if self.cluster_id:
            return self.cluster_id

        from clusters.models import Cluster

        try:
            cluster_uuid = Cluster.load().uuid.hex
            self.cluster_id = cluster_uuid
        except (Cluster.DoesNotExist,
                InterfaceError,
                ProgrammingError,
                OperationalError):
            pass
        return self.cluster_id

    def record_event(self, event):
        if not self.cluster_id:
            return

        if event.event_type == 'cluster.created':
            self.analytics.identify(
                self.get_cluster_id(),
                event.serialize(dumps=False),
            )
        self.analytics.track(
            self.get_cluster_id(),
            event.event_type,
            event.serialize(dumps=False),
        )

    def setup(self):
        super(PublishTrackerService, self).setup()
        self.cluster_id = self.get_cluster_id()
import analytics

from django.db import InterfaceError, OperationalError, ProgrammingError

from tracker.service import TrackerService


class PublishTrackerService(TrackerService):
    def __init__(self, key=''):
        self.cluster_id = None
        self.analytics = analytics
        self.analytics.write_key = key

    def get_cluster_id(self):
        if self.cluster_id:
            return self.cluster_id

        from clusters.models import Cluster

        try:
            cluster_uuid = Cluster.load().uuid.hex
            self.cluster_id = cluster_uuid
        except (Cluster.DoesNotExist,
                InterfaceError,
                ProgrammingError,
                OperationalError):
            pass
        return self.cluster_id

    def record_event(self, event):
        cluster_id = self.get_cluster_id()
        if not cluster_id:
            return

        if event.event_type == 'cluster.created':
            self.analytics.identify(
                cluster_id,
                event.serialize(dumps=False),
            )
        self.analytics.track(
            cluster_id,
            event.event_type,
            event.serialize(dumps=False),
        )

    def setup(self):
        super(PublishTrackerService, self).setup()
        self.cluster_id = self.get_cluster_id()
Update check on cluster id
Update check on cluster id
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
import analytics

from django.db import InterfaceError, OperationalError, ProgrammingError

from tracker.service import TrackerService


class PublishTrackerService(TrackerService):
    def __init__(self, key=''):
        self.cluster_id = None
        self.analytics = analytics
        self.analytics.write_key = key

    def get_cluster_id(self):
        if self.cluster_id:
            return self.cluster_id

        from clusters.models import Cluster

        try:
            cluster_uuid = Cluster.load().uuid.hex
            self.cluster_id = cluster_uuid
        except (Cluster.DoesNotExist,
                InterfaceError,
                ProgrammingError,
                OperationalError):
            pass
        return self.cluster_id

    def record_event(self, event):
+         cluster_id = self.get_cluster_id()
-         if not self.cluster_id:
+         if not cluster_id:
            return

        if event.event_type == 'cluster.created':
            self.analytics.identify(
-                 self.get_cluster_id(),
+                 cluster_id,
                event.serialize(dumps=False),
            )
        self.analytics.track(
-             self.get_cluster_id(),
+             cluster_id,
            event.event_type,
            event.serialize(dumps=False),
        )

    def setup(self):
        super(PublishTrackerService, self).setup()
        self.cluster_id = self.get_cluster_id()
Update check on cluster id
## Code Before: import analytics from django.db import InterfaceError, OperationalError, ProgrammingError from tracker.service import TrackerService class PublishTrackerService(TrackerService): def __init__(self, key=''): self.cluster_id = None self.analytics = analytics self.analytics.write_key = key def get_cluster_id(self): if self.cluster_id: return self.cluster_id from clusters.models import Cluster try: cluster_uuid = Cluster.load().uuid.hex self.cluster_id = cluster_uuid except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError): pass return self.cluster_id def record_event(self, event): if not self.cluster_id: return if event.event_type == 'cluster.created': self.analytics.identify( self.get_cluster_id(), event.serialize(dumps=False), ) self.analytics.track( self.get_cluster_id(), event.event_type, event.serialize(dumps=False), ) def setup(self): super(PublishTrackerService, self).setup() self.cluster_id = self.get_cluster_id() ## Instruction: Update check on cluster id ## Code After: import analytics from django.db import InterfaceError, OperationalError, ProgrammingError from tracker.service import TrackerService class PublishTrackerService(TrackerService): def __init__(self, key=''): self.cluster_id = None self.analytics = analytics self.analytics.write_key = key def get_cluster_id(self): if self.cluster_id: return self.cluster_id from clusters.models import Cluster try: cluster_uuid = Cluster.load().uuid.hex self.cluster_id = cluster_uuid except (Cluster.DoesNotExist, InterfaceError, ProgrammingError, OperationalError): pass return self.cluster_id def record_event(self, event): cluster_id = self.get_cluster_id() if not cluster_id: return if event.event_type == 'cluster.created': self.analytics.identify( cluster_id, event.serialize(dumps=False), ) self.analytics.track( cluster_id, event.event_type, event.serialize(dumps=False), ) def setup(self): super(PublishTrackerService, self).setup() self.cluster_id = self.get_cluster_id()
// ... existing code ...
    def record_event(self, event):
        cluster_id = self.get_cluster_id()
        if not cluster_id:
            return
// ... modified code ...
            self.analytics.identify(
                cluster_id,
                event.serialize(dumps=False),
...
        self.analytics.track(
            cluster_id,
            event.event_type,
// ... rest of the code ...
d10656527cf3a0fe3d47827d8d2f27fda4cb2a5c
zseqfile/__init__.py
zseqfile/__init__.py
# Expose the public API.
from .zseqfile import (  # noqa
    open,
    open_gzip,
    open_bzip2,
    open_lzma,
)
from .zseqfile import (  # noqa
    open,
    open_gzip,
    open_bzip2,
    open_lzma,
)

open_gz = open_gzip
open_bz2 = open_bzip2
open_xz = open_lzma
Define a few convenience aliases in the public API
Define a few convenience aliases in the public API
Python
bsd-3-clause
wbolster/zseqfile
- # Expose the public API.
from .zseqfile import (  # noqa
    open,
    open_gzip,
    open_bzip2,
    open_lzma,
)

+ open_gz = open_gzip
+ open_bz2 = open_bzip2
+ open_xz = open_lzma
+
Define a few convenience aliases in the public API
## Code Before: # Expose the public API. from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) ## Instruction: Define a few convenience aliases in the public API ## Code After: from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) open_gz = open_gzip open_bz2 = open_bzip2 open_xz = open_lzma
...
from .zseqfile import (  # noqa
...
)

open_gz = open_gzip
open_bz2 = open_bzip2
open_xz = open_lzma
...
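
A hypothetical use of the new aliases; the file name is a placeholder, and the returned handle is assumed to behave like a regular text file object:

import zseqfile

with zseqfile.open_gz('logs.txt.gz') as f:  # equivalent to zseqfile.open_gzip(...)
    for line in f:
        print(line)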
5f8d59646875d4e4aa75ec22a2ddc666c1802a23
readthedocs/core/utils/tasks/__init__.py
readthedocs/core/utils/tasks/__init__.py
from .permission_checks import user_id_matches
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
from .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
Revert previous commit by adding missing imports
Revert previous commit by adding missing imports
Python
mit
rtfd/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,pombredanne/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org
from .permission_checks import user_id_matches
+ from .public import PublicTask
+ from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
Revert previous commit by adding missing imports
## Code Before: from .permission_checks import user_id_matches from .public import permission_check from .public import get_public_task_data from .retrieve import TaskNotFound from .retrieve import get_task_data ## Instruction: Revert previous commit by adding missing imports ## Code After: from .permission_checks import user_id_matches from .public import PublicTask from .public import TaskNoPermission from .public import permission_check from .public import get_public_task_data from .retrieve import TaskNotFound from .retrieve import get_task_data
# ... existing code ...
from .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
# ... rest of the code ...
d4c8b0f15cd1694b84f8dab7936571d9f9bca42f
tests/people/test_managers.py
tests/people/test_managers.py
import pytest

from components.people.factories import IdolFactory
from components.people.models import Idol
from components.people.constants import STATUS

pytestmark = pytest.mark.django_db


class TestIdols:
    @pytest.fixture
    def idols(self):
        idols = []
        [idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
        [idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
        return idols

    def test_active_manager(self, idols):
        assert len(Idol.objects.active()) == 3

    def test_inactive_manager(self, idols):
        assert len(Idol.objects.inactive()) == 2
import datetime

import pytest

from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol

pytestmark = pytest.mark.django_db


class TestIdols:
    @pytest.fixture
    def status(self):
        idols = []
        [idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
        [idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
        return idols

    def test_active_manager(self, status):
        assert len(Idol.objects.active()) == 3

    def test_inactive_manager(self, status):
        assert len(Idol.objects.inactive()) == 2


class TestGroups:
    @pytest.fixture
    def status(self):
        groups = [
            GroupFactory(),
            GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
            GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
        ]
        return groups

    def test_active_manager(self, status):
        assert len(Group.objects.active()) == 2

    def test_inactive_manager(self, status):
        assert len(Group.objects.inactive()) == 1
Test group managers. Rename idols() => status().
Test group managers. Rename idols() => status().
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
+ import datetime
import pytest

- from components.people.factories import IdolFactory
- from components.people.models import Idol
from components.people.constants import STATUS
+ from components.people.factories import GroupFactory, IdolFactory
+ from components.people.models import Group, Idol

pytestmark = pytest.mark.django_db


class TestIdols:
    @pytest.fixture
-     def idols(self):
+     def status(self):
        idols = []
        [idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)]
        [idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)]
        return idols

-     def test_active_manager(self, idols):
+     def test_active_manager(self, status):
        assert len(Idol.objects.active()) == 3

-     def test_inactive_manager(self, idols):
+     def test_inactive_manager(self, status):
        assert len(Idol.objects.inactive()) == 2
+
+
+ class TestGroups:
+     @pytest.fixture
+     def status(self):
+         groups = [
+             GroupFactory(),
+             GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
+             GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
+         ]
+         return groups
+
+     def test_active_manager(self, status):
+         assert len(Group.objects.active()) == 2
+
+     def test_inactive_manager(self, status):
+         assert len(Group.objects.inactive()) == 1
+
Test group managers. Rename idols() => status().
## Code Before: import pytest from components.people.factories import IdolFactory from components.people.models import Idol from components.people.constants import STATUS pytestmark = pytest.mark.django_db class TestIdols: @pytest.fixture def idols(self): idols = [] [idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)] [idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)] return idols def test_active_manager(self, idols): assert len(Idol.objects.active()) == 3 def test_inactive_manager(self, idols): assert len(Idol.objects.inactive()) == 2 ## Instruction: Test group managers. Rename idols() => status(). ## Code After: import datetime import pytest from components.people.constants import STATUS from components.people.factories import GroupFactory, IdolFactory from components.people.models import Group, Idol pytestmark = pytest.mark.django_db class TestIdols: @pytest.fixture def status(self): idols = [] [idols.append(IdolFactory(status=STATUS.active)) for i in xrange(3)] [idols.append(IdolFactory(status=STATUS.former)) for i in xrange(2)] return idols def test_active_manager(self, status): assert len(Idol.objects.active()) == 3 def test_inactive_manager(self, status): assert len(Idol.objects.inactive()) == 2 class TestGroups: @pytest.fixture def status(self): groups = [ GroupFactory(), GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)), GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30)) ] return groups def test_active_manager(self, status): assert len(Group.objects.active()) == 2 def test_inactive_manager(self, status): assert len(Group.objects.inactive()) == 1
...
import datetime

import pytest
...
from components.people.constants import STATUS
from components.people.factories import GroupFactory, IdolFactory
from components.people.models import Group, Idol
...
    @pytest.fixture
    def status(self):
        idols = []
...
    def test_active_manager(self, status):
        assert len(Idol.objects.active()) == 3
...
    def test_inactive_manager(self, status):
        assert len(Idol.objects.inactive()) == 2


class TestGroups:
    @pytest.fixture
    def status(self):
        groups = [
            GroupFactory(),
            GroupFactory(ended=datetime.date.today() + datetime.timedelta(days=30)),
            GroupFactory(ended=datetime.date.today() - datetime.timedelta(days=30))
        ]
        return groups

    def test_active_manager(self, status):
        assert len(Group.objects.active()) == 2

    def test_inactive_manager(self, status):
        assert len(Group.objects.inactive()) == 1
...
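
The tests above assume Group.objects exposes active() and inactive(); a rough sketch of the kind of queryset they exercise might look like the following. Only the `ended` field name comes from the factories in the record, everything else is assumed:

import datetime

from django.db import models


class GroupQuerySet(models.QuerySet):
    def active(self):
        # No end date yet, or an end date still in the future.
        today = datetime.date.today()
        return self.filter(models.Q(ended__isnull=True) | models.Q(ended__gt=today))

    def inactive(self):
        return self.filter(ended__lte=datetime.date.today())

# Group.objects could then be defined as GroupQuerySet.as_manager().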
5cca245f84a87f503c8e16577b7dba635d689a26
opencc/__main__.py
opencc/__main__.py
from __future__ import print_function

import argparse
import sys

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<file>',
                        help='Configuration file')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
        print("Please specify a configuration file.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
    with open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
from __future__ import print_function

import argparse
import sys
import io

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<conversion>',
                        help='Conversion')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
        print("Please specify a conversion.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
    with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
    with io.open(args.output if args.output else 1, 'w',
                 encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
Add support for Python 2.6 and 2.7
Add support for Python 2.6 and 2.7

Remove the following error when using Python 2.6 and 2.7.
TypeError: 'encoding' is an invalid keyword argument for this function

Python 3 operation is unchanged
Python
apache-2.0
yichen0831/opencc-python
from __future__ import print_function

import argparse
import sys
+ import io

from opencc import OpenCC


def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-i', '--input', metavar='<file>',
                        help='Read original text from <file>.')
    parser.add_argument('-o', '--output', metavar='<file>',
                        help='Write converted text to <file>.')
-     parser.add_argument('-c', '--config', metavar='<file>',
+     parser.add_argument('-c', '--config', metavar='<conversion>',
-                         help='Configuration file')
+                         help='Conversion')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for input')
    parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8',
                        help='Encoding for output')
    args = parser.parse_args()

    if args.config is None:
-         print("Please specify a configuration file.", file=sys.stderr)
+         print("Please specify a conversion.", file=sys.stderr)
        return 1

    cc = OpenCC(args.config)
-     with open(args.input if args.input else 0, encoding=args.in_enc) as f:
+     with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
    output_str = cc.convert(input_str)
-     with open(args.output if args.output else 1, 'w',
+     with io.open(args.output if args.output else 1, 'w',
              encoding=args.out_enc) as f:
        f.write(output_str)

    return 0


if __name__ == '__main__':
    sys.exit(main())
Add support for Python 2.6 and 2.7
## Code Before: from __future__ import print_function import argparse import sys from opencc import OpenCC def main(): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-i', '--input', metavar='<file>', help='Read original text from <file>.') parser.add_argument('-o', '--output', metavar='<file>', help='Write converted text to <file>.') parser.add_argument('-c', '--config', metavar='<file>', help='Configuration file') parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8', help='Encoding for input') parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8', help='Encoding for output') args = parser.parse_args() if args.config is None: print("Please specify a configuration file.", file=sys.stderr) return 1 cc = OpenCC(args.config) with open(args.input if args.input else 0, encoding=args.in_enc) as f: input_str = f.read() output_str = cc.convert(input_str) with open(args.output if args.output else 1, 'w', encoding=args.out_enc) as f: f.write(output_str) return 0 if __name__ == '__main__': sys.exit(main()) ## Instruction: Add support for Python 2.6 and 2.7 ## Code After: from __future__ import print_function import argparse import sys import io from opencc import OpenCC def main(): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-i', '--input', metavar='<file>', help='Read original text from <file>.') parser.add_argument('-o', '--output', metavar='<file>', help='Write converted text to <file>.') parser.add_argument('-c', '--config', metavar='<conversion>', help='Conversion') parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8', help='Encoding for input') parser.add_argument('--out-enc', metavar='<encoding>', default='UTF-8', help='Encoding for output') args = parser.parse_args() if args.config is None: print("Please specify a conversion.", file=sys.stderr) return 1 cc = OpenCC(args.config) with io.open(args.input if args.input else 0, encoding=args.in_enc) as f: input_str = f.read() output_str = cc.convert(input_str) with io.open(args.output if args.output else 1, 'w', encoding=args.out_enc) as f: f.write(output_str) return 0 if __name__ == '__main__': sys.exit(main())
// ... existing code ...
import sys
import io

from opencc import OpenCC
// ... modified code ...
                        help='Write converted text to <file>.')
    parser.add_argument('-c', '--config', metavar='<conversion>',
                        help='Conversion')
    parser.add_argument('--in-enc', metavar='<encoding>', default='UTF-8',
...
    if args.config is None:
        print("Please specify a conversion.", file=sys.stderr)
        return 1
...
    with io.open(args.input if args.input else 0, encoding=args.in_enc) as f:
        input_str = f.read()
...
    output_str = cc.convert(input_str)
    with io.open(args.output if args.output else 1, 'w',
                 encoding=args.out_enc) as f:
// ... rest of the code ...
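
For reference, the same conversion driven from Python rather than the command line; only OpenCC(...) and .convert(...) come from the record, while the conversion name 's2t' and the sample string are assumptions:

from opencc import OpenCC

cc = OpenCC('s2t')  # 's2t' is an assumed conversion name
print(cc.convert(u'汉字转换'))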
e53d324f2ac4874c1f56bbf00dfee47c6b059e5d
fluidreview/admin.py
fluidreview/admin.py
"""Admin interface for fluidreview"""
from django.contrib import admin

from bootcamp.utils import get_field_names
from fluidreview.models import WebhookRequest, OAuthToken


class WebhookRequestAdmin(admin.ModelAdmin):
    """Admin for WebhookRequest"""
    model = WebhookRequest
    readonly_fields = get_field_names(WebhookRequest)

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


class OAuthTokenAdmin(admin.ModelAdmin):
    """Admin for OAuthToken"""
    model = OAuthToken


admin.site.register(WebhookRequest, WebhookRequestAdmin)
admin.site.register(OAuthToken, OAuthTokenAdmin)

"""Admin interface for fluidreview"""
from django.contrib import admin

from bootcamp.utils import get_field_names
from fluidreview.models import WebhookRequest, OAuthToken


class WebhookRequestAdmin(admin.ModelAdmin):
    """Admin for WebhookRequest"""
    model = WebhookRequest
    readonly_fields = get_field_names(WebhookRequest)
    ordering = ('-created_on',)
    list_filter = ('award_id', 'status')
    search_fields = ('user_email', 'user_id', 'submission_id')

    def has_add_permission(self, request):
        return False

    def has_delete_permission(self, request, obj=None):
        return False


class OAuthTokenAdmin(admin.ModelAdmin):
    """Admin for OAuthToken"""
    model = OAuthToken


admin.site.register(WebhookRequest, WebhookRequestAdmin)
admin.site.register(OAuthToken, OAuthTokenAdmin)
Sort webhook requests by date
Sort webhook requests by date
Python
bsd-3-clause
mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce,mitodl/bootcamp-ecommerce
"""Admin interface for fluidreview""" from django.contrib import admin from bootcamp.utils import get_field_names from fluidreview.models import WebhookRequest, OAuthToken class WebhookRequestAdmin(admin.ModelAdmin): """Admin for WebhookRequest""" model = WebhookRequest readonly_fields = get_field_names(WebhookRequest) + ordering = ('-created_on',) + list_filter = ('award_id', 'status') + search_fields = ('user_email', 'user_id', 'submission_id') def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False class OAuthTokenAdmin(admin.ModelAdmin): """Admin for OAuthToken""" model = OAuthToken admin.site.register(WebhookRequest, WebhookRequestAdmin) admin.site.register(OAuthToken, OAuthTokenAdmin)
Sort webhook requests by date
## Code Before: """Admin interface for fluidreview""" from django.contrib import admin from bootcamp.utils import get_field_names from fluidreview.models import WebhookRequest, OAuthToken class WebhookRequestAdmin(admin.ModelAdmin): """Admin for WebhookRequest""" model = WebhookRequest readonly_fields = get_field_names(WebhookRequest) def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False class OAuthTokenAdmin(admin.ModelAdmin): """Admin for OAuthToken""" model = OAuthToken admin.site.register(WebhookRequest, WebhookRequestAdmin) admin.site.register(OAuthToken, OAuthTokenAdmin) ## Instruction: Sort webhook requests by date ## Code After: """Admin interface for fluidreview""" from django.contrib import admin from bootcamp.utils import get_field_names from fluidreview.models import WebhookRequest, OAuthToken class WebhookRequestAdmin(admin.ModelAdmin): """Admin for WebhookRequest""" model = WebhookRequest readonly_fields = get_field_names(WebhookRequest) ordering = ('-created_on',) list_filter = ('award_id', 'status') search_fields = ('user_email', 'user_id', 'submission_id') def has_add_permission(self, request): return False def has_delete_permission(self, request, obj=None): return False class OAuthTokenAdmin(admin.ModelAdmin): """Admin for OAuthToken""" model = OAuthToken admin.site.register(WebhookRequest, WebhookRequestAdmin) admin.site.register(OAuthToken, OAuthTokenAdmin)
...
    readonly_fields = get_field_names(WebhookRequest)
    ordering = ('-created_on',)
    list_filter = ('award_id', 'status')
    search_fields = ('user_email', 'user_id', 'submission_id')
...
d52b47eaad73f818974b7feec83fa3b15ddb5aac
form_utils_bootstrap3/tests/__init__.py
form_utils_bootstrap3/tests/__init__.py
import os

import django
from django.conf import settings

if not settings.configured:
    settings_dict = dict(
        INSTALLED_APPS=[
            'django.contrib.contenttypes',
            'django.contrib.auth',
            'bootstrap3',
            'form_utils',
        ],
        DATABASES={
            "default": {
                "ENGINE": "django.db.backends.sqlite3",
            }
        },
        MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'),
        MEDIA_URL='/media/',
        STATIC_URL='/static/',
        MIDDLEWARE_CLASSES=[],
        BOOTSTRAP3={
            'form_renderers': {
                'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer'
            }
        }
    )

    settings.configure(**settings_dict)

    if django.VERSION >= (1, 7):
        django.setup()
import os

import django
from django.conf import settings

if not settings.configured:
    settings_dict = dict(
        INSTALLED_APPS=[
            'django.contrib.contenttypes',
            'django.contrib.auth',
            'bootstrap3',
            'form_utils',
        ],
        DATABASES={
            "default": {
                "ENGINE": "django.db.backends.sqlite3",
            }
        },
        MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'),
        MEDIA_URL='/media/',
        STATIC_URL='/static/',
        MIDDLEWARE_CLASSES=[],
        BOOTSTRAP3={
            'form_renderers': {
                'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer'
            }
        }
    )

    if django.VERSION >= (1, 8):
        settings_dict['TEMPLATES'] = [
            {
                'BACKEND': 'django.template.backends.django.DjangoTemplates',
                'DIRS': []
            }
        ]

    settings.configure(**settings_dict)

    if django.VERSION >= (1, 7):
        django.setup()
Fix tests for Django trunk
Fix tests for Django trunk
Python
mit
federicobond/django-form-utils-bootstrap3
import os

import django
from django.conf import settings

if not settings.configured:
    settings_dict = dict(
        INSTALLED_APPS=[
            'django.contrib.contenttypes',
            'django.contrib.auth',
            'bootstrap3',
            'form_utils',
        ],
        DATABASES={
            "default": {
                "ENGINE": "django.db.backends.sqlite3",
            }
        },
        MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'),
        MEDIA_URL='/media/',
        STATIC_URL='/static/',
        MIDDLEWARE_CLASSES=[],
        BOOTSTRAP3={
            'form_renderers': {
                'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer'
            }
        }
    )

+     if django.VERSION >= (1, 8):
+         settings_dict['TEMPLATES'] = [
+             {
+                 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+                 'DIRS': []
+             }
+         ]
+
    settings.configure(**settings_dict)

    if django.VERSION >= (1, 7):
        django.setup()
Fix tests for Django trunk
## Code Before: import os import django from django.conf import settings if not settings.configured: settings_dict = dict( INSTALLED_APPS=[ 'django.contrib.contenttypes', 'django.contrib.auth', 'bootstrap3', 'form_utils', ], DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'), MEDIA_URL='/media/', STATIC_URL='/static/', MIDDLEWARE_CLASSES=[], BOOTSTRAP3={ 'form_renderers': { 'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer' } } ) settings.configure(**settings_dict) if django.VERSION >= (1, 7): django.setup() ## Instruction: Fix tests for Django trunk ## Code After: import os import django from django.conf import settings if not settings.configured: settings_dict = dict( INSTALLED_APPS=[ 'django.contrib.contenttypes', 'django.contrib.auth', 'bootstrap3', 'form_utils', ], DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'), MEDIA_URL='/media/', STATIC_URL='/static/', MIDDLEWARE_CLASSES=[], BOOTSTRAP3={ 'form_renderers': { 'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer' } } ) if django.VERSION >= (1, 8): settings_dict['TEMPLATES'] = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [] } ] settings.configure(**settings_dict) if django.VERSION >= (1, 7): django.setup()
...
    if django.VERSION >= (1, 8):
        settings_dict['TEMPLATES'] = [
            {
                'BACKEND': 'django.template.backends.django.DjangoTemplates',
                'DIRS': []
            }
        ]

    settings.configure(**settings_dict)
...
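
For comparison, the fuller TEMPLATES block commonly used in Django >= 1.8 projects; the APP_DIRS and OPTIONS keys are added here only for illustration and are not part of the recorded change:

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {'context_processors': []},
    }
]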
ac11d7e7f90a2ee6a240be5fd95093f98c7d42dc
db/create_db.py
db/create_db.py
from models import Base, engine, MetricType
from sqlalchemy.orm import Session
import uuid
import os

# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all(engine)

session = Session(engine)


def initialize_metric_types():
    metric_types = [None] * 2

    metric_types[0] = MetricType()
    metric_types[0].id = str(uuid.uuid4())
    metric_types[0].name = 'Temperature'
    metric_types[0].min_value = -50.0
    metric_types[0].max_value = 50.0
    metric_types[0].unit = 'C'

    metric_types[1] = MetricType()
    metric_types[1].id = str(uuid.uuid4())
    metric_types[1].name = 'Humidity'
    metric_types[1].min_value = 0.0
    metric_types[1].max_value = 100.0
    metric_types[1].unit = '%'

    session.add_all(metric_types)
    session.commit()


try:
    os.remove('station_db.db')
except Exception as ex:
    pass

initialize_metric_types()
from models import Base, engine, MetricType
from sqlalchemy.orm import Session
import uuid
import os

try:
    os.remove('station_db.db')
except Exception as ex:
    pass

# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all(engine)

session = Session(engine)


def initialize_metric_types():
    metric_types = [None] * 2

    metric_types[0] = MetricType()
    metric_types[0].id = str(uuid.uuid4())
    metric_types[0].name = 'Temperature'
    metric_types[0].min_value = -50.0
    metric_types[0].max_value = 50.0
    metric_types[0].unit = 'C'

    metric_types[1] = MetricType()
    metric_types[1].id = str(uuid.uuid4())
    metric_types[1].name = 'Humidity'
    metric_types[1].min_value = 0.0
    metric_types[1].max_value = 100.0
    metric_types[1].unit = '%'

    session.add_all(metric_types)
    session.commit()


initialize_metric_types()
Fix for lack of file.
Fix for lack of file.

Signed-off-by: Maciej Szankin <33c1fdf481c8e628d4c6db7ea8dc77f49f2fa5d7@szankin.pl>
Python
mit
joannarozes/ddb,joannarozes/ddb,joannarozes/ddb,joannarozes/ddb
from models import Base, engine, MetricType
from sqlalchemy.orm import Session
import uuid
import os
+
+ try:
+     os.remove('station_db.db')
+ except Exception as ex:
+     pass

# Create all tables in the engine. This is equivalent to "Create Table"
# statements in raw SQL.
Base.metadata.create_all(engine)

session = Session(engine)


def initialize_metric_types():
    metric_types = [None] * 2

    metric_types[0] = MetricType()
    metric_types[0].id = str(uuid.uuid4())
    metric_types[0].name = 'Temperature'
    metric_types[0].min_value = -50.0
    metric_types[0].max_value = 50.0
    metric_types[0].unit = 'C'

    metric_types[1] = MetricType()
    metric_types[1].id = str(uuid.uuid4())
    metric_types[1].name = 'Humidity'
    metric_types[1].min_value = 0.0
    metric_types[1].max_value = 100.0
    metric_types[1].unit = '%'

    session.add_all(metric_types)
    session.commit()

- try:
-     os.remove('station_db.db')
- except Exception as ex:
-     pass
-
initialize_metric_types()
Fix for lack of file.
## Code Before: from models import Base, engine, MetricType from sqlalchemy.orm import Session import uuid import os # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all(engine) session = Session(engine) def initialize_metric_types(): metric_types = [None] * 2 metric_types[0] = MetricType() metric_types[0].id = str(uuid.uuid4()) metric_types[0].name = 'Temperature' metric_types[0].min_value = -50.0 metric_types[0].max_value = 50.0 metric_types[0].unit = 'C' metric_types[1] = MetricType() metric_types[1].id = str(uuid.uuid4()) metric_types[1].name = 'Humidity' metric_types[1].min_value = 0.0 metric_types[1].max_value = 100.0 metric_types[1].unit = '%' session.add_all(metric_types) session.commit() try: os.remove('station_db.db') except Exception as ex: pass initialize_metric_types() ## Instruction: Fix for lack of file. ## Code After: from models import Base, engine, MetricType from sqlalchemy.orm import Session import uuid import os try: os.remove('station_db.db') except Exception as ex: pass # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all(engine) session = Session(engine) def initialize_metric_types(): metric_types = [None] * 2 metric_types[0] = MetricType() metric_types[0].id = str(uuid.uuid4()) metric_types[0].name = 'Temperature' metric_types[0].min_value = -50.0 metric_types[0].max_value = 50.0 metric_types[0].unit = 'C' metric_types[1] = MetricType() metric_types[1].id = str(uuid.uuid4()) metric_types[1].name = 'Humidity' metric_types[1].min_value = 0.0 metric_types[1].max_value = 100.0 metric_types[1].unit = '%' session.add_all(metric_types) session.commit() initialize_metric_types()
// ... existing code ...
import os

try:
    os.remove('station_db.db')
except Exception as ex:
    pass
// ... modified code ...

initialize_metric_types()
// ... rest of the code ...
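
A slightly tighter variant of the same cleanup step, shown only as a sketch of an alternative to the blanket except clause:

import os

if os.path.exists('station_db.db'):
    os.remove('station_db.db')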
ccd2afdc687c3d6b7d01bed130e1b0097a4fdc2d
src/damis/run_experiment.py
src/damis/run_experiment.py
import sys
from damis.models import Experiment

exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
exp.status = 'FINISHED'
exp.save()
import sys
from damis.models import Experiment, Connection
from damis.settings import BUILDOUT_DIR
from os.path import splitext
from algorithms.preprocess import transpose

def transpose_data_callable(X, c, *args, **kwargs):
    X_absolute = BUILDOUT_DIR + '/var/www' + X
    Y = '%s_transposed%s' % splitext(X)
    Y_absolute = BUILDOUT_DIR + '/var/www' + Y
    transpose(X_absolute, Y_absolute, int(c))
    return [('Y', Y)]

def do_nothing(*args, **kwargs):
    return []

# Collables which get
SERVICES = {
    "UPLOAD FILE": do_nothing,
    "EXISTING FILE": do_nothing,
    "MIDAS FILE": do_nothing,
    "TECHNICAL DETAILS": do_nothing,
    "CHART": do_nothing,
    # "CLEAN DATA",
    # "FILTER DATA",
    # "SPLIT DATA",
    "TRANSPOSE DATA": transpose_data_callable,
    # "TRANSFORM DATA": transform_data_callable,
    # "STAT PRIMITIVES",
    # "MLP",
    # "C45",
    # "KMEANS",
    # "PCA",
    # "SMACOF",
    # "DMA",
    # "SDS",
    # "SAMANN",
    # "SOM",
    # "SOMMDS",
    # "SELECT FEATURES",
}

## Recursively walk through through tasks.
def execute_tasks(task):
    # Get INPUT and COMMON parameter values.
    kwargs = {}
    for pv in task.parameter_values.all():
        cons = Connection.objects.filter(target=pv)
        if cons:
            value = cons[0].source.value
        else:
            value = pv.value
        kwargs[pv.parameter.name] = value

    # Call executable
    service = SERVICES[task.algorithm.title]
    response = service(**kwargs) # Response dict: name -> value

    # Set OUTPUT parameter values and save.
    for name, value in response:
        pv = task.parameter_values.get(parameter__name=name)
        pv.value = value
        pv.save()

    task.status = 'SAVED'
    task.save()

    ## Call its following tasks
    for pv in task.parameter_values.all():
        for con in Connection.objects.filter(source=pv):
            next_task = con.target.task
            if next_task.status == 'SAVED':
                execute_tasks(next_task)


if __name__ == '__main__':
    exp_pk = sys.argv[1]
    exp = Experiment.objects.get(pk=exp_pk)
    first_task = exp.tasks.filter(algorithm__category='DATA')[0]
    execute_tasks(first_task)
    exp.status = 'FINISHED'
    exp.save()
Implement experiment workflow execution with transpose method.
Implement experiment workflow execution with transpose method.
Python
agpl-3.0
InScience/DAMIS-old,InScience/DAMIS-old
import sys
- from damis.models import Experiment
+ from damis.models import Experiment, Connection
+ from damis.settings import BUILDOUT_DIR
+ from os.path import splitext
+ from algorithms.preprocess import transpose

- exp_pk = sys.argv[1]
- exp = Experiment.objects.get(pk=exp_pk)
- exp.status = 'FINISHED'
- exp.save()
+ def transpose_data_callable(X, c, *args, **kwargs):
+     X_absolute = BUILDOUT_DIR + '/var/www' + X
+     Y = '%s_transposed%s' % splitext(X)
+     Y_absolute = BUILDOUT_DIR + '/var/www' + Y
+     transpose(X_absolute, Y_absolute, int(c))
+     return [('Y', Y)]

+ def do_nothing(*args, **kwargs):
+     return []
+
+ # Collables which get
+ SERVICES = {
+     "UPLOAD FILE": do_nothing,
+     "EXISTING FILE": do_nothing,
+     "MIDAS FILE": do_nothing,
+     "TECHNICAL DETAILS": do_nothing,
+     "CHART": do_nothing,
+     # "CLEAN DATA",
+     # "FILTER DATA",
+     # "SPLIT DATA",
+     "TRANSPOSE DATA": transpose_data_callable,
+     # "TRANSFORM DATA": transform_data_callable,
+     # "STAT PRIMITIVES",
+     # "MLP",
+     # "C45",
+     # "KMEANS",
+     # "PCA",
+     # "SMACOF",
+     # "DMA",
+     # "SDS",
+     # "SAMANN",
+     # "SOM",
+     # "SOMMDS",
+     # "SELECT FEATURES",
+ }
+
+ ## Recursively walk through through tasks.
+ def execute_tasks(task):
+     # Get INPUT and COMMON parameter values.
+     kwargs = {}
+     for pv in task.parameter_values.all():
+         cons = Connection.objects.filter(target=pv)
+         if cons:
+             value = cons[0].source.value
+         else:
+             value = pv.value
+         kwargs[pv.parameter.name] = value
+
+     # Call executable
+     service = SERVICES[task.algorithm.title]
+     response = service(**kwargs) # Response dict: name -> value
+
+     # Set OUTPUT parameter values and save.
+     for name, value in response:
+         pv = task.parameter_values.get(parameter__name=name)
+         pv.value = value
+         pv.save()
+
+     task.status = 'SAVED'
+     task.save()
+
+     ## Call its following tasks
+     for pv in task.parameter_values.all():
+         for con in Connection.objects.filter(source=pv):
+             next_task = con.target.task
+             if next_task.status == 'SAVED':
+                 execute_tasks(next_task)
+
+
+ if __name__ == '__main__':
+     exp_pk = sys.argv[1]
+     exp = Experiment.objects.get(pk=exp_pk)
+     first_task = exp.tasks.filter(algorithm__category='DATA')[0]
+     execute_tasks(first_task)
+     exp.status = 'FINISHED'
+     exp.save()
+
Implement experiment workflow execution with transpose method.
## Code Before: import sys from damis.models import Experiment exp_pk = sys.argv[1] exp = Experiment.objects.get(pk=exp_pk) exp.status = 'FINISHED' exp.save() ## Instruction: Implement experiment workflow execution with transpose method. ## Code After: import sys from damis.models import Experiment, Connection from damis.settings import BUILDOUT_DIR from os.path import splitext from algorithms.preprocess import transpose def transpose_data_callable(X, c, *args, **kwargs): X_absolute = BUILDOUT_DIR + '/var/www' + X Y = '%s_transposed%s' % splitext(X) Y_absolute = BUILDOUT_DIR + '/var/www' + Y transpose(X_absolute, Y_absolute, int(c)) return [('Y', Y)] def do_nothing(*args, **kwargs): return [] # Collables which get SERVICES = { "UPLOAD FILE": do_nothing, "EXISTING FILE": do_nothing, "MIDAS FILE": do_nothing, "TECHNICAL DETAILS": do_nothing, "CHART": do_nothing, # "CLEAN DATA", # "FILTER DATA", # "SPLIT DATA", "TRANSPOSE DATA": transpose_data_callable, # "TRANSFORM DATA": transform_data_callable, # "STAT PRIMITIVES", # "MLP", # "C45", # "KMEANS", # "PCA", # "SMACOF", # "DMA", # "SDS", # "SAMANN", # "SOM", # "SOMMDS", # "SELECT FEATURES", } ## Recursively walk through through tasks. def execute_tasks(task): # Get INPUT and COMMON parameter values. kwargs = {} for pv in task.parameter_values.all(): cons = Connection.objects.filter(target=pv) if cons: value = cons[0].source.value else: value = pv.value kwargs[pv.parameter.name] = value # Call executable service = SERVICES[task.algorithm.title] response = service(**kwargs) # Response dict: name -> value # Set OUTPUT parameter values and save. for name, value in response: pv = task.parameter_values.get(parameter__name=name) pv.value = value pv.save() task.status = 'SAVED' task.save() ## Call its following tasks for pv in task.parameter_values.all(): for con in Connection.objects.filter(source=pv): next_task = con.target.task if next_task.status == 'SAVED': execute_tasks(next_task) if __name__ == '__main__': exp_pk = sys.argv[1] exp = Experiment.objects.get(pk=exp_pk) first_task = exp.tasks.filter(algorithm__category='DATA')[0] execute_tasks(first_task) exp.status = 'FINISHED' exp.save()
...
import sys
from damis.models import Experiment, Connection
from damis.settings import BUILDOUT_DIR
from os.path import splitext
from algorithms.preprocess import transpose

def transpose_data_callable(X, c, *args, **kwargs):
    X_absolute = BUILDOUT_DIR + '/var/www' + X
    Y = '%s_transposed%s' % splitext(X)
    Y_absolute = BUILDOUT_DIR + '/var/www' + Y
    transpose(X_absolute, Y_absolute, int(c))
    return [('Y', Y)]

def do_nothing(*args, **kwargs):
    return []

# Collables which get
SERVICES = {
    "UPLOAD FILE": do_nothing,
    "EXISTING FILE": do_nothing,
    "MIDAS FILE": do_nothing,
    "TECHNICAL DETAILS": do_nothing,
    "CHART": do_nothing,
    # "CLEAN DATA",
    # "FILTER DATA",
    # "SPLIT DATA",
    "TRANSPOSE DATA": transpose_data_callable,
    # "TRANSFORM DATA": transform_data_callable,
    # "STAT PRIMITIVES",
    # "MLP",
    # "C45",
    # "KMEANS",
    # "PCA",
    # "SMACOF",
    # "DMA",
    # "SDS",
    # "SAMANN",
    # "SOM",
    # "SOMMDS",
    # "SELECT FEATURES",
}

## Recursively walk through through tasks.
def execute_tasks(task):
    # Get INPUT and COMMON parameter values.
    kwargs = {}
    for pv in task.parameter_values.all():
        cons = Connection.objects.filter(target=pv)
        if cons:
            value = cons[0].source.value
        else:
            value = pv.value
        kwargs[pv.parameter.name] = value

    # Call executable
    service = SERVICES[task.algorithm.title]
    response = service(**kwargs) # Response dict: name -> value

    # Set OUTPUT parameter values and save.
    for name, value in response:
        pv = task.parameter_values.get(parameter__name=name)
        pv.value = value
        pv.save()

    task.status = 'SAVED'
    task.save()

    ## Call its following tasks
    for pv in task.parameter_values.all():
        for con in Connection.objects.filter(source=pv):
            next_task = con.target.task
            if next_task.status == 'SAVED':
                execute_tasks(next_task)


if __name__ == '__main__':
    exp_pk = sys.argv[1]
    exp = Experiment.objects.get(pk=exp_pk)
    first_task = exp.tasks.filter(algorithm__category='DATA')[0]
    execute_tasks(first_task)
    exp.status = 'FINISHED'
    exp.save()
...
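
A sketch of how another service could be plugged into the SERVICES dispatch table; "SPLIT DATA" appears only as a commented-out key in the record, and the callable below is purely illustrative, simply mirroring the (output name, value) return contract used by transpose_data_callable:

def split_data_callable(X, *args, **kwargs):
    # Hypothetical: pass the input path through unchanged for both outputs.
    return [('Y1', X), ('Y2', X)]

SERVICES["SPLIT DATA"] = split_data_callable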
308cbf1f62e254643a0ad47db8ad55eb63e1c888
argonauts/testutils.py
argonauts/testutils.py
import json
import functools

from django.conf import settings
from django.test import Client, TestCase

__all__ = ['JsonTestClient', 'JsonTestCase']


class JsonTestClient(Client):
    def _json_request(self, method, url, data=None, *args, **kwargs):
        method_func = getattr(super(JsonTestClient, self), method)
        if method == 'get':
            encode = lambda x: x
        else:
            encode = json.dumps
        if data is not None:
            resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
        else:
            resp = method_func(url, content_type='application/json', *args, **kwargs)
        if resp['Content-Type'].startswith('application/json') and resp.content:
            charset = resp.charset or settings.DEFAULT_CHARSET
            resp.json = json.loads(resp.content.decode(charset))
        return resp

    def __getattribute__(self, attr):
        if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
            return functools.partial(self._json_request, attr)
        else:
            return super(JsonTestClient, self).__getattribute__(attr)


class JsonTestCase(TestCase):
    client_class = JsonTestClient
import json
import functools

from django.conf import settings
from django.test import Client, TestCase

__all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase']


class JsonTestClient(Client):
    def _json_request(self, method, url, data=None, *args, **kwargs):
        method_func = getattr(super(JsonTestClient, self), method)
        if method == 'get':
            encode = lambda x: x
        else:
            encode = json.dumps
        if data is not None:
            resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
        else:
            resp = method_func(url, content_type='application/json', *args, **kwargs)
        if resp['Content-Type'].startswith('application/json') and resp.content:
            charset = resp.charset or settings.DEFAULT_CHARSET
            resp.json = json.loads(resp.content.decode(charset))
        return resp

    def __getattribute__(self, attr):
        if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
            return functools.partial(self._json_request, attr)
        else:
            return super(JsonTestClient, self).__getattribute__(attr)


class JsonTestMixin(object):
    client_class = JsonTestClient


class JsonTestCase(JsonTestMixin, TestCase):
    pass
Make the TestCase a mixin
Make the TestCase a mixin
Python
bsd-2-clause
fusionbox/django-argonauts
import json import functools from django.conf import settings from django.test import Client, TestCase - __all__ = ['JsonTestClient', 'JsonTestCase'] + __all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase'] class JsonTestClient(Client): def _json_request(self, method, url, data=None, *args, **kwargs): method_func = getattr(super(JsonTestClient, self), method) if method == 'get': encode = lambda x: x else: encode = json.dumps if data is not None: resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs) else: resp = method_func(url, content_type='application/json', *args, **kwargs) if resp['Content-Type'].startswith('application/json') and resp.content: charset = resp.charset or settings.DEFAULT_CHARSET resp.json = json.loads(resp.content.decode(charset)) return resp def __getattribute__(self, attr): if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'): return functools.partial(self._json_request, attr) else: return super(JsonTestClient, self).__getattribute__(attr) - class JsonTestCase(TestCase): + class JsonTestMixin(object): client_class = JsonTestClient + class JsonTestCase(JsonTestMixin, TestCase): + pass +
Make the TestCase a mixin
## Code Before: import json import functools from django.conf import settings from django.test import Client, TestCase __all__ = ['JsonTestClient', 'JsonTestCase'] class JsonTestClient(Client): def _json_request(self, method, url, data=None, *args, **kwargs): method_func = getattr(super(JsonTestClient, self), method) if method == 'get': encode = lambda x: x else: encode = json.dumps if data is not None: resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs) else: resp = method_func(url, content_type='application/json', *args, **kwargs) if resp['Content-Type'].startswith('application/json') and resp.content: charset = resp.charset or settings.DEFAULT_CHARSET resp.json = json.loads(resp.content.decode(charset)) return resp def __getattribute__(self, attr): if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'): return functools.partial(self._json_request, attr) else: return super(JsonTestClient, self).__getattribute__(attr) class JsonTestCase(TestCase): client_class = JsonTestClient ## Instruction: Make the TestCase a mixin ## Code After: import json import functools from django.conf import settings from django.test import Client, TestCase __all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase'] class JsonTestClient(Client): def _json_request(self, method, url, data=None, *args, **kwargs): method_func = getattr(super(JsonTestClient, self), method) if method == 'get': encode = lambda x: x else: encode = json.dumps if data is not None: resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs) else: resp = method_func(url, content_type='application/json', *args, **kwargs) if resp['Content-Type'].startswith('application/json') and resp.content: charset = resp.charset or settings.DEFAULT_CHARSET resp.json = json.loads(resp.content.decode(charset)) return resp def __getattribute__(self, attr): if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'): return functools.partial(self._json_request, attr) else: return super(JsonTestClient, self).__getattribute__(attr) class JsonTestMixin(object): client_class = JsonTestClient class JsonTestCase(JsonTestMixin, TestCase): pass
... __all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase'] ... class JsonTestMixin(object): client_class = JsonTestClient class JsonTestCase(JsonTestMixin, TestCase): pass ...
8209b77a16c899436418dbc85dc891f671949bfc
bot/logger/message_sender/asynchronous.py
bot/logger/message_sender/asynchronous.py
from bot.logger.message_sender import IntermediateMessageSender, MessageSender from bot.multithreading.work import Work from bot.multithreading.worker import Worker class AsynchronousMessageSender(IntermediateMessageSender): def __init__(self, sender: MessageSender, worker: Worker): super().__init__(sender) self.worker = worker def send(self, text): self.worker.post(Work(lambda: self.sender.send(text), "async_message_send"))
from bot.logger.message_sender import IntermediateMessageSender, MessageSender from bot.multithreading.work import Work from bot.multithreading.worker import Worker class AsynchronousMessageSender(IntermediateMessageSender): def __init__(self, sender: MessageSender, worker: Worker): super().__init__(sender) self.worker = worker def send(self, text): self.worker.post(Work(lambda: self.sender.send(text), "asynchronous_message_sender:send"))
Clarify work action in AsynchronousMessageSender
Clarify work action in AsynchronousMessageSender
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
from bot.logger.message_sender import IntermediateMessageSender, MessageSender from bot.multithreading.work import Work from bot.multithreading.worker import Worker class AsynchronousMessageSender(IntermediateMessageSender): def __init__(self, sender: MessageSender, worker: Worker): super().__init__(sender) self.worker = worker def send(self, text): - self.worker.post(Work(lambda: self.sender.send(text), "async_message_send")) + self.worker.post(Work(lambda: self.sender.send(text), "asynchronous_message_sender:send"))
Clarify work action in AsynchronousMessageSender
## Code Before: from bot.logger.message_sender import IntermediateMessageSender, MessageSender from bot.multithreading.work import Work from bot.multithreading.worker import Worker class AsynchronousMessageSender(IntermediateMessageSender): def __init__(self, sender: MessageSender, worker: Worker): super().__init__(sender) self.worker = worker def send(self, text): self.worker.post(Work(lambda: self.sender.send(text), "async_message_send")) ## Instruction: Clarify work action in AsynchronousMessageSender ## Code After: from bot.logger.message_sender import IntermediateMessageSender, MessageSender from bot.multithreading.work import Work from bot.multithreading.worker import Worker class AsynchronousMessageSender(IntermediateMessageSender): def __init__(self, sender: MessageSender, worker: Worker): super().__init__(sender) self.worker = worker def send(self, text): self.worker.post(Work(lambda: self.sender.send(text), "asynchronous_message_sender:send"))
// ... existing code ... def send(self, text): self.worker.post(Work(lambda: self.sender.send(text), "asynchronous_message_sender:send")) // ... rest of the code ...
46bcad1e20e57f66498e7a70b8f3be929115bde6
incunafein/module/page/extensions/prepared_date.py
incunafein/module/page/extensions/prepared_date.py
from django.db import models def get_prepared_date(cls): return cls.prepared_date or cls.parent.prepared_date def register(cls, admin_cls): cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) cls.add_to_class('get_prepared_date', get_prepared_date)
from django.db import models def register(cls, admin_cls): cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) def getter(): if not cls._prepared_date: try: return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date except IndexError: return None return cls._prepared_date def setter(value): cls._prepared_date = value cls.prepared_date = property(getter, setter) if admin_cls and admin_cls.fieldsets: admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
Return parent date if there isn't one on the current object
Return parent date if there isn't one on the current object. Look for a prepared date in the ancestors of the current object and use that if it exists
Python
bsd-2-clause
incuna/incuna-feincms,incuna/incuna-feincms,incuna/incuna-feincms
from django.db import models - def get_prepared_date(cls): - return cls.prepared_date or cls.parent.prepared_date + def register(cls, admin_cls): + cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) - def register(cls, admin_cls): - cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) - cls.add_to_class('get_prepared_date', get_prepared_date) + def getter(): + if not cls._prepared_date: + try: + return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date + except IndexError: + return None + return cls._prepared_date + + def setter(value): + cls._prepared_date = value + + cls.prepared_date = property(getter, setter) + + if admin_cls and admin_cls.fieldsets: + admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
Return parent date if there isn't one on the current object
## Code Before: from django.db import models def get_prepared_date(cls): return cls.prepared_date or cls.parent.prepared_date def register(cls, admin_cls): cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) cls.add_to_class('get_prepared_date', get_prepared_date) ## Instruction: Return parent date if there isn't one on the current object ## Code After: from django.db import models def register(cls, admin_cls): cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) def getter(): if not cls._prepared_date: try: return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date except IndexError: return None return cls._prepared_date def setter(value): cls._prepared_date = value cls.prepared_date = property(getter, setter) if admin_cls and admin_cls.fieldsets: admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
... def register(cls, admin_cls): cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True)) def getter(): if not cls._prepared_date: try: return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date except IndexError: return None return cls._prepared_date def setter(value): cls._prepared_date = value cls.prepared_date = property(getter, setter) if admin_cls and admin_cls.fieldsets: admin_cls.fieldsets[2][1]['fields'].append('_prepared_date') ...
2b07fdcefdc915e69580016d9c0a08ab8e478ce7
chatterbot/adapters/logic/closest_match.py
chatterbot/adapters/logic/closest_match.py
from .base_match import BaseMatchAdapter from fuzzywuzzy import fuzz class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter creates a response by using fuzzywuzzy's process class to extract the most similar response to the input. This adapter selects a response to an input statement by selecting the closest known matching statement based on the Levenshtein Distance between the text of each statement. """ def get(self, input_statement): """ Takes a statement string and a list of statement strings. Returns the closest matching statement from the list. """ statement_list = self.context.storage.get_response_statements() if not statement_list: if self.has_storage_context: # Use a randomly picked statement return 0, self.context.storage.get_random() else: raise self.EmptyDatasetException() confidence = -1 closest_match = input_statement # Find the closest matching known statement for statement in statement_list: ratio = fuzz.ratio(input_statement.text, statement.text) if ratio > confidence: confidence = ratio closest_match = statement ''' closest_match, confidence = process.extractOne( input_statement.text, text_of_all_statements ) ''' # Convert the confidence integer to a percent confidence /= 100.0 return confidence, closest_match
from .base_match import BaseMatchAdapter from fuzzywuzzy import fuzz class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter creates a response by using fuzzywuzzy's process class to extract the most similar response to the input. This adapter selects a response to an input statement by selecting the closest known matching statement based on the Levenshtein Distance between the text of each statement. """ def get(self, input_statement): """ Takes a statement string and a list of statement strings. Returns the closest matching statement from the list. """ statement_list = self.context.storage.get_response_statements() if not statement_list: if self.has_storage_context: # Use a randomly picked statement return 0, self.context.storage.get_random() else: raise self.EmptyDatasetException() confidence = -1 closest_match = input_statement # Find the closest matching known statement for statement in statement_list: ratio = fuzz.ratio(input_statement.text, statement.text) if ratio > confidence: confidence = ratio closest_match = statement # Convert the confidence integer to a percent confidence /= 100.0 return confidence, closest_match
Remove commented out method call.
Remove commented out method call.
Python
bsd-3-clause
Reinaesaya/OUIRL-ChatBot,maclogan/VirtualPenPal,Gustavo6046/ChatterBot,vkosuri/ChatterBot,gunthercox/ChatterBot,Reinaesaya/OUIRL-ChatBot,davizucon/ChatterBot
from .base_match import BaseMatchAdapter from fuzzywuzzy import fuzz class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter creates a response by using fuzzywuzzy's process class to extract the most similar response to the input. This adapter selects a response to an input statement by selecting the closest known matching statement based on the Levenshtein Distance between the text of each statement. """ def get(self, input_statement): """ Takes a statement string and a list of statement strings. Returns the closest matching statement from the list. """ statement_list = self.context.storage.get_response_statements() if not statement_list: if self.has_storage_context: # Use a randomly picked statement return 0, self.context.storage.get_random() else: raise self.EmptyDatasetException() confidence = -1 closest_match = input_statement # Find the closest matching known statement for statement in statement_list: ratio = fuzz.ratio(input_statement.text, statement.text) if ratio > confidence: confidence = ratio closest_match = statement - ''' - closest_match, confidence = process.extractOne( - input_statement.text, - text_of_all_statements - ) - ''' - # Convert the confidence integer to a percent confidence /= 100.0 return confidence, closest_match
Remove commented out method call.
## Code Before: from .base_match import BaseMatchAdapter from fuzzywuzzy import fuzz class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter creates a response by using fuzzywuzzy's process class to extract the most similar response to the input. This adapter selects a response to an input statement by selecting the closest known matching statement based on the Levenshtein Distance between the text of each statement. """ def get(self, input_statement): """ Takes a statement string and a list of statement strings. Returns the closest matching statement from the list. """ statement_list = self.context.storage.get_response_statements() if not statement_list: if self.has_storage_context: # Use a randomly picked statement return 0, self.context.storage.get_random() else: raise self.EmptyDatasetException() confidence = -1 closest_match = input_statement # Find the closest matching known statement for statement in statement_list: ratio = fuzz.ratio(input_statement.text, statement.text) if ratio > confidence: confidence = ratio closest_match = statement ''' closest_match, confidence = process.extractOne( input_statement.text, text_of_all_statements ) ''' # Convert the confidence integer to a percent confidence /= 100.0 return confidence, closest_match ## Instruction: Remove commented out method call. ## Code After: from .base_match import BaseMatchAdapter from fuzzywuzzy import fuzz class ClosestMatchAdapter(BaseMatchAdapter): """ The ClosestMatchAdapter creates a response by using fuzzywuzzy's process class to extract the most similar response to the input. This adapter selects a response to an input statement by selecting the closest known matching statement based on the Levenshtein Distance between the text of each statement. """ def get(self, input_statement): """ Takes a statement string and a list of statement strings. Returns the closest matching statement from the list. """ statement_list = self.context.storage.get_response_statements() if not statement_list: if self.has_storage_context: # Use a randomly picked statement return 0, self.context.storage.get_random() else: raise self.EmptyDatasetException() confidence = -1 closest_match = input_statement # Find the closest matching known statement for statement in statement_list: ratio = fuzz.ratio(input_statement.text, statement.text) if ratio > confidence: confidence = ratio closest_match = statement # Convert the confidence integer to a percent confidence /= 100.0 return confidence, closest_match
# ... existing code ... # Convert the confidence integer to a percent # ... rest of the code ...
6fa5c20f4d3b6ea9716adbf4c5fd50739f2f987e
protractor/test.py
protractor/test.py
import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) protractor_command += ' --params.live_server_url={}'.format(self.live_server_url) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() def get_protractor_params(self): """A hook for adding params that protractor will receive.""" return { 'live_server_url': self.live_server_url } def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) for key, value in self.get_protractor_params().iteritems(): protractor_command += ' --params.{key}={value}'.format( key=key, value=value ) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
Add hook for protractor params
Add hook for protractor params
Python
mit
jpulec/django-protractor,penguin359/django-protractor
import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() + def get_protractor_params(self): + """A hook for adding params that protractor will receive.""" + return { + 'live_server_url': self.live_server_url + } + def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) - protractor_command += ' --params.live_server_url={}'.format(self.live_server_url) + for key, value in self.get_protractor_params().iteritems(): + protractor_command += ' --params.{key}={value}'.format( + key=key, value=value + ) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
Add hook for protractor params
## Code Before: import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) protractor_command += ' --params.live_server_url={}'.format(self.live_server_url) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0) ## Instruction: Add hook for protactor params ## Code After: import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() def get_protractor_params(self): """A hook for adding params that protractor will receive.""" return { 'live_server_url': self.live_server_url } def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) for key, value in self.get_protractor_params().iteritems(): protractor_command += ' --params.{key}={value}'.format( key=key, value=value ) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
# ... existing code ... def get_protractor_params(self): """A hook for adding params that protractor will receive.""" return { 'live_server_url': self.live_server_url } def test_run(self): # ... modified code ... protractor_command += ' --suite {}'.format(self.suite) for key, value in self.get_protractor_params().iteritems(): protractor_command += ' --params.{key}={value}'.format( key=key, value=value ) return_code = subprocess.call(protractor_command.split()) # ... rest of the code ...
872875ecb3b5a09c14da8836cdd4b7a6f6610675
vumi/transports/mxit/responses.py
vumi/transports/mxit/responses.py
import re from twisted.web.template import Element, renderer, XMLFile from twisted.python.filepath import FilePath class ResponseParser(object): HEADER_PATTERN = r'^(.*)[\r\n]{1,2}\d?' ITEM_PATTERN = r'^(\d+)\. (.+)$' def __init__(self, content): header_match = re.match(self.HEADER_PATTERN, content) if header_match: [self.header] = header_match.groups() self.items = re.findall(self.ITEM_PATTERN, content, re.MULTILINE) else: self.header = content self.items = [] @classmethod def parse(cls, content): p = cls(content) return p.header, p.items class MxitResponse(Element): loader = XMLFile( FilePath('vumi/transports/mxit/templates/response.xml')) def __init__(self, message, loader=None): self.header, self.items = ResponseParser.parse(message['content']) super(MxitResponse, self).__init__(loader or self.loader) @renderer def render_header(self, request, tag): return tag(self.header) @renderer def render_body(self, request, tag): if not self.items: return '' return tag @renderer def render_item(self, request, tag): for index, text in self.items: yield tag.clone().fillSlots(index=str(index), text=text)
import re from twisted.web.template import Element, renderer, XMLFile from twisted.python.filepath import FilePath from vumi.utils import PkgResources MXIT_RESOURCES = PkgResources(__name__) class ResponseParser(object): HEADER_PATTERN = r'^(.*)[\r\n]{1,2}\d?' ITEM_PATTERN = r'^(\d+)\. (.+)$' def __init__(self, content): header_match = re.match(self.HEADER_PATTERN, content) if header_match: [self.header] = header_match.groups() self.items = re.findall(self.ITEM_PATTERN, content, re.MULTILINE) else: self.header = content self.items = [] @classmethod def parse(cls, content): p = cls(content) return p.header, p.items class MxitResponse(Element): loader = XMLFile(FilePath(MXIT_RESOURCES.path('templates/response.xml'))) def __init__(self, message, loader=None): self.header, self.items = ResponseParser.parse(message['content']) super(MxitResponse, self).__init__(loader or self.loader) @renderer def render_header(self, request, tag): return tag(self.header) @renderer def render_body(self, request, tag): if not self.items: return '' return tag @renderer def render_item(self, request, tag): for index, text in self.items: yield tag.clone().fillSlots(index=str(index), text=text)
Use PkgResources helper to load response templates.
Use PkgResources helper to load response templates.
Python
bsd-3-clause
TouK/vumi,TouK/vumi,TouK/vumi,harrissoerja/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,vishwaprakashmishra/xmatrix
import re from twisted.web.template import Element, renderer, XMLFile from twisted.python.filepath import FilePath + + from vumi.utils import PkgResources + + + MXIT_RESOURCES = PkgResources(__name__) class ResponseParser(object): HEADER_PATTERN = r'^(.*)[\r\n]{1,2}\d?' ITEM_PATTERN = r'^(\d+)\. (.+)$' def __init__(self, content): header_match = re.match(self.HEADER_PATTERN, content) if header_match: [self.header] = header_match.groups() self.items = re.findall(self.ITEM_PATTERN, content, re.MULTILINE) else: self.header = content self.items = [] @classmethod def parse(cls, content): p = cls(content) return p.header, p.items class MxitResponse(Element): + loader = XMLFile(FilePath(MXIT_RESOURCES.path('templates/response.xml'))) - loader = XMLFile( - FilePath('vumi/transports/mxit/templates/response.xml')) def __init__(self, message, loader=None): self.header, self.items = ResponseParser.parse(message['content']) super(MxitResponse, self).__init__(loader or self.loader) @renderer def render_header(self, request, tag): return tag(self.header) @renderer def render_body(self, request, tag): if not self.items: return '' return tag @renderer def render_item(self, request, tag): for index, text in self.items: yield tag.clone().fillSlots(index=str(index), text=text)
Use PkgResources helper to load response templates.
## Code Before: import re from twisted.web.template import Element, renderer, XMLFile from twisted.python.filepath import FilePath class ResponseParser(object): HEADER_PATTERN = r'^(.*)[\r\n]{1,2}\d?' ITEM_PATTERN = r'^(\d+)\. (.+)$' def __init__(self, content): header_match = re.match(self.HEADER_PATTERN, content) if header_match: [self.header] = header_match.groups() self.items = re.findall(self.ITEM_PATTERN, content, re.MULTILINE) else: self.header = content self.items = [] @classmethod def parse(cls, content): p = cls(content) return p.header, p.items class MxitResponse(Element): loader = XMLFile( FilePath('vumi/transports/mxit/templates/response.xml')) def __init__(self, message, loader=None): self.header, self.items = ResponseParser.parse(message['content']) super(MxitResponse, self).__init__(loader or self.loader) @renderer def render_header(self, request, tag): return tag(self.header) @renderer def render_body(self, request, tag): if not self.items: return '' return tag @renderer def render_item(self, request, tag): for index, text in self.items: yield tag.clone().fillSlots(index=str(index), text=text) ## Instruction: Use PkgResources helper to load response templates. ## Code After: import re from twisted.web.template import Element, renderer, XMLFile from twisted.python.filepath import FilePath from vumi.utils import PkgResources MXIT_RESOURCES = PkgResources(__name__) class ResponseParser(object): HEADER_PATTERN = r'^(.*)[\r\n]{1,2}\d?' ITEM_PATTERN = r'^(\d+)\. (.+)$' def __init__(self, content): header_match = re.match(self.HEADER_PATTERN, content) if header_match: [self.header] = header_match.groups() self.items = re.findall(self.ITEM_PATTERN, content, re.MULTILINE) else: self.header = content self.items = [] @classmethod def parse(cls, content): p = cls(content) return p.header, p.items class MxitResponse(Element): loader = XMLFile(FilePath(MXIT_RESOURCES.path('templates/response.xml'))) def __init__(self, message, loader=None): self.header, self.items = ResponseParser.parse(message['content']) super(MxitResponse, self).__init__(loader or self.loader) @renderer def render_header(self, request, tag): return tag(self.header) @renderer def render_body(self, request, tag): if not self.items: return '' return tag @renderer def render_item(self, request, tag): for index, text in self.items: yield tag.clone().fillSlots(index=str(index), text=text)
... from twisted.python.filepath import FilePath from vumi.utils import PkgResources MXIT_RESOURCES = PkgResources(__name__) ... class MxitResponse(Element): loader = XMLFile(FilePath(MXIT_RESOURCES.path('templates/response.xml'))) ...
697bf0c23786794e35b0b9f72c878bb762d296b9
benches/cprofile_pyproj.py
benches/cprofile_pyproj.py
import numpy as np from pyproj import Proj, transform # London bounding box N = 51.691874116909894 E = 0.3340155643740321 S = 51.28676016315085 W = -0.5103750689005356 osgb36 = Proj(init='epsg:27700') wgs84 = Proj(init='epsg:4326') num_coords = 1000000 lon_ls = np.random.uniform(W, E, [num_coords]) lat_ls = np.random.uniform(S, N, [num_coords]) if __name__ == "__main__": for x in xrange(50): transform(wgs84, osgb36, lon_ls, lat_ls)
import numpy as np from pyproj import Proj, transform # London bounding box N = 51.691874116909894 E = 0.3340155643740321 S = 51.28676016315085 W = -0.5103750689005356 # osgb36 = Proj(init='epsg:27700') osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb') wgs84 = Proj(init='epsg:4326') num_coords = 1000000 lon_ls = np.random.uniform(W, E, [num_coords]) lat_ls = np.random.uniform(S, N, [num_coords]) if __name__ == "__main__": for x in xrange(50): transform(wgs84, osgb36, lon_ls, lat_ls)
Use NTv2 transform for Pyproj
Use NTv2 transform for Pyproj
Python
mit
urschrei/lonlat_bng,urschrei/rust_bng,urschrei/lonlat_bng,urschrei/rust_bng,urschrei/lonlat_bng
import numpy as np from pyproj import Proj, transform # London bounding box N = 51.691874116909894 E = 0.3340155643740321 S = 51.28676016315085 W = -0.5103750689005356 - osgb36 = Proj(init='epsg:27700') + # osgb36 = Proj(init='epsg:27700') + osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb') wgs84 = Proj(init='epsg:4326') num_coords = 1000000 lon_ls = np.random.uniform(W, E, [num_coords]) lat_ls = np.random.uniform(S, N, [num_coords]) if __name__ == "__main__": for x in xrange(50): transform(wgs84, osgb36, lon_ls, lat_ls)
Use NTv2 transform for Pyproj
## Code Before: import numpy as np from pyproj import Proj, transform # London bounding box N = 51.691874116909894 E = 0.3340155643740321 S = 51.28676016315085 W = -0.5103750689005356 osgb36 = Proj(init='epsg:27700') wgs84 = Proj(init='epsg:4326') num_coords = 1000000 lon_ls = np.random.uniform(W, E, [num_coords]) lat_ls = np.random.uniform(S, N, [num_coords]) if __name__ == "__main__": for x in xrange(50): transform(wgs84, osgb36, lon_ls, lat_ls) ## Instruction: Use NTv2 transform for Pyproj ## Code After: import numpy as np from pyproj import Proj, transform # London bounding box N = 51.691874116909894 E = 0.3340155643740321 S = 51.28676016315085 W = -0.5103750689005356 # osgb36 = Proj(init='epsg:27700') osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb') wgs84 = Proj(init='epsg:4326') num_coords = 1000000 lon_ls = np.random.uniform(W, E, [num_coords]) lat_ls = np.random.uniform(S, N, [num_coords]) if __name__ == "__main__": for x in xrange(50): transform(wgs84, osgb36, lon_ls, lat_ls)
// ... existing code ... # osgb36 = Proj(init='epsg:27700') osgb36 = Proj('+init=EPSG:27700 +nadgrids=OSTN02_NTv2.gsb') wgs84 = Proj(init='epsg:4326') // ... rest of the code ...
d5a3b6e1eb37883a16c7e98d2a1b7c98d8d67051
layout/tests.py
layout/tests.py
from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home)
from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
Add test for home page html content
Add test for home page html content
Python
mit
jvanbrug/scout,jvanbrug/scout
from django.core.urlresolvers import resolve + from django.http import HttpRequest + from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) + def test_home_page_returns_correct_html(self): + expected_html = render_to_string('home.html') + request = HttpRequest() + response = home(request) + actual_html = response.content.decode() + self.assertEqual(actual_html, expected_html) +
Add test for home page html content
## Code Before: from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) ## Instruction: Add test for home page html content ## Code After: from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
// ... existing code ... from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase // ... modified code ... self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html) // ... rest of the code ...
3fc94b4cffcfd08b439386fb2b01aa1e12fec6d5
iati/core/tests/test_data.py
iati/core/tests/test_data.py
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" pass
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass
Test stubs for dataset creation
Test stubs for dataset creation
Python
mit
IATI/iati.core,IATI/iati.core
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" + def test_dataset_no_params(self): + """Test Dataset creation with no parameters.""" - pass + pass + def test_dataset_valid_xml_string(self): + """Test Dataset creation with a valid XML string that is not IATI data.""" + pass + + def test_dataset_valid_iati_string(self): + """Test Dataset creation with a valid IATI XML string.""" + pass + + def test_dataset_invalid_xml_string(self): + """Test Dataset creation with a string that is not valid XML.""" + pass + + def test_dataset_tree(self): + """Test Dataset creation with an etree that is not valid IATI data.""" + pass + + def test_dataset_iati_tree(self): + """Test Dataset creation with a valid IATI etree.""" + pass + + def test_dataset_no_params_strict(self): + """Test Dataset creation with no parameters. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_valid_xml_string_strict(self): + """Test Dataset creation with a valid XML string that is not IATI data. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_valid_iati_string_strict(self): + """Test Dataset creation with a valid IATI XML string. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_invalid_xml_string_strict(self): + """Test Dataset creation with a string that is not valid XML. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_tree_strict(self): + """Test Dataset creation with an etree that is not valid IATI data. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_iati_tree_strict(self): + """Test Dataset creation with a valid IATI etree. + Strict IATI checks are enabled. + """ + pass +
Test stubs for dataset creation
## Code Before: """A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" pass ## Instruction: Test stubs for dataset creation ## Code After: """A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass
# ... existing code ... def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass # ... rest of the code ...
bf2ace8bd6cb0c492ff4347f9c2fe10a003abaff
sqlalchemy_redshift/__init__.py
sqlalchemy_redshift/__init__.py
from pkg_resources import get_distribution, parse_version try: import psycopg2 # noqa: F401 if get_distribution('psycopg2').parsed_version < parse_version('2.5'): raise ImportError('Minimum required version for psycopg2 is 2.5') except ImportError: raise ImportError( 'No module named psycopg2. Please install either ' 'psycopg2 or psycopg2-binary package for CPython ' 'or psycopg2cffi for Pypy.' ) __version__ = get_distribution('sqlalchemy-redshift').version from sqlalchemy.dialects import registry registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect") registry.register( "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect" )
from pkg_resources import DistributionNotFound, get_distribution, parse_version try: import psycopg2 # noqa: F401 except ImportError: raise ImportError( 'No module named psycopg2. Please install either ' 'psycopg2 or psycopg2-binary package for CPython ' 'or psycopg2cffi for Pypy.' ) from None for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']: try: if get_distribution(package).parsed_version < parse_version('2.5'): raise ImportError('Minimum required version for psycopg2 is 2.5') break except DistributionNotFound: pass else: raise ImportError( 'A module was found named psycopg2, ' 'but the version of it could not be checked ' 'as it was neither the Python package psycopg2, ' 'psycopg2-binary or psycopg2cffi.' ) __version__ = get_distribution('sqlalchemy-redshift').version from sqlalchemy.dialects import registry registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect") registry.register( "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect" )
Check the version of any of the supported Psycopg2 packages
Check the version of any of the supported Psycopg2 packages A check was introduced in commit 8e0c4857a1c08f257b95d3b1ee5f6eb795d55cdc which would check what version of the 'psycopg2' Python (pip) package was installed as the dependency was removed from setup.py. The check would however only check the 'psycopg2' package and not the other two supported providers of the psycopg2 module, which meant importing the sqlalchemy_redshift module would throw an exception, even though they were installed. This changes the check to check for either of the three supported psycopg2 packages and throws an exception if any of them fail to validate.
Python
mit
sqlalchemy-redshift/sqlalchemy-redshift,graingert/redshift_sqlalchemy,sqlalchemy-redshift/sqlalchemy-redshift
- from pkg_resources import get_distribution, parse_version + from pkg_resources import DistributionNotFound, get_distribution, parse_version try: import psycopg2 # noqa: F401 - if get_distribution('psycopg2').parsed_version < parse_version('2.5'): - raise ImportError('Minimum required version for psycopg2 is 2.5') except ImportError: raise ImportError( 'No module named psycopg2. Please install either ' 'psycopg2 or psycopg2-binary package for CPython ' 'or psycopg2cffi for Pypy.' + ) from None + + for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']: + try: + if get_distribution(package).parsed_version < parse_version('2.5'): + raise ImportError('Minimum required version for psycopg2 is 2.5') + break + except DistributionNotFound: + pass + else: + raise ImportError( + 'A module was found named psycopg2, ' + 'but the version of it could not be checked ' + 'as it was neither the Python package psycopg2, ' + 'psycopg2-binary or psycopg2cffi.' ) __version__ = get_distribution('sqlalchemy-redshift').version from sqlalchemy.dialects import registry registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect") registry.register( "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect" )
Check the version of any of the supported Psycopg2 packages
## Code Before: from pkg_resources import get_distribution, parse_version try: import psycopg2 # noqa: F401 if get_distribution('psycopg2').parsed_version < parse_version('2.5'): raise ImportError('Minimum required version for psycopg2 is 2.5') except ImportError: raise ImportError( 'No module named psycopg2. Please install either ' 'psycopg2 or psycopg2-binary package for CPython ' 'or psycopg2cffi for Pypy.' ) __version__ = get_distribution('sqlalchemy-redshift').version from sqlalchemy.dialects import registry registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect") registry.register( "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect" ) ## Instruction: Check the version of any of the supported Psycopg2 packages ## Code After: from pkg_resources import DistributionNotFound, get_distribution, parse_version try: import psycopg2 # noqa: F401 except ImportError: raise ImportError( 'No module named psycopg2. Please install either ' 'psycopg2 or psycopg2-binary package for CPython ' 'or psycopg2cffi for Pypy.' ) from None for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']: try: if get_distribution(package).parsed_version < parse_version('2.5'): raise ImportError('Minimum required version for psycopg2 is 2.5') break except DistributionNotFound: pass else: raise ImportError( 'A module was found named psycopg2, ' 'but the version of it could not be checked ' 'as it was neither the Python package psycopg2, ' 'psycopg2-binary or psycopg2cffi.' ) __version__ = get_distribution('sqlalchemy-redshift').version from sqlalchemy.dialects import registry registry.register("redshift", "sqlalchemy_redshift.dialect", "RedshiftDialect") registry.register( "redshift.psycopg2", "sqlalchemy_redshift.dialect", "RedshiftDialect" )
// ... existing code ... from pkg_resources import DistributionNotFound, get_distribution, parse_version // ... modified code ... import psycopg2 # noqa: F401 except ImportError: ... 'or psycopg2cffi for Pypy.' ) from None for package in ['psycopg2', 'psycopg2-binary', 'psycopg2cffi']: try: if get_distribution(package).parsed_version < parse_version('2.5'): raise ImportError('Minimum required version for psycopg2 is 2.5') break except DistributionNotFound: pass else: raise ImportError( 'A module was found named psycopg2, ' 'but the version of it could not be checked ' 'as it was neither the Python package psycopg2, ' 'psycopg2-binary or psycopg2cffi.' ) // ... rest of the code ...
87438b9dcdbd397d754b4317bd7724e5b663f5b1
dedupsqlfs/lib/cache/simple.py
dedupsqlfs/lib/cache/simple.py
from time import time __author__ = 'sergey' class CacheTTLseconds(object): """ Simple cache storage { key (int | str) : [ timestamp (float), - then added, updated, set to 0 if expired values (int | str) - some data ], ... } """ OFFSET_TIME = 0 OFFSET_VALUE = 1 _max_ttl = 300 _storage = None def __init__(self): self._storage = {} pass def __len__(self): return len(self._storage) def set_max_ttl(self, seconds): self._max_ttl = seconds return self def set(self, key, value): self._storage[ key ] = [time(), value] return self def get(self, key, default=None): # not setted val = self._storage.get(key, [0, default]) now = time() # update time only if value was set if key in self._storage: self._storage[ key ][self.OFFSET_TIME] = now return val def unset(self, key): if key in self._storage: del self._storage[ key ] return self def clear(self): now = time() count = 0 for key, item in tuple(self._storage.items()): if now - item[self.OFFSET_TIME] > self._max_ttl: del self._storage[key] count += 1 return count
from time import time __author__ = 'sergey' class CacheTTLseconds(object): """ Simple cache storage { key (int | str) : [ timestamp (float), - then added, updated, set to 0 if expired values (int | str) - some data ], ... } """ OFFSET_TIME = 0 OFFSET_VALUE = 1 _max_ttl = 300 _storage = None def __init__(self): self._storage = {} pass def __len__(self): return len(self._storage) def set_max_ttl(self, seconds): self._max_ttl = seconds return self def set(self, key, value): self._storage[ key ] = [time(), value] return self def get(self, key, default=None): # not setted val = self._storage.get(key, [0, default])[self.OFFSET_VALUE] now = time() # update time only if value was set if key in self._storage: self._storage[ key ][self.OFFSET_TIME] = now return val def unset(self, key): if key in self._storage: del self._storage[ key ] return self def clear(self): now = time() count = 0 for key, item in tuple(self._storage.items()): if now - item[self.OFFSET_TIME] > self._max_ttl: del self._storage[key] count += 1 return count
Fix value get - use offset
Fix value get - use offset
Python
mit
sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs
from time import time __author__ = 'sergey' class CacheTTLseconds(object): """ Simple cache storage { key (int | str) : [ timestamp (float), - then added, updated, set to 0 if expired values (int | str) - some data ], ... } """ OFFSET_TIME = 0 OFFSET_VALUE = 1 _max_ttl = 300 _storage = None def __init__(self): self._storage = {} pass def __len__(self): return len(self._storage) def set_max_ttl(self, seconds): self._max_ttl = seconds return self def set(self, key, value): self._storage[ key ] = [time(), value] return self def get(self, key, default=None): # not setted - val = self._storage.get(key, [0, default]) + val = self._storage.get(key, [0, default])[self.OFFSET_VALUE] now = time() # update time only if value was set if key in self._storage: self._storage[ key ][self.OFFSET_TIME] = now return val def unset(self, key): if key in self._storage: del self._storage[ key ] return self def clear(self): now = time() count = 0 for key, item in tuple(self._storage.items()): if now - item[self.OFFSET_TIME] > self._max_ttl: del self._storage[key] count += 1 return count
Fix value get - use offset
## Code Before: from time import time __author__ = 'sergey' class CacheTTLseconds(object): """ Simple cache storage { key (int | str) : [ timestamp (float), - then added, updated, set to 0 if expired values (int | str) - some data ], ... } """ OFFSET_TIME = 0 OFFSET_VALUE = 1 _max_ttl = 300 _storage = None def __init__(self): self._storage = {} pass def __len__(self): return len(self._storage) def set_max_ttl(self, seconds): self._max_ttl = seconds return self def set(self, key, value): self._storage[ key ] = [time(), value] return self def get(self, key, default=None): # not setted val = self._storage.get(key, [0, default]) now = time() # update time only if value was set if key in self._storage: self._storage[ key ][self.OFFSET_TIME] = now return val def unset(self, key): if key in self._storage: del self._storage[ key ] return self def clear(self): now = time() count = 0 for key, item in tuple(self._storage.items()): if now - item[self.OFFSET_TIME] > self._max_ttl: del self._storage[key] count += 1 return count ## Instruction: Fix value get - use offset ## Code After: from time import time __author__ = 'sergey' class CacheTTLseconds(object): """ Simple cache storage { key (int | str) : [ timestamp (float), - then added, updated, set to 0 if expired values (int | str) - some data ], ... } """ OFFSET_TIME = 0 OFFSET_VALUE = 1 _max_ttl = 300 _storage = None def __init__(self): self._storage = {} pass def __len__(self): return len(self._storage) def set_max_ttl(self, seconds): self._max_ttl = seconds return self def set(self, key, value): self._storage[ key ] = [time(), value] return self def get(self, key, default=None): # not setted val = self._storage.get(key, [0, default])[self.OFFSET_VALUE] now = time() # update time only if value was set if key in self._storage: self._storage[ key ][self.OFFSET_TIME] = now return val def unset(self, key): if key in self._storage: del self._storage[ key ] return self def clear(self): now = time() count = 0 for key, item in tuple(self._storage.items()): if now - item[self.OFFSET_TIME] > self._max_ttl: del self._storage[key] count += 1 return count
// ... existing code ... # not setted val = self._storage.get(key, [0, default])[self.OFFSET_VALUE] now = time() // ... rest of the code ...
1e68f5f1fd565a812ef3fdf10c4c40649e3ef398
foundation/organisation/search_indexes.py
foundation/organisation/search_indexes.py
from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) mailinglist = indexes.CharField(model_attr='mailinglist') homepage = indexes.CharField(model_attr='homepage') twitter = indexes.CharField(model_attr='twitter') def get_model(self): return NetworkGroup
from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') def get_model(self): return NetworkGroup
Fix references to old model fields
organisation: Fix references to old model fields
Python
mit
okfn/foundation,okfn/foundation,okfn/foundation,okfn/website,MjAbuz/foundation,okfn/website,okfn/foundation,okfn/website,okfn/website,MjAbuz/foundation,MjAbuz/foundation,MjAbuz/foundation
from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) - mailinglist = indexes.CharField(model_attr='mailinglist') - homepage = indexes.CharField(model_attr='homepage') twitter = indexes.CharField(model_attr='twitter') + homepage_url = indexes.CharField(model_attr='homepage_url') + mailinglist_url = indexes.CharField(model_attr='mailinglist_url') def get_model(self): return NetworkGroup
Fix references to old model fields
## Code Before: from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) mailinglist = indexes.CharField(model_attr='mailinglist') homepage = indexes.CharField(model_attr='homepage') twitter = indexes.CharField(model_attr='twitter') def get_model(self): return NetworkGroup ## Instruction: Fix references to old model fields ## Code After: from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') def get_model(self): return NetworkGroup
// ... existing code ... text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') // ... rest of the code ...
284d750d7da25b1d3db17ca4d5931e1b6d1d7319
tests/browser/test_editor.py
tests/browser/test_editor.py
from fancypages.test.testcases import SplinterTestCase class TestEditingFancyPage(SplinterTestCase): is_staff = True is_logged_in = True def test_moving_a_block(self): pass
from django.core.urlresolvers import reverse from fancypages.test.testcases import SplinterTestCase class TestTheEditorPanel(SplinterTestCase): is_staff = True is_logged_in = True def _get_cookie_names(self): return [c.get('name') for c in self.browser.cookies.all()] def test_can_be_opened_by_clicking_the_handle(self): self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-handle').click() self.assertFalse(body_tag.has_class('editor-hidden')) self.assertIn('fpEditorOpened', self._get_cookie_names()) def test_can_be_closed_by_clicking_the_x(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-close').click() body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.assertNotIn('fpEditorOpened', self._get_cookie_names()) def test_remains_opened_when_reloading_the_page(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden'))
Add tests for editor panel JS
Add tests for editor panel JS
Python
bsd-3-clause
tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages
+ from django.core.urlresolvers import reverse + from fancypages.test.testcases import SplinterTestCase - class TestEditingFancyPage(SplinterTestCase): + class TestTheEditorPanel(SplinterTestCase): is_staff = True is_logged_in = True - def test_moving_a_block(self): - pass + def _get_cookie_names(self): + return [c.get('name') for c in self.browser.cookies.all()] + def test_can_be_opened_by_clicking_the_handle(self): + self.goto(reverse('home')) + body_tag = self.browser.find_by_css('body').first + self.assertTrue(body_tag.has_class('editor-hidden')) + + self.browser.find_by_css('#editor-handle').click() + self.assertFalse(body_tag.has_class('editor-hidden')) + self.assertIn('fpEditorOpened', self._get_cookie_names()) + + def test_can_be_closed_by_clicking_the_x(self): + self.goto(reverse('home')) + self.browser.find_by_css('#editor-handle').click() + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) + + self.browser.find_by_css('#editor-close').click() + body_tag = self.browser.find_by_css('body').first + self.assertTrue(body_tag.has_class('editor-hidden')) + self.assertNotIn('fpEditorOpened', self._get_cookie_names()) + + def test_remains_opened_when_reloading_the_page(self): + self.goto(reverse('home')) + self.browser.find_by_css('#editor-handle').click() + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) + + self.goto(reverse('home')) + body_tag = self.browser.find_by_css('body').first + self.assertFalse(body_tag.has_class('editor-hidden')) +
Add tests for editor panel JS
## Code Before: from fancypages.test.testcases import SplinterTestCase class TestEditingFancyPage(SplinterTestCase): is_staff = True is_logged_in = True def test_moving_a_block(self): pass ## Instruction: Add tests for editor panel JS ## Code After: from django.core.urlresolvers import reverse from fancypages.test.testcases import SplinterTestCase class TestTheEditorPanel(SplinterTestCase): is_staff = True is_logged_in = True def _get_cookie_names(self): return [c.get('name') for c in self.browser.cookies.all()] def test_can_be_opened_by_clicking_the_handle(self): self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-handle').click() self.assertFalse(body_tag.has_class('editor-hidden')) self.assertIn('fpEditorOpened', self._get_cookie_names()) def test_can_be_closed_by_clicking_the_x(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-close').click() body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.assertNotIn('fpEditorOpened', self._get_cookie_names()) def test_remains_opened_when_reloading_the_page(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden'))
// ... existing code ... from django.core.urlresolvers import reverse from fancypages.test.testcases import SplinterTestCase // ... modified code ... class TestTheEditorPanel(SplinterTestCase): is_staff = True ... def _get_cookie_names(self): return [c.get('name') for c in self.browser.cookies.all()] def test_can_be_opened_by_clicking_the_handle(self): self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-handle').click() self.assertFalse(body_tag.has_class('editor-hidden')) self.assertIn('fpEditorOpened', self._get_cookie_names()) def test_can_be_closed_by_clicking_the_x(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.browser.find_by_css('#editor-close').click() body_tag = self.browser.find_by_css('body').first self.assertTrue(body_tag.has_class('editor-hidden')) self.assertNotIn('fpEditorOpened', self._get_cookie_names()) def test_remains_opened_when_reloading_the_page(self): self.goto(reverse('home')) self.browser.find_by_css('#editor-handle').click() body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) self.goto(reverse('home')) body_tag = self.browser.find_by_css('body').first self.assertFalse(body_tag.has_class('editor-hidden')) // ... rest of the code ...
004345f50edd4c4b08727efaf5de7ee60f1f1e48
caffe2/python/operator_test/softplus_op_test.py
caffe2/python/operator_test/softplus_op_test.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import core from hypothesis import given import caffe2.python.hypothesis_test_util as hu import unittest class TestSoftplus(hu.HypothesisTestCase): @given(X=hu.tensor(), **hu.gcs) def test_softplus(self, X, gc, dc): op = core.CreateOperator("Softplus", ["X"], ["Y"]) self.assertDeviceChecks(dc, op, [X], [0]) self.assertGradientChecks(gc, op, [X], 0, [0], stepsize=0.0005) if __name__ == "__main__": unittest.main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import core from hypothesis import given import caffe2.python.hypothesis_test_util as hu import unittest class TestSoftplus(hu.HypothesisTestCase): @given(X=hu.tensor(), **hu.gcs) def test_softplus(self, X, gc, dc): op = core.CreateOperator("Softplus", ["X"], ["Y"]) self.assertDeviceChecks(dc, op, [X], [0]) self.assertGradientChecks(gc, op, [X], 0, [0]) if __name__ == "__main__": unittest.main()
Fix gradient checking for softplus op
Fix gradient checking for softplus op Summary: kmatzen why did you set the stepsize in https://github.com/caffe2/caffe2/commit/ff84e7dea6e118710859d62a7207c06b87ae992e? The test is flaky before this change. Solid afterwards. Closes https://github.com/caffe2/caffe2/pull/841 Differential Revision: D5292112 Pulled By: akyrola fbshipit-source-id: c84715261194ff047606d4ec659b7f89dac3cbb1
Python
apache-2.0
sf-wind/caffe2,xzturn/caffe2,sf-wind/caffe2,pietern/caffe2,sf-wind/caffe2,xzturn/caffe2,Yangqing/caffe2,Yangqing/caffe2,davinwang/caffe2,sf-wind/caffe2,xzturn/caffe2,pietern/caffe2,bwasti/caffe2,sf-wind/caffe2,bwasti/caffe2,davinwang/caffe2,davinwang/caffe2,xzturn/caffe2,pietern/caffe2,pietern/caffe2,davinwang/caffe2,Yangqing/caffe2,Yangqing/caffe2,bwasti/caffe2,bwasti/caffe2,xzturn/caffe2,pietern/caffe2,caffe2/caffe2,davinwang/caffe2,Yangqing/caffe2,bwasti/caffe2
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import core from hypothesis import given import caffe2.python.hypothesis_test_util as hu import unittest class TestSoftplus(hu.HypothesisTestCase): @given(X=hu.tensor(), **hu.gcs) def test_softplus(self, X, gc, dc): op = core.CreateOperator("Softplus", ["X"], ["Y"]) self.assertDeviceChecks(dc, op, [X], [0]) - self.assertGradientChecks(gc, op, [X], 0, [0], stepsize=0.0005) + self.assertGradientChecks(gc, op, [X], 0, [0]) if __name__ == "__main__": unittest.main()
Fix gradient checking for softplus op
## Code Before: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import core from hypothesis import given import caffe2.python.hypothesis_test_util as hu import unittest class TestSoftplus(hu.HypothesisTestCase): @given(X=hu.tensor(), **hu.gcs) def test_softplus(self, X, gc, dc): op = core.CreateOperator("Softplus", ["X"], ["Y"]) self.assertDeviceChecks(dc, op, [X], [0]) self.assertGradientChecks(gc, op, [X], 0, [0], stepsize=0.0005) if __name__ == "__main__": unittest.main() ## Instruction: Fix gradient checking for softplus op ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import core from hypothesis import given import caffe2.python.hypothesis_test_util as hu import unittest class TestSoftplus(hu.HypothesisTestCase): @given(X=hu.tensor(), **hu.gcs) def test_softplus(self, X, gc, dc): op = core.CreateOperator("Softplus", ["X"], ["Y"]) self.assertDeviceChecks(dc, op, [X], [0]) self.assertGradientChecks(gc, op, [X], 0, [0]) if __name__ == "__main__": unittest.main()
# ... existing code ... self.assertDeviceChecks(dc, op, [X], [0]) self.assertGradientChecks(gc, op, [X], 0, [0]) # ... rest of the code ...
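Dropping the explicit `stepsize` means the test trusts the checker's default step for softplus. As a rough standalone illustration of why that is reasonable (NumPy instead of Caffe2, and an assumed step of 1e-3), the analytic gradient `sigmoid(x)` agrees with a central finite difference to high precision:

```python
# Finite-difference sanity check of the softplus gradient (NumPy only).
import numpy as np

def softplus(x):
    return np.log1p(np.exp(x))

def sigmoid(x):                 # analytic derivative of softplus
    return 1.0 / (1.0 + np.exp(-x))

x = np.linspace(-3.0, 3.0, 7)
h = 1e-3                        # assumed step size for the check
numeric = (softplus(x + h) - softplus(x - h)) / (2.0 * h)
print(np.max(np.abs(numeric - sigmoid(x))))   # on the order of 1e-8
```

This is only a sketch of the idea; Caffe2's `assertGradientChecks` performs the equivalent comparison against the operator's registered gradient.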
99e9ef79178d6e2dffd8ec7ed12b3edbd8b7d0f1
longclaw/longclawbasket/views.py
longclaw/longclawbasket/views.py
from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) return {"basket": items}
from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) total_price = sum(item.total() for item in items) return {"basket": items, "total_price": total_price}
Add basket total to context
Add basket total to context
Python
mit
JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw
from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) - return {"basket": items} + total_price = sum(item.total() for item in items) + return {"basket": items, "total_price": total_price}
Add basket total to context
## Code Before: from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) return {"basket": items} ## Instruction: Add basket total to context ## Code After: from django.shortcuts import render from django.views.generic import ListView from longclaw.longclawbasket.models import BasketItem from longclaw.longclawbasket import utils class BasketView(ListView): model = BasketItem template_name = "longclawbasket/basket.html" def get_context_data(self, **kwargs): items, _ = utils.get_basket_items(self.request) total_price = sum(item.total() for item in items) return {"basket": items, "total_price": total_price}
# ... existing code ... items, _ = utils.get_basket_items(self.request) total_price = sum(item.total() for item in items) return {"basket": items, "total_price": total_price} # ... rest of the code ...
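The added context value is a plain generator-expression sum over each item's `total()`. A tiny sketch with a hypothetical stand-in class (the real `BasketItem.total()` is assumed to combine price and quantity):

```python
# Hypothetical stand-in for BasketItem, just to show the sum.
class FakeBasketItem:
    def __init__(self, price, quantity):
        self.price = price
        self.quantity = quantity

    def total(self):
        return self.price * self.quantity

items = [FakeBasketItem(10, 2), FakeBasketItem(5, 3)]
total_price = sum(item.total() for item in items)
print(total_price)  # 35
```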
278b17859e4ad7464098a715777fcb755acf258c
doTranscode.py
doTranscode.py
import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile() inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file
Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file
Python
isc
jeffayle/Transcode
import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file - f = tempfile.NamedTemporaryFile() + f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file
## Code Before: import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile() inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ ## Instruction: Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file ## Code After: import encoders import decoders import config import tempfile import os def transcode(inF, outF, options, type=None): "Transcodes a file" if type == None: type = os.path.splitext(outF)[1][1:].lower() #Get the file's metadata meta = decoders.getMetadata(inF) #Decode the file f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) if not inF_real: return False #Encode it succ = encoders.encode(inF_real, outF, type, options, meta) #Clean up f.close() return succ
# ... existing code ... #Decode the file f = tempfile.NamedTemporaryFile(suffix=".wav") inF_real = decoders.decode(inF, f.name) # ... rest of the code ...
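The reasoning in the commit message can be checked with the standard library alone: without a `suffix`, the temporary file has no extension, which is what trips up the encoder described above.

```python
# Standard-library check of the suffix behaviour.
import tempfile

with tempfile.NamedTemporaryFile() as f:
    print(f.name)                    # e.g. /tmp/tmpk3v9_x1q  (no extension)

with tempfile.NamedTemporaryFile(suffix=".wav") as f:
    print(f.name)                    # e.g. /tmp/tmpk3v9_x1q.wav
    print(f.name.endswith(".wav"))   # True
```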
5d663ae690f0c488f7a38f4556c30b169389c441
flaskiwsapp/projects/models/target.py
flaskiwsapp/projects/models/target.py
''' Created on Sep 24, 2016 @author: rtorres ''' from flaskiwsapp.database import SurrogatePK, Model, db, reference_col, relationship, Column from sqlalchemy.dialects.postgresql.base import ENUM from sqlalchemy_utils.types.url import URLType from flask_validator.constraints.internet import ValidateURL AREAS = ('Policies', 'Billing', 'Claims', 'Reports') class Target(SurrogatePK, Model): """A user of the app.""" __tablename__ = 'targets' title = Column(db.String(80), nullable=False) description = Column(db.Text(), nullable=False) client_id = reference_col('clients', nullable=False) client = relationship('Client', backref='targets') client_priority = Column(db.SmallInteger(), nullable=False) product_area = Column(ENUM(*AREAS, name='areas', create_type=False), nullable=False) target_date = Column(db.DateTime(), nullable=False) ticket_url = Column(db.String(256), nullable=False) def __init__(self, title="", password=None, **kwargs): """Create instance.""" db.Model.__init__(self, title=title.strip(), **kwargs) def __str__(self): """String representation of the user. Shows the target title.""" return self.title def get_id(self): return self.id
''' Created on Sep 24, 2016 @author: rtorres ''' from flaskiwsapp.database import SurrogatePK, Model, db, reference_col, relationship, Column from sqlalchemy.dialects.postgresql.base import ENUM AREAS = ('Policies', 'Billing', 'Claims', 'Reports') class Target(SurrogatePK, Model): """A user of the app.""" __tablename__ = 'targets' title = Column(db.String(80), nullable=False) description = Column(db.Text(), nullable=False) client_id = reference_col('clients', nullable=False) client = relationship('Client', backref='targets') client_priority = Column(db.SmallInteger(), nullable=False) product_area = Column(ENUM(*AREAS, name='areas', create_type=False), nullable=False) target_date = Column(db.DateTime(), nullable=False) ticket_url = Column(db.String(256), nullable=False) def __init__(self, title="", password=None, **kwargs): """Create instance.""" db.Model.__init__(self, title=title.strip(), **kwargs) def __str__(self): """String representation of the user. Shows the target title.""" return self.title def get_id(self): return self.id
Remove import from testing packages
Remove import from testing packages
Python
mit
rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel
''' Created on Sep 24, 2016 @author: rtorres ''' from flaskiwsapp.database import SurrogatePK, Model, db, reference_col, relationship, Column from sqlalchemy.dialects.postgresql.base import ENUM - from sqlalchemy_utils.types.url import URLType - from flask_validator.constraints.internet import ValidateURL AREAS = ('Policies', 'Billing', 'Claims', 'Reports') class Target(SurrogatePK, Model): """A user of the app.""" __tablename__ = 'targets' title = Column(db.String(80), nullable=False) description = Column(db.Text(), nullable=False) client_id = reference_col('clients', nullable=False) client = relationship('Client', backref='targets') client_priority = Column(db.SmallInteger(), nullable=False) product_area = Column(ENUM(*AREAS, name='areas', create_type=False), nullable=False) target_date = Column(db.DateTime(), nullable=False) ticket_url = Column(db.String(256), nullable=False) def __init__(self, title="", password=None, **kwargs): """Create instance.""" db.Model.__init__(self, title=title.strip(), **kwargs) def __str__(self): """String representation of the user. Shows the target title.""" return self.title def get_id(self): return self.id
Remove import from testing packages
## Code Before: ''' Created on Sep 24, 2016 @author: rtorres ''' from flaskiwsapp.database import SurrogatePK, Model, db, reference_col, relationship, Column from sqlalchemy.dialects.postgresql.base import ENUM from sqlalchemy_utils.types.url import URLType from flask_validator.constraints.internet import ValidateURL AREAS = ('Policies', 'Billing', 'Claims', 'Reports') class Target(SurrogatePK, Model): """A user of the app.""" __tablename__ = 'targets' title = Column(db.String(80), nullable=False) description = Column(db.Text(), nullable=False) client_id = reference_col('clients', nullable=False) client = relationship('Client', backref='targets') client_priority = Column(db.SmallInteger(), nullable=False) product_area = Column(ENUM(*AREAS, name='areas', create_type=False), nullable=False) target_date = Column(db.DateTime(), nullable=False) ticket_url = Column(db.String(256), nullable=False) def __init__(self, title="", password=None, **kwargs): """Create instance.""" db.Model.__init__(self, title=title.strip(), **kwargs) def __str__(self): """String representation of the user. Shows the target title.""" return self.title def get_id(self): return self.id ## Instruction: Remove import from testing packages ## Code After: ''' Created on Sep 24, 2016 @author: rtorres ''' from flaskiwsapp.database import SurrogatePK, Model, db, reference_col, relationship, Column from sqlalchemy.dialects.postgresql.base import ENUM AREAS = ('Policies', 'Billing', 'Claims', 'Reports') class Target(SurrogatePK, Model): """A user of the app.""" __tablename__ = 'targets' title = Column(db.String(80), nullable=False) description = Column(db.Text(), nullable=False) client_id = reference_col('clients', nullable=False) client = relationship('Client', backref='targets') client_priority = Column(db.SmallInteger(), nullable=False) product_area = Column(ENUM(*AREAS, name='areas', create_type=False), nullable=False) target_date = Column(db.DateTime(), nullable=False) ticket_url = Column(db.String(256), nullable=False) def __init__(self, title="", password=None, **kwargs): """Create instance.""" db.Model.__init__(self, title=title.strip(), **kwargs) def __str__(self): """String representation of the user. Shows the target title.""" return self.title def get_id(self): return self.id
# ... existing code ... from sqlalchemy.dialects.postgresql.base import ENUM # ... rest of the code ...
106ea580471387a3645877f52018ff2880db34f3
live_studio/config/forms.py
live_studio/config/forms.py
from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, 'widgets': { 'base': forms.RadioSelect(), 'distribution': forms.RadioSelect(), 'media_type': forms.RadioSelect(), 'architecture': forms.RadioSelect(), 'installer': forms.RadioSelect(), }, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
Use radio buttons for most of the interface.
Use radio buttons for most of the interface. Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
Python
agpl-3.0
lamby/live-studio,lamby/live-studio,lamby/live-studio,debian-live/live-studio,debian-live/live-studio,debian-live/live-studio
from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, + 'widgets': { + 'base': forms.RadioSelect(), + 'distribution': forms.RadioSelect(), + 'media_type': forms.RadioSelect(), + 'architecture': forms.RadioSelect(), + 'installer': forms.RadioSelect(), + }, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
Use radio buttons for most of the interface.
## Code Before: from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta})) ## Instruction: Use radio buttons for most of the interface. ## Code After: from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, 'widgets': { 'base': forms.RadioSelect(), 'distribution': forms.RadioSelect(), 'media_type': forms.RadioSelect(), 'architecture': forms.RadioSelect(), 'installer': forms.RadioSelect(), }, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
// ... existing code ... 'fields': fields, 'widgets': { 'base': forms.RadioSelect(), 'distribution': forms.RadioSelect(), 'media_type': forms.RadioSelect(), 'architecture': forms.RadioSelect(), 'installer': forms.RadioSelect(), }, }) // ... rest of the code ...
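The wizard classes above are assembled dynamically with `type()`, and the change folds a `widgets` mapping into each generated `Meta`. A dependency-free sketch of the same assembly (plain classes instead of Django `ModelForm`s, and a string placeholder where `forms.RadioSelect()` would go):

```python
# Plain-Python sketch of the per-page class building done in WIZARD_FORMS.
PAGES = (('base',), ('distribution',), ('locale', 'keyboard_layout'))

generated = []
for fields in PAGES:
    meta = type('Meta', (), {
        'fields': fields,
        'widgets': {name: 'RadioSelect()' for name in fields},  # placeholder
    })
    generated.append(type('WizardForm', (object,), {'Meta': meta}))

for cls in generated:
    print(cls.Meta.fields, cls.Meta.widgets)
# ('base',) {'base': 'RadioSelect()'}
# ('distribution',) {'distribution': 'RadioSelect()'}
# ('locale', 'keyboard_layout') {'locale': 'RadioSelect()', 'keyboard_layout': 'RadioSelect()'}
```

In the real form the `widgets` dict is a fixed mapping rather than one derived from `fields`, but the mechanics of attaching it through the dynamically built `Meta` are the same.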
285eeb1c7565f8fa9fb6ba38ed843601f81cdf4e
tmc/models/document_topic.py
tmc/models/document_topic.py
from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' _order = 'name' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
Order document topics by name
[IMP] Order document topics by name
Python
agpl-3.0
tmcrosario/odoo-tmc
from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' + _order = 'name' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
Order document topics by name
## Code Before: from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id ## Instruction: Order document topics by name ## Code After: from odoo import api, fields, models class DocumentTopic(models.Model): _name = 'tmc.document_topic' _description = 'document_topic' _inherit = 'tmc.category' _order = 'name' first_parent_id = fields.Many2one( comodel_name='tmc.document_topic', compute='_compute_first_parent', store=True ) document_ids = fields.Many2many( comodel_name='tmc.document', relation='document_main_topic_rel', column1='main_topic_ids' ) parent_id = fields.Many2one( comodel_name='tmc.document_topic', string='Main Topic' ) child_ids = fields.One2many( comodel_name='tmc.document_topic', inverse_name='parent_id' ) important = fields.Boolean() @api.multi @api.depends('parent_id', 'parent_id.parent_id') def _compute_first_parent(self): for document_topic in self: first_parent_id = False parent = document_topic.parent_id while parent: first_parent_id = parent.id parent = parent.parent_id document_topic.first_parent_id = first_parent_id
// ... existing code ... _inherit = 'tmc.category' _order = 'name' // ... rest of the code ...
5237cb7f1339eb13b4c01f1c3611448a8f865726
terms/templatetags/terms.py
terms/templatetags/terms.py
from django.template import Library from ..html import TermsHTMLReconstructor register = Library() @register.filter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
from django.template import Library from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor register = Library() @register.filter @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
Make sure the filter arg is a string.
Make sure the filter arg is a string.
Python
bsd-3-clause
BertrandBordage/django-terms,philippeowagner/django-terms,BertrandBordage/django-terms,philippeowagner/django-terms
from django.template import Library + from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor register = Library() @register.filter + @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
Make sure the filter arg is a string.
## Code Before: from django.template import Library from ..html import TermsHTMLReconstructor register = Library() @register.filter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out ## Instruction: Make sure the filter arg is a string. ## Code After: from django.template import Library from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor register = Library() @register.filter @stringfilter def replace_terms(html): parser = TermsHTMLReconstructor() parser.feed(html) return parser.out
# ... existing code ... from django.template import Library from django.template.defaultfilters import stringfilter from ..html import TermsHTMLReconstructor # ... modified code ... @register.filter @stringfilter def replace_terms(html): # ... rest of the code ...
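What `stringfilter` adds here, roughly: the decorated filter receives its first argument already converted to text, so the HTML parser is never handed a non-string. A dependency-free sketch of that coercion (the real decorator in `django.template.defaultfilters` also preserves safe-string markers, which this toy version ignores):

```python
# Toy version of the coercion stringfilter performs.
def stringfilter_sketch(func):
    def wrapper(value, *args, **kwargs):
        return func(str(value), *args, **kwargs)   # first arg forced to text
    return wrapper

@stringfilter_sketch
def shout(value):
    return value.upper()        # would fail on an int without the coercion

print(shout("term"))   # TERM
print(shout(1234))     # 1234  (as a string; no AttributeError)
```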
273f9842bbe407e2e4548c712fed8c709c29dd0a
examples/cassandra_db.py
examples/cassandra_db.py
import logger logger.configure() import guv guv.monkey_patch() import logging from cassandra import cluster log = logging.getLogger() def main(): nodes = ['192.168.20.2'] c = cluster.Cluster(nodes, port=9042) session = c.connect('test') log.info('Execute commands') rows = session.execute('SELECT * FROM numbers') for row in rows: log.info(row) log.warn('Cassandra support is currently incomplete and buggy.') c.shutdown() if __name__ == '__main__': main()
import guv guv.monkey_patch() import logger logger.configure() import logging from cassandra import cluster log = logging.getLogger() def main(): nodes = ['192.168.20.2'] c = cluster.Cluster(nodes, port=9042) session = c.connect('test') log.info('Execute commands') rows = session.execute('SELECT * FROM numbers') for row in rows: log.info(row) c.shutdown() if __name__ == '__main__': main()
Remove warning message (now fixed)
Remove warning message (now fixed)
Python
mit
veegee/guv,veegee/guv
+ import guv + + guv.monkey_patch() + import logger + logger.configure() - - import guv - guv.monkey_patch() import logging from cassandra import cluster log = logging.getLogger() def main(): nodes = ['192.168.20.2'] c = cluster.Cluster(nodes, port=9042) session = c.connect('test') log.info('Execute commands') rows = session.execute('SELECT * FROM numbers') for row in rows: log.info(row) - log.warn('Cassandra support is currently incomplete and buggy.') c.shutdown() if __name__ == '__main__': main()
Remove warning message (now fixed)
## Code Before: import logger logger.configure() import guv guv.monkey_patch() import logging from cassandra import cluster log = logging.getLogger() def main(): nodes = ['192.168.20.2'] c = cluster.Cluster(nodes, port=9042) session = c.connect('test') log.info('Execute commands') rows = session.execute('SELECT * FROM numbers') for row in rows: log.info(row) log.warn('Cassandra support is currently incomplete and buggy.') c.shutdown() if __name__ == '__main__': main() ## Instruction: Remove warning message (now fixed) ## Code After: import guv guv.monkey_patch() import logger logger.configure() import logging from cassandra import cluster log = logging.getLogger() def main(): nodes = ['192.168.20.2'] c = cluster.Cluster(nodes, port=9042) session = c.connect('test') log.info('Execute commands') rows = session.execute('SELECT * FROM numbers') for row in rows: log.info(row) c.shutdown() if __name__ == '__main__': main()
... import guv guv.monkey_patch() import logger logger.configure() ... c.shutdown() ...
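Moving `guv.monkey_patch()` above the other imports matters because any module imported first can keep references to the original, unpatched callables. A rough, self-contained illustration with a hand-rolled patch (no guv involved; `time.sleep` stands in for whatever the real patch would replace):

```python
# Early-bound references defeat a patch applied later.
import time

early_ref = time.sleep              # reference taken "before monkey_patch()"

def green_sleep(seconds):           # stand-in for a cooperative sleep
    print("would yield to the hub for", seconds, "s instead of blocking")

time.sleep = green_sleep            # the "patch"
time.sleep(1)                       # goes through the patched function
early_ref(0)                        # still the original, blocking call
time.sleep = early_ref              # restore
```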
223c268d58645fbecdddb24ff84587dc803bfc87
braubuddy/tests/thermometer/test_auto.py
braubuddy/tests/thermometer/test_auto.py
import unittest from mock import patch, call, MagicMock from braubuddy.thermometer import auto from braubuddy.thermometer import dummy from braubuddy.thermometer import ds18b20_gpio from braubuddy.thermometer import temper_usb from braubuddy.thermometer import DeviceError from braubuddy.thermometer import ReadError class TestAuto(unittest.TestCase): @patch('braubuddy.thermometer.ds18b20_gpio.ds18b20') @patch('braubuddy.thermometer.temper_usb.temperusb') def test_dummy_returned_if_no_devices(self, mk_temperusb, mk_ds18b20): """Dummy thermometer is created if no real thermometers discovered.""" mk_ds18b20.DS18B20 = MagicMock(side_effect = Exception('Some Error')) mk_temperusb.TemperHandler.return_value.get_devices.return_value = [] thermometer = auto.AutoThermometer() self.assertIsInstance(thermometer, dummy.DummyThermometer)
import unittest from mock import patch, call, MagicMock from braubuddy.thermometer import auto from braubuddy.thermometer import dummy from braubuddy.thermometer import ds18b20_gpio from braubuddy.thermometer import temper_usb class TestAuto(unittest.TestCase): @patch('braubuddy.thermometer.ds18b20_gpio.ds18b20') @patch('braubuddy.thermometer.temper_usb.temperusb') def test_dummy_returned_if_no_devices(self, mk_temperusb, mk_ds18b20): """Dummy thermometer is created if no real thermometers discovered.""" mk_ds18b20.DS18B20 = MagicMock(side_effect = Exception('Some Error')) mk_temperusb.TemperHandler.return_value.get_devices.return_value = [] thermometer = auto.AutoThermometer() self.assertIsInstance(thermometer, dummy.DummyThermometer)
Fix docstring and remove unnecessary imports.
Fix docstring and remove unnecessary imports.
Python
bsd-3-clause
amorphic/braubuddy,amorphic/braubuddy,amorphic/braubuddy
import unittest from mock import patch, call, MagicMock from braubuddy.thermometer import auto from braubuddy.thermometer import dummy from braubuddy.thermometer import ds18b20_gpio from braubuddy.thermometer import temper_usb - from braubuddy.thermometer import DeviceError - from braubuddy.thermometer import ReadError class TestAuto(unittest.TestCase): @patch('braubuddy.thermometer.ds18b20_gpio.ds18b20') @patch('braubuddy.thermometer.temper_usb.temperusb') def test_dummy_returned_if_no_devices(self, mk_temperusb, mk_ds18b20): """Dummy thermometer is created if no real thermometers discovered.""" mk_ds18b20.DS18B20 = MagicMock(side_effect = Exception('Some Error')) mk_temperusb.TemperHandler.return_value.get_devices.return_value = [] thermometer = auto.AutoThermometer() self.assertIsInstance(thermometer, dummy.DummyThermometer)
Fix docstring and remove unnecessary imports.
## Code Before: import unittest from mock import patch, call, MagicMock from braubuddy.thermometer import auto from braubuddy.thermometer import dummy from braubuddy.thermometer import ds18b20_gpio from braubuddy.thermometer import temper_usb from braubuddy.thermometer import DeviceError from braubuddy.thermometer import ReadError class TestAuto(unittest.TestCase): @patch('braubuddy.thermometer.ds18b20_gpio.ds18b20') @patch('braubuddy.thermometer.temper_usb.temperusb') def test_dummy_returned_if_no_devices(self, mk_temperusb, mk_ds18b20): """Dummy thermometer is created if no real thermometers discovered.""" mk_ds18b20.DS18B20 = MagicMock(side_effect = Exception('Some Error')) mk_temperusb.TemperHandler.return_value.get_devices.return_value = [] thermometer = auto.AutoThermometer() self.assertIsInstance(thermometer, dummy.DummyThermometer) ## Instruction: Fix docstring and remove unnecessary imports. ## Code After: import unittest from mock import patch, call, MagicMock from braubuddy.thermometer import auto from braubuddy.thermometer import dummy from braubuddy.thermometer import ds18b20_gpio from braubuddy.thermometer import temper_usb class TestAuto(unittest.TestCase): @patch('braubuddy.thermometer.ds18b20_gpio.ds18b20') @patch('braubuddy.thermometer.temper_usb.temperusb') def test_dummy_returned_if_no_devices(self, mk_temperusb, mk_ds18b20): """Dummy thermometer is created if no real thermometers discovered.""" mk_ds18b20.DS18B20 = MagicMock(side_effect = Exception('Some Error')) mk_temperusb.TemperHandler.return_value.get_devices.return_value = [] thermometer = auto.AutoThermometer() self.assertIsInstance(thermometer, dummy.DummyThermometer)
# ... existing code ... from braubuddy.thermometer import temper_usb # ... rest of the code ...
311b0d5a0baabbb9c1476a156dbae1b919478704
src/upgradegit/cli.py
src/upgradegit/cli.py
import click import requirements import os import re @click.command() @click.option('--file', default='requirements.txt', help='File to upgrade') @click.option('--branch', default='master', help='Branch to upgrade from') def upgrade(file, branch): lines = [] with open(file, 'r') as f: for req in requirements.parse(f): line = '' if (req.uri): reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))' uri = req.uri.replace('git+ssh://', 'ssh://git@') cmd = 'git ls-remote {} {} HEAD'.format(uri, branch) result = os.popen(cmd).read() result = result.strip() results = re.findall(reg, result) result = results[0][0] line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line) else: name = req.name spec_op = req.specs[0][0] spec_ver = req.specs[0][1] line = '{name}{spec_op}{spec_ver}'.format( name=name, spec_op=spec_op, spec_ver=spec_ver) lines.append(line) with open(file, 'w') as f: for line in lines: f.write(line+'\n') if __name__ == '__main__': upgrade()
import click import requirements import os import re @click.command() @click.option('--file', default='requirements.txt', help='File to upgrade') @click.option('--branch', default='master', help='Branch to upgrade from') def upgrade(file, branch): lines = [] with open(file, 'r') as f: for req in requirements.parse(f): line = '' if (req.uri): reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))' uri = req.uri.replace('git+ssh://', 'ssh://git@') cmd = 'git ls-remote {} {} HEAD'.format(uri, branch) result = os.popen(cmd).read() result = result.strip() results = re.findall(reg, result) result = results[0][0] line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line) else: name = req.name spec_op = req.specs[0][0] spec_ver = req.specs[0][1] line = '{name}{spec_op}{spec_ver}'.format( name=name, spec_op=spec_op, spec_ver=spec_ver) lines.append(line) with open(file, 'w') as f: for line in lines: f.write(line+'\n') if __name__ == '__main__': upgrade()
Allow for requirements without a hash
Allow for requirements without a hash
Python
mit
bevanmw/gitupgrade
import click import requirements import os import re @click.command() @click.option('--file', default='requirements.txt', help='File to upgrade') @click.option('--branch', default='master', help='Branch to upgrade from') def upgrade(file, branch): lines = [] with open(file, 'r') as f: for req in requirements.parse(f): line = '' if (req.uri): reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))' uri = req.uri.replace('git+ssh://', 'ssh://git@') cmd = 'git ls-remote {} {} HEAD'.format(uri, branch) result = os.popen(cmd).read() result = result.strip() results = re.findall(reg, result) result = results[0][0] - line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line) + line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line) else: name = req.name spec_op = req.specs[0][0] spec_ver = req.specs[0][1] line = '{name}{spec_op}{spec_ver}'.format( name=name, spec_op=spec_op, spec_ver=spec_ver) lines.append(line) with open(file, 'w') as f: for line in lines: f.write(line+'\n') if __name__ == '__main__': upgrade()
Allow for requirements without a hash
## Code Before: import click import requirements import os import re @click.command() @click.option('--file', default='requirements.txt', help='File to upgrade') @click.option('--branch', default='master', help='Branch to upgrade from') def upgrade(file, branch): lines = [] with open(file, 'r') as f: for req in requirements.parse(f): line = '' if (req.uri): reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))' uri = req.uri.replace('git+ssh://', 'ssh://git@') cmd = 'git ls-remote {} {} HEAD'.format(uri, branch) result = os.popen(cmd).read() result = result.strip() results = re.findall(reg, result) result = results[0][0] line = re.sub(r'\@([0-9a-f]*)(?=(#|$))', '@'+result, req.line) else: name = req.name spec_op = req.specs[0][0] spec_ver = req.specs[0][1] line = '{name}{spec_op}{spec_ver}'.format( name=name, spec_op=spec_op, spec_ver=spec_ver) lines.append(line) with open(file, 'w') as f: for line in lines: f.write(line+'\n') if __name__ == '__main__': upgrade() ## Instruction: Allow for requirements without a hash ## Code After: import click import requirements import os import re @click.command() @click.option('--file', default='requirements.txt', help='File to upgrade') @click.option('--branch', default='master', help='Branch to upgrade from') def upgrade(file, branch): lines = [] with open(file, 'r') as f: for req in requirements.parse(f): line = '' if (req.uri): reg = r'([0-9a-z]*)(?=(\s+refs\/heads\/'+branch+'))' uri = req.uri.replace('git+ssh://', 'ssh://git@') cmd = 'git ls-remote {} {} HEAD'.format(uri, branch) result = os.popen(cmd).read() result = result.strip() results = re.findall(reg, result) result = results[0][0] line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line) else: name = req.name spec_op = req.specs[0][0] spec_ver = req.specs[0][1] line = '{name}{spec_op}{spec_ver}'.format( name=name, spec_op=spec_op, spec_ver=spec_ver) lines.append(line) with open(file, 'w') as f: for line in lines: f.write(line+'\n') if __name__ == '__main__': upgrade()
# ... existing code ... result = results[0][0] line = re.sub(r'.git(?=(#|$))', '.git@'+result, req.line) # ... rest of the code ...
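A worked run of the new substitution on made-up requirement lines (only `re` needed): when the URI ends in `.git` with no pinned commit, the lookahead `(?=(#|$))` matches and the freshly resolved hash is appended; a line that already carries an `@<sha>` pin is left alone, because `.git` is then followed by `@` rather than `#` or end of string.

```python
import re

result = "0123abc"   # stands in for the sha parsed from `git ls-remote`

no_hash = "git+ssh://git@example.com/org/repo.git#egg=repo"
pinned = "git+ssh://git@example.com/org/repo.git@deadbeef#egg=repo"

print(re.sub(r'.git(?=(#|$))', '.git@' + result, no_hash))
# git+ssh://git@example.com/org/repo.git@0123abc#egg=repo
print(re.sub(r'.git(?=(#|$))', '.git@' + result, pinned))
# git+ssh://git@example.com/org/repo.git@deadbeef#egg=repo  (unchanged)
```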
ce39e4a5573e7b3a882ee4a327b3c9eb088d1d07
senlin/profiles/container/docker.py
senlin/profiles/container/docker.py
from senlin.common.i18n import _ from senlin.common import schema from senlin.profiles import base class DockerProfile(base.Profile): """Profile for a docker container.""" KEYS = ( CONTEXT, IMAGE, NAME, COMMAND, ) = ( 'context', 'image', 'name', 'command', ) properties_schema = { CONTEXT: schema.Map( _('Customized security context for operationg containers.') ), IMAGE: schema.String( _('The image used to create a container') ), NAME: schema.String( _('The name of the container.') ), COMMAND: schema.String( _('The command to run when container is started.') ), } def __init__(self, type_name, name, **kwargs): super(DockerProfile, self).__init__(type_name, name, **kwargs) self._dockerclient = None
from senlin.common.i18n import _ from senlin.common import schema from senlin.profiles import base class DockerProfile(base.Profile): """Profile for a docker container.""" KEYS = ( CONTEXT, IMAGE, NAME, COMMAND, HOST_NODE, HOST_CLUSTER ) = ( 'context', 'image', 'name', 'command', 'host_node', 'host_cluster', ) properties_schema = { CONTEXT: schema.Map( _('Customized security context for operationg containers.') ), IMAGE: schema.String( _('The image used to create a container') ), NAME: schema.String( _('The name of the container.') ), COMMAND: schema.String( _('The command to run when container is started.') ), HOST_NODE: schema.String( _('The node on which container will be launched.') ), HOST_CLUSTER: schema.String( _('The cluster on which container cluster will be launched.') ), } def __init__(self, type_name, name, **kwargs): super(DockerProfile, self).__init__(type_name, name, **kwargs) self._dockerclient = None
Add 'host_node' and 'host_cluster' properties to container profile
Add 'host_node' and 'host_cluster' properties to container profile Add 'host_node' and 'host_cluster' properties to container profile, in a container profile, either 'host_node' or 'host_cluster' will be assigned a value for a container node creation or a container cluster creation. blueprint container-profile-support Change-Id: Ief464375bf651ebe1770c3fcf0488f29b25a94f4
Python
apache-2.0
stackforge/senlin,openstack/senlin,stackforge/senlin,openstack/senlin,openstack/senlin
from senlin.common.i18n import _ from senlin.common import schema from senlin.profiles import base class DockerProfile(base.Profile): """Profile for a docker container.""" KEYS = ( - CONTEXT, IMAGE, NAME, COMMAND, + CONTEXT, IMAGE, NAME, COMMAND, HOST_NODE, HOST_CLUSTER ) = ( - 'context', 'image', 'name', 'command', + 'context', 'image', 'name', 'command', 'host_node', 'host_cluster', ) properties_schema = { CONTEXT: schema.Map( _('Customized security context for operationg containers.') ), IMAGE: schema.String( _('The image used to create a container') ), NAME: schema.String( _('The name of the container.') ), COMMAND: schema.String( _('The command to run when container is started.') ), + HOST_NODE: schema.String( + _('The node on which container will be launched.') + ), + HOST_CLUSTER: schema.String( + _('The cluster on which container cluster will be launched.') + ), } def __init__(self, type_name, name, **kwargs): super(DockerProfile, self).__init__(type_name, name, **kwargs) self._dockerclient = None
Add 'host_node' and 'host_cluster' properties to container profile
## Code Before: from senlin.common.i18n import _ from senlin.common import schema from senlin.profiles import base class DockerProfile(base.Profile): """Profile for a docker container.""" KEYS = ( CONTEXT, IMAGE, NAME, COMMAND, ) = ( 'context', 'image', 'name', 'command', ) properties_schema = { CONTEXT: schema.Map( _('Customized security context for operationg containers.') ), IMAGE: schema.String( _('The image used to create a container') ), NAME: schema.String( _('The name of the container.') ), COMMAND: schema.String( _('The command to run when container is started.') ), } def __init__(self, type_name, name, **kwargs): super(DockerProfile, self).__init__(type_name, name, **kwargs) self._dockerclient = None ## Instruction: Add 'host_node' and 'host_cluster' properties to container profile ## Code After: from senlin.common.i18n import _ from senlin.common import schema from senlin.profiles import base class DockerProfile(base.Profile): """Profile for a docker container.""" KEYS = ( CONTEXT, IMAGE, NAME, COMMAND, HOST_NODE, HOST_CLUSTER ) = ( 'context', 'image', 'name', 'command', 'host_node', 'host_cluster', ) properties_schema = { CONTEXT: schema.Map( _('Customized security context for operationg containers.') ), IMAGE: schema.String( _('The image used to create a container') ), NAME: schema.String( _('The name of the container.') ), COMMAND: schema.String( _('The command to run when container is started.') ), HOST_NODE: schema.String( _('The node on which container will be launched.') ), HOST_CLUSTER: schema.String( _('The cluster on which container cluster will be launched.') ), } def __init__(self, type_name, name, **kwargs): super(DockerProfile, self).__init__(type_name, name, **kwargs) self._dockerclient = None
// ... existing code ... KEYS = ( CONTEXT, IMAGE, NAME, COMMAND, HOST_NODE, HOST_CLUSTER ) = ( 'context', 'image', 'name', 'command', 'host_node', 'host_cluster', ) // ... modified code ... ), HOST_NODE: schema.String( _('The node on which container will be launched.') ), HOST_CLUSTER: schema.String( _('The cluster on which container cluster will be launched.') ), } // ... rest of the code ...
81d2882d1558ed52fc70927d745474aa46ac1f3b
jarbas/dashboard/admin.py
jarbas/dashboard/admin.py
from django.contrib import admin from jarbas.core.models import Reimbursement class SuspiciousListFilter(admin.SimpleListFilter): title = 'Is suspicious' parameter_name = 'is_suspicions' def lookups(self, request, model_admin): return ( ('yes', 'Yes'), ('no', 'No'), ) def queryset(self, request, queryset): return queryset.suspicions() if self.value() == 'yes' else queryset class ReimbursementModelAdmin(admin.ModelAdmin): list_display = ( 'document_id', 'congressperson_name', 'year', 'subquota_description', 'supplier', 'cnpj_cpf', 'is_suspicious', 'total_net_value', 'available_in_latest_dataset', ) search_fields = ( 'applicant_id', 'cnpj_cpf', 'congressperson_name', 'document_id', 'party', 'state', 'supplier', ) list_filter = ( SuspiciousListFilter, 'available_in_latest_dataset', 'year', 'state', ) def is_suspicious(self, obj): return obj.suspicions is not None is_suspicious.short_description = 'Suspicious' is_suspicious.boolean = True admin.site.register(Reimbursement, ReimbursementModelAdmin)
from django.contrib import admin from jarbas.core.models import Reimbursement class SuspiciousListFilter(admin.SimpleListFilter): title = 'Is suspicious' parameter_name = 'is_suspicions' def lookups(self, request, model_admin): return ( ('yes', 'Yes'), ('no', 'No'), ) def queryset(self, request, queryset): return queryset.suspicions() if self.value() == 'yes' else queryset class ReimbursementModelAdmin(admin.ModelAdmin): list_display = ( 'document_id', 'congressperson_name', 'year', 'subquota_description', 'supplier', 'cnpj_cpf', 'is_suspicious', 'total_net_value', 'available_in_latest_dataset', ) search_fields = ( 'applicant_id', 'cnpj_cpf', 'congressperson_name', 'document_id', 'party', 'state', 'supplier', ) list_filter = ( SuspiciousListFilter, 'available_in_latest_dataset', 'year', 'state', ) readonly_fields = tuple(f.name for f in Reimbursement._meta.fields) def is_suspicious(self, obj): return obj.suspicions is not None is_suspicious.short_description = 'Suspicious' is_suspicious.boolean = True admin.site.register(Reimbursement, ReimbursementModelAdmin)
Mark all fields as read only in the dashboard
Mark all fields as read only in the dashboard
Python
mit
datasciencebr/jarbas,datasciencebr/jarbas,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,datasciencebr/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor
from django.contrib import admin from jarbas.core.models import Reimbursement class SuspiciousListFilter(admin.SimpleListFilter): title = 'Is suspicious' parameter_name = 'is_suspicions' def lookups(self, request, model_admin): return ( ('yes', 'Yes'), ('no', 'No'), ) def queryset(self, request, queryset): return queryset.suspicions() if self.value() == 'yes' else queryset class ReimbursementModelAdmin(admin.ModelAdmin): list_display = ( 'document_id', 'congressperson_name', 'year', 'subquota_description', 'supplier', 'cnpj_cpf', 'is_suspicious', 'total_net_value', 'available_in_latest_dataset', ) search_fields = ( 'applicant_id', 'cnpj_cpf', 'congressperson_name', 'document_id', 'party', 'state', 'supplier', ) list_filter = ( SuspiciousListFilter, 'available_in_latest_dataset', 'year', 'state', ) + readonly_fields = tuple(f.name for f in Reimbursement._meta.fields) def is_suspicious(self, obj): return obj.suspicions is not None is_suspicious.short_description = 'Suspicious' is_suspicious.boolean = True admin.site.register(Reimbursement, ReimbursementModelAdmin)
Mark all fields as read only in the dashboard
## Code Before: from django.contrib import admin from jarbas.core.models import Reimbursement class SuspiciousListFilter(admin.SimpleListFilter): title = 'Is suspicious' parameter_name = 'is_suspicions' def lookups(self, request, model_admin): return ( ('yes', 'Yes'), ('no', 'No'), ) def queryset(self, request, queryset): return queryset.suspicions() if self.value() == 'yes' else queryset class ReimbursementModelAdmin(admin.ModelAdmin): list_display = ( 'document_id', 'congressperson_name', 'year', 'subquota_description', 'supplier', 'cnpj_cpf', 'is_suspicious', 'total_net_value', 'available_in_latest_dataset', ) search_fields = ( 'applicant_id', 'cnpj_cpf', 'congressperson_name', 'document_id', 'party', 'state', 'supplier', ) list_filter = ( SuspiciousListFilter, 'available_in_latest_dataset', 'year', 'state', ) def is_suspicious(self, obj): return obj.suspicions is not None is_suspicious.short_description = 'Suspicious' is_suspicious.boolean = True admin.site.register(Reimbursement, ReimbursementModelAdmin) ## Instruction: Mark all fields as read only in the dashboard ## Code After: from django.contrib import admin from jarbas.core.models import Reimbursement class SuspiciousListFilter(admin.SimpleListFilter): title = 'Is suspicious' parameter_name = 'is_suspicions' def lookups(self, request, model_admin): return ( ('yes', 'Yes'), ('no', 'No'), ) def queryset(self, request, queryset): return queryset.suspicions() if self.value() == 'yes' else queryset class ReimbursementModelAdmin(admin.ModelAdmin): list_display = ( 'document_id', 'congressperson_name', 'year', 'subquota_description', 'supplier', 'cnpj_cpf', 'is_suspicious', 'total_net_value', 'available_in_latest_dataset', ) search_fields = ( 'applicant_id', 'cnpj_cpf', 'congressperson_name', 'document_id', 'party', 'state', 'supplier', ) list_filter = ( SuspiciousListFilter, 'available_in_latest_dataset', 'year', 'state', ) readonly_fields = tuple(f.name for f in Reimbursement._meta.fields) def is_suspicious(self, obj): return obj.suspicions is not None is_suspicious.short_description = 'Suspicious' is_suspicious.boolean = True admin.site.register(Reimbursement, ReimbursementModelAdmin)
... ) readonly_fields = tuple(f.name for f in Reimbursement._meta.fields) ...
bb19c79ebc976bfa390f3c6ecc59ec6e0d03dd7e
speed_spider.py
speed_spider.py
from grab.spider import Spider, Task from grab.tools.logs import default_logging import time import logging from random import randint from grab.util.py3k_support import * URL_28K = 'http://load.local/grab.html' def timer(func): """ Display time taken to execute the decorated function. """ def inner(*args, **kwargs): start = time.time() result = func(*args, **kwargs) total = time.time() - start print('Time: %.2f sec.' % total) return result return inner class SpeedSpider(Spider): def task_generator(self): url_template = 'http://load.local/grab%d.html' #fast_url = 'http://load.local/grab0.html' slow_url = 'http://load.local/slow.html' #yield Task('load', url=slow_url, disable_cache=True) #yield Task('load', url=fast_url, disable_cache=False) for x in xrange(500): disable_flag = True#not (x % 2) yield Task('load', url=url_template % x, disable_cache=disable_flag) #if randint(0, 10) == 10: #yield Task('load', url=slow_url, disable_cache=True) def task_load(self, grab, task): assert 'grab' in grab.response.body print('ok', task.url) @timer def main(): default_logging() bot = SpeedSpider(thread_number=30) bot.setup_cache(database='speed_spider', use_compression=True) bot.run() print(bot.render_stats()) if __name__ == '__main__': main()
from grab.spider import Spider, Task from grab.tools.logs import default_logging import time import logging from random import randint from grab.util.py3k_support import * URL_28K = 'http://load.local/grab.html' def timer(func): """ Display time taken to execute the decorated function. """ def inner(*args, **kwargs): start = time.time() result = func(*args, **kwargs) total = time.time() - start print('Time: %.2f sec.' % total) return result return inner class SpeedSpider(Spider): def task_generator(self): url = 'http://load.local/grab.html' for x in xrange(500): yield Task('load', url=url) def task_load(self, grab, task): assert 'grab' in grab.response.body print('ok', task.url) @timer def main(): default_logging() bot = SpeedSpider(thread_number=30) bot.run() print(bot.render_stats()) if __name__ == '__main__': main()
Change code of speed test
Change code of speed test
Python
mit
shaunstanislaus/grab,alihalabyah/grab,pombredanne/grab-1,alihalabyah/grab,lorien/grab,DDShadoww/grab,maurobaraldi/grab,maurobaraldi/grab,codevlabs/grab,istinspring/grab,DDShadoww/grab,codevlabs/grab,huiyi1990/grab,lorien/grab,kevinlondon/grab,giserh/grab,liorvh/grab,pombredanne/grab-1,shaunstanislaus/grab,raybuhr/grab,huiyi1990/grab,liorvh/grab,kevinlondon/grab,giserh/grab,istinspring/grab,SpaceAppsXploration/grab,raybuhr/grab,SpaceAppsXploration/grab
from grab.spider import Spider, Task from grab.tools.logs import default_logging import time import logging from random import randint from grab.util.py3k_support import * URL_28K = 'http://load.local/grab.html' def timer(func): """ Display time taken to execute the decorated function. """ def inner(*args, **kwargs): start = time.time() result = func(*args, **kwargs) total = time.time() - start print('Time: %.2f sec.' % total) return result return inner class SpeedSpider(Spider): def task_generator(self): - url_template = 'http://load.local/grab%d.html' - #fast_url = 'http://load.local/grab0.html' + url = 'http://load.local/grab.html' - slow_url = 'http://load.local/slow.html' - #yield Task('load', url=slow_url, disable_cache=True) - #yield Task('load', url=fast_url, disable_cache=False) for x in xrange(500): + yield Task('load', url=url) - disable_flag = True#not (x % 2) - yield Task('load', url=url_template % x, disable_cache=disable_flag) - #if randint(0, 10) == 10: - #yield Task('load', url=slow_url, disable_cache=True) def task_load(self, grab, task): assert 'grab' in grab.response.body print('ok', task.url) @timer def main(): default_logging() bot = SpeedSpider(thread_number=30) - bot.setup_cache(database='speed_spider', use_compression=True) bot.run() print(bot.render_stats()) if __name__ == '__main__': main()
Change code of speed test
## Code Before: from grab.spider import Spider, Task from grab.tools.logs import default_logging import time import logging from random import randint from grab.util.py3k_support import * URL_28K = 'http://load.local/grab.html' def timer(func): """ Display time taken to execute the decorated function. """ def inner(*args, **kwargs): start = time.time() result = func(*args, **kwargs) total = time.time() - start print('Time: %.2f sec.' % total) return result return inner class SpeedSpider(Spider): def task_generator(self): url_template = 'http://load.local/grab%d.html' #fast_url = 'http://load.local/grab0.html' slow_url = 'http://load.local/slow.html' #yield Task('load', url=slow_url, disable_cache=True) #yield Task('load', url=fast_url, disable_cache=False) for x in xrange(500): disable_flag = True#not (x % 2) yield Task('load', url=url_template % x, disable_cache=disable_flag) #if randint(0, 10) == 10: #yield Task('load', url=slow_url, disable_cache=True) def task_load(self, grab, task): assert 'grab' in grab.response.body print('ok', task.url) @timer def main(): default_logging() bot = SpeedSpider(thread_number=30) bot.setup_cache(database='speed_spider', use_compression=True) bot.run() print(bot.render_stats()) if __name__ == '__main__': main() ## Instruction: Change code of speed test ## Code After: from grab.spider import Spider, Task from grab.tools.logs import default_logging import time import logging from random import randint from grab.util.py3k_support import * URL_28K = 'http://load.local/grab.html' def timer(func): """ Display time taken to execute the decorated function. """ def inner(*args, **kwargs): start = time.time() result = func(*args, **kwargs) total = time.time() - start print('Time: %.2f sec.' % total) return result return inner class SpeedSpider(Spider): def task_generator(self): url = 'http://load.local/grab.html' for x in xrange(500): yield Task('load', url=url) def task_load(self, grab, task): assert 'grab' in grab.response.body print('ok', task.url) @timer def main(): default_logging() bot = SpeedSpider(thread_number=30) bot.run() print(bot.render_stats()) if __name__ == '__main__': main()
# ... existing code ... def task_generator(self): url = 'http://load.local/grab.html' for x in xrange(500): yield Task('load', url=url) # ... modified code ... bot = SpeedSpider(thread_number=30) bot.run() # ... rest of the code ...
59fa966d43e4fd66669c3390464f60f323cf2865
tests/changes/api/serializer/models/test_command.py
tests/changes/api/serializer/models/test_command.py
from datetime import datetime from changes.api.serializer import serialize from changes.config import db from changes.models import Command from changes.testutils import TestCase class CommandSerializerTest(TestCase): def test_simple(self): project = self.create_project() build = self.create_build(project) job = self.create_job(build) jobphase = self.create_jobphase(job) jobstep = self.create_jobstep(jobphase) command = Command( label='echo 1', jobstep_id=jobstep.id, cwd='/home/foobar', script='echo 1', date_created=datetime(2013, 9, 19, 22, 15, 22), artifacts=['junit.xml'], ) db.session.add(command) db.session.flush() result = serialize(command) assert result['id'] == command.id.hex assert result['dateCreated'] == '2013-09-19T22:15:22' assert result['cwd'] == command.cwd assert result['script'] == command.script
from datetime import datetime from changes.api.serializer import serialize from changes.config import db from changes.models import Command from changes.testutils import TestCase class CommandSerializerTest(TestCase): def test_simple(self): project = self.create_project() build = self.create_build(project) job = self.create_job(build) jobphase = self.create_jobphase(job) jobstep = self.create_jobstep(jobphase) command = Command( label='echo 1', jobstep_id=jobstep.id, cwd='/home/foobar', env={'foo': 'bar'}, script='echo 1', date_created=datetime(2013, 9, 19, 22, 15, 22), artifacts=['junit.xml'], ) db.session.add(command) db.session.flush() result = serialize(command) assert result['id'] == command.id.hex assert result['dateCreated'] == '2013-09-19T22:15:22' assert result['cwd'] == command.cwd assert result['env'] == {'foo': 'bar'} assert result['script'] == command.script
Add tests for env serialization
Add tests for env serialization
Python
apache-2.0
dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes
from datetime import datetime from changes.api.serializer import serialize from changes.config import db from changes.models import Command from changes.testutils import TestCase class CommandSerializerTest(TestCase): def test_simple(self): project = self.create_project() build = self.create_build(project) job = self.create_job(build) jobphase = self.create_jobphase(job) jobstep = self.create_jobstep(jobphase) command = Command( label='echo 1', jobstep_id=jobstep.id, cwd='/home/foobar', + env={'foo': 'bar'}, script='echo 1', date_created=datetime(2013, 9, 19, 22, 15, 22), artifacts=['junit.xml'], ) db.session.add(command) db.session.flush() result = serialize(command) assert result['id'] == command.id.hex assert result['dateCreated'] == '2013-09-19T22:15:22' assert result['cwd'] == command.cwd + assert result['env'] == {'foo': 'bar'} assert result['script'] == command.script
Add tests for env serialization
## Code Before: from datetime import datetime from changes.api.serializer import serialize from changes.config import db from changes.models import Command from changes.testutils import TestCase class CommandSerializerTest(TestCase): def test_simple(self): project = self.create_project() build = self.create_build(project) job = self.create_job(build) jobphase = self.create_jobphase(job) jobstep = self.create_jobstep(jobphase) command = Command( label='echo 1', jobstep_id=jobstep.id, cwd='/home/foobar', script='echo 1', date_created=datetime(2013, 9, 19, 22, 15, 22), artifacts=['junit.xml'], ) db.session.add(command) db.session.flush() result = serialize(command) assert result['id'] == command.id.hex assert result['dateCreated'] == '2013-09-19T22:15:22' assert result['cwd'] == command.cwd assert result['script'] == command.script ## Instruction: Add tests for env serialization ## Code After: from datetime import datetime from changes.api.serializer import serialize from changes.config import db from changes.models import Command from changes.testutils import TestCase class CommandSerializerTest(TestCase): def test_simple(self): project = self.create_project() build = self.create_build(project) job = self.create_job(build) jobphase = self.create_jobphase(job) jobstep = self.create_jobstep(jobphase) command = Command( label='echo 1', jobstep_id=jobstep.id, cwd='/home/foobar', env={'foo': 'bar'}, script='echo 1', date_created=datetime(2013, 9, 19, 22, 15, 22), artifacts=['junit.xml'], ) db.session.add(command) db.session.flush() result = serialize(command) assert result['id'] == command.id.hex assert result['dateCreated'] == '2013-09-19T22:15:22' assert result['cwd'] == command.cwd assert result['env'] == {'foo': 'bar'} assert result['script'] == command.script
// ... existing code ... cwd='/home/foobar', env={'foo': 'bar'}, script='echo 1', // ... modified code ... assert result['cwd'] == command.cwd assert result['env'] == {'foo': 'bar'} assert result['script'] == command.script // ... rest of the code ...
d4db750d2ff2e18c9fced49fffe7a3073880078b
InvenTree/common/apps.py
InvenTree/common/apps.py
from django.apps import AppConfig class CommonConfig(AppConfig): name = 'common' def ready(self): pass
import logging from django.apps import AppConfig logger = logging.getLogger('inventree') class CommonConfig(AppConfig): name = 'common' def ready(self): self.clear_restart_flag() def clear_restart_flag(self): """ Clear the SERVER_RESTART_REQUIRED setting """ try: import common.models if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): logger.info("Clearing SERVER_RESTART_REQUIRED flag") common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) except: pass
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
Python
mit
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
+ + import logging from django.apps import AppConfig + + + logger = logging.getLogger('inventree') class CommonConfig(AppConfig): name = 'common' def ready(self): - pass + + self.clear_restart_flag() + def clear_restart_flag(self): + """ + Clear the SERVER_RESTART_REQUIRED setting + """ + + try: + import common.models + + if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): + logger.info("Clearing SERVER_RESTART_REQUIRED flag") + common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) + except: + pass +
Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads
## Code Before: from django.apps import AppConfig class CommonConfig(AppConfig): name = 'common' def ready(self): pass ## Instruction: Clear the SERVER_RESTART_REQUIRED flag automatically when the server reloads ## Code After: import logging from django.apps import AppConfig logger = logging.getLogger('inventree') class CommonConfig(AppConfig): name = 'common' def ready(self): self.clear_restart_flag() def clear_restart_flag(self): """ Clear the SERVER_RESTART_REQUIRED setting """ try: import common.models if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): logger.info("Clearing SERVER_RESTART_REQUIRED flag") common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) except: pass
// ... existing code ... import logging // ... modified code ... from django.apps import AppConfig logger = logging.getLogger('inventree') ... def ready(self): self.clear_restart_flag() def clear_restart_flag(self): """ Clear the SERVER_RESTART_REQUIRED setting """ try: import common.models if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED'): logger.info("Clearing SERVER_RESTART_REQUIRED flag") common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None) except: pass // ... rest of the code ...
97f507ab5869c306ed468c683ca6e6e9b3266f5e
tests/tests/models/authorization_token.py
tests/tests/models/authorization_token.py
from django.test import TestCase from django.contrib.auth.models import User from doac.models import AuthorizationToken, Client, RefreshToken, Scope class TestAuthorizationTokenModel(TestCase): def setUp(self): self.oclient = Client(name="Test Client", access_host="http://localhost/") self.oclient.save() self.scope = Scope(short_name="test", full_name="Test Scope", description="Scope for testing") self.scope.save() self.user = User(username="test", password="test", email="test@test.com") self.user.save() self.token = AuthorizationToken(client=self.oclient, user=self.user) self.token.save() self.token.scope = [self.scope] self.token.save() def test_unicode(self): self.assertEqual(unicode(self.token), self.token.token) def test_generate_refresh_token(self): rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsInstance(rt, RefreshToken) rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt) self.token.is_active = True rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt)
from django.test import TestCase from django.contrib.auth.models import User from doac.models import AuthorizationToken, Client, RefreshToken, Scope class TestAuthorizationTokenModel(TestCase): def setUp(self): self.oclient = Client(name="Test Client", access_host="http://localhost/") self.oclient.save() self.scope = Scope(short_name="test", full_name="Test Scope", description="Scope for testing") self.scope.save() self.user = User(username="test", password="test", email="test@test.com") self.user.save() self.token = AuthorizationToken(client=self.oclient, user=self.user) self.token.save() self.token.scope = [self.scope] self.token.save() def test_unicode(self): self.assertEqual(unicode(self.token), self.token.token) def test_generate_refresh_token_creates(self): rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsInstance(rt, RefreshToken) def test_generate_refresh_token_no_create_twice(self): self.token.generate_refresh_token() rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt) def test_generate_refresh_token_never_creates_twice(self): self.token.generate_refresh_token() self.token.is_active = True rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt)
Split up the tests for AuthorizationToken
Split up the tests for AuthorizationToken
Python
mit
Rediker-Software/doac
from django.test import TestCase from django.contrib.auth.models import User from doac.models import AuthorizationToken, Client, RefreshToken, Scope class TestAuthorizationTokenModel(TestCase): def setUp(self): self.oclient = Client(name="Test Client", access_host="http://localhost/") self.oclient.save() - + self.scope = Scope(short_name="test", full_name="Test Scope", description="Scope for testing") self.scope.save() - + self.user = User(username="test", password="test", email="test@test.com") self.user.save() - + self.token = AuthorizationToken(client=self.oclient, user=self.user) self.token.save() - + self.token.scope = [self.scope] self.token.save() def test_unicode(self): self.assertEqual(unicode(self.token), self.token.token) - + - def test_generate_refresh_token(self): + def test_generate_refresh_token_creates(self): rt = self.token.generate_refresh_token() - + self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsInstance(rt, RefreshToken) - + + def test_generate_refresh_token_no_create_twice(self): + self.token.generate_refresh_token() rt = self.token.generate_refresh_token() + - - self.assertEqual(RefreshToken.objects.count(), 1) - self.assertIsNone(rt) - - self.token.is_active = True - rt = self.token.generate_refresh_token() - self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt) + def test_generate_refresh_token_never_creates_twice(self): + self.token.generate_refresh_token() + self.token.is_active = True + rt = self.token.generate_refresh_token() + + self.assertEqual(RefreshToken.objects.count(), 1) + self.assertIsNone(rt) +
Split up the tests for AuthorizationToken
## Code Before: from django.test import TestCase from django.contrib.auth.models import User from doac.models import AuthorizationToken, Client, RefreshToken, Scope class TestAuthorizationTokenModel(TestCase): def setUp(self): self.oclient = Client(name="Test Client", access_host="http://localhost/") self.oclient.save() self.scope = Scope(short_name="test", full_name="Test Scope", description="Scope for testing") self.scope.save() self.user = User(username="test", password="test", email="test@test.com") self.user.save() self.token = AuthorizationToken(client=self.oclient, user=self.user) self.token.save() self.token.scope = [self.scope] self.token.save() def test_unicode(self): self.assertEqual(unicode(self.token), self.token.token) def test_generate_refresh_token(self): rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsInstance(rt, RefreshToken) rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt) self.token.is_active = True rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt) ## Instruction: Split up the tests for AuthorizationToken ## Code After: from django.test import TestCase from django.contrib.auth.models import User from doac.models import AuthorizationToken, Client, RefreshToken, Scope class TestAuthorizationTokenModel(TestCase): def setUp(self): self.oclient = Client(name="Test Client", access_host="http://localhost/") self.oclient.save() self.scope = Scope(short_name="test", full_name="Test Scope", description="Scope for testing") self.scope.save() self.user = User(username="test", password="test", email="test@test.com") self.user.save() self.token = AuthorizationToken(client=self.oclient, user=self.user) self.token.save() self.token.scope = [self.scope] self.token.save() def test_unicode(self): self.assertEqual(unicode(self.token), self.token.token) def test_generate_refresh_token_creates(self): rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsInstance(rt, RefreshToken) def test_generate_refresh_token_no_create_twice(self): self.token.generate_refresh_token() rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt) def test_generate_refresh_token_never_creates_twice(self): self.token.generate_refresh_token() self.token.is_active = True rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) self.assertIsNone(rt)
// ... existing code ... self.oclient.save() self.scope = Scope(short_name="test", full_name="Test Scope", description="Scope for testing") // ... modified code ... self.scope.save() self.user = User(username="test", password="test", email="test@test.com") ... self.user.save() self.token = AuthorizationToken(client=self.oclient, user=self.user) ... self.token.save() self.token.scope = [self.scope] ... self.assertEqual(unicode(self.token), self.token.token) def test_generate_refresh_token_creates(self): rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) ... self.assertIsInstance(rt, RefreshToken) def test_generate_refresh_token_no_create_twice(self): self.token.generate_refresh_token() rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) ... self.assertIsNone(rt) def test_generate_refresh_token_never_creates_twice(self): self.token.generate_refresh_token() self.token.is_active = True ... rt = self.token.generate_refresh_token() self.assertEqual(RefreshToken.objects.count(), 1) // ... rest of the code ...
af3515c8354dd525c2889eda75bfbc5cb7e2ecbf
massa/errors.py
massa/errors.py
from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify({'message': e.message}), 404 def invalid_input_handler(e): return jsonify({'message': e.message, 'details': e.details}), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify(e.as_dict()), 404 def invalid_input_handler(e): return jsonify(e.as_dict()), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
Add method to retrieve the DomainError as a dict.
Add method to retrieve the DomainError as a dict.
Python
mit
jaapverloop/massa
from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): - return jsonify({'message': e.message}), 404 + return jsonify(e.as_dict()), 404 def invalid_input_handler(e): - return jsonify({'message': e.message, 'details': e.details}), 400 + return jsonify(e.as_dict()), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details + + def as_dict(self): + data = {} + if self.message: data['message'] = self.message + if self.details: data['details'] = self.details + return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
Add method to retrieve the DomainError as a dict.
## Code Before: from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify({'message': e.message}), 404 def invalid_input_handler(e): return jsonify({'message': e.message, 'details': e.details}), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.' ## Instruction: Add method to retrieve the DomainError as a dict. ## Code After: from flask import jsonify def register_error_handlers(app): app.register_error_handler(EntityNotFoundError, entity_not_found_handler) app.register_error_handler(InvalidInputError, invalid_input_handler) def entity_not_found_handler(e): return jsonify(e.as_dict()), 404 def invalid_input_handler(e): return jsonify(e.as_dict()), 400 class DomainError(Exception): def __init__(self, message=None, details=None): if message: self.message = message if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data class EntityNotFoundError(DomainError): """Raised when an entity does not exist.""" message = 'Entity does not exist.' class InvalidInputError(DomainError): """Raised when input data is invalid.""" message = 'Input data is invalid.'
// ... existing code ... def entity_not_found_handler(e): return jsonify(e.as_dict()), 404 // ... modified code ... def invalid_input_handler(e): return jsonify(e.as_dict()), 400 ... if details: self.details = details def as_dict(self): data = {} if self.message: data['message'] = self.message if self.details: data['details'] = self.details return data // ... rest of the code ...
1cff4ec8cdac7253be979936a1b06c5bc8264195
misc/sample_project/ser/snake.py
misc/sample_project/ser/snake.py
from __future__ import unicode_literals __all__ = ('is_snake',) def is_snake(word): if not word.isalpha(): raise ValueError("String '{}' is not a word") if word.lower() == 'python': return True if word.lower() == 'питон': return True return False def _parse_args(args=None): """Parse command line arguments""" parser = argparse.ArgumentParser(description='Check if animal is snake') parser.add_argument('word', help='an animal to check') namespace = parser.parse_args() return namespace def main(): """Entry-point for ser module""" word = _parse_args().word if is_snake(word): print('{} is a snake'.format(word)) return print('{} is not a snake'.format(word)) def plot(): """Plot a snake""" import math import matplotlib.pyplot as plt x = [i / 10 for i in range(100)] plt.plot(x, [math.sin(a) for a in x]) plt.show() plt.close()
from __future__ import unicode_literals __all__ = ('is_snake',) def is_snake(word): """Checks if an animal is a snake Parameters ---------- word : str Animal name Returns ------- bool Example ------- Check if a bear is a snake >>> from ser.snake import is_snake >>> >>> if is_snake('bear'): ... print('Shhhh') ... else: ... print('Argh') Argh """ if not word.isalpha(): raise ValueError("String '{}' is not a word") if word.lower() == 'python': return True if word.lower() == 'питон': return True return False def _parse_args(args=None): """Parse command line arguments""" parser = argparse.ArgumentParser(description='Check if animal is snake') parser.add_argument('word', help='an animal to check') namespace = parser.parse_args() return namespace def main(): """Entry-point for ser module""" word = _parse_args().word if is_snake(word): print('{} is a snake'.format(word)) return print('{} is not a snake'.format(word)) def plot(): """Plot a snake""" import math import matplotlib.pyplot as plt x = [i / 10 for i in range(100)] plt.plot(x, [math.sin(a) for a in x]) plt.show() plt.close()
Add doctest to the sample project
Add doctest to the sample project
Python
mit
hombit/scientific_python,hombit/scientific_python,hombit/scientific_python,hombit/scientific_python,hombit/scientific_python
from __future__ import unicode_literals __all__ = ('is_snake',) def is_snake(word): + """Checks if an animal is a snake + + Parameters + ---------- + word : str + Animal name + + Returns + ------- + bool + + Example + ------- + Check if a bear is a snake + + >>> from ser.snake import is_snake + >>> + >>> if is_snake('bear'): + ... print('Shhhh') + ... else: + ... print('Argh') + Argh + + """ if not word.isalpha(): raise ValueError("String '{}' is not a word") if word.lower() == 'python': return True if word.lower() == 'питон': return True return False def _parse_args(args=None): """Parse command line arguments""" parser = argparse.ArgumentParser(description='Check if animal is snake') parser.add_argument('word', help='an animal to check') namespace = parser.parse_args() return namespace def main(): """Entry-point for ser module""" word = _parse_args().word if is_snake(word): print('{} is a snake'.format(word)) return print('{} is not a snake'.format(word)) def plot(): """Plot a snake""" import math import matplotlib.pyplot as plt x = [i / 10 for i in range(100)] plt.plot(x, [math.sin(a) for a in x]) plt.show() plt.close()
Add doctest to the sample project
## Code Before: from __future__ import unicode_literals __all__ = ('is_snake',) def is_snake(word): if not word.isalpha(): raise ValueError("String '{}' is not a word") if word.lower() == 'python': return True if word.lower() == 'питон': return True return False def _parse_args(args=None): """Parse command line arguments""" parser = argparse.ArgumentParser(description='Check if animal is snake') parser.add_argument('word', help='an animal to check') namespace = parser.parse_args() return namespace def main(): """Entry-point for ser module""" word = _parse_args().word if is_snake(word): print('{} is a snake'.format(word)) return print('{} is not a snake'.format(word)) def plot(): """Plot a snake""" import math import matplotlib.pyplot as plt x = [i / 10 for i in range(100)] plt.plot(x, [math.sin(a) for a in x]) plt.show() plt.close() ## Instruction: Add doctest to the sample project ## Code After: from __future__ import unicode_literals __all__ = ('is_snake',) def is_snake(word): """Checks if an animal is a snake Parameters ---------- word : str Animal name Returns ------- bool Example ------- Check if a bear is a snake >>> from ser.snake import is_snake >>> >>> if is_snake('bear'): ... print('Shhhh') ... else: ... print('Argh') Argh """ if not word.isalpha(): raise ValueError("String '{}' is not a word") if word.lower() == 'python': return True if word.lower() == 'питон': return True return False def _parse_args(args=None): """Parse command line arguments""" parser = argparse.ArgumentParser(description='Check if animal is snake') parser.add_argument('word', help='an animal to check') namespace = parser.parse_args() return namespace def main(): """Entry-point for ser module""" word = _parse_args().word if is_snake(word): print('{} is a snake'.format(word)) return print('{} is not a snake'.format(word)) def plot(): """Plot a snake""" import math import matplotlib.pyplot as plt x = [i / 10 for i in range(100)] plt.plot(x, [math.sin(a) for a in x]) plt.show() plt.close()
... def is_snake(word): """Checks if an animal is a snake Parameters ---------- word : str Animal name Returns ------- bool Example ------- Check if a bear is a snake >>> from ser.snake import is_snake >>> >>> if is_snake('bear'): ... print('Shhhh') ... else: ... print('Argh') Argh """ if not word.isalpha(): ...
194557f236016ec0978e5cc465ba40e7b8dff714
s3backup/main.py
s3backup/main.py
from s3backup.clients import compare, LocalSyncClient def sync(): local_client = LocalSyncClient('/home/michael/Notebooks') current = local_client.get_current_state() index = local_client.get_index_state() print(list(compare(current, index))) local_client.update_index()
import os from s3backup.clients import compare, LocalSyncClient def sync(): target_folder = os.path.expanduser('~/Notebooks') local_client = LocalSyncClient(target_folder) current = local_client.get_current_state() index = local_client.get_index_state() print(list(compare(current, index))) local_client.update_index()
Use expanduser to prevent hardcoding username
Use expanduser to prevent hardcoding username
Python
mit
MichaelAquilina/s3backup,MichaelAquilina/s3backup
+ + import os from s3backup.clients import compare, LocalSyncClient def sync(): - local_client = LocalSyncClient('/home/michael/Notebooks') + target_folder = os.path.expanduser('~/Notebooks') + + local_client = LocalSyncClient(target_folder) current = local_client.get_current_state() index = local_client.get_index_state() print(list(compare(current, index))) local_client.update_index()
Use expanduser to prevent hardcoding username
## Code Before: from s3backup.clients import compare, LocalSyncClient def sync(): local_client = LocalSyncClient('/home/michael/Notebooks') current = local_client.get_current_state() index = local_client.get_index_state() print(list(compare(current, index))) local_client.update_index() ## Instruction: Use expanduser to prevent hardcoding username ## Code After: import os from s3backup.clients import compare, LocalSyncClient def sync(): target_folder = os.path.expanduser('~/Notebooks') local_client = LocalSyncClient(target_folder) current = local_client.get_current_state() index = local_client.get_index_state() print(list(compare(current, index))) local_client.update_index()
... import os ... def sync(): target_folder = os.path.expanduser('~/Notebooks') local_client = LocalSyncClient(target_folder) current = local_client.get_current_state() ...
cd944a2606159c8ea11ffe8075ce4ec186fd799c
tests/basic_test.py
tests/basic_test.py
import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None")
import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
Update tests to use class-based interface
Update tests to use class-based interface
Python
mit
AlterCodex/nxppy,Schoberm/nxppy,AlterCodex/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,AlterCodex/nxppy
import unittest - from either_or import either_or + from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy + reader = nxppy.Mifare() + self.assertIsInstance(reader, nxppy.Mifare) - self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") + self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy + reader = nxppy.Mifare() + self.assertIsInstance(reader, nxppy.Mifare) - self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") + self.assertIsNone(reader.select(), "Card UID is not None")
Update tests to use class-based interface
## Code Before: import unittest from either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy self.assertIsNone(nxppy.read_mifare(), "Card UID is not None") ## Instruction: Update tests to use class-based interface ## Code After: import unittest from tests.either_or import either_or class nxppyTests(unittest.TestCase): """Basic tests for the NXP Read Library python wrapper.""" def test_import(self): """Test that it can be imported""" import nxppy @either_or('detect') def test_detect_mifare_present(self): """Test that we can read the UID from a present Mifare card. Either this test or the "absent" test below will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") @either_or('detect') def test_detect_mifare_absent(self): """Test that an absent card results in a None response. Either this test or the "present" test above will pass, but never both. """ import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None")
... import unittest from tests.either_or import either_or ... import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsInstance(reader.select(), str, "Card UID is not a string") ... import nxppy reader = nxppy.Mifare() self.assertIsInstance(reader, nxppy.Mifare) self.assertIsNone(reader.select(), "Card UID is not None") ...
6ac70bb24b7fab272adb9805fa0509aa2282add4
pysswords/db.py
pysswords/db.py
from glob import glob import os from .credential import Credential from .crypt import create_gpg, load_gpg class Database(object): def __init__(self, path, gpg): self.path = path self.gpg = gpg @classmethod def create(cls, path, passphrase, gpg_bin="gpg"): gpg = create_gpg(gpg_bin, path, passphrase) return Database(path, gpg) @classmethod def from_path(cls, path, gpg_bin="gpg"): gpg = load_gpg(binary=gpg_bin, database_path=path) return Database(path, gpg) @property def gpg_key(self, secret=False): return self.gpg.list_keys(secret=secret)[0] def add(self, credential): encrypted_password = self.gpg.encrypt( credential.password, self.gpg_key ) credential.password = str(encrypted_password) credential.save(database_path=self.path) def credential(self, name): credential_path = os.path.join(self.path, name) credential = Credential.from_path(credential_path) return credential @property def credentials(self): return [self.credential(os.path.basename(c)) for c in glob(self.path + "/**")]
from glob import glob import os from .credential import Credential from .crypt import create_gpg, load_gpg class Database(object): def __init__(self, path, gpg): self.path = path self.gpg = gpg @classmethod def create(cls, path, passphrase, gpg_bin="gpg"): gpg = create_gpg(gpg_bin, path, passphrase) return Database(path, gpg) @classmethod def from_path(cls, path, gpg_bin="gpg"): gpg = load_gpg(binary=gpg_bin, database_path=path) return Database(path, gpg) @property def gpg_key(self): return self.gpg.list_keys(secret=True)[0]["fingerprint"] def add(self, credential): encrypted_password = self.gpg.encrypt( credential.password, self.gpg_key ) credential.password = str(encrypted_password) credential.save(database_path=self.path) def credential(self, name): credential_path = os.path.join(self.path, name) credential = Credential.from_path(credential_path) return credential @property def credentials(self): return [self.credential(os.path.basename(c)) for c in glob(self.path + "/**")]
Fix get gpg key from database
Fix get gpg key from database
Python
mit
scorphus/passpie,marcwebbie/pysswords,marcwebbie/passpie,scorphus/passpie,eiginn/passpie,eiginn/passpie,marcwebbie/passpie
from glob import glob import os from .credential import Credential from .crypt import create_gpg, load_gpg class Database(object): def __init__(self, path, gpg): self.path = path self.gpg = gpg @classmethod def create(cls, path, passphrase, gpg_bin="gpg"): gpg = create_gpg(gpg_bin, path, passphrase) return Database(path, gpg) @classmethod def from_path(cls, path, gpg_bin="gpg"): gpg = load_gpg(binary=gpg_bin, database_path=path) return Database(path, gpg) @property - def gpg_key(self, secret=False): + def gpg_key(self): - return self.gpg.list_keys(secret=secret)[0] + return self.gpg.list_keys(secret=True)[0]["fingerprint"] def add(self, credential): encrypted_password = self.gpg.encrypt( credential.password, self.gpg_key ) credential.password = str(encrypted_password) credential.save(database_path=self.path) def credential(self, name): credential_path = os.path.join(self.path, name) credential = Credential.from_path(credential_path) return credential @property def credentials(self): return [self.credential(os.path.basename(c)) for c in glob(self.path + "/**")]
Fix get gpg key from database
## Code Before: from glob import glob import os from .credential import Credential from .crypt import create_gpg, load_gpg class Database(object): def __init__(self, path, gpg): self.path = path self.gpg = gpg @classmethod def create(cls, path, passphrase, gpg_bin="gpg"): gpg = create_gpg(gpg_bin, path, passphrase) return Database(path, gpg) @classmethod def from_path(cls, path, gpg_bin="gpg"): gpg = load_gpg(binary=gpg_bin, database_path=path) return Database(path, gpg) @property def gpg_key(self, secret=False): return self.gpg.list_keys(secret=secret)[0] def add(self, credential): encrypted_password = self.gpg.encrypt( credential.password, self.gpg_key ) credential.password = str(encrypted_password) credential.save(database_path=self.path) def credential(self, name): credential_path = os.path.join(self.path, name) credential = Credential.from_path(credential_path) return credential @property def credentials(self): return [self.credential(os.path.basename(c)) for c in glob(self.path + "/**")] ## Instruction: Fix get gpg key from database ## Code After: from glob import glob import os from .credential import Credential from .crypt import create_gpg, load_gpg class Database(object): def __init__(self, path, gpg): self.path = path self.gpg = gpg @classmethod def create(cls, path, passphrase, gpg_bin="gpg"): gpg = create_gpg(gpg_bin, path, passphrase) return Database(path, gpg) @classmethod def from_path(cls, path, gpg_bin="gpg"): gpg = load_gpg(binary=gpg_bin, database_path=path) return Database(path, gpg) @property def gpg_key(self): return self.gpg.list_keys(secret=True)[0]["fingerprint"] def add(self, credential): encrypted_password = self.gpg.encrypt( credential.password, self.gpg_key ) credential.password = str(encrypted_password) credential.save(database_path=self.path) def credential(self, name): credential_path = os.path.join(self.path, name) credential = Credential.from_path(credential_path) return credential @property def credentials(self): return [self.credential(os.path.basename(c)) for c in glob(self.path + "/**")]
# ... existing code ... @property def gpg_key(self): return self.gpg.list_keys(secret=True)[0]["fingerprint"] # ... rest of the code ...
7ad1d9afdbf8db2960ac6b402f4da3f1675cc86f
fileupload/models.py
fileupload/models.py
from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ picture_file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.picture_file.name
from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.file.name
Use the same name for the field in frontend and backend
Use the same name for the field in frontend and backend
Python
mit
sigurdga/django-dropzone-upload,sigurdga/django-dropzone-upload
from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ - picture_file = models.ImageField(upload_to="pictures") + file = models.ImageField(upload_to="pictures") def __unicode__(self): - return self.picture_file.name + return self.file.name
Use the same name for the field in frontend and backend
## Code Before: from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ picture_file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.picture_file.name ## Instruction: Use the same name for the field in frontend and backend ## Code After: from django.db import models class Picture(models.Model): """ This is a small demo using just two fields. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ file = models.ImageField(upload_to="pictures") def __unicode__(self): return self.file.name
// ... existing code ... file = models.ImageField(upload_to="pictures") // ... modified code ... def __unicode__(self): return self.file.name // ... rest of the code ...