Dataset schema (column name, type, value-length range):

commit        string, 40–40
old_file      string, 4–118
new_file      string, 4–118
old_contents  string, 10–2.94k
new_contents  string, 21–3.18k
subject       string, 16–444
message       string, 17–2.63k
lang          string, 1 distinct value
license       string, 13 distinct values
repos         string, 5–43k
ndiff         string, 52–3.32k
instruction   string, 16–444
content       string, 133–4.32k
fuzzy_diff    string, 16–3.18k
commit: 2a0a29effa48caf5d95ed892d85cee235ebe1624
old_file: lamvery/utils.py
new_file: lamvery/utils.py

old_contents:

import os
import sys
import re
import shlex
import subprocess

from termcolor import cprint

ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')


def previous_alias(alias):
    return '{}-pre'.format(alias)


def parse_env_args(env):
    if not isinstance(env, list):
        return None
    ret = {}
    for e in env:
        matches = ENV_PATTERN.match(e)
        if matches is None:
            raise Exception(
                'The format of "env" option must be "NAME=VALUE": {}'.format(e))
        name = matches.group('name')
        value = matches.group('value')
        k, v = shlex.split('{} {}'.format(name, value))
        ret[k] = v
    return ret


def run_commands(commands, working_dir=os.getcwd()):
    cwd = os.getcwd()
    os.chdir(working_dir)
    for c in commands:
        try:
            subprocess.check_output(
                c, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            os.chdir(cwd)
            raise Exception(e.output)
    os.chdir(cwd)


def confirm_overwrite(path):
    ret = True
    if os.path.exists(path):
        cprint('Overwrite {}? [y/n]: '.format(path), 'yellow', file=sys.stderr, end="")
        y_n = sys.stdin.readline()
        if not y_n.startswith('y'):
            ret = False
    return ret

new_contents:

import os
import sys
import re
import shlex
import subprocess

ENV_PATTERN = re.compile('^(?P<name>[^\s]+)\s*=\s*(?P<value>.+)$')


def previous_alias(alias):
    return '{}-pre'.format(alias)


def parse_env_args(env):
    if not isinstance(env, list):
        return None
    ret = {}
    for e in env:
        matches = ENV_PATTERN.match(e)
        if matches is None:
            raise Exception(
                'The format of "env" option must be "NAME=VALUE": {}'.format(e))
        name = matches.group('name')
        value = matches.group('value')
        k, v = shlex.split('{} {}'.format(name, value))
        ret[k] = v
    return ret


def run_commands(commands, working_dir=os.getcwd()):
    cwd = os.getcwd()
    os.chdir(working_dir)
    for c in commands:
        try:
            subprocess.check_output(
                c, stderr=subprocess.STDOUT, shell=True)
        except subprocess.CalledProcessError as e:
            os.chdir(cwd)
            raise Exception(e.output)
    os.chdir(cwd)


def confirm_overwrite(path):
    ret = True
    if os.path.exists(path):
        print('Overwrite {}? [y/n]: '.format(path))
        y_n = sys.stdin.readline()
        if not y_n.startswith('y'):
            ret = False
    return ret

subject: Fix error when import lamvery in function
message: Fix error when import lamvery in function
lang: Python
license: mit
repos: marcy-terui/lamvery,marcy-terui/lamvery
instruction: Fix error when import lamvery in function
fuzzy_diff:

...
import subprocess
...
    if os.path.exists(path):
        print('Overwrite {}? [y/n]: '.format(path))
        y_n = sys.stdin.readline()
...
commit: 293d50438fab81e74ab4559df7a4f7aa7cfd8f03
old_file: etcdocker/container.py
new_file: etcdocker/container.py

old_contents:

import docker

from etcdocker import util


class Container:
    def __init__(self, name, params):
        self.name = name
        self.params = params

    def set_or_create_param(self, key, value):
        self.params[key] = value

    def ensure_running(self, force_restart=False):
        # Ensure container is running with specified params
        containers = util.get_containers()
        found = False

        for pc in containers:
            if "/%s" % self.name in pc['Names']:
                found = True
                full_image = "%s:%s" % (
                    self.params.get('image'), self.params.get('tag'))
                if (pc['Status'].startswith('Up') and
                        pc['Image'] == full_image and
                        not force_restart):
                    return
                break

        client = docker.Client()

        # Start our container
        if found:
            # Shut down old container first
            client.stop(self.name, 5)
            client.remove_container(self.name)

        # Create container with specified args
        client.create_container(
            image=self.params.get('image'),
            detach=True,
            volumes_from=self.params.get('volumes_from'),
            volumes=self.params.get('volumes'),
            name=self.name)

        # Start 'er up
        client.start(
            container=self.name,
            port_bindings=self.params.get('ports'),
            privileged=self.params.get('privileged'))

new_contents:

import ast
import docker

from etcdocker import util


class Container:
    def __init__(self, name, params):
        self.name = name
        self.params = params

    def set_or_create_param(self, key, value):
        self.params[key] = value

    def ensure_running(self, force_restart=False):
        # Ensure container is running with specified params
        containers = util.get_containers()
        found = False

        for pc in containers:
            if "/%s" % self.name in pc['Names']:
                found = True
                full_image = "%s:%s" % (
                    self.params.get('image'), self.params.get('tag'))
                if (pc['Status'].startswith('Up') and
                        pc['Image'] == full_image and
                        not force_restart):
                    return
                break

        client = docker.Client()

        # Start our container
        if found:
            # Shut down old container first
            client.stop(self.name, 5)
            client.remove_container(self.name)

        # Convert our ports into a dict if necessary
        ports = ast.literal_eval(self.params.get('ports'))

        # Create container with specified args
        client.create_container(
            image=self.params.get('image'),
            detach=True,
            volumes_from=self.params.get('volumes_from'),
            volumes=self.params.get('volumes'),
            ports=ports.keys(),
            name=self.name)

        # Start 'er up
        client.start(
            container=self.name,
            port_bindings=ports,
            privileged=self.params.get('privileged'))

subject: Convert port list to dict
message: Convert port list to dict
lang: Python
license: mit
repos: CloudBrewery/docrane
instruction: Convert port list to dict
fuzzy_diff:

// ... existing code ...
import ast
import docker
// ... modified code ...
        # Convert our ports into a dict if necessary
        ports = ast.literal_eval(self.params.get('ports'))

        # Create container with specified args
...
            volumes=self.params.get('volumes'),
            ports=ports.keys(),
            name=self.name)
...
            container=self.name,
            port_bindings=ports,
            privileged=self.params.get('privileged'))
// ... rest of the code ...
commit: 6d72a1d3b4bd2e1a11e2fb9744353e5d2d9c8863
old_file: setup.py
new_file: setup.py

old_contents:

from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [Extension("lulu_base", ["lulu_base.pyx"]),
                     Extension("ccomp", ["ccomp.pyx"])])

new_contents:

from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy

def cext(name):
    return Extension(name, [name + ".pyx"],
                     include_dirs=[numpy.get_include()])

setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [cext('lulu_base'), cext('ccomp')])

subject: Add NumPy includes dir for Cython builds.
message: Add NumPy includes dir for Cython builds.
lang: Python
license: bsd-3-clause
repos: stefanv/lulu
instruction: Add NumPy includes dir for Cython builds.
fuzzy_diff:

...
from Cython.Distutils import build_ext
import numpy

def cext(name):
    return Extension(name, [name + ".pyx"],
                     include_dirs=[numpy.get_include()])
...
setup(cmdclass = {'build_ext': build_ext},
      ext_modules = [cext('lulu_base'), cext('ccomp')])
...
commit: 14a085f787f5fe80a0737d97515b71adaf05d1cd
old_file: checker/checker/contest.py
new_file: checker/checker/contest.py

old_contents:

from checker.abstract import AbstractChecker
import base64
import sys
import codecs

class ContestChecker(AbstractChecker):
    def __init__(self, tick, team, service, ip):
        AbstractChecker.__init__(self, tick, team, service, ip)

    def _rpc(self, function, *args):
        sys.stdout.write("%s %s\n" % (function, " ".join(args)))
        sys.stdout.flush()
        return sys.stdin.readline().strip()

    def get_flag(self, tick, payload=None):
        if payload is None:
            return self._rpc("FLAG", str(tick))
        else:
            payload = codecs.encode(payload, 'hex').decode('latin-1')
            return self._rpc("FLAG", str(tick), payload)

    def store_blob(self, ident, blob):
        data = base64.b64encode(blob)
        return self._rpc("STORE", ident, base64.b64encode(data).decode('latin-1'))

    def retrieve_blob(self, ident):
        data = self._rpc("RETRIEVE", ident)
        return base64.b64decode(data)

new_contents:

from checker.abstract import AbstractChecker
import base64
import sys
import codecs

class ContestChecker(AbstractChecker):
    def __init__(self, tick, team, service, ip):
        AbstractChecker.__init__(self, tick, team, service, ip)

    def _rpc(self, function, *args):
        sys.stdout.write("%s %s\n" % (function, " ".join(args)))
        sys.stdout.flush()
        return sys.stdin.readline().strip()

    def get_flag(self, tick, payload=None):
        if payload is None:
            return self._rpc("FLAG", str(tick))
        else:
            payload = codecs.encode(payload, 'hex').decode('latin-1')
            return self._rpc("FLAG", str(tick), payload)

    def store_blob(self, ident, blob):
        data = base64.b64encode(blob)
        return self._rpc("STORE", ident, data.decode('latin-1'))

    def retrieve_blob(self, ident):
        data = self._rpc("RETRIEVE", ident)
        return base64.b64decode(data)

subject: Fix double-encoding of binary blobs
message: Fix double-encoding of binary blobs
lang: Python
license: isc
repos: fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
instruction: Fix double-encoding of binary blobs
fuzzy_diff:

# ... existing code ...
        data = base64.b64encode(blob)
        return self._rpc("STORE", ident, data.decode('latin-1'))
# ... rest of the code ...
commit: e50aee5973a2593546d1308b5ba77cd0905dd2be
old_file: app/models.py
new_file: app/models.py

old_contents:

import dataclasses

from ntuweather import Weather
from sqlalchemy import Table, Column, DateTime, Integer, Float
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class WeatherData(Base):
    """Represents a weather record saved in the database."""
    __tablename__ = 'weather_data'

    id = Column(Integer, primary_key=True)
    date = Column(DateTime(timezone=True), index=True)
    temperature = Column(Float)
    pressure = Column(Float)
    humidity = Column(Float)
    wind_speed = Column(Float)
    wind_direction = Column(Integer)
    rain_per_hour = Column(Float)
    rain_per_minute = Column(Float)
    ground_temperature = Column(Float)

    def __repr__(self):
        return f"<WeatherData(date='{self.date.isoformat()}', temperature={self.temperature})>"

    def weather(self):
        self_dict = {field.name: self.__dict__.get(field.name) for field in dataclasses.fields(Weather)}
        return Weather(**self_dict)

    @classmethod
    def fromweather(cls, weather):
        fields = dataclasses.asdict(weather)
        del fields['provider']  # We don’t store provider name as there would be only one.
        return cls(**fields)

new_contents:

import dataclasses

from ntuweather import Weather
from sqlalchemy import Table, Column, DateTime, Integer, Float
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class WeatherData(Base):
    """Represents a weather record saved in the database."""
    __tablename__ = 'weather_data'

    id = Column(Integer, primary_key=True)
    date = Column(DateTime(timezone=True), index=True)
    temperature = Column(Float)
    pressure = Column(Float)
    humidity = Column(Float)
    wind_speed = Column(Float)
    wind_direction = Column(Integer)
    rain_per_hour = Column(Float)
    rain_per_minute = Column(Float)
    ground_temperature = Column(Float)

    def __repr__(self):
        return f"<WeatherData(date='{self.date.isoformat()}', temperature={self.temperature})>"

    def weather(self):
        self_dict = {field.name: self.__dict__.get(field.name) for field in dataclasses.fields(Weather)}
        return Weather(**self_dict)

    @classmethod
    def fromweather(cls, weather):
        fields = dataclasses.asdict(weather)
        del fields['provider']  # We don’t store provider name as there would be only one.
        del fields['valid']  # We only store valid weather data, hence.
        return cls(**fields)

subject: Fix excessive fields in conversion
message: Fix excessive fields in conversion
lang: Python
license: agpl-3.0
repos: rschiang/ntu-weather,rschiang/ntu-weather
instruction: Fix excessive fields in conversion
fuzzy_diff:

# ... existing code ...
        del fields['provider']  # We don’t store provider name as there would be only one.
        del fields['valid']  # We only store valid weather data, hence.
        return cls(**fields)
# ... rest of the code ...
commit: da59d4334eb1a6f77bd0a9599614a6289ef843e4
old_file: pytest-server-fixtures/tests/integration/test_mongo_server.py
new_file: pytest-server-fixtures/tests/integration/test_mongo_server.py

old_contents:

import pytest


def test_mongo_server(mongo_server):
    assert mongo_server.check_server_up()
    assert mongo_server.delete
    mongo_server.api.db.test.insert_one({'a': 'b', 'c': 'd'})
    assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'}


@pytest.mark.parametrize('count', range(3))
def test_mongo_server_multi(count, mongo_server):
    coll = mongo_server.api.some_database.some_collection
    assert coll.count_documents({}) == 0
    coll.insert_one({'a': 'b'})
    assert coll.count_documents({}) == 1

new_contents:

import pytest


def test_mongo_server(mongo_server):
    assert mongo_server.check_server_up()
    assert mongo_server.delete
    mongo_server.api.db.test.insert({'a': 'b', 'c': 'd'})
    assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'}


@pytest.mark.parametrize('count', range(3))
def test_mongo_server_multi(count, mongo_server):
    coll = mongo_server.api.some_database.some_collection
    assert coll.count() == 0
    coll.insert({'a': 'b'})
    assert coll.count() == 1

subject: Revert "fix deprecation warnings in mongo"
message: Revert "fix deprecation warnings in mongo"
    This reverts commit 5d449ff9376e7c0a3c78f2b2d631ab0ecd08fe81.
lang: Python
license: mit
repos: manahl/pytest-plugins,manahl/pytest-plugins
Revert "fix deprecation warnings in mongo"
fuzzy_diff:

# ... existing code ...
    assert mongo_server.delete
    mongo_server.api.db.test.insert({'a': 'b', 'c': 'd'})
    assert mongo_server.api.db.test.find_one({'a': 'b'}, {'_id': False}) == {'a': 'b', 'c': 'd'}
# ... modified code ...
    coll = mongo_server.api.some_database.some_collection
    assert coll.count() == 0
    coll.insert({'a': 'b'})
    assert coll.count() == 1
# ... rest of the code ...
commit: a1bcb99691f5a0238f6a34a5579df3e89e8d6823
old_file: child_sync_gp/model/project_compassion.py
new_file: child_sync_gp/model/project_compassion.py

old_contents:

from openerp.osv import orm
from . import gp_connector


class project_compassion(orm.Model):
    _inherit = 'compassion.project'

    def write(self, cr, uid, ids, vals, context=None):
        """Update Project in GP."""
        res = super(project_compassion, self).write(cr, uid, ids, vals,
                                                    context)
        gp_connect = gp_connector.GPConnect()
        for project in self.browse(cr, uid, ids, context):
            gp_connect.upsert_project(uid, project)
        return res

new_contents:

from openerp.osv import orm
from . import gp_connector


class project_compassion(orm.Model):
    _inherit = 'compassion.project'

    def write(self, cr, uid, ids, vals, context=None):
        """Update Project in GP."""
        res = super(project_compassion, self).write(cr, uid, ids, vals,
                                                    context)
        if not isinstance(ids, list):
            ids = [ids]
        gp_connect = gp_connector.GPConnect()
        for project in self.browse(cr, uid, ids, context):
            gp_connect.upsert_project(uid, project)
        return res

subject: Fix bug in write project.
message: Fix bug in write project.
lang: Python
license: agpl-3.0
repos: CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
instruction: Fix bug in write project.
fuzzy_diff:

# ... existing code ...
                                                    context)
        if not isinstance(ids, list):
            ids = [ids]
        gp_connect = gp_connector.GPConnect()
# ... rest of the code ...
commit: 8235a217b50520093d549115fe09a8d4ff5e9191
old_file: webmanager/default_settings.py
new_file: webmanager/default_settings.py

old_contents:

INSTALLED_APPS += (
    'simplemenu',
    'webmanager',
    'bootstrapform',
    'userenabootstrap',
    'userena',
    # 'social_auth',
    'provider.oauth2',
)

TEMPLATE_CONTEXT_PROCESSORS += (
    'django.contrib.auth.context_processors.auth',
)

AUTHENTICATION_BACKENDS += (
    'userena.backends.UserenaAuthenticationBackend',
    'django.contrib.auth.backends.ModelBackend',
    'guardian.backends.ObjectPermissionBackend'
)

ANONYMOUS_USER_ID = -1
AUTH_PROFILE_MODULE = 'webmanager.MyProfile'
USERENA_SIGNIN_REDIRECT_URL = '/accounts/%(username)s/'
LOGIN_URL = '/accounts/signin/'
LOGOUT_URL = '/accounts/signout/'
#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
USERENA_ACTIVATION_REQUIRED = False
USERENA_SIGNIN_AFTER_SIGNUP = True

new_contents:

INSTALLED_APPS += (
    'provider',
    'provider.oauth2',
    'simplemenu',
    'webmanager',
    'bootstrapform',
    'userenabootstrap',
    'userena',
    # 'social_auth',
)

TEMPLATE_CONTEXT_PROCESSORS += (
    'django.contrib.auth.context_processors.auth',
)

AUTHENTICATION_BACKENDS += (
    'userena.backends.UserenaAuthenticationBackend',
    'django.contrib.auth.backends.ModelBackend',
    'guardian.backends.ObjectPermissionBackend'
)

ANONYMOUS_USER_ID = -1
AUTH_PROFILE_MODULE = 'webmanager.MyProfile'
USERENA_SIGNIN_REDIRECT_URL = '/accounts/%(username)s/'
LOGIN_URL = '/accounts/signin/'
LOGOUT_URL = '/accounts/signout/'
#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
USERENA_ACTIVATION_REQUIRED = False
USERENA_SIGNIN_AFTER_SIGNUP = True

subject: Fix provider oauth2 warning by import provider before oauth2 as described in the manual
message: Fix provider oauth2 warning by import provider before oauth2 as described in the manual
lang: Python
license: bsd-3-clause
repos: weijia/webmanager,weijia/webmanager,weijia/webmanager
instruction: Fix provider oauth2 warning by import provider before oauth2 as described in the manual
fuzzy_diff:

...
INSTALLED_APPS += (
    'provider',
    'provider.oauth2',
    'simplemenu',
...
    # 'social_auth',
)
...
commit: 3fbca600b1b90ad3499d941e178aae89d1c7df70
old_file: regulations/generator/layers/external_citation.py
new_file: regulations/generator/layers/external_citation.py

old_contents:

from django.template import loader

import utils
from regulations.generator.layers.base import SearchReplaceLayer


class ExternalCitationLayer(SearchReplaceLayer):
    shorthand = 'external'
    data_source = 'external-citations'

    def __init__(self, layer):
        self.layer = layer
        self.template = loader.get_template(
            'regulations/layers/external_citation.html')

    def replacements_for(self, text, data):
        yield utils.render_template(self.template, data)

new_contents:

from django.template import loader

from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer


class ExternalCitationLayer(SearchReplaceLayer):
    shorthand = 'external'
    data_source = 'external-citations'

    def __init__(self, layer):
        self.layer = layer
        self.template = loader.get_template(
            'regulations/layers/external_citation.html')

    def replacements_for(self, text, data):
        yield utils.render_template(self.template, data)

subject: Make external citations Python3 compatible
message: Make external citations Python3 compatible
lang: Python
license: cc0-1.0
repos: 18F/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,18F/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site
instruction: Make external citations Python3 compatible
fuzzy_diff:

# ... existing code ...
from django.template import loader

from regulations.generator.layers import utils
from regulations.generator.layers.base import SearchReplaceLayer
# ... rest of the code ...
commit: 30f8317838a2e984e54fe22042fd3ffff10f82e6
old_file: waterbutler/core/streams/file.py
new_file: waterbutler/core/streams/file.py

old_contents:

import os
import asyncio

from waterbutler.core.streams import BaseStream


class FileStreamReader(BaseStream):

    def __init__(self, file_pointer):
        super().__init__()
        self.file_gen = None
        self.file_pointer = file_pointer
        self.read_size = None
        self.content_type = 'application/octet-stream'

    @property
    def size(self):
        cursor = self.file_pointer.tell()
        self.file_pointer.seek(0, os.SEEK_END)
        ret = self.file_pointer.tell()
        self.file_pointer.seek(cursor)
        return ret

    def close(self):
        self.file_pointer.close()
        self.feed_eof()

    def read_as_gen(self):
        self.file_pointer.seek(0)
        while True:
            data = self.file_pointer.read(self.read_size)
            if not data:
                break
            yield data

    async def _read(self, size):
        self.file_gen = self.file_gen or self.read_as_gen()
        # add sleep of 0 so read will yield and continue in next io loop iteration
        await asyncio.sleep(0)
        self.read_size = size
        try:
            return next(self.file_gen)
        except StopIteration:
            self.feed_eof()
            return b''

new_contents:

import os

from waterbutler.core.streams import BaseStream


class FileStreamReader(BaseStream):

    def __init__(self, file_pointer):
        super().__init__()
        self.file_gen = None
        self.file_pointer = file_pointer
        self.read_size = None
        self.content_type = 'application/octet-stream'

    @property
    def size(self):
        cursor = self.file_pointer.tell()
        self.file_pointer.seek(0, os.SEEK_END)
        ret = self.file_pointer.tell()
        self.file_pointer.seek(cursor)
        return ret

    def close(self):
        self.file_pointer.close()
        self.feed_eof()

    def read_as_gen(self):
        self.file_pointer.seek(0)
        while True:
            chunk = self.file_pointer.read(self.read_size)
            if not chunk:
                self.feed_eof()
                chunk = b''
            yield chunk

    async def _read(self, size):
        self.file_gen = self.file_gen or self.read_as_gen()
        self.read_size = size
        return next(self.file_gen)

subject: Update FileStreamReader for new python 3.5 async
message: Update FileStreamReader for new python 3.5 async
lang: Python
license: apache-2.0
repos: RCOSDP/waterbutler,felliott/waterbutler,rdhyee/waterbutler,CenterForOpenScience/waterbutler,TomBaxter/waterbutler,Johnetordoff/waterbutler
instruction: Update FileStreamReader for new python 3.5 async
fuzzy_diff:

// ... existing code ...
import os

// ... modified code ...
        while True:
            chunk = self.file_pointer.read(self.read_size)
            if not chunk:
                self.feed_eof()
                chunk = b''
            yield chunk
...
        self.file_gen = self.file_gen or self.read_as_gen()
        self.read_size = size
        return next(self.file_gen)
// ... rest of the code ...
commit: 30044f8272557dbd367eab3dbe7c1ba1076484e9
old_file: readux/pages/models.py
new_file: readux/pages/models.py

old_contents:

from django.db import models

# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent

# Page.register_extensions('datepublisher', 'translations')  # Example set of extensions
# Page.register_extensions('changedate')  # in docs but not available

Page.register_templates({
    'title': _('Standard template'),
    'path': 'pages/base.html',
    'regions': (
        ('main', _('Main content area')),
        # ('sidebar', _('Sidebar'), 'inherited'),
    ),
})

Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
    ('default', _('default')),
    ('lightbox', _('lightbox')),
))

new_contents:

from django.db import models

# Create your models here.
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.models import Page
from feincms.content.richtext.models import RichTextContent
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent

# Page.register_extensions('datepublisher', 'translations')  # Example set of extensions
# Page.register_extensions('changedate')  # in docs but not available

Page.register_templates({
    'title': _('Standard template'),
    'path': 'pages/base.html',
    'regions': (
        ('main', _('Main content area')),
        # ('sidebar', _('Sidebar'), 'inherited'),
    ),
})

Page.create_content_type(RichTextContent)
Page.create_content_type(MediaFileContent, TYPE_CHOICES=(
    ('default', _('default')),
    ('lightbox', _('lightbox')),
))

Page.create_content_type(VideoContent)

subject: Enable video content for cms pages
message: Enable video content for cms pages
    [#110289088]
lang: Python
license: apache-2.0
repos: emory-libraries/readux,emory-libraries/readux,emory-libraries/readux
instruction: Enable video content for cms pages
fuzzy_diff:

// ... existing code ...
from feincms.content.medialibrary.models import MediaFileContent
from feincms.content.video.models import VideoContent

// ... modified code ...
))

Page.create_content_type(VideoContent)
// ... rest of the code ...
commit: 65e6c8466482464333e77a2892fd0ac33ab5c3cb
old_file: q_and_a/apps/token_auth/views.py
new_file: q_and_a/apps/token_auth/views.py

old_contents:

from django.views.generic import RedirectView
from django.views.generic.detail import SingleObjectMixin
from django.contrib.auth import login, authenticate, login
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse


class BaseAuthView(SingleObjectMixin, RedirectView):

    def get_redirect_url(self, *args, **kwargs):
        if not self.request.user.is_authenticated() \
                and not hasattr(self.request.user, 'organisation_id'):
            auth_user = authenticate(auth_token=self.kwargs['token'])
            if not auth_user:
                raise PermissionDenied()
            login(self.request, auth_user)
        return reverse('organisation_questions')

new_contents:

from django.views.generic import RedirectView
from django.views.generic.detail import SingleObjectMixin
from django.contrib.auth import login, authenticate
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse


class BaseAuthView(SingleObjectMixin, RedirectView):

    def get_redirect_url(self, *args, **kwargs):
        if (not self.request.user.is_authenticated()
                and not hasattr(self.request.user, 'organisation_id')):
            auth_user = authenticate(auth_token=self.kwargs['token'])
            if not auth_user:
                raise PermissionDenied()
            login(self.request, auth_user)
        return reverse('organisation_questions')

subject: Fix indent, PEP-8 style and remove dup import.
message: Fix indent, PEP-8 style and remove dup import.
lang: Python
license: bsd-3-clause
repos: DemocracyClub/candidate_questions,DemocracyClub/candidate_questions,DemocracyClub/candidate_questions
instruction: Fix indent, PEP-8 style and remove dup import.
fuzzy_diff:

// ... existing code ...
from django.views.generic.detail import SingleObjectMixin
from django.contrib.auth import login, authenticate
from django.core.exceptions import PermissionDenied
// ... modified code ...
    def get_redirect_url(self, *args, **kwargs):
        if (not self.request.user.is_authenticated()
                and not hasattr(self.request.user, 'organisation_id')):
            auth_user = authenticate(auth_token=self.kwargs['token'])
// ... rest of the code ...
commit: b4d9fb47e040b199f88cffb4a0b761c443f390b4
old_file: dduplicated/cli.py
new_file: dduplicated/cli.py

old_contents:

from os import path as opath, getcwd
from pprint import pprint
from sys import argv

from dduplicated import commands


def get_paths(params):
    paths = []
    for param in params:
        path = opath.join(getcwd(), param)
        if opath.exists(path) and opath.isdir(path) and not opath.islink(path):
            paths.append(path)
    return paths


def main():
    params = argv
    processed_files = []
    # Remove the command name
    del params[0]

    if len(params) == 0 or "help" in params:
        commands.help()
    elif "detect" in params:
        processed_files = commands.detect(get_paths(params))
    elif "delete" in params:
        processed_files = commands.delete(commands.detect(get_paths(params)))
    elif "link" in params:
        processed_files = commands.link(commands.detect(get_paths(params)))
    else:
        commands.help()

    if len(processed_files) > 0:
        pprint(processed_files)
    else:
        print("No duplicates found")
    print("Great! Bye!")
    exit(0)

new_contents:

from os import path as opath, getcwd
from pprint import pprint
from sys import argv

from dduplicated import commands


def get_paths(params):
    paths = []
    for param in params:
        path = opath.join(getcwd(), param)
        if opath.exists(path) and opath.isdir(path) and not opath.islink(path):
            paths.append(path)
    return paths


def main():
    params = argv
    processed_files = []
    # Remove the command name
    del params[0]

    if len(params) == 0 or "help" in params:
        commands.help()
    elif "detect" in params:
        processed_files = commands.detect(get_paths(params))
    elif "delete" in params:
        processed_files = commands.delete(commands.detect(get_paths(params)))
    elif "link" in params:
        processed_files = commands.link(commands.detect(get_paths(params)))
    else:
        commands.help()
        exit()

    if len(processed_files) > 0:
        pprint(processed_files)
    else:
        print("No duplicates found")
    print("Great! Bye!")
    exit(0)

subject: Update in output to terminal.
message: Update in output to terminal.
    Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>
lang: Python
license: mit
repos: messiasthi/dduplicated-cli
instruction: Update in output to terminal.
...
from sys import argv

from dduplicated import commands
...
        commands.help()
        exit()
...
536211012be24a20c34ef0af1fcc555672129354
byceps/util/system.py
byceps/util/system.py
import os


CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env(*, default=None):
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if env is None:
        if default is None:
            raise Exception(
                "No configuration environment was specified via the '{}' "
                "environment variable.".format(CONFIG_ENV_VAR_NAME))

        env = default

    return env
import os


CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if not env:
        raise Exception(
            "No configuration environment was specified via the '{}' "
            "environment variable.".format(CONFIG_ENV_VAR_NAME))

    return env
Remove default argument from function that reads the configuration name from the environment
Remove default argument from function that reads the configuration name from the environment
Python
bsd-3-clause
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps
import os


CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


- def get_config_env_name_from_env(*, default=None):
+ def get_config_env_name_from_env():
      """Return the configuration environment name set via environment
      variable.

      Raise an exception if it isn't set.
      """
      env = os.environ.get(CONFIG_ENV_VAR_NAME)

+     if not env:
-     if env is None:
-         if default is None:
-             raise Exception(
+         raise Exception(
-                 "No configuration environment was specified via the '{}' "
+             "No configuration environment was specified via the '{}' "
-                 "environment variable.".format(CONFIG_ENV_VAR_NAME))
+                 "environment variable.".format(CONFIG_ENV_VAR_NAME))
-
-         env = default

      return env
Remove default argument from function that reads the configuration name from the environment
## Code Before:
import os


CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env(*, default=None):
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if env is None:
        if default is None:
            raise Exception(
                "No configuration environment was specified via the '{}' "
                "environment variable.".format(CONFIG_ENV_VAR_NAME))

        env = default

    return env

## Instruction:
Remove default argument from function that reads the configuration name from the environment

## Code After:
import os


CONFIG_ENV_VAR_NAME = 'BYCEPS_CONFIG'


def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
    variable.

    Raise an exception if it isn't set.
    """
    env = os.environ.get(CONFIG_ENV_VAR_NAME)

    if not env:
        raise Exception(
            "No configuration environment was specified via the '{}' "
            "environment variable.".format(CONFIG_ENV_VAR_NAME))

    return env
// ... existing code ...
def get_config_env_name_from_env():
    """Return the configuration environment name set via environment
// ... modified code ...
    if not env:
        raise Exception(
            "No configuration environment was specified via the '{}' "
            "environment variable.".format(CONFIG_ENV_VAR_NAME))
// ... rest of the code ...
a8a56f20dd76f61ec1ea6e99037490922d5cbcb1
setup.py
setup.py
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
FIX missing Rules directory in package
FIX missing Rules directory in package
Python
mit
PatrikValkovic/grammpy
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
-     packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
+     packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
FIX missing Rules directory in package
## Code Before:
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)

## Instruction:
FIX missing Rules directory in package

## Code After:
from distutils.core import setup

setup(
    name='grammpy',
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
    license='GNU General Public License v3.0',
    author='Patrik Valkovic',
    download_url='https://github.com/PatrikValkovic/grammpy/archive/v1.0.1.tar.gz',
    author_email='patrik.valkovic@hotmail.cz',
    description='Package for representing formal grammars.'
)
...
    version='1.1.1',
    packages=['grammpy', 'grammpy.Grammars', 'grammpy.exceptions', 'grammpy.Rules'],
    url='https://github.com/PatrikValkovic/grammpy',
...
6c564ebe538d2723cc5f9397e09e5945796a257e
pyelevator/message.py
pyelevator/message.py
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
Fix : Range of len(1) have to be a tuple of tuples
Fix : Range of len(1) have to be a tuple of tuples
Python
mit
oleiade/py-elevator
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
-             if (len(self._datas) == 1):
+             if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
Fix : Range of len(1) have to be a tuple of tuples
## Code Before:
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }

## Instruction:
Fix : Range of len(1) have to be a tuple of tuples

## Code After:
import msgpack
import logging

from .constants import FAILURE_STATUS


class MessageFormatError(Exception):
    pass


class Request(object):
    """Handler objects for frontend->backend objects messages"""
    def __new__(cls, *args, **kwargs):
        content = {
            'DB_UID': kwargs.pop('db_uid'),
            'COMMAND': kwargs.pop('command'),
            'ARGS': kwargs.pop('args'),
        }
        return msgpack.packb(content)


class Response(object):
    def __init__(self, raw_message):
        self.error = None
        errors_logger = logging.getLogger("errors_logger")
        message = msgpack.unpackb(raw_message)

        try:
            self.status = message.pop('STATUS')
            self._datas = message.pop('DATAS')
        except KeyError:
            errors_logger.exception("Invalid response message : %s" % message)
            raise MessageFormatError("Invalid response message")

        self._handle_failures()

    @property
    def datas(self):
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
            return self._datas

    def _handle_failures(self):
        if self.status == FAILURE_STATUS:
            self.error = {
                'code': int(self.datas[0]),
                'msg': self.datas[1],
            }
// ... existing code ...
        if hasattr(self, '_datas') and self._datas is not None:
            if (len(self._datas) == 1) and not isinstance(self._datas[0], (tuple, list)):
                return self._datas[0]
// ... rest of the code ...
464bc1b511415459e99700b94101776d00b23796
indra/pre_assemble_for_db/pre_assemble_script.py
indra/pre_assemble_for_db/pre_assemble_script.py
import indra.tools.assemble_corpus as ac


def process_statements(stmts):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False)
    return stmts
import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts


def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False,
                               poolsize=num_procs)
    return stmts


def preassemble_db_stmts(db, num_procs, *clauses):
    """Run pre-assembly on a set of statements in the database."""
    stmts = get_statements(clauses, db=db, do_stmt_count=False)
    pa_stmts = process_statements(stmts, num_procs)
    insert_pa_stmts(db, pa_stmts)
    return pa_stmts
Create function to handle full pipeline.
Create function to handle full pipeline.
Python
bsd-2-clause
bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra
import indra.tools.assemble_corpus as ac
+ from indra.db.util import get_statements, insert_pa_stmts
+
- def process_statements(stmts):
+ def process_statements(stmts, num_procs=1):
      stmts = ac.map_grounding(stmts)
      stmts = ac.map_sequence(stmts)
-     stmts = ac.run_preassembly(stmts, return_toplevel=False)
+     stmts = ac.run_preassembly(stmts, return_toplevel=False,
+                                poolsize=num_procs)
      return stmts
+
+ def preassemble_db_stmts(db, num_procs, *clauses):
+     """Run pre-assembly on a set of statements in the database."""
+     stmts = get_statements(clauses, db=db, do_stmt_count=False)
+     pa_stmts = process_statements(stmts, num_procs)
+     insert_pa_stmts(db, pa_stmts)
+     return pa_stmts
+
Create function to handle full pipeline.
## Code Before:
import indra.tools.assemble_corpus as ac


def process_statements(stmts):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False)
    return stmts

## Instruction:
Create function to handle full pipeline.

## Code After:
import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts


def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False,
                               poolsize=num_procs)
    return stmts


def preassemble_db_stmts(db, num_procs, *clauses):
    """Run pre-assembly on a set of statements in the database."""
    stmts = get_statements(clauses, db=db, do_stmt_count=False)
    pa_stmts = process_statements(stmts, num_procs)
    insert_pa_stmts(db, pa_stmts)
    return pa_stmts
// ... existing code ...
import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts


def process_statements(stmts, num_procs=1):
    stmts = ac.map_grounding(stmts)
// ... modified code ...
    stmts = ac.map_sequence(stmts)
    stmts = ac.run_preassembly(stmts, return_toplevel=False,
                               poolsize=num_procs)
    return stmts


def preassemble_db_stmts(db, num_procs, *clauses):
    """Run pre-assembly on a set of statements in the database."""
    stmts = get_statements(clauses, db=db, do_stmt_count=False)
    pa_stmts = process_statements(stmts, num_procs)
    insert_pa_stmts(db, pa_stmts)
    return pa_stmts
// ... rest of the code ...
5b7a1a40ea43834feb5563f566d07bd5b31c589d
tests/test-recipes/metadata/always_include_files_glob/run_test.py
tests/test-recipes/metadata/always_include_files_glob/run_test.py
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
        'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}

if __name__ == '__main__':
    main()
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
        'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']

if __name__ == '__main__':
    main()
Add error messages to the asserts
Add error messages to the asserts
Python
bsd-3-clause
ilastik/conda-build,shastings517/conda-build,frol/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,ilastik/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,shastings517/conda-build,rmcgibbo/conda-build,shastings517/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,ilastik/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,frol/conda-build,frol/conda-build
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
        'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
-         assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
+         assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
-         assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}
+         assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']

if __name__ == '__main__':
    main()
Add error messages to the asserts
## Code Before:
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
        'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}

if __name__ == '__main__':
    main()

## Instruction:
Add error messages to the asserts

## Code After:
import os
import sys
import json


def main():
    prefix = os.environ['PREFIX']
    info_file = os.path.join(prefix, 'conda-meta',
        'always_include_files_regex-0.1-0.json')
    with open(info_file, 'r') as fh:
        info = json.load(fh)

    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']

if __name__ == '__main__':
    main()
...
    if sys.platform == 'darwin':
        assert set(info['files']) == {'lib/libpng.dylib', 'lib/libpng16.16.dylib', 'lib/libpng16.dylib'}, info['files']
    elif sys.platform.startswith('linux'):
        assert set(info['files']) == {'lib/libpng.so', 'lib/libpng16.so', 'lib/libpng16.so.16', 'lib/libpng16.so.16.17.0'}, info['files']
...
114eae527cce97423ec5cc5896a4728dc0764d2c
chunsabot/modules/images.py
chunsabot/modules/images.py
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain

RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")

def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))

@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, id_generator(), 'image_processing')
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain

RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")

def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))

@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
Fix some confusion of creating folders
Fix some confusion of creating folders
Python
mit
susemeee/Chunsabot-framework
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain

RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")

def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))

@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

-     path = os.path.join(brain.__temppath__, id_generator(), 'image_processing')
+     path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
Fix some confusion of creating folders
## Code Before:
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain

RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")

def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))

@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, id_generator(), 'image_processing')
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message

## Instruction:
Fix some confusion of creating folders

## Code After:
import os
import json
import shutil
import subprocess
import string
import random

from chunsabot.database import Database
from chunsabot.botlogic import brain

RNN_PATH = Database.load_config('rnn_library_path')
MODEL_PATH = os.path.join(RNN_PATH, "models/checkpoint_v1.t7_cpu.t7")

def id_generator(size=12, chars=string.ascii_lowercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))

@brain.route("@image")
def add_image_description(msg, extras):
    attachment = extras['attachment']
    if not attachment:
        return None

    path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
        os.mkdir(path)
    # Moving to temp path
    img = shutil.move(attachment, path)
    img_folder = os.path.dirname(img)

    result = subprocess.run(
        "th {}/eval.lua -model {} -gpuid -1 -image_folder {} -batch_size 1"\
        .format(RNN_PATH, MODEL_PATH, img_folder)
    )
    os.rmdir(img_folder)

    result_message = None
    with open(os.path.join(result, "vis/vis.json"), 'r') as output:
        json_output = json.loads(output)
        result_message = json_output[0]['caption']

    return result_message
# ... existing code ...
    path = os.path.join(brain.__temppath__, "{}_{}".format(id_generator(), 'image_processing'))
    if not os.path.isdir(path):
# ... rest of the code ...
c266fbd7a3478d582dc0d6c88fc5e3d8b7a8f62f
survey/views/survey_result.py
survey/views/survey_result.py
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        response['mimetype='] = 'application/force-download'
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
Fix - Apache error AH02429
Fix - Apache error AH02429

Response header name 'mimetype=' contains invalid characters, aborting request
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
-         response['mimetype='] = 'application/force-download'
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
Fix - Apache error AH02429
## Code Before:
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        response['mimetype='] = 'application/force-download'
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response

## Instruction:
Fix - Apache error AH02429

## Code After:
import datetime
import os

from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404

from survey.management.survey2csv import Survey2CSV
from survey.models import Survey


def serve_result_csv(request, pk):
    survey = get_object_or_404(Survey, pk=pk)
    try:
        latest_answer = survey.latest_answer_date()
        csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
        csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
        csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
        if latest_answer > csv_time:
            # If the file was generated before the last answer, generate it.
            Survey2CSV.generate_file(survey)
    except OSError:
        # If the file do not exist, generate it.
        Survey2CSV.generate_file(survey)
    with open(Survey2CSV.file_name(survey), 'r') as f:
        response = HttpResponse(f.read(), content_type='text/csv')
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
        response['Content-Disposition'] = cd
        return response
...
        response = HttpResponse(f.read(), content_type='text/csv')
        cd = u'attachment; filename="{}.csv"'.format(survey.name)
...
3a6d76201104b928c1b9053317c9e61804814ff5
pyresticd.py
pyresticd.py
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program

def do_restic_backup():
    print "\nStarting Backup at " + str(time.ctime())
    os.system(restic_command)

print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program

def do_restic_backup():
    print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)

print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
Use py3-style print and string-formatting
Use py3-style print and string-formatting
Python
mit
Mebus/pyresticd,Mebus/pyresticd
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

- # Configuration
+ # Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program

+
def do_restic_backup():
-     print "\nStarting Backup at " + str(time.ctime())
+     print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)

- print "\nRestic Scheduler\n----------------------------\n"
- print "Timout ist: " + str(timeout)
+ print('Restic Scheduler')
+ print('-' * 30)
+ print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
Use py3-style print and string-formatting
## Code Before:
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program

def do_restic_backup():
    print "\nStarting Backup at " + str(time.ctime())
    os.system(restic_command)

print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()

## Instruction:
Use py3-style print and string-formatting

## Code After:
import os
import getpass
import time

from twisted.internet import task
from twisted.internet import reactor

# Configuration

timeout = 3600*24*3  # Period
restic_command = "/home/mebus/restic"  # your restic command here

# Program

def do_restic_backup():
    print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)

print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password

l = task.LoopingCall(do_restic_backup)
l.start(timeout)

reactor.run()
# ... existing code ...
# Configuration
# ... modified code ...

def do_restic_backup():
    print('Starting Backup at {}'.format(time.ctime()))
    os.system(restic_command)
...
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
# ... rest of the code ...
5d332259e16758bc43201073db91409390be9134
UM/Operations/GroupedOperation.py
UM/Operations/GroupedOperation.py
from . import Operation


## An operation that groups several other operations together.
#
#  The intent of this operation is to hide an underlying chain of operations
#  from the user if they correspond to only one interaction with the user, such
#  as an operation applied to multiple scene nodes or a re-arrangement of
#  multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    #  The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    #  The operation will be undone together with the rest of the operations in
    #  this group.
    #  Note that when the order matters, the operations are undone in reverse
    #  order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Removes an operation from this group.
    def removeOperation(self, index):
        del self._children[index]

    ## Undo all operations in this group.
    #
    #  The operations are undone in reverse order as the order in which they
    #  were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
from . import Operation


## An operation that groups several other operations together.
#
#  The intent of this operation is to hide an underlying chain of operations
#  from the user if they correspond to only one interaction with the user, such
#  as an operation applied to multiple scene nodes or a re-arrangement of
#  multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    #  The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    #  The operation will be undone together with the rest of the operations in
    #  this group.
    #  Note that when the order matters, the operations are undone in reverse
    #  order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Undo all operations in this group.
    #
    #  The operations are undone in reverse order as the order in which they
    #  were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
Remove removeOperation from grouped operation
Remove removeOperation from grouped operation

This function is never used and actually should never be used. The operation may not be modified after it is used, so removing an operation from the list makes no sense.
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
from . import Operation


## An operation that groups several other operations together.
#
#  The intent of this operation is to hide an underlying chain of operations
#  from the user if they correspond to only one interaction with the user, such
#  as an operation applied to multiple scene nodes or a re-arrangement of
#  multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    #  The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    #  The operation will be undone together with the rest of the operations in
    #  this group.
    #  Note that when the order matters, the operations are undone in reverse
    #  order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

-     ## Removes an operation from this group.
-     def removeOperation(self, index):
-         del self._children[index]
-
    ## Undo all operations in this group.
    #
    #  The operations are undone in reverse order as the order in which they
    #  were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
Remove removeOperation from grouped operation
## Code Before:
from . import Operation


## An operation that groups several other operations together.
#
#  The intent of this operation is to hide an underlying chain of operations
#  from the user if they correspond to only one interaction with the user, such
#  as an operation applied to multiple scene nodes or a re-arrangement of
#  multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    #  The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    #  The operation will be undone together with the rest of the operations in
    #  this group.
    #  Note that when the order matters, the operations are undone in reverse
    #  order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Removes an operation from this group.
    def removeOperation(self, index):
        del self._children[index]

    ## Undo all operations in this group.
    #
    #  The operations are undone in reverse order as the order in which they
    #  were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()

## Instruction:
Remove removeOperation from grouped operation

## Code After:
from . import Operation


## An operation that groups several other operations together.
#
#  The intent of this operation is to hide an underlying chain of operations
#  from the user if they correspond to only one interaction with the user, such
#  as an operation applied to multiple scene nodes or a re-arrangement of
#  multiple items in the scene.
class GroupedOperation(Operation.Operation):
    ## Creates a new grouped operation.
    #
    #  The grouped operation is empty after its initialisation.
    def __init__(self):
        super().__init__()
        self._children = []

    ## Adds an operation to this group.
    #
    #  The operation will be undone together with the rest of the operations in
    #  this group.
    #  Note that when the order matters, the operations are undone in reverse
    #  order as the order in which they are added.
    def addOperation(self, op):
        self._children.append(op)

    ## Undo all operations in this group.
    #
    #  The operations are undone in reverse order as the order in which they
    #  were added.
    def undo(self):
        for op in reversed(self._children):
            op.undo()

    ## Redoes all operations in this group.
    def redo(self):
        for op in self._children:
            op.redo()
# ... existing code ...
    ## Undo all operations in this group.
    #
# ... rest of the code ...
3864ef6773000d516ee6542a11db3c3b636d5b49
test/framework/killer.py
test/framework/killer.py
from __future__ import print_function
import sys, os, signal, time, subprocess32


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")
    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        time.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
from __future__ import print_function
import sys, os, signal, time, subprocess32

sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")
    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        hyperspeed.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
Prepare kill test for mock - use hyperspeed
Prepare kill test for mock - use hyperspeed
Python
bsd-3-clause
lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow
from __future__ import print_function
import sys, os, signal, time, subprocess32
+
+ sys.path.append('../../..')
+ from jenkinsflow.mocked import hyperspeed


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")
    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
-         time.sleep(1)
+         hyperspeed.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
Prepare kill test for mock - use hyperspeed
## Code Before:
from __future__ import print_function
import sys, os, signal, time, subprocess32


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")
    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        time.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])

## Instruction:
Prepare kill test for mock - use hyperspeed

## Code After:
from __future__ import print_function
import sys, os, signal, time, subprocess32

sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed


def _killer(pid, sleep_time, num_kills):
    print("\nKiller going to sleep for", sleep_time, "seconds")
    time.sleep(sleep_time)
    print("\nKiller woke up")
    for ii in range(0, num_kills):
        os.kill(pid, signal.SIGTERM)
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        hyperspeed.sleep(1)


if __name__ == '__main__':
    _killer(int(sys.argv[1]), float(sys.argv[2]), int(sys.argv[3]))


def kill(sleep_time, num_kills):
    """Kill this process"""
    pid = os.getpid()
    print("kill, pid:", pid)
    subprocess32.Popen([sys.executable, __file__, repr(pid), repr(sleep_time), repr(num_kills)])
// ... existing code ...
import sys, os, signal, time, subprocess32

sys.path.append('../../..')
from jenkinsflow.mocked import hyperspeed
// ... modified code ...
        print("\nKiller sent", ii + 1, "of", num_kills, "SIGTERM signals to ", pid)
        hyperspeed.sleep(1)
// ... rest of the code ...
f2005fadb9fb2e2bcad32286a9d993c291c1992e
lazyblacksmith/models/api/industry_index.py
lazyblacksmith/models/api/industry_index.py
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
        if activity_string == 'reaction':
            return Activity.REACTIONS
Fix celery task for industry indexes by adding missing field
Fix celery task for industry indexes by adding missing field
Python
bsd-3-clause
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
from . import db from lazyblacksmith.models import Activity class IndustryIndex(db.Model): solarsystem_id = db.Column( db.Integer, db.ForeignKey('solar_system.id'), primary_key=True ) solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes')) activity = db.Column(db.Integer, primary_key=True, autoincrement=False) cost_index = db.Column( db.Numeric( precision=20, scale=19, decimal_return_scale=19, asdecimal=False ), nullable=True) @classmethod def activity_string_to_activity(cls, activity_string): if activity_string == 'invention': return Activity.INVENTION if activity_string == 'manufacturing': return Activity.MANUFACTURING if activity_string == 'researching_time_efficiency': return Activity.RESEARCH_TIME_EFFICIENCY if activity_string == 'researching_material_efficiency': return Activity.RESEARCH_MATERIAL_EFFICIENCY if activity_string == 'copying': return Activity.COPYING + if activity_string == 'reaction': + return Activity.REACTIONS
Fix celery task for industry indexes by adding missing field
## Code Before:
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING

## Instruction:
Fix celery task for industry indexes by adding missing field

## Code After:
from . import db

from lazyblacksmith.models import Activity


class IndustryIndex(db.Model):
    solarsystem_id = db.Column(
        db.Integer,
        db.ForeignKey('solar_system.id'),
        primary_key=True
    )
    solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
    activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
    cost_index = db.Column(
        db.Numeric(
            precision=20,
            scale=19,
            decimal_return_scale=19,
            asdecimal=False
        ),
        nullable=True)

    @classmethod
    def activity_string_to_activity(cls, activity_string):
        if activity_string == 'invention':
            return Activity.INVENTION
        if activity_string == 'manufacturing':
            return Activity.MANUFACTURING
        if activity_string == 'researching_time_efficiency':
            return Activity.RESEARCH_TIME_EFFICIENCY
        if activity_string == 'researching_material_efficiency':
            return Activity.RESEARCH_MATERIAL_EFFICIENCY
        if activity_string == 'copying':
            return Activity.COPYING
        if activity_string == 'reaction':
            return Activity.REACTIONS
// ... existing code ...
            return Activity.COPYING
        if activity_string == 'reaction':
            return Activity.REACTIONS
// ... rest of the code ...
497313620772c1cb0d520be1a0024c12ca02742e
tests/python_tests/fontset_test.py
tests/python_tests/fontset_test.py
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

def test_loading_fontset_from_python():
    m = mapnik.Map(256,256)
    fset = mapnik.FontSet('my-set')
    fset.add_face_name('Comic Sans')
    fset.add_face_name('Papyrus')
    m.append_fontset('my-set', fset)
    sty = mapnik.Style()
    rule = mapnik.Rule()
    tsym = mapnik.TextSymbolizer()
    tsym.fontset = fset
    rule.symbols.append(tsym)
    sty.rules.append(rule)
    m.append_style('Style',sty)
    serialized_map = mapnik.save_map_to_string(m)
    eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
Python
lgpl-2.1
Mappy/mapnik,qianwenming/mapnik,tomhughes/mapnik,jwomeara/mapnik,pnorman/mapnik,davenquinn/python-mapnik,yiqingj/work,pnorman/mapnik,Mappy/mapnik,yohanboniface/python-mapnik,mapycz/python-mapnik,jwomeara/mapnik,Mappy/mapnik,yiqingj/work,strk/mapnik,kapouer/mapnik,Mappy/mapnik,qianwenming/mapnik,lightmare/mapnik,garnertb/python-mapnik,strk/mapnik,cjmayo/mapnik,lightmare/mapnik,Uli1/mapnik,mapycz/python-mapnik,zerebubuth/mapnik,manz/python-mapnik,CartoDB/mapnik,rouault/mapnik,stefanklug/mapnik,garnertb/python-mapnik,yohanboniface/python-mapnik,cjmayo/mapnik,manz/python-mapnik,pramsey/mapnik,tomhughes/mapnik,mapnik/mapnik,kapouer/mapnik,qianwenming/mapnik,yiqingj/work,mapnik/python-mapnik,garnertb/python-mapnik,rouault/mapnik,pnorman/mapnik,naturalatlas/mapnik,Uli1/mapnik,tomhughes/mapnik,strk/mapnik,manz/python-mapnik,mbrukman/mapnik,davenquinn/python-mapnik,pramsey/mapnik,whuaegeanse/mapnik,Airphrame/mapnik,tomhughes/python-mapnik,tomhughes/python-mapnik,tomhughes/python-mapnik,lightmare/mapnik,zerebubuth/mapnik,zerebubuth/mapnik,Airphrame/mapnik,mbrukman/mapnik,CartoDB/mapnik,mapycz/mapnik,sebastic/python-mapnik,rouault/mapnik,kapouer/mapnik,Uli1/mapnik,mbrukman/mapnik,mapnik/mapnik,stefanklug/mapnik,mapnik/python-mapnik,lightmare/mapnik,Uli1/mapnik,kapouer/mapnik,jwomeara/mapnik,whuaegeanse/mapnik,CartoDB/mapnik,mapycz/mapnik,qianwenming/mapnik,davenquinn/python-mapnik,whuaegeanse/mapnik,yiqingj/work,strk/mapnik,pnorman/mapnik,rouault/mapnik,mapnik/python-mapnik,mbrukman/mapnik,pramsey/mapnik,naturalatlas/mapnik,cjmayo/mapnik,naturalatlas/mapnik,yohanboniface/python-mapnik,sebastic/python-mapnik,tomhughes/mapnik,cjmayo/mapnik,stefanklug/mapnik,jwomeara/mapnik,mapnik/mapnik,qianwenming/mapnik,whuaegeanse/mapnik,Airphrame/mapnik,mapnik/mapnik,Airphrame/mapnik,mapycz/mapnik,pramsey/mapnik,sebastic/python-mapnik,stefanklug/mapnik,naturalatlas/mapnik
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

+ def test_loading_fontset_from_python():
+     m = mapnik.Map(256,256)
+     fset = mapnik.FontSet('my-set')
+     fset.add_face_name('Comic Sans')
+     fset.add_face_name('Papyrus')
+     m.append_fontset('my-set', fset)
+     sty = mapnik.Style()
+     rule = mapnik.Rule()
+     tsym = mapnik.TextSymbolizer()
+     tsym.fontset = fset
+     rule.symbols.append(tsym)
+     sty.rules.append(rule)
+     m.append_style('Style',sty)
+     serialized_map = mapnik.save_map_to_string(m)
+     eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized
## Code Before:
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]

## Instruction:
Add a test (currently failing) ensuring that named fontsets created in python are propertly serialized

## Code After:
from nose.tools import *
from utilities import execution_path
import os, mapnik

def setup():
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))

def test_loading_fontset_from_map():
    m = mapnik.Map(256,256)
    mapnik.load_map(m,'../data/good_maps/fontset.xml',True)
    fs = m.find_fontset('book-fonts')
    eq_(len(fs.names),2)
    eq_(list(fs.names),['DejaVu Sans Book','DejaVu Sans Oblique'])

def test_loading_fontset_from_python():
    m = mapnik.Map(256,256)
    fset = mapnik.FontSet('my-set')
    fset.add_face_name('Comic Sans')
    fset.add_face_name('Papyrus')
    m.append_fontset('my-set', fset)
    sty = mapnik.Style()
    rule = mapnik.Rule()
    tsym = mapnik.TextSymbolizer()
    tsym.fontset = fset
    rule.symbols.append(tsym)
    sty.rules.append(rule)
    m.append_style('Style',sty)
    serialized_map = mapnik.save_map_to_string(m)
    eq_('fontset-name="my-set"' in serialized_map,True)

if __name__ == "__main__":
    setup()
    [eval(run)() for run in dir() if 'test_' in run]
... def test_loading_fontset_from_python(): m = mapnik.Map(256,256) fset = mapnik.FontSet('my-set') fset.add_face_name('Comic Sans') fset.add_face_name('Papyrus') m.append_fontset('my-set', fset) sty = mapnik.Style() rule = mapnik.Rule() tsym = mapnik.TextSymbolizer() tsym.fontset = fset rule.symbols.append(tsym) sty.rules.append(rule) m.append_style('Style',sty) serialized_map = mapnik.save_map_to_string(m) eq_('fontset-name="my-set"' in serialized_map,True) ...
85c509913cc9a6b22036c33eccb07277b39260e3
pygraphc/anomaly/AnomalyScore.py
pygraphc/anomaly/AnomalyScore.py
import csv from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction from pygraphc.clustering.ClusterUtility import ClusterUtility class AnomalyScore(object): """A class to calculate anomaly score in a cluster. """ def __init__(self, graph, clusters, filename): """The constructor of class AnomalyScore. Parameters ---------- graph : graph clusters : dict[list] filename : str """ self.graph = graph self.clusters = clusters self.filename = filename self.property = {} self.abstraction = {} def write_property(self): """Write cluster property to a file. """ # get cluster abstraction and its properties self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters) self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters) # write to csv f = open(self.filename + '_anomaly.csv', 'wt') writer = csv.writer(f) # set header header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys()) writer.writerow(header) # write data for cluster_id, abstract in self.abstraction.iteritems(): row = (cluster_id, abstract) + tuple(self.property[cluster_id].values()) writer.writerow(row)
import csv from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction from pygraphc.clustering.ClusterUtility import ClusterUtility class AnomalyScore(object): """A class to calculate anomaly score in a cluster. """ def __init__(self, graph, clusters, filename): """The constructor of class AnomalyScore. Parameters ---------- graph : graph A graph to be analyzed for its anomaly. clusters : dict[list] Dictionary of list containing node identifier for each clusters. filename : str Filename for anomaly detection result. """ self.graph = graph self.clusters = clusters self.filename = filename self.property = {} self.abstraction = {} def write_property(self): """Write cluster property to a file. """ # get cluster abstraction and its properties self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters) self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters) # write to csv f = open(self.filename + '_anomaly.csv', 'wt') writer = csv.writer(f) # set header header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys()) writer.writerow(header) # write data for cluster_id, abstract in self.abstraction.iteritems(): row = (cluster_id, abstract) + tuple(self.property[cluster_id].values()) writer.writerow(row)
Add description of Parameters section in docstring
Add description of Parameters section in docstring
Python
mit
studiawan/pygraphc
import csv - from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction from pygraphc.clustering.ClusterUtility import ClusterUtility class AnomalyScore(object): """A class to calculate anomaly score in a cluster. """ def __init__(self, graph, clusters, filename): """The constructor of class AnomalyScore. Parameters ---------- graph : graph + A graph to be analyzed for its anomaly. clusters : dict[list] + Dictionary of list containing node identifier for each clusters. filename : str + Filename for anomaly detection result. """ self.graph = graph self.clusters = clusters self.filename = filename self.property = {} self.abstraction = {} def write_property(self): """Write cluster property to a file. """ # get cluster abstraction and its properties self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters) self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters) # write to csv f = open(self.filename + '_anomaly.csv', 'wt') writer = csv.writer(f) # set header header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys()) writer.writerow(header) # write data for cluster_id, abstract in self.abstraction.iteritems(): row = (cluster_id, abstract) + tuple(self.property[cluster_id].values()) writer.writerow(row)
Add description of Parameters section in docstring
## Code Before: import csv from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction from pygraphc.clustering.ClusterUtility import ClusterUtility class AnomalyScore(object): """A class to calculate anomaly score in a cluster. """ def __init__(self, graph, clusters, filename): """The constructor of class AnomalyScore. Parameters ---------- graph : graph clusters : dict[list] filename : str """ self.graph = graph self.clusters = clusters self.filename = filename self.property = {} self.abstraction = {} def write_property(self): """Write cluster property to a file. """ # get cluster abstraction and its properties self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters) self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters) # write to csv f = open(self.filename + '_anomaly.csv', 'wt') writer = csv.writer(f) # set header header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys()) writer.writerow(header) # write data for cluster_id, abstract in self.abstraction.iteritems(): row = (cluster_id, abstract) + tuple(self.property[cluster_id].values()) writer.writerow(row) ## Instruction: Add description of Parameters section in docstring ## Code After: import csv from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction from pygraphc.clustering.ClusterUtility import ClusterUtility class AnomalyScore(object): """A class to calculate anomaly score in a cluster. """ def __init__(self, graph, clusters, filename): """The constructor of class AnomalyScore. Parameters ---------- graph : graph A graph to be analyzed for its anomaly. clusters : dict[list] Dictionary of list containing node identifier for each clusters. filename : str Filename for anomaly detection result. """ self.graph = graph self.clusters = clusters self.filename = filename self.property = {} self.abstraction = {} def write_property(self): """Write cluster property to a file. """ # get cluster abstraction and its properties self.abstraction = ClusterAbstraction.dp_lcs(self.graph, self.clusters) self.property = ClusterUtility.get_cluster_property(self.graph, self.clusters) # write to csv f = open(self.filename + '_anomaly.csv', 'wt') writer = csv.writer(f) # set header header = ('cluster_id', 'cluster_abstraction') + tuple(self.property[0].keys()) writer.writerow(header) # write data for cluster_id, abstract in self.abstraction.iteritems(): row = (cluster_id, abstract) + tuple(self.property[cluster_id].values()) writer.writerow(row)
... import csv from pygraphc.abstraction.ClusterAbstraction import ClusterAbstraction ... graph : graph A graph to be analyzed for its anomaly. clusters : dict[list] Dictionary of list containing node identifier for each clusters. filename : str Filename for anomaly detection result. """ ...
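The added Parameters block follows the NumPy docstring convention: one "name : type" line per argument, with an indented one-line description beneath it. A minimal sketch of the same convention on a hypothetical function (names and types here are illustrative, not taken from pygraphc):

def count_clustered_nodes(graph, clusters):
    """Count how many node identifiers the clustering covers.

    Parameters
    ----------
    graph : networkx.Graph
        The graph that was clustered; accepted only to mirror the
        documented signature.
    clusters : dict[list]
        Dictionary of lists containing node identifiers for each cluster.

    Returns
    -------
    int
        Total number of node identifiers across all clusters.
    """
    return sum(len(nodes) for nodes in clusters.values())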
7539a5445d24193395eed5dc658a4e69d8782736
buffpy/tests/test_profile.py
buffpy/tests/test_profile.py
from nose.tools import eq_ from mock import MagicMock, patch from buffpy.models.profile import Profile, PATHS mocked_response = { 'name': 'me', 'service': 'twiter', 'id': 1 } def test_profile_schedules_getter(): ''' Test schedules gettering from buffer api ''' mocked_api = MagicMock() mocked_api.get.return_value = '123' profile = Profile(mocked_api, mocked_response) eq_(profile.schedules, '123') mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1) def test_profile_schedules_setter(): ''' Test schedules setter from buffer api ''' mocked_api = MagicMock() mocked_api.get.return_value = '123' profile = Profile(mocked_api, mocked_response) profile.schedules = { 'times': ['mo'] } mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1, data='schedules[0][times][]=mo&') def test_profile_updates(): ''' Test updates relationship with a profile ''' mocked_api = MagicMock() with patch('buffpy.models.profile.Updates') as mocked_updates: profile = Profile(api=mocked_api, raw_response={'id': 1}) updates = profile.updates mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
from unittest.mock import MagicMock, patch from buffpy.models.profile import Profile, PATHS mocked_response = { "name": "me", "service": "twiter", "id": 1 } def test_profile_schedules_getter(): """ Should retrieve profiles from buffer's API. """ mocked_api = MagicMock() mocked_api.get.return_value = "123" profile = Profile(mocked_api, mocked_response) assert profile.schedules == "123" mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1")) def test_profile_schedules_setter(): """ Should update profile's schedules. """ mocked_api = MagicMock() mocked_api.get.return_value = "123" profile = Profile(mocked_api, mocked_response) profile.schedules = { "times": ["mo"] } mocked_api.post.assert_called_once_with( url=PATHS["UPDATE_SCHEDULES"].format("1"), data="schedules[0][times][]=mo&") def test_profile_updates(): """ Should properly call buffer's updates. """ mocked_api = MagicMock() with patch("buffpy.models.profile.Updates") as mocked_updates: profile = Profile(api=mocked_api, raw_response={"id": 1}) assert profile.updates mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
Migrate profile tests to pytest
Migrate profile tests to pytest
Python
mit
vtemian/buffpy
- from nose.tools import eq_ - from mock import MagicMock, patch + from unittest.mock import MagicMock, patch from buffpy.models.profile import Profile, PATHS + mocked_response = { - 'name': 'me', + "name": "me", - 'service': 'twiter', + "service": "twiter", - 'id': 1 + "id": 1 } + def test_profile_schedules_getter(): + """ Should retrieve profiles from buffer's API. """ - ''' - Test schedules gettering from buffer api - ''' - mocked_api = MagicMock() + mocked_api = MagicMock() - mocked_api.get.return_value = '123' + mocked_api.get.return_value = "123" - profile = Profile(mocked_api, mocked_response) + profile = Profile(mocked_api, mocked_response) - eq_(profile.schedules, '123') + assert profile.schedules == "123" - mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1) + mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1")) + def test_profile_schedules_setter(): + """ Should update profile's schedules. """ - ''' - Test schedules setter from buffer api - ''' - mocked_api = MagicMock() + mocked_api = MagicMock() - mocked_api.get.return_value = '123' + mocked_api.get.return_value = "123" - profile = Profile(mocked_api, mocked_response) + profile = Profile(mocked_api, mocked_response) - profile.schedules = { + profile.schedules = { - 'times': ['mo'] + "times": ["mo"] - } + } - mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1, + mocked_api.post.assert_called_once_with( + url=PATHS["UPDATE_SCHEDULES"].format("1"), - data='schedules[0][times][]=mo&') + data="schedules[0][times][]=mo&") + def test_profile_updates(): + """ Should properly call buffer's updates. """ - ''' - Test updates relationship with a profile - ''' - mocked_api = MagicMock() + mocked_api = MagicMock() - with patch('buffpy.models.profile.Updates') as mocked_updates: + with patch("buffpy.models.profile.Updates") as mocked_updates: - profile = Profile(api=mocked_api, raw_response={'id': 1}) + profile = Profile(api=mocked_api, raw_response={"id": 1}) - updates = profile.updates + assert profile.updates - mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1) + mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
Migrate profile tests to pytest
## Code Before: from nose.tools import eq_ from mock import MagicMock, patch from buffpy.models.profile import Profile, PATHS mocked_response = { 'name': 'me', 'service': 'twiter', 'id': 1 } def test_profile_schedules_getter(): ''' Test schedules gettering from buffer api ''' mocked_api = MagicMock() mocked_api.get.return_value = '123' profile = Profile(mocked_api, mocked_response) eq_(profile.schedules, '123') mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1) def test_profile_schedules_setter(): ''' Test schedules setter from buffer api ''' mocked_api = MagicMock() mocked_api.get.return_value = '123' profile = Profile(mocked_api, mocked_response) profile.schedules = { 'times': ['mo'] } mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1, data='schedules[0][times][]=mo&') def test_profile_updates(): ''' Test updates relationship with a profile ''' mocked_api = MagicMock() with patch('buffpy.models.profile.Updates') as mocked_updates: profile = Profile(api=mocked_api, raw_response={'id': 1}) updates = profile.updates mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1) ## Instruction: Migrate profile tests to pytest ## Code After: from unittest.mock import MagicMock, patch from buffpy.models.profile import Profile, PATHS mocked_response = { "name": "me", "service": "twiter", "id": 1 } def test_profile_schedules_getter(): """ Should retrieve profiles from buffer's API. """ mocked_api = MagicMock() mocked_api.get.return_value = "123" profile = Profile(mocked_api, mocked_response) assert profile.schedules == "123" mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1")) def test_profile_schedules_setter(): """ Should update profile's schedules. """ mocked_api = MagicMock() mocked_api.get.return_value = "123" profile = Profile(mocked_api, mocked_response) profile.schedules = { "times": ["mo"] } mocked_api.post.assert_called_once_with( url=PATHS["UPDATE_SCHEDULES"].format("1"), data="schedules[0][times][]=mo&") def test_profile_updates(): """ Should properly call buffer's updates. """ mocked_api = MagicMock() with patch("buffpy.models.profile.Updates") as mocked_updates: profile = Profile(api=mocked_api, raw_response={"id": 1}) assert profile.updates mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
// ... existing code ... from unittest.mock import MagicMock, patch // ... modified code ... mocked_response = { "name": "me", "service": "twiter", "id": 1 } ... def test_profile_schedules_getter(): """ Should retrieve profiles from buffer's API. """ mocked_api = MagicMock() mocked_api.get.return_value = "123" profile = Profile(mocked_api, mocked_response) assert profile.schedules == "123" mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1")) ... def test_profile_schedules_setter(): """ Should update profile's schedules. """ mocked_api = MagicMock() mocked_api.get.return_value = "123" profile = Profile(mocked_api, mocked_response) profile.schedules = { "times": ["mo"] } mocked_api.post.assert_called_once_with( url=PATHS["UPDATE_SCHEDULES"].format("1"), data="schedules[0][times][]=mo&") ... def test_profile_updates(): """ Should properly call buffer's updates. """ mocked_api = MagicMock() with patch("buffpy.models.profile.Updates") as mocked_updates: profile = Profile(api=mocked_api, raw_response={"id": 1}) assert profile.updates mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1) // ... rest of the code ...
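The migration above follows the usual nose-to-pytest recipe: nose.tools.eq_(a, b) becomes a bare assert a == b, the third-party mock package is swapped for the stdlib unittest.mock, and test docstrings turn into behavioural "Should ..." sentences. A minimal sketch of the pattern on a toy function (everything here is illustrative):

from unittest.mock import MagicMock

def add(a, b):
    return a + b

def test_add():
    # nose: eq_(add(2, 2), 4)  ->  pytest: plain assert
    assert add(2, 2) == 4

def test_add_is_called_once():
    # side_effect delegates the call to the real function
    fake = MagicMock(side_effect=add)
    assert fake(2, 2) == 4
    fake.assert_called_once_with(2, 2)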
959897478bbda18f02aa6e38f2ebdd837581f1f0
tests/test_sct_verify_signature.py
tests/test_sct_verify_signature.py
from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is True assert got_output == 'Verified OK\n' assert got_cmd_res.exitcode == 0 signature_input = b'some invalid signature input' got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is False assert got_output == 'Verification Failure\n' assert got_cmd_res.exitcode == 1
from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() assert verify_signature(signature_input, signature, pubkey) is True signature_input = b'some invalid signature input' assert verify_signature(signature_input, signature, pubkey) is False
Fix test for changed SctVerificationResult
Fix test for changed SctVerificationResult
Python
mit
theno/ctutlz,theno/ctutlz
from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() - got_verified, got_output, got_cmd_res = \ - verify_signature(signature_input, signature, pubkey) + assert verify_signature(signature_input, signature, pubkey) is True - - assert got_verified is True - assert got_output == 'Verified OK\n' - assert got_cmd_res.exitcode == 0 signature_input = b'some invalid signature input' - got_verified, got_output, got_cmd_res = \ - verify_signature(signature_input, signature, pubkey) + assert verify_signature(signature_input, signature, pubkey) is False - assert got_verified is False - assert got_output == 'Verification Failure\n' - assert got_cmd_res.exitcode == 1 -
Fix test for changed SctVerificationResult
## Code Before: from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is True assert got_output == 'Verified OK\n' assert got_cmd_res.exitcode == 0 signature_input = b'some invalid signature input' got_verified, got_output, got_cmd_res = \ verify_signature(signature_input, signature, pubkey) assert got_verified is False assert got_output == 'Verification Failure\n' assert got_cmd_res.exitcode == 1 ## Instruction: Fix test for changed SctVerificationResult ## Code After: from os.path import join, dirname from utlz import flo from ctutlz.sct.verification import verify_signature def test_verify_signature(): basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature') signature_input = \ open(flo('{basedir}/signature_input_valid.bin'), 'rb').read() signature = open(flo('{basedir}/signature.der'), 'rb').read() pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read() assert verify_signature(signature_input, signature, pubkey) is True signature_input = b'some invalid signature input' assert verify_signature(signature_input, signature, pubkey) is False
# ... existing code ... assert verify_signature(signature_input, signature, pubkey) is True # ... modified code ... assert verify_signature(signature_input, signature, pubkey) is False # ... rest of the code ...
dbec204b242ab643de162046ba73dca32043c6c2
space-age/space_age.py
space-age/space_age.py
class SpaceAge(object): def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 def on_earth(self): return round(self.years, 2) def on_mercury(self): return round(self.years/0.2408467, 2) def on_venus(self): return round(self.years/0.6151976, 2) def on_mars(self): return round(self.years/1.8808158, 2) def on_jupiter(self): return round(self.years/11.862615, 2) def on_saturn(self): return round(self.years/29.447498, 2) def on_uranus(self): return round(self.years/84.016846, 2) def on_neptune(self): return round(self.years/164.79132, 2)
class SpaceAge(object): YEARS = {"on_earth": 1, "on_mercury": 0.2408467, "on_venus": 0.61519726, "on_mars": 1.8808158, "on_jupiter": 11.862615, "on_saturn": 29.447498, "on_uranus": 84.016846, "on_neptune": 164.79132} def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 def __getattr__(self, on_planet): if on_planet in SpaceAge.YEARS: return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2) else: raise AttributeError
Implement __getattr__ to reduce code
Implement __getattr__ to reduce code
Python
agpl-3.0
CubicComet/exercism-python-solutions
class SpaceAge(object): + YEARS = {"on_earth": 1, + "on_mercury": 0.2408467, + "on_venus": 0.61519726, + "on_mars": 1.8808158, + "on_jupiter": 11.862615, + "on_saturn": 29.447498, + "on_uranus": 84.016846, + "on_neptune": 164.79132} + def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 - def on_earth(self): - return round(self.years, 2) + def __getattr__(self, on_planet): + if on_planet in SpaceAge.YEARS: + return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2) + else: + raise AttributeError - def on_mercury(self): - return round(self.years/0.2408467, 2) - - def on_venus(self): - return round(self.years/0.6151976, 2) - - def on_mars(self): - return round(self.years/1.8808158, 2) - - def on_jupiter(self): - return round(self.years/11.862615, 2) - - def on_saturn(self): - return round(self.years/29.447498, 2) - - def on_uranus(self): - return round(self.years/84.016846, 2) - - def on_neptune(self): - return round(self.years/164.79132, 2) -
Implement __getattr__ to reduce code
## Code Before: class SpaceAge(object): def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 def on_earth(self): return round(self.years, 2) def on_mercury(self): return round(self.years/0.2408467, 2) def on_venus(self): return round(self.years/0.6151976, 2) def on_mars(self): return round(self.years/1.8808158, 2) def on_jupiter(self): return round(self.years/11.862615, 2) def on_saturn(self): return round(self.years/29.447498, 2) def on_uranus(self): return round(self.years/84.016846, 2) def on_neptune(self): return round(self.years/164.79132, 2) ## Instruction: Implement __getattr__ to reduce code ## Code After: class SpaceAge(object): YEARS = {"on_earth": 1, "on_mercury": 0.2408467, "on_venus": 0.61519726, "on_mars": 1.8808158, "on_jupiter": 11.862615, "on_saturn": 29.447498, "on_uranus": 84.016846, "on_neptune": 164.79132} def __init__(self, seconds): self.seconds = seconds @property def years(self): return self.seconds/31557600 def __getattr__(self, on_planet): if on_planet in SpaceAge.YEARS: return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2) else: raise AttributeError
# ... existing code ... class SpaceAge(object): YEARS = {"on_earth": 1, "on_mercury": 0.2408467, "on_venus": 0.61519726, "on_mars": 1.8808158, "on_jupiter": 11.862615, "on_saturn": 29.447498, "on_uranus": 84.016846, "on_neptune": 164.79132} def __init__(self, seconds): # ... modified code ... def __getattr__(self, on_planet): if on_planet in SpaceAge.YEARS: return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2) else: raise AttributeError # ... rest of the code ...
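The refactor above leans on the fact that __getattr__ is invoked only when normal attribute lookup fails, so the eight near-identical on_* methods collapse into a single table lookup that hands back a lambda. The same dispatch pattern on a standalone toy class (illustrative, not from the exercise):

class Distance(object):
    FACTORS = {"in_km": 1.609344, "in_m": 1609.344}

    def __init__(self, miles):
        self.miles = miles  # found by normal lookup, so no recursion below

    def __getattr__(self, name):
        # Reached only for attributes that do not otherwise exist.
        if name in Distance.FACTORS:
            return lambda: round(self.miles * Distance.FACTORS[name], 2)
        raise AttributeError(name)

# Distance(2).in_km() -> 3.22; Distance(2).missing() raises AttributeError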
3c65881633daee8d5b19760e5c887dce25ab69c3
froide/helper/db_utils.py
froide/helper/db_utils.py
from django.db import IntegrityError from django.template.defaultfilters import slugify def save_obj_with_slug(obj, attribute='title', **kwargs): obj.slug = slugify(getattr(obj, attribute)) return save_obj_unique(obj, 'slug', **kwargs) def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'): klass = obj.__class__ MAX_COUNT = 10000 # max 10 thousand loops base_attr = getattr(obj, attr) initial_count = count first_round = count == 0 postfix = '' while True: try: while initial_count - count < MAX_COUNT: if not first_round: postfix = postfix_format.format(count=count) if not klass.objects.filter(**{ attr: getattr(obj, attr) + postfix }).exists(): break if first_round: first_round = False count = klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count() else: count += 1 setattr(obj, attr, base_attr + postfix) obj.save() except IntegrityError: if count - initial_count < MAX_COUNT: first_round = False count = klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count() else: raise else: break
from django.db import IntegrityError from django.template.defaultfilters import slugify def save_obj_with_slug(obj, attribute='title', **kwargs): obj.slug = slugify(getattr(obj, attribute)) return save_obj_unique(obj, 'slug', **kwargs) def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'): klass = obj.__class__ MAX_COUNT = 10000 # max 10 thousand loops base_attr = getattr(obj, attr) initial_count = count first_round = count == 0 postfix = '' while True: try: while initial_count - count < MAX_COUNT: if not first_round: postfix = postfix_format.format(count=count) if not klass.objects.filter(**{ attr: getattr(obj, attr) + postfix }).exists(): break if first_round: first_round = False count = max( klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count(), initial_count ) else: count += 1 setattr(obj, attr, base_attr + postfix) obj.save() except IntegrityError: if count - initial_count < MAX_COUNT: if first_round: first_round = False count = max( klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count(), initial_count ) count += 1 else: raise else: break
Fix bad initial count in slug creation helper
Fix bad initial count in slug creation helper
Python
mit
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide
from django.db import IntegrityError from django.template.defaultfilters import slugify def save_obj_with_slug(obj, attribute='title', **kwargs): obj.slug = slugify(getattr(obj, attribute)) return save_obj_unique(obj, 'slug', **kwargs) def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'): klass = obj.__class__ MAX_COUNT = 10000 # max 10 thousand loops base_attr = getattr(obj, attr) initial_count = count first_round = count == 0 postfix = '' while True: try: while initial_count - count < MAX_COUNT: if not first_round: postfix = postfix_format.format(count=count) if not klass.objects.filter(**{ attr: getattr(obj, attr) + postfix }).exists(): break if first_round: first_round = False + count = max( - count = klass.objects.filter(**{ + klass.objects.filter(**{ - '%s__startswith' % attr: base_attr + '%s__startswith' % attr: base_attr - }).count() + }).count(), + initial_count + ) else: count += 1 setattr(obj, attr, base_attr + postfix) obj.save() except IntegrityError: if count - initial_count < MAX_COUNT: + if first_round: - first_round = False + first_round = False + count = max( - count = klass.objects.filter(**{ + klass.objects.filter(**{ - '%s__startswith' % attr: base_attr + '%s__startswith' % attr: base_attr - }).count() + }).count(), + initial_count + ) + count += 1 else: raise else: break
Fix bad initial count in slug creation helper
## Code Before: from django.db import IntegrityError from django.template.defaultfilters import slugify def save_obj_with_slug(obj, attribute='title', **kwargs): obj.slug = slugify(getattr(obj, attribute)) return save_obj_unique(obj, 'slug', **kwargs) def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'): klass = obj.__class__ MAX_COUNT = 10000 # max 10 thousand loops base_attr = getattr(obj, attr) initial_count = count first_round = count == 0 postfix = '' while True: try: while initial_count - count < MAX_COUNT: if not first_round: postfix = postfix_format.format(count=count) if not klass.objects.filter(**{ attr: getattr(obj, attr) + postfix }).exists(): break if first_round: first_round = False count = klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count() else: count += 1 setattr(obj, attr, base_attr + postfix) obj.save() except IntegrityError: if count - initial_count < MAX_COUNT: first_round = False count = klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count() else: raise else: break ## Instruction: Fix bad initial count in slug creation helper ## Code After: from django.db import IntegrityError from django.template.defaultfilters import slugify def save_obj_with_slug(obj, attribute='title', **kwargs): obj.slug = slugify(getattr(obj, attribute)) return save_obj_unique(obj, 'slug', **kwargs) def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'): klass = obj.__class__ MAX_COUNT = 10000 # max 10 thousand loops base_attr = getattr(obj, attr) initial_count = count first_round = count == 0 postfix = '' while True: try: while initial_count - count < MAX_COUNT: if not first_round: postfix = postfix_format.format(count=count) if not klass.objects.filter(**{ attr: getattr(obj, attr) + postfix }).exists(): break if first_round: first_round = False count = max( klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count(), initial_count ) else: count += 1 setattr(obj, attr, base_attr + postfix) obj.save() except IntegrityError: if count - initial_count < MAX_COUNT: if first_round: first_round = False count = max( klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count(), initial_count ) count += 1 else: raise else: break
... first_round = False count = max( klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count(), initial_count ) else: ... if count - initial_count < MAX_COUNT: if first_round: first_round = False count = max( klass.objects.filter(**{ '%s__startswith' % attr: base_attr }).count(), initial_count ) count += 1 else: ...
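Two things change in the fix above: the retry counter was previously reset from a fresh database count that could be lower than the caller-supplied starting count, and every IntegrityError re-ran that reset; the new code clamps the reset with max(..., initial_count) and performs it only once, incrementing on later collisions. The suffix-probing idea in isolation, as a toy in-memory version of the pattern:

def unique_slug(base, taken):
    # Probe 'base' first, then numbered suffixes 'base-N'.
    if base not in taken:
        return base
    count = len([s for s in taken if s.startswith(base)])
    while '{}-{}'.format(base, count) in taken:
        count += 1
    return '{}-{}'.format(base, count)

# unique_slug('report', {'report', 'report-1', 'report-2'}) -> 'report-3'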
b06f0e17541f7d424e73fd200ae10db0722b1a5a
organizer/views.py
organizer/views.py
from django.shortcuts import ( get_object_or_404, render) from .forms import TagForm from .models import Startup, Tag def startup_detail(request, slug): startup = get_object_or_404( Startup, slug__iexact=slug) return render( request, 'organizer/startup_detail.html', {'startup': startup}) def startup_list(request): return render( request, 'organizer/startup_list.html', {'startup_list': Startup.objects.all()}) def tag_create(request): if request.method == 'POST': form = TagForm(request.POST) if form.is_valid(): # create new object from data # show webpage for new object pass else: # empty data or invalid data # show bound HTML form (with errors) pass else: # request.method != 'POST' # show unbound HTML form pass def tag_detail(request, slug): tag = get_object_or_404( Tag, slug__iexact=slug) return render( request, 'organizer/tag_detail.html', {'tag': tag}) def tag_list(request): return render( request, 'organizer/tag_list.html', {'tag_list': Tag.objects.all()})
from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import TagForm from .models import Startup, Tag def startup_detail(request, slug): startup = get_object_or_404( Startup, slug__iexact=slug) return render( request, 'organizer/startup_detail.html', {'startup': startup}) def startup_list(request): return render( request, 'organizer/startup_list.html', {'startup_list': Startup.objects.all()}) def tag_create(request): if request.method == 'POST': form = TagForm(request.POST) if form.is_valid(): new_tag = form.save() return redirect(new_tag) else: # empty data or invalid data # show bound HTML form (with errors) pass else: # request.method != 'POST' # show unbound HTML form pass def tag_detail(request, slug): tag = get_object_or_404( Tag, slug__iexact=slug) return render( request, 'organizer/tag_detail.html', {'tag': tag}) def tag_list(request): return render( request, 'organizer/tag_list.html', {'tag_list': Tag.objects.all()})
Create and redirect to Tag in tag_create().
Ch09: Create and redirect to Tag in tag_create().
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
from django.shortcuts import ( - get_object_or_404, render) + get_object_or_404, redirect, render) from .forms import TagForm from .models import Startup, Tag def startup_detail(request, slug): startup = get_object_or_404( Startup, slug__iexact=slug) return render( request, 'organizer/startup_detail.html', {'startup': startup}) def startup_list(request): return render( request, 'organizer/startup_list.html', {'startup_list': Startup.objects.all()}) def tag_create(request): if request.method == 'POST': form = TagForm(request.POST) if form.is_valid(): + new_tag = form.save() + return redirect(new_tag) - # create new object from data - # show webpage for new object - pass else: # empty data or invalid data # show bound HTML form (with errors) pass else: # request.method != 'POST' # show unbound HTML form pass def tag_detail(request, slug): tag = get_object_or_404( Tag, slug__iexact=slug) return render( request, 'organizer/tag_detail.html', {'tag': tag}) def tag_list(request): return render( request, 'organizer/tag_list.html', {'tag_list': Tag.objects.all()})
Create and redirect to Tag in tag_create().
## Code Before: from django.shortcuts import ( get_object_or_404, render) from .forms import TagForm from .models import Startup, Tag def startup_detail(request, slug): startup = get_object_or_404( Startup, slug__iexact=slug) return render( request, 'organizer/startup_detail.html', {'startup': startup}) def startup_list(request): return render( request, 'organizer/startup_list.html', {'startup_list': Startup.objects.all()}) def tag_create(request): if request.method == 'POST': form = TagForm(request.POST) if form.is_valid(): # create new object from data # show webpage for new object pass else: # empty data or invalid data # show bound HTML form (with errors) pass else: # request.method != 'POST' # show unbound HTML form pass def tag_detail(request, slug): tag = get_object_or_404( Tag, slug__iexact=slug) return render( request, 'organizer/tag_detail.html', {'tag': tag}) def tag_list(request): return render( request, 'organizer/tag_list.html', {'tag_list': Tag.objects.all()}) ## Instruction: Create and redirect to Tag in tag_create(). ## Code After: from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import TagForm from .models import Startup, Tag def startup_detail(request, slug): startup = get_object_or_404( Startup, slug__iexact=slug) return render( request, 'organizer/startup_detail.html', {'startup': startup}) def startup_list(request): return render( request, 'organizer/startup_list.html', {'startup_list': Startup.objects.all()}) def tag_create(request): if request.method == 'POST': form = TagForm(request.POST) if form.is_valid(): new_tag = form.save() return redirect(new_tag) else: # empty data or invalid data # show bound HTML form (with errors) pass else: # request.method != 'POST' # show unbound HTML form pass def tag_detail(request, slug): tag = get_object_or_404( Tag, slug__iexact=slug) return render( request, 'organizer/tag_detail.html', {'tag': tag}) def tag_list(request): return render( request, 'organizer/tag_list.html', {'tag_list': Tag.objects.all()})
# ... existing code ... from django.shortcuts import ( get_object_or_404, redirect, render) # ... modified code ... if form.is_valid(): new_tag = form.save() return redirect(new_tag) else: # empty data or invalid data # ... rest of the code ...
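redirect(new_tag) works because Django's redirect() shortcut, when handed a model instance, calls the instance's get_absolute_url() method. For that call to resolve, the Tag model needs something like the sketch below; the URL name is an assumption for illustration, and the import path matches the Django 1.8 era of this repository (later Django moved reverse to django.urls):

from django.core.urlresolvers import reverse  # django.urls in Django >= 1.10
from django.db import models

class Tag(models.Model):
    name = models.CharField(max_length=31)
    slug = models.SlugField(max_length=31, unique=True)

    def get_absolute_url(self):
        # redirect(tag) ultimately returns a 302 to this URL.
        return reverse('organizer_tag_detail', kwargs={'slug': self.slug})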
37b8cf1af7818fe78b31ed25622f3f91805ade01
test_bert_trainer.py
test_bert_trainer.py
import unittest import time import shutil import pandas as pd from bert_trainer import BERTTrainer from utils import * class TestBERT(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestBERT, self).__init__(*args, **kwargs) self.output_dir = 'test_{}'.format(str(int(time.time()))) self.trainer = BERTTrainer(output_dir=self.output_dir) self.data = pd.DataFrame({ 'abstract': ['test one', 'test two', 'test three'] * 5, 'section': ['U.S.', 'Arts', 'U.S.'] * 5, }) def train_model(self): self.trainer.train(self.data['abstract'], self.data['section']) def test_train(self): self.train_model() shutil.rmtree(self.output_dir) def test_train_and_test(self): self.train_model() results = self.trainer.evaluate(self.data['abstract'], self.data['section']) results2 = self.trainer.evaluate(self.data['abstract'], self.data['section']) eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy'] self.assertEqual(eval_acc1, eval_acc2) loss1, loss2 = results['loss'], results2['loss'] self.assertEqual(eval_acc1, eval_acc2) shutil.rmtree(self.output_dir) def test_train_and_predict(self): self.train_model() input_sentences = [ "test four", "test one", ] * 5 preds = self.trainer.predict(input_sentences) shutil.rmtree(self.output_dir) if __name__ == '__main__': unittest.main()
import unittest import time import shutil import pandas as pd from bert_trainer import BERTTrainer from utils import * class TestBERT(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestBERT, self).__init__(*args, **kwargs) self.output_dir = 'test_{}'.format(str(int(time.time()))) self.trainer = BERTTrainer(output_dir=self.output_dir) self.data = pd.DataFrame({ 'abstract': ['test one', 'test two', 'test three'] * 5, 'section': ['U.S.', 'Arts', 'U.S.'] * 5, }) def train_model(self): self.trainer.train(self.data['abstract'], self.data['section']) def test_train(self): self.train_model() shutil.rmtree(self.output_dir) def test_train_and_predict(self): self.train_model() input_sentences = [ "test four", "test one", ] * 5 preds = self.trainer.predict(input_sentences) shutil.rmtree(self.output_dir) if __name__ == '__main__': unittest.main()
Fix merge conflict in bert_trainer_example.py
Fix merge conflict in bert_trainer_example.py
Python
apache-2.0
googleinterns/smart-news-query-embeddings,googleinterns/smart-news-query-embeddings
import unittest import time import shutil import pandas as pd from bert_trainer import BERTTrainer from utils import * class TestBERT(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestBERT, self).__init__(*args, **kwargs) self.output_dir = 'test_{}'.format(str(int(time.time()))) self.trainer = BERTTrainer(output_dir=self.output_dir) self.data = pd.DataFrame({ 'abstract': ['test one', 'test two', 'test three'] * 5, 'section': ['U.S.', 'Arts', 'U.S.'] * 5, }) def train_model(self): self.trainer.train(self.data['abstract'], self.data['section']) def test_train(self): self.train_model() shutil.rmtree(self.output_dir) - def test_train_and_test(self): - self.train_model() - results = self.trainer.evaluate(self.data['abstract'], self.data['section']) - results2 = self.trainer.evaluate(self.data['abstract'], self.data['section']) - eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy'] - self.assertEqual(eval_acc1, eval_acc2) - loss1, loss2 = results['loss'], results2['loss'] - self.assertEqual(eval_acc1, eval_acc2) - shutil.rmtree(self.output_dir) - def test_train_and_predict(self): self.train_model() input_sentences = [ "test four", "test one", ] * 5 preds = self.trainer.predict(input_sentences) shutil.rmtree(self.output_dir) if __name__ == '__main__': unittest.main()
Fix merge conflict in bert_trainer_example.py
## Code Before: import unittest import time import shutil import pandas as pd from bert_trainer import BERTTrainer from utils import * class TestBERT(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestBERT, self).__init__(*args, **kwargs) self.output_dir = 'test_{}'.format(str(int(time.time()))) self.trainer = BERTTrainer(output_dir=self.output_dir) self.data = pd.DataFrame({ 'abstract': ['test one', 'test two', 'test three'] * 5, 'section': ['U.S.', 'Arts', 'U.S.'] * 5, }) def train_model(self): self.trainer.train(self.data['abstract'], self.data['section']) def test_train(self): self.train_model() shutil.rmtree(self.output_dir) def test_train_and_test(self): self.train_model() results = self.trainer.evaluate(self.data['abstract'], self.data['section']) results2 = self.trainer.evaluate(self.data['abstract'], self.data['section']) eval_acc1, eval_acc2 = results['eval_accuracy'], results2['eval_accuracy'] self.assertEqual(eval_acc1, eval_acc2) loss1, loss2 = results['loss'], results2['loss'] self.assertEqual(eval_acc1, eval_acc2) shutil.rmtree(self.output_dir) def test_train_and_predict(self): self.train_model() input_sentences = [ "test four", "test one", ] * 5 preds = self.trainer.predict(input_sentences) shutil.rmtree(self.output_dir) if __name__ == '__main__': unittest.main() ## Instruction: Fix merge conflict in bert_trainer_example.py ## Code After: import unittest import time import shutil import pandas as pd from bert_trainer import BERTTrainer from utils import * class TestBERT(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestBERT, self).__init__(*args, **kwargs) self.output_dir = 'test_{}'.format(str(int(time.time()))) self.trainer = BERTTrainer(output_dir=self.output_dir) self.data = pd.DataFrame({ 'abstract': ['test one', 'test two', 'test three'] * 5, 'section': ['U.S.', 'Arts', 'U.S.'] * 5, }) def train_model(self): self.trainer.train(self.data['abstract'], self.data['section']) def test_train(self): self.train_model() shutil.rmtree(self.output_dir) def test_train_and_predict(self): self.train_model() input_sentences = [ "test four", "test one", ] * 5 preds = self.trainer.predict(input_sentences) shutil.rmtree(self.output_dir) if __name__ == '__main__': unittest.main()
// ... existing code ... def test_train_and_predict(self): // ... rest of the code ...
a1bf03f69b9cadddcc7e0015788f23f9bad0f862
apps/splash/views.py
apps/splash/views.py
import datetime from django.shortcuts import render from apps.splash.models import SplashEvent, SplashYear def index(request): # I'm really sorry ... splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180))) return render(request, 'splash/base.html', {'splash_year': splash_year }) # And I'm really sorry for this ... def _merge_events(splash_events): events = [] for event in splash_events: if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'): events[-1].append(event) else: events.append([event]) return events
import datetime from django.shortcuts import render from apps.splash.models import SplashEvent, SplashYear def index(request): # I'm really sorry ... splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180))) splash_year.events = _merge_events(splash_year.splash_events.all()) return render(request, 'splash/base.html', {'splash_year': splash_year }) # And I'm really sorry for this ... def _merge_events(splash_events): events = [] for event in splash_events: if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'): events[-1].append(event) else: events.append([event]) return events
Append event merging on splash_events
Append event merging on splash_events
Python
mit
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
import datetime from django.shortcuts import render from apps.splash.models import SplashEvent, SplashYear def index(request): # I'm really sorry ... splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180))) + + splash_year.events = _merge_events(splash_year.splash_events.all()) + return render(request, 'splash/base.html', {'splash_year': splash_year }) # And I'm really sorry for this ... def _merge_events(splash_events): events = [] for event in splash_events: if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'): events[-1].append(event) else: events.append([event]) return events
Append event merging on splash_events
## Code Before: import datetime from django.shortcuts import render from apps.splash.models import SplashEvent, SplashYear def index(request): # I'm really sorry ... splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180))) return render(request, 'splash/base.html', {'splash_year': splash_year }) # And I'm really sorry for this ... def _merge_events(splash_events): events = [] for event in splash_events: if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'): events[-1].append(event) else: events.append([event]) return events ## Instruction: Append event merging on splash_events ## Code After: import datetime from django.shortcuts import render from apps.splash.models import SplashEvent, SplashYear def index(request): # I'm really sorry ... splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180))) splash_year.events = _merge_events(splash_year.splash_events.all()) return render(request, 'splash/base.html', {'splash_year': splash_year }) # And I'm really sorry for this ... def _merge_events(splash_events): events = [] for event in splash_events: if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'): events[-1].append(event) else: events.append([event]) return events
... splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180))) splash_year.events = _merge_events(splash_year.splash_events.all()) return render(request, 'splash/base.html', {'splash_year': splash_year }) ...
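_merge_events assumes the events arrive ordered by start time and groups consecutive events that fall on the same calendar day, comparing '%d-%m' strings against the first event of the last group. A self-contained run of the same logic with toy stand-ins for SplashEvent (the function body mirrors the one above):

import datetime

class Event(object):
    def __init__(self, start_time):
        self.start_time = start_time

def _merge_events(splash_events):
    events = []
    for event in splash_events:
        if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
            events[-1].append(event)
        else:
            events.append([event])
    return events

day_one_talk = Event(datetime.datetime(2014, 8, 11, 10, 0))
day_one_bbq = Event(datetime.datetime(2014, 8, 11, 18, 0))
day_two_talk = Event(datetime.datetime(2014, 8, 12, 10, 0))
groups = _merge_events([day_one_talk, day_one_bbq, day_two_talk])
assert [len(g) for g in groups] == [2, 1]  # two events on day one, one on day two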
969fcfa12bcb734720c3e48c508329b687f91bf6
Cogs/Message.py
Cogs/Message.py
import asyncio import discord import textwrap from discord.ext import commands async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000): """A helper function to get the bot to cut his text into chunks.""" if not bot or not msg or not target: return False textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False) if not len(textList): return False dmChannel = requestor.dm_channel if len(textList) > maxMessage and dmChannel.id != target.id : # PM the contents to the requestor await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList))) target = requestor for message in textList: await target.send(message) return True
import asyncio import discord import textwrap from discord.ext import commands async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000): """A helper function to get the bot to cut his text into chunks.""" if not bot or not msg or not target: return False textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False) if not len(textList): return False if not requestor.dm_channel: # No dm channel - create it await requestor.create_dm() dmChannel = requestor.dm_channel if len(textList) > maxMessage and dmChannel.id != target.id : # PM the contents to the requestor await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList))) target = requestor for message in textList: await target.send(message) return True
Create dm channel if it doesn't exist
Create dm channel if it doesn't exist
Python
mit
corpnewt/CorpBot.py,corpnewt/CorpBot.py
import asyncio import discord import textwrap from discord.ext import commands async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000): """A helper function to get the bot to cut his text into chunks.""" if not bot or not msg or not target: return False textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False) if not len(textList): return False + if not requestor.dm_channel: + # No dm channel - create it + await requestor.create_dm() + dmChannel = requestor.dm_channel + if len(textList) > maxMessage and dmChannel.id != target.id : # PM the contents to the requestor await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList))) target = requestor for message in textList: await target.send(message) return True
Create dm channel if it doesn't exist
## Code Before: import asyncio import discord import textwrap from discord.ext import commands async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000): """A helper function to get the bot to cut his text into chunks.""" if not bot or not msg or not target: return False textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False) if not len(textList): return False dmChannel = requestor.dm_channel if len(textList) > maxMessage and dmChannel.id != target.id : # PM the contents to the requestor await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList))) target = requestor for message in textList: await target.send(message) return True ## Instruction: Create dm channel if it doesn't exist ## Code After: import asyncio import discord import textwrap from discord.ext import commands async def say(bot, msg, target, requestor, maxMessage : int = 5, characters : int = 2000): """A helper function to get the bot to cut his text into chunks.""" if not bot or not msg or not target: return False textList = textwrap.wrap(msg, characters, break_long_words=True, replace_whitespace=False) if not len(textList): return False if not requestor.dm_channel: # No dm channel - create it await requestor.create_dm() dmChannel = requestor.dm_channel if len(textList) > maxMessage and dmChannel.id != target.id : # PM the contents to the requestor await target.send("Since this message is *{} pages* - I'm just going to DM it to you.".format(len(textList))) target = requestor for message in textList: await target.send(message) return True
# ... existing code ... if not requestor.dm_channel: # No dm channel - create it await requestor.create_dm() dmChannel = requestor.dm_channel if len(textList) > maxMessage and dmChannel.id != target.id : # ... rest of the code ...
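The guard added above matches how discord.py caches direct-message channels: user.dm_channel stays None until a DM has actually been opened, and the create_dm() coroutine both opens the channel and populates that cache. The same pattern condensed into a helper (the helper name is illustrative):

async def ensure_dm_channel(user):
    # dm_channel is a cached attribute; it is None until a DM exists.
    if not user.dm_channel:
        await user.create_dm()  # opens the channel and fills the cache
    return user.dm_channel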
edc8248e6122dcfc1c4e6972ae0a4866de5c0d42
modules/urbandictionary.py
modules/urbandictionary.py
import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
Fix new lines in definition of UD module
Fix new lines in definition of UD module
Python
mit
billyvg/piebot
import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] + definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") - definition['example'] = definition['example'].replace("\r", "").replace("\n", "") + definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
Fix new lines in definition of UD module
## Code Before: import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['example'] = definition['example'].replace("\r", "").replace("\n", "") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>') ## Instruction: Fix new lines in definition of UD module ## Code After: import requests import json from modules import * class Urbandictionary(Module): def __init__(self, *args, **kwargs): """Constructor""" Module.__init__(self, kwargs=kwargs) self.url = "http://www.urbandictionary.com/iphone/search/define?term=%s" def _register_events(self): """Register module commands.""" self.add_command('ud') def ud(self, event): """Action to react/respond to user calls.""" if self.num_args >= 1: word = '%20'.join(event['args']) r = requests.get(self.url % (word)) ur = json.loads(r.text) try: definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) self.msg(event['target'], message) except KeyError: self.msg(event['target'], 'Could find word "%s"' % ' '.join(event['args'])) else: self.syntax_message(event['nick'], '.ud <word>')
// ... existing code ... definition = ur['list'][0] definition['definition'] = definition['definition'].replace("\r", " ").replace("\n", " ") definition['example'] = definition['example'].replace("\r", " ").replace("\n", " ") message = "%(word)s (%(thumbs_up)d/%(thumbs_down)d): %(definition)s (ex: %(example)s)" % (definition) // ... rest of the code ...
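The substitution above exists because IRC is line-oriented: a definition containing \r or \n would be truncated at the first line break, so the module flattens them to spaces before interpolating into the outgoing message. The same idea as a tiny hypothetical helper that also collapses runs of whitespace:

def one_line(text):
    # str.split() with no argument splits on any whitespace run,
    # so this folds \r, \n, and repeated spaces into single spaces.
    return ' '.join(text.split())

assert one_line('first line\r\nsecond  line') == 'first line second line'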
910fd1b323f05b695cccf6d3250b340c46cc2db5
venvctrl/cli/relocate.py
venvctrl/cli/relocate.py
"""Relocate a virtual environment.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import argparse from .. import api def relocate(source, destination, move=False): """Adjust the virtual environment settings and optional move it. Args: source (str): Path to the existing virtual environment. destination (str): Desired path of the virtual environment. move (bool): Whether or not to actually move the files. Default False. """ venv = api.VirtualEnvironment(source) if not move: venv.relocate(destination) return None venv.move(destination) def main(): """Relocate a virtual environment.""" parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which to configure the virtual environment.', required=True, ) parser.add_argument( '--move', help='Move the virtual environment to the destination.', default=False, action='store_true', ) args = parser.parse_args() relocate(args.source, args.destination, args.move) if __name__ == '__main__': main()
"""Relocate a virtual environment.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import argparse from .. import api def relocate(source, destination, move=False): """Adjust the virtual environment settings and optional move it. Args: source (str): Path to the existing virtual environment. destination (str): Desired path of the virtual environment. move (bool): Whether or not to actually move the files. Default False. """ venv = api.VirtualEnvironment(source) if not move: venv.relocate(destination) return None venv.move(destination) return None def main(): """Relocate a virtual environment.""" parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which to configure the virtual environment.', required=True, ) parser.add_argument( '--move', help='Move the virtual environment to the destination.', default=False, action='store_true', ) args = parser.parse_args() relocate(args.source, args.destination, args.move) if __name__ == '__main__': main()
Fix cli module for new lint detection
Fix cli module for new lint detection Since the last commit (2015), some of the test dependencies have updated. This commit specifically addresses updates in PyLint which result in more lint being detected in the project than in previous test runs.
Python
mit
kevinconway/venvctrl
"""Relocate a virtual environment.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import argparse from .. import api def relocate(source, destination, move=False): """Adjust the virtual environment settings and optional move it. Args: source (str): Path to the existing virtual environment. destination (str): Desired path of the virtual environment. move (bool): Whether or not to actually move the files. Default False. """ venv = api.VirtualEnvironment(source) if not move: venv.relocate(destination) return None venv.move(destination) + return None def main(): """Relocate a virtual environment.""" parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which to configure the virtual environment.', required=True, ) parser.add_argument( '--move', help='Move the virtual environment to the destination.', default=False, action='store_true', ) args = parser.parse_args() relocate(args.source, args.destination, args.move) if __name__ == '__main__': main()
Fix cli module for new lint detection
## Code Before: """Relocate a virtual environment.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import argparse from .. import api def relocate(source, destination, move=False): """Adjust the virtual environment settings and optional move it. Args: source (str): Path to the existing virtual environment. destination (str): Desired path of the virtual environment. move (bool): Whether or not to actually move the files. Default False. """ venv = api.VirtualEnvironment(source) if not move: venv.relocate(destination) return None venv.move(destination) def main(): """Relocate a virtual environment.""" parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which to configure the virtual environment.', required=True, ) parser.add_argument( '--move', help='Move the virtual environment to the destination.', default=False, action='store_true', ) args = parser.parse_args() relocate(args.source, args.destination, args.move) if __name__ == '__main__': main() ## Instruction: Fix cli module for new lint detection ## Code After: """Relocate a virtual environment.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import argparse from .. import api def relocate(source, destination, move=False): """Adjust the virtual environment settings and optional move it. Args: source (str): Path to the existing virtual environment. destination (str): Desired path of the virtual environment. move (bool): Whether or not to actually move the files. Default False. """ venv = api.VirtualEnvironment(source) if not move: venv.relocate(destination) return None venv.move(destination) return None def main(): """Relocate a virtual environment.""" parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which to configure the virtual environment.', required=True, ) parser.add_argument( '--move', help='Move the virtual environment to the destination.', default=False, action='store_true', ) args = parser.parse_args() relocate(args.source, args.destination, args.move) if __name__ == '__main__': main()
# ... existing code ... venv.move(destination) return None # ... rest of the code ...
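The commit message above says only that newer PyLint finds more lint, without naming the rule, so the following is an inference: adding a trailing `return None` is the standard fix for pylint's inconsistent-return-statements check (R1710), which fires when some paths of a function return an expression while others fall off the end. A minimal, runnable reproduction of that pattern:

def do_work():
    pass


def before(flag):
    if flag:
        return None  # explicit return of a value on this path ...
    do_work()        # ... while this path ends in an implicit return


def after(flag):
    if flag:
        return None
    do_work()
    return None      # every path now returns an expression; R1710 stays quiet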
c2d7f4c6ae9042d1cc7f11fa82d7133e9b506ad7
src/main/scripts/data_exports/export_json.py
src/main/scripts/data_exports/export_json.py
from lib.harvester import Harvester from lib.cli_helper import is_writable_directory import argparse import logging import json logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s") parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.") parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json", help="specify output file, default: './gazetteer_export.json'") parser.add_argument('-p', '--polygons', action='store_true', help="export place shape polygons, this will increase the file size significantly") if __name__ == "__main__": options = vars(parser.parse_args()) harvester = Harvester(options['polygons']) places = harvester.get_data() with open(options['target'], 'w') as outfile: json.dump(places, outfile)
from lib.harvester import Harvester from lib.cli_helper import is_writable_directory import argparse import logging import json logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s") parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.") parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json", help="specify output file, default: './gazetteer_export.json'") parser.add_argument('-p', '--polygons', action='store_true', help="export place shape polygons, this will increase the file size significantly") if __name__ == "__main__": options = vars(parser.parse_args()) harvester = Harvester(options['polygons']) places = harvester.get_data() with open(options['target'], 'w', encoding='utf-8') as outfile: json.dump(places, outfile, ensure_ascii=False)
Fix UTF-8 encoding for json exports
Fix UTF-8 encoding for json exports
Python
apache-2.0
dainst/gazetteer,dainst/gazetteer,dainst/gazetteer,dainst/gazetteer,dainst/gazetteer,dainst/gazetteer
from lib.harvester import Harvester from lib.cli_helper import is_writable_directory import argparse import logging import json logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s") parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.") parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json", help="specify output file, default: './gazetteer_export.json'") parser.add_argument('-p', '--polygons', action='store_true', help="export place shape polygons, this will increase the file size significantly") if __name__ == "__main__": options = vars(parser.parse_args()) harvester = Harvester(options['polygons']) places = harvester.get_data() - with open(options['target'], 'w') as outfile: + with open(options['target'], 'w', encoding='utf-8') as outfile: - json.dump(places, outfile) + json.dump(places, outfile, ensure_ascii=False)
Fix UTF-8 encoding for json exports
## Code Before: from lib.harvester import Harvester from lib.cli_helper import is_writable_directory import argparse import logging import json logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s") parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.") parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json", help="specify output file, default: './gazetteer_export.json'") parser.add_argument('-p', '--polygons', action='store_true', help="export place shape polygons, this will increase the file size significantly") if __name__ == "__main__": options = vars(parser.parse_args()) harvester = Harvester(options['polygons']) places = harvester.get_data() with open(options['target'], 'w') as outfile: json.dump(places, outfile) ## Instruction: Fix UTF-8 encoding for json exports ## Code After: from lib.harvester import Harvester from lib.cli_helper import is_writable_directory import argparse import logging import json logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") logging.basicConfig(format="%(asctime)s-%(levelname)s-%(name)s - %(message)s") parser = argparse.ArgumentParser(description="Export all publicly available Gazetteer data as one JSON file.") parser.add_argument('-t', '--target', type=is_writable_directory, nargs='?', default="./gazetteer_export.json", help="specify output file, default: './gazetteer_export.json'") parser.add_argument('-p', '--polygons', action='store_true', help="export place shape polygons, this will increase the file size significantly") if __name__ == "__main__": options = vars(parser.parse_args()) harvester = Harvester(options['polygons']) places = harvester.get_data() with open(options['target'], 'w', encoding='utf-8') as outfile: json.dump(places, outfile, ensure_ascii=False)
# ... existing code ... with open(options['target'], 'w', encoding='utf-8') as outfile: json.dump(places, outfile, ensure_ascii=False) # ... rest of the code ...
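Two stdlib behaviours explain why the fix above needs both keyword arguments (the sample value below is invented for illustration). By default `json.dump` escapes every non-ASCII character to a `\uXXXX` sequence; passing `ensure_ascii=False` writes the real characters instead, at which point the file must be opened with an encoding that can hold them rather than the platform's locale default:

import json

place = {"name": "Çatalhöyük"}  # hypothetical record

json.dumps(place)
# '{"name": "\u00c7atalh\u00f6y\u00fck"}'  -- ASCII-safe but escaped

json.dumps(place, ensure_ascii=False)
# '{"name": "Çatalhöyük"}'                 -- real characters

with open("gazetteer_export.json", "w", encoding="utf-8") as outfile:
    json.dump(place, outfile, ensure_ascii=False)  # handle must be UTF-8-capable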
fe9e11af28e2ffe2b3da5ebb0971cd712136284c
nodeconductor/iaas/migrations/0011_cloudprojectmembership_availability_zone.py
nodeconductor/iaas/migrations/0011_cloudprojectmembership_availability_zone.py
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('iaas', '0010_auto_20150118_1834'), ] operations = [ migrations.AddField( model_name='cloudprojectmembership', name='availability_zone', field=models.CharField(max_length=100, blank=True), preserve_default=True, ), ]
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('iaas', '0010_auto_20150118_1834'), ] operations = [ migrations.AddField( model_name='cloudprojectmembership', name='availability_zone', field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True), preserve_default=True, ), ]
Add help_text to availability_zone field (nc-327)
Add help_text to availability_zone field (nc-327)
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('iaas', '0010_auto_20150118_1834'), ] operations = [ migrations.AddField( model_name='cloudprojectmembership', name='availability_zone', - field=models.CharField(max_length=100, blank=True), + field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True), preserve_default=True, ), ]
Add help_text to availability_zone field (nc-327)
## Code Before: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('iaas', '0010_auto_20150118_1834'), ] operations = [ migrations.AddField( model_name='cloudprojectmembership', name='availability_zone', field=models.CharField(max_length=100, blank=True), preserve_default=True, ), ] ## Instruction: Add help_text to availability_zone field (nc-327) ## Code After: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('iaas', '0010_auto_20150118_1834'), ] operations = [ migrations.AddField( model_name='cloudprojectmembership', name='availability_zone', field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True), preserve_default=True, ), ]
... name='availability_zone', field=models.CharField(help_text='Optional availability group. Will be used for all instances provisioned in this tenant', max_length=100, blank=True), preserve_default=True, ...
23ab67f74fc7c09310638529ccf804ec2271fd6c
pynads/writer.py
pynads/writer.py
from .monad import Monad from .functor import fmap class Writer(Monad): """Stores a value as well as a log of events that have transpired with the value. """ def __init__(self, v, log): self.v = v if not isinstance(log, list): self.log = [log] else: self.log = log @classmethod def unit(cls, v): return cls(v, []) def fmap(self, f): return Writer(f(self.v), self.log) def apply(self, applicative): return fmap(self.v, applicative) def bind(self, f): v, msg = f(self.v) return Writer(v, self.log + [msg]) def __repr__(self): return "Writer({!r}, {!r})".format(self.v, self.log)
from .utils import _iter_but_not_str_or_map from .monad import Monad from .functor import fmap class Writer(Monad): """Stores a value as well as a log of events that have transpired with the value. """ __slots__ = ('v', 'log') def __init__(self, v, log): self.v = v if _iter_but_not_str_or_map(log): print("convert iter to list log...") self.log = [l for l in log] else: print("convert str/map/other to list log...") self.log = [log] @classmethod def unit(cls, v): return cls(v, []) def fmap(self, f): return Writer(f(self.v), self.log) def apply(self, applicative): return fmap(self.v, applicative) def bind(self, f): v, msg = f(self.v) return Writer(v, self.log + [msg]) def __repr__(self): return "Writer({!r}, {!r})".format(self.v, self.log)
Use utils._iter_but_not_str_or_map in Writer log creation.
Use utils._iter_but_not_str_or_map in Writer log creation.
Python
mit
justanr/pynads
+ from .utils import _iter_but_not_str_or_map from .monad import Monad from .functor import fmap class Writer(Monad): """Stores a value as well as a log of events that have transpired with the value. """ + __slots__ = ('v', 'log') + def __init__(self, v, log): self.v = v - if not isinstance(log, list): + if _iter_but_not_str_or_map(log): + print("convert iter to list log...") + self.log = [l for l in log] + else: + print("convert str/map/other to list log...") self.log = [log] - else: - self.log = log @classmethod def unit(cls, v): return cls(v, []) def fmap(self, f): return Writer(f(self.v), self.log) def apply(self, applicative): return fmap(self.v, applicative) def bind(self, f): v, msg = f(self.v) return Writer(v, self.log + [msg]) def __repr__(self): return "Writer({!r}, {!r})".format(self.v, self.log)
Use utils._iter_but_not_str_or_map in Writer log creation.
## Code Before: from .monad import Monad from .functor import fmap class Writer(Monad): """Stores a value as well as a log of events that have transpired with the value. """ def __init__(self, v, log): self.v = v if not isinstance(log, list): self.log = [log] else: self.log = log @classmethod def unit(cls, v): return cls(v, []) def fmap(self, f): return Writer(f(self.v), self.log) def apply(self, applicative): return fmap(self.v, applicative) def bind(self, f): v, msg = f(self.v) return Writer(v, self.log + [msg]) def __repr__(self): return "Writer({!r}, {!r})".format(self.v, self.log) ## Instruction: Use utils._iter_but_not_str_or_map in Writer log creation. ## Code After: from .utils import _iter_but_not_str_or_map from .monad import Monad from .functor import fmap class Writer(Monad): """Stores a value as well as a log of events that have transpired with the value. """ __slots__ = ('v', 'log') def __init__(self, v, log): self.v = v if _iter_but_not_str_or_map(log): print("convert iter to list log...") self.log = [l for l in log] else: print("convert str/map/other to list log...") self.log = [log] @classmethod def unit(cls, v): return cls(v, []) def fmap(self, f): return Writer(f(self.v), self.log) def apply(self, applicative): return fmap(self.v, applicative) def bind(self, f): v, msg = f(self.v) return Writer(v, self.log + [msg]) def __repr__(self): return "Writer({!r}, {!r})".format(self.v, self.log)
... from .utils import _iter_but_not_str_or_map from .monad import Monad ... """ __slots__ = ('v', 'log') def __init__(self, v, log): ... if _iter_but_not_str_or_map(log): print("convert iter to list log...") self.log = [l for l in log] else: print("convert str/map/other to list log...") self.log = [log] ...
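This record imports `_iter_but_not_str_or_map` from pynads' utils module without ever showing it. Judging only from the name and from how `__init__` branches on it, a plausible sketch follows — a guess at the helper's intent, not the library's actual code:

from collections.abc import Iterable, Mapping


def _iter_but_not_str_or_map(value):
    # Lists, tuples, sets and generators pass; strings, bytes and dict-like
    # objects are iterable too but should be stored as single log entries.
    return isinstance(value, Iterable) and not isinstance(value, (str, bytes, Mapping))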
34fda0b20a87b94d7413054bfcfc81dad0ecde19
utils/get_message.py
utils/get_message.py
import amqp from contextlib import closing def get_message(queue): """ Get the first message from a queue. The first message from a queue is retrieved. If there is no such message, the function exits quietly. :param queue: The name of the queue from which to get the message. Usage:: >>> from utils import get_message >>> message = get_message('queue') """ with closing(amqp.Connection()) as connection: channel = connection.channel() return channel.basic_get(queue=queue)
import amqp from contextlib import closing def __get_channel(connection): return connection.channel() def __get_message_from_queue(channel, queue): return channel.basic_get(queue=queue) def get_message(queue): """ Get the first message from a queue. The first message from a queue is retrieved. If there is no such message, the function exits quietly. :param queue: The name of the queue from which to get the message. Usage:: >>> from utils import get_message >>> message = get_message('queue') """ with closing(amqp.Connection()) as connection: channel = __get_channel(connection) return __get_message_from_queue(channel, queue)
Revert "Remove redundant functions (one too many levels of abstraction)@"
Revert "Remove redundant functions (one too many levels of abstraction)@" This reverts commit 9c5bf06d1427db9839b1531aa08e66574c7b4582.
Python
mit
jdgillespie91/trackerSpend,jdgillespie91/trackerSpend
import amqp from contextlib import closing + + def __get_channel(connection): + return connection.channel() + + def __get_message_from_queue(channel, queue): + return channel.basic_get(queue=queue) def get_message(queue): """ Get the first message from a queue. The first message from a queue is retrieved. If there is no such message, the function exits quietly. :param queue: The name of the queue from which to get the message. Usage:: >>> from utils import get_message >>> message = get_message('queue') """ with closing(amqp.Connection()) as connection: - channel = connection.channel() - return channel.basic_get(queue=queue) + channel = __get_channel(connection) + return __get_message_from_queue(channel, queue)
Revert "Remove redundant functions (one too many levels of abstraction)@"
## Code Before: import amqp from contextlib import closing def get_message(queue): """ Get the first message from a queue. The first message from a queue is retrieved. If there is no such message, the function exits quietly. :param queue: The name of the queue from which to get the message. Usage:: >>> from utils import get_message >>> message = get_message('queue') """ with closing(amqp.Connection()) as connection: channel = connection.channel() return channel.basic_get(queue=queue) ## Instruction: Revert "Remove redundant functions (one too many levels of abstraction)@" ## Code After: import amqp from contextlib import closing def __get_channel(connection): return connection.channel() def __get_message_from_queue(channel, queue): return channel.basic_get(queue=queue) def get_message(queue): """ Get the first message from a queue. The first message from a queue is retrieved. If there is no such message, the function exits quietly. :param queue: The name of the queue from which to get the message. Usage:: >>> from utils import get_message >>> message = get_message('queue') """ with closing(amqp.Connection()) as connection: channel = __get_channel(connection) return __get_message_from_queue(channel, queue)
... def __get_channel(connection): return connection.channel() def __get_message_from_queue(channel, queue): return channel.basic_get(queue=queue) ... with closing(amqp.Connection()) as connection: channel = __get_channel(connection) return __get_message_from_queue(channel, queue) ...
07ccbc36fd5148db2efc5f676fd13d4b24aa004f
hackasmlexer/hacklexer.py
hackasmlexer/hacklexer.py
import re from pygments.lexer import RegexLexer, include from pygments.token import * class HackAsmLexer(RegexLexer): name = 'Hack Assembler' aliases = ['hack_asm'] filenames = ['*.asm'] identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*' flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': [ include('whitespace'), (r'\(' + identifier + '\)', Name.Label), (r'[+-=;&|!]+', Operator), (r'\/\/.+$', Comment), (r'[\r\n]+', Text), (r'@[A-Za-z][A-Za-z0-9]+', Name.Variable), (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword), (r'null', Keyword.Pseudo), (r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin), (r'@[0-9]+', Name.Constant) ], 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'#.*?\n', Comment) ] }
import re from pygments.lexer import RegexLexer, include from pygments.token import * class HackAsmLexer(RegexLexer): name = 'Hack Assembler' aliases = ['hack_asm'] filenames = ['*.asm'] identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*' flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': [ include('whitespace'), (r'\(' + identifier + '\)', Name.Label), (r'[+-=;&|!]+', Operator), (r'\/\/.+$', Comment), (r'[\r\n]+', Text), (r'@[A-Za-z][A-Za-z0-9]+', Name.Variable), (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword), (r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses (r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses (r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses (r'null', Keyword.Pseudo), (r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin), (r'@[0-9]+', Name.Constant) ], 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'#.*?\n', Comment) ] }
Add register and IO addresses
Add register and IO addresses
Python
mit
cprieto/pygments_hack_asm
import re from pygments.lexer import RegexLexer, include from pygments.token import * class HackAsmLexer(RegexLexer): name = 'Hack Assembler' aliases = ['hack_asm'] filenames = ['*.asm'] identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*' flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': [ include('whitespace'), (r'\(' + identifier + '\)', Name.Label), (r'[+-=;&|!]+', Operator), (r'\/\/.+$', Comment), (r'[\r\n]+', Text), (r'@[A-Za-z][A-Za-z0-9]+', Name.Variable), (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword), + (r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses + (r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses + (r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses (r'null', Keyword.Pseudo), (r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin), (r'@[0-9]+', Name.Constant) ], 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'#.*?\n', Comment) ] }
Add register and IO addresses
## Code Before: import re from pygments.lexer import RegexLexer, include from pygments.token import * class HackAsmLexer(RegexLexer): name = 'Hack Assembler' aliases = ['hack_asm'] filenames = ['*.asm'] identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*' flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': [ include('whitespace'), (r'\(' + identifier + '\)', Name.Label), (r'[+-=;&|!]+', Operator), (r'\/\/.+$', Comment), (r'[\r\n]+', Text), (r'@[A-Za-z][A-Za-z0-9]+', Name.Variable), (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword), (r'null', Keyword.Pseudo), (r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin), (r'@[0-9]+', Name.Constant) ], 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'#.*?\n', Comment) ] } ## Instruction: Add register and IO addresses ## Code After: import re from pygments.lexer import RegexLexer, include from pygments.token import * class HackAsmLexer(RegexLexer): name = 'Hack Assembler' aliases = ['hack_asm'] filenames = ['*.asm'] identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*' flags = re.IGNORECASE | re.MULTILINE tokens = { 'root': [ include('whitespace'), (r'\(' + identifier + '\)', Name.Label), (r'[+-=;&|!]+', Operator), (r'\/\/.+$', Comment), (r'[\r\n]+', Text), (r'@[A-Za-z][A-Za-z0-9]+', Name.Variable), (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword), (r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses (r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses (r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses (r'null', Keyword.Pseudo), (r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin), (r'@[0-9]+', Name.Constant) ], 'whitespace': [ (r'\n', Text), (r'\s+', Text), (r'#.*?\n', Comment) ] }
// ... existing code ... (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword), (r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses (r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses (r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses (r'null', Keyword.Pseudo), // ... rest of the code ...
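An editorial aside on the three added rules, checkable with the standard re module: `@` is not a word character, so the leading `\b` only succeeds when a word character immediately precedes the `@`. At the start of an instruction that never happens — and the earlier `@[A-Za-z][A-Za-z0-9]+` rule would claim the token first in any case, since RegexLexer tries rules in order:

import re

print(re.search(r'\b@(SCREEN|KBD)\b', '@SCREEN'))   # None: no word boundary before '@'
print(re.search(r'@(SCREEN|KBD)\b', '@SCREEN'))     # matches '@SCREEN'
print(re.search(r'\b@(SCREEN|KBD)\b', 'x@SCREEN'))  # matches, but only mid-token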
994e185e7bb8b2ffb78f20012121c441ea6b73a1
comics/views.py
comics/views.py
from django.views import generic from gallery.models import GalleryImage from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def get_queryset(self): query_set = ( super().get_queryset().filter(issue__slug=self.kwargs.get("issue_slug")) ) return query_set
from django.views import generic from gallery.models import GalleryImage from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" def get_queryset(self): query_set = super().get_queryset().filter(arc__slug=self.kwargs.get("arc_slug")) return query_set class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def get_queryset(self): query_set = ( super().get_queryset().filter(issue__slug=self.kwargs.get("issue_slug")) ) return query_set
Fix bug where arc slug could be literally anything
Fix bug where arc slug could be literally anything
Python
mit
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
from django.views import generic from gallery.models import GalleryImage from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" + def get_queryset(self): + query_set = super().get_queryset().filter(arc__slug=self.kwargs.get("arc_slug")) + return query_set + class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def get_queryset(self): query_set = ( super().get_queryset().filter(issue__slug=self.kwargs.get("issue_slug")) ) return query_set
Fix bug where arc slug could be literally anything
## Code Before: from django.views import generic from gallery.models import GalleryImage from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def get_queryset(self): query_set = ( super().get_queryset().filter(issue__slug=self.kwargs.get("issue_slug")) ) return query_set ## Instruction: Fix bug where arc slug could be literally anything ## Code After: from django.views import generic from gallery.models import GalleryImage from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" def get_queryset(self): query_set = super().get_queryset().filter(arc__slug=self.kwargs.get("arc_slug")) return query_set class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def get_queryset(self): query_set = ( super().get_queryset().filter(issue__slug=self.kwargs.get("issue_slug")) ) return query_set
# ... existing code ... def get_queryset(self): query_set = super().get_queryset().filter(arc__slug=self.kwargs.get("arc_slug")) return query_set # ... rest of the code ...
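The urlconf is not part of this record, so the routes below are a hypothetical sketch of what must exist for `arc_slug` and `issue_slug` to appear in `self.kwargs` (route names and URL shapes are invented; only the kwarg names come from the views). Note that Django's `DetailView` looks the object itself up via the `slug` kwarg by default, so `arc_slug` serves purely to narrow the queryset:

from django.urls import path

from . import views

app_name = "comics"
urlpatterns = [
    path("", views.IndexView.as_view(), name="index"),
    path("<slug:arc_slug>/<slug:slug>/", views.IssueView.as_view(), name="issue"),
    path(
        "<slug:arc_slug>/<slug:issue_slug>/<slug:slug>/",
        views.ComicPageView.as_view(),
        name="page",
    ),
]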
8b5337878172df95400a708b096e012436f8a706
dags/main_summary.py
dags/main_summary.py
from airflow import DAG from datetime import datetime, timedelta from operators.emr_spark_operator import EMRSparkOperator from airflow.operators import BashOperator default_args = { 'owner': 'mreid@mozilla.com', 'depends_on_past': False, 'start_date': datetime(2016, 6, 27), 'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'], 'email_on_failure': True, 'email_on_retry': True, 'retries': 2, 'retry_delay': timedelta(minutes=30), } dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily') # Make sure all the data for the given day has arrived before running. t0 = BashOperator(task_id="delayed_start", bash_command="sleep 1800", dag=dag) t1 = EMRSparkOperator(task_id="main_summary", job_name="Main Summary View", execution_timeout=timedelta(hours=10), instance_count=10, env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh", dag=dag) # Wait a little while after midnight to start for a given day. t1.set_upstream(t0)
from airflow import DAG from datetime import datetime, timedelta from operators.emr_spark_operator import EMRSparkOperator from airflow.operators import BashOperator default_args = { 'owner': 'mreid@mozilla.com', 'depends_on_past': False, 'start_date': datetime(2016, 6, 25), 'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'], 'email_on_failure': True, 'email_on_retry': True, 'retries': 2, 'retry_delay': timedelta(minutes=30), } dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10) # Make sure all the data for the given day has arrived before running. t0 = BashOperator(task_id="delayed_start", bash_command="sleep 1800", dag=dag) t1 = EMRSparkOperator(task_id="main_summary", job_name="Main Summary View", execution_timeout=timedelta(hours=10), instance_count=10, env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh", dag=dag) # Wait a little while after midnight to start for a given day. t1.set_upstream(t0)
Prepare "Main Summary" job for backfill
Prepare "Main Summary" job for backfill Set the max number of active runs so we don't overwhelm the system, and rewind the start date by a couple of days to test that the scheduler does the right thing.
Python
mpl-2.0
opentrials/opentrials-airflow,opentrials/opentrials-airflow
from airflow import DAG from datetime import datetime, timedelta from operators.emr_spark_operator import EMRSparkOperator from airflow.operators import BashOperator default_args = { 'owner': 'mreid@mozilla.com', 'depends_on_past': False, - 'start_date': datetime(2016, 6, 27), + 'start_date': datetime(2016, 6, 25), 'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'], 'email_on_failure': True, 'email_on_retry': True, 'retries': 2, 'retry_delay': timedelta(minutes=30), } - dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily') + dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10) # Make sure all the data for the given day has arrived before running. t0 = BashOperator(task_id="delayed_start", bash_command="sleep 1800", dag=dag) t1 = EMRSparkOperator(task_id="main_summary", job_name="Main Summary View", execution_timeout=timedelta(hours=10), instance_count=10, - env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, + env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh", dag=dag) # Wait a little while after midnight to start for a given day. t1.set_upstream(t0)
Prepare "Main Summary" job for backfill
## Code Before: from airflow import DAG from datetime import datetime, timedelta from operators.emr_spark_operator import EMRSparkOperator from airflow.operators import BashOperator default_args = { 'owner': 'mreid@mozilla.com', 'depends_on_past': False, 'start_date': datetime(2016, 6, 27), 'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'], 'email_on_failure': True, 'email_on_retry': True, 'retries': 2, 'retry_delay': timedelta(minutes=30), } dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily') # Make sure all the data for the given day has arrived before running. t0 = BashOperator(task_id="delayed_start", bash_command="sleep 1800", dag=dag) t1 = EMRSparkOperator(task_id="main_summary", job_name="Main Summary View", execution_timeout=timedelta(hours=10), instance_count=10, env = {"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh", dag=dag) # Wait a little while after midnight to start for a given day. t1.set_upstream(t0) ## Instruction: Prepare "Main Summary" job for backfill ## Code After: from airflow import DAG from datetime import datetime, timedelta from operators.emr_spark_operator import EMRSparkOperator from airflow.operators import BashOperator default_args = { 'owner': 'mreid@mozilla.com', 'depends_on_past': False, 'start_date': datetime(2016, 6, 25), 'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'], 'email_on_failure': True, 'email_on_retry': True, 'retries': 2, 'retry_delay': timedelta(minutes=30), } dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10) # Make sure all the data for the given day has arrived before running. t0 = BashOperator(task_id="delayed_start", bash_command="sleep 1800", dag=dag) t1 = EMRSparkOperator(task_id="main_summary", job_name="Main Summary View", execution_timeout=timedelta(hours=10), instance_count=10, env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh", dag=dag) # Wait a little while after midnight to start for a given day. t1.set_upstream(t0)
# ... existing code ... 'depends_on_past': False, 'start_date': datetime(2016, 6, 25), 'email': ['telemetry-alerts@mozilla.com', 'mreid@mozilla.com'], # ... modified code ... dag = DAG('main_summary', default_args=default_args, schedule_interval='@daily', max_active_runs=10) ... instance_count=10, env={"date": "{{ ds_nodash }}", "bucket": "{{ task.__class__.private_output_bucket }}"}, uri="https://raw.githubusercontent.com/mozilla/telemetry-airflow/master/jobs/main_summary_view.sh", # ... rest of the code ...
3c1e90761bf6d046c3b462dcdddb75335c259433
rnacentral/portal/tests/rna_type_tests.py
rnacentral/portal/tests/rna_type_tests.py
from django.test import TestCase from portal.models import Rna class GenericRnaTypeTest(TestCase): def rna_type_of(self, upi, taxid=None): return Rna.objects.\ get(upi=upi).\ get_rna_type(taxid=taxid, recompute=True) def assertRnaTypeIs(self, description, upi, taxid=None): self.assertEquals(description, self.description_of(upi, taxid=taxid)) class WormTests(GenericRnaTypeTest): def test_gets_mirna_over_pirna(self): self.assertRnaTypeIs( 'miRNA', 'URS0000016972', taxid=6239)
from django.test import TestCase from portal.models import Rna class GenericRnaTypeTest(TestCase): def rna_type_of(self, upi, taxid=None): return Rna.objects.\ get(upi=upi).\ get_rna_type(taxid=taxid, recompute=True) def assertRnaTypeIs(self, description, upi, taxid=None): self.assertEquals(description, self.description_of(upi, taxid=taxid)) class WormTests(GenericRnaTypeTest): def test_gets_mirna_over_pirna(self): self.assertRnaTypeIs( 'miRNA', 'URS0000016972', taxid=6239) class HumanTests(GenericRnaTypeTest): def test_if_has_both_anti_and_lnc_likes_lnc(self): self.assertRnaTypeIs( 'lncRNA', 'URS0000732D5D', taxid=9606)
Add test showing issue with rna_type
Add test showing issue with rna_type
Python
apache-2.0
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
from django.test import TestCase from portal.models import Rna class GenericRnaTypeTest(TestCase): def rna_type_of(self, upi, taxid=None): return Rna.objects.\ get(upi=upi).\ get_rna_type(taxid=taxid, recompute=True) def assertRnaTypeIs(self, description, upi, taxid=None): self.assertEquals(description, self.description_of(upi, taxid=taxid)) class WormTests(GenericRnaTypeTest): def test_gets_mirna_over_pirna(self): self.assertRnaTypeIs( 'miRNA', 'URS0000016972', taxid=6239) + + class HumanTests(GenericRnaTypeTest): + def test_if_has_both_anti_and_lnc_likes_lnc(self): + self.assertRnaTypeIs( + 'lncRNA', + 'URS0000732D5D', + taxid=9606) +
Add test showing issue with rna_type
## Code Before: from django.test import TestCase from portal.models import Rna class GenericRnaTypeTest(TestCase): def rna_type_of(self, upi, taxid=None): return Rna.objects.\ get(upi=upi).\ get_rna_type(taxid=taxid, recompute=True) def assertRnaTypeIs(self, description, upi, taxid=None): self.assertEquals(description, self.description_of(upi, taxid=taxid)) class WormTests(GenericRnaTypeTest): def test_gets_mirna_over_pirna(self): self.assertRnaTypeIs( 'miRNA', 'URS0000016972', taxid=6239) ## Instruction: Add test showing issue with rna_type ## Code After: from django.test import TestCase from portal.models import Rna class GenericRnaTypeTest(TestCase): def rna_type_of(self, upi, taxid=None): return Rna.objects.\ get(upi=upi).\ get_rna_type(taxid=taxid, recompute=True) def assertRnaTypeIs(self, description, upi, taxid=None): self.assertEquals(description, self.description_of(upi, taxid=taxid)) class WormTests(GenericRnaTypeTest): def test_gets_mirna_over_pirna(self): self.assertRnaTypeIs( 'miRNA', 'URS0000016972', taxid=6239) class HumanTests(GenericRnaTypeTest): def test_if_has_both_anti_and_lnc_likes_lnc(self): self.assertRnaTypeIs( 'lncRNA', 'URS0000732D5D', taxid=9606)
# ... existing code ... taxid=6239) class HumanTests(GenericRnaTypeTest): def test_if_has_both_anti_and_lnc_likes_lnc(self): self.assertRnaTypeIs( 'lncRNA', 'URS0000732D5D', taxid=9606) # ... rest of the code ...
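An editorial aside: as committed, these tests cannot pass, because `assertRnaTypeIs` calls `self.description_of()` while the base class only defines `rna_type_of()`, so every case raises AttributeError before reaching its assertion. A consistent version of the base class — a sketch, not part of the commit — would read:

from django.test import TestCase

from portal.models import Rna


class GenericRnaTypeTest(TestCase):
    def rna_type_of(self, upi, taxid=None):
        return Rna.objects.get(upi=upi).get_rna_type(taxid=taxid, recompute=True)

    def assertRnaTypeIs(self, rna_type, upi, taxid=None):
        # Delegate to the helper that actually exists on this class.
        self.assertEqual(rna_type, self.rna_type_of(upi, taxid=taxid))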
cc08fcbb513224aafe6c04143a150d1019c032ef
setup_py2exe.py
setup_py2exe.py
from distutils.core import setup from glob import glob import os import py2exe from setup import SSLYZE_SETUP data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))] # Trust Stores plugin_data_path = 'plugins\\data\\trust_stores' plugin_data_files = [] for file in os.listdir(plugin_data_path): file = os.path.join(plugin_data_path, file) if os.path.isfile(file): # skip directories plugin_data_files.append( file) data_files.append((plugin_data_path, plugin_data_files)) sslyze_setup_py2exe = SSLYZE_SETUP.copy() sslyze_setup_py2exe.update( { 'console' : ['sslyze.py'], 'data_files' : data_files, 'zipfile' : None, 'options' : {'py2exe':{ #'skip_archive': True, 'bundle_files': 1, }} } ) setup(**sslyze_setup_py2exe)
from distutils.core import setup from glob import glob import os import py2exe from setup import SSLYZE_SETUP data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))] # Trust Stores plugin_data_files = [] for file in os.listdir('plugins\\data\\trust_stores'): file = os.path.join('plugins\\data\\trust_stores', file) if os.path.isfile(file): # skip directories plugin_data_files.append( file) data_files.append(('data\\trust_stores', plugin_data_files)) sslyze_setup_py2exe = SSLYZE_SETUP.copy() sslyze_setup_py2exe.update( { 'console' : ['sslyze.py'], 'data_files' : data_files, 'zipfile' : None, 'options' : {'py2exe':{ #'skip_archive': True, 'bundle_files': 1, }} } ) setup(**sslyze_setup_py2exe)
Fix trust stores paths for py2exe builds
Fix trust stores paths for py2exe builds
Python
agpl-3.0
nabla-c0d3/sslyze
from distutils.core import setup from glob import glob import os import py2exe from setup import SSLYZE_SETUP data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))] # Trust Stores - plugin_data_path = 'plugins\\data\\trust_stores' plugin_data_files = [] - for file in os.listdir(plugin_data_path): + for file in os.listdir('plugins\\data\\trust_stores'): - file = os.path.join(plugin_data_path, file) + file = os.path.join('plugins\\data\\trust_stores', file) if os.path.isfile(file): # skip directories plugin_data_files.append( file) - data_files.append((plugin_data_path, plugin_data_files)) + data_files.append(('data\\trust_stores', plugin_data_files)) sslyze_setup_py2exe = SSLYZE_SETUP.copy() sslyze_setup_py2exe.update( { 'console' : ['sslyze.py'], 'data_files' : data_files, 'zipfile' : None, 'options' : {'py2exe':{ #'skip_archive': True, 'bundle_files': 1, }} } ) setup(**sslyze_setup_py2exe)
Fix trust stores paths for py2exe builds
## Code Before: from distutils.core import setup from glob import glob import os import py2exe from setup import SSLYZE_SETUP data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))] # Trust Stores plugin_data_path = 'plugins\\data\\trust_stores' plugin_data_files = [] for file in os.listdir(plugin_data_path): file = os.path.join(plugin_data_path, file) if os.path.isfile(file): # skip directories plugin_data_files.append( file) data_files.append((plugin_data_path, plugin_data_files)) sslyze_setup_py2exe = SSLYZE_SETUP.copy() sslyze_setup_py2exe.update( { 'console' : ['sslyze.py'], 'data_files' : data_files, 'zipfile' : None, 'options' : {'py2exe':{ #'skip_archive': True, 'bundle_files': 1, }} } ) setup(**sslyze_setup_py2exe) ## Instruction: Fix trust stores paths for py2exe builds ## Code After: from distutils.core import setup from glob import glob import os import py2exe from setup import SSLYZE_SETUP data_files = [("Microsoft.VC90.CRT", glob(r'C:\Program Files\Microsoft Visual Studio 9.0\VC\redist\x86\Microsoft.VC90.CRT\*.*'))] # Trust Stores plugin_data_files = [] for file in os.listdir('plugins\\data\\trust_stores'): file = os.path.join('plugins\\data\\trust_stores', file) if os.path.isfile(file): # skip directories plugin_data_files.append( file) data_files.append(('data\\trust_stores', plugin_data_files)) sslyze_setup_py2exe = SSLYZE_SETUP.copy() sslyze_setup_py2exe.update( { 'console' : ['sslyze.py'], 'data_files' : data_files, 'zipfile' : None, 'options' : {'py2exe':{ #'skip_archive': True, 'bundle_files': 1, }} } ) setup(**sslyze_setup_py2exe)
# ... existing code ... # Trust Stores plugin_data_files = [] for file in os.listdir('plugins\\data\\trust_stores'): file = os.path.join('plugins\\data\\trust_stores', file) if os.path.isfile(file): # skip directories # ... modified code ... data_files.append(('data\\trust_stores', plugin_data_files)) # ... rest of the code ...
6edd4114c4e715a3a0c440af455fff089a099620
scrapy/squeues.py
scrapy/squeues.py
import marshal from six.moves import cPickle as pickle from queuelib import queue def _serializable_queue(queue_class, serialize, deserialize): class SerializableQueue(queue_class): def push(self, obj): s = serialize(obj) super(SerializableQueue, self).push(s) def pop(self): s = super(SerializableQueue, self).pop() if s: return deserialize(s) return SerializableQueue def _pickle_serialize(obj): try: return pickle.dumps(obj, protocol=2) # Python<=3.4 raises pickle.PicklingError here while # Python>=3.5 raises AttributeError and # Python>=3.6 raises TypeError except (pickle.PicklingError, AttributeError, TypeError) as e: raise ValueError(str(e)) PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \ _pickle_serialize, pickle.loads) PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \ _pickle_serialize, pickle.loads) MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \ marshal.dumps, marshal.loads) MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \ marshal.dumps, marshal.loads) FifoMemoryQueue = queue.FifoMemoryQueue LifoMemoryQueue = queue.LifoMemoryQueue
import marshal from six.moves import cPickle as pickle from queuelib import queue def _serializable_queue(queue_class, serialize, deserialize): class SerializableQueue(queue_class): def push(self, obj): s = serialize(obj) super(SerializableQueue, self).push(s) def pop(self): s = super(SerializableQueue, self).pop() if s: return deserialize(s) return SerializableQueue def _pickle_serialize(obj): try: return pickle.dumps(obj, protocol=2) # Python <= 3.4 raises pickle.PicklingError here while # 3.5 <= Python < 3.6 raises AttributeError and # Python >= 3.6 raises TypeError except (pickle.PicklingError, AttributeError, TypeError) as e: raise ValueError(str(e)) PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \ _pickle_serialize, pickle.loads) PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \ _pickle_serialize, pickle.loads) MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \ marshal.dumps, marshal.loads) MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \ marshal.dumps, marshal.loads) FifoMemoryQueue = queue.FifoMemoryQueue LifoMemoryQueue = queue.LifoMemoryQueue
Clarify comment about Python versions
Clarify comment about Python versions
Python
bsd-3-clause
pablohoffman/scrapy,pawelmhm/scrapy,finfish/scrapy,Ryezhang/scrapy,ssteo/scrapy,pawelmhm/scrapy,ssteo/scrapy,scrapy/scrapy,pawelmhm/scrapy,starrify/scrapy,ArturGaspar/scrapy,ssteo/scrapy,wujuguang/scrapy,dangra/scrapy,pablohoffman/scrapy,dangra/scrapy,elacuesta/scrapy,starrify/scrapy,scrapy/scrapy,kmike/scrapy,pablohoffman/scrapy,starrify/scrapy,finfish/scrapy,Ryezhang/scrapy,wujuguang/scrapy,elacuesta/scrapy,finfish/scrapy,eLRuLL/scrapy,ArturGaspar/scrapy,Ryezhang/scrapy,wujuguang/scrapy,eLRuLL/scrapy,eLRuLL/scrapy,dangra/scrapy,kmike/scrapy,elacuesta/scrapy,scrapy/scrapy,ArturGaspar/scrapy,kmike/scrapy
import marshal from six.moves import cPickle as pickle from queuelib import queue def _serializable_queue(queue_class, serialize, deserialize): class SerializableQueue(queue_class): def push(self, obj): s = serialize(obj) super(SerializableQueue, self).push(s) def pop(self): s = super(SerializableQueue, self).pop() if s: return deserialize(s) return SerializableQueue def _pickle_serialize(obj): try: return pickle.dumps(obj, protocol=2) - # Python<=3.4 raises pickle.PicklingError here while + # Python <= 3.4 raises pickle.PicklingError here while - # Python>=3.5 raises AttributeError and + # 3.5 <= Python < 3.6 raises AttributeError and - # Python>=3.6 raises TypeError + # Python >= 3.6 raises TypeError except (pickle.PicklingError, AttributeError, TypeError) as e: raise ValueError(str(e)) PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \ _pickle_serialize, pickle.loads) PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \ _pickle_serialize, pickle.loads) MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \ marshal.dumps, marshal.loads) MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \ marshal.dumps, marshal.loads) FifoMemoryQueue = queue.FifoMemoryQueue LifoMemoryQueue = queue.LifoMemoryQueue
Clarify comment about Python versions
## Code Before: import marshal
from six.moves import cPickle as pickle

from queuelib import queue


def _serializable_queue(queue_class, serialize, deserialize):

    class SerializableQueue(queue_class):

        def push(self, obj):
            s = serialize(obj)
            super(SerializableQueue, self).push(s)

        def pop(self):
            s = super(SerializableQueue, self).pop()
            if s:
                return deserialize(s)

    return SerializableQueue


def _pickle_serialize(obj):
    try:
        return pickle.dumps(obj, protocol=2)
    # Python<=3.4 raises pickle.PicklingError here while
    # Python>=3.5 raises AttributeError and
    # Python>=3.6 raises TypeError
    except (pickle.PicklingError, AttributeError, TypeError) as e:
        raise ValueError(str(e))


PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    _pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    _pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
## Instruction: Clarify comment about Python versions
## Code After: import marshal
from six.moves import cPickle as pickle

from queuelib import queue


def _serializable_queue(queue_class, serialize, deserialize):

    class SerializableQueue(queue_class):

        def push(self, obj):
            s = serialize(obj)
            super(SerializableQueue, self).push(s)

        def pop(self):
            s = super(SerializableQueue, self).pop()
            if s:
                return deserialize(s)

    return SerializableQueue


def _pickle_serialize(obj):
    try:
        return pickle.dumps(obj, protocol=2)
    # Python <= 3.4 raises pickle.PicklingError here while
    # 3.5 <= Python < 3.6 raises AttributeError and
    # Python >= 3.6 raises TypeError
    except (pickle.PicklingError, AttributeError, TypeError) as e:
        raise ValueError(str(e))


PickleFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    _pickle_serialize, pickle.loads)
PickleLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    _pickle_serialize, pickle.loads)
MarshalFifoDiskQueue = _serializable_queue(queue.FifoDiskQueue, \
    marshal.dumps, marshal.loads)
MarshalLifoDiskQueue = _serializable_queue(queue.LifoDiskQueue, \
    marshal.dumps, marshal.loads)
FifoMemoryQueue = queue.FifoMemoryQueue
LifoMemoryQueue = queue.LifoMemoryQueue
# ... existing code ... return pickle.dumps(obj, protocol=2) # Python <= 3.4 raises pickle.PicklingError here while # 3.5 <= Python < 3.6 raises AttributeError and # Python >= 3.6 raises TypeError except (pickle.PicklingError, AttributeError, TypeError) as e: # ... rest of the code ...
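The point of funnelling three interpreter-specific exceptions into one is that callers get a stable contract regardless of Python version. A short usage sketch against the private helper from the module above (importing it this way assumes the module layout shown; generators are reliably unpicklable in CPython 3, so they make a convenient test value):

from scrapy.squeues import _pickle_serialize


def numbers():
    yield 1


try:
    _pickle_serialize(numbers())
except ValueError as exc:
    print("rejected:", exc)  # wraps e.g. "cannot pickle 'generator' object"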
ac7477803739d303df8374f916748173da32cb07
test_elasticsearch/test_server/__init__.py
test_elasticsearch/test_server/__init__.py
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(): global client if client is not None: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client client = local_get_client() except ImportError: # fallback to using vanilla client client = get_test_client() return client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(): return get_client()
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(**kwargs): global client if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client new_client = get_test_client(**kwargs) if not kwargs: client = new_client return new_client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(**kwargs): return get_client(**kwargs)
Allow test client to be created with kwargs
Allow test client to be created with kwargs
Python
apache-2.0
brunobell/elasticsearch-py,elastic/elasticsearch-py,brunobell/elasticsearch-py,elastic/elasticsearch-py
from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None - def get_client(): + def get_client(**kwargs): global client - if client is not None: + if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client - client = local_get_client() + new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client - client = get_test_client() + new_client = get_test_client(**kwargs) + if not kwargs: + client = new_client + - return client + return new_client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod - def _get_client(): + def _get_client(**kwargs): - return get_client() + return get_client(**kwargs)
Allow test client to be created with kwargs
## Code Before: from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(): global client if client is not None: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client client = local_get_client() except ImportError: # fallback to using vanilla client client = get_test_client() return client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(): return get_client() ## Instruction: Allow test client to be created with kwargs ## Code After: from elasticsearch.helpers.test import get_test_client, ElasticsearchTestCase as BaseTestCase client = None def get_client(**kwargs): global client if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client new_client = get_test_client(**kwargs) if not kwargs: client = new_client return new_client def setup(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(**kwargs): return get_client(**kwargs)
// ... existing code ... def get_client(**kwargs): global client if client is not None and not kwargs: return client // ... modified code ... from test_elasticsearch.local import get_client as local_get_client new_client = local_get_client(**kwargs) except ImportError: ... # fallback to using vanilla client new_client = get_test_client(**kwargs) if not kwargs: client = new_client return new_client ... @staticmethod def _get_client(**kwargs): return get_client(**kwargs) // ... rest of the code ...
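In behavioural terms, the change makes argument-free calls share one lazily built client while any keyword arguments bypass and preserve the cache. A small usage sketch derived directly from the code above (`timeout` is purely illustrative; kwargs are simply forwarded to the client factory):

default = get_client()             # built once, stored in the module global
assert get_client() is default     # second bare call is a cache hit
custom = get_client(timeout=30)    # kwargs force a fresh, uncached instance
assert custom is not default
assert get_client() is default     # the cached client survives untouched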
739ae88d817cb86723b126360aaf3dd6df3045c0
tests/test_log.py
tests/test_log.py
import json import logging from unittest.mock import Mock, patch from jsonrpcclient.log import _trim_string, _trim_values def test_trim_string(): message = _trim_string("foo" * 100) assert "..." in message def test_trim_values(): message = _trim_values({"list": [0] * 100}) assert "..." in message["list"] def test_trim_values_nested(): message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}}) assert "..." in message["obj"]["obj2"]["string2"] def test_trim_values_batch(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"] def test_trim_message(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"]
import json import logging from unittest.mock import Mock, patch from jsonrpcclient.log import _trim_string, _trim_values, _trim_message def test_trim_string(): message = _trim_string("foo" * 100) assert "..." in message def test_trim_values(): message = _trim_values({"list": [0] * 100}) assert "..." in message["list"] def test_trim_values_nested(): message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}}) assert "..." in message["obj"]["obj2"]["string2"] def test_trim_values_batch(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"] def test_trim_message(): message = _trim_message("foo" * 100) assert "..." in message
Add coverage to some of log.py
Add coverage to some of log.py
Python
mit
bcb/jsonrpcclient
import json import logging from unittest.mock import Mock, patch - from jsonrpcclient.log import _trim_string, _trim_values + from jsonrpcclient.log import _trim_string, _trim_values, _trim_message def test_trim_string(): message = _trim_string("foo" * 100) assert "..." in message def test_trim_values(): message = _trim_values({"list": [0] * 100}) assert "..." in message["list"] def test_trim_values_nested(): message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}}) assert "..." in message["obj"]["obj2"]["string2"] def test_trim_values_batch(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"] def test_trim_message(): - message = _trim_values([{"list": [0] * 100}]) + message = _trim_message("foo" * 100) - assert "..." in message[0]["list"] + assert "..." in message
Add coverage to some of log.py
## Code Before: import json import logging from unittest.mock import Mock, patch from jsonrpcclient.log import _trim_string, _trim_values def test_trim_string(): message = _trim_string("foo" * 100) assert "..." in message def test_trim_values(): message = _trim_values({"list": [0] * 100}) assert "..." in message["list"] def test_trim_values_nested(): message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}}) assert "..." in message["obj"]["obj2"]["string2"] def test_trim_values_batch(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"] def test_trim_message(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"] ## Instruction: Add coverage to some of log.py ## Code After: import json import logging from unittest.mock import Mock, patch from jsonrpcclient.log import _trim_string, _trim_values, _trim_message def test_trim_string(): message = _trim_string("foo" * 100) assert "..." in message def test_trim_values(): message = _trim_values({"list": [0] * 100}) assert "..." in message["list"] def test_trim_values_nested(): message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}}) assert "..." in message["obj"]["obj2"]["string2"] def test_trim_values_batch(): message = _trim_values([{"list": [0] * 100}]) assert "..." in message[0]["list"] def test_trim_message(): message = _trim_message("foo" * 100) assert "..." in message
# ... existing code ... from jsonrpcclient.log import _trim_string, _trim_values, _trim_message # ... modified code ... def test_trim_message(): message = _trim_message("foo" * 100) assert "..." in message # ... rest of the code ...
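The record only shows the tests; the helpers themselves live in jsonrpcclient.log and are not reproduced here. A hypothetical minimal `_trim_string`/`_trim_message` pair that would satisfy the assertions above could look like this (the real library's length threshold and its dict/list handling may differ):

```python
def _trim_string(s, limit=30):
    # Truncate long strings and mark the cut with an ellipsis.
    return s if len(s) <= limit else s[:limit] + "..."

def _trim_message(message, limit=30):
    # The library version presumably also walks dicts and lists; a plain
    # string is all test_trim_message exercises after this commit.
    return _trim_string(message, limit)

assert "..." in _trim_message("foo" * 100)
assert "..." not in _trim_message("foo")
```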
d042f4ced40d8d03bd65edf798a29058f26e98c6
test/test_wsstat.py
test/test_wsstat.py
import hashlib from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection class Tests(object): def setup(self): self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1) def teardown(self): pass class TestConnectedWebsocketConnection: def setup(self): self.token = hashlib.sha256(b'derp').hexdigest() self.socket = ConnectedWebsocketConnection(None, self.token) def test_message_increment(self): assert self.socket.message_count == 0 self.socket.increment_message_counter() assert self.socket.message_count == 1 self.socket.increment_message_counter() assert self.socket.message_count == 2 def test_socket_as_string(self): assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
import hashlib from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection class Tests(object): def setup(self): self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3) def test_coroutines(self): print(self.client) assert len(self.client.tasks._children) == (1 + self.client.total_connections) class TestConnectedWebsocketConnection: def setup(self): self.token = hashlib.sha256(b'derp').hexdigest() self.socket = ConnectedWebsocketConnection(None, self.token) def test_message_increment(self): assert self.socket.message_count == 0 self.socket.increment_message_counter() assert self.socket.message_count == 1 self.socket.increment_message_counter() assert self.socket.message_count == 2 def test_socket_as_string(self): assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
Add a test for running tasks
Add a test for running tasks
Python
mit
Fitblip/wsstat
import hashlib from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection class Tests(object): def setup(self): - self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1) + self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3) - def teardown(self): + def test_coroutines(self): - pass + print(self.client) + assert len(self.client.tasks._children) == (1 + self.client.total_connections) + class TestConnectedWebsocketConnection: def setup(self): self.token = hashlib.sha256(b'derp').hexdigest() self.socket = ConnectedWebsocketConnection(None, self.token) def test_message_increment(self): assert self.socket.message_count == 0 self.socket.increment_message_counter() assert self.socket.message_count == 1 self.socket.increment_message_counter() assert self.socket.message_count == 2 def test_socket_as_string(self): assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
Add a test for running tasks
## Code Before: import hashlib from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection class Tests(object): def setup(self): self.client = WebsocketTestingClient('wss://testserver/', total_connections=1, max_connecting_sockets=1) def teardown(self): pass class TestConnectedWebsocketConnection: def setup(self): self.token = hashlib.sha256(b'derp').hexdigest() self.socket = ConnectedWebsocketConnection(None, self.token) def test_message_increment(self): assert self.socket.message_count == 0 self.socket.increment_message_counter() assert self.socket.message_count == 1 self.socket.increment_message_counter() assert self.socket.message_count == 2 def test_socket_as_string(self): assert str(self.socket) == "<Websocket {}>".format(self.socket.id) ## Instruction: Add a test for running tasks ## Code After: import hashlib from wsstat.main import WebsocketTestingClient, ConnectedWebsocketConnection class Tests(object): def setup(self): self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3) def test_coroutines(self): print(self.client) assert len(self.client.tasks._children) == (1 + self.client.total_connections) class TestConnectedWebsocketConnection: def setup(self): self.token = hashlib.sha256(b'derp').hexdigest() self.socket = ConnectedWebsocketConnection(None, self.token) def test_message_increment(self): assert self.socket.message_count == 0 self.socket.increment_message_counter() assert self.socket.message_count == 1 self.socket.increment_message_counter() assert self.socket.message_count == 2 def test_socket_as_string(self): assert str(self.socket) == "<Websocket {}>".format(self.socket.id)
// ... existing code ... def setup(self): self.client = WebsocketTestingClient('wss://testserver/', total_connections=3, max_connecting_sockets=3) def test_coroutines(self): print(self.client) assert len(self.client.tasks._children) == (1 + self.client.total_connections) // ... rest of the code ...
abd0a6854c90c3647d17dfb3ea980fa49aa5372f
pwndbg/commands/segments.py
pwndbg/commands/segments.py
from __future__ import print_function import gdb import pwndbg.regs class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase')
from __future__ import print_function import gdb import pwndbg.regs import pwndbg.commands class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase))
Add fsbase and gsbase commands
Add fsbase and gsbase commands
Python
mit
cebrusfs/217gdb,anthraxx/pwndbg,chubbymaggie/pwndbg,anthraxx/pwndbg,disconnect3d/pwndbg,0xddaa/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,zachriggle/pwndbg,disconnect3d/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,zachriggle/pwndbg,pwndbg/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg,chubbymaggie/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,0xddaa/pwndbg
from __future__ import print_function import gdb import pwndbg.regs + import pwndbg.commands class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') + @pwndbg.commands.OnlyWhenRunning + @pwndbg.commands.ParsedCommand + def fsbase(): + """ + Prints out the FS base address. See also $fsbase. + """ + print(hex(pwndbg.regs.fsbase)) + + + @pwndbg.commands.OnlyWhenRunning + @pwndbg.commands.ParsedCommand + def gsbase(): + """ + Prints out the GS base address. See also $gsbase. + """ + print(hex(pwndbg.regs.gsbase)) +
Add fsbase and gsbase commands
## Code Before: from __future__ import print_function import gdb import pwndbg.regs class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') ## Instruction: Add fsbase and gsbase commands ## Code After: from __future__ import print_function import gdb import pwndbg.regs import pwndbg.commands class segment(gdb.Function): """Get the flat address of memory based off of the named segment register. """ def __init__(self, name): super(segment, self).__init__(name) self.name = name def invoke(self, arg=0): result = getattr(pwndbg.regs, self.name) return result + arg segment('fsbase') segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase))
// ... existing code ... import pwndbg.regs import pwndbg.commands // ... modified code ... segment('gsbase') @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def fsbase(): """ Prints out the FS base address. See also $fsbase. """ print(hex(pwndbg.regs.fsbase)) @pwndbg.commands.OnlyWhenRunning @pwndbg.commands.ParsedCommand def gsbase(): """ Prints out the GS base address. See also $gsbase. """ print(hex(pwndbg.regs.gsbase)) // ... rest of the code ...
4a6ccb58bade2cefc7baa9424f1747275adaa166
antxetamedia/archive/filtersets.py
antxetamedia/archive/filtersets.py
from django_filters import FilterSet from antxetamedia.news.models import NewsPodcast from antxetamedia.radio.models import RadioPodcast from antxetamedia.projects.models import ProjectShow # We do not want to accidentally discard anything, so be inclusive and always # make gte and lte lookups instead of using gt or lt ones class NewsPodcastFilterSet(FilterSet): class Meta: model = NewsPodcast fields = { 'show': ['exact'], 'categories': ['exact'], 'pub_date': ['gte', 'lte'], } class RadioPodcastFilterSet(FilterSet): class Meta: model = RadioPodcast fields = { 'show': ['exact'], 'show__category': ['exact'], 'show__producer': ['exact'], 'pub_date': ['gte', 'lte'], } class ProjectShowFilterSet(FilterSet): class Meta: model = ProjectShow fields = { 'producer': ['exact'], 'creation_date': ['year__exact'], }
from django.utils.translation import ugettext_lazy as _ from django_filters import FilterSet, DateTimeFilter from antxetamedia.news.models import NewsPodcast from antxetamedia.radio.models import RadioPodcast from antxetamedia.projects.models import ProjectShow # We do not want to accidentally discard anything, so be inclusive and always # make gte and lte lookups instead of using gt or lt ones class NewsPodcastFilterSet(FilterSet): pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) class Meta: model = NewsPodcast fields = ['show', 'categories', 'pub_date_after', 'pub_date_before'] class RadioPodcastFilterSet(FilterSet): pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) class Meta: model = RadioPodcast fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before'] class ProjectShowFilterSet(FilterSet): class Meta: model = ProjectShow fields = { 'producer': ['exact'], 'creation_date': ['year__exact'], }
Add labels to the pub_date__lte pub_date__gte filters
Add labels to the pub_date__lte pub_date__gte filters
Python
agpl-3.0
GISAElkartea/amv2,GISAElkartea/amv2,GISAElkartea/amv2
+ from django.utils.translation import ugettext_lazy as _ + - from django_filters import FilterSet + from django_filters import FilterSet, DateTimeFilter from antxetamedia.news.models import NewsPodcast from antxetamedia.radio.models import RadioPodcast from antxetamedia.projects.models import ProjectShow # We do not want to accidentally discard anything, so be inclusive and always # make gte and lte lookups instead of using gt or lt ones class NewsPodcastFilterSet(FilterSet): + pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) + pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) + class Meta: model = NewsPodcast + fields = ['show', 'categories', 'pub_date_after', 'pub_date_before'] - fields = { - 'show': ['exact'], - 'categories': ['exact'], - 'pub_date': ['gte', 'lte'], - } class RadioPodcastFilterSet(FilterSet): + pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) + pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) + class Meta: model = RadioPodcast + fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before'] - fields = { - 'show': ['exact'], - 'show__category': ['exact'], - 'show__producer': ['exact'], - 'pub_date': ['gte', 'lte'], - } class ProjectShowFilterSet(FilterSet): class Meta: model = ProjectShow fields = { 'producer': ['exact'], 'creation_date': ['year__exact'], }
Add labels to the pub_date__lte pub_date__gte filters
## Code Before: from django_filters import FilterSet from antxetamedia.news.models import NewsPodcast from antxetamedia.radio.models import RadioPodcast from antxetamedia.projects.models import ProjectShow # We do not want to accidentally discard anything, so be inclusive and always # make gte and lte lookups instead of using gt or lt ones class NewsPodcastFilterSet(FilterSet): class Meta: model = NewsPodcast fields = { 'show': ['exact'], 'categories': ['exact'], 'pub_date': ['gte', 'lte'], } class RadioPodcastFilterSet(FilterSet): class Meta: model = RadioPodcast fields = { 'show': ['exact'], 'show__category': ['exact'], 'show__producer': ['exact'], 'pub_date': ['gte', 'lte'], } class ProjectShowFilterSet(FilterSet): class Meta: model = ProjectShow fields = { 'producer': ['exact'], 'creation_date': ['year__exact'], } ## Instruction: Add labels to the pub_date__lte pub_date__gte filters ## Code After: from django.utils.translation import ugettext_lazy as _ from django_filters import FilterSet, DateTimeFilter from antxetamedia.news.models import NewsPodcast from antxetamedia.radio.models import RadioPodcast from antxetamedia.projects.models import ProjectShow # We do not want to accidentally discard anything, so be inclusive and always # make gte and lte lookups instead of using gt or lt ones class NewsPodcastFilterSet(FilterSet): pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) class Meta: model = NewsPodcast fields = ['show', 'categories', 'pub_date_after', 'pub_date_before'] class RadioPodcastFilterSet(FilterSet): pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) class Meta: model = RadioPodcast fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before'] class ProjectShowFilterSet(FilterSet): class Meta: model = ProjectShow fields = { 'producer': ['exact'], 'creation_date': ['year__exact'], }
# ... existing code ... from django.utils.translation import ugettext_lazy as _ from django_filters import FilterSet, DateTimeFilter # ... modified code ... class NewsPodcastFilterSet(FilterSet): pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) class Meta: ... model = NewsPodcast fields = ['show', 'categories', 'pub_date_after', 'pub_date_before'] ... class RadioPodcastFilterSet(FilterSet): pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after')) pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before')) class Meta: ... model = RadioPodcast fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before'] # ... rest of the code ...
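For context, a labelled FilterSet like NewsPodcastFilterSet above is typically bound to request data in a view; the `label` kwarg only changes the text rendered beside each form field. A sketch of that usage (the view and template names are illustrative, not part of this repository):

```python
from django.shortcuts import render

def podcast_archive(request):
    # Bind GET parameters such as ?pub_date_after=2015-01-01 to the filters.
    filterset = NewsPodcastFilterSet(request.GET,
                                     queryset=NewsPodcast.objects.all())
    return render(request, 'archive/podcasts.html', {
        'filter': filterset,       # form fields render "Published after/before"
        'podcasts': filterset.qs,  # the filtered queryset
    })
```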
931a858dc1cfde1652d21e1ccd60a82dde683ce3
moxie/butterfield.py
moxie/butterfield.py
import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job))
import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job)) elif cmd == "yo": yield from bot.post( message['channel'], "Yo {}".format(message['user']))
Add simple "yo" bot command
Add simple "yo" bot command
Python
mit
paultag/moxie,loandy/moxie,mileswwatkins/moxie,mileswwatkins/moxie,paultag/moxie,loandy/moxie,loandy/moxie,paultag/moxie,rshorey/moxie,rshorey/moxie,rshorey/moxie,mileswwatkins/moxie
import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job)) + elif cmd == "yo": + yield from bot.post( + message['channel'], "Yo {}".format(message['user']))
Add simple "yo" bot command
## Code Before: import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job)) ## Instruction: Add simple "yo" bot command ## Code After: import os import json import asyncio from butterfield.utils import at_bot from aiodocker import Docker from aiocore import Service WEB_ROOT = os.environ.get("MOXIE_WEB_URL", "http://localhost:8888") @asyncio.coroutine def events(bot): docker = Docker() events = docker.events events.saferun() stream = events.listen() while True: el = yield from stream.get() yield from bot.post("#cron", "`{}`".format(str(el))) @asyncio.coroutine @at_bot def run(bot, message: "message"): runner = Service.resolve("moxie.cores.run.RunService") text = message.get("text", "") if text == "": yield from bot.post(message['channel'], "Invalid request") cmd, arg = text.split(" ", 1) if cmd == "run": job = arg yield from bot.post( message['channel'], "Doing bringup of {}".format(job)) try: yield from runner.run(job) except ValueError as e: yield from bot.post( message['channel'], "Gah, {job} failed - {e}".format(e=e, job=job) ) return yield from bot.post(message['channel'], "Job {job} online - {webroot}/container/{job}/".format( webroot=WEB_ROOT, job=job)) elif cmd == "yo": yield from bot.post( message['channel'], "Yo {}".format(message['user']))
# ... existing code ... webroot=WEB_ROOT, job=job)) elif cmd == "yo": yield from bot.post( message['channel'], "Yo {}".format(message['user'])) # ... rest of the code ...
c955628b134586491265bc2e6b4045398072cead
allauth/socialaccount/providers/kakao/provider.py
allauth/socialaccount/providers/kakao/provider.py
from allauth.account.models import EmailAddress from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class KakaoAccount(ProviderAccount): @property def properties(self): return self.account.extra_data['properties'] def get_avatar_url(self): return self.properties['profile_image'] def to_str(self): dflt = super(KakaoAccount, self).to_str() return self.properties['nickname'] or dflt class KakaoProvider(OAuth2Provider): id = 'kakao' name = 'Kakao' account_class = KakaoAccount def extract_uid(self, data): return str(data['id']) def extract_common_fields(self, data): email = data.get("kaccount_email") return dict(email=email) def extract_email_addresses(self, data): ret = [] email = data.get("kaccount_email") verified = data.get("kaccount_email_verified") # data["kaccount_email_verified"] imply the email address is # verified ret.append(EmailAddress(email=email, verified=verified, primary=True)) return ret provider_classes = [KakaoProvider]
from allauth.account.models import EmailAddress from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class KakaoAccount(ProviderAccount): @property def properties(self): return self.account.extra_data['properties'] def get_avatar_url(self): return self.properties['profile_image'] def to_str(self): dflt = super(KakaoAccount, self).to_str() return self.properties['nickname'] or dflt class KakaoProvider(OAuth2Provider): id = 'kakao' name = 'Kakao' account_class = KakaoAccount def extract_uid(self, data): return str(data['id']) def extract_common_fields(self, data): email = data.get("kaccount_email") return dict(email=email) def extract_email_addresses(self, data): ret = [] email = data.get("kaccount_email") if email: verified = data.get("kaccount_email_verified") # data["kaccount_email_verified"] imply the email address is # verified ret.append(EmailAddress(email=email, verified=verified, primary=True)) return ret provider_classes = [KakaoProvider]
Handle case where email is not present
fix(kakao): Handle case where email is not present
Python
mit
pennersr/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,bittner/django-allauth,AltSchool/django-allauth,pennersr/django-allauth,AltSchool/django-allauth,rsalmaso/django-allauth,AltSchool/django-allauth,lukeburden/django-allauth,bittner/django-allauth,lukeburden/django-allauth,bittner/django-allauth,pennersr/django-allauth
from allauth.account.models import EmailAddress from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class KakaoAccount(ProviderAccount): @property def properties(self): return self.account.extra_data['properties'] def get_avatar_url(self): return self.properties['profile_image'] def to_str(self): dflt = super(KakaoAccount, self).to_str() return self.properties['nickname'] or dflt class KakaoProvider(OAuth2Provider): id = 'kakao' name = 'Kakao' account_class = KakaoAccount def extract_uid(self, data): return str(data['id']) def extract_common_fields(self, data): email = data.get("kaccount_email") return dict(email=email) def extract_email_addresses(self, data): ret = [] email = data.get("kaccount_email") + if email: - verified = data.get("kaccount_email_verified") + verified = data.get("kaccount_email_verified") - # data["kaccount_email_verified"] imply the email address is + # data["kaccount_email_verified"] imply the email address is - # verified + # verified - ret.append(EmailAddress(email=email, + ret.append(EmailAddress(email=email, - verified=verified, + verified=verified, - primary=True)) + primary=True)) return ret provider_classes = [KakaoProvider]
Handle case where email is not present
## Code Before: from allauth.account.models import EmailAddress from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class KakaoAccount(ProviderAccount): @property def properties(self): return self.account.extra_data['properties'] def get_avatar_url(self): return self.properties['profile_image'] def to_str(self): dflt = super(KakaoAccount, self).to_str() return self.properties['nickname'] or dflt class KakaoProvider(OAuth2Provider): id = 'kakao' name = 'Kakao' account_class = KakaoAccount def extract_uid(self, data): return str(data['id']) def extract_common_fields(self, data): email = data.get("kaccount_email") return dict(email=email) def extract_email_addresses(self, data): ret = [] email = data.get("kaccount_email") verified = data.get("kaccount_email_verified") # data["kaccount_email_verified"] imply the email address is # verified ret.append(EmailAddress(email=email, verified=verified, primary=True)) return ret provider_classes = [KakaoProvider] ## Instruction: Handle case where email is not present ## Code After: from allauth.account.models import EmailAddress from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider class KakaoAccount(ProviderAccount): @property def properties(self): return self.account.extra_data['properties'] def get_avatar_url(self): return self.properties['profile_image'] def to_str(self): dflt = super(KakaoAccount, self).to_str() return self.properties['nickname'] or dflt class KakaoProvider(OAuth2Provider): id = 'kakao' name = 'Kakao' account_class = KakaoAccount def extract_uid(self, data): return str(data['id']) def extract_common_fields(self, data): email = data.get("kaccount_email") return dict(email=email) def extract_email_addresses(self, data): ret = [] email = data.get("kaccount_email") if email: verified = data.get("kaccount_email_verified") # data["kaccount_email_verified"] imply the email address is # verified ret.append(EmailAddress(email=email, verified=verified, primary=True)) return ret provider_classes = [KakaoProvider]
# ... existing code ... email = data.get("kaccount_email") if email: verified = data.get("kaccount_email_verified") # data["kaccount_email_verified"] imply the email address is # verified ret.append(EmailAddress(email=email, verified=verified, primary=True)) return ret # ... rest of the code ...
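The guard above is the usual defence for OAuth providers that may omit fields: only construct dependent objects when the value is actually present. Stripped of the allauth models, the shape of the fix is just this (dicts stand in for EmailAddress objects):

```python
def extract_email_addresses(data):
    ret = []
    email = data.get("kaccount_email")
    if email:  # skip users whose Kakao account exposes no email
        verified = bool(data.get("kaccount_email_verified"))
        ret.append({"email": email, "verified": verified, "primary": True})
    return ret

assert extract_email_addresses({}) == []
assert extract_email_addresses({"kaccount_email": "a@b.kr"})[0]["verified"] is False
```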
090bcbf8bbc32a2a8da5f0ab2be097e5a6716c3d
src/adhocracy_frontend/adhocracy_frontend/tests/integration/test_jasmine.py
src/adhocracy_frontend/adhocracy_frontend/tests/integration/test_jasmine.py
from pytest import fixture from pytest import mark from adhocracy_frontend.testing import Browser from adhocracy_frontend.testing import browser_test_helper from adhocracy_frontend.tests.unit.console import Parser from adhocracy_frontend.tests.unit.console import Formatter pytestmark = mark.jasmine class TestJasmine: def test_all(self, browser_igtest): data = browser_igtest.evaluate_script('jsApiReporter.specs()') formatter = Formatter([]) parser = Parser() results = parser.parse(data) formatter.results = results print(formatter.format()) num_failures = len(list(results.failed())) assert num_failures == 0
from pytest import fixture from pytest import mark from adhocracy_frontend.testing import Browser from adhocracy_frontend.testing import browser_test_helper from adhocracy_frontend.tests.unit.console import Parser from adhocracy_frontend.tests.unit.console import Formatter pytestmark = mark.jasmine class TestJasmine: @mark.xfail def test_all(self, browser_igtest): data = browser_igtest.evaluate_script('jsApiReporter.specs()') formatter = Formatter([]) parser = Parser() results = parser.parse(data) formatter.results = results print(formatter.format()) num_failures = len(list(results.failed())) assert num_failures == 0
Mark integration tests as xfail
Mark integration tests as xfail
Python
agpl-3.0
fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator
from pytest import fixture from pytest import mark from adhocracy_frontend.testing import Browser from adhocracy_frontend.testing import browser_test_helper from adhocracy_frontend.tests.unit.console import Parser from adhocracy_frontend.tests.unit.console import Formatter pytestmark = mark.jasmine class TestJasmine: + @mark.xfail def test_all(self, browser_igtest): data = browser_igtest.evaluate_script('jsApiReporter.specs()') formatter = Formatter([]) parser = Parser() results = parser.parse(data) formatter.results = results print(formatter.format()) num_failures = len(list(results.failed())) assert num_failures == 0
Mark integration tests as xfail
## Code Before: from pytest import fixture from pytest import mark from adhocracy_frontend.testing import Browser from adhocracy_frontend.testing import browser_test_helper from adhocracy_frontend.tests.unit.console import Parser from adhocracy_frontend.tests.unit.console import Formatter pytestmark = mark.jasmine class TestJasmine: def test_all(self, browser_igtest): data = browser_igtest.evaluate_script('jsApiReporter.specs()') formatter = Formatter([]) parser = Parser() results = parser.parse(data) formatter.results = results print(formatter.format()) num_failures = len(list(results.failed())) assert num_failures == 0 ## Instruction: Mark integration tests as xfail ## Code After: from pytest import fixture from pytest import mark from adhocracy_frontend.testing import Browser from adhocracy_frontend.testing import browser_test_helper from adhocracy_frontend.tests.unit.console import Parser from adhocracy_frontend.tests.unit.console import Formatter pytestmark = mark.jasmine class TestJasmine: @mark.xfail def test_all(self, browser_igtest): data = browser_igtest.evaluate_script('jsApiReporter.specs()') formatter = Formatter([]) parser = Parser() results = parser.parse(data) formatter.results = results print(formatter.format()) num_failures = len(list(results.failed())) assert num_failures == 0
// ... existing code ... class TestJasmine: @mark.xfail def test_all(self, browser_igtest): // ... rest of the code ...
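`mark.xfail`, as used above, records a failing test without breaking the suite: a failure reports as `xfail`, an unexpected pass as `XPASS`, and the run stays green either way unless strict mode is enabled. A self-contained illustration:

```python
import pytest

@pytest.mark.xfail
def test_known_breakage():
    assert 1 == 2          # reported as xfail; the suite still passes

@pytest.mark.xfail(strict=True, reason="tracked upstream")
def test_strict_breakage():
    assert 1 == 2          # with strict=True an unexpected pass fails the run
```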
b7106307baf97ba32cb29fe2a4bb9ed925c194ca
custom/onse/management/commands/update_onse_facility_cases.py
custom/onse/management/commands/update_onse_facility_cases.py
from django.core.management import BaseCommand from custom.onse.tasks import update_facility_cases_from_dhis2_data_elements class Command(BaseCommand): help = ('Update facility_supervision cases with indicators collected ' 'in DHIS2 over the last quarter.') def handle(self, *args, **options): update_facility_cases_from_dhis2_data_elements.apply( print_notifications=True)
from django.core.management import BaseCommand from custom.onse.tasks import update_facility_cases_from_dhis2_data_elements class Command(BaseCommand): help = ('Update facility_supervision cases with indicators collected ' 'in DHIS2 over the last quarter.') def handle(self, *args, **options): update_facility_cases_from_dhis2_data_elements.apply(kwargs={ 'print_notifications': True})
Fix passing keyword arg to task
Fix passing keyword arg to task
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from django.core.management import BaseCommand from custom.onse.tasks import update_facility_cases_from_dhis2_data_elements class Command(BaseCommand): help = ('Update facility_supervision cases with indicators collected ' 'in DHIS2 over the last quarter.') def handle(self, *args, **options): - update_facility_cases_from_dhis2_data_elements.apply( + update_facility_cases_from_dhis2_data_elements.apply(kwargs={ - print_notifications=True) + 'print_notifications': True})
Fix passing keyword arg to task
## Code Before: from django.core.management import BaseCommand from custom.onse.tasks import update_facility_cases_from_dhis2_data_elements class Command(BaseCommand): help = ('Update facility_supervision cases with indicators collected ' 'in DHIS2 over the last quarter.') def handle(self, *args, **options): update_facility_cases_from_dhis2_data_elements.apply( print_notifications=True) ## Instruction: Fix passing keyword arg to task ## Code After: from django.core.management import BaseCommand from custom.onse.tasks import update_facility_cases_from_dhis2_data_elements class Command(BaseCommand): help = ('Update facility_supervision cases with indicators collected ' 'in DHIS2 over the last quarter.') def handle(self, *args, **options): update_facility_cases_from_dhis2_data_elements.apply(kwargs={ 'print_notifications': True})
# ... existing code ... def handle(self, *args, **options): update_facility_cases_from_dhis2_data_elements.apply(kwargs={ 'print_notifications': True}) # ... rest of the code ...
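The bug here is a Celery signature detail: unknown keyword arguments to `Task.apply()` are absorbed as execution options rather than forwarded to the task body, so `print_notifications=True` never reached the task; task keyword arguments must travel inside the `kwargs` dict. A minimal sketch with a demo task (not the onse task itself):

```python
from celery import Celery

app = Celery('demo')

@app.task
def update_cases(print_notifications=False):
    return 'verbose' if print_notifications else 'quiet'

# update_cases.apply(print_notifications=True)   # wrong: becomes an option
result = update_cases.apply(kwargs={'print_notifications': True})  # right
assert result.get() == 'verbose'    # apply() runs eagerly in-process
```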
1994a59d3ae9d3f24445f11f3bc0dd3089042bc4
main.py
main.py
from order import Order from orderbook import OrderBook from client import FinanceClient from ordermanager import OrderManager from strategy import Vanilla, Strawberry import sys # local server for finance data host_ip, server_port = "localhost", 9995 def main(): """ Turn on the FinanceServer - fetch data from the FinanceServer - parse out each order as an Order object - add these Orders to the OrderBook using the values in Action - for each added order, decide to trade indicated by signal """ strategy_choice = sys.argv[1] books = {} client = FinanceClient(host_ip, server_port) ordermanager = OrderManager() if strategy_choice == 'Vanilla': strategy = Vanilla() elif strategy_choice == 'Strawberry': strategy = Strawberry() else: print('strategies available: Vanilla or Strawberry') print(strategy.name, strategy.description) for line in client.fetch(): try: order = Order(line) book = books.get(order.symbol) if book is None: book = books[order.symbol] = OrderBook(order.symbol) book.add(order) bid, offer = book.display_book(output=True) ordermanager.signal(bid, offer, strategy.execute) except Exception as e: print(e) pass if __name__ == '__main__': main()
from order import Order from orderbook import OrderBook from client import FinanceClient from ordermanager import OrderManager from strategy import Vanilla, Strawberry import sys # local server for finance data host_ip, server_port = "localhost", 9995 def main(): """ Turn on the FinanceServer - fetch data from the FinanceServer - parse out each order as an Order object - add these Orders to the OrderBook using the values in Action - for each added order, decide to trade indicated by signal """ strategy_choice = sys.argv[1] books = {} client = FinanceClient(host_ip, server_port) ordermanager = OrderManager() if strategy_choice == 'Vanilla': strategy = Vanilla() elif strategy_choice == 'Strawberry': strategy = Strawberry() else: print('strategies available: Vanilla or Strawberry') print(strategy.name, strategy.description) for line in client.fetch(): try: order = Order(line) book = books.get(order.symbol) if book is None: book = books[order.symbol] = OrderBook(order.symbol) if order.action == 'A': book.add(order) elif order.side == 'M': book.modify(order) bid, offer = book.display_book(output=True) ordermanager.signal(bid, offer, strategy.execute) except Exception as e: print(e) pass if __name__ == '__main__': main()
Use modify with the orderbook
Use modify with the orderbook
Python
mit
albhu/finance
from order import Order from orderbook import OrderBook from client import FinanceClient from ordermanager import OrderManager from strategy import Vanilla, Strawberry import sys # local server for finance data host_ip, server_port = "localhost", 9995 def main(): """ Turn on the FinanceServer - fetch data from the FinanceServer - parse out each order as an Order object - add these Orders to the OrderBook using the values in Action - for each added order, decide to trade indicated by signal """ strategy_choice = sys.argv[1] books = {} client = FinanceClient(host_ip, server_port) ordermanager = OrderManager() if strategy_choice == 'Vanilla': strategy = Vanilla() elif strategy_choice == 'Strawberry': strategy = Strawberry() else: print('strategies available: Vanilla or Strawberry') - + print(strategy.name, strategy.description) for line in client.fetch(): try: order = Order(line) book = books.get(order.symbol) if book is None: book = books[order.symbol] = OrderBook(order.symbol) + if order.action == 'A': - book.add(order) + book.add(order) + elif order.side == 'M': + book.modify(order) bid, offer = book.display_book(output=True) ordermanager.signal(bid, offer, strategy.execute) except Exception as e: print(e) pass if __name__ == '__main__': main()
Use modify with the orderbook
## Code Before: from order import Order from orderbook import OrderBook from client import FinanceClient from ordermanager import OrderManager from strategy import Vanilla, Strawberry import sys # local server for finance data host_ip, server_port = "localhost", 9995 def main(): """ Turn on the FinanceServer - fetch data from the FinanceServer - parse out each order as an Order object - add these Orders to the OrderBook using the values in Action - for each added order, decide to trade indicated by signal """ strategy_choice = sys.argv[1] books = {} client = FinanceClient(host_ip, server_port) ordermanager = OrderManager() if strategy_choice == 'Vanilla': strategy = Vanilla() elif strategy_choice == 'Strawberry': strategy = Strawberry() else: print('strategies available: Vanilla or Strawberry') print(strategy.name, strategy.description) for line in client.fetch(): try: order = Order(line) book = books.get(order.symbol) if book is None: book = books[order.symbol] = OrderBook(order.symbol) book.add(order) bid, offer = book.display_book(output=True) ordermanager.signal(bid, offer, strategy.execute) except Exception as e: print(e) pass if __name__ == '__main__': main() ## Instruction: Use modify with the orderbook ## Code After: from order import Order from orderbook import OrderBook from client import FinanceClient from ordermanager import OrderManager from strategy import Vanilla, Strawberry import sys # local server for finance data host_ip, server_port = "localhost", 9995 def main(): """ Turn on the FinanceServer - fetch data from the FinanceServer - parse out each order as an Order object - add these Orders to the OrderBook using the values in Action - for each added order, decide to trade indicated by signal """ strategy_choice = sys.argv[1] books = {} client = FinanceClient(host_ip, server_port) ordermanager = OrderManager() if strategy_choice == 'Vanilla': strategy = Vanilla() elif strategy_choice == 'Strawberry': strategy = Strawberry() else: print('strategies available: Vanilla or Strawberry') print(strategy.name, strategy.description) for line in client.fetch(): try: order = Order(line) book = books.get(order.symbol) if book is None: book = books[order.symbol] = OrderBook(order.symbol) if order.action == 'A': book.add(order) elif order.side == 'M': book.modify(order) bid, offer = book.display_book(output=True) ordermanager.signal(bid, offer, strategy.execute) except Exception as e: print(e) pass if __name__ == '__main__': main()
... print('strategies available: Vanilla or Strawberry') print(strategy.name, strategy.description) ... book = books[order.symbol] = OrderBook(order.symbol) if order.action == 'A': book.add(order) elif order.side == 'M': book.modify(order) bid, offer = book.display_book(output=True) ...
23f95f0319c929006c89efdf0d113370a1a003b4
moa/factory_registers.py
moa/factory_registers.py
from kivy.factory import Factory r = Factory.register r('MoaStage', module='moa.stage.base') r('StageRender', module='moa.stage.base') r('Delay', module='moa.stage.delay') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple')
from kivy.factory import Factory r = Factory.register r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('StageRender', module='moa.stage.base') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple')
Update factory registers with stages.
Update factory registers with stages.
Python
mit
matham/moa
from kivy.factory import Factory r = Factory.register - r('MoaStage', module='moa.stage.base') + r('MoaStage', module='moa.stage') + r('Delay', module='moa.stage.delay') + r('GateStage', module='moa.stage.gate') + r('StageRender', module='moa.stage.base') - r('Delay', module='moa.stage.delay') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple')
Update factory registers with stages.
## Code Before: from kivy.factory import Factory r = Factory.register r('MoaStage', module='moa.stage.base') r('StageRender', module='moa.stage.base') r('Delay', module='moa.stage.delay') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple') ## Instruction: Update factory registers with stages. ## Code After: from kivy.factory import Factory r = Factory.register r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('StageRender', module='moa.stage.base') r('TreeRender', module='moa.render.treerender') r('TreeRenderExt', module='moa.render.treerender') r('StageTreeNode', module='moa.render.treerender') r('StageSimpleDisplay', module='moa.render.stage_simple')
... r = Factory.register r('MoaStage', module='moa.stage') r('Delay', module='moa.stage.delay') r('GateStage', module='moa.stage.gate') r('StageRender', module='moa.stage.base') r('TreeRender', module='moa.render.treerender') ...
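`Factory.register` is lazy: it records a name-to-module mapping and defers the import until the name is first resolved, which is why a register file like the one above can list every stage without importing all of them at startup. A small illustration (assumes Kivy and the moa package are installed):

```python
from kivy.factory import Factory

Factory.register('Delay', module='moa.stage.delay')

# Nothing is imported at registration time; resolving the attribute is
# what triggers `import moa.stage.delay` and returns the class object.
DelayStage = Factory.Delay
```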
2db6e8e294059847251feb9610c42180ae44e05b
fbone/appointment/views.py
fbone/appointment/views.py
from flask import (Blueprint, render_template, request, flash, url_for, redirect, session) from flask.ext.mail import Message from ..extensions import db, mail from .forms import MakeAppointmentForm from .models import Appointment appointment = Blueprint('appointment', __name__, url_prefix='/appointment') @appointment.route('/create', methods=['GET', 'POST']) def create(): form = MakeAppointmentForm(formdata=request.args, next=request.args.get('next')) # Dump all available data from request or session object to form fields. for key in form.data.keys(): setattr(getattr(form, key), 'data', request.args.get(key) or session.get(key)) if form.validate_on_submit(): appointment = Appointment() form.populate_obj(appointment) db.session.add(appointment) db.session.commit() flash_message = """ Congratulations! You've just made an appointment on WPIC Web Calendar system, please check your email for details. """ flash(flash_message) return redirect(url_for('appointment.create')) return render_template('appointment/create.html', form=form)
from datetime import datetime from flask import (Blueprint, render_template, request, abort, flash, url_for, redirect, session) from flask.ext.mail import Message from ..extensions import db, mail from .forms import MakeAppointmentForm from .models import Appointment appointment = Blueprint('appointment', __name__, url_prefix='/appointment') @appointment.route('/create', methods=['GET', 'POST']) def create(): if request.method == 'POST': form = MakeAppointmentForm(next=request.args.get('next')) if form.validate_on_submit(): appointment = Appointment() form.populate_obj(appointment) db.session.add(appointment) db.session.commit() flash_message = """ Congratulations! You've just made an appointment on WPIC Web Calendar system, please check your email for details. """ flash(flash_message) return redirect(url_for('appointment.create')) elif request.method == 'GET': form = MakeAppointmentForm(formdata=request.args, next=request.args.get('next')) # Dump all available data from request or session object to form # fields. for key in form.data.keys(): if key == "date": setattr(getattr(form, key), 'data', datetime.strptime(request.args.get(key) or session.get(key) or datetime.today().strftime('%Y-%m-%d'), "%Y-%m-%d")) else: setattr(getattr(form, key), 'data', request.args.get(key) or session.get(key)) return render_template('appointment/create.html', form=form) else: abort(405)
Fix some error about session.
Fix some error about session.
Python
bsd-3-clause
wpic/flask-appointment-calendar,wpic/flask-appointment-calendar
+ from datetime import datetime + - from flask import (Blueprint, render_template, request, + from flask import (Blueprint, render_template, request, abort, flash, url_for, redirect, session) from flask.ext.mail import Message from ..extensions import db, mail from .forms import MakeAppointmentForm from .models import Appointment appointment = Blueprint('appointment', __name__, url_prefix='/appointment') @appointment.route('/create', methods=['GET', 'POST']) def create(): + if request.method == 'POST': - form = MakeAppointmentForm(formdata=request.args, + form = MakeAppointmentForm(next=request.args.get('next')) - next=request.args.get('next')) + if form.validate_on_submit(): + appointment = Appointment() + form.populate_obj(appointment) - # Dump all available data from request or session object to form fields. - for key in form.data.keys(): - setattr(getattr(form, key), 'data', - request.args.get(key) or session.get(key)) + db.session.add(appointment) + db.session.commit() - if form.validate_on_submit(): - appointment = Appointment() - form.populate_obj(appointment) - db.session.add(appointment) - db.session.commit() + flash_message = """ + Congratulations! You've just made an appointment + on WPIC Web Calendar system, please check your email for details. + """ + flash(flash_message) + return redirect(url_for('appointment.create')) - flash_message = """ - Congratulations! You've just made an appointment - on WPIC Web Calendar system, please check your email for details. - """ - flash(flash_message) - return redirect(url_for('appointment.create')) + elif request.method == 'GET': + form = MakeAppointmentForm(formdata=request.args, + next=request.args.get('next')) + # Dump all available data from request or session object to form + # fields. + for key in form.data.keys(): + if key == "date": + setattr(getattr(form, key), 'data', + datetime.strptime(request.args.get(key) or + session.get(key) or + datetime.today().strftime('%Y-%m-%d'), + "%Y-%m-%d")) + else: + setattr(getattr(form, key), 'data', + request.args.get(key) or session.get(key)) - return render_template('appointment/create.html', form=form) + return render_template('appointment/create.html', form=form) + else: + abort(405) +
Fix some error about session.
## Code Before: from flask import (Blueprint, render_template, request, flash, url_for, redirect, session) from flask.ext.mail import Message from ..extensions import db, mail from .forms import MakeAppointmentForm from .models import Appointment appointment = Blueprint('appointment', __name__, url_prefix='/appointment') @appointment.route('/create', methods=['GET', 'POST']) def create(): form = MakeAppointmentForm(formdata=request.args, next=request.args.get('next')) # Dump all available data from request or session object to form fields. for key in form.data.keys(): setattr(getattr(form, key), 'data', request.args.get(key) or session.get(key)) if form.validate_on_submit(): appointment = Appointment() form.populate_obj(appointment) db.session.add(appointment) db.session.commit() flash_message = """ Congratulations! You've just made an appointment on WPIC Web Calendar system, please check your email for details. """ flash(flash_message) return redirect(url_for('appointment.create')) return render_template('appointment/create.html', form=form) ## Instruction: Fix some error about session. ## Code After: from datetime import datetime from flask import (Blueprint, render_template, request, abort, flash, url_for, redirect, session) from flask.ext.mail import Message from ..extensions import db, mail from .forms import MakeAppointmentForm from .models import Appointment appointment = Blueprint('appointment', __name__, url_prefix='/appointment') @appointment.route('/create', methods=['GET', 'POST']) def create(): if request.method == 'POST': form = MakeAppointmentForm(next=request.args.get('next')) if form.validate_on_submit(): appointment = Appointment() form.populate_obj(appointment) db.session.add(appointment) db.session.commit() flash_message = """ Congratulations! You've just made an appointment on WPIC Web Calendar system, please check your email for details. """ flash(flash_message) return redirect(url_for('appointment.create')) elif request.method == 'GET': form = MakeAppointmentForm(formdata=request.args, next=request.args.get('next')) # Dump all available data from request or session object to form # fields. for key in form.data.keys(): if key == "date": setattr(getattr(form, key), 'data', datetime.strptime(request.args.get(key) or session.get(key) or datetime.today().strftime('%Y-%m-%d'), "%Y-%m-%d")) else: setattr(getattr(form, key), 'data', request.args.get(key) or session.get(key)) return render_template('appointment/create.html', form=form) else: abort(405)
// ... existing code ... from datetime import datetime from flask import (Blueprint, render_template, request, abort, flash, url_for, redirect, session) // ... modified code ... def create(): if request.method == 'POST': form = MakeAppointmentForm(next=request.args.get('next')) if form.validate_on_submit(): appointment = Appointment() form.populate_obj(appointment) db.session.add(appointment) db.session.commit() flash_message = """ Congratulations! You've just made an appointment on WPIC Web Calendar system, please check your email for details. """ flash(flash_message) return redirect(url_for('appointment.create')) elif request.method == 'GET': form = MakeAppointmentForm(formdata=request.args, next=request.args.get('next')) # Dump all available data from request or session object to form # fields. for key in form.data.keys(): if key == "date": setattr(getattr(form, key), 'data', datetime.strptime(request.args.get(key) or session.get(key) or datetime.today().strftime('%Y-%m-%d'), "%Y-%m-%d")) else: setattr(getattr(form, key), 'data', request.args.get(key) or session.get(key)) return render_template('appointment/create.html', form=form) else: abort(405) // ... rest of the code ...
a73cc6d6ad8460d492b29db60df2c0e8eaff932e
openerp_conventions.py
openerp_conventions.py
"""OpenERP community addons standard plugin for flake8""" from __future__ import absolute_import import common_checker from common_checker.base_checker import BaseChecker # When OpenERP version 8 API will be frozen # We wille be able to do version toggle here import v7 __version__ = '0.0.1' class OpenERPConventionsChecker(object): """Check OpenERP conventions It will call the function 'visit(root_node)' for all checker instances registered in BaseCheckerMeta """ name = 'OpenERP convention' version = __version__ def __init__(self, tree, filename): """Constructor :param tree: root ast.node of current module :param filename: current module filename """ self.tree = tree if tree else () self.filename = filename self.checks = BaseChecker._checks def run(self): """Run the checks""" return self.check_tree(self.tree) def check_tree(self, tree_root): """Apply all checks registered in BaseCheckerMeta on root ast.node :param tree_root: Root ast node of the namespace :returns: yeld list of errors codes """ for check in self.checks: check.visit(tree_root) for error in check.errors: yield error
"""OpenERP community addons standard plugin for flake8""" from __future__ import absolute_import import common_checker from common_checker.base_checker import BaseChecker # When OpenERP version 8 API will be frozen # We wille be able to do version toggle here import v7 __version__ = '0.0.1' class OpenERPConventionsChecker(object): """Check OpenERP conventions It will call the function 'visit(root_node)' for all checker instances registered in BaseCheckerMeta """ name = 'OpenERP convention' version = __version__ def __init__(self, tree, filename): """Constructor :param tree: root ast.node of current module :param filename: current module filename """ self.tree = tree if tree else () self.filename = filename self.checks = BaseChecker._checks def run(self): """Run the checks""" return self.check_tree(self.tree) def check_tree(self, tree_root): """Apply all checks registered in BaseCheckerMeta on root ast.node :param tree_root: Root ast node of the namespace :returns: yeld list of errors codes """ for check in self.checks: check.set_filename(self.filename) check.visit(tree_root) for error in check.errors: yield error
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter
Python
mit
nbessi/openerp-conventions
"""OpenERP community addons standard plugin for flake8""" from __future__ import absolute_import import common_checker from common_checker.base_checker import BaseChecker # When OpenERP version 8 API will be frozen # We wille be able to do version toggle here import v7 __version__ = '0.0.1' class OpenERPConventionsChecker(object): """Check OpenERP conventions It will call the function 'visit(root_node)' for all checker instances registered in BaseCheckerMeta """ name = 'OpenERP convention' version = __version__ def __init__(self, tree, filename): """Constructor :param tree: root ast.node of current module :param filename: current module filename """ self.tree = tree if tree else () self.filename = filename self.checks = BaseChecker._checks def run(self): """Run the checks""" return self.check_tree(self.tree) def check_tree(self, tree_root): """Apply all checks registered in BaseCheckerMeta on root ast.node :param tree_root: Root ast node of the namespace :returns: yeld list of errors codes """ for check in self.checks: + check.set_filename(self.filename) check.visit(tree_root) for error in check.errors: yield error
Improve BaseChecker class by using __metaclass__ keyword + add a filename setter
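A self-contained sketch of the checker pattern this change relies on: the filename is pushed into each checker before visit() runs, so errors can be reported against the right file. DummyChecker is hypothetical and only mirrors the shape of the real BaseChecker API:

class DummyChecker:
    def __init__(self):
        self.errors = []
        self.filename = None

    def set_filename(self, filename):
        self.filename = filename

    def visit(self, tree):
        # A real checker walks the ast here; we just record a fake error.
        self.errors.append((self.filename, 1, 'W000 example'))

check = DummyChecker()
check.set_filename('addon/models.py')
check.visit(None)
print(check.errors)  # [('addon/models.py', 1, 'W000 example')]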
fb25fa04cf553b1084425a1f2af6a9315266ffaf
salt/renderers/yaml_jinja.py
salt/renderers/yaml_jinja.py
'''
The default rendering engine, process yaml with the jinja2
templating engine

This renderer will take a yaml file with the jinja2 template and render it to
a high data format for salt states.
'''

# Import Python Modules
import os

# Import thirt party modules
import yaml
try:
    yaml.Loader = yaml.CLoader
    yaml.Dumper = yaml.CDumper
except:
    pass

# Import Salt libs
from salt.utils.jinja import get_template


def render(template_file, env='', sls=''):
    '''
    Render the data passing the functions and grains into the rendering system
    '''
    if not os.path.isfile(template_file):
        return {}

    passthrough = {}
    passthrough['salt'] = __salt__
    passthrough['grains'] = __grains__
    passthrough['env'] = env
    passthrough['sls'] = sls

    template = get_template(template_file, __opts__, env)

    yaml_data = template.render(**passthrough)

    return yaml.safe_load(yaml_data)

'''
The default rendering engine, process yaml with the jinja2
templating engine

This renderer will take a yaml file with the jinja2 template and render it to
a high data format for salt states.
'''

# Import Python Modules
import os

# Import thirt party modules
import yaml
try:
    yaml.Loader = yaml.CLoader
    yaml.Dumper = yaml.CDumper
except:
    pass

# Import Salt libs
from salt.utils.jinja import get_template


def render(template_file, env='', sls=''):
    '''
    Render the data passing the functions and grains into the rendering system
    '''
    if not os.path.isfile(template_file):
        return {}

    passthrough = {}
    passthrough['salt'] = __salt__
    passthrough['grains'] = __grains__
    passthrough['pillar'] = __pillar__
    passthrough['env'] = env
    passthrough['sls'] = sls

    template = get_template(template_file, __opts__, env)

    yaml_data = template.render(**passthrough)

    return yaml.safe_load(yaml_data)
Add pillar data to default renderer
Add pillar data to default renderer
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
Add pillar data to default renderer
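The effect of the added line is that templates can reference pillar data directly. A standalone jinja2 illustration of the passthrough mechanism, with made-up grains and pillar values:

from jinja2 import Template

passthrough = {'grains': {'os': 'Debian'}, 'pillar': {'role': 'web'}}
tmpl = Template("role: {{ pillar['role'] }} on {{ grains['os'] }}")
print(tmpl.render(**passthrough))  # role: web on Debian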
28126555aea9a78467dfcadbb2b14f9c640cdc6d
dwitter/templatetags/to_gravatar_url.py
dwitter/templatetags/to_gravatar_url.py
import hashlib

from django import template

register = template.Library()


@register.filter
def to_gravatar_url(email):
    return ('https://gravatar.com/avatar/%s?d=retro' %
            hashlib.md5((email or '').strip().lower()).hexdigest())

import hashlib

from django import template

register = template.Library()


@register.filter
def to_gravatar_url(email):
    return ('https://gravatar.com/avatar/%s?d=retro' %
            hashlib.md5((email or '').strip().lower().encode('utf-8')).hexdigest())
Fix gravatar hashing error on py3
Fix gravatar hashing error on py3
Python
apache-2.0
lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter
Fix gravatar hashing error on py3
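The underlying issue: on Python 3, hashlib.md5 accepts only bytes, so the string must be encoded first. A runnable one-file demonstration of the fixed expression:

import hashlib

email = 'newvisitor@example.com'
# hashlib.md5(email) would raise TypeError on Python 3; encoding fixes it.
digest = hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest()
print('https://gravatar.com/avatar/%s?d=retro' % digest)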
66602e67c06266735b58fd2bee8b55b7cac401b1
archive/archive_report_ingest_status/src/test_archive_report_ingest_status.py
archive/archive_report_ingest_status/src/test_archive_report_ingest_status.py
import uuid

import archive_report_ingest_status as report_ingest_status


def test_get_returns_status(dynamodb_resource, table_name):
    guid = str(uuid.uuid4())

    table = dynamodb_resource.Table(table_name)
    table.put_item(Item={'id': guid})

    event = {
        'request_method': 'GET',
        'id': guid
    }

    response = report_ingest_status.main(
        event=event,
        dynamodb_resource=dynamodb_resource
    )
    assert response['id'] == guid


def test_get_includes_other_dynamodb_metadata(dynamodb_resource, table_name):
    guid = str(uuid.uuid4())
    item = {'id': guid, 'fooKey': 'barValue'}

    table = dynamodb_resource.Table(table_name)
    table.put_item(Item=item)

    event = {
        'request_method': 'GET',
        'id': guid
    }

    response = report_ingest_status.main(
        event=event,
        dynamodb_resource=dynamodb_resource
    )
    assert response == item

import uuid

import pytest

import archive_report_ingest_status as report_ingest_status


def test_get_returns_status(dynamodb_resource, table_name):
    guid = str(uuid.uuid4())

    table = dynamodb_resource.Table(table_name)
    table.put_item(Item={'id': guid})

    event = {
        'request_method': 'GET',
        'id': guid
    }

    response = report_ingest_status.main(
        event=event,
        dynamodb_resource=dynamodb_resource
    )
    assert response['id'] == guid


def test_get_includes_other_dynamodb_metadata(dynamodb_resource, table_name):
    guid = str(uuid.uuid4())
    item = {'id': guid, 'fooKey': 'barValue'}

    table = dynamodb_resource.Table(table_name)
    table.put_item(Item=item)

    event = {
        'request_method': 'GET',
        'id': guid
    }

    response = report_ingest_status.main(
        event=event,
        dynamodb_resource=dynamodb_resource
    )
    assert response == item


def test_fails_if_called_with_post_event():
    event = {
        'request_method': 'POST'
    }

    with pytest.raises(AssertionError, match='Expected request_method=GET'):
        report_ingest_status.main(event=event)
Add a test that a non-GET method is rejected
Add a test that a non-GET method is rejected
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
Add a test that a non-GET method is rejected
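The pytest.raises(..., match=...) pattern in the new test is worth seeing in isolation; here the Lambda handler is reduced to a toy main() that enforces the same assertion:

import pytest

def main(event):
    assert event['request_method'] == 'GET', 'Expected request_method=GET'
    return {'status': 'ok'}

def test_rejects_post():
    # match= is a regex searched against the exception message.
    with pytest.raises(AssertionError, match='Expected request_method=GET'):
        main({'request_method': 'POST'})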
cbdc24aeef9ffbd8e7400ab43112409509b3337d
reviewday/util.py
reviewday/util.py
import os
import shutil
import html_helper

from Cheetah.Template import Template


def prep_out_dir(out_dir='out_report'):
    src_dir = os.path.dirname(__file__)
    report_files_dir = os.path.join(src_dir, 'report_files')
    if os.path.exists(out_dir):
        print 'WARNING: output directory "%s" already exists' % out_dir
    else:
        shutil.copytree(report_files_dir, out_dir)


def create_report(name_space={}):
    filename = os.path.join(os.path.dirname(__file__), 'report.html')
    report_text = open(filename).read()
    name_space['helper'] = html_helper
    t = Template(report_text, searchList=[name_space])
    out_dir = 'out_report'
    prep_out_dir(out_dir)
    out_file = open(os.path.join(out_dir, 'index.html'), "w")
    out_file.write(str(t))
    out_file.close()

import os
import html_helper

from Cheetah.Template import Template
from distutils.dir_util import copy_tree


def prep_out_dir(out_dir='out_report'):
    src_dir = os.path.dirname(__file__)
    report_files_dir = os.path.join(src_dir, 'report_files')
    copy_tree(report_files_dir, out_dir)


def create_report(name_space={}):
    filename = os.path.join(os.path.dirname(__file__), 'report.html')
    report_text = open(filename).read()
    name_space['helper'] = html_helper
    t = Template(report_text, searchList=[name_space])
    out_dir = 'out_report'
    prep_out_dir(out_dir)
    out_file = open(os.path.join(out_dir, 'index.html'), "w")
    out_file.write(str(t))
    out_file.close()
Remove warning for existing output directory.
Remove warning for existing output directory.

In our configuration puppet will manage the output directory, so it is
expected behavior for it to exist, removing warning. Also switching to
distutils.dir_util copy_tree since that allows for copying of required
supporting files into an existing output directory.

Change-Id: I38b2c6ec47fd61814554a4b5007a83553b05aeb2
Reviewed-on: https://review.openstack.org/20647
Approved: Dan Prince <62cccc0004df0a8e3c343b26b69e434f6aa9711c@redhat.com>
Reviewed-by: Dan Prince <62cccc0004df0a8e3c343b26b69e434f6aa9711c@redhat.com>
Tested-by: Jenkins
Python
mit
openstack-infra/reviewday,openstack-infra/reviewday,dprince/reviewday
Remove warning for existing output directory.
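The behavioral difference that motivates this change can be shown with temporary directories; unlike shutil.copytree, copy_tree tolerates an existing target:

import os
import tempfile
from distutils.dir_util import copy_tree

src = tempfile.mkdtemp()
dst = tempfile.mkdtemp()                            # destination already exists
open(os.path.join(src, 'style.css'), 'w').close()
copy_tree(src, dst)                                 # succeeds anyway
print(os.listdir(dst))                              # ['style.css']

Note that distutils was removed in Python 3.12; on Python 3.8 or later, shutil.copytree(src, dst, dirs_exist_ok=True) gives the same behavior without the deprecated module.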
5425c2419b7365969ea8b211432858d599214201
tests/test_archive.py
tests/test_archive.py
from json import load

from django_archive import __version__

from .base import BaseArchiveTestCase
from .sample.models import Sample


class ArchiveTestCase(BaseArchiveTestCase):
    """
    Test that the archive command includes correct data in the archive
    """

    def setUp(self):
        Sample().save()
        super().setUp()

    def test_data(self):
        """
        Confirm that the model was archived
        """
        with self.tarfile.extractfile('data.json') as fileobj:
            data = load(fileobj)
            self.assertEqual(len(data), 1)
            self.assertEqual(data[0]['model'], 'sample.sample')

    def test_meta(self):
        """
        Confirm that meta information is present
        """
        with self.tarfile.extractfile('meta.json') as fileobj:
            data = load(fileobj)
            self.assertEqual(data['version'], __version__)

from json import load

from django.core.files.base import ContentFile

from django_archive import __version__

from .base import BaseArchiveTestCase
from .sample.models import Sample


class ArchiveTestCase(BaseArchiveTestCase):
    """
    Test that the archive command includes correct data in the archive
    """

    _ATTACHMENT_FILENAME = 'sample.txt'
    _ATTACHMENT_CONTENT = b'sample'

    def setUp(self):
        sample = Sample()
        sample.attachment.save(
            self._ATTACHMENT_FILENAME,
            ContentFile(self._ATTACHMENT_CONTENT),
        )
        super().setUp()

    def test_data(self):
        """
        Confirm that the model and attached files were archived
        """
        with self.tarfile.extractfile('data.json') as fileobj:
            data = load(fileobj)
        self.assertEqual(len(data), 1)
        self.assertEqual(data[0]['model'], 'sample.sample')
        with self.tarfile.extractfile(data[0]['fields']['attachment']) as fileobj:
            content = fileobj.read()
        self.assertEqual(content, self._ATTACHMENT_CONTENT)

    def test_meta(self):
        """
        Confirm that meta information is present
        """
        with self.tarfile.extractfile('meta.json') as fileobj:
            data = load(fileobj)
            self.assertEqual(data['version'], __version__)
Update test to ensure attached files are present in archives.
Update test to ensure attached files are present in archives.
Python
mit
nathan-osman/django-archive,nathan-osman/django-archive
Update test to ensure attached files are present in archives.
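The tarfile/json access pattern the test depends on works the same outside Django; a self-contained sketch that builds an in-memory archive and reads it back the way the test does (the payload is made up):

import io
import json
import tarfile

buf = io.BytesIO()
payload = json.dumps(
    [{'model': 'sample.sample', 'fields': {'attachment': 'sample.txt'}}]
).encode()
with tarfile.open(fileobj=buf, mode='w') as tar:
    info = tarfile.TarInfo('data.json')
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))

buf.seek(0)
with tarfile.open(fileobj=buf) as tar:
    with tar.extractfile('data.json') as fileobj:
        data = json.load(fileobj)
print(data[0]['fields']['attachment'])  # sample.txt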
94a944b01953ed75bfbefbd11ed62ca438cd9200
accounts/tests/test_models.py
accounts/tests/test_models.py
from django.test import TestCase
from django.contrib.auth import get_user_model


USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'


class UserModelTest(TestCase):
    """Tests for passwordless user model.

    """
    def test_user_valid_with_only_email(self):
        """Should not raise if the user model is happy with email only.

        """
        user = USER(email=TEST_EMAIL)
        user.full_clean()

    def test_users_are_authenticated(self):
        """User objects should be authenticated for views/templates.

        """
        user = USER()
        self.assertTrue(user.is_authenticated)

from django.test import TestCase
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError


USER = get_user_model()
TEST_EMAIL = 'newvisitor@example.com'


class UserModelTest(TestCase):
    """Tests for passwordless user model.

    """
    def test_user_valid_with_only_email(self):
        """Should not raise if the user model is happy with email only.

        """
        user = USER(email=TEST_EMAIL)
        user.full_clean()

    def test_user_invalid_without_email(self):
        """Should raise if the user model requires an email.

        """
        with self.assertRaises(ValidationError):
            user = USER()
            user.full_clean()

    def test_users_are_authenticated(self):
        """User objects should be authenticated for views/templates.

        """
        user = USER()
        self.assertTrue(user.is_authenticated)
Add test for unsupplied email for user model
Add test for unsupplied email for user model
Python
mit
randomic/aniauth-tdd,randomic/aniauth-tdd
Add test for unsupplied email for user model
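The assertRaises pattern generalizes beyond Django; a runnable stand-in where a plain ValueError takes the place of Django's ValidationError (FakeUser is hypothetical):

import unittest

class FakeUser:
    def __init__(self, email=''):
        self.email = email

    def full_clean(self):
        # Stands in for Django's field validation.
        if not self.email:
            raise ValueError('This field cannot be blank.')

class Demo(unittest.TestCase):
    def test_invalid_without_email(self):
        with self.assertRaises(ValueError):
            FakeUser().full_clean()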
abefbbc99e7e62bed31db549519807feee7254f9
tests/test_machine.py
tests/test_machine.py
import rml.machines


def test_machine_load_elements():
    lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM')
    assert len(lattice) == 173

import rml.machines


def test_machine_load_elements():
    lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM')
    assert len(lattice) == 173
    for element in lattice.get_elements():
        assert element.get_pv_name('readback')
Test to get different pv names for an element
Test to get different pv names for an element
Python
apache-2.0
razvanvasile/RML,willrogers/pml,willrogers/pml
Test to get different pv names for an element
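A sketch of the element interface the test exercises, where each handle ('readback', 'setpoint') maps to a distinct process-variable name; the class and PV strings here are invented for illustration:

class Element:
    def __init__(self, pvs):
        self._pvs = pvs

    def get_pv_name(self, handle):
        return self._pvs[handle]

bpm = Element({'readback': 'SR01C-DI-EBPM-01:SA:X',
               'setpoint': 'SR01C-DI-EBPM-01:SP:X'})
assert bpm.get_pv_name('readback') == 'SR01C-DI-EBPM-01:SA:X'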
5a4ff0b4da37a97e6ef86074dde63b47ba553ad8
test/typing.py
test/typing.py
from stella import stella
from random import randint

from test import *


def return_bool():
    return True


def return_arg(x):
    return x


def equality(a,b):
    return a==b


def test1():
    make_eq_test(return_bool, ())


@mark.parametrize('arg', single_args([True, False, 0, 1, 42.0, -42.5]))
def test2(arg):
    make_eq_test(return_arg, arg)


@mark.parametrize('args', [(True, True), (1,1), (42.0, 42.0), (1,2), (2.0, -2.0), (True, False), (randint(0, 10000000), randint(-10000 , 1000000))])
def test3(args):
    make_eq_test(equality, args)


if __name__ == '__main__':
    print(stella(return_bool)())

from stella import stella
from random import randint

from test import *


def return_bool():
    return True


def return_arg(x):
    return x


def equality(a,b):
    return a==b


def test1():
    make_eq_test(return_bool, ())


@mark.parametrize('arg', single_args([True, False, 0, 1, 42.0, -42.5]))
def test2(arg):
    make_eq_test(return_arg, arg)


@mark.parametrize('args', [(True, True), (1,1), (42.0, 42.0), (1,2), (2.0, -2.0), (True, False), (randint(0, 10000000), randint(-10000 , 1000000))])
def test3(args):
    make_eq_test(equality, args)


@mark.parametrize('args', [(False, 1), (42.0, True), (1, 1.0), (randint(0, 10000000), float(randint(-10000 , 1000000)))])
@mark.xfail()
def test3fail(args):
    make_eq_test(equality, args)


if __name__ == '__main__':
    print(stella(return_bool)())
Add a test that automatic type promotions are not allowed.
Add a test that automatic type promotions are not allowed.
Python
apache-2.0
squisher/stella,squisher/stella,squisher/stella,squisher/stella
Add a test that automatic type promotions are not allowed.
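The xfail-plus-parametrize idiom used above can be demonstrated without stella at all; a plain type check stands in for stella's stricter equality handling:

import pytest

@pytest.mark.xfail()
@pytest.mark.parametrize('args', [(False, 1), (42.0, True), (1, 1.0)])
def test_mixed_types_rejected(args):
    a, b = args
    # bool/int, float/bool and int/float pairs all fail this check,
    # so each case is an expected failure.
    assert type(a) is type(b)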
5bbed41d8150f6d0657f1a7670b449619f3ba0f7
promgen/util.py
promgen/util.py
import requests

from promgen.version import __version__


def post(url, *args, **kwargs):
    '''Wraps requests.post with our user-agent'''
    if 'headers' not in kwargs:
        kwargs['headers'] = {}
    kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)

    return requests.post(url, *args, **kwargs)


def get(url, *args, **kwargs):
    '''Wraps requests.post with our user-agent'''
    if 'headers' not in kwargs:
        kwargs['headers'] = {}
    kwargs['headers']['user-agent'] = 'promgen/{}'.format(__version__)

    return requests.get(url, *args, **kwargs)

import requests.sessions

from promgen.version import __version__


def post(url, **kwargs):
    with requests.sessions.Session() as session:
        session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
        return session.post(url, **kwargs)


def get(url, **kwargs):
    with requests.sessions.Session() as session:
        session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
        return session.get(url, **kwargs)
Copy the pattern from requests.api to use a slightly more stable API
Copy the pattern from requests.api to use a slightly more stable API
Python
mit
kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen
Copy the pattern from requests.api to use a slightly more stable API
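A session-level User-Agent merges cleanly with per-call headers, which the old dict-poking version could clobber. This can be verified offline via prepare_request (the version string is made up):

import requests

with requests.sessions.Session() as session:
    session.headers['User-Agent'] = 'promgen/0.0.0'
    req = requests.Request('GET', 'http://example.com',
                           headers={'X-Extra': '1'})
    prepared = session.prepare_request(req)
    print(prepared.headers['User-Agent'])  # promgen/0.0.0
    print(prepared.headers['X-Extra'])     # 1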
623c56c14aa1d1c47b081f607701323d00903dc9
gather/topic/api.py
gather/topic/api.py
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply

bp = api_manager.create_api_blueprint(
    Topic,
    methods=["GET", "POST"],
    preprocessors={
        'POST': [need_auth],
    },
    include_methods=["have_read"],
    exclude_columns=EXCLUDE_COLUMNS
)


def _update_topic_updated(result=None, **kw):
    if not result:
        return
    reply = Reply.query.get(result["id"])
    reply.topic.updated = reply.created
    reply.topic.clear_read()
    reply.topic.save()


reply_bp = api_manager.create_api_blueprint(
    Reply,
    methods=["POST"],
    preprocessors={
        'POST': [need_auth],
    },
    postprocessors={
        'POST': [_update_topic_updated]
    },
    exclude_columns=EXCLUDE_COLUMNS
)

from flask import g, jsonify
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager
from gather.topic.models import Topic, Reply

bp = api_manager.create_api_blueprint(
    Topic,
    methods=["GET", "POST"],
    preprocessors={
        'POST': [need_auth],
    },
    include_methods=["have_read"],
    exclude_columns=EXCLUDE_COLUMNS
)


@bp.route("/topic/<int:topic_id>/mark_read")
def _mark_read_for_topic(topic_id):
    need_auth()
    topic = Topic.query.get_or_404(topic_id)
    topic.mark_read(g.token_user)
    return jsonify({"code": 200})


def _update_topic_updated(result=None, **kw):
    if not result:
        return
    reply = Reply.query.get(result["id"])
    reply.topic.updated = reply.created
    reply.topic.clear_read()
    reply.topic.save()


reply_bp = api_manager.create_api_blueprint(
    Reply,
    methods=["POST"],
    preprocessors={
        'POST': [need_auth],
    },
    postprocessors={
        'POST': [_update_topic_updated]
    },
    exclude_columns=EXCLUDE_COLUMNS
)
Add API to mark topic as reader
Add API to mark topic as reader
Python
mit
whtsky/Gather,whtsky/Gather
Add API to mark topic as reader
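The new endpoint is a plain Flask view on a converter-typed route; stripped of auth and the Topic lookup, the routing and JSON response can be exercised with the test client:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/topic/<int:topic_id>/mark_read')
def mark_read(topic_id):
    # need_auth() and the database lookup are stubbed out in this sketch.
    return jsonify({'code': 200})

print(app.test_client().get('/topic/1/mark_read').get_json())  # {'code': 200}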
f87b8c5b94e3e163f19ea0414d1fb2c42f09c166
test/test_genmidi.py
test/test_genmidi.py
import unittest
import tempfile
from pyknon.MidiFile import MIDIFile
from pyknon.genmidi import Midi, MidiError
from pyknon.music import NoteSeq, Note


class TestMidi(unittest.TestCase):
    def test_init(self):
        midi = Midi(1, tempo=120)
        self.assertEqual(midi.number_tracks, 1)
        self.assertIsInstance(midi.midi_data, MIDIFile)

    def test_seq_notes_with_more_tracks_than_exists(self):
        midi = Midi(1)
        with self.assertRaises(MidiError):
            midi.seq_notes(NoteSeq("C D"), track=0)
            midi.seq_notes(NoteSeq("D E"), track=1)

    def test_seq_notes(self):
        midi = Midi(2)
        midi.seq_notes(NoteSeq("C D"), track=0)
        midi.seq_notes(NoteSeq("D E"), track=1)


class TestWriteMidi(unittest.TestCase):
    def test_write_midifile(self):
        notes1 = NoteSeq("D4 F#8 R A")
        midi = Midi(1, tempo=133)
        midi.seq_notes(notes1, track=0)
        midi.write(tempfile.TemporaryFile())

import unittest
import tempfile
from pyknon.MidiFile import MIDIFile
from pyknon.genmidi import Midi, MidiError
from pyknon.music import NoteSeq, Note


class TestMidi(unittest.TestCase):
    def test_init(self):
        midi = Midi(1, tempo=120)
        self.assertEqual(midi.number_tracks, 1)
        self.assertIsInstance(midi.midi_data, MIDIFile)

    def test_seq_notes_with_more_tracks_than_exists(self):
        midi = Midi(1)
        with self.assertRaises(MidiError):
            midi.seq_notes(NoteSeq("C D"), track=0)
            midi.seq_notes(NoteSeq("D E"), track=1)

    def test_seq_notes(self):
        midi = Midi(2)
        midi.seq_notes(NoteSeq("C D"), track=0)
        midi.seq_notes(NoteSeq("D E"), track=1)

    def test_seq_chords(self):
        chords = [NoteSeq("C E G"), NoteSeq("G B D")]
        midi = Midi()
        midi.seq_chords(chords)


class TestWriteMidi(unittest.TestCase):
    def test_write_midifile(self):
        notes1 = NoteSeq("D4 F#8 R A")
        midi = Midi(1, tempo=133)
        midi.seq_notes(notes1, track=0)
        midi.write(tempfile.TemporaryFile())
Test for sequence of chords
Test for sequence of chords
Python
mit
palmerev/pyknon,kroger/pyknon
Test for sequence of chords
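Putting the new method together with the existing write path gives a small end-to-end usage sketch; only calls already shown in this record are used, so treat it as illustrative rather than canonical pyknon usage:

import tempfile
from pyknon.music import NoteSeq
from pyknon.genmidi import Midi

chords = [NoteSeq('C E G'), NoteSeq('G B D')]   # C major, G major triads
midi = Midi()
midi.seq_chords(chords)
midi.write(tempfile.TemporaryFile())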
f4286480f0fa157eb1b88b144ee57ffef7d1fc03
barython/tests/hooks/test_bspwm.py
barython/tests/hooks/test_bspwm.py
from collections import OrderedDict

import pytest

from barython.hooks.bspwm import BspwmHook


def test_bspwm_hook_parse_event():
    bh = BspwmHook()
    status = ("WmHDMI-0:Ou:LT:MDVI-D-0:fo:f7:fDesktop2:os:Of:fp:oq:fi:LT:"
              "mDVI-I-0:Od:LT")
    expected = OrderedDict([
        ('HDMI-0', {'desktops': ['Ou'], 'focused': False, 'layout': 'T'}),
        ('DVI-D-0', {
            'desktops': ['fo', 'f7', 'fDesktop2', 'os', 'Of', 'fp', 'oq',
                         'fi'],
            'focused': True, 'layout': 'T'
        }),
        ('DVI-I-0', {'desktops': ['Od'], 'focused': False, 'layout': 'T'})
    ])
    assert expected == bh.parse_event(status)

from collections import OrderedDict

import pytest

from barython.hooks.bspwm import BspwmHook


def test_bspwm_hook_parse_event():
    bh = BspwmHook()
    status = ("WmHDMI-0:Ou:LT:MDVI-D-0:fo:f7:fDesktop2:os:Of:fp:oq:fi:LT:"
              "mDVI-I-0:Od:LT")
    expected = OrderedDict([
        ('HDMI-0', {'desktops': ['Ou'], 'focused': False, 'layout': 'T'}),
        ('DVI-D-0', {
            'desktops': ['fo', 'f7', 'fDesktop2', 'os', 'Of', 'fp', 'oq',
                         'fi'],
            'focused': True, 'layout': 'T'
        }),
        ('DVI-I-0', {'desktops': ['Od'], 'focused': False, 'layout': 'T'})
    ])
    assert expected == bh.parse_event(status)["monitors"]
Add test for bspwm widget
Add test for bspwm widget
Python
bsd-3-clause
Anthony25/barython
Add test for bspwm widget
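The one-line change implies parse_event now nests the per-monitor mapping under a "monitors" key. The shape, reduced to literals (any sibling top-level keys are not visible from this record):

event = {
    'monitors': {
        'HDMI-0': {'desktops': ['Ou'], 'focused': False, 'layout': 'T'},
    },
}
assert event['monitors']['HDMI-0']['layout'] == 'T'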
73d59df8b94f72e83b978c00518afa01967faac9
mle/test_package.py
mle/test_package.py
def test_distribution():
    from mle import Normal, var, par
    import theano.tensor as T

    x = var('x')
    mu = par('mu')
    sigma = par('sigma')
    dist = Normal(x, mu, sigma)

    assert(len(dist.get_vars()) == 1)
    assert(len(dist.get_params()) == 2)
    assert(len(dist.get_dists()) == 0)

def test_formula_transform():
    """
    Check if variables can be added/multiplied/transformed.
    The result should be a formula that can be plugged into a model.
    """
    from mle import var, par

    x = var('x')
    a = par('a')
    b = par('b')

    formula = a * x**2 + b

def test_simple_fit():
    """
    Check if generating/fitting Gaussian data works
    """
    from mle import Normal, var, par
    import theano.tensor as T
    import numpy as np

    x = var('x')
    mu = par('mu')
    sigma = par('sigma')
    dist = Normal(x, mu, sigma)
    np.random.seed(42)
    data = dist.sample(1e6, {'mu': 0, 'sigma': 1})
    results = dist.fit({'x': data}, {'mu': 1, 'sigma': 2}, method='L-BFGS-B')

def test_linear_regression():
    """
    Check if fitting a linear model works
    """
    from mle import Normal, var, par
    import theano.tensor as T
    import numpy as np

    x = var('x')
    y = var('y')

    a = par('a')
    b = par('b')
    sigma = par('sigma')

    dist = Normal(y, a * x + b, sigma)
    np.random.seed(42)

    xs = linspace(0, 1, 20)
    ys = dist.sample(20, {'x': xs, 'a': 1, 'b': 0, 'sigma': 0.5})

    results = dist.fit({'x': xs, 'y': ys}, {'a': 2, 'b': 1, 'sigma': 1})
Add some tests that don't pass yet
Add some tests that don't pass yet
Python
mit
ibab/python-mle
Add some tests that don't pass yet
## Code Before: def test_distribution(): from mle import Normal, var, par import theano.tensor as T x = var('x') mu = par('mu') sigma = par('sigma') dist = Normal(x, mu, sigma) assert(len(dist.get_vars()) == 1) assert(len(dist.get_params()) == 2) assert(len(dist.get_dists()) == 0) ## Instruction: Add some tests that don't pass yet ## Code After: def test_formula_transform(): """ Check if variables can be added/multiplied/transformed. The result should be a formula that can be plugged into a model. """ from mle import var, par x = var('x') a = par('a') b = par('b') formula = a * x**2 + b def test_simple_fit(): """ Check if generating/fitting Gaussian data works """ from mle import Normal, var, par import theano.tensor as T import numpy as np x = var('x') mu = par('mu') sigma = par('sigma') dist = Normal(x, mu, sigma) np.random.seed(42) data = dist.sample(1e6, {'mu': 0, 'sigma': 1}) results = dist.fit({'x': data}, {'mu': 1, 'sigma': 2}, method='L-BFGS-B') def test_linear_regression(): """ Check if fitting a linear model works """ from mle import Normal, var, par import theano.tensor as T import numpy as np x = var('x') y = var('y') a = par('a') b = par('b') sigma = par('sigma') dist = Normal(y, a * x + b, sigma) np.random.seed(42) xs = linspace(0, 1, 20) ys = dist.sample(20, {'x': xs, 'a': 1, 'b': 0, 'sigma': 0.5}) results = dist.fit({'x': xs, 'y': ys}, {'a': 2, 'b': 1, 'sigma': 1})
# ... existing code ... def test_formula_transform(): """ Check if variables can be added/multiplied/transformed. The result should be a formula that can be plugged into a model. """ from mle import var, par x = var('x') a = par('a') b = par('b') formula = a * x**2 + b def test_simple_fit(): """ Check if generating/fitting Gaussian data works """ from mle import Normal, var, par # ... modified code ... import theano.tensor as T import numpy as np ... dist = Normal(x, mu, sigma) np.random.seed(42) data = dist.sample(1e6, {'mu': 0, 'sigma': 1}) results = dist.fit({'x': data}, {'mu': 1, 'sigma': 2}, method='L-BFGS-B') def test_linear_regression(): """ Check if fitting a linear model works """ from mle import Normal, var, par import theano.tensor as T import numpy as np x = var('x') y = var('y') a = par('a') b = par('b') sigma = par('sigma') dist = Normal(y, a * x + b, sigma) np.random.seed(42) xs = linspace(0, 1, 20) ys = dist.sample(20, {'x': xs, 'a': 1, 'b': 0, 'sigma': 0.5}) results = dist.fit({'x': xs, 'y': ys}, {'a': 2, 'b': 1, 'sigma': 1}) # ... rest of the code ...
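A note on the mle record above: in `test_linear_regression`, `xs = linspace(0, 1, 20)` calls `linspace` without importing it (only `numpy` is imported, as `np`), so the test dies with a `NameError` before the fit even runs — consistent with the subject "Add some tests that don't pass yet". A minimal corrected sketch of that setup, assuming `numpy` is the intended source of `linspace`:

```python
import numpy as np

np.random.seed(42)
xs = np.linspace(0, 1, 20)  # 20 evenly spaced points on [0, 1]
print(xs[0], xs[-1])        # 0.0 1.0
```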
7b1d520278b8fe33b68103d26f9aa7bb945f6791
cryptography/hazmat/backends/__init__.py
cryptography/hazmat/backends/__init__.py
from cryptography.hazmat.backends import openssl from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) def default_backend(): return openssl.backend
from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend
Make the default backend be a multi-backend
Make the default backend be a multi-backend
Python
bsd-3-clause
bwhmather/cryptography,Ayrx/cryptography,bwhmather/cryptography,Lukasa/cryptography,Ayrx/cryptography,bwhmather/cryptography,kimvais/cryptography,skeuomorf/cryptography,dstufft/cryptography,kimvais/cryptography,Lukasa/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Lukasa/cryptography,sholsapp/cryptography,Hasimir/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,Hasimir/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,kimvais/cryptography,sholsapp/cryptography,bwhmather/cryptography
from cryptography.hazmat.backends import openssl + from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) + _default_backend = MultiBackend(_ALL_BACKENDS) + def default_backend(): - return openssl.backend + return _default_backend
Make the default backend be a multi-backend
## Code Before: from cryptography.hazmat.backends import openssl from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) def default_backend(): return openssl.backend ## Instruction: Make the default backend be a multi-backend ## Code After: from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend
... from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( ... _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend ...
6eb4c09cfc43e2f939660525101a1a8fac9c4838
threadedcomments/forms.py
threadedcomments/forms.py
from django import forms from django.contrib.comments.forms import CommentForm from django.conf import settings from django.utils.hashcompat import sha_constructor from threadedcomments.models import ThreadedComment class ThreadedCommentForm(CommentForm): parent = forms.IntegerField(required=False, widget=forms.HiddenInput) title = forms.CharField(required=False) def __init__(self, target_object, parent=None, data=None, initial=None): self.parent = parent if initial is None: initial = {} initial.update({'parent': self.parent}) super(ThreadedCommentForm, self).__init__(target_object, data=data, initial=initial) def get_comment_model(self): return ThreadedComment def get_comment_create_data(self): d = super(ThreadedCommentForm, self).get_comment_create_data() d['parent_id'] = self.cleaned_data['parent'] d['title'] = self.cleaned_data['title'] return d
from django import forms from django.contrib.comments.forms import CommentForm from django.conf import settings from django.utils.hashcompat import sha_constructor from threadedcomments.models import ThreadedComment class ThreadedCommentForm(CommentForm): parent = forms.IntegerField(required=False, widget=forms.HiddenInput) def __init__(self, target_object, parent=None, data=None, initial=None): self.base_fields.insert( self.base_fields.keyOrder.index('comment'), 'title', forms.CharField(required=False) ) self.parent = parent if initial is None: initial = {} initial.update({'parent': self.parent}) super(ThreadedCommentForm, self).__init__(target_object, data=data, initial=initial) def get_comment_model(self): return ThreadedComment def get_comment_create_data(self): d = super(ThreadedCommentForm, self).get_comment_create_data() d['parent_id'] = self.cleaned_data['parent'] d['title'] = self.cleaned_data['title'] return d
Make title field appear before comment in the form
Make title field appear before comment in the form Fixes #7
Python
bsd-3-clause
yrcjaya/django-threadedcomments,coxmediagroup/django-threadedcomments,nikolas/django-threadedcomments,ccnmtl/django-threadedcomments,yrcjaya/django-threadedcomments,nikolas/django-threadedcomments,SmithsonianEnterprises/django-threadedcomments,PolicyStat/django-threadedcomments,ccnmtl/django-threadedcomments,SmithsonianEnterprises/django-threadedcomments,HonzaKral/django-threadedcomments,coxmediagroup/django-threadedcomments,incuna/django-threadedcomments,HonzaKral/django-threadedcomments
from django import forms from django.contrib.comments.forms import CommentForm from django.conf import settings from django.utils.hashcompat import sha_constructor from threadedcomments.models import ThreadedComment class ThreadedCommentForm(CommentForm): parent = forms.IntegerField(required=False, widget=forms.HiddenInput) - title = forms.CharField(required=False) def __init__(self, target_object, parent=None, data=None, initial=None): + self.base_fields.insert( + self.base_fields.keyOrder.index('comment'), + 'title', forms.CharField(required=False) + ) self.parent = parent if initial is None: initial = {} initial.update({'parent': self.parent}) super(ThreadedCommentForm, self).__init__(target_object, data=data, initial=initial) def get_comment_model(self): return ThreadedComment def get_comment_create_data(self): d = super(ThreadedCommentForm, self).get_comment_create_data() d['parent_id'] = self.cleaned_data['parent'] d['title'] = self.cleaned_data['title'] return d
Make title field appear before comment in the form
## Code Before: from django import forms from django.contrib.comments.forms import CommentForm from django.conf import settings from django.utils.hashcompat import sha_constructor from threadedcomments.models import ThreadedComment class ThreadedCommentForm(CommentForm): parent = forms.IntegerField(required=False, widget=forms.HiddenInput) title = forms.CharField(required=False) def __init__(self, target_object, parent=None, data=None, initial=None): self.parent = parent if initial is None: initial = {} initial.update({'parent': self.parent}) super(ThreadedCommentForm, self).__init__(target_object, data=data, initial=initial) def get_comment_model(self): return ThreadedComment def get_comment_create_data(self): d = super(ThreadedCommentForm, self).get_comment_create_data() d['parent_id'] = self.cleaned_data['parent'] d['title'] = self.cleaned_data['title'] return d ## Instruction: Make title field appear before comment in the form ## Code After: from django import forms from django.contrib.comments.forms import CommentForm from django.conf import settings from django.utils.hashcompat import sha_constructor from threadedcomments.models import ThreadedComment class ThreadedCommentForm(CommentForm): parent = forms.IntegerField(required=False, widget=forms.HiddenInput) def __init__(self, target_object, parent=None, data=None, initial=None): self.base_fields.insert( self.base_fields.keyOrder.index('comment'), 'title', forms.CharField(required=False) ) self.parent = parent if initial is None: initial = {} initial.update({'parent': self.parent}) super(ThreadedCommentForm, self).__init__(target_object, data=data, initial=initial) def get_comment_model(self): return ThreadedComment def get_comment_create_data(self): d = super(ThreadedCommentForm, self).get_comment_create_data() d['parent_id'] = self.cleaned_data['parent'] d['title'] = self.cleaned_data['title'] return d
... parent = forms.IntegerField(required=False, widget=forms.HiddenInput) ... def __init__(self, target_object, parent=None, data=None, initial=None): self.base_fields.insert( self.base_fields.keyOrder.index('comment'), 'title', forms.CharField(required=False) ) self.parent = parent ...
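The threadedcomments fix above depends on how old Django versions ordered form fields: `base_fields` was a `SortedDict` whose three-argument `insert(index, key, value)` and `keyOrder` list controlled rendering order, so inserting `title` at the index of `comment` puts the title box just before the comment box. A self-contained sketch of the same insert-before pattern on a plain list (the field names here mirror Django's old `CommentForm` but are only illustrative):

```python
# Field names in render order, standing in for base_fields.keyOrder.
key_order = ["name", "email", "url", "comment", "honeypot"]

# Insert "title" immediately before "comment", as the form's __init__ does.
key_order.insert(key_order.index("comment"), "title")

print(key_order)
# ['name', 'email', 'url', 'title', 'comment', 'honeypot']
```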
12acfff456e1a696d1117b20b8843c6789ee38bb
wake/views.py
wake/views.py
from been.couch import CouchStore from flask import render_template, abort from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events)
from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response()
Add Atom feed for events that have 'syndicate' set in their source config.
Add Atom feed for events that have 'syndicate' set in their source config.
Python
bsd-3-clause
chromakode/wake
from been.couch import CouchStore - from flask import render_template, abort + from flask import render_template, abort, request, url_for + from urlparse import urljoin + from werkzeug.contrib.atom import AtomFeed + from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) + @app.route('/recent.atom') + def recent_feed(): + feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, + generator=('Wake', None, None)) + sources = store.get_sources() + for event in store.events(): + if sources[event['source']].get('syndicate'): + feed.add(event['title'], + unicode(event['content']), + content_type='html', + author=event.get('author', ''), + url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), + updated=datetime.fromtimestamp(event['timestamp']), + published=datetime.fromtimestamp(event['timestamp'])) + return feed.get_response()
Add Atom feed for events that have 'syndicate' set in their source config.
## Code Before: from been.couch import CouchStore from flask import render_template, abort from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) ## Instruction: Add Atom feed for events that have 'syndicate' set in their source config. ## Code After: from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app store = CouchStore().load() @app.route('/') def wake(): return render_template('stream.html', events=store.collapsed_events()) @app.route('/<slug>') def by_slug(slug): events = list(store.events_by_slug(slug)) if not events: abort(404) return render_template('stream.html', events=events) @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response()
# ... existing code ... from been.couch import CouchStore from flask import render_template, abort, request, url_for from urlparse import urljoin from werkzeug.contrib.atom import AtomFeed from datetime import datetime from wake import app # ... modified code ... @app.route('/recent.atom') def recent_feed(): feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root, generator=('Wake', None, None)) sources = store.get_sources() for event in store.events(): if sources[event['source']].get('syndicate'): feed.add(event['title'], unicode(event['content']), content_type='html', author=event.get('author', ''), url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))), updated=datetime.fromtimestamp(event['timestamp']), published=datetime.fromtimestamp(event['timestamp'])) return feed.get_response() # ... rest of the code ...
7fa490cb598aca2848ce886dfc45bb8606f07e58
backend/geonature/core/gn_profiles/models.py
backend/geonature/core/gn_profiles/models.py
from geonature.utils.env import DB from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer)
from flask import current_app from geoalchemy2 import Geometry from utils_flask_sqla.serializers import serializable from utils_flask_sqla_geo.serializers import geoserializable from geonature.utils.env import DB @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) @serializable @geoserializable class VmValidProfiles(DB.Model): __tablename__ = "vm_valid_profiles" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) altitude_min = DB.Column(DB.Integer) altitude_max = DB.Column(DB.Integer) first_valid_data = DB.Column(DB.DateTime) last_valid_data = DB.Column(DB.DateTime) count_valid_data = DB.Column(DB.Integer)
Add VM valid profile model
Add VM valid profile model
Python
bsd-2-clause
PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature
+ from flask import current_app + from geoalchemy2 import Geometry + + from utils_flask_sqla.serializers import serializable + from utils_flask_sqla_geo.serializers import geoserializable + from geonature.utils.env import DB - from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) + + @serializable + @geoserializable + class VmValidProfiles(DB.Model): + __tablename__ = "vm_valid_profiles" + __table_args__ = {"schema": "gn_profiles"} + cd_ref = DB.Column(DB.Integer) + valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) + altitude_min = DB.Column(DB.Integer) + altitude_max = DB.Column(DB.Integer) + first_valid_data = DB.Column(DB.DateTime) + last_valid_data = DB.Column(DB.DateTime) + count_valid_data = DB.Column(DB.Integer)
Add VM valid profile model
## Code Before: from geonature.utils.env import DB from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) ## Instruction: Add VM valid profile model ## Code After: from flask import current_app from geoalchemy2 import Geometry from utils_flask_sqla.serializers import serializable from utils_flask_sqla_geo.serializers import geoserializable from geonature.utils.env import DB @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) @serializable @geoserializable class VmValidProfiles(DB.Model): __tablename__ = "vm_valid_profiles" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) altitude_min = DB.Column(DB.Integer) altitude_max = DB.Column(DB.Integer) first_valid_data = DB.Column(DB.DateTime) last_valid_data = DB.Column(DB.DateTime) count_valid_data = DB.Column(DB.Integer)
# ... existing code ... from flask import current_app from geoalchemy2 import Geometry from utils_flask_sqla.serializers import serializable from utils_flask_sqla_geo.serializers import geoserializable from geonature.utils.env import DB # ... modified code ... count_valid_data = DB.Column(DB.Integer) @serializable @geoserializable class VmValidProfiles(DB.Model): __tablename__ = "vm_valid_profiles" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) altitude_min = DB.Column(DB.Integer) altitude_max = DB.Column(DB.Integer) first_valid_data = DB.Column(DB.DateTime) last_valid_data = DB.Column(DB.DateTime) count_valid_data = DB.Column(DB.Integer) # ... rest of the code ...
8052577164ba144263c7f45e4c823ba396f19d65
badgekit_webhooks/views.py
badgekit_webhooks/views.py
from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.http import require_POST import json def hello(request): return HttpResponse("Hello, world. Badges!!!") @require_POST def badge_issued_hook(request): try: data = json.loads(request.body) except ValueError: return HttpResponseBadRequest("Bad JSON") return HttpResponse("Hello, world. Badges!!!")
from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST import json def hello(request): return HttpResponse("Hello, world. Badges!!!") @require_POST @csrf_exempt def badge_issued_hook(request): try: data = json.loads(request.body) except ValueError: return HttpResponseBadRequest("Bad JSON") return HttpResponse("Hello, world. Badges!!!")
Make webhook exempt from CSRF protection
Make webhook exempt from CSRF protection Soon, we will add JWT verification, to replace it.
Python
mit
tgs/django-badgekit-webhooks
from django.http import HttpResponse, HttpResponseBadRequest + from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST import json def hello(request): return HttpResponse("Hello, world. Badges!!!") @require_POST + @csrf_exempt def badge_issued_hook(request): try: data = json.loads(request.body) except ValueError: return HttpResponseBadRequest("Bad JSON") return HttpResponse("Hello, world. Badges!!!")
Make webhook exempt from CSRF protection
## Code Before: from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.http import require_POST import json def hello(request): return HttpResponse("Hello, world. Badges!!!") @require_POST def badge_issued_hook(request): try: data = json.loads(request.body) except ValueError: return HttpResponseBadRequest("Bad JSON") return HttpResponse("Hello, world. Badges!!!") ## Instruction: Make webhook exempt from CSRF protection ## Code After: from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST import json def hello(request): return HttpResponse("Hello, world. Badges!!!") @require_POST @csrf_exempt def badge_issued_hook(request): try: data = json.loads(request.body) except ValueError: return HttpResponseBadRequest("Bad JSON") return HttpResponse("Hello, world. Badges!!!")
# ... existing code ... from django.http import HttpResponse, HttpResponseBadRequest from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST # ... modified code ... @require_POST @csrf_exempt def badge_issued_hook(request): # ... rest of the code ...
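The badgekit commit above drops CSRF protection because an external service POSTing to the webhook can never supply Django's CSRF token, and its message says JWT verification will replace it. A rough sketch of what that verification could look like with PyJWT — the secret name and error handling are placeholders, not the project's actual scheme:

```python
import jwt  # PyJWT

SHARED_SECRET = "replace-me"  # placeholder; would come from settings

def verify_webhook_token(token):
    """Return the decoded claims if the token verifies, else None."""
    try:
        return jwt.decode(token, SHARED_SECRET, algorithms=["HS256"])
    except jwt.InvalidTokenError:
        return None
```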
e9f2a3c29185466f1c92121e9f4e4b727fb20fd0
scripts/rename_tutorial_src_files.py
scripts/rename_tutorial_src_files.py
from pathlib import Path, PurePath from string import digits directory = Path("./docs/tutorial/src") dirs = sorted([Path(f) for f in directory.iterdir()]) d: PurePath sufix = "__out__" for d in dirs: if d.name.endswith(sufix): continue output_dir_name = d.name + "__out__" output_directory = directory / output_dir_name output_directory.mkdir(exist_ok=True) files = sorted([Path(f) for f in d.iterdir()]) for i, f in enumerate(files): index = str(i + 1).zfill(3) new_name = output_directory / f"tutorial{index}.py" print(new_name) f.rename(new_name) for d in dirs: current_dir = Path(str(d) + sufix) print(current_dir) current_dir.rename(d) #%%
from pathlib import Path, PurePath from string import digits directory = Path("./docs/tutorial/src") skip_names = {"bigger_applications"} skip_dirs = {directory / name for name in skip_names} dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs]) d: PurePath sufix = "__out__" for d in dirs: if d.name.endswith(sufix): continue output_dir_name = d.name + "__out__" output_directory = directory / output_dir_name output_directory.mkdir(exist_ok=True) files = sorted([Path(f) for f in d.iterdir()]) f: PurePath for i, f in enumerate(files): index = str(i + 1).zfill(3) if f.name != "__init__.py" and f.name.endswith(".py"): new_name = output_directory / f"tutorial{index}.py" else: new_name = output_directory / f.name print(new_name) f.rename(new_name) for d in dirs: current_dir = Path(str(d) + sufix) print(current_dir) current_dir.rename(d) #%%
Update tutorial renamer to exclude files
:sparkles: Update tutorial renamer to exclude files
Python
mit
tiangolo/fastapi,tiangolo/fastapi,tiangolo/fastapi
from pathlib import Path, PurePath from string import digits directory = Path("./docs/tutorial/src") + skip_names = {"bigger_applications"} + skip_dirs = {directory / name for name in skip_names} - dirs = sorted([Path(f) for f in directory.iterdir()]) + dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs]) d: PurePath sufix = "__out__" for d in dirs: if d.name.endswith(sufix): continue output_dir_name = d.name + "__out__" output_directory = directory / output_dir_name output_directory.mkdir(exist_ok=True) files = sorted([Path(f) for f in d.iterdir()]) + f: PurePath for i, f in enumerate(files): index = str(i + 1).zfill(3) + if f.name != "__init__.py" and f.name.endswith(".py"): - new_name = output_directory / f"tutorial{index}.py" + new_name = output_directory / f"tutorial{index}.py" + else: + new_name = output_directory / f.name print(new_name) f.rename(new_name) for d in dirs: current_dir = Path(str(d) + sufix) print(current_dir) current_dir.rename(d) #%%
Update tutorial renamer to exclude files
## Code Before: from pathlib import Path, PurePath from string import digits directory = Path("./docs/tutorial/src") dirs = sorted([Path(f) for f in directory.iterdir()]) d: PurePath sufix = "__out__" for d in dirs: if d.name.endswith(sufix): continue output_dir_name = d.name + "__out__" output_directory = directory / output_dir_name output_directory.mkdir(exist_ok=True) files = sorted([Path(f) for f in d.iterdir()]) for i, f in enumerate(files): index = str(i + 1).zfill(3) new_name = output_directory / f"tutorial{index}.py" print(new_name) f.rename(new_name) for d in dirs: current_dir = Path(str(d) + sufix) print(current_dir) current_dir.rename(d) #%% ## Instruction: Update tutorial renamer to exclude files ## Code After: from pathlib import Path, PurePath from string import digits directory = Path("./docs/tutorial/src") skip_names = {"bigger_applications"} skip_dirs = {directory / name for name in skip_names} dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs]) d: PurePath sufix = "__out__" for d in dirs: if d.name.endswith(sufix): continue output_dir_name = d.name + "__out__" output_directory = directory / output_dir_name output_directory.mkdir(exist_ok=True) files = sorted([Path(f) for f in d.iterdir()]) f: PurePath for i, f in enumerate(files): index = str(i + 1).zfill(3) if f.name != "__init__.py" and f.name.endswith(".py"): new_name = output_directory / f"tutorial{index}.py" else: new_name = output_directory / f.name print(new_name) f.rename(new_name) for d in dirs: current_dir = Path(str(d) + sufix) print(current_dir) current_dir.rename(d) #%%
# ... existing code ... directory = Path("./docs/tutorial/src") skip_names = {"bigger_applications"} skip_dirs = {directory / name for name in skip_names} dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs]) d: PurePath # ... modified code ... files = sorted([Path(f) for f in d.iterdir()]) f: PurePath for i, f in enumerate(files): ... index = str(i + 1).zfill(3) if f.name != "__init__.py" and f.name.endswith(".py"): new_name = output_directory / f"tutorial{index}.py" else: new_name = output_directory / f.name print(new_name) # ... rest of the code ...
c4e0a132461dba798739b752a04fe3ff66af17ab
tests/high_level_curl_test.py
tests/high_level_curl_test.py
import curl import unittest from . import appmanager setup_module, teardown_module = appmanager.setup(('app', 8380)) class RelativeUrlTest(unittest.TestCase): def setUp(self): self.curl = curl.Curl('http://localhost:8380/') def tearDown(self): self.curl.close() def test_reuse(self): result = self.curl.get('/success') self.assertEqual('success', result) result = self.curl.get('/success') self.assertEqual('success', result)
import curl import unittest from . import appmanager setup_module, teardown_module = appmanager.setup(('app', 8380)) class RelativeUrlTest(unittest.TestCase): def setUp(self): self.curl = curl.Curl('http://localhost:8380/') def tearDown(self): self.curl.close() def test_reuse(self): result = self.curl.get('/success') self.assertEqual('success', result.decode()) result = self.curl.get('/success') self.assertEqual('success', result.decode())
Fix test suite on python 3 - high level curl object returns result as bytes
Fix test suite on python 3 - high level curl object returns result as bytes
Python
lgpl-2.1
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
import curl import unittest from . import appmanager setup_module, teardown_module = appmanager.setup(('app', 8380)) class RelativeUrlTest(unittest.TestCase): def setUp(self): self.curl = curl.Curl('http://localhost:8380/') - + def tearDown(self): self.curl.close() - + def test_reuse(self): result = self.curl.get('/success') - self.assertEqual('success', result) + self.assertEqual('success', result.decode()) result = self.curl.get('/success') - self.assertEqual('success', result) + self.assertEqual('success', result.decode())
Fix test suite on python 3 - high level curl object returns result as bytes
## Code Before: import curl import unittest from . import appmanager setup_module, teardown_module = appmanager.setup(('app', 8380)) class RelativeUrlTest(unittest.TestCase): def setUp(self): self.curl = curl.Curl('http://localhost:8380/') def tearDown(self): self.curl.close() def test_reuse(self): result = self.curl.get('/success') self.assertEqual('success', result) result = self.curl.get('/success') self.assertEqual('success', result) ## Instruction: Fix test suite on python 3 - high level curl object returns result as bytes ## Code After: import curl import unittest from . import appmanager setup_module, teardown_module = appmanager.setup(('app', 8380)) class RelativeUrlTest(unittest.TestCase): def setUp(self): self.curl = curl.Curl('http://localhost:8380/') def tearDown(self): self.curl.close() def test_reuse(self): result = self.curl.get('/success') self.assertEqual('success', result.decode()) result = self.curl.get('/success') self.assertEqual('success', result.decode())
# ... existing code ... self.curl = curl.Curl('http://localhost:8380/') def tearDown(self): # ... modified code ... self.curl.close() def test_reuse(self): ... result = self.curl.get('/success') self.assertEqual('success', result.decode()) ... result = self.curl.get('/success') self.assertEqual('success', result.decode()) # ... rest of the code ...
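The pycurl change above is the usual Python 2 → 3 bytes/text split: a body fetched off the wire is `bytes` on Python 3, so comparing it to a `str` literal is always `False` until it is decoded. A minimal illustration, independent of curl:

```python
body = b"success"                   # what the high-level curl object returns
print(body == "success")            # False on Python 3: bytes != str
print(body.decode() == "success")   # True once decoded (UTF-8 by default)
```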
66e67e53360a9f49ae73c8c8f2de49991525363b
txircd/modules/cmode_t.py
txircd/modules/cmode_t.py
from twisted.words.protocols import irc from txircd.modbase import Mode class TopiclockMode(Mode): def checkPermission(self, user, cmd, data): if cmd != "TOPIC": return data if "topic" not in data: return data targetChannel = data["targetchan"] if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name): user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel") return {} return data class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): if "channel_minimum_level" not in self.ircd.servconfig: self.ircd.servconfig["channel_minimum_level"] = {} if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]: self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o" return { "modes": { "cnt": TopiclockMode() } } def cleanup(self): self.ircd.removeMode("cnt")
from twisted.words.protocols import irc from txircd.modbase import Mode class TopiclockMode(Mode): def checkPermission(self, user, cmd, data): if cmd != "TOPIC": return data if "topic" not in data: return data targetChannel = data["targetchan"] if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]): user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel") return {} return data class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): if "channel_minimum_level" not in self.ircd.servconfig: self.ircd.servconfig["channel_minimum_level"] = {} if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]: self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o" return { "modes": { "cnt": TopiclockMode() } } def cleanup(self): self.ircd.removeMode("cnt")
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
Python
bsd-3-clause
Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd
from twisted.words.protocols import irc from txircd.modbase import Mode class TopiclockMode(Mode): def checkPermission(self, user, cmd, data): if cmd != "TOPIC": return data if "topic" not in data: return data targetChannel = data["targetchan"] - if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name): + if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]): user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel") return {} return data class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): if "channel_minimum_level" not in self.ircd.servconfig: self.ircd.servconfig["channel_minimum_level"] = {} if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]: self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o" return { "modes": { "cnt": TopiclockMode() } } def cleanup(self): self.ircd.removeMode("cnt")
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
## Code Before: from twisted.words.protocols import irc from txircd.modbase import Mode class TopiclockMode(Mode): def checkPermission(self, user, cmd, data): if cmd != "TOPIC": return data if "topic" not in data: return data targetChannel = data["targetchan"] if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name): user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel") return {} return data class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): if "channel_minimum_level" not in self.ircd.servconfig: self.ircd.servconfig["channel_minimum_level"] = {} if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]: self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o" return { "modes": { "cnt": TopiclockMode() } } def cleanup(self): self.ircd.removeMode("cnt") ## Instruction: Fix the order of parameters to hasAccess, which broke all topic changing when +t was set ## Code After: from twisted.words.protocols import irc from txircd.modbase import Mode class TopiclockMode(Mode): def checkPermission(self, user, cmd, data): if cmd != "TOPIC": return data if "topic" not in data: return data targetChannel = data["targetchan"] if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]): user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel") return {} return data class Spawner(object): def __init__(self, ircd): self.ircd = ircd def spawn(self): if "channel_minimum_level" not in self.ircd.servconfig: self.ircd.servconfig["channel_minimum_level"] = {} if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]: self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o" return { "modes": { "cnt": TopiclockMode() } } def cleanup(self): self.ircd.removeMode("cnt")
# ... existing code ... targetChannel = data["targetchan"] if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]): user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel") # ... rest of the code ...
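The txircd bug above — two positional string arguments swapped at one call site — fails silently because both parameters are plain strings, so nothing errors loudly. Passing such arguments by keyword makes the call site self-checking; a small sketch around a hypothetical stand-in for `user.hasAccess(channel, level)`:

```python
def has_access(channel, level):
    """Hypothetical stand-in for user.hasAccess(channel, level)."""
    return "checking level {!r} on channel {!r}".format(level, channel)

print(has_access("o", "#chan"))                # swapped, but runs "fine"
print(has_access(channel="#chan", level="o"))  # intent is explicit, order-proof
```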
e01b0c9129c05e366605639553201f0dc2af2756
django_fsm_log/apps.py
django_fsm_log/apps.py
from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" default_auto_field = 'django.db.models.BigAutoField' def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
Revert "Solve warning coming from django 4.0"
Revert "Solve warning coming from django 4.0"
Python
mit
gizmag/django-fsm-log,ticosax/django-fsm-log
from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" - default_auto_field = 'django.db.models.BigAutoField' def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
Revert "Solve warning coming from django 4.0"
## Code Before: from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" default_auto_field = 'django.db.models.BigAutoField' def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback) ## Instruction: Revert "Solve warning coming from django 4.0" ## Code After: from __future__ import unicode_literals from django.apps import AppConfig from django.conf import settings from django.utils.module_loading import import_string from django_fsm.signals import pre_transition, post_transition class DjangoFSMLogAppConfig(AppConfig): name = 'django_fsm_log' verbose_name = "Django FSM Log" def ready(self): backend = import_string(settings.DJANGO_FSM_LOG_STORAGE_METHOD) StateLog = self.get_model('StateLog') backend.setup_model(StateLog) pre_transition.connect(backend.pre_transition_callback) post_transition.connect(backend.post_transition_callback)
... verbose_name = "Django FSM Log" ...
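Background on the django-fsm-log revert above: Django 3.2 added the `models.W042` warning for models whose primary-key type is auto-created, and Django 4.0 kept it; setting `default_auto_field` on an `AppConfig` silences the warning but also changes the implicit primary-key type the app's new migrations would use, which is presumably why a reusable app backed the change out. The project-level alternative leaves the decision to the installing project:

```python
# settings.py of the project that installs the app, not the app itself,
# decides the implicit primary-key type:
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
```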
a84c02b4369bf698c82be22b6231fe412ad67c63
Cauldron/ext/click/__init__.py
Cauldron/ext/click/__init__.py
try: import click except ImportError: raise ImportError("Cauldron.ext.click requires the click package.") from ...api import use __all__ = ['backend', 'service'] def select_backend(ctx, param, value): """Callback to set the Cauldron backend.""" if not value or ctx.resilient_parsing: return use(str(value)) def backend(default=None): """Click options to set up a Cauldron backend.""" option = click.option("-k", "--backend", expose_value=False, is_eager=True, callback=select_backend, help="Set the Cauldron backend.", default=default) def decorate(func): return option(func) return decorate backend_option = backend def construct_service(ctx, param, value): """Construct a service.""" if not value: return from Cauldron import ktl return ktl.Service(str(value)) def service(default=None, backend=True): """Add a service argument which returns a ktl.Service class.""" option = click.option("-s", "--service", callback=construct_service, help="KTL Service name to use.", default=default) backend_default = None if backend and isinstance(backend, str): backend_default = backend def decorate(func): if backend: func = backend_option(default=backend_default)(func) return option(func)
try: import click except ImportError: raise ImportError("Cauldron.ext.click requires the click package.") from ...api import use __all__ = ['backend', 'service'] def select_backend(ctx, param, value): """Callback to set the Cauldron backend.""" if not value or ctx.resilient_parsing: return use(str(value)) def backend(default=None): """Click options to set up a Cauldron backend.""" option = click.option("-k", "--backend", expose_value=False, is_eager=True, callback=select_backend, help="Set the Cauldron backend.", default=default) def decorate(func): return option(func) return decorate backend_option = backend def construct_service(ctx, param, value): """Construct a service.""" if not value: return from Cauldron import ktl return ktl.Service(str(value)) def service(default=None, backend=True): """Add a service argument which returns a ktl.Service class.""" option = click.option("-s", "--service", callback=construct_service, help="KTL Service name to use.", default=default) backend_default = None if backend and isinstance(backend, str): backend_default = backend def decorate(func): if backend: func = backend_option(default=backend_default)(func) return option(func) return decorate
Fix a bug in Cauldron click extension
Fix a bug in Cauldron click extension
Python
bsd-3-clause
alexrudy/Cauldron
try: import click except ImportError: raise ImportError("Cauldron.ext.click requires the click package.") from ...api import use __all__ = ['backend', 'service'] def select_backend(ctx, param, value): """Callback to set the Cauldron backend.""" if not value or ctx.resilient_parsing: return use(str(value)) def backend(default=None): """Click options to set up a Cauldron backend.""" option = click.option("-k", "--backend", expose_value=False, is_eager=True, callback=select_backend, help="Set the Cauldron backend.", default=default) def decorate(func): return option(func) return decorate backend_option = backend def construct_service(ctx, param, value): """Construct a service.""" if not value: return from Cauldron import ktl return ktl.Service(str(value)) def service(default=None, backend=True): """Add a service argument which returns a ktl.Service class.""" option = click.option("-s", "--service", callback=construct_service, help="KTL Service name to use.", default=default) backend_default = None if backend and isinstance(backend, str): backend_default = backend def decorate(func): if backend: func = backend_option(default=backend_default)(func) return option(func) + return decorate
Fix a bug in Cauldron click extension
## Code Before: try: import click except ImportError: raise ImportError("Cauldron.ext.click requires the click package.") from ...api import use __all__ = ['backend', 'service'] def select_backend(ctx, param, value): """Callback to set the Cauldron backend.""" if not value or ctx.resilient_parsing: return use(str(value)) def backend(default=None): """Click options to set up a Cauldron backend.""" option = click.option("-k", "--backend", expose_value=False, is_eager=True, callback=select_backend, help="Set the Cauldron backend.", default=default) def decorate(func): return option(func) return decorate backend_option = backend def construct_service(ctx, param, value): """Construct a service.""" if not value: return from Cauldron import ktl return ktl.Service(str(value)) def service(default=None, backend=True): """Add a service argument which returns a ktl.Service class.""" option = click.option("-s", "--service", callback=construct_service, help="KTL Service name to use.", default=default) backend_default = None if backend and isinstance(backend, str): backend_default = backend def decorate(func): if backend: func = backend_option(default=backend_default)(func) return option(func) ## Instruction: Fix a bug in Cauldron click extension ## Code After: try: import click except ImportError: raise ImportError("Cauldron.ext.click requires the click package.") from ...api import use __all__ = ['backend', 'service'] def select_backend(ctx, param, value): """Callback to set the Cauldron backend.""" if not value or ctx.resilient_parsing: return use(str(value)) def backend(default=None): """Click options to set up a Cauldron backend.""" option = click.option("-k", "--backend", expose_value=False, is_eager=True, callback=select_backend, help="Set the Cauldron backend.", default=default) def decorate(func): return option(func) return decorate backend_option = backend def construct_service(ctx, param, value): """Construct a service.""" if not value: return from Cauldron import ktl return ktl.Service(str(value)) def service(default=None, backend=True): """Add a service argument which returns a ktl.Service class.""" option = click.option("-s", "--service", callback=construct_service, help="KTL Service name to use.", default=default) backend_default = None if backend and isinstance(backend, str): backend_default = backend def decorate(func): if backend: func = backend_option(default=backend_default)(func) return option(func) return decorate
# ... existing code ... return option(func) return decorate # ... rest of the code ...
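The one-line Cauldron fix above is the classic decorator-factory pitfall: `service(...)` runs first and must hand back `decorate`; without the `return`, Python applies `None` to the decorated function. A minimal reproduction of the failure mode:

```python
def broken_factory(option=None):
    def decorate(func):
        return func
    # missing: return decorate  ->  broken_factory(...) returns None

try:
    @broken_factory(option="x")   # equivalent to f = None(f)
    def f():
        pass
except TypeError as exc:
    print(exc)  # 'NoneType' object is not callable
```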
d837a194e29b867443a3758bb4c159afe193e798
enumfields/fields.py
enumfields/fields.py
from django.core.exceptions import ValidationError from django.db import models import six class EnumFieldMixin(six.with_metaclass(models.SubfieldBase)): def __init__(self, enum, choices=None, max_length=10, **options): self.enum = enum if not choices: try: choices = enum.choices() except AttributeError: choices = [(m.value, getattr(m, 'label', m.name)) for m in enum] super(EnumFieldMixin, self).__init__( choices=choices, max_length=max_length, **options) def to_python(self, value): if value is None: return None for m in self.enum: if value == m: return value if value == m.value: return m raise ValidationError('%s is not a valid value for enum %s' % (value, self.enum)) def get_prep_value(self, value): return None if value is None else value.value class EnumField(EnumFieldMixin, models.CharField): pass class EnumIntegerField(EnumFieldMixin, models.IntegerField): pass try: from south.modelsinspector import add_introspection_rules except: pass else: add_introspection_rules([], ['^enumfields\.fields\.EnumField$']) add_introspection_rules([], ['^enumfields\.fields\.EnumIntegerField$'])
from django.core.exceptions import ValidationError from django.db import models import six class EnumFieldMixin(six.with_metaclass(models.SubfieldBase)): def __init__(self, enum, choices=None, max_length=10, **options): self.enum = enum if not choices: try: choices = enum.choices() except AttributeError: choices = [(m.value, getattr(m, 'label', m.name)) for m in enum] super(EnumFieldMixin, self).__init__( choices=choices, max_length=max_length, **options) def to_python(self, value): if value is None: return None for m in self.enum: if value == m: return value if value == m.value: return m raise ValidationError('%s is not a valid value for enum %s' % (value, self.enum)) def get_prep_value(self, value): return None if value is None else value.value class EnumField(EnumFieldMixin, models.CharField): pass class EnumIntegerField(EnumFieldMixin, models.IntegerField): pass
Revert "Add South introspection rules"
Revert "Add South introspection rules" They weren't correct. This reverts commit b7235e2fc4b28271e0dce8d812faa4a46ed84aea.
Python
mit
suutari-ai/django-enumfields,jessamynsmith/django-enumfields,bxm156/django-enumfields,jackyyf/django-enumfields
from django.core.exceptions import ValidationError from django.db import models import six class EnumFieldMixin(six.with_metaclass(models.SubfieldBase)): def __init__(self, enum, choices=None, max_length=10, **options): self.enum = enum if not choices: try: choices = enum.choices() except AttributeError: choices = [(m.value, getattr(m, 'label', m.name)) for m in enum] super(EnumFieldMixin, self).__init__( choices=choices, max_length=max_length, **options) def to_python(self, value): if value is None: return None for m in self.enum: if value == m: return value if value == m.value: return m raise ValidationError('%s is not a valid value for enum %s' % (value, self.enum)) def get_prep_value(self, value): return None if value is None else value.value class EnumField(EnumFieldMixin, models.CharField): pass class EnumIntegerField(EnumFieldMixin, models.IntegerField): pass - - try: - from south.modelsinspector import add_introspection_rules - except: - pass - else: - add_introspection_rules([], ['^enumfields\.fields\.EnumField$']) - add_introspection_rules([], ['^enumfields\.fields\.EnumIntegerField$']) -
Revert "Add South introspection rules"
## Code Before: from django.core.exceptions import ValidationError from django.db import models import six class EnumFieldMixin(six.with_metaclass(models.SubfieldBase)): def __init__(self, enum, choices=None, max_length=10, **options): self.enum = enum if not choices: try: choices = enum.choices() except AttributeError: choices = [(m.value, getattr(m, 'label', m.name)) for m in enum] super(EnumFieldMixin, self).__init__( choices=choices, max_length=max_length, **options) def to_python(self, value): if value is None: return None for m in self.enum: if value == m: return value if value == m.value: return m raise ValidationError('%s is not a valid value for enum %s' % (value, self.enum)) def get_prep_value(self, value): return None if value is None else value.value class EnumField(EnumFieldMixin, models.CharField): pass class EnumIntegerField(EnumFieldMixin, models.IntegerField): pass try: from south.modelsinspector import add_introspection_rules except: pass else: add_introspection_rules([], ['^enumfields\.fields\.EnumField$']) add_introspection_rules([], ['^enumfields\.fields\.EnumIntegerField$']) ## Instruction: Revert "Add South introspection rules" ## Code After: from django.core.exceptions import ValidationError from django.db import models import six class EnumFieldMixin(six.with_metaclass(models.SubfieldBase)): def __init__(self, enum, choices=None, max_length=10, **options): self.enum = enum if not choices: try: choices = enum.choices() except AttributeError: choices = [(m.value, getattr(m, 'label', m.name)) for m in enum] super(EnumFieldMixin, self).__init__( choices=choices, max_length=max_length, **options) def to_python(self, value): if value is None: return None for m in self.enum: if value == m: return value if value == m.value: return m raise ValidationError('%s is not a valid value for enum %s' % (value, self.enum)) def get_prep_value(self, value): return None if value is None else value.value class EnumField(EnumFieldMixin, models.CharField): pass class EnumIntegerField(EnumFieldMixin, models.IntegerField): pass
... pass ...
38b4af0b3c1c6105d68ff453d86107758ef9d751
preconditions.py
preconditions.py
class PreconditionError (TypeError): pass def preconditions(*precs): def decorate(f): def g(*a, **kw): return f(*a, **kw) return g return decorate
import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults) appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): def g(*a, **kw): return f(*a, **kw) return g return decorate
Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.
Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.
Python
mit
nejucomo/preconditions
+ import inspect + + class PreconditionError (TypeError): pass def preconditions(*precs): + + precinfo = [] + for p in precs: + spec = inspect.getargspec(p) + if spec.varargs or spec.keywords: + raise PreconditionError( + 'Precondition {!r} must not accept * nor ** args.'.format(p)) + + i = -len(spec.defaults) + appargs, closureargs = spec.args[:i], spec.args[i:] + precinfo.append( (appargs, closureargs, p) ) + def decorate(f): def g(*a, **kw): return f(*a, **kw) return g return decorate
Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function.
## Code Before: class PreconditionError (TypeError): pass def preconditions(*precs): def decorate(f): def g(*a, **kw): return f(*a, **kw) return g return decorate ## Instruction: Implement two of the "early" InvalidPreconditionTests which can be checked prior to seeing the wrapping function. ## Code After: import inspect class PreconditionError (TypeError): pass def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults) appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): def g(*a, **kw): return f(*a, **kw) return g return decorate
... import inspect class PreconditionError (TypeError): ... def preconditions(*precs): precinfo = [] for p in precs: spec = inspect.getargspec(p) if spec.varargs or spec.keywords: raise PreconditionError( 'Precondition {!r} must not accept * nor ** args.'.format(p)) i = -len(spec.defaults) appargs, closureargs = spec.args[:i], spec.args[i:] precinfo.append( (appargs, closureargs, p) ) def decorate(f): ...
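The argspec slicing in the preconditions record above leans on a detail of `inspect.getargspec`: `defaults` aligns with the tail of `args`, so `i = -len(spec.defaults)` splits the parameter list into application arguments (no default) and closure arguments (with defaults). A small illustration — note the committed code would still crash on a precondition with no defaults at all, since `spec.defaults` is `None` there and `len(None)` raises, which fits the message that only two of the early checks are implemented:

```python
import inspect  # getargspec is the Python 2-era API the record uses;
                # it was removed in Python 3.11 (use getfullargspec there)

def precondition(x, y, lo=0, hi=10):
    return lo <= x <= hi and lo <= y <= hi

spec = inspect.getargspec(precondition)
i = -len(spec.defaults)                # -2
print(spec.args[:i], spec.args[i:])    # ['x', 'y'] ['lo', 'hi']
```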
e94503e25bff0ba986c28ce3f16636b3bb9f2c3d
green_django/__init__.py
green_django/__init__.py
import sys from utils import module_exists from gevent import monkey def make_django_green(): monkey.patch_all() if module_exists('psycogreen'): from psycogreen.gevent.psyco_gevent import make_psycopg_green make_psycopg_green() if module_exists('pymysql'): import pymysql pymysql.install_as_MySQLdb() if module_exists('zmq'): from gevent_zeromq import zmq sys.modules["zmq"] = zmq
import sys from utils import module_exists from gevent import monkey def make_django_green(): monkey.patch_all() if module_exists('psycogreen'): from psycogreen.gevent.psyco_gevent import make_psycopg_green make_psycopg_green() if module_exists('pymysql'): import pymysql pymysql.install_as_MySQLdb() if module_exists('gevent_zeromq'): from gevent_zeromq import zmq sys.modules["zmq"] = zmq
Check for greened package - consistency
Check for greened package - consistency
Python
mit
philipn/green-monkey
import sys from utils import module_exists from gevent import monkey def make_django_green(): monkey.patch_all() if module_exists('psycogreen'): from psycogreen.gevent.psyco_gevent import make_psycopg_green make_psycopg_green() if module_exists('pymysql'): import pymysql pymysql.install_as_MySQLdb() - if module_exists('zmq'): + if module_exists('gevent_zeromq'): from gevent_zeromq import zmq sys.modules["zmq"] = zmq
Check for greened package - consistency
## Code Before: import sys from utils import module_exists from gevent import monkey def make_django_green(): monkey.patch_all() if module_exists('psycogreen'): from psycogreen.gevent.psyco_gevent import make_psycopg_green make_psycopg_green() if module_exists('pymysql'): import pymysql pymysql.install_as_MySQLdb() if module_exists('zmq'): from gevent_zeromq import zmq sys.modules["zmq"] = zmq ## Instruction: Check for greened package - consistency ## Code After: import sys from utils import module_exists from gevent import monkey def make_django_green(): monkey.patch_all() if module_exists('psycogreen'): from psycogreen.gevent.psyco_gevent import make_psycopg_green make_psycopg_green() if module_exists('pymysql'): import pymysql pymysql.install_as_MySQLdb() if module_exists('gevent_zeromq'): from gevent_zeromq import zmq sys.modules["zmq"] = zmq
... if module_exists('gevent_zeromq'): from gevent_zeromq import zmq ...
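The `utils.module_exists` helper imported in this record is not shown; a plausible stand-in (an assumption, not the project's actual code) is a simple import probe:

```python
import importlib

def module_exists(name):
    """Hypothetical stand-in for utils.module_exists: True if `name` imports."""
    try:
        importlib.import_module(name)
        return True
    except ImportError:
        return False

print(module_exists('json'))           # True
print(module_exists('gevent_zeromq'))  # False unless the package is installed
```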
1d3e956dcf667601feb871eab2a462fa09d0d101
tests/test_length.py
tests/test_length.py
from math import sqrt import pytest # type: ignore from hypothesis import given from ppb_vector import Vector from utils import isclose, vectors @pytest.mark.parametrize( "x, y, expected", [(6, 8, 10), (8, 6, 10), (0, 0, 0), (-6, -8, 10), (1, 2, 2.23606797749979)], ) def test_length(x, y, expected): vector = Vector(x, y) assert vector.length == expected @given(v=vectors()) def test_length_dot(v: Vector): """Test that |v| ≃ √v².""" assert isclose(v.length, sqrt(v * v))
from math import fabs, sqrt import pytest # type: ignore from hypothesis import given from ppb_vector import Vector from utils import floats, isclose, vectors @pytest.mark.parametrize( "x, y, expected", [(6, 8, 10), (8, 6, 10), (0, 0, 0), (-6, -8, 10), (1, 2, 2.23606797749979)], ) def test_length(x, y, expected): vector = Vector(x, y) assert vector.length == expected @given(v=vectors()) def test_length_dot(v: Vector): """Test that |v| ≃ √v².""" assert isclose(v.length, sqrt(v * v)) @given(v=vectors()) def test_length_zero(v: Vector): """1st axiom of normed vector spaces: |v| = 0 iff v = 0""" assert (v.length == 0) == (v == (0, 0)) @given(v=vectors(), scalar=floats()) def test_length_scalar(v: Vector, scalar: float): """2nd axiom of normed vector spaces: |λv| = |λ| |v|""" assert isclose((scalar * v).length, fabs(scalar) * v.length) @given(v=vectors(), w=vectors()) def test_length_triangle(v: Vector, w: Vector): """3rd axiom of normed vector spaces: |v+w| = |v| + |w|""" assert (v + w).length <= v.length + w.length
Test the axioms of normed vector spaces
tests/length: Test the axioms of normed vector spaces
Python
artistic-2.0
ppb/ppb-vector,ppb/ppb-vector
- from math import sqrt + from math import fabs, sqrt import pytest # type: ignore from hypothesis import given from ppb_vector import Vector - from utils import isclose, vectors + from utils import floats, isclose, vectors @pytest.mark.parametrize( "x, y, expected", [(6, 8, 10), (8, 6, 10), (0, 0, 0), (-6, -8, 10), (1, 2, 2.23606797749979)], ) def test_length(x, y, expected): vector = Vector(x, y) assert vector.length == expected @given(v=vectors()) def test_length_dot(v: Vector): """Test that |v| ≃ √v².""" assert isclose(v.length, sqrt(v * v)) + + @given(v=vectors()) + def test_length_zero(v: Vector): + """1st axiom of normed vector spaces: |v| = 0 iff v = 0""" + assert (v.length == 0) == (v == (0, 0)) + + + @given(v=vectors(), scalar=floats()) + def test_length_scalar(v: Vector, scalar: float): + """2nd axiom of normed vector spaces: |λv| = |λ| |v|""" + assert isclose((scalar * v).length, fabs(scalar) * v.length) + + + @given(v=vectors(), w=vectors()) + def test_length_triangle(v: Vector, w: Vector): + """3rd axiom of normed vector spaces: |v+w| = |v| + |w|""" + assert (v + w).length <= v.length + w.length +
Test the axioms of normed vector spaces
## Code Before: from math import sqrt import pytest # type: ignore from hypothesis import given from ppb_vector import Vector from utils import isclose, vectors @pytest.mark.parametrize( "x, y, expected", [(6, 8, 10), (8, 6, 10), (0, 0, 0), (-6, -8, 10), (1, 2, 2.23606797749979)], ) def test_length(x, y, expected): vector = Vector(x, y) assert vector.length == expected @given(v=vectors()) def test_length_dot(v: Vector): """Test that |v| ≃ √v².""" assert isclose(v.length, sqrt(v * v)) ## Instruction: Test the axioms of normed vector spaces ## Code After: from math import fabs, sqrt import pytest # type: ignore from hypothesis import given from ppb_vector import Vector from utils import floats, isclose, vectors @pytest.mark.parametrize( "x, y, expected", [(6, 8, 10), (8, 6, 10), (0, 0, 0), (-6, -8, 10), (1, 2, 2.23606797749979)], ) def test_length(x, y, expected): vector = Vector(x, y) assert vector.length == expected @given(v=vectors()) def test_length_dot(v: Vector): """Test that |v| ≃ √v².""" assert isclose(v.length, sqrt(v * v)) @given(v=vectors()) def test_length_zero(v: Vector): """1st axiom of normed vector spaces: |v| = 0 iff v = 0""" assert (v.length == 0) == (v == (0, 0)) @given(v=vectors(), scalar=floats()) def test_length_scalar(v: Vector, scalar: float): """2nd axiom of normed vector spaces: |λv| = |λ| |v|""" assert isclose((scalar * v).length, fabs(scalar) * v.length) @given(v=vectors(), w=vectors()) def test_length_triangle(v: Vector, w: Vector): """3rd axiom of normed vector spaces: |v+w| = |v| + |w|""" assert (v + w).length <= v.length + w.length
... from math import fabs, sqrt ... from ppb_vector import Vector from utils import floats, isclose, vectors ... assert isclose(v.length, sqrt(v * v)) @given(v=vectors()) def test_length_zero(v: Vector): """1st axiom of normed vector spaces: |v| = 0 iff v = 0""" assert (v.length == 0) == (v == (0, 0)) @given(v=vectors(), scalar=floats()) def test_length_scalar(v: Vector, scalar: float): """2nd axiom of normed vector spaces: |λv| = |λ| |v|""" assert isclose((scalar * v).length, fabs(scalar) * v.length) @given(v=vectors(), w=vectors()) def test_length_triangle(v: Vector, w: Vector): """3rd axiom of normed vector spaces: |v+w| = |v| + |w|""" assert (v + w).length <= v.length + w.length ...
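`floats` and `isclose` come from the suite's local `utils` module, which this record does not include; plausible stand-ins (assumptions, not the project's code) would be:

```python
import math
from hypothesis import strategies as st

def floats():
    # bounded, finite floats keep |scalar * v| from overflowing in the axiom tests
    return st.floats(min_value=-1e10, max_value=1e10,
                     allow_nan=False, allow_infinity=False)

def isclose(a, b):
    # tolerant float comparison for checks like |v| = sqrt(v . v)
    return math.isclose(a, b, rel_tol=1e-9, abs_tol=1e-9)
```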
301463a99dceceb21ecec933f3a83e55ca37c3b8
wagtail/wagtailimages/api/admin/serializers.py
wagtail/wagtailimages/api/admin/serializers.py
from __future__ import absolute_import, unicode_literals from collections import OrderedDict from rest_framework.fields import Field from ...models import SourceImageIOError from ..v2.serializers import ImageSerializer class ImageRenditionField(Field): """ A field that generates a rendition with the specified filter spec, and serialises details of that rendition. Example: "thumbnail": { "url": "/media/images/myimage.max-165x165.jpg", "width": 165, "height": 100 } If there is an error with the source image. The dict will only contain a single key, "error", indicating this error: "thumbnail": { "error": "SourceImageIOError" } """ def __init__(self, filter_spec, *args, **kwargs): self.filter_spec = filter_spec super(ImageRenditionField, self).__init__(*args, **kwargs) def get_attribute(self, instance): return instance def to_representation(self, image): try: thumbnail = image.get_rendition(self.filter_spec) return OrderedDict([ ('url', thumbnail.url), ('width', thumbnail.width), ('height', thumbnail.height), ]) except SourceImageIOError: return OrderedDict([ ('error', 'SourceImageIOError'), ]) class AdminImageSerializer(ImageSerializer): thumbnail = ImageRenditionField('max-165x165', read_only=True)
from __future__ import absolute_import, unicode_literals from collections import OrderedDict from rest_framework.fields import Field from ...models import SourceImageIOError from ..v2.serializers import ImageSerializer class ImageRenditionField(Field): """ A field that generates a rendition with the specified filter spec, and serialises details of that rendition. Example: "thumbnail": { "url": "/media/images/myimage.max-165x165.jpg", "width": 165, "height": 100 } If there is an error with the source image. The dict will only contain a single key, "error", indicating this error: "thumbnail": { "error": "SourceImageIOError" } """ def __init__(self, filter_spec, *args, **kwargs): self.filter_spec = filter_spec super(ImageRenditionField, self).__init__(*args, **kwargs) def to_representation(self, image): try: thumbnail = image.get_rendition(self.filter_spec) return OrderedDict([ ('url', thumbnail.url), ('width', thumbnail.width), ('height', thumbnail.height), ]) except SourceImageIOError: return OrderedDict([ ('error', 'SourceImageIOError'), ]) class AdminImageSerializer(ImageSerializer): thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
Use source keyword argument (instead of overriding get_attribute)
Use source keyword argument (instead of overriding get_attribute) This allows the ImageRenditionField to be used on models that contain an image field.
Python
bsd-3-clause
nealtodd/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,torchbox/wagtail,iansprice/wagtail,jnns/wagtail,wagtail/wagtail,zerolab/wagtail,thenewguy/wagtail,iansprice/wagtail,zerolab/wagtail,rsalmaso/wagtail,gasman/wagtail,timorieber/wagtail,kaedroho/wagtail,mikedingjan/wagtail,torchbox/wagtail,thenewguy/wagtail,zerolab/wagtail,takeflight/wagtail,takeflight/wagtail,gasman/wagtail,rsalmaso/wagtail,nimasmi/wagtail,Toshakins/wagtail,timorieber/wagtail,thenewguy/wagtail,timorieber/wagtail,nimasmi/wagtail,gasman/wagtail,wagtail/wagtail,wagtail/wagtail,mixxorz/wagtail,nealtodd/wagtail,mixxorz/wagtail,zerolab/wagtail,iansprice/wagtail,timorieber/wagtail,jnns/wagtail,gasman/wagtail,nealtodd/wagtail,wagtail/wagtail,iansprice/wagtail,rsalmaso/wagtail,takeflight/wagtail,jnns/wagtail,mixxorz/wagtail,torchbox/wagtail,FlipperPA/wagtail,mixxorz/wagtail,jnns/wagtail,kaedroho/wagtail,Toshakins/wagtail,FlipperPA/wagtail,nimasmi/wagtail,zerolab/wagtail,wagtail/wagtail,nimasmi/wagtail,kaedroho/wagtail,mikedingjan/wagtail,rsalmaso/wagtail,mixxorz/wagtail,thenewguy/wagtail,takeflight/wagtail,kaedroho/wagtail,mikedingjan/wagtail,thenewguy/wagtail,nealtodd/wagtail,rsalmaso/wagtail,gasman/wagtail,FlipperPA/wagtail,Toshakins/wagtail,Toshakins/wagtail,torchbox/wagtail,kaedroho/wagtail
from __future__ import absolute_import, unicode_literals from collections import OrderedDict from rest_framework.fields import Field from ...models import SourceImageIOError from ..v2.serializers import ImageSerializer class ImageRenditionField(Field): """ A field that generates a rendition with the specified filter spec, and serialises details of that rendition. Example: "thumbnail": { "url": "/media/images/myimage.max-165x165.jpg", "width": 165, "height": 100 } If there is an error with the source image. The dict will only contain a single key, "error", indicating this error: "thumbnail": { "error": "SourceImageIOError" } """ def __init__(self, filter_spec, *args, **kwargs): self.filter_spec = filter_spec super(ImageRenditionField, self).__init__(*args, **kwargs) - def get_attribute(self, instance): - return instance - def to_representation(self, image): try: thumbnail = image.get_rendition(self.filter_spec) return OrderedDict([ ('url', thumbnail.url), ('width', thumbnail.width), ('height', thumbnail.height), ]) except SourceImageIOError: return OrderedDict([ ('error', 'SourceImageIOError'), ]) class AdminImageSerializer(ImageSerializer): - thumbnail = ImageRenditionField('max-165x165', read_only=True) + thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
Use source keyword argument (instead of overriding get_attribute)
## Code Before: from __future__ import absolute_import, unicode_literals from collections import OrderedDict from rest_framework.fields import Field from ...models import SourceImageIOError from ..v2.serializers import ImageSerializer class ImageRenditionField(Field): """ A field that generates a rendition with the specified filter spec, and serialises details of that rendition. Example: "thumbnail": { "url": "/media/images/myimage.max-165x165.jpg", "width": 165, "height": 100 } If there is an error with the source image. The dict will only contain a single key, "error", indicating this error: "thumbnail": { "error": "SourceImageIOError" } """ def __init__(self, filter_spec, *args, **kwargs): self.filter_spec = filter_spec super(ImageRenditionField, self).__init__(*args, **kwargs) def get_attribute(self, instance): return instance def to_representation(self, image): try: thumbnail = image.get_rendition(self.filter_spec) return OrderedDict([ ('url', thumbnail.url), ('width', thumbnail.width), ('height', thumbnail.height), ]) except SourceImageIOError: return OrderedDict([ ('error', 'SourceImageIOError'), ]) class AdminImageSerializer(ImageSerializer): thumbnail = ImageRenditionField('max-165x165', read_only=True) ## Instruction: Use source keyword argument (instead of overriding get_attribute) ## Code After: from __future__ import absolute_import, unicode_literals from collections import OrderedDict from rest_framework.fields import Field from ...models import SourceImageIOError from ..v2.serializers import ImageSerializer class ImageRenditionField(Field): """ A field that generates a rendition with the specified filter spec, and serialises details of that rendition. Example: "thumbnail": { "url": "/media/images/myimage.max-165x165.jpg", "width": 165, "height": 100 } If there is an error with the source image. The dict will only contain a single key, "error", indicating this error: "thumbnail": { "error": "SourceImageIOError" } """ def __init__(self, filter_spec, *args, **kwargs): self.filter_spec = filter_spec super(ImageRenditionField, self).__init__(*args, **kwargs) def to_representation(self, image): try: thumbnail = image.get_rendition(self.filter_spec) return OrderedDict([ ('url', thumbnail.url), ('width', thumbnail.width), ('height', thumbnail.height), ]) except SourceImageIOError: return OrderedDict([ ('error', 'SourceImageIOError'), ]) class AdminImageSerializer(ImageSerializer): thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
# ... existing code ... def to_representation(self, image): # ... modified code ... class AdminImageSerializer(ImageSerializer): thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True) # ... rest of the code ...
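`source='*'` is standard Django REST Framework behaviour: the field receives the whole instance rather than an attribute named after the field, which is why the `get_attribute` override becomes redundant. A minimal sketch (the serializer name is hypothetical, and `ImageRenditionField` is the class defined in this record):

```python
from rest_framework import serializers

class ImageSummarySerializer(serializers.Serializer):
    # source='*' passes the entire image instance to the field (rather than
    # looking up a `thumbnail` attribute), so to_representation can call
    # image.get_rendition(...) directly.
    thumbnail = ImageRenditionField('max-165x165', source='*', read_only=True)
```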
fd819ff0ff1a7d73dd58f152d2c4be8aea18e2d3
rebulk/processors.py
rebulk/processors.py
def conflict_prefer_longer(matches): """ Remove shorter matches if they conflicts with longer ones :param matches: :type matches: rebulk.match.Matches :param context: :type context: :return: :rtype: list[rebulk.match.Match] """ to_remove_matches = set() for match in filter(lambda match: not match.private, matches): conflicting_matches = set() for i in range(*match.span): conflicting_matches.update(matches.starting(i)) conflicting_matches.update(matches.ending(i)) if conflicting_matches: # keep the match only if it's the longest for conflicting_match in filter(lambda match: not match.private, conflicting_matches): if len(conflicting_match) < len(match): to_remove_matches.add(conflicting_match) for match in list(to_remove_matches): matches.remove(match) return matches def remove_private(matches): """ Removes private matches. :param matches: :type matches: :return: :rtype: """ to_remove_matches = set() for match in matches: if match.private: to_remove_matches.add(match) for match in list(to_remove_matches): matches.remove(match) return matches
def conflict_prefer_longer(matches): """ Remove shorter matches if they conflicts with longer ones :param matches: :type matches: rebulk.match.Matches :param context: :type context: :return: :rtype: list[rebulk.match.Match] """ to_remove_matches = set() for match in filter(lambda match: not match.private, matches): conflicting_matches = set() for i in range(*match.span): conflicting_matches.update(matches.starting(i)) conflicting_matches.update(matches.ending(i)) if conflicting_matches: # keep the match only if it's the longest for conflicting_match in filter(lambda match: not match.private, conflicting_matches): if len(conflicting_match) < len(match): to_remove_matches.add(conflicting_match) for match in list(to_remove_matches): matches.remove(match) return matches def remove_private(matches): """ Removes private matches. :param matches: :type matches: :return: :rtype: """ for match in list(matches): if match.private: matches.remove(match) return matches
Fix issue when a private match is found multiple times
Fix issue when a private match is found multiple times
Python
mit
Toilal/rebulk
def conflict_prefer_longer(matches): """ Remove shorter matches if they conflicts with longer ones :param matches: :type matches: rebulk.match.Matches :param context: :type context: :return: :rtype: list[rebulk.match.Match] """ to_remove_matches = set() for match in filter(lambda match: not match.private, matches): conflicting_matches = set() for i in range(*match.span): conflicting_matches.update(matches.starting(i)) conflicting_matches.update(matches.ending(i)) if conflicting_matches: # keep the match only if it's the longest for conflicting_match in filter(lambda match: not match.private, conflicting_matches): if len(conflicting_match) < len(match): to_remove_matches.add(conflicting_match) for match in list(to_remove_matches): matches.remove(match) return matches def remove_private(matches): """ Removes private matches. :param matches: :type matches: :return: :rtype: """ - to_remove_matches = set() - for match in matches: + for match in list(matches): if match.private: - to_remove_matches.add(match) - - for match in list(to_remove_matches): - matches.remove(match) + matches.remove(match) return matches
Fix issue when a private match is found multiple times
## Code Before: def conflict_prefer_longer(matches): """ Remove shorter matches if they conflicts with longer ones :param matches: :type matches: rebulk.match.Matches :param context: :type context: :return: :rtype: list[rebulk.match.Match] """ to_remove_matches = set() for match in filter(lambda match: not match.private, matches): conflicting_matches = set() for i in range(*match.span): conflicting_matches.update(matches.starting(i)) conflicting_matches.update(matches.ending(i)) if conflicting_matches: # keep the match only if it's the longest for conflicting_match in filter(lambda match: not match.private, conflicting_matches): if len(conflicting_match) < len(match): to_remove_matches.add(conflicting_match) for match in list(to_remove_matches): matches.remove(match) return matches def remove_private(matches): """ Removes private matches. :param matches: :type matches: :return: :rtype: """ to_remove_matches = set() for match in matches: if match.private: to_remove_matches.add(match) for match in list(to_remove_matches): matches.remove(match) return matches ## Instruction: Fix issue when a private match is found multiple times ## Code After: def conflict_prefer_longer(matches): """ Remove shorter matches if they conflicts with longer ones :param matches: :type matches: rebulk.match.Matches :param context: :type context: :return: :rtype: list[rebulk.match.Match] """ to_remove_matches = set() for match in filter(lambda match: not match.private, matches): conflicting_matches = set() for i in range(*match.span): conflicting_matches.update(matches.starting(i)) conflicting_matches.update(matches.ending(i)) if conflicting_matches: # keep the match only if it's the longest for conflicting_match in filter(lambda match: not match.private, conflicting_matches): if len(conflicting_match) < len(match): to_remove_matches.add(conflicting_match) for match in list(to_remove_matches): matches.remove(match) return matches def remove_private(matches): """ Removes private matches. :param matches: :type matches: :return: :rtype: """ for match in list(matches): if match.private: matches.remove(match) return matches
# ... existing code ... """ for match in list(matches): if match.private: matches.remove(match) # ... rest of the code ...
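The `list(matches)` snapshot is the usual fix for mutating a sequence while iterating it; per the commit message, the earlier set-based collection also collapsed duplicate private matches so only one occurrence was removed. A standalone illustration of the pattern:

```python
matches = ['pub', 'priv', 'priv', 'pub']

for m in list(matches):      # iterate a snapshot of the sequence...
    if m == 'priv':
        matches.remove(m)    # ...so in-place removal can't skip entries

print(matches)               # ['pub', 'pub'] -- both duplicates removed
```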
e3c1819b6b5ddec1ff326c3693d48ec8a8b3a834
fantail/tests/__init__.py
fantail/tests/__init__.py
tests_require = [ 'pytest', 'pytest-capturelog', 'pytest-cov', ]
tests_require = [ 'coveralls', 'pytest', 'pytest-capturelog', 'pytest-cov', ]
Add coveralls to test requirements
Add coveralls to test requirements
Python
bsd-2-clause
sjkingo/fantail,sjkingo/fantail,sjkingo/fantail
tests_require = [ + 'coveralls', 'pytest', 'pytest-capturelog', 'pytest-cov', ]
Add coveralls to test requirements
## Code Before: tests_require = [ 'pytest', 'pytest-capturelog', 'pytest-cov', ] ## Instruction: Add coveralls to test requirements ## Code After: tests_require = [ 'coveralls', 'pytest', 'pytest-capturelog', 'pytest-cov', ]
... tests_require = [ 'coveralls', 'pytest', ...
6fd1305f2a4a2e08b51c421b1c2cfdd33b407119
src/puzzle/problems/problem.py
src/puzzle/problems/problem.py
from data import meta class Problem(object): def __init__(self, name, lines): self.name = name self.lines = lines self._solutions = None self._constraints = [] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None def solutions(self): if self._solutions is None: self._solutions = meta.Meta( (k, v) for k, v in self._solve().items() if all( [fn(k, v) for fn in self._constraints] ) ) return self._solutions def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
from data import meta _THRESHOLD = 0.01 class Problem(object): def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name self.lines = lines self._threshold = threshold self._solutions = None self._constraints = [ lambda k, v: v > self._threshold ] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None self._solutions_iter = None def solutions(self): if self._solutions is None: self._solutions_iter = self._solve_iter() results = [] for k, v in self._solutions_iter: if all(fn(k, v) for fn in self._constraints): results.append((k, v)) self._solutions = meta.Meta(results) return self._solutions def _solve_iter(self): return iter(self._solve().items()) def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
Set a threshold on Problem and enforce it.
Set a threshold on Problem and enforce it.
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
from data import meta + + _THRESHOLD = 0.01 class Problem(object): - def __init__(self, name, lines): + def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name self.lines = lines + self._threshold = threshold self._solutions = None - self._constraints = [] + self._constraints = [ + lambda k, v: v > self._threshold + ] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None + self._solutions_iter = None def solutions(self): if self._solutions is None: + self._solutions_iter = self._solve_iter() + results = [] + for k, v in self._solutions_iter: + if all(fn(k, v) for fn in self._constraints): + results.append((k, v)) - self._solutions = meta.Meta( + self._solutions = meta.Meta(results) - (k, v) for k, v in self._solve().items() if all( - [fn(k, v) for fn in self._constraints] - ) - ) return self._solutions + + def _solve_iter(self): + return iter(self._solve().items()) def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
Set a threshold on Problem and enforce it.
## Code Before: from data import meta class Problem(object): def __init__(self, name, lines): self.name = name self.lines = lines self._solutions = None self._constraints = [] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None def solutions(self): if self._solutions is None: self._solutions = meta.Meta( (k, v) for k, v in self._solve().items() if all( [fn(k, v) for fn in self._constraints] ) ) return self._solutions def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__ ## Instruction: Set a threshold on Problem and enforce it. ## Code After: from data import meta _THRESHOLD = 0.01 class Problem(object): def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name self.lines = lines self._threshold = threshold self._solutions = None self._constraints = [ lambda k, v: v > self._threshold ] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None self._solutions_iter = None def solutions(self): if self._solutions is None: self._solutions_iter = self._solve_iter() results = [] for k, v in self._solutions_iter: if all(fn(k, v) for fn in self._constraints): results.append((k, v)) self._solutions = meta.Meta(results) return self._solutions def _solve_iter(self): return iter(self._solve().items()) def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError() def __repr__(self): return '%s()' % self.__class__.__name__
# ... existing code ... from data import meta _THRESHOLD = 0.01 # ... modified code ... class Problem(object): def __init__(self, name, lines, threshold=_THRESHOLD): self.name = name ... self.lines = lines self._threshold = threshold self._solutions = None self._constraints = [ lambda k, v: v > self._threshold ] ... self._solutions = None self._solutions_iter = None ... if self._solutions is None: self._solutions_iter = self._solve_iter() results = [] for k, v in self._solutions_iter: if all(fn(k, v) for fn in self._constraints): results.append((k, v)) self._solutions = meta.Meta(results) return self._solutions def _solve_iter(self): return iter(self._solve().items()) # ... rest of the code ...
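Stripped of the class machinery, the filtering that `solutions()` performs reduces to the familiar all-predicates pattern over candidate/score pairs:

```python
threshold = 0.01
constraints = [lambda k, v: v > threshold]

candidates = {'cat': 0.9, 'cot': 0.005}
results = [(k, v) for k, v in candidates.items()
           if all(fn(k, v) for fn in constraints)]
print(results)  # [('cat', 0.9)] -- 'cot' falls under the threshold
```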
0d2816e4ea0bf5a04794456651e79f7db9b2571f
src/jupyter_notebook_gist/config.py
src/jupyter_notebook_gist/config.py
from traitlets.config import LoggingConfigurable from traitlets.traitlets import Unicode class NotebookGist(LoggingConfigurable): oauth_client_id = Unicode( '', help='The GitHub application OAUTH client ID', ).tag(config=True) oauth_client_secret = Unicode( '', help='The GitHub application OAUTH client secret', ).tag(config=True) def __init__(self, *args, **kwargs): self.config_manager = kwargs.pop('config_manager') super(NotebookGist, self).__init__(*args, **kwargs) # update the frontend settings with the currently passed # OAUTH client id client_id = self.config.NotebookGist.oauth_client_id if not isinstance(client_id, (str, bytes)): client_id = None self.config_manager.update('notebook', { 'oauth_client_id': client_id, })
import six from traitlets.config import LoggingConfigurable from traitlets.traitlets import Unicode class NotebookGist(LoggingConfigurable): oauth_client_id = Unicode( '', help='The GitHub application OAUTH client ID', ).tag(config=True) oauth_client_secret = Unicode( '', help='The GitHub application OAUTH client secret', ).tag(config=True) def __init__(self, *args, **kwargs): self.config_manager = kwargs.pop('config_manager') super(NotebookGist, self).__init__(*args, **kwargs) # update the frontend settings with the currently passed # OAUTH client id client_id = self.config.NotebookGist.oauth_client_id if not isinstance(client_id, six.string_types): client_id = None self.config_manager.update('notebook', { 'oauth_client_id': client_id, })
Use six for correct Python2/3 compatibility
Use six for correct Python2/3 compatibility
Python
mpl-2.0
mreid-moz/jupyter-notebook-gist,mozilla/jupyter-notebook-gist,mozilla/jupyter-notebook-gist,mreid-moz/jupyter-notebook-gist
+ import six from traitlets.config import LoggingConfigurable from traitlets.traitlets import Unicode class NotebookGist(LoggingConfigurable): oauth_client_id = Unicode( '', help='The GitHub application OAUTH client ID', ).tag(config=True) oauth_client_secret = Unicode( '', help='The GitHub application OAUTH client secret', ).tag(config=True) def __init__(self, *args, **kwargs): self.config_manager = kwargs.pop('config_manager') super(NotebookGist, self).__init__(*args, **kwargs) # update the frontend settings with the currently passed # OAUTH client id client_id = self.config.NotebookGist.oauth_client_id - if not isinstance(client_id, (str, bytes)): + if not isinstance(client_id, six.string_types): client_id = None self.config_manager.update('notebook', { 'oauth_client_id': client_id, })
Use six for correct Python2/3 compatibility
## Code Before: from traitlets.config import LoggingConfigurable from traitlets.traitlets import Unicode class NotebookGist(LoggingConfigurable): oauth_client_id = Unicode( '', help='The GitHub application OAUTH client ID', ).tag(config=True) oauth_client_secret = Unicode( '', help='The GitHub application OAUTH client secret', ).tag(config=True) def __init__(self, *args, **kwargs): self.config_manager = kwargs.pop('config_manager') super(NotebookGist, self).__init__(*args, **kwargs) # update the frontend settings with the currently passed # OAUTH client id client_id = self.config.NotebookGist.oauth_client_id if not isinstance(client_id, (str, bytes)): client_id = None self.config_manager.update('notebook', { 'oauth_client_id': client_id, }) ## Instruction: Use six for correct Python2/3 compatibility ## Code After: import six from traitlets.config import LoggingConfigurable from traitlets.traitlets import Unicode class NotebookGist(LoggingConfigurable): oauth_client_id = Unicode( '', help='The GitHub application OAUTH client ID', ).tag(config=True) oauth_client_secret = Unicode( '', help='The GitHub application OAUTH client secret', ).tag(config=True) def __init__(self, *args, **kwargs): self.config_manager = kwargs.pop('config_manager') super(NotebookGist, self).__init__(*args, **kwargs) # update the frontend settings with the currently passed # OAUTH client id client_id = self.config.NotebookGist.oauth_client_id if not isinstance(client_id, six.string_types): client_id = None self.config_manager.update('notebook', { 'oauth_client_id': client_id, })
... import six from traitlets.config import LoggingConfigurable ... client_id = self.config.NotebookGist.oauth_client_id if not isinstance(client_id, six.string_types): client_id = None ...
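`six.string_types` is `(basestring,)` on Python 2 and `(str,)` on Python 3, so unicode values pass on both interpreters, which the original `(str, bytes)` tuple did not guarantee:

```python
import six

for value in [u'abc', b'abc', 123]:
    print(isinstance(value, six.string_types))

# Python 3: True, False, False  (only str counts as a string)
# Python 2: True, True,  False  (basestring covers both str and unicode)
```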
1557de38bcc9fa4099655c210d7e2daf7c19d715
task/models.py
task/models.py
from django.db import models from django.conf import settings class Task(models.Model): title = models.CharField(max_length=50, unique=True) created_at = models.DateField() status = models.CharField(max_length=30, choices=settings.TASK_CHOICES) def __unicode__(self): # pragma: no cover return self.title
import datetime from django.db import models from django.conf import settings class Task(models.Model): title = models.CharField(max_length=50, unique=True) created_at = models.DateTimeField(auto_now_add=True) status = models.CharField(max_length=30, choices=settings.TASK_CHOICES) class Meta: ordering = ('-created_at',) def __unicode__(self): # pragma: no cover return self.title
Set order when getting the list of tasks
Set order when getting the list of tasks
Python
mit
rosadurante/to_do,rosadurante/to_do
+ import datetime + from django.db import models from django.conf import settings class Task(models.Model): title = models.CharField(max_length=50, unique=True) - created_at = models.DateField() + created_at = models.DateTimeField(auto_now_add=True) status = models.CharField(max_length=30, choices=settings.TASK_CHOICES) + + class Meta: + ordering = ('-created_at',) def __unicode__(self): # pragma: no cover return self.title
Set order when getting the list of tasks
## Code Before: from django.db import models from django.conf import settings class Task(models.Model): title = models.CharField(max_length=50, unique=True) created_at = models.DateField() status = models.CharField(max_length=30, choices=settings.TASK_CHOICES) def __unicode__(self): # pragma: no cover return self.title ## Instruction: Set order when getting the list of tasks ## Code After: import datetime from django.db import models from django.conf import settings class Task(models.Model): title = models.CharField(max_length=50, unique=True) created_at = models.DateTimeField(auto_now_add=True) status = models.CharField(max_length=30, choices=settings.TASK_CHOICES) class Meta: ordering = ('-created_at',) def __unicode__(self): # pragma: no cover return self.title
... import datetime from django.db import models ... title = models.CharField(max_length=50, unique=True) created_at = models.DateTimeField(auto_now_add=True) status = models.CharField(max_length=30, choices=settings.TASK_CHOICES) class Meta: ordering = ('-created_at',) ...
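In a configured Django project (illustrative shell usage, not standalone-runnable; the 'open' status value is a placeholder), the new `Meta.ordering` makes unqualified querysets come back newest-first and `auto_now_add` stamps the creation time:

```python
task = Task.objects.create(title='write docs', status='open')  # created_at set automatically
Task.objects.all()                   # ordered by created_at DESC by default now
Task.objects.order_by('created_at')  # an explicit order_by still overrides it
```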
8b545ee63ec695a77ba08fa5ff45b7d6dd3d94f8
cuteshop/downloaders/git.py
cuteshop/downloaders/git.py
import subprocess from ..utils import DEVNULL, change_working_directory from .base import DOWNLOAD_CONTAINER def download(source_info): url = source_info['git'] subprocess.call( ('git', 'clone', url, DOWNLOAD_CONTAINER), stdout=DEVNULL, stderr=subprocess.STDOUT, ) if 'tag' in source_info: with change_working_directory(DOWNLOAD_CONTAINER): subprocess.call( ('git', 'checkout', source_info['tag']), stdout=DEVNULL, stderr=subprocess.STDOUT, )
import subprocess from ..utils import DEVNULL, change_working_directory from .base import DOWNLOAD_CONTAINER def _checkout(name): with change_working_directory(DOWNLOAD_CONTAINER): subprocess.call( ('git', 'checkout', name), stdout=DEVNULL, stderr=subprocess.STDOUT, ) def download(source_info): url = source_info['git'] subprocess.call( ('git', 'clone', url, DOWNLOAD_CONTAINER), stdout=DEVNULL, stderr=subprocess.STDOUT, ) if 'tag' in source_info: _checkout(source_info['tag']) elif 'branch' in source_info: _checkout(source_info['branch'])
Add auto branch checkout functionality
Add auto branch checkout functionality
Python
mit
uranusjr/cuteshop
import subprocess from ..utils import DEVNULL, change_working_directory from .base import DOWNLOAD_CONTAINER + + + def _checkout(name): + with change_working_directory(DOWNLOAD_CONTAINER): + subprocess.call( + ('git', 'checkout', name), + stdout=DEVNULL, stderr=subprocess.STDOUT, + ) def download(source_info): url = source_info['git'] subprocess.call( ('git', 'clone', url, DOWNLOAD_CONTAINER), stdout=DEVNULL, stderr=subprocess.STDOUT, ) if 'tag' in source_info: - with change_working_directory(DOWNLOAD_CONTAINER): - subprocess.call( - ('git', 'checkout', source_info['tag']), + _checkout(source_info['tag']) - stdout=DEVNULL, stderr=subprocess.STDOUT, - ) + elif 'branch' in source_info: + _checkout(source_info['branch'])
Add auto branch checkout functionality
## Code Before: import subprocess from ..utils import DEVNULL, change_working_directory from .base import DOWNLOAD_CONTAINER def download(source_info): url = source_info['git'] subprocess.call( ('git', 'clone', url, DOWNLOAD_CONTAINER), stdout=DEVNULL, stderr=subprocess.STDOUT, ) if 'tag' in source_info: with change_working_directory(DOWNLOAD_CONTAINER): subprocess.call( ('git', 'checkout', source_info['tag']), stdout=DEVNULL, stderr=subprocess.STDOUT, ) ## Instruction: Add auto branch checkout functionality ## Code After: import subprocess from ..utils import DEVNULL, change_working_directory from .base import DOWNLOAD_CONTAINER def _checkout(name): with change_working_directory(DOWNLOAD_CONTAINER): subprocess.call( ('git', 'checkout', name), stdout=DEVNULL, stderr=subprocess.STDOUT, ) def download(source_info): url = source_info['git'] subprocess.call( ('git', 'clone', url, DOWNLOAD_CONTAINER), stdout=DEVNULL, stderr=subprocess.STDOUT, ) if 'tag' in source_info: _checkout(source_info['tag']) elif 'branch' in source_info: _checkout(source_info['branch'])
# ... existing code ... from .base import DOWNLOAD_CONTAINER def _checkout(name): with change_working_directory(DOWNLOAD_CONTAINER): subprocess.call( ('git', 'checkout', name), stdout=DEVNULL, stderr=subprocess.STDOUT, ) # ... modified code ... if 'tag' in source_info: _checkout(source_info['tag']) elif 'branch' in source_info: _checkout(source_info['branch']) # ... rest of the code ...
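`utils.DEVNULL` is imported but not shown in this record; it most likely covers for `subprocess.DEVNULL` existing only on Python 3.3+. A plausible stand-in (an assumption, not the project's code):

```python
import subprocess

try:
    from subprocess import DEVNULL        # Python 3.3+
except ImportError:                       # Python 2 fallback
    import os
    DEVNULL = open(os.devnull, 'wb')

# Same calling pattern as the downloader: stdout discarded, stderr merged in.
subprocess.call(('git', '--version'), stdout=DEVNULL, stderr=subprocess.STDOUT)
```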
b13efa6234c2748515a9c3f5a8fbb3ad43093083
test/test_device.py
test/test_device.py
from pml.exceptions import PvException import pml.device import pytest import mock @pytest.fixture def create_device(readback, setpoint): _rb = readback _sp = setpoint device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock()) return device def test_set_device_value(): rb_pv = 'SR01A-PC-SQUAD-01:I' sp_pv = 'SR01A-PC-SQUAD-01:SETI' device1 = create_device(rb_pv, sp_pv) device1.put_value(40) device1._cs.put.assert_called_with(sp_pv, 40) device2 = create_device(rb_pv, None) with pytest.raises(PvException): device2.put_value(40) def test_get_device_value(): sp_pv = 'SR01A-PC-SQUAD-01:SETI' device = create_device(None, sp_pv) with pytest.raises(PvException): device.get_value('non_existent') with pytest.raises(PvException): create_device(None, None)
from pml.exceptions import PvException import pml.device import pytest import mock @pytest.fixture def create_device(readback, setpoint): _rb = readback _sp = setpoint device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock()) return device def test_set_device_value(): rb_pv = 'SR01A-PC-SQUAD-01:I' sp_pv = 'SR01A-PC-SQUAD-01:SETI' device1 = create_device(rb_pv, sp_pv) device1.put_value(40) device1._cs.put.assert_called_with(sp_pv, 40) device2 = create_device(rb_pv, None) with pytest.raises(PvException): device2.put_value(40) def test_get_device_value(): sp_pv = 'SR01A-PC-SQUAD-01:SETI' device = create_device(None, sp_pv) with pytest.raises(PvException): device.get_value('non_existent') with pytest.raises(AssertionError): create_device(None, None)
Raise assertion error when creating a device with no pv
Raise assertion error when creating a device with no pv
Python
apache-2.0
willrogers/pml,willrogers/pml
from pml.exceptions import PvException import pml.device import pytest import mock @pytest.fixture def create_device(readback, setpoint): _rb = readback _sp = setpoint device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock()) return device def test_set_device_value(): rb_pv = 'SR01A-PC-SQUAD-01:I' sp_pv = 'SR01A-PC-SQUAD-01:SETI' device1 = create_device(rb_pv, sp_pv) device1.put_value(40) device1._cs.put.assert_called_with(sp_pv, 40) device2 = create_device(rb_pv, None) with pytest.raises(PvException): device2.put_value(40) def test_get_device_value(): sp_pv = 'SR01A-PC-SQUAD-01:SETI' device = create_device(None, sp_pv) with pytest.raises(PvException): device.get_value('non_existent') - with pytest.raises(PvException): + with pytest.raises(AssertionError): create_device(None, None)
Raise assertion error when creating a device with no pv
## Code Before: from pml.exceptions import PvException import pml.device import pytest import mock @pytest.fixture def create_device(readback, setpoint): _rb = readback _sp = setpoint device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock()) return device def test_set_device_value(): rb_pv = 'SR01A-PC-SQUAD-01:I' sp_pv = 'SR01A-PC-SQUAD-01:SETI' device1 = create_device(rb_pv, sp_pv) device1.put_value(40) device1._cs.put.assert_called_with(sp_pv, 40) device2 = create_device(rb_pv, None) with pytest.raises(PvException): device2.put_value(40) def test_get_device_value(): sp_pv = 'SR01A-PC-SQUAD-01:SETI' device = create_device(None, sp_pv) with pytest.raises(PvException): device.get_value('non_existent') with pytest.raises(PvException): create_device(None, None) ## Instruction: Raise assertion error when creating a device with no pv ## Code After: from pml.exceptions import PvException import pml.device import pytest import mock @pytest.fixture def create_device(readback, setpoint): _rb = readback _sp = setpoint device = pml.device.Device(rb_pv=_rb, sp_pv=_sp, cs=mock.MagicMock()) return device def test_set_device_value(): rb_pv = 'SR01A-PC-SQUAD-01:I' sp_pv = 'SR01A-PC-SQUAD-01:SETI' device1 = create_device(rb_pv, sp_pv) device1.put_value(40) device1._cs.put.assert_called_with(sp_pv, 40) device2 = create_device(rb_pv, None) with pytest.raises(PvException): device2.put_value(40) def test_get_device_value(): sp_pv = 'SR01A-PC-SQUAD-01:SETI' device = create_device(None, sp_pv) with pytest.raises(PvException): device.get_value('non_existent') with pytest.raises(AssertionError): create_device(None, None)
# ... existing code ... with pytest.raises(AssertionError): create_device(None, None) # ... rest of the code ...
e1138ebffbdfe31d4a4acdb4e164bdd767c6e8ea
saylua/wrappers.py
saylua/wrappers.py
from flask import redirect as _redirect, url_for, render_template, g from functools import wraps def login_required(f, redirect='login'): """Redirects non-logged in users to a specified location. Usage: `@login_required`, `@login_required(redirect=<url>)` """ @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for(redirect)) return f(*args, **kwargs) return decorated_function def admin_access_required(f): @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for('login')) if not g.user.get_role().can_access_admin: return render_template('403.html'), 403 return f(*args, **kwargs) return decorated_function
from flask import redirect as _redirect, url_for, render_template, g from functools import wraps def login_required(f, redirect='login'): """Redirects non-logged in users to a specified location. Usage: `@login_required`, `@login_required(redirect=<url>)` """ @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for(redirect)) return f(*args, **kwargs) return decorated_function def admin_access_required(f): @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for('login')) if not g.user.get_role() or not g.user.get_role().can_access_admin: return render_template('403.html'), 403 return f(*args, **kwargs) return decorated_function
Fix for no role in admin access wrapper
Fix for no role in admin access wrapper
Python
agpl-3.0
LikeMyBread/Saylua,saylua/SayluaV2,LikeMyBread/Saylua,saylua/SayluaV2,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua
from flask import redirect as _redirect, url_for, render_template, g from functools import wraps def login_required(f, redirect='login'): """Redirects non-logged in users to a specified location. Usage: `@login_required`, `@login_required(redirect=<url>)` """ @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for(redirect)) return f(*args, **kwargs) return decorated_function def admin_access_required(f): @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for('login')) - if not g.user.get_role().can_access_admin: + if not g.user.get_role() or not g.user.get_role().can_access_admin: return render_template('403.html'), 403 return f(*args, **kwargs) return decorated_function
Fix for no role in admin access wrapper
## Code Before: from flask import redirect as _redirect, url_for, render_template, g from functools import wraps def login_required(f, redirect='login'): """Redirects non-logged in users to a specified location. Usage: `@login_required`, `@login_required(redirect=<url>)` """ @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for(redirect)) return f(*args, **kwargs) return decorated_function def admin_access_required(f): @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for('login')) if not g.user.get_role().can_access_admin: return render_template('403.html'), 403 return f(*args, **kwargs) return decorated_function ## Instruction: Fix for no role in admin access wrapper ## Code After: from flask import redirect as _redirect, url_for, render_template, g from functools import wraps def login_required(f, redirect='login'): """Redirects non-logged in users to a specified location. Usage: `@login_required`, `@login_required(redirect=<url>)` """ @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for(redirect)) return f(*args, **kwargs) return decorated_function def admin_access_required(f): @wraps(f) def decorated_function(*args, **kwargs): if not g.logged_in: return _redirect(url_for('login')) if not g.user.get_role() or not g.user.get_role().can_access_admin: return render_template('403.html'), 403 return f(*args, **kwargs) return decorated_function
... if not g.user.get_role() or not g.user.get_role().can_access_admin: return render_template('403.html'), 403 ...
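The doubled `not ... or not ...` check relies on `or` short-circuiting: when `get_role()` returns nothing, `.can_access_admin` is never evaluated. The same guard in isolation:

```python
class Role(object):
    can_access_admin = False

def admin_allowed(role):
    if not role or not role.can_access_admin:  # short-circuit guards the attribute access
        return False
    return True

print(admin_allowed(None))    # False, with no AttributeError
print(admin_allowed(Role()))  # False
```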
aa65464c86c562a690ba42901fa9dc24f17ba714
xbrowse_server/base/management/commands/add_project.py
xbrowse_server/base/management/commands/add_project.py
from django.core.management.base import BaseCommand from xbrowse_server.base.models import Project class Command(BaseCommand): def handle(self, *args, **options): project_id = args[0] if Project.objects.filter(project_id=project_id).exists(): raise Exception("Project exists :(") Project.objects.create(project_id=project_id)
from django.core.management.base import BaseCommand from xbrowse_server.base.models import Project import sys class Command(BaseCommand): def handle(self, *args, **options): project_id = args[0] if "." in project_id: sys.exit("ERROR: A '.' in the project ID is not supported") if Project.objects.filter(project_id=project_id).exists(): raise Exception("Project exists :(") Project.objects.create(project_id=project_id)
Print error if dot in project ids
Print error if dot in project ids
Python
agpl-3.0
ssadedin/seqr,ssadedin/seqr,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/seqr,ssadedin/seqr,macarthur-lab/xbrowse,ssadedin/seqr,ssadedin/seqr
from django.core.management.base import BaseCommand from xbrowse_server.base.models import Project - + import sys class Command(BaseCommand): def handle(self, *args, **options): project_id = args[0] + if "." in project_id: + sys.exit("ERROR: A '.' in the project ID is not supported") + if Project.objects.filter(project_id=project_id).exists(): raise Exception("Project exists :(") Project.objects.create(project_id=project_id) +
Print error if dot in project ids
## Code Before: from django.core.management.base import BaseCommand from xbrowse_server.base.models import Project class Command(BaseCommand): def handle(self, *args, **options): project_id = args[0] if Project.objects.filter(project_id=project_id).exists(): raise Exception("Project exists :(") Project.objects.create(project_id=project_id) ## Instruction: Print error if dot in project ids ## Code After: from django.core.management.base import BaseCommand from xbrowse_server.base.models import Project import sys class Command(BaseCommand): def handle(self, *args, **options): project_id = args[0] if "." in project_id: sys.exit("ERROR: A '.' in the project ID is not supported") if Project.objects.filter(project_id=project_id).exists(): raise Exception("Project exists :(") Project.objects.create(project_id=project_id)
... from xbrowse_server.base.models import Project import sys ... project_id = args[0] if "." in project_id: sys.exit("ERROR: A '.' in the project ID is not supported") if Project.objects.filter(project_id=project_id).exists(): ...
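`sys.exit` with a string argument is a compact print-and-fail: once the resulting `SystemExit` reaches the top level, Python writes the message to stderr and exits with status 1. Catching it shows where the message is carried:

```python
import sys

try:
    sys.exit("ERROR: A '.' in the project ID is not supported")
except SystemExit as exc:
    # A non-integer argument becomes the exit message: left uncaught, Python
    # prints it to stderr and uses exit status 1.
    print(exc.code)
```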
eb0714767cf5c0fd89ff4e50e22445a5e436f94c
iopath/tabular/tabular_io.py
iopath/tabular/tabular_io.py
from typing import Any, Iterable from iopath.common.file_io import PathHandler class TabularUriParser: def parse_uri(self, uri: str) -> None: pass class TabularPathHandler(PathHandler): def _opent( self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any ) -> Iterable[Any]: assert mode == "r"
from typing import Any from iopath.common.file_io import PathHandler, TabularIO class TabularUriParser: def parse_uri(self, uri: str) -> None: pass class TabularPathHandler(PathHandler): def _opent( self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any ) -> TabularIO: assert mode == "r"
Update type signature of AIRStorePathHandler.opent()
Update type signature of AIRStorePathHandler.opent() Summary: The previous diff updated the type signature of the `PathHandler.opent()` method to return a custom context manager. Here, we update the return type of the overriden `AIRStorePathHandler.opent()` method to return an implementation of the `PathHandlerContext` protocol, namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow Pyre to carry out static type checking. Reviewed By: mackorone Differential Revision: D33833561 fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26
Python
mit
facebookresearch/iopath,facebookresearch/iopath
- from typing import Any, Iterable + from typing import Any - from iopath.common.file_io import PathHandler + from iopath.common.file_io import PathHandler, TabularIO class TabularUriParser: def parse_uri(self, uri: str) -> None: pass class TabularPathHandler(PathHandler): def _opent( self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any - ) -> Iterable[Any]: + ) -> TabularIO: assert mode == "r"
Update type signature of AIRStorePathHandler.opent()
## Code Before: from typing import Any, Iterable from iopath.common.file_io import PathHandler class TabularUriParser: def parse_uri(self, uri: str) -> None: pass class TabularPathHandler(PathHandler): def _opent( self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any ) -> Iterable[Any]: assert mode == "r" ## Instruction: Update type signature of AIRStorePathHandler.opent() ## Code After: from typing import Any from iopath.common.file_io import PathHandler, TabularIO class TabularUriParser: def parse_uri(self, uri: str) -> None: pass class TabularPathHandler(PathHandler): def _opent( self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any ) -> TabularIO: assert mode == "r"
... from typing import Any from iopath.common.file_io import PathHandler, TabularIO ... self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any ) -> TabularIO: assert mode == "r" ...
9d162a2919a1c9b56ded74d40963fa022fc7943b
src/config/settings/testing.py
src/config/settings/testing.py
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
Disable logging in test runs
Disable logging in test runs SPEED!
Python
agpl-3.0
FlowFX/unkenmathe.de,FlowFX/unkenmathe.de,FlowFX/unkenmathe.de,FlowFX/unkenmathe.de
"""Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } + # Disable logging + import logging + logging.disable(logging.CRITICAL) + env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
Disable logging in test runs
## Code Before: """Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3' ## Instruction: Disable logging in test runs ## Code After: """Django configuration for testing and CI environments.""" from .common import * # Use in-memory file storage DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage' # Speed! PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.MD5PasswordHasher', ) # Database DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', 'TEST': {} } } # Disable logging import logging logging.disable(logging.CRITICAL) env = get_secret("ENVIRONMENT") import sys if os.path.isdir('/Volumes/RAMDisk') and not env == 'ci' and not 'create-db' in sys.argv: # and this allows you to use --reuse-db to skip re-creating the db, # even faster! # # To create the RAMDisk, use bash: # $ hdiutil attach -nomount ram://$((2 * 1024 * SIZE_IN_MB)) # /dev/disk2 # $ diskutil eraseVolume HFS+ RAMDisk /dev/disk2 DATABASES['default']['TEST']['NAME'] = '/Volumes/RAMDisk/unkenmathe.test.db.sqlite3'
# ... existing code ...
# Disable logging
import logging
logging.disable(logging.CRITICAL)

env = get_secret("ENVIRONMENT")
# ... rest of the code ...
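Side note on the record above: `logging.disable(logging.CRITICAL)` is a process-wide switch, so the one call added in the settings module silences every logger for the whole test run. A minimal sketch of that behavior, using a made-up logger name (not from the repository):

import logging

logging.disable(logging.CRITICAL)  # suppress all records at CRITICAL and below

log = logging.getLogger('um.example')  # hypothetical logger name
log.critical('never printed')          # swallowed once disable() has run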
7352a257a08ad4d41261dd0c1076cde966d2a5c2
sharer/multi.py
sharer/multi.py
from .base import AbstractSharer


class MultiSharer(AbstractSharer):
    def __init__(self, **kw):
        super(MultiSharer, self).__init__()
        self.sharers = {}
        self.add_sharers(**kw)

    def add_sharers(self, **kw):
        for key, val in kw.iteritems():
            self.sharers[key] = val

    def send(self, *args, **kw):
        for sharer in self.sharers.itervalues():
            sharer.send(*args, **kw)
from .base import AbstractSharer


class MultiSharer(AbstractSharer):
    def __init__(self, **kw):
        super(MultiSharer, self).__init__()
        self.sharers = {}
        self.add_sharers(**kw)

    def add_sharers(self, **kw):
        for key, val in kw.iteritems():
            self.sharers[key] = val

    def send(self, *args, **kw):
        services = kw.pop('_services', {})
        for name, sharer in self.sharers.iteritems():
            if services.get(name, True):
                services[name] = sharer.send(*args, **kw)
        return services
Add _services keyword argument to MultiSharer's send.
Add _services keyword argument to MultiSharer's send.
Python
mit
FelixLoether/python-sharer
from .base import AbstractSharer


class MultiSharer(AbstractSharer):
    def __init__(self, **kw):
        super(MultiSharer, self).__init__()
        self.sharers = {}
        self.add_sharers(**kw)

    def add_sharers(self, **kw):
        for key, val in kw.iteritems():
            self.sharers[key] = val

    def send(self, *args, **kw):
+         services = kw.pop('_services', {})
-         for sharer in self.sharers.itervalues():
+         for name, sharer in self.sharers.iteritems():
+             if services.get(name, True):
-             sharer.send(*args, **kw)
+                 services[name] = sharer.send(*args, **kw)
+         return services
Add _services keyword argument to MultiSharer's send.
## Code Before:
from .base import AbstractSharer


class MultiSharer(AbstractSharer):
    def __init__(self, **kw):
        super(MultiSharer, self).__init__()
        self.sharers = {}
        self.add_sharers(**kw)

    def add_sharers(self, **kw):
        for key, val in kw.iteritems():
            self.sharers[key] = val

    def send(self, *args, **kw):
        for sharer in self.sharers.itervalues():
            sharer.send(*args, **kw)

## Instruction:
Add _services keyword argument to MultiSharer's send.

## Code After:
from .base import AbstractSharer


class MultiSharer(AbstractSharer):
    def __init__(self, **kw):
        super(MultiSharer, self).__init__()
        self.sharers = {}
        self.add_sharers(**kw)

    def add_sharers(self, **kw):
        for key, val in kw.iteritems():
            self.sharers[key] = val

    def send(self, *args, **kw):
        services = kw.pop('_services', {})
        for name, sharer in self.sharers.iteritems():
            if services.get(name, True):
                services[name] = sharer.send(*args, **kw)
        return services
# ... existing code ...
    def send(self, *args, **kw):
        services = kw.pop('_services', {})
        for name, sharer in self.sharers.iteritems():
            if services.get(name, True):
                services[name] = sharer.send(*args, **kw)
        return services
# ... rest of the code ...
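For readers of this record, a short usage sketch of the new `_services` keyword. The stub class below is an assumption made purely for illustration (the repository's concrete AbstractSharer subclasses are not shown here), written Python 2 style to match the `iteritems` calls in the diff:

class StubSharer(object):
    # hypothetical stand-in for a concrete sharer
    def __init__(self, result):
        self.result = result

    def send(self, *args, **kw):
        return self.result


multi = MultiSharer(twitter=StubSharer('tweet-id'), facebook=StubSharer('post-id'))

# A falsy value under a service name skips that sharer entirely; every
# service that does run has its send() return value written back into
# the dict, which MultiSharer.send() then returns.
results = multi.send('hello world', _services={'facebook': False})
# results == {'twitter': 'tweet-id', 'facebook': False}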
ef69cad1175fa92543fce085cd46a9ec990fa55b
nbresuse/__init__.py
nbresuse/__init__.py
from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    route_pattern = url_path_join(nbapp.web_app.settings['base_url'], '/api/nbresuse/v1')
    nbapp.web_app.add_handlers('.*', [(route_pattern, ApiHandler)])
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()
from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    base_url = nbapp.web_app.settings["base_url"]
    nbapp.web_app.add_handlers(
        ".*",
        [
            (url_path_join(base_url, "/api/nbresuse/v1"), ApiHandler),
            (url_path_join(base_url, "/metrics"), ApiHandler),
        ],
    )
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()
Add back the /metrics endpoint
Add back the /metrics endpoint
Python
bsd-2-clause
yuvipanda/nbresuse,yuvipanda/nbresuse
from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
-     route_pattern = url_path_join(nbapp.web_app.settings['base_url'], '/api/nbresuse/v1')
-     nbapp.web_app.add_handlers('.*', [(route_pattern, ApiHandler)])
+     base_url = nbapp.web_app.settings["base_url"]
+     nbapp.web_app.add_handlers(
+         ".*",
+         [
+             (url_path_join(base_url, "/api/nbresuse/v1"), ApiHandler),
+             (url_path_join(base_url, "/metrics"), ApiHandler),
+         ],
+     )
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()
+
Add back the /metrics endpoint
## Code Before:
from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    route_pattern = url_path_join(nbapp.web_app.settings['base_url'], '/api/nbresuse/v1')
    nbapp.web_app.add_handlers('.*', [(route_pattern, ApiHandler)])
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()

## Instruction:
Add back the /metrics endpoint

## Code After:
from notebook.utils import url_path_join
from tornado import ioloop

from nbresuse.api import ApiHandler
from nbresuse.config import ResourceUseDisplay
from nbresuse.metrics import PSUtilMetricsLoader
from nbresuse.prometheus import PrometheusHandler


def _jupyter_server_extension_paths():
    """
    Set up the server extension for collecting metrics
    """
    return [{"module": "nbresuse"}]


def _jupyter_nbextension_paths():
    """
    Set up the notebook extension for displaying metrics
    """
    return [
        {
            "section": "notebook",
            "dest": "nbresuse",
            "src": "static",
            "require": "nbresuse/main",
        }
    ]


def load_jupyter_server_extension(nbapp):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=nbapp)
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    base_url = nbapp.web_app.settings["base_url"]
    nbapp.web_app.add_handlers(
        ".*",
        [
            (url_path_join(base_url, "/api/nbresuse/v1"), ApiHandler),
            (url_path_join(base_url, "/metrics"), ApiHandler),
        ],
    )
    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(nbapp)), 1000
    )
    callback.start()
...
    nbapp.web_app.settings["nbresuse_display_config"] = resuseconfig
    base_url = nbapp.web_app.settings["base_url"]
    nbapp.web_app.add_handlers(
        ".*",
        [
            (url_path_join(base_url, "/api/nbresuse/v1"), ApiHandler),
            (url_path_join(base_url, "/metrics"), ApiHandler),
        ],
    )
    callback = ioloop.PeriodicCallback(
...
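Closing note on this record: with the handler table above, both the versioned API path and the plain /metrics path resolve to ApiHandler under the notebook's base_url. A rough smoke test against a running server, where the address and token are placeholders you would substitute yourself:

import requests  # assumes the requests package is installed

base = 'http://localhost:8888'          # placeholder server address
token = 'REPLACE_WITH_NOTEBOOK_TOKEN'   # placeholder API token

resp = requests.get(base + '/metrics',
                    headers={'Authorization': 'token ' + token})
print(resp.status_code)
print(resp.text[:200])  # first few metric lines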