Dataset schema (one row per column in the dump):

| field        | type   | lengths / distinct values |
|--------------|--------|---------------------------|
| commit       | string | lengths 40 - 40           |
| old_file     | string | lengths 4 - 118           |
| new_file     | string | lengths 4 - 118           |
| old_contents | string | lengths 10 - 2.94k        |
| new_contents | string | lengths 21 - 3.18k        |
| subject      | string | lengths 16 - 444          |
| message      | string | lengths 17 - 2.63k        |
| lang         | string | 1 class                   |
| license      | string | 13 classes                |
| repos        | string | lengths 5 - 43k           |
| ndiff        | string | lengths 52 - 3.32k        |
| instruction  | string | lengths 16 - 444          |
| content      | string | lengths 133 - 4.32k       |
| fuzzy_diff   | string | lengths 16 - 3.18k        |
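Since every record below is a flat mapping of the fields above, here is a minimal sketch of how one might load and inspect the dump programmatically. This is an illustration, not part of the dataset itself: the file name `commits.jsonl` and the JSON Lines layout are assumptions about how the dump is stored, and regenerating the `ndiff` column with Python's `difflib.ndiff` is an educated guess based on that column's name and visible format.

```python
import difflib
import json

# Assumed location and layout of the dump: one JSON object per line.
DATA_PATH = "commits.jsonl"  # hypothetical file name, adjust as needed


def iter_records(path):
    """Yield one commit record (a dict with the fields listed above) per line."""
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            line = line.strip()
            if line:
                yield json.loads(line)


def recompute_ndiff(old_contents, new_contents):
    """Rebuild a diff between the two file versions with difflib.ndiff.

    Assumption: the dataset's `ndiff` column looks like (possibly
    post-processed) difflib.ndiff output, so this may not reproduce it
    byte-for-byte.
    """
    return "".join(
        difflib.ndiff(
            old_contents.splitlines(keepends=True),
            new_contents.splitlines(keepends=True),
        )
    )


if __name__ == "__main__":
    for record in iter_records(DATA_PATH):
        print(record["commit"], record["old_file"], "->", record["new_file"])
        print(record["subject"])
        print(recompute_ndiff(record["old_contents"], record["new_contents"]))
        break  # inspect only the first record
```

The record cells that follow are reproduced verbatim from the source commits, including any typos or bugs they originally contained.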
858bc6f152a87298f9bd3568712aed49b6e02e42
suave/suave.py
suave/suave.py
import curses import os import time from box import Box from utils import load_yaml def main(screen): """ Draws and redraws the screen. """ # Hide the cursor. curses.curs_set(0) # Load config from file. config = load_yaml(os.path.expanduser('~/.suave/config.yml')) # Create boxes from config. boxes = [] for box in config: boxes.append( Box( screen=screen, rows=box['rows'], columns=box['columns'], rows_offset=box['rows-offset'], columns_offset=box['columns-offset'], command=box['command'], interval=box['interval'], ) ) while True: # Redraw the screen only when it changes. if screen.is_wintouched(): screen.clear() screen.refresh() # Give every box an opportunity to redraw if it has changed. [box.redraw_if_changed() for box in boxes] # Wait before redrawing again. time.sleep(1) curses.wrapper(main)
import curses import os from box import Box from utils import load_yaml def main(screen): """ Draws and redraws the screen. """ # Hide the cursor. curses.curs_set(0) # Load config from file. config = load_yaml(os.path.expanduser('~/.suave/config.yml')) # Create boxes from config. boxes = [] for box in config: boxes.append( Box( screen=screen, rows=box['rows'], columns=box['columns'], rows_offset=box['rows-offset'], columns_offset=box['columns-offset'], command=box['command'], interval=box['interval'], ) ) while True: # Redraw the screen only when it changes. if screen.is_wintouched(): screen.clear() screen.refresh() # Give every box an opportunity to redraw if it has changed. [box.redraw_if_changed() for box in boxes] # Wait before redrawing again. curses.napms(1000) curses.wrapper(main)
Use napms method from curses rather than sleep method from time
Use napms method from curses rather than sleep method from time
Python
mit
countermeasure/suave
import curses import os - import time from box import Box from utils import load_yaml def main(screen): """ Draws and redraws the screen. """ # Hide the cursor. curses.curs_set(0) # Load config from file. config = load_yaml(os.path.expanduser('~/.suave/config.yml')) # Create boxes from config. boxes = [] for box in config: boxes.append( Box( screen=screen, rows=box['rows'], columns=box['columns'], rows_offset=box['rows-offset'], columns_offset=box['columns-offset'], command=box['command'], interval=box['interval'], ) ) while True: # Redraw the screen only when it changes. if screen.is_wintouched(): screen.clear() screen.refresh() # Give every box an opportunity to redraw if it has changed. [box.redraw_if_changed() for box in boxes] # Wait before redrawing again. - time.sleep(1) + curses.napms(1000) curses.wrapper(main)
Use napms method from curses rather than sleep method from time
## Code Before: import curses import os import time from box import Box from utils import load_yaml def main(screen): """ Draws and redraws the screen. """ # Hide the cursor. curses.curs_set(0) # Load config from file. config = load_yaml(os.path.expanduser('~/.suave/config.yml')) # Create boxes from config. boxes = [] for box in config: boxes.append( Box( screen=screen, rows=box['rows'], columns=box['columns'], rows_offset=box['rows-offset'], columns_offset=box['columns-offset'], command=box['command'], interval=box['interval'], ) ) while True: # Redraw the screen only when it changes. if screen.is_wintouched(): screen.clear() screen.refresh() # Give every box an opportunity to redraw if it has changed. [box.redraw_if_changed() for box in boxes] # Wait before redrawing again. time.sleep(1) curses.wrapper(main) ## Instruction: Use napms method from curses rather than sleep method from time ## Code After: import curses import os from box import Box from utils import load_yaml def main(screen): """ Draws and redraws the screen. """ # Hide the cursor. curses.curs_set(0) # Load config from file. config = load_yaml(os.path.expanduser('~/.suave/config.yml')) # Create boxes from config. boxes = [] for box in config: boxes.append( Box( screen=screen, rows=box['rows'], columns=box['columns'], rows_offset=box['rows-offset'], columns_offset=box['columns-offset'], command=box['command'], interval=box['interval'], ) ) while True: # Redraw the screen only when it changes. if screen.is_wintouched(): screen.clear() screen.refresh() # Give every box an opportunity to redraw if it has changed. [box.redraw_if_changed() for box in boxes] # Wait before redrawing again. curses.napms(1000) curses.wrapper(main)
// ... existing code ... import os // ... modified code ... # Wait before redrawing again. curses.napms(1000) // ... rest of the code ...
e95deac720589eaf81dbb54cadcef9a3459f7d02
youtube/downloader.py
youtube/downloader.py
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
Add comment explaining why use choose bestaudio for audio downloads.
Add comment explaining why use choose bestaudio for audio downloads.
Python
mit
tpcstld/youtube,tpcstld/youtube,tpcstld/youtube
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 + # We don't really have to do this, but YoutubeDL sometimes has a problem + # combining the video and audio portions of webm files, so this is a good + # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
Add comment explaining why use choose bestaudio for audio downloads.
## Code Before: import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title ## Instruction: Add comment explaining why use choose bestaudio for audio downloads. ## Code After: import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
# ... existing code ... downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: # ... rest of the code ...
88da3432dc0676cbe74c0d9f170fbd6f18f97f8a
examples/tornado_server.py
examples/tornado_server.py
from tornado import ioloop, web from jsonrpcserver import method, async_dispatch as dispatch @method async def ping(): return "pong" class MainHandler(web.RequestHandler): async def post(self): request = self.request.body.decode() response = await dispatch(request) print(response) if response.wanted: self.write(str(response)) app = web.Application([(r"/", MainHandler)]) if __name__ == "__main__": app.listen(5000) ioloop.IOLoop.current().start()
from tornado import ioloop, web from jsonrpcserver import method, async_dispatch as dispatch @method async def ping() -> str: return "pong" class MainHandler(web.RequestHandler): async def post(self) -> None: request = self.request.body.decode() response = await dispatch(request) if response.wanted: self.write(str(response)) app = web.Application([(r"/", MainHandler)]) if __name__ == "__main__": app.listen(5000) ioloop.IOLoop.current().start()
Remove unwanted print statement from example
Remove unwanted print statement from example
Python
mit
bcb/jsonrpcserver
from tornado import ioloop, web from jsonrpcserver import method, async_dispatch as dispatch @method - async def ping(): + async def ping() -> str: return "pong" class MainHandler(web.RequestHandler): - async def post(self): + async def post(self) -> None: request = self.request.body.decode() response = await dispatch(request) - print(response) if response.wanted: self.write(str(response)) app = web.Application([(r"/", MainHandler)]) if __name__ == "__main__": app.listen(5000) ioloop.IOLoop.current().start()
Remove unwanted print statement from example
## Code Before: from tornado import ioloop, web from jsonrpcserver import method, async_dispatch as dispatch @method async def ping(): return "pong" class MainHandler(web.RequestHandler): async def post(self): request = self.request.body.decode() response = await dispatch(request) print(response) if response.wanted: self.write(str(response)) app = web.Application([(r"/", MainHandler)]) if __name__ == "__main__": app.listen(5000) ioloop.IOLoop.current().start() ## Instruction: Remove unwanted print statement from example ## Code After: from tornado import ioloop, web from jsonrpcserver import method, async_dispatch as dispatch @method async def ping() -> str: return "pong" class MainHandler(web.RequestHandler): async def post(self) -> None: request = self.request.body.decode() response = await dispatch(request) if response.wanted: self.write(str(response)) app = web.Application([(r"/", MainHandler)]) if __name__ == "__main__": app.listen(5000) ioloop.IOLoop.current().start()
... @method async def ping() -> str: return "pong" ... class MainHandler(web.RequestHandler): async def post(self) -> None: request = self.request.body.decode() ... response = await dispatch(request) if response.wanted: ...
bc3aee78bb5be3afa639b8c572273b662aea1721
glue/tests/test_settings_helpers.py
glue/tests/test_settings_helpers.py
from mock import patch import os from glue.config import SettingRegistry from glue._settings_helpers import load_settings, save_settings def test_roundtrip(tmpdir): settings = SettingRegistry() settings.add('STRING', 'green', str) settings.add('INT', 3, int) settings.add('FLOAT', 5.5, float) settings.add('LIST', [1,2,3], list) with patch('glue.config.settings', settings): with patch('glue.config.CFG_DIR', tmpdir.strpath): settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] save_settings() assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg')) settings.STRING = 'red' settings.INT = 3 settings.FLOAT = 4.5 settings.LIST = ['DDD', 'EE', 'F'] load_settings(force=True) assert settings.STRING == 'blue' assert settings.INT == 4 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC']
from mock import patch import os from glue.config import SettingRegistry from glue._settings_helpers import load_settings, save_settings def test_roundtrip(tmpdir): settings = SettingRegistry() settings.add('STRING', 'green', str) settings.add('INT', 3, int) settings.add('FLOAT', 5.5, float) settings.add('LIST', [1,2,3], list) with patch('glue.config.settings', settings): with patch('glue.config.CFG_DIR', tmpdir.strpath): settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] settings.reset_defaults() assert settings.STRING == 'green' assert settings.INT == 3 assert settings.FLOAT == 5.5 assert settings.LIST == [1, 2, 3] settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] save_settings() assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg')) settings.reset_defaults() settings.STRING = 'red' settings.INT = 5 # Loading settings will only change settings that have not been # changed from the defaults... load_settings() assert settings.STRING == 'red' assert settings.INT == 5 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC'] # ... unless the ``force=True`` option is passed load_settings(force=True) assert settings.STRING == 'blue' assert settings.INT == 4 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC']
Improve unit test for settings helpers
Improve unit test for settings helpers
Python
bsd-3-clause
saimn/glue,stscieisenhamer/glue,stscieisenhamer/glue,saimn/glue
from mock import patch import os from glue.config import SettingRegistry from glue._settings_helpers import load_settings, save_settings def test_roundtrip(tmpdir): settings = SettingRegistry() + settings.add('STRING', 'green', str) settings.add('INT', 3, int) settings.add('FLOAT', 5.5, float) settings.add('LIST', [1,2,3], list) with patch('glue.config.settings', settings): with patch('glue.config.CFG_DIR', tmpdir.strpath): settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] + settings.reset_defaults() + + assert settings.STRING == 'green' + assert settings.INT == 3 + assert settings.FLOAT == 5.5 + assert settings.LIST == [1, 2, 3] + + settings.STRING = 'blue' + settings.INT = 4 + settings.FLOAT = 3.5 + settings.LIST = ['A', 'BB', 'CCC'] + save_settings() assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg')) + settings.reset_defaults() + settings.STRING = 'red' - settings.INT = 3 + settings.INT = 5 - settings.FLOAT = 4.5 - settings.LIST = ['DDD', 'EE', 'F'] + # Loading settings will only change settings that have not been + # changed from the defaults... + load_settings() + + assert settings.STRING == 'red' + assert settings.INT == 5 + assert settings.FLOAT == 3.5 + assert settings.LIST == ['A', 'BB', 'CCC'] + + # ... unless the ``force=True`` option is passed load_settings(force=True) assert settings.STRING == 'blue' assert settings.INT == 4 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC']
Improve unit test for settings helpers
## Code Before: from mock import patch import os from glue.config import SettingRegistry from glue._settings_helpers import load_settings, save_settings def test_roundtrip(tmpdir): settings = SettingRegistry() settings.add('STRING', 'green', str) settings.add('INT', 3, int) settings.add('FLOAT', 5.5, float) settings.add('LIST', [1,2,3], list) with patch('glue.config.settings', settings): with patch('glue.config.CFG_DIR', tmpdir.strpath): settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] save_settings() assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg')) settings.STRING = 'red' settings.INT = 3 settings.FLOAT = 4.5 settings.LIST = ['DDD', 'EE', 'F'] load_settings(force=True) assert settings.STRING == 'blue' assert settings.INT == 4 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC'] ## Instruction: Improve unit test for settings helpers ## Code After: from mock import patch import os from glue.config import SettingRegistry from glue._settings_helpers import load_settings, save_settings def test_roundtrip(tmpdir): settings = SettingRegistry() settings.add('STRING', 'green', str) settings.add('INT', 3, int) settings.add('FLOAT', 5.5, float) settings.add('LIST', [1,2,3], list) with patch('glue.config.settings', settings): with patch('glue.config.CFG_DIR', tmpdir.strpath): settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] settings.reset_defaults() assert settings.STRING == 'green' assert settings.INT == 3 assert settings.FLOAT == 5.5 assert settings.LIST == [1, 2, 3] settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] save_settings() assert os.path.exists(os.path.join(tmpdir.strpath, 'settings.cfg')) settings.reset_defaults() settings.STRING = 'red' settings.INT = 5 # Loading settings will only change settings that have not been # changed from the defaults... load_settings() assert settings.STRING == 'red' assert settings.INT == 5 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC'] # ... unless the ``force=True`` option is passed load_settings(force=True) assert settings.STRING == 'blue' assert settings.INT == 4 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC']
// ... existing code ... settings = SettingRegistry() settings.add('STRING', 'green', str) // ... modified code ... settings.reset_defaults() assert settings.STRING == 'green' assert settings.INT == 3 assert settings.FLOAT == 5.5 assert settings.LIST == [1, 2, 3] settings.STRING = 'blue' settings.INT = 4 settings.FLOAT = 3.5 settings.LIST = ['A', 'BB', 'CCC'] save_settings() ... settings.reset_defaults() settings.STRING = 'red' settings.INT = 5 # Loading settings will only change settings that have not been # changed from the defaults... load_settings() assert settings.STRING == 'red' assert settings.INT == 5 assert settings.FLOAT == 3.5 assert settings.LIST == ['A', 'BB', 'CCC'] # ... unless the ``force=True`` option is passed load_settings(force=True) // ... rest of the code ...
259555775c098153b1715f85561309b42e29ee7d
setup.py
setup.py
from distutils.core import setup from avena import avena _classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Multimedia :: Graphics', ] with open('README.rst', 'r') as rst_file: _long_description = rst_file.read() _setup_args = { 'author': avena.__author__, 'author_email': avena.__email__, 'classifiers': _classifiers, 'description': avena.__doc__, 'license': avena.__license__, 'long_description': _long_description, 'name': 'Avena', 'url': 'https://bitbucket.org/eliteraspberries/avena', 'version': avena.__version__, } if __name__ == '__main__': setup(packages=['avena'], **_setup_args)
from distutils.core import setup from avena import avena _classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Multimedia :: Graphics', ] with open('README.rst', 'r') as rst_file: _long_description = rst_file.read() _setup_args = { 'author': avena.__author__, 'author_email': avena.__email__, 'classifiers': _classifiers, 'description': avena.__doc__, 'license': avena.__license__, 'long_description': _long_description, 'name': 'Avena', 'url': 'https://bitbucket.org/eliteraspberries/avena', 'version': avena.__version__, } if __name__ == '__main__': setup(packages=['avena'], scripts=['scripts/avena'], **_setup_args)
Install the script with the library.
Install the script with the library.
Python
isc
eliteraspberries/avena
from distutils.core import setup from avena import avena _classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Multimedia :: Graphics', ] with open('README.rst', 'r') as rst_file: _long_description = rst_file.read() _setup_args = { 'author': avena.__author__, 'author_email': avena.__email__, 'classifiers': _classifiers, 'description': avena.__doc__, 'license': avena.__license__, 'long_description': _long_description, 'name': 'Avena', 'url': 'https://bitbucket.org/eliteraspberries/avena', 'version': avena.__version__, } if __name__ == '__main__': - setup(packages=['avena'], + setup(packages=['avena'], scripts=['scripts/avena'], **_setup_args)
Install the script with the library.
## Code Before: from distutils.core import setup from avena import avena _classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Multimedia :: Graphics', ] with open('README.rst', 'r') as rst_file: _long_description = rst_file.read() _setup_args = { 'author': avena.__author__, 'author_email': avena.__email__, 'classifiers': _classifiers, 'description': avena.__doc__, 'license': avena.__license__, 'long_description': _long_description, 'name': 'Avena', 'url': 'https://bitbucket.org/eliteraspberries/avena', 'version': avena.__version__, } if __name__ == '__main__': setup(packages=['avena'], **_setup_args) ## Instruction: Install the script with the library. ## Code After: from distutils.core import setup from avena import avena _classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Multimedia :: Graphics', ] with open('README.rst', 'r') as rst_file: _long_description = rst_file.read() _setup_args = { 'author': avena.__author__, 'author_email': avena.__email__, 'classifiers': _classifiers, 'description': avena.__doc__, 'license': avena.__license__, 'long_description': _long_description, 'name': 'Avena', 'url': 'https://bitbucket.org/eliteraspberries/avena', 'version': avena.__version__, } if __name__ == '__main__': setup(packages=['avena'], scripts=['scripts/avena'], **_setup_args)
# ... existing code ... setup(packages=['avena'], scripts=['scripts/avena'], **_setup_args) # ... rest of the code ...
26c725d3e6b1d5737a0efcbcd2371ff066a13a86
tests/test_utils.py
tests/test_utils.py
from expert_tourist.utils import gmaps_url_to_coords from tests.tests import BaseTestConfig class TestUtils(BaseTestConfig): def test_url_to_coords(self): url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035' lat, long = gmaps_url_to_coords(url) self.assertEqual(lat, 9.8757875656828) self.assertEqual(long, -84.03733452782035)
from unittest import TestCase from expert_tourist.utils import gmaps_url_to_coords class TestUtils(TestCase): def test_url_to_coords(self): url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035' lat, long = gmaps_url_to_coords(url) self.assertEqual(lat, 9.8757875656828) self.assertEqual(long, -84.03733452782035)
Refactor test to implement 87ceac3 changes
Refactor test to implement 87ceac3 changes
Python
mit
richin13/expert-tourist
+ from unittest import TestCase + from expert_tourist.utils import gmaps_url_to_coords - from tests.tests import BaseTestConfig - class TestUtils(BaseTestConfig): + class TestUtils(TestCase): + def test_url_to_coords(self): url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035' lat, long = gmaps_url_to_coords(url) self.assertEqual(lat, 9.8757875656828) self.assertEqual(long, -84.03733452782035)
Refactor test to implement 87ceac3 changes
## Code Before: from expert_tourist.utils import gmaps_url_to_coords from tests.tests import BaseTestConfig class TestUtils(BaseTestConfig): def test_url_to_coords(self): url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035' lat, long = gmaps_url_to_coords(url) self.assertEqual(lat, 9.8757875656828) self.assertEqual(long, -84.03733452782035) ## Instruction: Refactor test to implement 87ceac3 changes ## Code After: from unittest import TestCase from expert_tourist.utils import gmaps_url_to_coords class TestUtils(TestCase): def test_url_to_coords(self): url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035' lat, long = gmaps_url_to_coords(url) self.assertEqual(lat, 9.8757875656828) self.assertEqual(long, -84.03733452782035)
# ... existing code ... from unittest import TestCase from expert_tourist.utils import gmaps_url_to_coords class TestUtils(TestCase): def test_url_to_coords(self): # ... rest of the code ...
dae9d7d67aaf2ab8d39b232d243d860d9597bbd2
django_excel_tools/exceptions.py
django_excel_tools/exceptions.py
class BaseExcelError(Exception): def __init__(self, message): super(BaseExcelError, self).__init__() self.message = message class ValidationError(BaseExcelError): pass class ColumnNotEqualError(BaseExcelError): pass class FieldNotExist(BaseExcelError): pass
class BaseExcelError(Exception): def __init__(self, message): super(BaseExcelError, self).__init__() self.message = message class ValidationError(BaseExcelError): pass class ColumnNotEqualError(BaseExcelError): pass class FieldNotExist(BaseExcelError): pass class SerializerConfigError(BaseExcelError): pass
Add error when serializer setup has error
Add error when serializer setup has error
Python
mit
NorakGithub/django-excel-tools
class BaseExcelError(Exception): def __init__(self, message): super(BaseExcelError, self).__init__() self.message = message class ValidationError(BaseExcelError): pass class ColumnNotEqualError(BaseExcelError): pass class FieldNotExist(BaseExcelError): pass + + class SerializerConfigError(BaseExcelError): + pass +
Add error when serializer setup has error
## Code Before: class BaseExcelError(Exception): def __init__(self, message): super(BaseExcelError, self).__init__() self.message = message class ValidationError(BaseExcelError): pass class ColumnNotEqualError(BaseExcelError): pass class FieldNotExist(BaseExcelError): pass ## Instruction: Add error when serializer setup has error ## Code After: class BaseExcelError(Exception): def __init__(self, message): super(BaseExcelError, self).__init__() self.message = message class ValidationError(BaseExcelError): pass class ColumnNotEqualError(BaseExcelError): pass class FieldNotExist(BaseExcelError): pass class SerializerConfigError(BaseExcelError): pass
// ... existing code ... pass class SerializerConfigError(BaseExcelError): pass // ... rest of the code ...
09468a6411a5c0816ecb2f79037b0a79b3ceb9c5
lib/carbon/hashing.py
lib/carbon/hashing.py
import hashlib import bisect serverRing = None class ConsistentHashRing: def __init__(self, nodes, replica_count=100): self.ring = [] self.replica_count = replica_count for node in nodes: self.add_node(node) def compute_ring_position(self, key): big_hash = hashlib.md5( str(key) ).hexdigest() small_hash = int(big_hash[:4], 16) return small_hash def add_node(self, key): for i in range(self.replica_count): replica_key = "%s:%d" % (key, i) position = self.compute_ring_position(replica_key) entry = (position, key) bisect.insort(self.ring, entry) def remove_node(self, key): self.ring = [entry for entry in self.ring if entry[1] != key] def get_node(self, key): assert self.ring position = self.compute_ring_position(key) search_entry = (position, None) index = bisect.bisect_left(self.ring, search_entry) index %= len(self.ring) entry = self.ring[index] return entry[1] def setDestinationServers(servers): global serverRing serverRing = ConsistentHashRing(servers) def getDestinations(metric): return [ serverRing.get_node(metric) ]
try: from hashlib import md5 except ImportError: from md5 import md5 import bisect serverRing = None class ConsistentHashRing: def __init__(self, nodes, replica_count=100): self.ring = [] self.replica_count = replica_count for node in nodes: self.add_node(node) def compute_ring_position(self, key): big_hash = md5( str(key) ).hexdigest() small_hash = int(big_hash[:4], 16) return small_hash def add_node(self, key): for i in range(self.replica_count): replica_key = "%s:%d" % (key, i) position = self.compute_ring_position(replica_key) entry = (position, key) bisect.insort(self.ring, entry) def remove_node(self, key): self.ring = [entry for entry in self.ring if entry[1] != key] def get_node(self, key): assert self.ring position = self.compute_ring_position(key) search_entry = (position, None) index = bisect.bisect_left(self.ring, search_entry) index %= len(self.ring) entry = self.ring[index] return entry[1] def setDestinationServers(servers): global serverRing serverRing = ConsistentHashRing(servers) def getDestinations(metric): return [ serverRing.get_node(metric) ]
Make compatible with python 2.4 hashlib was added in python 2.5, but just using the md5() method so fall back to md5.md5() if we can't import hashlib
Make compatible with python 2.4 hashlib was added in python 2.5, but just using the md5() method so fall back to md5.md5() if we can't import hashlib
Python
apache-2.0
kharandziuk/carbon,criteo-forks/carbon,krux/carbon,graphite-project/carbon,pratX/carbon,johnseekins/carbon,graphite-server/carbon,JeanFred/carbon,mleinart/carbon,benburry/carbon,graphite-server/carbon,iain-buclaw-sociomantic/carbon,obfuscurity/carbon,xadjmerripen/carbon,cbowman0/carbon,deniszh/carbon,obfuscurity/carbon,lyft/carbon,deniszh/carbon,pratX/carbon,johnseekins/carbon,cbowman0/carbon,criteo-forks/carbon,benburry/carbon,kharandziuk/carbon,protochron/carbon,lyft/carbon,protochron/carbon,piotr1212/carbon,graphite-project/carbon,krux/carbon,JeanFred/carbon,pu239ppy/carbon,mleinart/carbon,pu239ppy/carbon,piotr1212/carbon,iain-buclaw-sociomantic/carbon,xadjmerripen/carbon
- import hashlib + try: + from hashlib import md5 + except ImportError: + from md5 import md5 import bisect serverRing = None class ConsistentHashRing: def __init__(self, nodes, replica_count=100): self.ring = [] self.replica_count = replica_count for node in nodes: self.add_node(node) def compute_ring_position(self, key): - big_hash = hashlib.md5( str(key) ).hexdigest() + big_hash = md5( str(key) ).hexdigest() small_hash = int(big_hash[:4], 16) return small_hash def add_node(self, key): for i in range(self.replica_count): replica_key = "%s:%d" % (key, i) position = self.compute_ring_position(replica_key) entry = (position, key) bisect.insort(self.ring, entry) def remove_node(self, key): self.ring = [entry for entry in self.ring if entry[1] != key] def get_node(self, key): assert self.ring position = self.compute_ring_position(key) search_entry = (position, None) index = bisect.bisect_left(self.ring, search_entry) index %= len(self.ring) entry = self.ring[index] return entry[1] def setDestinationServers(servers): global serverRing serverRing = ConsistentHashRing(servers) def getDestinations(metric): return [ serverRing.get_node(metric) ]
Make compatible with python 2.4 hashlib was added in python 2.5, but just using the md5() method so fall back to md5.md5() if we can't import hashlib
## Code Before: import hashlib import bisect serverRing = None class ConsistentHashRing: def __init__(self, nodes, replica_count=100): self.ring = [] self.replica_count = replica_count for node in nodes: self.add_node(node) def compute_ring_position(self, key): big_hash = hashlib.md5( str(key) ).hexdigest() small_hash = int(big_hash[:4], 16) return small_hash def add_node(self, key): for i in range(self.replica_count): replica_key = "%s:%d" % (key, i) position = self.compute_ring_position(replica_key) entry = (position, key) bisect.insort(self.ring, entry) def remove_node(self, key): self.ring = [entry for entry in self.ring if entry[1] != key] def get_node(self, key): assert self.ring position = self.compute_ring_position(key) search_entry = (position, None) index = bisect.bisect_left(self.ring, search_entry) index %= len(self.ring) entry = self.ring[index] return entry[1] def setDestinationServers(servers): global serverRing serverRing = ConsistentHashRing(servers) def getDestinations(metric): return [ serverRing.get_node(metric) ] ## Instruction: Make compatible with python 2.4 hashlib was added in python 2.5, but just using the md5() method so fall back to md5.md5() if we can't import hashlib ## Code After: try: from hashlib import md5 except ImportError: from md5 import md5 import bisect serverRing = None class ConsistentHashRing: def __init__(self, nodes, replica_count=100): self.ring = [] self.replica_count = replica_count for node in nodes: self.add_node(node) def compute_ring_position(self, key): big_hash = md5( str(key) ).hexdigest() small_hash = int(big_hash[:4], 16) return small_hash def add_node(self, key): for i in range(self.replica_count): replica_key = "%s:%d" % (key, i) position = self.compute_ring_position(replica_key) entry = (position, key) bisect.insort(self.ring, entry) def remove_node(self, key): self.ring = [entry for entry in self.ring if entry[1] != key] def get_node(self, key): assert self.ring position = self.compute_ring_position(key) search_entry = (position, None) index = bisect.bisect_left(self.ring, search_entry) index %= len(self.ring) entry = self.ring[index] return entry[1] def setDestinationServers(servers): global serverRing serverRing = ConsistentHashRing(servers) def getDestinations(metric): return [ serverRing.get_node(metric) ]
... try: from hashlib import md5 except ImportError: from md5 import md5 import bisect ... def compute_ring_position(self, key): big_hash = md5( str(key) ).hexdigest() small_hash = int(big_hash[:4], 16) ...
cc0f33a51f3b13cec191a7a97d20af95082e38db
tests/test_utils.py
tests/test_utils.py
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
Add test for utility function json_error_message()
Add test for utility function json_error_message()
Python
apache-2.0
UUDigitalHumanitieslab/texcavator,msassmann/texcavator,msassmann/texcavator,msassmann/texcavator,UUDigitalHumanitieslab/texcavator,UUDigitalHumanitieslab/texcavator,msassmann/texcavator
"""Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") + from nose.tools import assert_equals + from testfixtures import compare + + import texcavator.utils as utils + + + def test_json_error_message(): + response = utils.json_error_message('test') + + compare(response.content, '{"status": "error", "msg": "test"}') + assert_equals(response.status_code, 200) +
Add test for utility function json_error_message()
## Code Before: """Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") ## Instruction: Add test for utility function json_error_message() ## Code After: """Tests for the Texcavator utility functions""" import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200)
... os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings") from nose.tools import assert_equals from testfixtures import compare import texcavator.utils as utils def test_json_error_message(): response = utils.json_error_message('test') compare(response.content, '{"status": "error", "msg": "test"}') assert_equals(response.status_code, 200) ...
2d0b44d65a8167a105cbc63e704735b1c360e0c4
api/core/urls.py
api/core/urls.py
from django.urls import path, re_path from django.conf.urls.static import static from django.conf import settings from . import views urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [ path('go/<path:path>', views.redirector, name='redirector'), re_path('^', views.index, name='index'), ]
from django.conf import settings from django.conf.urls.static import static from django.contrib.auth.views import logout from django.urls import path, re_path from . import views urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [ path('go/<path:path>', views.redirector, name='redirector'), path('logout', logout, {'next_page': '/'}), re_path('^', views.index, name='index'), ]
Handle logout on the backend
Handle logout on the backend
Python
mit
citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement
+ from django.conf import settings + from django.conf.urls.static import static + from django.contrib.auth.views import logout from django.urls import path, re_path - from django.conf.urls.static import static - from django.conf import settings from . import views urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [ path('go/<path:path>', views.redirector, name='redirector'), + path('logout', logout, {'next_page': '/'}), re_path('^', views.index, name='index'), ]
Handle logout on the backend
## Code Before: from django.urls import path, re_path from django.conf.urls.static import static from django.conf import settings from . import views urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [ path('go/<path:path>', views.redirector, name='redirector'), re_path('^', views.index, name='index'), ] ## Instruction: Handle logout on the backend ## Code After: from django.conf import settings from django.conf.urls.static import static from django.contrib.auth.views import logout from django.urls import path, re_path from . import views urlpatterns = static('/compiled/', document_root=settings.BUILD_ROOT) + [ path('go/<path:path>', views.redirector, name='redirector'), path('logout', logout, {'next_page': '/'}), re_path('^', views.index, name='index'), ]
# ... existing code ... from django.conf import settings from django.conf.urls.static import static from django.contrib.auth.views import logout from django.urls import path, re_path # ... modified code ... path('go/<path:path>', views.redirector, name='redirector'), path('logout', logout, {'next_page': '/'}), re_path('^', views.index, name='index'), # ... rest of the code ...
4c71ba23720001d06d519a7828f2866814f1c46a
tests/conftest.py
tests/conftest.py
import pytest from UM.Application import Application class FixtureApplication(Application): def __init__(self): Application._instance = None super().__init__("test", "1.0") def functionEvent(self, event): pass def parseCommandLine(self): pass @pytest.fixture() def application(): return FixtureApplication()
import pytest from UM.Application import Application from UM.Signal import Signal class FixtureApplication(Application): def __init__(self): Application._instance = None super().__init__("test", "1.0") Signal._app = self def functionEvent(self, event): event.call() def parseCommandLine(self): pass @pytest.fixture() def application(): return FixtureApplication()
Make sure to set the test application instance as app for Signals
Make sure to set the test application instance as app for Signals This makes singals be properly emitted in tests
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
import pytest from UM.Application import Application + from UM.Signal import Signal class FixtureApplication(Application): def __init__(self): Application._instance = None super().__init__("test", "1.0") + Signal._app = self def functionEvent(self, event): - pass + event.call() def parseCommandLine(self): pass @pytest.fixture() def application(): return FixtureApplication()
Make sure to set the test application instance as app for Signals
## Code Before: import pytest from UM.Application import Application class FixtureApplication(Application): def __init__(self): Application._instance = None super().__init__("test", "1.0") def functionEvent(self, event): pass def parseCommandLine(self): pass @pytest.fixture() def application(): return FixtureApplication() ## Instruction: Make sure to set the test application instance as app for Signals ## Code After: import pytest from UM.Application import Application from UM.Signal import Signal class FixtureApplication(Application): def __init__(self): Application._instance = None super().__init__("test", "1.0") Signal._app = self def functionEvent(self, event): event.call() def parseCommandLine(self): pass @pytest.fixture() def application(): return FixtureApplication()
// ... existing code ... from UM.Application import Application from UM.Signal import Signal // ... modified code ... super().__init__("test", "1.0") Signal._app = self ... def functionEvent(self, event): event.call() // ... rest of the code ...
bbc65d55d247d290a427ac5ba2c43b9d0033654d
WeatherServer/weather/views.py
WeatherServer/weather/views.py
import IP from flask import Blueprint, request, render_template, jsonify weather = Blueprint('weather', __name__, url_prefix='/weather') @weather.route('/', methods=['GET']) def index(): ip = request.remote_addr location = IP.find(ip) return jsonify(location=location, ip=ip)
import IP from flask import Blueprint, request, render_template, jsonify weather = Blueprint('weather', __name__, url_prefix='/weather') @weather.route('/', methods=['GET']) def index(): if request.headers.getlist("X-Forwarded-For"): ip = request.headers.getlist("X-Forwarded-For")[0] else: ip = request.remotw_addr location = IP.find(ip) return jsonify(location=location, ip=ip)
Fix user's real ip address.
Fix user's real ip address.
Python
mit
keysona/WeatherServer,keysona/WeatherServer,keysona/WeatherServer,keysona/WeatherServer
import IP from flask import Blueprint, request, render_template, jsonify weather = Blueprint('weather', __name__, url_prefix='/weather') @weather.route('/', methods=['GET']) def index(): + if request.headers.getlist("X-Forwarded-For"): + ip = request.headers.getlist("X-Forwarded-For")[0] + else: - ip = request.remote_addr + ip = request.remotw_addr location = IP.find(ip) return jsonify(location=location, ip=ip)
Fix user's real ip address.
## Code Before: import IP from flask import Blueprint, request, render_template, jsonify weather = Blueprint('weather', __name__, url_prefix='/weather') @weather.route('/', methods=['GET']) def index(): ip = request.remote_addr location = IP.find(ip) return jsonify(location=location, ip=ip) ## Instruction: Fix user's real ip address. ## Code After: import IP from flask import Blueprint, request, render_template, jsonify weather = Blueprint('weather', __name__, url_prefix='/weather') @weather.route('/', methods=['GET']) def index(): if request.headers.getlist("X-Forwarded-For"): ip = request.headers.getlist("X-Forwarded-For")[0] else: ip = request.remotw_addr location = IP.find(ip) return jsonify(location=location, ip=ip)
# ... existing code ... def index(): if request.headers.getlist("X-Forwarded-For"): ip = request.headers.getlist("X-Forwarded-For")[0] else: ip = request.remotw_addr location = IP.find(ip) # ... rest of the code ...
6b93f6a6bedf875d4bad1af2493c91b28a625ea9
chempy/electrochemistry/tests/test_nernst.py
chempy/electrochemistry/tests/test_nernst.py
from __future__ import (absolute_import, division, print_function) from ..nernst import nernst_potential def test_nernst_potential(): # Sodium in cells assert abs(1000 * nernst_potential(145, 15, 1, 310) - 60.605) < 1e-4 # Potassium in cells assert abs(1000 * nernst_potential(4, 150, 1, 310) - (-96.8196)) < 1e-4
from __future__ import (absolute_import, division, print_function) from ..nernst import nernst_potential from chempy.util.testing import requires from chempy.units import default_units, default_constants, units_library def test_nernst_potential(): """ Test cases obtained from textbook examples of Nernst potential in cellular membranes. 310K = 37C, typical mammalian cell environment temperature. """ # Sodium in cells assert abs(1000 * nernst_potential(145, 15, 1, 310) - 60.605) < 1e-4 # Potassium in cells assert abs(1000 * nernst_potential(4, 150, 1, 310) - (-96.8196)) < 1e-4 # Calcium in cells assert abs(1000 * nernst_potential(2, 7e-5, 2, 310) - 137.0436) < 1e-4 # Chloride in cells assert abs(1000 * nernst_potential(110, 10, -1, 310) - (-64.0567)) < 1e-4 @requires(units_library) def test_nernst_potential_units(): v = nernst_potential(145, 15, 1, 310, default_constants, default_units) assert (1000 * v - 60.605) < 1e-4
Add additional testing to electrochemistry/Nernst
Add additional testing to electrochemistry/Nernst
Python
bsd-2-clause
bjodah/aqchem,bjodah/aqchem,bjodah/chempy,bjodah/chempy,bjodah/aqchem
from __future__ import (absolute_import, division, print_function) from ..nernst import nernst_potential + from chempy.util.testing import requires + from chempy.units import default_units, default_constants, units_library def test_nernst_potential(): + """ + Test cases obtained from textbook examples of Nernst potential in cellular + membranes. 310K = 37C, typical mammalian cell environment temperature. + """ # Sodium in cells assert abs(1000 * nernst_potential(145, 15, 1, 310) - 60.605) < 1e-4 # Potassium in cells assert abs(1000 * nernst_potential(4, 150, 1, 310) - (-96.8196)) < 1e-4 + # Calcium in cells + assert abs(1000 * nernst_potential(2, 7e-5, 2, 310) - 137.0436) < 1e-4 + # Chloride in cells + assert abs(1000 * nernst_potential(110, 10, -1, 310) - (-64.0567)) < 1e-4 + + @requires(units_library) + def test_nernst_potential_units(): + v = nernst_potential(145, 15, 1, 310, default_constants, default_units) + assert (1000 * v - 60.605) < 1e-4 +
Add additional testing to electrochemistry/Nernst
## Code Before: from __future__ import (absolute_import, division, print_function) from ..nernst import nernst_potential def test_nernst_potential(): # Sodium in cells assert abs(1000 * nernst_potential(145, 15, 1, 310) - 60.605) < 1e-4 # Potassium in cells assert abs(1000 * nernst_potential(4, 150, 1, 310) - (-96.8196)) < 1e-4 ## Instruction: Add additional testing to electrochemistry/Nernst ## Code After: from __future__ import (absolute_import, division, print_function) from ..nernst import nernst_potential from chempy.util.testing import requires from chempy.units import default_units, default_constants, units_library def test_nernst_potential(): """ Test cases obtained from textbook examples of Nernst potential in cellular membranes. 310K = 37C, typical mammalian cell environment temperature. """ # Sodium in cells assert abs(1000 * nernst_potential(145, 15, 1, 310) - 60.605) < 1e-4 # Potassium in cells assert abs(1000 * nernst_potential(4, 150, 1, 310) - (-96.8196)) < 1e-4 # Calcium in cells assert abs(1000 * nernst_potential(2, 7e-5, 2, 310) - 137.0436) < 1e-4 # Chloride in cells assert abs(1000 * nernst_potential(110, 10, -1, 310) - (-64.0567)) < 1e-4 @requires(units_library) def test_nernst_potential_units(): v = nernst_potential(145, 15, 1, 310, default_constants, default_units) assert (1000 * v - 60.605) < 1e-4
// ... existing code ... from ..nernst import nernst_potential from chempy.util.testing import requires from chempy.units import default_units, default_constants, units_library // ... modified code ... def test_nernst_potential(): """ Test cases obtained from textbook examples of Nernst potential in cellular membranes. 310K = 37C, typical mammalian cell environment temperature. """ # Sodium in cells ... assert abs(1000 * nernst_potential(4, 150, 1, 310) - (-96.8196)) < 1e-4 # Calcium in cells assert abs(1000 * nernst_potential(2, 7e-5, 2, 310) - 137.0436) < 1e-4 # Chloride in cells assert abs(1000 * nernst_potential(110, 10, -1, 310) - (-64.0567)) < 1e-4 @requires(units_library) def test_nernst_potential_units(): v = nernst_potential(145, 15, 1, 310, default_constants, default_units) assert (1000 * v - 60.605) < 1e-4 // ... rest of the code ...
bd22996e282328a72f3995a62078ce6867a158fc
tests/test_register.py
tests/test_register.py
import pytest from data import registers, fields, phases @pytest.mark.parametrize('register', registers) def test_register_key_matches_filename(register): assert registers[register].register == register @pytest.mark.parametrize('register', registers) def test_register_keys_are_known_fields(register): for field in registers[register].keys: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_register_fields(register): for field in registers[register].keys: assert field in registers['register'].fields @pytest.mark.parametrize('register', registers) def test_register_text_trailing_characters(register): text = registers[register].text assert text == text.rstrip(' \n\r.') @pytest.mark.parametrize('register', registers) def test_register_phase(register): assert registers[register].phase in phases @pytest.mark.parametrize('register', registers) def test_register_fields_are_known(register): item = registers[register] for field in item.fields: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_the_right_phase(register): item = registers[register] register_phase = phases.index(item.phase) for field in item.fields: field_phase = phases.index(fields[field].phase) assert field_phase >= register_phase
import pytest from data import registers, fields, phases @pytest.mark.parametrize('register', registers) def test_register_key_matches_filename(register): assert registers[register].register == register @pytest.mark.parametrize('register', registers) def test_register_primary_key_in_fields(register): assert register in registers[register].fields @pytest.mark.parametrize('register', registers) def test_register_keys_are_known_fields(register): for field in registers[register].keys: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_register_fields(register): for field in registers[register].keys: assert field in registers['register'].fields @pytest.mark.parametrize('register', registers) def test_register_text_trailing_characters(register): text = registers[register].text assert text == text.rstrip(' \n\r.') @pytest.mark.parametrize('register', registers) def test_register_phase(register): assert registers[register].phase in phases @pytest.mark.parametrize('register', registers) def test_register_fields_are_known(register): item = registers[register] for field in item.fields: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_the_right_phase(register): item = registers[register] register_phase = phases.index(item.phase) for field in item.fields: field_phase = phases.index(fields[field].phase) assert field_phase >= register_phase
Test primary key in register fields
Test primary key in register fields
Python
mit
openregister/registry-data
import pytest from data import registers, fields, phases @pytest.mark.parametrize('register', registers) def test_register_key_matches_filename(register): assert registers[register].register == register + + + @pytest.mark.parametrize('register', registers) + def test_register_primary_key_in_fields(register): + assert register in registers[register].fields @pytest.mark.parametrize('register', registers) def test_register_keys_are_known_fields(register): for field in registers[register].keys: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_register_fields(register): for field in registers[register].keys: assert field in registers['register'].fields @pytest.mark.parametrize('register', registers) def test_register_text_trailing_characters(register): text = registers[register].text assert text == text.rstrip(' \n\r.') @pytest.mark.parametrize('register', registers) def test_register_phase(register): assert registers[register].phase in phases @pytest.mark.parametrize('register', registers) def test_register_fields_are_known(register): item = registers[register] for field in item.fields: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_the_right_phase(register): item = registers[register] register_phase = phases.index(item.phase) for field in item.fields: field_phase = phases.index(fields[field].phase) assert field_phase >= register_phase
Test primary key in register fields
## Code Before: import pytest from data import registers, fields, phases @pytest.mark.parametrize('register', registers) def test_register_key_matches_filename(register): assert registers[register].register == register @pytest.mark.parametrize('register', registers) def test_register_keys_are_known_fields(register): for field in registers[register].keys: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_register_fields(register): for field in registers[register].keys: assert field in registers['register'].fields @pytest.mark.parametrize('register', registers) def test_register_text_trailing_characters(register): text = registers[register].text assert text == text.rstrip(' \n\r.') @pytest.mark.parametrize('register', registers) def test_register_phase(register): assert registers[register].phase in phases @pytest.mark.parametrize('register', registers) def test_register_fields_are_known(register): item = registers[register] for field in item.fields: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_the_right_phase(register): item = registers[register] register_phase = phases.index(item.phase) for field in item.fields: field_phase = phases.index(fields[field].phase) assert field_phase >= register_phase ## Instruction: Test primary key in register fields ## Code After: import pytest from data import registers, fields, phases @pytest.mark.parametrize('register', registers) def test_register_key_matches_filename(register): assert registers[register].register == register @pytest.mark.parametrize('register', registers) def test_register_primary_key_in_fields(register): assert register in registers[register].fields @pytest.mark.parametrize('register', registers) def test_register_keys_are_known_fields(register): for field in registers[register].keys: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_register_fields(register): for field in registers[register].keys: assert field in registers['register'].fields @pytest.mark.parametrize('register', registers) def test_register_text_trailing_characters(register): text = registers[register].text assert text == text.rstrip(' \n\r.') @pytest.mark.parametrize('register', registers) def test_register_phase(register): assert registers[register].phase in phases @pytest.mark.parametrize('register', registers) def test_register_fields_are_known(register): item = registers[register] for field in item.fields: assert field in fields @pytest.mark.parametrize('register', registers) def test_register_fields_are_the_right_phase(register): item = registers[register] register_phase = phases.index(item.phase) for field in item.fields: field_phase = phases.index(fields[field].phase) assert field_phase >= register_phase
// ... existing code ... assert registers[register].register == register @pytest.mark.parametrize('register', registers) def test_register_primary_key_in_fields(register): assert register in registers[register].fields // ... rest of the code ...
b556bffeb5ed48812258b452e05cc00cfb160453
girder/app/app/configuration.py
girder/app/app/configuration.py
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): return { 'features': { 'notebooks': Setting().get(Features.NOTEBOOKS, True) }, 'deployment': { 'site': Setting().get(Deployment.SITE, '') }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): notebooks = Setting().get(Features.NOTEBOOKS) if notebooks is None: notebooks = True site = Setting().get(Deployment.SITE) if site is None: site = '' return { 'features': { 'notebooks': notebooks }, 'deployment': { 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
Fix up settings for upstream Girder change
Fix up settings for upstream Girder change
Python
bsd-3-clause
OpenChemistry/mongochemserver
from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): + + notebooks = Setting().get(Features.NOTEBOOKS) + if notebooks is None: + notebooks = True + + site = Setting().get(Deployment.SITE) + if site is None: + site = '' + return { 'features': { - 'notebooks': Setting().get(Features.NOTEBOOKS, True) + 'notebooks': notebooks }, 'deployment': { - 'site': Setting().get(Deployment.SITE, '') + 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
Fix up settings for upstream Girder change
## Code Before: from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): return { 'features': { 'notebooks': Setting().get(Features.NOTEBOOKS, True) }, 'deployment': { 'site': Setting().get(Deployment.SITE, '') }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } } ## Instruction: Fix up settings for upstream Girder change ## Code After: from girder.api import access from girder.api.describe import Description, autoDescribeRoute from girder.api.rest import Resource, RestException from girder.constants import AccessType, TokenScope from girder.models.setting import Setting from .constants import Features, Deployment, Branding class Configuration(Resource): def __init__(self): super(Configuration, self).__init__() self.resourceName = 'configuration' self.route('GET', (), self.get) @access.public @autoDescribeRoute( Description('Get the deployment configuration.') ) def get(self): notebooks = Setting().get(Features.NOTEBOOKS) if notebooks is None: notebooks = True site = Setting().get(Deployment.SITE) if site is None: site = '' return { 'features': { 'notebooks': notebooks }, 'deployment': { 'site': site }, 'branding': { 'license': Setting().get(Branding.LICENSE), 'privacy': Setting().get(Branding.PRIVACY), 'headerLogoFileId': Setting().get(Branding.HEADER_LOGO_ID), 'footerLogoFileId': Setting().get(Branding.FOOTER_LOGO_ID), 'footerLogoUrl': Setting().get(Branding.FOOTER_LOGO_URL), 'faviconFileId': Setting().get(Branding.FAVICON_ID) } }
// ... existing code ... def get(self): notebooks = Setting().get(Features.NOTEBOOKS) if notebooks is None: notebooks = True site = Setting().get(Deployment.SITE) if site is None: site = '' return { // ... modified code ... 'features': { 'notebooks': notebooks }, ... 'deployment': { 'site': site }, // ... rest of the code ...
bbd3b1939712d9784fe61884d9b06faa95c36006
tests/test_project/test_app/models.py
tests/test_project/test_app/models.py
from django.db import models class TestModel(models.Model): name = models.CharField(max_length=63, unique=True, verbose_name='Name') image = models.ImageField(verbose_name='Image')
from django.db import models class TestModel(models.Model): name = models.CharField(max_length=63, unique=True, verbose_name='Name') image = models.ImageField(verbose_name='Image', upload_to='uploads/')
Test compatibility with older Django versions.
Test compatibility with older Django versions.
Python
mit
dessibelle/sorl-thumbnail-serializer-field
from django.db import models class TestModel(models.Model): name = models.CharField(max_length=63, unique=True, verbose_name='Name') - image = models.ImageField(verbose_name='Image') + image = models.ImageField(verbose_name='Image', upload_to='uploads/')
Test compatibility with older Django versions.
## Code Before: from django.db import models class TestModel(models.Model): name = models.CharField(max_length=63, unique=True, verbose_name='Name') image = models.ImageField(verbose_name='Image') ## Instruction: Test compatibility with older Django versions. ## Code After: from django.db import models class TestModel(models.Model): name = models.CharField(max_length=63, unique=True, verbose_name='Name') image = models.ImageField(verbose_name='Image', upload_to='uploads/')
// ... existing code ... name = models.CharField(max_length=63, unique=True, verbose_name='Name') image = models.ImageField(verbose_name='Image', upload_to='uploads/') // ... rest of the code ...
e5eaf68490098cb89cf9d6ad8b4eaa96bafd0450
compose/cli/docker_client.py
compose/cli/docker_client.py
import logging import os import ssl from docker import Client from docker import tls from ..const import HTTP_TIMEOUT log = logging.getLogger(__name__) def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') api_version = os.environ.get('COMPOSE_API_VERSION', '1.19') tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) if 'DOCKER_CLIENT_TIMEOUT' in os.environ: log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.') return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT)
import logging import os from docker import Client from docker.utils import kwargs_from_env from ..const import HTTP_TIMEOUT log = logging.getLogger(__name__) def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ if 'DOCKER_CLIENT_TIMEOUT' in os.environ: log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.') kwargs = kwargs_from_env(assert_hostname=False) kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19') kwargs['timeout'] = HTTP_TIMEOUT return Client(**kwargs)
Remove custom docker client initialization logic
Remove custom docker client initialization logic Signed-off-by: Aanand Prasad <[email protected]>
Python
apache-2.0
phiroict/docker,denverdino/docker.github.io,phiroict/docker,denverdino/docker.github.io,jzwlqx/denverdino.github.io,docker/docker.github.io,shin-/docker.github.io,swoopla/compose,rillig/docker.github.io,jiekechoo/compose,bdwill/docker.github.io,joeuo/docker.github.io,sanscontext/docker.github.io,amitsaha/compose,GM-Alex/compose,mohitsoni/compose,charleswhchan/compose,troy0820/docker.github.io,thaJeztah/compose,kikkomep/compose,KevinGreene/compose,thaJeztah/docker.github.io,phiroict/docker,joaofnfernandes/docker.github.io,goloveychuk/compose,docker-zh/docker.github.io,sdurrheimer/compose,talolard/compose,alexisbellido/docker.github.io,londoncalling/docker.github.io,denverdino/docker.github.io,londoncalling/docker.github.io,mnowster/compose,londoncalling/docker.github.io,dbdd4us/compose,goloveychuk/compose,johnstep/docker.github.io,sanscontext/docker.github.io,swoopla/compose,rillig/docker.github.io,j-fuentes/compose,twitherspoon/compose,menglingwei/denverdino.github.io,danix800/docker.github.io,shubheksha/docker.github.io,viranch/compose,TomasTomecek/compose,bdwill/docker.github.io,denverdino/denverdino.github.io,jzwlqx/denverdino.github.io,shubheksha/docker.github.io,j-fuentes/compose,albers/compose,viranch/compose,funkyfuture/docker-compose,londoncalling/docker.github.io,joeuo/docker.github.io,johnstep/docker.github.io,docker-zh/docker.github.io,bdwill/docker.github.io,au-phiware/compose,jiekechoo/compose,moxiegirl/compose,phiroict/docker,docker/docker.github.io,denverdino/denverdino.github.io,jeanpralo/compose,kojiromike/compose,tiry/compose,aduermael/docker.github.io,albers/compose,twitherspoon/compose,denverdino/compose,mdaue/compose,kikkomep/compose,TomasTomecek/compose,denverdino/denverdino.github.io,johnstep/docker.github.io,londoncalling/docker.github.io,mrfuxi/compose,alexandrev/compose,hoogenm/compose,troy0820/docker.github.io,jeanpralo/compose,joaofnfernandes/docker.github.io,schmunk42/compose,LuisBosquez/docker.github.io,alexisbellido/docker.github.io,andrewgee/compose,vdemeester/compose,joaofnfernandes/docker.github.io,shin-/compose,jorgeLuizChaves/compose,shubheksha/docker.github.io,BSWANG/denverdino.github.io,KalleDK/compose,menglingwei/denverdino.github.io,jonaseck2/compose,shin-/docker.github.io,anweiss/docker.github.io,jrabbit/compose,dnephin/compose,mnowster/compose,amitsaha/compose,shin-/docker.github.io,docker-zh/docker.github.io,LuisBosquez/docker.github.io,joaofnfernandes/docker.github.io,BSWANG/denverdino.github.io,bdwill/docker.github.io,au-phiware/compose,vdemeester/compose,anweiss/docker.github.io,mohitsoni/compose,aduermael/docker.github.io,aduermael/docker.github.io,denverdino/docker.github.io,alexisbellido/docker.github.io,tiry/compose,anweiss/docker.github.io,shubheksha/docker.github.io,hoogenm/compose,JimGalasyn/docker.github.io,jorgeLuizChaves/compose,michael-k/docker-compose,funkyfuture/docker-compose,ChrisChinchilla/compose,talolard/compose,sdurrheimer/compose,thaJeztah/docker.github.io,alexisbellido/docker.github.io,kojiromike/compose,joeuo/docker.github.io,KevinGreene/compose,thaJeztah/docker.github.io,joeuo/docker.github.io,thaJeztah/compose,alexandrev/compose,troy0820/docker.github.io,docker/docker.github.io,danix800/docker.github.io,LuisBosquez/docker.github.io,GM-Alex/compose,sanscontext/docker.github.io,troy0820/docker.github.io,sanscontext/docker.github.io,shubheksha/docker.github.io,docker-zh/docker.github.io,rillig/docker.github.io,JimGalasyn/docker.github.io,schmunk42/compose,jzwlqx/denverdino.github.io,moxiegirl/compose,phiroict/docker,BSWANG/d
enverdino.github.io,rgbkrk/compose,dbdd4us/compose,thaJeztah/docker.github.io,michael-k/docker-compose,KalleDK/compose,jzwlqx/denverdino.github.io,johnstep/docker.github.io,gdevillele/docker.github.io,BSWANG/denverdino.github.io,ChrisChinchilla/compose,JimGalasyn/docker.github.io,gdevillele/docker.github.io,danix800/docker.github.io,gdevillele/docker.github.io,mrfuxi/compose,shin-/docker.github.io,aduermael/docker.github.io,docker/docker.github.io,sanscontext/docker.github.io,docker-zh/docker.github.io,danix800/docker.github.io,JimGalasyn/docker.github.io,denverdino/compose,dnephin/compose,shin-/docker.github.io,rillig/docker.github.io,gdevillele/docker.github.io,rgbkrk/compose,LuisBosquez/docker.github.io,anweiss/docker.github.io,LuisBosquez/docker.github.io,bdwill/docker.github.io,JimGalasyn/docker.github.io,denverdino/denverdino.github.io,jzwlqx/denverdino.github.io,menglingwei/denverdino.github.io,denverdino/docker.github.io,johnstep/docker.github.io,docker/docker.github.io,menglingwei/denverdino.github.io,mdaue/compose,jonaseck2/compose,gdevillele/docker.github.io,anweiss/docker.github.io,BSWANG/denverdino.github.io,menglingwei/denverdino.github.io,thaJeztah/docker.github.io,shin-/compose,joaofnfernandes/docker.github.io,alexisbellido/docker.github.io,charleswhchan/compose,andrewgee/compose,denverdino/denverdino.github.io,jrabbit/compose,joeuo/docker.github.io
import logging import os - import ssl from docker import Client - from docker import tls + from docker.utils import kwargs_from_env from ..const import HTTP_TIMEOUT log = logging.getLogger(__name__) def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ - cert_path = os.environ.get('DOCKER_CERT_PATH', '') - if cert_path == '': - cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') - - base_url = os.environ.get('DOCKER_HOST') - api_version = os.environ.get('COMPOSE_API_VERSION', '1.19') - - tls_config = None - - if os.environ.get('DOCKER_TLS_VERIFY', '') != '': - parts = base_url.split('://', 1) - base_url = '%s://%s' % ('https', parts[1]) - - client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) - ca_cert = os.path.join(cert_path, 'ca.pem') - - tls_config = tls.TLSConfig( - ssl_version=ssl.PROTOCOL_TLSv1, - verify=True, - assert_hostname=False, - client_cert=client_cert, - ca_cert=ca_cert, - ) - if 'DOCKER_CLIENT_TIMEOUT' in os.environ: log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.') - return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT) + kwargs = kwargs_from_env(assert_hostname=False) + kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19') + kwargs['timeout'] = HTTP_TIMEOUT + return Client(**kwargs)
Remove custom docker client initialization logic
## Code Before: import logging import os import ssl from docker import Client from docker import tls from ..const import HTTP_TIMEOUT log = logging.getLogger(__name__) def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') api_version = os.environ.get('COMPOSE_API_VERSION', '1.19') tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) if 'DOCKER_CLIENT_TIMEOUT' in os.environ: log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.') return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=HTTP_TIMEOUT) ## Instruction: Remove custom docker client initialization logic ## Code After: import logging import os from docker import Client from docker.utils import kwargs_from_env from ..const import HTTP_TIMEOUT log = logging.getLogger(__name__) def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ if 'DOCKER_CLIENT_TIMEOUT' in os.environ: log.warn('The DOCKER_CLIENT_TIMEOUT environment variable is deprecated. Please use COMPOSE_HTTP_TIMEOUT instead.') kwargs = kwargs_from_env(assert_hostname=False) kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19') kwargs['timeout'] = HTTP_TIMEOUT return Client(**kwargs)
# ... existing code ... import os # ... modified code ... from docker import Client from docker.utils import kwargs_from_env ... """ if 'DOCKER_CLIENT_TIMEOUT' in os.environ: ... kwargs = kwargs_from_env(assert_hostname=False) kwargs['version'] = os.environ.get('COMPOSE_API_VERSION', '1.19') kwargs['timeout'] = HTTP_TIMEOUT return Client(**kwargs) # ... rest of the code ...
3b61b9dfeda38e0a7afd5ec90b32f8abab18ef4f
unzip.py
unzip.py
import argparse import os import zipfile parser = argparse.ArgumentParser(description = "Extract zip file includes cp932 encoding file name") parser.add_argument("file") args = parser.parse_args() with zipfile.ZipFile(args.file, 'r') as archive: for item in archive.namelist(): filename = item.encode("cp437").decode("cp932") directory = os.path.dirname(filename) if not os.path.exists(directory): os.makedirs(directory) if os.path.basename(filename): with open(filename, "wb") as data: data.write(archive.read(item)) # Local variables: # tab-width: 4 # c-basic-offset: 4 # c-hanging-comment-ender-p: nil # End:
import argparse import os import zipfile parser = argparse.ArgumentParser(description = "Extract zip file includes cp932 encoding file name") parser.add_argument("file") parser.add_argument("-d", "--directory", nargs="?", type=str, default="") args = parser.parse_args() with zipfile.ZipFile(args.file, 'r') as archive: for item in archive.namelist(): filename = os.path.join(args.directory, item.encode("cp437").decode("cp932")) directory = os.path.dirname(filename) if not os.path.exists(directory): os.makedirs(directory) if os.path.basename(filename): with open(filename, "wb") as data: data.write(archive.read(item)) # Local variables: # tab-width: 4 # c-basic-offset: 4 # c-hanging-comment-ender-p: nil # End:
Add -d / --directory option
Add -d / --directory option
Python
mit
fujimakishouten/unzip-cp932
import argparse import os import zipfile parser = argparse.ArgumentParser(description = "Extract zip file includes cp932 encoding file name") parser.add_argument("file") + parser.add_argument("-d", "--directory", nargs="?", type=str, default="") args = parser.parse_args() with zipfile.ZipFile(args.file, 'r') as archive: for item in archive.namelist(): - filename = item.encode("cp437").decode("cp932") + filename = os.path.join(args.directory, item.encode("cp437").decode("cp932")) directory = os.path.dirname(filename) if not os.path.exists(directory): os.makedirs(directory) if os.path.basename(filename): with open(filename, "wb") as data: data.write(archive.read(item)) # Local variables: # tab-width: 4 # c-basic-offset: 4 # c-hanging-comment-ender-p: nil # End:
Add -d / --directory option
## Code Before: import argparse import os import zipfile parser = argparse.ArgumentParser(description = "Extract zip file includes cp932 encoding file name") parser.add_argument("file") args = parser.parse_args() with zipfile.ZipFile(args.file, 'r') as archive: for item in archive.namelist(): filename = item.encode("cp437").decode("cp932") directory = os.path.dirname(filename) if not os.path.exists(directory): os.makedirs(directory) if os.path.basename(filename): with open(filename, "wb") as data: data.write(archive.read(item)) # Local variables: # tab-width: 4 # c-basic-offset: 4 # c-hanging-comment-ender-p: nil # End: ## Instruction: Add -d / --directory option ## Code After: import argparse import os import zipfile parser = argparse.ArgumentParser(description = "Extract zip file includes cp932 encoding file name") parser.add_argument("file") parser.add_argument("-d", "--directory", nargs="?", type=str, default="") args = parser.parse_args() with zipfile.ZipFile(args.file, 'r') as archive: for item in archive.namelist(): filename = os.path.join(args.directory, item.encode("cp437").decode("cp932")) directory = os.path.dirname(filename) if not os.path.exists(directory): os.makedirs(directory) if os.path.basename(filename): with open(filename, "wb") as data: data.write(archive.read(item)) # Local variables: # tab-width: 4 # c-basic-offset: 4 # c-hanging-comment-ender-p: nil # End:
// ... existing code ... parser.add_argument("file") parser.add_argument("-d", "--directory", nargs="?", type=str, default="") args = parser.parse_args() // ... modified code ... for item in archive.namelist(): filename = os.path.join(args.directory, item.encode("cp437").decode("cp932")) directory = os.path.dirname(filename) // ... rest of the code ...
de43482266fa71adb8393823680675145ffe93e0
hr_switzerland/models/hr_expense.py
hr_switzerland/models/hr_expense.py
from odoo import api, models, fields class HrExpense(models.Model): _inherit = "hr.expense" # Make product editable when expense is submitted product_id = fields.Many2one( states={ 'draft': [('readonly', False)], 'submit': [('readonly', False)] } ) @api.onchange('product_id') def _onchange_product_id(self): """ Prevent changing amounts if expense is submitted. """ if self.state == 'draft': super(HrExpense, self)._onchange_product_id()
from odoo import api, models, fields class HrExpense(models.Model): _inherit = "hr.expense" # Make product editable when expense is submitted product_id = fields.Many2one( states={ 'draft': [('readonly', False)], 'submit': [('readonly', False)] } ) @api.onchange('product_id') def _onchange_product_id(self): """ Prevent changing amounts if expense is submitted. """ if self.state == 'draft': super(HrExpense, self)._onchange_product_id() class HrExpenseSheet(models.Model): _inherit = 'hr.expense.sheet' # Adding a user_id field for the assign notification to work user_id = fields.Many2one(related='employee_id.user_id') @api.model def create(self, vals): """Notify managers when expense is created.""" sheet = super(HrExpenseSheet, self).create(vals) users = sheet._get_users_to_subscribe() - self.env.user sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids) return sheet def _add_followers(self): """Notify managers when employee is changed.""" super(HrExpenseSheet, self)._add_followers() users = self._get_users_to_subscribe() - self.env.user self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
Add notification to manager at expense creation
Add notification to manager at expense creation
Python
agpl-3.0
CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland
from odoo import api, models, fields class HrExpense(models.Model): _inherit = "hr.expense" # Make product editable when expense is submitted product_id = fields.Many2one( states={ 'draft': [('readonly', False)], 'submit': [('readonly', False)] } ) @api.onchange('product_id') def _onchange_product_id(self): """ Prevent changing amounts if expense is submitted. """ if self.state == 'draft': super(HrExpense, self)._onchange_product_id() + + class HrExpenseSheet(models.Model): + _inherit = 'hr.expense.sheet' + + # Adding a user_id field for the assign notification to work + user_id = fields.Many2one(related='employee_id.user_id') + + @api.model + def create(self, vals): + """Notify managers when expense is created.""" + sheet = super(HrExpenseSheet, self).create(vals) + users = sheet._get_users_to_subscribe() - self.env.user + sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids) + return sheet + + def _add_followers(self): + """Notify managers when employee is changed.""" + super(HrExpenseSheet, self)._add_followers() + users = self._get_users_to_subscribe() - self.env.user + self._message_auto_subscribe_notify(users.mapped('partner_id').ids) +
Add notification to manager at expense creation
## Code Before: from odoo import api, models, fields class HrExpense(models.Model): _inherit = "hr.expense" # Make product editable when expense is submitted product_id = fields.Many2one( states={ 'draft': [('readonly', False)], 'submit': [('readonly', False)] } ) @api.onchange('product_id') def _onchange_product_id(self): """ Prevent changing amounts if expense is submitted. """ if self.state == 'draft': super(HrExpense, self)._onchange_product_id() ## Instruction: Add notification to manager at expense creation ## Code After: from odoo import api, models, fields class HrExpense(models.Model): _inherit = "hr.expense" # Make product editable when expense is submitted product_id = fields.Many2one( states={ 'draft': [('readonly', False)], 'submit': [('readonly', False)] } ) @api.onchange('product_id') def _onchange_product_id(self): """ Prevent changing amounts if expense is submitted. """ if self.state == 'draft': super(HrExpense, self)._onchange_product_id() class HrExpenseSheet(models.Model): _inherit = 'hr.expense.sheet' # Adding a user_id field for the assign notification to work user_id = fields.Many2one(related='employee_id.user_id') @api.model def create(self, vals): """Notify managers when expense is created.""" sheet = super(HrExpenseSheet, self).create(vals) users = sheet._get_users_to_subscribe() - self.env.user sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids) return sheet def _add_followers(self): """Notify managers when employee is changed.""" super(HrExpenseSheet, self)._add_followers() users = self._get_users_to_subscribe() - self.env.user self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
# ... existing code ... super(HrExpense, self)._onchange_product_id() class HrExpenseSheet(models.Model): _inherit = 'hr.expense.sheet' # Adding a user_id field for the assign notification to work user_id = fields.Many2one(related='employee_id.user_id') @api.model def create(self, vals): """Notify managers when expense is created.""" sheet = super(HrExpenseSheet, self).create(vals) users = sheet._get_users_to_subscribe() - self.env.user sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids) return sheet def _add_followers(self): """Notify managers when employee is changed.""" super(HrExpenseSheet, self)._add_followers() users = self._get_users_to_subscribe() - self.env.user self._message_auto_subscribe_notify(users.mapped('partner_id').ids) # ... rest of the code ...
2e1b189727616b4c93ad4244299530c738304428
httpobs/scanner/utils.py
httpobs/scanner/utils.py
import socket import tld def valid_hostname(hostname: str) -> bool: """ :param hostname: The hostname requested in the scan :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return False # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return False except: pass # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't if hostname in tld.get_tld_names(): return False # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return False except: return False # If we've made it this far, then everything is good to go! Woohoo! return True
import socket def valid_hostname(hostname: str): """ :param hostname: The hostname requested in the scan :return: Hostname if it's valid, otherwise None """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return None except: pass # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return None except: return None # If we've made it this far, then everything is good to go! Woohoo! return hostname
Remove TLD check, allow for www
Remove TLD check, allow for www
Python
mpl-2.0
april/http-observatory,april/http-observatory,april/http-observatory,mozilla/http-observatory,mozilla/http-observatory,mozilla/http-observatory
import socket - import tld - def valid_hostname(hostname: str) -> bool: + def valid_hostname(hostname: str): """ :param hostname: The hostname requested in the scan - :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise + :return: Hostname if it's valid, otherwise None """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address - return False # If we get this far, it's an IP address and therefore not a valid fqdn + return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 - return False + return None except: pass - - # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't - if hostname in tld.get_tld_names(): - return False # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: - return False + return None except: - return False + return None # If we've made it this far, then everything is good to go! Woohoo! - return True + return hostname
Remove TLD check, allow for www
## Code Before: import socket import tld def valid_hostname(hostname: str) -> bool: """ :param hostname: The hostname requested in the scan :return: True if it's a valid hostname (fqdn in DNS that's not an IP address), False otherwise """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return False # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return False except: pass # Then, let's see if it's a TLD; this includes things fuel.aero or co.uk that look like fqdns but aren't if hostname in tld.get_tld_names(): return False # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return False except: return False # If we've made it this far, then everything is good to go! Woohoo! return True ## Instruction: Remove TLD check, allow for www ## Code After: import socket def valid_hostname(hostname: str): """ :param hostname: The hostname requested in the scan :return: Hostname if it's valid, otherwise None """ # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass # And IPv6 try: socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return None except: pass # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time # that the validator is making a network connection -- the same that requests would make. try: hostname_ips = socket.getaddrinfo(hostname, 443) if len(hostname_ips) < 1: return None except: return None # If we've made it this far, then everything is good to go! Woohoo! return hostname
# ... existing code ... import socket # ... modified code ... def valid_hostname(hostname: str): """ ... :param hostname: The hostname requested in the scan :return: Hostname if it's valid, otherwise None """ ... socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address return None # If we get this far, it's an IP address and therefore not a valid fqdn except: ... socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 return None except: ... pass ... if len(hostname_ips) < 1: return None except: return None ... # If we've made it this far, then everything is good to go! Woohoo! return hostname # ... rest of the code ...
54d4551ce8efb16d4a8d02e38b9f223f8f1cd816
ab_game.py
ab_game.py
import board import pente_exceptions from ab_state import * CAPTURE_SCORE_BASE = 120 ** 3 class ABGame(): """ This class acts as a bridge between the AlphaBeta code and my code """ def __init__(self, base_game): s = self.current_state = ABState() s.set_state(base_game.current_state) self.base_game = base_game def to_move(self, state=None): if state is None: state = self.current_state return state.to_move() def utility(self, state): return state.utility() def successors(self, state, depth): mn = state.get_move_number() if mn == 1: # The first black move is always in the centre brd_size = self.base_game.get_board().get_size() centre_pos = (brd_size/2, brd_size/2) p_i = [centre_pos] else: min_priority = 0 if depth > 4: min_priority = 3 pos_iter = state.get_iter(state.to_move()) p_i = pos_iter.get_iter(state.to_move_colour(), min_priority) tried_count = 0 for pos in p_i: # create an AB_State for each possible move from state succ = state.create_state(pos) yield pos, succ tried_count += 1 if depth > 3 and tried_count >= 2: return def terminal_test(self, state): return state.terminal()
import board import pente_exceptions from ab_state import * class ABGame(): """ This class acts as a bridge between the AlphaBeta code and my code """ def __init__(self, base_game): s = self.current_state = ABState() s.set_state(base_game.current_state) self.base_game = base_game def to_move(self, state=None): if state is None: state = self.current_state return state.to_move() def utility(self, state): return state.utility() def successors(self, state, depth): mn = state.get_move_number() if mn == 1: # The first black move is always in the centre brd_size = self.base_game.get_board().get_size() centre_pos = (brd_size/2, brd_size/2) p_i = [centre_pos] else: min_priority = 0 pos_iter = state.get_iter(state.to_move()) p_i = pos_iter.get_iter(state.to_move_colour(), min_priority) tried_count = 0 for pos in p_i: # create an AB_State for each possible move from state succ = state.create_state(pos) yield pos, succ tried_count += 1 if depth > 3 and tried_count >= 2: return def terminal_test(self, state): return state.terminal()
Disable min_priority filter for now
Disable min_priority filter for now
Python
mit
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
import board import pente_exceptions from ab_state import * - - CAPTURE_SCORE_BASE = 120 ** 3 class ABGame(): """ This class acts as a bridge between the AlphaBeta code and my code """ def __init__(self, base_game): s = self.current_state = ABState() s.set_state(base_game.current_state) self.base_game = base_game def to_move(self, state=None): if state is None: state = self.current_state return state.to_move() def utility(self, state): return state.utility() def successors(self, state, depth): mn = state.get_move_number() if mn == 1: # The first black move is always in the centre brd_size = self.base_game.get_board().get_size() centre_pos = (brd_size/2, brd_size/2) p_i = [centre_pos] else: min_priority = 0 - if depth > 4: - min_priority = 3 pos_iter = state.get_iter(state.to_move()) p_i = pos_iter.get_iter(state.to_move_colour(), min_priority) tried_count = 0 for pos in p_i: # create an AB_State for each possible move from state succ = state.create_state(pos) yield pos, succ tried_count += 1 if depth > 3 and tried_count >= 2: return def terminal_test(self, state): return state.terminal()
Disable min_priority filter for now
## Code Before: import board import pente_exceptions from ab_state import * CAPTURE_SCORE_BASE = 120 ** 3 class ABGame(): """ This class acts as a bridge between the AlphaBeta code and my code """ def __init__(self, base_game): s = self.current_state = ABState() s.set_state(base_game.current_state) self.base_game = base_game def to_move(self, state=None): if state is None: state = self.current_state return state.to_move() def utility(self, state): return state.utility() def successors(self, state, depth): mn = state.get_move_number() if mn == 1: # The first black move is always in the centre brd_size = self.base_game.get_board().get_size() centre_pos = (brd_size/2, brd_size/2) p_i = [centre_pos] else: min_priority = 0 if depth > 4: min_priority = 3 pos_iter = state.get_iter(state.to_move()) p_i = pos_iter.get_iter(state.to_move_colour(), min_priority) tried_count = 0 for pos in p_i: # create an AB_State for each possible move from state succ = state.create_state(pos) yield pos, succ tried_count += 1 if depth > 3 and tried_count >= 2: return def terminal_test(self, state): return state.terminal() ## Instruction: Disable min_priority filter for now ## Code After: import board import pente_exceptions from ab_state import * class ABGame(): """ This class acts as a bridge between the AlphaBeta code and my code """ def __init__(self, base_game): s = self.current_state = ABState() s.set_state(base_game.current_state) self.base_game = base_game def to_move(self, state=None): if state is None: state = self.current_state return state.to_move() def utility(self, state): return state.utility() def successors(self, state, depth): mn = state.get_move_number() if mn == 1: # The first black move is always in the centre brd_size = self.base_game.get_board().get_size() centre_pos = (brd_size/2, brd_size/2) p_i = [centre_pos] else: min_priority = 0 pos_iter = state.get_iter(state.to_move()) p_i = pos_iter.get_iter(state.to_move_colour(), min_priority) tried_count = 0 for pos in p_i: # create an AB_State for each possible move from state succ = state.create_state(pos) yield pos, succ tried_count += 1 if depth > 3 and tried_count >= 2: return def terminal_test(self, state): return state.terminal()
... from ab_state import * ... min_priority = 0 ...
9ce7de86b7d9c1e9288fa5c09f97414516cabc63
corehq/apps/reports/filters/urls.py
corehq/apps/reports/filters/urls.py
from django.conf.urls import url from .api import ( EmwfOptionsView, CaseListFilterOptions, DeviceLogUsers, DeviceLogIds, MobileWorkersOptionsView, ReassignCaseOptions, ) from .location import LocationGroupFilterOptions urlpatterns = [ url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), url(r'^new_emwf_options/$', LocationRestrictedEmwfOptions.as_view(), name='new_emwf_options'), url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), name='grouplocationfilter_options' ), url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), ]
from django.conf.urls import url from .api import ( EmwfOptionsView, CaseListFilterOptions, DeviceLogUsers, DeviceLogIds, MobileWorkersOptionsView, ReassignCaseOptions, ) from .location import LocationGroupFilterOptions urlpatterns = [ url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), name='grouplocationfilter_options' ), url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), ]
Fix bad merge and formatting
Fix bad merge and formatting
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from django.conf.urls import url from .api import ( EmwfOptionsView, CaseListFilterOptions, DeviceLogUsers, DeviceLogIds, MobileWorkersOptionsView, ReassignCaseOptions, ) from .location import LocationGroupFilterOptions urlpatterns = [ - url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), + url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), - url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), + url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), - url(r'^new_emwf_options/$', LocationRestrictedEmwfOptions.as_view(), name='new_emwf_options'), - url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), + url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), - url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), + url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), - url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), + url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), - name='grouplocationfilter_options' + name='grouplocationfilter_options' - ), + ), - url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), + url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), - url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), + url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), ]
Fix bad merge and formatting
## Code Before: from django.conf.urls import url from .api import ( EmwfOptionsView, CaseListFilterOptions, DeviceLogUsers, DeviceLogIds, MobileWorkersOptionsView, ReassignCaseOptions, ) from .location import LocationGroupFilterOptions urlpatterns = [ url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), url(r'^new_emwf_options/$', LocationRestrictedEmwfOptions.as_view(), name='new_emwf_options'), url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), name='grouplocationfilter_options' ), url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), ] ## Instruction: Fix bad merge and formatting ## Code After: from django.conf.urls import url from .api import ( EmwfOptionsView, CaseListFilterOptions, DeviceLogUsers, DeviceLogIds, MobileWorkersOptionsView, ReassignCaseOptions, ) from .location import LocationGroupFilterOptions urlpatterns = [ url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), name='grouplocationfilter_options' ), url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), ]
// ... existing code ... urlpatterns = [ url(r'^emwf_options/$', EmwfOptionsView.as_view(), name='emwf_options'), url(r'^users_options/$', MobileWorkersOptionsView.as_view(), name=MobileWorkersOptionsView.urlname), url(r'^case_list_options/$', CaseListFilterOptions.as_view(), name='case_list_options'), url(r'^reassign_case_options/$', ReassignCaseOptions.as_view(), name='reassign_case_options'), url(r'^grouplocationfilter_options/$', LocationGroupFilterOptions.as_view(), name='grouplocationfilter_options' ), url(r'^device_log_users/$', DeviceLogUsers.as_view(), name='device_log_users'), url(r'^device_log_ids/$', DeviceLogIds.as_view(), name='device_log_ids'), ] // ... rest of the code ...
b6e9e37350a4b435df00a54b2ccd9da70a4db788
nogotofail/mitm/util/ip.py
nogotofail/mitm/util/ip.py
r''' Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import subprocess import re def get_interface_addresses(): """Get all ip addresses assigned to interfaces. Returns a tuple of (v4 addresses, v6 addresses) """ try: output = subprocess.check_output("ifconfig") except subprocess.CalledProcessError: # Couldn't call ifconfig. Best guess it. return (["127.0.0.1"], []) # Parse out the results. v4 = re.findall("inet addr:([^ ]*)", output) v6 = re.findall("inet6 addr: ([^ ]*)", output) return v4, v6
r''' Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import subprocess import re def get_interface_addresses(): """Get all ip addresses assigned to interfaces. Returns a tuple of (v4 addresses, v6 addresses) """ try: output = subprocess.check_output("ifconfig") except subprocess.CalledProcessError: # Couldn't call ifconfig. Best guess it. return (["127.0.0.1"], []) # Parse out the results. v4 = re.findall("inet (addr:)?([^ ]*)", output) v6 = re.findall("inet6 (addr: )?([^ ]*)", output) v4 = [e[1] for e in v4] v6 = [e[1] for e in v6] return v4, v6
Fix local interface addr parsing
Fix local interface addr parsing On Fedora 21 the format of ifconfig is a little different. Fixes #17
Python
apache-2.0
google/nogotofail,leasual/nogotofail,mkenne11/nogotofail,joshcooper/nogotofail,digideskio/nogotofail,mkenne11/nogotofail-pii,joshcooper/nogotofail,google/nogotofail,mkenne11/nogotofail,digideskio/nogotofail,leasual/nogotofail,mkenne11/nogotofail-pii
r''' Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import subprocess import re def get_interface_addresses(): """Get all ip addresses assigned to interfaces. Returns a tuple of (v4 addresses, v6 addresses) """ try: output = subprocess.check_output("ifconfig") except subprocess.CalledProcessError: # Couldn't call ifconfig. Best guess it. return (["127.0.0.1"], []) # Parse out the results. - v4 = re.findall("inet addr:([^ ]*)", output) + v4 = re.findall("inet (addr:)?([^ ]*)", output) - v6 = re.findall("inet6 addr: ([^ ]*)", output) + v6 = re.findall("inet6 (addr: )?([^ ]*)", output) + v4 = [e[1] for e in v4] + v6 = [e[1] for e in v6] return v4, v6
Fix local interface addr parsing
## Code Before: r''' Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import subprocess import re def get_interface_addresses(): """Get all ip addresses assigned to interfaces. Returns a tuple of (v4 addresses, v6 addresses) """ try: output = subprocess.check_output("ifconfig") except subprocess.CalledProcessError: # Couldn't call ifconfig. Best guess it. return (["127.0.0.1"], []) # Parse out the results. v4 = re.findall("inet addr:([^ ]*)", output) v6 = re.findall("inet6 addr: ([^ ]*)", output) return v4, v6 ## Instruction: Fix local interface addr parsing ## Code After: r''' Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import subprocess import re def get_interface_addresses(): """Get all ip addresses assigned to interfaces. Returns a tuple of (v4 addresses, v6 addresses) """ try: output = subprocess.check_output("ifconfig") except subprocess.CalledProcessError: # Couldn't call ifconfig. Best guess it. return (["127.0.0.1"], []) # Parse out the results. v4 = re.findall("inet (addr:)?([^ ]*)", output) v6 = re.findall("inet6 (addr: )?([^ ]*)", output) v4 = [e[1] for e in v4] v6 = [e[1] for e in v6] return v4, v6
# ... existing code ... # Parse out the results. v4 = re.findall("inet (addr:)?([^ ]*)", output) v6 = re.findall("inet6 (addr: )?([^ ]*)", output) v4 = [e[1] for e in v4] v6 = [e[1] for e in v6] return v4, v6 # ... rest of the code ...
fb2cfe4759fb98de644932af17a247428b2cc0f5
api/auth.py
api/auth.py
from django.http import HttpResponseForbidden from django.contrib.auth.models import AnonymousUser from api.models import AuthAPIKey class APIKeyAuthentication(object): def is_authenticated(self, request): params = {} for key,value in request.GET.items(): params[key.lower()] = value if params['apikey']: try: keyobj = AuthAPIKey.objects.get(key=params['apikey']) except: keyobj = None if keyobj and keyobj.active: request.user = AnonymousUser() return True return False def challenge(self): return HttpResponseForbidden('Access Denied, use a API Key')
from django.http import HttpResponseForbidden from django.contrib.auth.models import AnonymousUser from api.models import AuthAPIKey class APIKeyAuthentication(object): def is_authenticated(self, request): params = {} for key,value in request.GET.items(): params[key.lower()] = value if 'apikey' in params: try: keyobj = AuthAPIKey.objects.get(key=params['apikey']) except: keyobj = None if keyobj and keyobj.active: request.user = AnonymousUser() return True return False def challenge(self): return HttpResponseForbidden('Access Denied, use a API Key')
Fix Auth API key check causing error 500s
Fix Auth API key check causing error 500s
Python
bsd-3-clause
nikdoof/test-auth
from django.http import HttpResponseForbidden from django.contrib.auth.models import AnonymousUser from api.models import AuthAPIKey class APIKeyAuthentication(object): def is_authenticated(self, request): params = {} for key,value in request.GET.items(): params[key.lower()] = value - if params['apikey']: + if 'apikey' in params: try: keyobj = AuthAPIKey.objects.get(key=params['apikey']) except: keyobj = None if keyobj and keyobj.active: request.user = AnonymousUser() return True return False def challenge(self): return HttpResponseForbidden('Access Denied, use a API Key')
Fix Auth API key check causing error 500s
## Code Before: from django.http import HttpResponseForbidden from django.contrib.auth.models import AnonymousUser from api.models import AuthAPIKey class APIKeyAuthentication(object): def is_authenticated(self, request): params = {} for key,value in request.GET.items(): params[key.lower()] = value if params['apikey']: try: keyobj = AuthAPIKey.objects.get(key=params['apikey']) except: keyobj = None if keyobj and keyobj.active: request.user = AnonymousUser() return True return False def challenge(self): return HttpResponseForbidden('Access Denied, use a API Key') ## Instruction: Fix Auth API key check causing error 500s ## Code After: from django.http import HttpResponseForbidden from django.contrib.auth.models import AnonymousUser from api.models import AuthAPIKey class APIKeyAuthentication(object): def is_authenticated(self, request): params = {} for key,value in request.GET.items(): params[key.lower()] = value if 'apikey' in params: try: keyobj = AuthAPIKey.objects.get(key=params['apikey']) except: keyobj = None if keyobj and keyobj.active: request.user = AnonymousUser() return True return False def challenge(self): return HttpResponseForbidden('Access Denied, use a API Key')
... if 'apikey' in params: try: ...
74b31ba7fec330ec167c2e001f60695272da71b8
pages/views.py
pages/views.py
from django.views import generic from django.contrib.auth.models import Group from django_countries.fields import Country from hosting.models import Profile, Place from hosting.utils import sort_by_name class AboutView(generic.TemplateView): template_name = 'pages/about.html' about = AboutView.as_view() class TermsAndConditionsView(generic.TemplateView): template_name = 'pages/terms_conditions.html' terms_conditions = TermsAndConditionsView.as_view() class SupervisorsView(generic.TemplateView): template_name = 'pages/supervisors.html' def countries(self): places = Place.objects.filter(in_book=True) groups = Group.objects.exclude(user=None) countries = sort_by_name({p.country for p in places}) for country in countries: try: group = groups.get(name=str(country)) country.supervisors = sorted(user.profile for user in group.user_set.all()) except Group.DoesNotExist: pass country.place_count = places.filter(country=country).count() return countries supervisors = SupervisorsView.as_view() class FaqView(generic.TemplateView): template_name = 'pages/faq.html' faq = FaqView.as_view()
from django.views import generic from django.contrib.auth.models import Group from hosting.models import Place from hosting.utils import sort_by_name class AboutView(generic.TemplateView): template_name = 'pages/about.html' about = AboutView.as_view() class TermsAndConditionsView(generic.TemplateView): template_name = 'pages/terms_conditions.html' terms_conditions = TermsAndConditionsView.as_view() class SupervisorsView(generic.TemplateView): template_name = 'pages/supervisors.html' def countries(self): places = Place.available_objects.filter(in_book=True) groups = Group.objects.exclude(user=None) countries = sort_by_name({p.country for p in places}) for country in countries: try: group = groups.get(name=str(country)) country.supervisors = sorted(user.profile for user in group.user_set.all()) except Group.DoesNotExist: pass country.place_count = places.filter(country=country).count() return countries supervisors = SupervisorsView.as_view() class FaqView(generic.TemplateView): template_name = 'pages/faq.html' faq = FaqView.as_view()
Fix numbers in LO list.
Fix numbers in LO list.
Python
agpl-3.0
batisteo/pasportaservo,tejo-esperanto/pasportaservo,tejo-esperanto/pasportaservo,tejo-esperanto/pasportaservo,tejo-esperanto/pasportaservo,batisteo/pasportaservo,batisteo/pasportaservo,batisteo/pasportaservo
from django.views import generic from django.contrib.auth.models import Group - from django_countries.fields import Country - from hosting.models import Profile, Place + from hosting.models import Place from hosting.utils import sort_by_name class AboutView(generic.TemplateView): template_name = 'pages/about.html' about = AboutView.as_view() class TermsAndConditionsView(generic.TemplateView): template_name = 'pages/terms_conditions.html' terms_conditions = TermsAndConditionsView.as_view() class SupervisorsView(generic.TemplateView): template_name = 'pages/supervisors.html' def countries(self): - places = Place.objects.filter(in_book=True) + places = Place.available_objects.filter(in_book=True) groups = Group.objects.exclude(user=None) countries = sort_by_name({p.country for p in places}) for country in countries: try: group = groups.get(name=str(country)) country.supervisors = sorted(user.profile for user in group.user_set.all()) except Group.DoesNotExist: pass country.place_count = places.filter(country=country).count() return countries supervisors = SupervisorsView.as_view() class FaqView(generic.TemplateView): template_name = 'pages/faq.html' faq = FaqView.as_view()
Fix numbers in LO list.
## Code Before: from django.views import generic from django.contrib.auth.models import Group from django_countries.fields import Country from hosting.models import Profile, Place from hosting.utils import sort_by_name class AboutView(generic.TemplateView): template_name = 'pages/about.html' about = AboutView.as_view() class TermsAndConditionsView(generic.TemplateView): template_name = 'pages/terms_conditions.html' terms_conditions = TermsAndConditionsView.as_view() class SupervisorsView(generic.TemplateView): template_name = 'pages/supervisors.html' def countries(self): places = Place.objects.filter(in_book=True) groups = Group.objects.exclude(user=None) countries = sort_by_name({p.country for p in places}) for country in countries: try: group = groups.get(name=str(country)) country.supervisors = sorted(user.profile for user in group.user_set.all()) except Group.DoesNotExist: pass country.place_count = places.filter(country=country).count() return countries supervisors = SupervisorsView.as_view() class FaqView(generic.TemplateView): template_name = 'pages/faq.html' faq = FaqView.as_view() ## Instruction: Fix numbers in LO list. ## Code After: from django.views import generic from django.contrib.auth.models import Group from hosting.models import Place from hosting.utils import sort_by_name class AboutView(generic.TemplateView): template_name = 'pages/about.html' about = AboutView.as_view() class TermsAndConditionsView(generic.TemplateView): template_name = 'pages/terms_conditions.html' terms_conditions = TermsAndConditionsView.as_view() class SupervisorsView(generic.TemplateView): template_name = 'pages/supervisors.html' def countries(self): places = Place.available_objects.filter(in_book=True) groups = Group.objects.exclude(user=None) countries = sort_by_name({p.country for p in places}) for country in countries: try: group = groups.get(name=str(country)) country.supervisors = sorted(user.profile for user in group.user_set.all()) except Group.DoesNotExist: pass country.place_count = places.filter(country=country).count() return countries supervisors = SupervisorsView.as_view() class FaqView(generic.TemplateView): template_name = 'pages/faq.html' faq = FaqView.as_view()
... from hosting.models import Place from hosting.utils import sort_by_name ... def countries(self): places = Place.available_objects.filter(in_book=True) groups = Group.objects.exclude(user=None) ...
1977ab5bd97feb114dedd1619c89413f109f0480
tests/validate_test.py
tests/validate_test.py
from pytest import raises from nirum.validate import (validate_boxed_type, validate_record_type, validate_union_type) def test_validate_boxed_type(): assert validate_boxed_type(3.14, float) with raises(TypeError): validate_boxed_type('hello', float) def test_validate_record_type(fx_point, fx_record_type, fx_offset): assert validate_record_type(fx_point) with raises(TypeError): assert validate_record_type(fx_record_type(left=fx_offset, top=1)) with raises(TypeError): assert validate_record_type(fx_record_type(left=1, top=fx_offset)) def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point): assert validate_union_type(fx_rectangle) with raises(TypeError): assert validate_union_type(fx_rectangle_type(1, fx_point)) with raises(TypeError): assert validate_union_type(fx_rectangle_type(fx_point, 1)) with raises(TypeError): assert validate_union_type(fx_rectangle_type(1, 1))
from pytest import raises from nirum.validate import (validate_boxed_type, validate_record_type, validate_union_type) def test_validate_boxed_type(): assert validate_boxed_type(3.14, float) with raises(TypeError): validate_boxed_type('hello', float) def test_validate_record_type(fx_point, fx_record_type, fx_offset): assert validate_record_type(fx_point) with raises(TypeError): validate_record_type(fx_record_type(left=fx_offset, top=1)) with raises(TypeError): validate_record_type(fx_record_type(left=1, top=fx_offset)) def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point): assert validate_union_type(fx_rectangle) with raises(TypeError): validate_union_type(fx_rectangle_type(1, fx_point)) with raises(TypeError): validate_union_type(fx_rectangle_type(fx_point, 1)) with raises(TypeError): validate_union_type(fx_rectangle_type(1, 1))
Remove assert in error-raising test
Remove assert in error-raising test
Python
mit
spoqa/nirum-python,spoqa/nirum-python
from pytest import raises from nirum.validate import (validate_boxed_type, validate_record_type, validate_union_type) def test_validate_boxed_type(): assert validate_boxed_type(3.14, float) with raises(TypeError): validate_boxed_type('hello', float) def test_validate_record_type(fx_point, fx_record_type, fx_offset): assert validate_record_type(fx_point) with raises(TypeError): - assert validate_record_type(fx_record_type(left=fx_offset, top=1)) + validate_record_type(fx_record_type(left=fx_offset, top=1)) with raises(TypeError): - assert validate_record_type(fx_record_type(left=1, top=fx_offset)) + validate_record_type(fx_record_type(left=1, top=fx_offset)) def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point): assert validate_union_type(fx_rectangle) with raises(TypeError): - assert validate_union_type(fx_rectangle_type(1, fx_point)) + validate_union_type(fx_rectangle_type(1, fx_point)) with raises(TypeError): - assert validate_union_type(fx_rectangle_type(fx_point, 1)) + validate_union_type(fx_rectangle_type(fx_point, 1)) with raises(TypeError): - assert validate_union_type(fx_rectangle_type(1, 1)) + validate_union_type(fx_rectangle_type(1, 1))
Remove assert in error-raising test
## Code Before: from pytest import raises from nirum.validate import (validate_boxed_type, validate_record_type, validate_union_type) def test_validate_boxed_type(): assert validate_boxed_type(3.14, float) with raises(TypeError): validate_boxed_type('hello', float) def test_validate_record_type(fx_point, fx_record_type, fx_offset): assert validate_record_type(fx_point) with raises(TypeError): assert validate_record_type(fx_record_type(left=fx_offset, top=1)) with raises(TypeError): assert validate_record_type(fx_record_type(left=1, top=fx_offset)) def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point): assert validate_union_type(fx_rectangle) with raises(TypeError): assert validate_union_type(fx_rectangle_type(1, fx_point)) with raises(TypeError): assert validate_union_type(fx_rectangle_type(fx_point, 1)) with raises(TypeError): assert validate_union_type(fx_rectangle_type(1, 1)) ## Instruction: Remove assert in error-raising test ## Code After: from pytest import raises from nirum.validate import (validate_boxed_type, validate_record_type, validate_union_type) def test_validate_boxed_type(): assert validate_boxed_type(3.14, float) with raises(TypeError): validate_boxed_type('hello', float) def test_validate_record_type(fx_point, fx_record_type, fx_offset): assert validate_record_type(fx_point) with raises(TypeError): validate_record_type(fx_record_type(left=fx_offset, top=1)) with raises(TypeError): validate_record_type(fx_record_type(left=1, top=fx_offset)) def test_validate_union_type(fx_rectangle, fx_rectangle_type, fx_point): assert validate_union_type(fx_rectangle) with raises(TypeError): validate_union_type(fx_rectangle_type(1, fx_point)) with raises(TypeError): validate_union_type(fx_rectangle_type(fx_point, 1)) with raises(TypeError): validate_union_type(fx_rectangle_type(1, 1))
// ... existing code ... with raises(TypeError): validate_record_type(fx_record_type(left=fx_offset, top=1)) with raises(TypeError): validate_record_type(fx_record_type(left=1, top=fx_offset)) // ... modified code ... with raises(TypeError): validate_union_type(fx_rectangle_type(1, fx_point)) ... with raises(TypeError): validate_union_type(fx_rectangle_type(fx_point, 1)) ... with raises(TypeError): validate_union_type(fx_rectangle_type(1, 1)) // ... rest of the code ...
eac90ef4d470923bb823f99dc85984faac733f08
pysuru/services.py
pysuru/services.py
import json from collections import namedtuple from pysuru.base import BaseAPI, ObjectMixin SERVICE_INSTANCE_ATTRS = ( 'name', 'description', 'type', 'plan', 'teamOwner', ) _ServiceInstance = namedtuple('ServiceInstance', SERVICE_INSTANCE_ATTRS) class ServiceInstance(_ServiceInstance, ObjectMixin): pass class ServiceInstanceAPI(BaseAPI): def filter_by_app(self, name): http_response = self.request('GET', '/services/instances?app=' + name) response = json.loads(http_response.data.decode('utf-8')) services = [] for service_data in response: for index, instance in enumerate(service_data['instances']): data = { 'name': instance, 'type': service_data['service'], 'plan': service_data['plans'][index], } services.append(ServiceInstance.create(**data)) return services def add(self, data): http_response = self.post_json('/services/instances', data) response = json.loads(http_response.data.decode('utf-8')) if response.status == 409: raise ServiceAlreadyExists() elif response.status == 200: return True else: return False class ServiceAlreadyExists(Exception): pass
import json from collections import namedtuple from pysuru.base import BaseAPI, ObjectMixin SERVICE_INSTANCE_ATTRS = ( 'name', 'description', 'type', 'plan', ) _ServiceInstance = namedtuple('ServiceInstance', SERVICE_INSTANCE_ATTRS) class ServiceInstance(_ServiceInstance, ObjectMixin): pass class ServiceInstanceAPI(BaseAPI): def filter_by_app(self, name): http_response = self.request('GET', '/services/instances?app=' + name) response = json.loads(http_response.data.decode('utf-8')) services = [] for service_data in response: for index, instance in enumerate(service_data['instances']): data = { 'name': instance, 'type': service_data['service'], 'plan': service_data['plans'][index], } services.append(ServiceInstance.create(**data)) return services def add(self, data): http_response = self.post_json('/services/instances', data) response = json.loads(http_response.data.decode('utf-8')) if response.status == 409: raise ServiceAlreadyExists() elif response.status == 200: return True else: return False class ServiceAlreadyExists(Exception): pass
Remove (currently) unused service instance field
Remove (currently) unused service instance field
Python
mit
rcmachado/pysuru
import json from collections import namedtuple from pysuru.base import BaseAPI, ObjectMixin SERVICE_INSTANCE_ATTRS = ( 'name', 'description', 'type', 'plan', - 'teamOwner', ) _ServiceInstance = namedtuple('ServiceInstance', SERVICE_INSTANCE_ATTRS) class ServiceInstance(_ServiceInstance, ObjectMixin): pass class ServiceInstanceAPI(BaseAPI): def filter_by_app(self, name): http_response = self.request('GET', '/services/instances?app=' + name) response = json.loads(http_response.data.decode('utf-8')) services = [] for service_data in response: for index, instance in enumerate(service_data['instances']): data = { 'name': instance, 'type': service_data['service'], 'plan': service_data['plans'][index], } services.append(ServiceInstance.create(**data)) return services def add(self, data): http_response = self.post_json('/services/instances', data) response = json.loads(http_response.data.decode('utf-8')) if response.status == 409: raise ServiceAlreadyExists() elif response.status == 200: return True else: return False class ServiceAlreadyExists(Exception): pass
Remove (currently) unused service instance field
## Code Before: import json from collections import namedtuple from pysuru.base import BaseAPI, ObjectMixin SERVICE_INSTANCE_ATTRS = ( 'name', 'description', 'type', 'plan', 'teamOwner', ) _ServiceInstance = namedtuple('ServiceInstance', SERVICE_INSTANCE_ATTRS) class ServiceInstance(_ServiceInstance, ObjectMixin): pass class ServiceInstanceAPI(BaseAPI): def filter_by_app(self, name): http_response = self.request('GET', '/services/instances?app=' + name) response = json.loads(http_response.data.decode('utf-8')) services = [] for service_data in response: for index, instance in enumerate(service_data['instances']): data = { 'name': instance, 'type': service_data['service'], 'plan': service_data['plans'][index], } services.append(ServiceInstance.create(**data)) return services def add(self, data): http_response = self.post_json('/services/instances', data) response = json.loads(http_response.data.decode('utf-8')) if response.status == 409: raise ServiceAlreadyExists() elif response.status == 200: return True else: return False class ServiceAlreadyExists(Exception): pass ## Instruction: Remove (currently) unused service instance field ## Code After: import json from collections import namedtuple from pysuru.base import BaseAPI, ObjectMixin SERVICE_INSTANCE_ATTRS = ( 'name', 'description', 'type', 'plan', ) _ServiceInstance = namedtuple('ServiceInstance', SERVICE_INSTANCE_ATTRS) class ServiceInstance(_ServiceInstance, ObjectMixin): pass class ServiceInstanceAPI(BaseAPI): def filter_by_app(self, name): http_response = self.request('GET', '/services/instances?app=' + name) response = json.loads(http_response.data.decode('utf-8')) services = [] for service_data in response: for index, instance in enumerate(service_data['instances']): data = { 'name': instance, 'type': service_data['service'], 'plan': service_data['plans'][index], } services.append(ServiceInstance.create(**data)) return services def add(self, data): http_response = self.post_json('/services/instances', data) response = json.loads(http_response.data.decode('utf-8')) if response.status == 409: raise ServiceAlreadyExists() elif response.status == 200: return True else: return False class ServiceAlreadyExists(Exception): pass
# ... existing code ... 'plan', ) # ... rest of the code ...
b57a599640c6fa8bf23f081c914b7437e3f04dcd
course_discovery/apps/courses/management/commands/refresh_all_courses.py
course_discovery/apps/courses/management/commands/refresh_all_courses.py
import logging from optparse import make_option from django.core.management import BaseCommand, CommandError from course_discovery.apps.courses.models import Course logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Refresh course data from external sources.' option_list = BaseCommand.option_list + ( make_option('--access_token', action='store', dest='access_token', default=None, help='OAuth2 access token used to authenticate API calls.'), ) def handle(self, *args, **options): access_token = options.get('access_token') if not access_token: msg = 'Courses cannot be migrated if no access token is supplied.' logger.error(msg) raise CommandError(msg) Course.refresh_all(access_token=access_token)
import logging from django.core.management import BaseCommand, CommandError from course_discovery.apps.courses.models import Course logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Refresh course data from external sources.' def add_arguments(self, parser): parser.add_argument( '--access_token', action='store', dest='access_token', default=None, help='OAuth2 access token used to authenticate API calls.' ) def handle(self, *args, **options): access_token = options.get('access_token') if not access_token: msg = 'Courses cannot be migrated if no access token is supplied.' logger.error(msg) raise CommandError(msg) Course.refresh_all(access_token=access_token)
Switch to argparse for management command argument parsing
Switch to argparse for management command argument parsing
Python
agpl-3.0
edx/course-discovery,edx/course-discovery,edx/course-discovery,edx/course-discovery
import logging - from optparse import make_option from django.core.management import BaseCommand, CommandError from course_discovery.apps.courses.models import Course logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Refresh course data from external sources.' - option_list = BaseCommand.option_list + ( + def add_arguments(self, parser): + parser.add_argument( - make_option('--access_token', + '--access_token', - action='store', + action='store', - dest='access_token', + dest='access_token', - default=None, + default=None, - help='OAuth2 access token used to authenticate API calls.'), + help='OAuth2 access token used to authenticate API calls.' - ) + ) def handle(self, *args, **options): access_token = options.get('access_token') if not access_token: msg = 'Courses cannot be migrated if no access token is supplied.' logger.error(msg) raise CommandError(msg) Course.refresh_all(access_token=access_token)
Switch to argparse for management command argument parsing
## Code Before: import logging from optparse import make_option from django.core.management import BaseCommand, CommandError from course_discovery.apps.courses.models import Course logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Refresh course data from external sources.' option_list = BaseCommand.option_list + ( make_option('--access_token', action='store', dest='access_token', default=None, help='OAuth2 access token used to authenticate API calls.'), ) def handle(self, *args, **options): access_token = options.get('access_token') if not access_token: msg = 'Courses cannot be migrated if no access token is supplied.' logger.error(msg) raise CommandError(msg) Course.refresh_all(access_token=access_token) ## Instruction: Switch to argparse for management command argument parsing ## Code After: import logging from django.core.management import BaseCommand, CommandError from course_discovery.apps.courses.models import Course logger = logging.getLogger(__name__) class Command(BaseCommand): help = 'Refresh course data from external sources.' def add_arguments(self, parser): parser.add_argument( '--access_token', action='store', dest='access_token', default=None, help='OAuth2 access token used to authenticate API calls.' ) def handle(self, *args, **options): access_token = options.get('access_token') if not access_token: msg = 'Courses cannot be migrated if no access token is supplied.' logger.error(msg) raise CommandError(msg) Course.refresh_all(access_token=access_token)
// ... existing code ... import logging // ... modified code ... def add_arguments(self, parser): parser.add_argument( '--access_token', action='store', dest='access_token', default=None, help='OAuth2 access token used to authenticate API calls.' ) // ... rest of the code ...
09931cfbba746daf5127b6113187042341e3be3d
tests/conftest.py
tests/conftest.py
import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", }
import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
Add more pytest fixtures (access_key, secret_key, account_id, timestamp)
Add more pytest fixtures (access_key, secret_key, account_id, timestamp)
Python
unlicense
GriceTurrble/python-amazon-mws,Bobspadger/python-amazon-mws
import pytest @pytest.fixture - def credentials(): + def access_key(): + return "AAAAAAAAAAAAAAAAAAAA" + + + @pytest.fixture + def secret_key(): + return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + + + @pytest.fixture + def account_id(): + return "AAAAAAAAAAAAAA" + + + @pytest.fixture + def timestamp(): + return '2017-08-12T19:40:35Z' + + + @pytest.fixture + def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { - "access_key": "AAAAAAAAAAAAAAAAAAAA", - "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", - "account_id": "AAAAAAAAAAAAAA", + "access_key": access_key, + "secret_key": secret_key, + "account_id": account_id, }
Add more pytest fixtures (access_key, secret_key, account_id, timestamp)
## Code Before: import pytest @pytest.fixture def credentials(): """Fake set of MWS credentials""" return { "access_key": "AAAAAAAAAAAAAAAAAAAA", "secret_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", "account_id": "AAAAAAAAAAAAAA", } ## Instruction: Add more pytest fixtures (access_key, secret_key, account_id, timestamp) ## Code After: import pytest @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, }
# ... existing code ... @pytest.fixture def access_key(): return "AAAAAAAAAAAAAAAAAAAA" @pytest.fixture def secret_key(): return "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" @pytest.fixture def account_id(): return "AAAAAAAAAAAAAA" @pytest.fixture def timestamp(): return '2017-08-12T19:40:35Z' @pytest.fixture def credentials(access_key, secret_key, account_id): """Fake set of MWS credentials""" # ... modified code ... return { "access_key": access_key, "secret_key": secret_key, "account_id": account_id, } # ... rest of the code ...
323a92afd125bd97c960ab71c64f78601ec4b000
aioinotify/watch.py
aioinotify/watch.py
import asyncio class Watch: """Represents an inotify watch as added by InotifyProtocol.watch()""" def __init__(self, watch_descriptor, callback, protocol): """ :param int watch_descriptor: The watch descriptor as returned by inotify_add_watch :param callback: A function with one positional argument (the event object) called when an inotify event happens. """ self.watch_descriptor = watch_descriptor self._callback = callback self._closed = False self._protocol = protocol @asyncio.coroutine def dispatch_event(self, event): if not self._closed: yield from self._callback(event) def close(self): if not self._closed: self._protocol._remove_watch(self.watch_descriptor) self._closed = True
import asyncio class Watch: """Represents an inotify watch as added by InotifyProtocol.watch()""" def __init__(self, watch_descriptor, callback, protocol): """ :param int watch_descriptor: The watch descriptor as returned by inotify_add_watch :param callback: A function with one positional argument (the event object) called when an inotify event happens. """ self.watch_descriptor = watch_descriptor self._callback = callback self._closed = False self._protocol = protocol def __enter__(self): return self def __exit__(self, *exc): self.close() @asyncio.coroutine def dispatch_event(self, event): if not self._closed: yield from self._callback(event) def close(self): if not self._closed: self._protocol._remove_watch(self.watch_descriptor) self._closed = True
Make Watch also a context manager
Make Watch also a context manager
Python
apache-2.0
mwfrojdman/aioinotify
import asyncio class Watch: """Represents an inotify watch as added by InotifyProtocol.watch()""" def __init__(self, watch_descriptor, callback, protocol): """ :param int watch_descriptor: The watch descriptor as returned by inotify_add_watch :param callback: A function with one positional argument (the event object) called when an inotify event happens. """ self.watch_descriptor = watch_descriptor self._callback = callback self._closed = False self._protocol = protocol + def __enter__(self): + return self + + def __exit__(self, *exc): + self.close() + @asyncio.coroutine def dispatch_event(self, event): if not self._closed: yield from self._callback(event) def close(self): if not self._closed: self._protocol._remove_watch(self.watch_descriptor) self._closed = True
Make Watch also a context manager
## Code Before: import asyncio class Watch: """Represents an inotify watch as added by InotifyProtocol.watch()""" def __init__(self, watch_descriptor, callback, protocol): """ :param int watch_descriptor: The watch descriptor as returned by inotify_add_watch :param callback: A function with one positional argument (the event object) called when an inotify event happens. """ self.watch_descriptor = watch_descriptor self._callback = callback self._closed = False self._protocol = protocol @asyncio.coroutine def dispatch_event(self, event): if not self._closed: yield from self._callback(event) def close(self): if not self._closed: self._protocol._remove_watch(self.watch_descriptor) self._closed = True ## Instruction: Make Watch also a context manager ## Code After: import asyncio class Watch: """Represents an inotify watch as added by InotifyProtocol.watch()""" def __init__(self, watch_descriptor, callback, protocol): """ :param int watch_descriptor: The watch descriptor as returned by inotify_add_watch :param callback: A function with one positional argument (the event object) called when an inotify event happens. """ self.watch_descriptor = watch_descriptor self._callback = callback self._closed = False self._protocol = protocol def __enter__(self): return self def __exit__(self, *exc): self.close() @asyncio.coroutine def dispatch_event(self, event): if not self._closed: yield from self._callback(event) def close(self): if not self._closed: self._protocol._remove_watch(self.watch_descriptor) self._closed = True
# ... existing code ... def __enter__(self): return self def __exit__(self, *exc): self.close() @asyncio.coroutine # ... rest of the code ...
121c886dfe02ed8cd71075a03e268d51bcb137fc
institutions/respondants/search_indexes.py
institutions/respondants/search_indexes.py
from haystack import indexes from respondants.models import Institution class InstitutionIndex(indexes.SearchIndex, indexes.Indexable): """Search Index associated with an institution. Allows for searching by name or lender id""" text = indexes.CharField(document=True, model_attr='name') text_auto = indexes.EdgeNgramField(model_attr='name') lender_id = indexes.CharField() assets = indexes.IntegerField(model_attr='assets') num_loans = indexes.IntegerField(model_attr='num_loans') def get_model(self): return Institution def index_queryset(self, using=None): """To account for the somewhat complicated count query, we need to add an "extra" annotation""" subquery_tail = """ FROM hmda_hmdarecord WHERE year = 2013 AND hmda_hmdarecord.lender = CAST(respondants_institution.agency_id AS VARCHAR(1)) || respondants_institution.ffiec_id""" return self.get_model().objects.extra( select={"num_loans": "SELECT COUNT(*) " + subquery_tail}, where=["SELECT COUNT(*) > 0 " + subquery_tail]) def read_queryset(self, using=None): """A more efficient query than the index query -- makes use of select related and does not include the num_loans calculation.""" return self.get_model().objects.select_related('zip_code', 'agency') def prepare_lender_id(self, institution): return str(institution.agency_id) + institution.ffiec_id
from haystack import indexes from respondants.models import Institution class InstitutionIndex(indexes.SearchIndex, indexes.Indexable): """Search Index associated with an institution. Allows for searching by name or lender id""" text = indexes.CharField(document=True, model_attr='name') text_auto = indexes.EdgeNgramField(model_attr='name') lender_id = indexes.CharField() assets = indexes.IntegerField(model_attr='assets') num_loans = indexes.IntegerField(model_attr='num_loans') def get_model(self): return Institution def index_queryset(self, using=None): """To account for the somewhat complicated count query, we need to add an "extra" annotation""" subquery_tail = """ FROM hmda_hmdarecord WHERE hmda_hmdarecord.lender = CAST(respondants_institution.agency_id AS VARCHAR(1)) || respondants_institution.ffiec_id""" return self.get_model().objects.extra( select={"num_loans": "SELECT COUNT(*) " + subquery_tail}, where=["SELECT COUNT(*) > 0 " + subquery_tail]) def read_queryset(self, using=None): """A more efficient query than the index query -- makes use of select related and does not include the num_loans calculation.""" return self.get_model().objects.select_related('zip_code', 'agency') def prepare_lender_id(self, institution): return str(institution.agency_id) + institution.ffiec_id
Revert "adding 2013 to search query"
Revert "adding 2013 to search query"
Python
cc0-1.0
mehtadev17/mapusaurus,mehtadev17/mapusaurus,mehtadev17/mapusaurus
from haystack import indexes from respondants.models import Institution class InstitutionIndex(indexes.SearchIndex, indexes.Indexable): """Search Index associated with an institution. Allows for searching by name or lender id""" text = indexes.CharField(document=True, model_attr='name') text_auto = indexes.EdgeNgramField(model_attr='name') lender_id = indexes.CharField() assets = indexes.IntegerField(model_attr='assets') num_loans = indexes.IntegerField(model_attr='num_loans') def get_model(self): return Institution def index_queryset(self, using=None): """To account for the somewhat complicated count query, we need to add an "extra" annotation""" subquery_tail = """ FROM hmda_hmdarecord - WHERE year = 2013 AND hmda_hmdarecord.lender + WHERE hmda_hmdarecord.lender = CAST(respondants_institution.agency_id AS VARCHAR(1)) || respondants_institution.ffiec_id""" return self.get_model().objects.extra( select={"num_loans": "SELECT COUNT(*) " + subquery_tail}, where=["SELECT COUNT(*) > 0 " + subquery_tail]) def read_queryset(self, using=None): """A more efficient query than the index query -- makes use of select related and does not include the num_loans calculation.""" return self.get_model().objects.select_related('zip_code', 'agency') def prepare_lender_id(self, institution): return str(institution.agency_id) + institution.ffiec_id
Revert "adding 2013 to search query"
## Code Before: from haystack import indexes from respondants.models import Institution class InstitutionIndex(indexes.SearchIndex, indexes.Indexable): """Search Index associated with an institution. Allows for searching by name or lender id""" text = indexes.CharField(document=True, model_attr='name') text_auto = indexes.EdgeNgramField(model_attr='name') lender_id = indexes.CharField() assets = indexes.IntegerField(model_attr='assets') num_loans = indexes.IntegerField(model_attr='num_loans') def get_model(self): return Institution def index_queryset(self, using=None): """To account for the somewhat complicated count query, we need to add an "extra" annotation""" subquery_tail = """ FROM hmda_hmdarecord WHERE year = 2013 AND hmda_hmdarecord.lender = CAST(respondants_institution.agency_id AS VARCHAR(1)) || respondants_institution.ffiec_id""" return self.get_model().objects.extra( select={"num_loans": "SELECT COUNT(*) " + subquery_tail}, where=["SELECT COUNT(*) > 0 " + subquery_tail]) def read_queryset(self, using=None): """A more efficient query than the index query -- makes use of select related and does not include the num_loans calculation.""" return self.get_model().objects.select_related('zip_code', 'agency') def prepare_lender_id(self, institution): return str(institution.agency_id) + institution.ffiec_id ## Instruction: Revert "adding 2013 to search query" ## Code After: from haystack import indexes from respondants.models import Institution class InstitutionIndex(indexes.SearchIndex, indexes.Indexable): """Search Index associated with an institution. Allows for searching by name or lender id""" text = indexes.CharField(document=True, model_attr='name') text_auto = indexes.EdgeNgramField(model_attr='name') lender_id = indexes.CharField() assets = indexes.IntegerField(model_attr='assets') num_loans = indexes.IntegerField(model_attr='num_loans') def get_model(self): return Institution def index_queryset(self, using=None): """To account for the somewhat complicated count query, we need to add an "extra" annotation""" subquery_tail = """ FROM hmda_hmdarecord WHERE hmda_hmdarecord.lender = CAST(respondants_institution.agency_id AS VARCHAR(1)) || respondants_institution.ffiec_id""" return self.get_model().objects.extra( select={"num_loans": "SELECT COUNT(*) " + subquery_tail}, where=["SELECT COUNT(*) > 0 " + subquery_tail]) def read_queryset(self, using=None): """A more efficient query than the index query -- makes use of select related and does not include the num_loans calculation.""" return self.get_model().objects.select_related('zip_code', 'agency') def prepare_lender_id(self, institution): return str(institution.agency_id) + institution.ffiec_id
# ... existing code ... FROM hmda_hmdarecord WHERE hmda_hmdarecord.lender = CAST(respondants_institution.agency_id AS VARCHAR(1)) # ... rest of the code ...
4ee3900c8ac78c8ed1d0145f9d99a0485b542141
senic_hub/backend/views/setup_config.py
senic_hub/backend/views/setup_config.py
from cornice.service import Service from ..commands import create_configuration_files_and_restart_apps_ from ..config import path configuration_service = Service( name='configuration_create', path=path('setup/config'), renderer='json', accept='application/json', ) @configuration_service.post() def configuration_create_view(request): create_configuration_files_and_restart_apps_(request.registry.settings)
from cornice.service import Service from ..commands import create_configuration_files_and_restart_apps_ from ..config import path from ..supervisor import get_supervisor_rpc_client, stop_program configuration_service = Service( name='configuration_create', path=path('setup/config'), renderer='json', accept='application/json', ) @configuration_service.post() def configuration_create_view(request): create_configuration_files_and_restart_apps_(request.registry.settings) # stop device discovery daemon supervisorctl = get_supervisor_rpc_client() stop_program('device_discovery', supervisorctl)
Stop device discovery after onboarding
Stop device discovery after onboarding
Python
mit
grunskis/nuimo-hub-backend,grunskis/senic-hub,grunskis/nuimo-hub-backend,grunskis/nuimo-hub-backend,getsenic/senic-hub,grunskis/senic-hub,grunskis/senic-hub,grunskis/senic-hub,grunskis/nuimo-hub-backend,getsenic/senic-hub,grunskis/nuimo-hub-backend,grunskis/senic-hub,grunskis/senic-hub
from cornice.service import Service from ..commands import create_configuration_files_and_restart_apps_ from ..config import path + from ..supervisor import get_supervisor_rpc_client, stop_program configuration_service = Service( name='configuration_create', path=path('setup/config'), renderer='json', accept='application/json', ) @configuration_service.post() def configuration_create_view(request): create_configuration_files_and_restart_apps_(request.registry.settings) + # stop device discovery daemon + supervisorctl = get_supervisor_rpc_client() + stop_program('device_discovery', supervisorctl) +
Stop device discovery after onboarding
## Code Before: from cornice.service import Service from ..commands import create_configuration_files_and_restart_apps_ from ..config import path configuration_service = Service( name='configuration_create', path=path('setup/config'), renderer='json', accept='application/json', ) @configuration_service.post() def configuration_create_view(request): create_configuration_files_and_restart_apps_(request.registry.settings) ## Instruction: Stop device discovery after onboarding ## Code After: from cornice.service import Service from ..commands import create_configuration_files_and_restart_apps_ from ..config import path from ..supervisor import get_supervisor_rpc_client, stop_program configuration_service = Service( name='configuration_create', path=path('setup/config'), renderer='json', accept='application/json', ) @configuration_service.post() def configuration_create_view(request): create_configuration_files_and_restart_apps_(request.registry.settings) # stop device discovery daemon supervisorctl = get_supervisor_rpc_client() stop_program('device_discovery', supervisorctl)
// ... existing code ... from ..config import path from ..supervisor import get_supervisor_rpc_client, stop_program // ... modified code ... create_configuration_files_and_restart_apps_(request.registry.settings) # stop device discovery daemon supervisorctl = get_supervisor_rpc_client() stop_program('device_discovery', supervisorctl) // ... rest of the code ...
8e14f3a7d40d386185d445afc18e6add57cd107e
LR/lr/lib/helpers.py
LR/lr/lib/helpers.py
# Import helpers as desired, or define your own, ie: #from webhelpers.html.tags import checkbox, password def importModuleFromFile(fullpath): """Loads and returns module defined by the file path. Returns None if file could not be loaded""" import os import sys import logging log = logging.getLogger(__name__) sys.path.append(os.path.dirname(fullpath)) module = None try: module = __import__(os.path.splitext(os.path.basename(fullpath))[0]) except Exception as ex: log.exception("Failed to load module:\n"+ex) finally: del sys.path[-1] return module def convertToISO8601UTC (datetime=None): if datetime != None: return (datetime - datetime.utcoffset()).replace(tzinfo=None) return datetime def convertToISO8601Zformat(datetime=None): if datetime != None: return ((datetime - datetime.utcoffset()).replace(tzinfo=None)).isoformat() + "Z" return datetime
from datetime import datetime import time """Helper functions Consists of functions to typically be used within templates, but also available to Controllers. This module is available to templates as 'h'. """ # Import helpers as desired, or define your own, ie: #from webhelpers.html.tags import checkbox, password def importModuleFromFile(fullpath): """Loads and returns module defined by the file path. Returns None if file could not be loaded""" import os import sys import logging log = logging.getLogger(__name__) sys.path.append(os.path.dirname(fullpath)) module = None try: module = __import__(os.path.splitext(os.path.basename(fullpath))[0]) except Exception as ex: log.exception("Failed to load module:\n"+ex) finally: del sys.path[-1] return module def convertToISO8601UTC (dateTimeArg=None): if isinstance(dateTimeArg, datetime) == True: return datetime.utcfromtimestamp(time.mktime(dateTimeArg.timetuple())) return dateTimeArg def convertToISO8601Zformat(dateTimeArg=None): if isinstance(dateTimeArg, datetime) ==True: return convertToISO8601UTC (dateTimeArg).isoformat()+ "Z" return dateTimeArg def nowToISO8601Zformat(): return convertToISO8601Zformat(datetime.now())
Add method to return the time now in complete UTC ISO format
Add method to return the time now in complete UTC ISO format
Python
apache-2.0
jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry
+ + from datetime import datetime + import time + """Helper functions + + Consists of functions to typically be used within templates, but also + available to Controllers. This module is available to templates as 'h'. + """ # Import helpers as desired, or define your own, ie: #from webhelpers.html.tags import checkbox, password def importModuleFromFile(fullpath): """Loads and returns module defined by the file path. Returns None if file could not be loaded""" import os import sys import logging log = logging.getLogger(__name__) sys.path.append(os.path.dirname(fullpath)) module = None try: module = __import__(os.path.splitext(os.path.basename(fullpath))[0]) except Exception as ex: log.exception("Failed to load module:\n"+ex) finally: del sys.path[-1] return module - def convertToISO8601UTC (datetime=None): + def convertToISO8601UTC (dateTimeArg=None): - if datetime != None: - return (datetime - datetime.utcoffset()).replace(tzinfo=None) + if isinstance(dateTimeArg, datetime) == True: + return datetime.utcfromtimestamp(time.mktime(dateTimeArg.timetuple())) - return datetime + return dateTimeArg - def convertToISO8601Zformat(datetime=None): + def convertToISO8601Zformat(dateTimeArg=None): - if datetime != None: - return ((datetime - datetime.utcoffset()).replace(tzinfo=None)).isoformat() + "Z" + if isinstance(dateTimeArg, datetime) ==True: + return convertToISO8601UTC (dateTimeArg).isoformat()+ "Z" - return datetime + return dateTimeArg + + def nowToISO8601Zformat(): + return convertToISO8601Zformat(datetime.now())
Add method to return the time now in complete UTC ISO format
## Code Before: # Import helpers as desired, or define your own, ie: #from webhelpers.html.tags import checkbox, password def importModuleFromFile(fullpath): """Loads and returns module defined by the file path. Returns None if file could not be loaded""" import os import sys import logging log = logging.getLogger(__name__) sys.path.append(os.path.dirname(fullpath)) module = None try: module = __import__(os.path.splitext(os.path.basename(fullpath))[0]) except Exception as ex: log.exception("Failed to load module:\n"+ex) finally: del sys.path[-1] return module def convertToISO8601UTC (datetime=None): if datetime != None: return (datetime - datetime.utcoffset()).replace(tzinfo=None) return datetime def convertToISO8601Zformat(datetime=None): if datetime != None: return ((datetime - datetime.utcoffset()).replace(tzinfo=None)).isoformat() + "Z" return datetime ## Instruction: Add method to return the time now in complete UTC ISO format ## Code After: from datetime import datetime import time """Helper functions Consists of functions to typically be used within templates, but also available to Controllers. This module is available to templates as 'h'. """ # Import helpers as desired, or define your own, ie: #from webhelpers.html.tags import checkbox, password def importModuleFromFile(fullpath): """Loads and returns module defined by the file path. Returns None if file could not be loaded""" import os import sys import logging log = logging.getLogger(__name__) sys.path.append(os.path.dirname(fullpath)) module = None try: module = __import__(os.path.splitext(os.path.basename(fullpath))[0]) except Exception as ex: log.exception("Failed to load module:\n"+ex) finally: del sys.path[-1] return module def convertToISO8601UTC (dateTimeArg=None): if isinstance(dateTimeArg, datetime) == True: return datetime.utcfromtimestamp(time.mktime(dateTimeArg.timetuple())) return dateTimeArg def convertToISO8601Zformat(dateTimeArg=None): if isinstance(dateTimeArg, datetime) ==True: return convertToISO8601UTC (dateTimeArg).isoformat()+ "Z" return dateTimeArg def nowToISO8601Zformat(): return convertToISO8601Zformat(datetime.now())
// ... existing code ... from datetime import datetime import time """Helper functions Consists of functions to typically be used within templates, but also available to Controllers. This module is available to templates as 'h'. """ # Import helpers as desired, or define your own, ie: // ... modified code ... def convertToISO8601UTC (dateTimeArg=None): if isinstance(dateTimeArg, datetime) == True: return datetime.utcfromtimestamp(time.mktime(dateTimeArg.timetuple())) return dateTimeArg def convertToISO8601Zformat(dateTimeArg=None): if isinstance(dateTimeArg, datetime) ==True: return convertToISO8601UTC (dateTimeArg).isoformat()+ "Z" return dateTimeArg def nowToISO8601Zformat(): return convertToISO8601Zformat(datetime.now()) // ... rest of the code ...
61253510bc859ec1695484d11cbadcd92ad4b107
tests/test_misc.py
tests/test_misc.py
import mistune from unittest import TestCase class TestMiscCases(TestCase): def test_none(self): self.assertEqual(mistune.html(None), '') def test_before_parse_hooks(self): def _add_name(md, s, state): state['name'] = 'test' return s, state md = mistune.create_markdown() md.before_parse_hooks.append(_add_name) state = {} md.parse('', state) self.assertEqual(state['name'], 'test')
import mistune from unittest import TestCase class TestMiscCases(TestCase): def test_none(self): self.assertEqual(mistune.html(None), '') def test_before_parse_hooks(self): def _add_name(md, s, state): state['name'] = 'test' return s, state md = mistune.create_markdown() md.before_parse_hooks.append(_add_name) state = {} md.parse('', state) self.assertEqual(state['name'], 'test') def test_escape_html(self): md = mistune.create_markdown(escape=True) result = md('<div>1</div>') expected = '<p>&lt;div&gt;1&lt;/div&gt;</p>' self.assertEqual(result.strip(), expected) result = md('<em>1</em>') expected = '<p>&lt;em&gt;1&lt;/em&gt;</p>' self.assertEqual(result.strip(), expected) def test_emphasis(self): md = mistune.create_markdown(escape=True) result = md('_em_ **strong**') expected = '<p><em>em</em> <strong>strong</strong></p>' self.assertEqual(result.strip(), expected) def test_allow_harmful_protocols(self): renderer = mistune.HTMLRenderer(allow_harmful_protocols=True) md = mistune.Markdown(renderer) result = md('[h](javascript:alert)') expected = '<p><a href="javascript:alert">h</a></p>' self.assertEqual(result.strip(), expected)
Add test for allow harmful protocols
Add test for allow harmful protocols
Python
bsd-3-clause
lepture/mistune
import mistune from unittest import TestCase class TestMiscCases(TestCase): - def test_none(self): + def test_none(self): - self.assertEqual(mistune.html(None), '') + self.assertEqual(mistune.html(None), '') - def test_before_parse_hooks(self): + def test_before_parse_hooks(self): - def _add_name(md, s, state): + def _add_name(md, s, state): - state['name'] = 'test' + state['name'] = 'test' - return s, state + return s, state - md = mistune.create_markdown() + md = mistune.create_markdown() - md.before_parse_hooks.append(_add_name) + md.before_parse_hooks.append(_add_name) - state = {} + state = {} - md.parse('', state) + md.parse('', state) - self.assertEqual(state['name'], 'test') + self.assertEqual(state['name'], 'test') + def test_escape_html(self): + md = mistune.create_markdown(escape=True) + result = md('<div>1</div>') + expected = '<p>&lt;div&gt;1&lt;/div&gt;</p>' + self.assertEqual(result.strip(), expected) + + result = md('<em>1</em>') + expected = '<p>&lt;em&gt;1&lt;/em&gt;</p>' + self.assertEqual(result.strip(), expected) + + def test_emphasis(self): + md = mistune.create_markdown(escape=True) + result = md('_em_ **strong**') + expected = '<p><em>em</em> <strong>strong</strong></p>' + self.assertEqual(result.strip(), expected) + + def test_allow_harmful_protocols(self): + renderer = mistune.HTMLRenderer(allow_harmful_protocols=True) + md = mistune.Markdown(renderer) + result = md('[h](javascript:alert)') + expected = '<p><a href="javascript:alert">h</a></p>' + self.assertEqual(result.strip(), expected) +
Add test for allow harmful protocols
## Code Before: import mistune from unittest import TestCase class TestMiscCases(TestCase): def test_none(self): self.assertEqual(mistune.html(None), '') def test_before_parse_hooks(self): def _add_name(md, s, state): state['name'] = 'test' return s, state md = mistune.create_markdown() md.before_parse_hooks.append(_add_name) state = {} md.parse('', state) self.assertEqual(state['name'], 'test') ## Instruction: Add test for allow harmful protocols ## Code After: import mistune from unittest import TestCase class TestMiscCases(TestCase): def test_none(self): self.assertEqual(mistune.html(None), '') def test_before_parse_hooks(self): def _add_name(md, s, state): state['name'] = 'test' return s, state md = mistune.create_markdown() md.before_parse_hooks.append(_add_name) state = {} md.parse('', state) self.assertEqual(state['name'], 'test') def test_escape_html(self): md = mistune.create_markdown(escape=True) result = md('<div>1</div>') expected = '<p>&lt;div&gt;1&lt;/div&gt;</p>' self.assertEqual(result.strip(), expected) result = md('<em>1</em>') expected = '<p>&lt;em&gt;1&lt;/em&gt;</p>' self.assertEqual(result.strip(), expected) def test_emphasis(self): md = mistune.create_markdown(escape=True) result = md('_em_ **strong**') expected = '<p><em>em</em> <strong>strong</strong></p>' self.assertEqual(result.strip(), expected) def test_allow_harmful_protocols(self): renderer = mistune.HTMLRenderer(allow_harmful_protocols=True) md = mistune.Markdown(renderer) result = md('[h](javascript:alert)') expected = '<p><a href="javascript:alert">h</a></p>' self.assertEqual(result.strip(), expected)
// ... existing code ... class TestMiscCases(TestCase): def test_none(self): self.assertEqual(mistune.html(None), '') def test_before_parse_hooks(self): def _add_name(md, s, state): state['name'] = 'test' return s, state md = mistune.create_markdown() md.before_parse_hooks.append(_add_name) state = {} md.parse('', state) self.assertEqual(state['name'], 'test') def test_escape_html(self): md = mistune.create_markdown(escape=True) result = md('<div>1</div>') expected = '<p>&lt;div&gt;1&lt;/div&gt;</p>' self.assertEqual(result.strip(), expected) result = md('<em>1</em>') expected = '<p>&lt;em&gt;1&lt;/em&gt;</p>' self.assertEqual(result.strip(), expected) def test_emphasis(self): md = mistune.create_markdown(escape=True) result = md('_em_ **strong**') expected = '<p><em>em</em> <strong>strong</strong></p>' self.assertEqual(result.strip(), expected) def test_allow_harmful_protocols(self): renderer = mistune.HTMLRenderer(allow_harmful_protocols=True) md = mistune.Markdown(renderer) result = md('[h](javascript:alert)') expected = '<p><a href="javascript:alert">h</a></p>' self.assertEqual(result.strip(), expected) // ... rest of the code ...
a6441de03522f9352742cba5a8a656785de05455
tests/mock_vws/test_query.py
tests/mock_vws/test_query.py
import pytest import requests from tests.mock_vws.utils import Endpoint, assert_query_success @pytest.mark.usefixtures('verify_mock_vuforia') class TestQuery: """ Tests for the query endpoint. """ def test_no_results( self, query_endpoint: Endpoint, ) -> None: """ When there are no matching images in the database, an empty list of results is returned. """ session = requests.Session() response = session.send( # type: ignore request=query_endpoint.prepared_request, ) assert_query_success(response=response) assert response.json()['results'] == []
import io from urllib.parse import urljoin import pytest import requests from requests_mock import POST from urllib3.filepost import encode_multipart_formdata from tests.mock_vws.utils import ( VuforiaDatabaseKeys, assert_query_success, authorization_header, rfc_1123_date, ) VWQ_HOST = 'https://cloudreco.vuforia.com' @pytest.mark.usefixtures('verify_mock_vuforia') class TestQuery: """ Tests for the query endpoint. """ def test_no_results( self, high_quality_image: io.BytesIO, vuforia_database_keys: VuforiaDatabaseKeys, ) -> None: """ When there are no matching images in the database, an empty list of results is returned. """ image_content = high_quality_image.read() date = rfc_1123_date() request_path = '/v1/query' files = {'image': ('image.jpeg', image_content, 'image/jpeg')} content, content_type_header = encode_multipart_formdata(files) method = POST access_key = vuforia_database_keys.client_access_key secret_key = vuforia_database_keys.client_secret_key authorization_string = authorization_header( access_key=access_key, secret_key=secret_key, method=method, content=content, # Note that this is not the actual Content-Type header value sent. content_type='multipart/form-data', date=date, request_path=request_path, ) headers = { 'Authorization': authorization_string, 'Date': date, 'Content-Type': content_type_header, } response = requests.request( method=method, url=urljoin(base=VWQ_HOST, url=request_path), headers=headers, data=content, ) assert_query_success(response=response) assert response.json()['results'] == []
Use raw request making in query test
Use raw request making in query test
Python
mit
adamtheturtle/vws-python,adamtheturtle/vws-python
+ + import io + from urllib.parse import urljoin import pytest import requests + from requests_mock import POST + from urllib3.filepost import encode_multipart_formdata - from tests.mock_vws.utils import Endpoint, assert_query_success + from tests.mock_vws.utils import ( + VuforiaDatabaseKeys, + assert_query_success, + authorization_header, + rfc_1123_date, + ) + + + VWQ_HOST = 'https://cloudreco.vuforia.com' @pytest.mark.usefixtures('verify_mock_vuforia') class TestQuery: """ Tests for the query endpoint. """ def test_no_results( self, - query_endpoint: Endpoint, + high_quality_image: io.BytesIO, + vuforia_database_keys: VuforiaDatabaseKeys, ) -> None: """ When there are no matching images in the database, an empty list of results is returned. """ - session = requests.Session() - response = session.send( # type: ignore - request=query_endpoint.prepared_request, + image_content = high_quality_image.read() + date = rfc_1123_date() + request_path = '/v1/query' + files = {'image': ('image.jpeg', image_content, 'image/jpeg')} + content, content_type_header = encode_multipart_formdata(files) + method = POST + + access_key = vuforia_database_keys.client_access_key + secret_key = vuforia_database_keys.client_secret_key + authorization_string = authorization_header( + access_key=access_key, + secret_key=secret_key, + method=method, + content=content, + # Note that this is not the actual Content-Type header value sent. + content_type='multipart/form-data', + date=date, + request_path=request_path, ) + + headers = { + 'Authorization': authorization_string, + 'Date': date, + 'Content-Type': content_type_header, + } + + response = requests.request( + method=method, + url=urljoin(base=VWQ_HOST, url=request_path), + headers=headers, + data=content, + ) + assert_query_success(response=response) assert response.json()['results'] == []
Use raw request making in query test
## Code Before: import pytest import requests from tests.mock_vws.utils import Endpoint, assert_query_success @pytest.mark.usefixtures('verify_mock_vuforia') class TestQuery: """ Tests for the query endpoint. """ def test_no_results( self, query_endpoint: Endpoint, ) -> None: """ When there are no matching images in the database, an empty list of results is returned. """ session = requests.Session() response = session.send( # type: ignore request=query_endpoint.prepared_request, ) assert_query_success(response=response) assert response.json()['results'] == [] ## Instruction: Use raw request making in query test ## Code After: import io from urllib.parse import urljoin import pytest import requests from requests_mock import POST from urllib3.filepost import encode_multipart_formdata from tests.mock_vws.utils import ( VuforiaDatabaseKeys, assert_query_success, authorization_header, rfc_1123_date, ) VWQ_HOST = 'https://cloudreco.vuforia.com' @pytest.mark.usefixtures('verify_mock_vuforia') class TestQuery: """ Tests for the query endpoint. """ def test_no_results( self, high_quality_image: io.BytesIO, vuforia_database_keys: VuforiaDatabaseKeys, ) -> None: """ When there are no matching images in the database, an empty list of results is returned. """ image_content = high_quality_image.read() date = rfc_1123_date() request_path = '/v1/query' files = {'image': ('image.jpeg', image_content, 'image/jpeg')} content, content_type_header = encode_multipart_formdata(files) method = POST access_key = vuforia_database_keys.client_access_key secret_key = vuforia_database_keys.client_secret_key authorization_string = authorization_header( access_key=access_key, secret_key=secret_key, method=method, content=content, # Note that this is not the actual Content-Type header value sent. content_type='multipart/form-data', date=date, request_path=request_path, ) headers = { 'Authorization': authorization_string, 'Date': date, 'Content-Type': content_type_header, } response = requests.request( method=method, url=urljoin(base=VWQ_HOST, url=request_path), headers=headers, data=content, ) assert_query_success(response=response) assert response.json()['results'] == []
... import io from urllib.parse import urljoin ... import requests from requests_mock import POST from urllib3.filepost import encode_multipart_formdata from tests.mock_vws.utils import ( VuforiaDatabaseKeys, assert_query_success, authorization_header, rfc_1123_date, ) VWQ_HOST = 'https://cloudreco.vuforia.com' ... self, high_quality_image: io.BytesIO, vuforia_database_keys: VuforiaDatabaseKeys, ) -> None: ... """ image_content = high_quality_image.read() date = rfc_1123_date() request_path = '/v1/query' files = {'image': ('image.jpeg', image_content, 'image/jpeg')} content, content_type_header = encode_multipart_formdata(files) method = POST access_key = vuforia_database_keys.client_access_key secret_key = vuforia_database_keys.client_secret_key authorization_string = authorization_header( access_key=access_key, secret_key=secret_key, method=method, content=content, # Note that this is not the actual Content-Type header value sent. content_type='multipart/form-data', date=date, request_path=request_path, ) headers = { 'Authorization': authorization_string, 'Date': date, 'Content-Type': content_type_header, } response = requests.request( method=method, url=urljoin(base=VWQ_HOST, url=request_path), headers=headers, data=content, ) assert_query_success(response=response) ...
8bfe6e791228ccbc3143f3a8747c68d2e8b0cbb5
runtests.py
runtests.py
from django.conf import settings from django.core.management import execute_from_command_line import django import os import sys if not settings.configured: os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings") django.setup() module_root = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, module_root) def runtests(): argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
from django.conf import settings from django.core.management import execute_from_command_line import django import os import sys if not settings.configured: os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings") if django.VERSION >= (1,7): django.setup() module_root = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, module_root) def runtests(): argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
Fix running tests on lower Django versions
Fix running tests on lower Django versions
Python
apache-2.0
AdrianLC/django-parler-rest,edoburu/django-parler-rest
from django.conf import settings from django.core.management import execute_from_command_line import django import os import sys if not settings.configured: os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings") + if django.VERSION >= (1,7): - django.setup() + django.setup() module_root = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, module_root) def runtests(): argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
Fix running tests on lower Django versions
## Code Before: from django.conf import settings from django.core.management import execute_from_command_line import django import os import sys if not settings.configured: os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings") django.setup() module_root = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, module_root) def runtests(): argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests() ## Instruction: Fix running tests on lower Django versions ## Code After: from django.conf import settings from django.core.management import execute_from_command_line import django import os import sys if not settings.configured: os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings") if django.VERSION >= (1,7): django.setup() module_root = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(0, module_root) def runtests(): argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:] execute_from_command_line(argv) if __name__ == '__main__': runtests()
... os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings") if django.VERSION >= (1,7): django.setup() module_root = os.path.dirname(os.path.realpath(__file__)) ...
3c1357627bf1921fdee114b60f96f42c328120b4
caramel/__init__.py
caramel/__init__.py
from pyramid.config import Configurator from sqlalchemy import engine_from_config from .models import ( init_session, ) def main(global_config, **settings): """This function returns a Pyramid WSGI application.""" engine = engine_from_config(settings, "sqlalchemy.") init_session(engine) config = Configurator(settings=settings) config.add_route("ca", "/root.crt", request_method="GET") config.add_route("cabundle", "/bundle.crt", request_method="GET") config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST") config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET") config.scan() return config.make_wsgi_app()
from pyramid.config import Configurator from sqlalchemy import engine_from_config from .models import ( init_session, ) def main(global_config, **settings): """This function returns a Pyramid WSGI application.""" engine = engine_from_config(settings, "sqlalchemy.") init_session(engine) config = Configurator(settings=settings) config.include("pyramid_tm") config.add_route("ca", "/root.crt", request_method="GET") config.add_route("cabundle", "/bundle.crt", request_method="GET") config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST") config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET") config.scan() return config.make_wsgi_app()
Move pyramid_tm include to caramel.main
Caramel: Move pyramid_tm include to caramel.main Move the setting to include pyramid_tm to caramel.main from ini files. This is a vital setting that should never be changed by the user.
Python
agpl-3.0
ModioAB/caramel,ModioAB/caramel
from pyramid.config import Configurator from sqlalchemy import engine_from_config from .models import ( init_session, ) def main(global_config, **settings): """This function returns a Pyramid WSGI application.""" engine = engine_from_config(settings, "sqlalchemy.") init_session(engine) config = Configurator(settings=settings) + config.include("pyramid_tm") config.add_route("ca", "/root.crt", request_method="GET") config.add_route("cabundle", "/bundle.crt", request_method="GET") config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST") config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET") config.scan() return config.make_wsgi_app()
Move pyramid_tm include to caramel.main
## Code Before: from pyramid.config import Configurator from sqlalchemy import engine_from_config from .models import ( init_session, ) def main(global_config, **settings): """This function returns a Pyramid WSGI application.""" engine = engine_from_config(settings, "sqlalchemy.") init_session(engine) config = Configurator(settings=settings) config.add_route("ca", "/root.crt", request_method="GET") config.add_route("cabundle", "/bundle.crt", request_method="GET") config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST") config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET") config.scan() return config.make_wsgi_app() ## Instruction: Move pyramid_tm include to caramel.main ## Code After: from pyramid.config import Configurator from sqlalchemy import engine_from_config from .models import ( init_session, ) def main(global_config, **settings): """This function returns a Pyramid WSGI application.""" engine = engine_from_config(settings, "sqlalchemy.") init_session(engine) config = Configurator(settings=settings) config.include("pyramid_tm") config.add_route("ca", "/root.crt", request_method="GET") config.add_route("cabundle", "/bundle.crt", request_method="GET") config.add_route("csr", "/{sha256:[0-9a-f]{64}}", request_method="POST") config.add_route("cert", "/{sha256:[0-9a-f]{64}}", request_method="GET") config.scan() return config.make_wsgi_app()
// ... existing code ... config = Configurator(settings=settings) config.include("pyramid_tm") config.add_route("ca", "/root.crt", request_method="GET") // ... rest of the code ...
4e94612f7fad4b231de9c1a4044259be6079a982
fabtasks.py
fabtasks.py
from fabric.api import task, run def _generate_password(): import string from random import sample chars = string.letters + string.digits return ''.join(sample(chars, 8)) def create_mysql_instance(mysql_user, mysql_password, instance_code): user = instance_code password = _generate_password() cmd = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s; grant all on %s.* to '%s'@'%%' identified by '%s'\"" % ( mysql_user, mysql_password, 3306, user, user, user, password,) return run(cmd) # Local Variables: ** # comment-column: 56 ** # indent-tabs-mode: nil ** # python-indent: 4 ** # End: **
from fabric.api import run def _generate_password(): import string from random import sample chars = string.letters + string.digits return ''.join(sample(chars, 8)) def create_mysql_instance(mysql_user, mysql_password, instance_code): user = instance_code password = _generate_password() cmd_create_database = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s;\"" % ( mysql_user, mysql_password, 3306, user,) cmd_create_user = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"grant all on %s.* to '%s'@'%%' identified by '%s';\"" % ( mysql_user, mysql_password, 3306, user, user, password,) run(cmd_create_database) run(cmd_create_user) # Local Variables: ** # comment-column: 56 ** # indent-tabs-mode: nil ** # python-indent: 4 ** # End: **
Split create database and create user into two individual commands
Split create database and create user into two individual commands
Python
mit
goncha/fablib
- from fabric.api import task, run + from fabric.api import run def _generate_password(): import string from random import sample chars = string.letters + string.digits return ''.join(sample(chars, 8)) def create_mysql_instance(mysql_user, mysql_password, instance_code): user = instance_code password = _generate_password() - cmd = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s; grant all on %s.* to '%s'@'%%' identified by '%s'\"" % ( + cmd_create_database = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s;\"" % ( mysql_user, mysql_password, 3306, + user,) + cmd_create_user = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"grant all on %s.* to '%s'@'%%' identified by '%s';\"" % ( + mysql_user, mysql_password, 3306, - user, user, user, password,) + user, user, password,) - return run(cmd) + + run(cmd_create_database) + run(cmd_create_user) # Local Variables: ** # comment-column: 56 ** # indent-tabs-mode: nil ** # python-indent: 4 ** # End: **
Split create database and create user into two individual commands
## Code Before: from fabric.api import task, run def _generate_password(): import string from random import sample chars = string.letters + string.digits return ''.join(sample(chars, 8)) def create_mysql_instance(mysql_user, mysql_password, instance_code): user = instance_code password = _generate_password() cmd = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s; grant all on %s.* to '%s'@'%%' identified by '%s'\"" % ( mysql_user, mysql_password, 3306, user, user, user, password,) return run(cmd) # Local Variables: ** # comment-column: 56 ** # indent-tabs-mode: nil ** # python-indent: 4 ** # End: ** ## Instruction: Split create database and create user into to individual commands ## Code After: from fabric.api import run def _generate_password(): import string from random import sample chars = string.letters + string.digits return ''.join(sample(chars, 8)) def create_mysql_instance(mysql_user, mysql_password, instance_code): user = instance_code password = _generate_password() cmd_create_database = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s;\"" % ( mysql_user, mysql_password, 3306, user,) cmd_create_user = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"grant all on %s.* to '%s'@'%%' identified by '%s';\"" % ( mysql_user, mysql_password, 3306, user, user, password,) run(cmd_create_database) run(cmd_create_user) # Local Variables: ** # comment-column: 56 ** # indent-tabs-mode: nil ** # python-indent: 4 ** # End: **
// ... existing code ... from fabric.api import run // ... modified code ... password = _generate_password() cmd_create_database = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"create database %s;\"" % ( mysql_user, mysql_password, 3306, user,) cmd_create_user = "/usr/bin/mysql -h localhost -u '%s' '--password=%s' -P %s -e \"grant all on %s.* to '%s'@'%%' identified by '%s';\"" % ( mysql_user, mysql_password, 3306, user, user, password,) run(cmd_create_database) run(cmd_create_user) // ... rest of the code ...
d1e2dc224b7b922d39f0f8f21affe39985769315
src/loader.py
src/loader.py
from scipy.io import loadmat def load_clean_data(): data = loadmat('data/cleandata_students.mat') return data['x'], data['y'] def load_noisy_data(): data = loadmat('data/noisydata_students.mat') return data['x'], data['y'] if __name__ == '__main__': print('Clean Data:') x, y = load_clean_data() print('x:', x) print('y:', y) print() print('Noisy Data:') x, y = load_noisy_data() print('x:', x) print('y:', y)
from scipy.io import loadmat def load_data(data_file): data = loadmat(data_file) return data['x'], data['y'] if __name__ == '__main__': print('Clean Data:') x, y = load_data('data/cleandata_students.mat') print('x:', x) print('y:', y) print() print('Noisy Data:') x, y = load_data('data/noisydata_students.mat') print('x:', x) print('y:', y)
Remove hard-coded data file path
Remove hard-coded data file path
Python
mit
MLNotWar/decision-trees-algorithm,MLNotWar/decision-trees-algorithm
from scipy.io import loadmat + def load_data(data_file): + data = loadmat(data_file) - def load_clean_data(): - data = loadmat('data/cleandata_students.mat') - return data['x'], data['y'] - - def load_noisy_data(): - data = loadmat('data/noisydata_students.mat') return data['x'], data['y'] if __name__ == '__main__': print('Clean Data:') - x, y = load_clean_data() + x, y = load_data('data/cleandata_students.mat') print('x:', x) print('y:', y) print() print('Noisy Data:') - x, y = load_noisy_data() + x, y = load_data('data/noisydata_students.mat') print('x:', x) print('y:', y)
Remove hard-coded data file path
## Code Before: from scipy.io import loadmat def load_clean_data(): data = loadmat('data/cleandata_students.mat') return data['x'], data['y'] def load_noisy_data(): data = loadmat('data/noisydata_students.mat') return data['x'], data['y'] if __name__ == '__main__': print('Clean Data:') x, y = load_clean_data() print('x:', x) print('y:', y) print() print('Noisy Data:') x, y = load_noisy_data() print('x:', x) print('y:', y) ## Instruction: Remove hard-coded data file path ## Code After: from scipy.io import loadmat def load_data(data_file): data = loadmat(data_file) return data['x'], data['y'] if __name__ == '__main__': print('Clean Data:') x, y = load_data('data/cleandata_students.mat') print('x:', x) print('y:', y) print() print('Noisy Data:') x, y = load_data('data/noisydata_students.mat') print('x:', x) print('y:', y)
# ... existing code ... def load_data(data_file): data = loadmat(data_file) return data['x'], data['y'] # ... modified code ... print('Clean Data:') x, y = load_data('data/cleandata_students.mat') print('x:', x) ... print('Noisy Data:') x, y = load_data('data/noisydata_students.mat') print('x:', x) # ... rest of the code ...
0d33cf650480ea7b71e13ef67b566fc6ec1c93ee
demo/demo/todos/models.py
demo/demo/todos/models.py
from django.db import models class Todo(models.Model): name = models.CharField(max_length=200) complete = models.BooleanField()
from django.db import models class Todo(models.Model): name = models.CharField(max_length=200)
Remove "complete" boolean from demo todo model.
Remove "complete" boolean from demo todo model.
Python
bsd-3-clause
jgerigmeyer/jquery-django-superformset,jgerigmeyer/jquery-django-superformset
from django.db import models class Todo(models.Model): name = models.CharField(max_length=200) - complete = models.BooleanField()
Remove "complete" boolean from demo todo model.
## Code Before: from django.db import models class Todo(models.Model): name = models.CharField(max_length=200) complete = models.BooleanField() ## Instruction: Remove "complete" boolean from demo todo model. ## Code After: from django.db import models class Todo(models.Model): name = models.CharField(max_length=200)
# ... existing code ... name = models.CharField(max_length=200) # ... rest of the code ...
aa77e74c02ec7276c233454806d55fdb32899a13
__init__.py
__init__.py
from . import advection from . import cascade from . import io from . import noise from . import nowcasts from . import optflow from . import postprocessing from . import timeseries from . import utils from . import verification from . import visualization
from . import advection from . import cascade from . import io from . import noise from . import nowcasts from . import optflow from . import postprocessing from . import timeseries from . import utils from . import verification as vf from . import visualization as plt
Use namespaces plt and vf for visualization and verification modules
Use namespaces plt and vf for visualization and verification modules
Python
bsd-3-clause
pySTEPS/pysteps
from . import advection from . import cascade from . import io from . import noise from . import nowcasts from . import optflow from . import postprocessing from . import timeseries from . import utils - from . import verification + from . import verification as vf - from . import visualization + from . import visualization as plt
Use namespaces plt and vf for visualization and verification modules
## Code Before: from . import advection from . import cascade from . import io from . import noise from . import nowcasts from . import optflow from . import postprocessing from . import timeseries from . import utils from . import verification from . import visualization ## Instruction: Use namespaces plt and vf for visualization and verification modules ## Code After: from . import advection from . import cascade from . import io from . import noise from . import nowcasts from . import optflow from . import postprocessing from . import timeseries from . import utils from . import verification as vf from . import visualization as plt
... from . import utils from . import verification as vf from . import visualization as plt ...
3e6e485443a901660a461dbbc8b324bfe4c19c8f
tests/v5/conftest.py
tests/v5/conftest.py
import pytest from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_GENERATORS = [ Constant("quux"), Boolean(p=0.3), ] @pytest.fixture def exemplar_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_GENERATORS
import pytest from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Boolean(p=0.3), ] @pytest.fixture def exemplar_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_PRIMITIVE_GENERATORS @pytest.fixture def exemplar_primitive_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_PRIMITIVE_GENERATORS
Add fixture for exemplar primitive generators
Add fixture for exemplar primitive generators
Python
mit
maxalbert/tohu
import pytest from .context import tohu from tohu.v5.primitive_generators import * - EXEMPLAR_GENERATORS = [ + EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Boolean(p=0.3), ] @pytest.fixture def exemplar_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ - return EXEMPLAR_GENERATORS + return EXEMPLAR_PRIMITIVE_GENERATORS + + + @pytest.fixture + def exemplar_primitive_generators(): + """ + Return a list of generators which contains an example + for each type of generator supported by tohu. + """ + return EXEMPLAR_PRIMITIVE_GENERATORS
Add fixture for exemplar primitive generators
## Code Before: import pytest from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_GENERATORS = [ Constant("quux"), Boolean(p=0.3), ] @pytest.fixture def exemplar_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_GENERATORS ## Instruction: Add fixture for exemplar primitive generators ## Code After: import pytest from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Boolean(p=0.3), ] @pytest.fixture def exemplar_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_PRIMITIVE_GENERATORS @pytest.fixture def exemplar_primitive_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_PRIMITIVE_GENERATORS
# ... existing code ... EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), # ... modified code ... """ return EXEMPLAR_PRIMITIVE_GENERATORS @pytest.fixture def exemplar_primitive_generators(): """ Return a list of generators which contains an example for each type of generator supported by tohu. """ return EXEMPLAR_PRIMITIVE_GENERATORS # ... rest of the code ...
14e9bda5de10ef5a1c6dd96692d083f4e0f16025
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
import yaml from yaml import SafeLoader yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
import yaml # Unsafe: yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, yaml.Loader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput # Safe yaml.load(payload, yaml.SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML # load_all variants yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
Refactor PyYAML tests a bit
Python: Refactor PyYAML tests a bit
Python
mit
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
import yaml - from yaml import SafeLoader + # Unsafe: yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput - yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML + yaml.load(payload, yaml.Loader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput - yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML - yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML - - yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput + # Safe + yaml.load(payload, yaml.SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML + yaml.load(payload, Loader=yaml.SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML + yaml.load(payload, yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML + yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML + + # load_all variants yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
Refactor PyYAML tests a bit
## Code Before: import yaml from yaml import SafeLoader yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput ## Instruction: Refactor PyYAML tests a bit ## Code After: import yaml # Unsafe: yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, yaml.Loader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput # Safe yaml.load(payload, yaml.SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML # load_all variants yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
... import yaml # Unsafe: yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, yaml.Loader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput ... # Safe yaml.load(payload, yaml.SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML # load_all variants yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput ...
4b6756bd8305190a5d1dc1d2e8e9a0b94d5baa40
tests/test_grid.py
tests/test_grid.py
import pytest from aimaPy.grid import * compare = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) def test_distance(): assert distance((1, 2), (5, 5)) == 5.0 def test_distance_squared(): assert distance_squared((1, 2), (5, 5)) == 25.0 def test_clip(): list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]] res = [0, 0.5, 1] assert compare(list_, res) def test_vector_clip(): assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9) if __name__ == '__main__': pytest.main()
import pytest from aimaPy.grid import * compare_list = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) def test_distance(): assert distance((1, 2), (5, 5)) == 5.0 def test_distance_squared(): assert distance_squared((1, 2), (5, 5)) == 25.0 def test_clip(): list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]] res = [0, 0.5, 1] assert compare_list(list_, res) def test_vector_clip(): assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9) if __name__ == '__main__': pytest.main()
Change name of compare function in test grid
Change name of compare function in test grid
Python
mit
phaller0513/aima-python,AWPorter/aima-python,grantvk/aima-python,SeanCameronConklin/aima-python,SeanCameronConklin/aima-python,SnShine/aima-python,AWPorter/aima-python,chandlercr/aima-python,NolanBecker/aima-python,jottenlips/aima-python,WmHHooper/aima-python,grantvk/aima-python,AmberJBlue/aima-python,jottenlips/aima-python,Chipe1/aima-python,willhess/aima-python,aimacode/aima-python,abbeymiles/aima-python,JoeLaMartina/aima-python,abbeymiles/aima-python,SimeonFritz/aima-python,armadill-odyssey/aima-python,reachtarunhere/aima-python,reachtarunhere/aima-python,SimeonFritz/aima-python,NolanBecker/aima-python,abbeymiles/aima-python,MircoT/aima-python,JoeLaMartina/aima-python,JamesDickenson/aima-python,Fruit-Snacks/aima-python,WmHHooper/aima-python,SnShine/aima-python,AmberJBlue/aima-python,JoeLaMartina/AlphametricProject,austinban/aima-python,chandlercr/aima-python,WhittKinley/aima-python,WhittKinley/ConnectProject,JamesDickenson/aima-python,chandlercr/aima-python,WmHHooper/aima-python,armadill-odyssey/aima-python,SimeonFritz/aima-python,armadill-odyssey/aima-python,Chipe1/aima-python,zayneanderson/aima-python,aimacode/aima-python,austinban/aima-python,WhittKinley/aima-python,WmHHooper/aima-python,jottenlips/aima-python,sofmonk/aima-python,phaller0513/aima-python,jo-tez/aima-python,zayneanderson/aima-python,Fruit-Snacks/aima-python,JoeLaMartina/aima-python,willhess/aima-python,sofmonk/aima-python,WhittKinley/aima-python,JamesDickenson/aima-python,JoeLaMartina/AlphametricProject,SeanCameronConklin/aima-python,phaller0513/aima-python,zayneanderson/aima-python,austinban/aima-python,WhittKinley/ConnectProject,JoeLaMartina/AlphametricProject,Fruit-Snacks/aima-python,AWPorter/aima-python,grantvk/aima-python,jo-tez/aima-python,willhess/aima-python,NolanBecker/aima-python,AmberJBlue/aima-python,WhittKinley/ConnectProject
import pytest from aimaPy.grid import * - compare = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) + compare_list = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) def test_distance(): assert distance((1, 2), (5, 5)) == 5.0 def test_distance_squared(): assert distance_squared((1, 2), (5, 5)) == 25.0 def test_clip(): list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]] res = [0, 0.5, 1] - assert compare(list_, res) + assert compare_list(list_, res) def test_vector_clip(): assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9) if __name__ == '__main__': pytest.main()
Change name of compare function in test grid
## Code Before: import pytest from aimaPy.grid import * compare = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) def test_distance(): assert distance((1, 2), (5, 5)) == 5.0 def test_distance_squared(): assert distance_squared((1, 2), (5, 5)) == 25.0 def test_clip(): list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]] res = [0, 0.5, 1] assert compare(list_, res) def test_vector_clip(): assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9) if __name__ == '__main__': pytest.main() ## Instruction: Change name of compare function in test grid ## Code After: import pytest from aimaPy.grid import * compare_list = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) def test_distance(): assert distance((1, 2), (5, 5)) == 5.0 def test_distance_squared(): assert distance_squared((1, 2), (5, 5)) == 25.0 def test_clip(): list_ = [clip(x, 0, 1) for x in [-1, 0.5, 10]] res = [0, 0.5, 1] assert compare_list(list_, res) def test_vector_clip(): assert vector_clip((-1, 10), (0, 0), (9, 9)) == (0, 9) if __name__ == '__main__': pytest.main()
... compare_list = lambda x, y: all([elm_x == y[i] for i, elm_x in enumerate(x)]) ... assert compare_list(list_, res) ...
7321ed72469ad4b9eaf7b1feda370472c294fa97
django_backend_test/noras_menu/forms.py
django_backend_test/noras_menu/forms.py
from django import forms #Third Party apps imports #Imports local apps from .models import Menu, MenuItems
from datetime import date #Core Django Imports from django import forms from django.forms.models import inlineformset_factory #Third Party apps imports #Imports local apps from .models import Menu, MenuItems, UserSelectedLunch, Subscribers class MenuForm(forms.ModelForm): day = forms.DateField(label='Menu date', input_formats=['%d-%m-%Y']) class Meta: model = Menu fields = '__all__' MenuItemsFormSet = inlineformset_factory(Menu, MenuItems, fields=('name','menu',)) class MenuSelectForm(forms.ModelForm): class Meta: model = UserSelectedLunch fields = '__all__' class SubscribersForm(forms.ModelForm): class Meta: model = Subscribers fields = '__all__'
Add ModelForms for Menu, MenuItems and Subscribers
Add ModelForms for Menu, MenuItems and Subscribers
Python
mit
semorale/backend-test,semorale/backend-test,semorale/backend-test
+ from datetime import date + + #Core Django Imports from django import forms + from django.forms.models import inlineformset_factory #Third Party apps imports #Imports local apps - from .models import Menu, MenuItems + from .models import Menu, MenuItems, UserSelectedLunch, Subscribers + class MenuForm(forms.ModelForm): + day = forms.DateField(label='Menu date', input_formats=['%d-%m-%Y']) + + class Meta: + model = Menu + fields = '__all__' + + MenuItemsFormSet = inlineformset_factory(Menu, MenuItems, fields=('name','menu',)) + + class MenuSelectForm(forms.ModelForm): + + class Meta: + model = UserSelectedLunch + fields = '__all__' + + class SubscribersForm(forms.ModelForm): + + class Meta: + model = Subscribers + fields = '__all__'
Add ModelForms for Menu, MenuItems and Subscribers
## Code Before: from django import forms #Third Party apps imports #Imports local apps from .models import Menu, MenuItems ## Instruction: Add ModelForm of Menu, MenuItems and Subscriber ## Code After: from datetime import date #Core Django Imports from django import forms from django.forms.models import inlineformset_factory #Third Party apps imports #Imports local apps from .models import Menu, MenuItems, UserSelectedLunch, Subscribers class MenuForm(forms.ModelForm): day = forms.DateField(label='Menu date', input_formats=['%d-%m-%Y']) class Meta: model = Menu fields = '__all__' MenuItemsFormSet = inlineformset_factory(Menu, MenuItems, fields=('name','menu',)) class MenuSelectForm(forms.ModelForm): class Meta: model = UserSelectedLunch fields = '__all__' class SubscribersForm(forms.ModelForm): class Meta: model = Subscribers fields = '__all__'
... from datetime import date #Core Django Imports from django import forms from django.forms.models import inlineformset_factory #Third Party apps imports ... #Imports local apps from .models import Menu, MenuItems, UserSelectedLunch, Subscribers class MenuForm(forms.ModelForm): day = forms.DateField(label='Menu date', input_formats=['%d-%m-%Y']) class Meta: model = Menu fields = '__all__' MenuItemsFormSet = inlineformset_factory(Menu, MenuItems, fields=('name','menu',)) class MenuSelectForm(forms.ModelForm): class Meta: model = UserSelectedLunch fields = '__all__' class SubscribersForm(forms.ModelForm): class Meta: model = Subscribers fields = '__all__' ...
92d0a09cfb232270d04f82eccc451ee63bd7901a
dev/TOPSECRET/SirBot/lib/sirbot/shutdown.py
dev/TOPSECRET/SirBot/lib/sirbot/shutdown.py
from json import dumps def shutdown(config,interinput=None,interoutput=None): #check for lingering runtime errors #finishing writing log queues to file #if none: write clean.start file in config directory if(config['Interface']['remember position'] == 0): config['Interface']['map'] = '620x540+50+50' configPath = config['Path']+'\\config\\sirbot\\config' configFile = open(configPath,"wb+") configData = dumps(config) configFile.write(configData) configFile.close() #perhaps add garbage collector control here?
def shutdown(config,interinput,interoutput): #check for lingering runtime errors #finishing writing log queues to file #if none: write clean.start file in config directory pass #perhaps add garbage collector control here?
Revert "changes to config during runtime are now saved"
Revert "changes to config during runtime are now saved" This reverts commit e09a780da17bb2f97d20aafc2c007fe3fc3051bb.
Python
mit
SirRujak/SirBot
- from json import dumps - - def shutdown(config,interinput=None,interoutput=None): + def shutdown(config,interinput,interoutput): #check for lingering runtime errors #finishing writing log queues to file #if none: write clean.start file in config directory + pass - if(config['Interface']['remember position'] == 0): - config['Interface']['map'] = '620x540+50+50' - - configPath = config['Path']+'\\config\\sirbot\\config' - configFile = open(configPath,"wb+") - configData = dumps(config) - configFile.write(configData) - configFile.close() #perhaps add garbage collector control here?
Revert "changes to config during runtime are now saved"
## Code Before: from json import dumps def shutdown(config,interinput=None,interoutput=None): #check for lingering runtime errors #finishing writing log queues to file #if none: write clean.start file in config directory if(config['Interface']['remember position'] == 0): config['Interface']['map'] = '620x540+50+50' configPath = config['Path']+'\\config\\sirbot\\config' configFile = open(configPath,"wb+") configData = dumps(config) configFile.write(configData) configFile.close() #perhaps add garbage collector control here? ## Instruction: Revert "changes to config during runtime are now saved" ## Code After: def shutdown(config,interinput,interoutput): #check for lingering runtime errors #finishing writing log queues to file #if none: write clean.start file in config directory pass #perhaps add garbage collector control here?
// ... existing code ... def shutdown(config,interinput,interoutput): #check for lingering runtime errors // ... modified code ... #if none: write clean.start file in config directory pass // ... rest of the code ...
351e88dd95db81418cc6d2deb4a943e2659292bc
wsgi.py
wsgi.py
import os import sys import site VIRTUALENV="venv" # Get site root from this file's location: SITE_ROOT=os.path.abspath(os.path.dirname(__file__)) # Add virtualenv path to site package root: site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages")) site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages")) # Add site package root to start of pythonpath: sys.path.insert(0, SITE_ROOT) # celery should now be available (on the virtualenv path) import djcelery djcelery.setup_loader() # Point Django to settings file: os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings' from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
import os # celery should now be available (on the virtualenv path) import djcelery djcelery.setup_loader() # Point Django to settings file: os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings' from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
Remove virtualenv setup from WSGI entrypoint
Remove virtualenv setup from WSGI entrypoint Handle it in front end server instead.
Python
agpl-3.0
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
import os - import sys - import site - - VIRTUALENV="venv" - - # Get site root from this file's location: - SITE_ROOT=os.path.abspath(os.path.dirname(__file__)) - - # Add virtualenv path to site package root: - site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages")) - site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages")) - - # Add site package root to start of pythonpath: - sys.path.insert(0, SITE_ROOT) # celery should now be available (on the virtualenv path) import djcelery djcelery.setup_loader() # Point Django to settings file: os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings' from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
Remove virtualenv setup from WSGI entrypoint
## Code Before: import os import sys import site VIRTUALENV="venv" # Get site root from this file's location: SITE_ROOT=os.path.abspath(os.path.dirname(__file__)) # Add virtualenv path to site package root: site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.7/site-packages")) site.addsitedir(os.path.join(SITE_ROOT, VIRTUALENV, "lib/python2.6/site-packages")) # Add site package root to start of pythonpath: sys.path.insert(0, SITE_ROOT) # celery should now be available (on the virtualenv path) import djcelery djcelery.setup_loader() # Point Django to settings file: os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings' from django.core.wsgi import get_wsgi_application application = get_wsgi_application() ## Instruction: Remove virtualenv setup from WSGI entrypoint ## Code After: import os # celery should now be available (on the virtualenv path) import djcelery djcelery.setup_loader() # Point Django to settings file: os.environ['DJANGO_SETTINGS_MODULE'] = 'toolkit.settings' from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
# ... existing code ... import os # ... rest of the code ...
d01b09256f8fda4b222f3e26366817f4ac5b4c5a
zinnia/tests/test_admin_forms.py
zinnia/tests/test_admin_forms.py
"""Test cases for Zinnia's admin forms""" from django.test import TestCase from django.contrib.admin.widgets import RelatedFieldWidgetWrapper from zinnia.models import Category from zinnia.admin.forms import EntryAdminForm from zinnia.admin.forms import CategoryAdminForm class EntryAdminFormTestCase(TestCase): def test_categories_has_related_widget(self): form = EntryAdminForm() self.assertTrue( isinstance(form.fields['categories'].widget, RelatedFieldWidgetWrapper)) def test_initial_sites(self): form = EntryAdminForm() self.assertEqual( len(form.fields['sites'].initial), 1) class CategoryAdminFormTestCase(TestCase): def test_parent_has_related_widget(self): form = CategoryAdminForm() self.assertTrue( isinstance(form.fields['parent'].widget, RelatedFieldWidgetWrapper)) def test_clean_parent(self): category = Category.objects.create( title='Category 1', slug='cat-1') datas = {'parent': category.pk, 'title': category.title, 'slug': category.slug} form = CategoryAdminForm(datas, instance=category) self.assertFalse(form.is_valid()) self.assertEqual(len(form.errors['parent']), 1) subcategory = Category.objects.create( title='Category 2', slug='cat-2') self.assertEqual(subcategory.parent, None) datas = {'parent': category.pk, 'title': subcategory.title, 'slug': subcategory.slug} form = CategoryAdminForm(datas, instance=subcategory) self.assertTrue(form.is_valid())
"""Test cases for Zinnia's admin forms""" from django.test import TestCase from django.contrib.admin.widgets import RelatedFieldWidgetWrapper from zinnia.models import Category from zinnia.admin.forms import EntryAdminForm from zinnia.admin.forms import CategoryAdminForm class EntryAdminFormTestCase(TestCase): def test_categories_has_related_widget(self): form = EntryAdminForm() self.assertTrue( isinstance(form.fields['categories'].widget, RelatedFieldWidgetWrapper)) class CategoryAdminFormTestCase(TestCase): def test_parent_has_related_widget(self): form = CategoryAdminForm() self.assertTrue( isinstance(form.fields['parent'].widget, RelatedFieldWidgetWrapper)) def test_clean_parent(self): category = Category.objects.create( title='Category 1', slug='cat-1') datas = {'parent': category.pk, 'title': category.title, 'slug': category.slug} form = CategoryAdminForm(datas, instance=category) self.assertFalse(form.is_valid()) self.assertEqual(len(form.errors['parent']), 1) subcategory = Category.objects.create( title='Category 2', slug='cat-2') self.assertEqual(subcategory.parent, None) datas = {'parent': category.pk, 'title': subcategory.title, 'slug': subcategory.slug} form = CategoryAdminForm(datas, instance=subcategory) self.assertTrue(form.is_valid())
Remove now useless test for initial sites value in form
Remove now useless test for initial sites value in form
Python
bsd-3-clause
extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,Zopieux/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,bywbilly/django-blog-zinnia,dapeng0802/django-blog-zinnia,Zopieux/django-blog-zinnia,aorzh/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,aorzh/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,Fantomas42/django-blog-zinnia,marctc/django-blog-zinnia,petecummings/django-blog-zinnia,ZuluPro/django-blog-zinnia,ZuluPro/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,Maplecroft/django-blog-zinnia,petecummings/django-blog-zinnia,marctc/django-blog-zinnia,bywbilly/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,marctc/django-blog-zinnia
"""Test cases for Zinnia's admin forms""" from django.test import TestCase from django.contrib.admin.widgets import RelatedFieldWidgetWrapper from zinnia.models import Category from zinnia.admin.forms import EntryAdminForm from zinnia.admin.forms import CategoryAdminForm class EntryAdminFormTestCase(TestCase): def test_categories_has_related_widget(self): form = EntryAdminForm() self.assertTrue( isinstance(form.fields['categories'].widget, RelatedFieldWidgetWrapper)) - - def test_initial_sites(self): - form = EntryAdminForm() - self.assertEqual( - len(form.fields['sites'].initial), 1) class CategoryAdminFormTestCase(TestCase): def test_parent_has_related_widget(self): form = CategoryAdminForm() self.assertTrue( isinstance(form.fields['parent'].widget, RelatedFieldWidgetWrapper)) def test_clean_parent(self): category = Category.objects.create( title='Category 1', slug='cat-1') datas = {'parent': category.pk, 'title': category.title, 'slug': category.slug} form = CategoryAdminForm(datas, instance=category) self.assertFalse(form.is_valid()) self.assertEqual(len(form.errors['parent']), 1) subcategory = Category.objects.create( title='Category 2', slug='cat-2') self.assertEqual(subcategory.parent, None) datas = {'parent': category.pk, 'title': subcategory.title, 'slug': subcategory.slug} form = CategoryAdminForm(datas, instance=subcategory) self.assertTrue(form.is_valid())
Remove now useless test for initial sites value in form
## Code Before: """Test cases for Zinnia's admin forms""" from django.test import TestCase from django.contrib.admin.widgets import RelatedFieldWidgetWrapper from zinnia.models import Category from zinnia.admin.forms import EntryAdminForm from zinnia.admin.forms import CategoryAdminForm class EntryAdminFormTestCase(TestCase): def test_categories_has_related_widget(self): form = EntryAdminForm() self.assertTrue( isinstance(form.fields['categories'].widget, RelatedFieldWidgetWrapper)) def test_initial_sites(self): form = EntryAdminForm() self.assertEqual( len(form.fields['sites'].initial), 1) class CategoryAdminFormTestCase(TestCase): def test_parent_has_related_widget(self): form = CategoryAdminForm() self.assertTrue( isinstance(form.fields['parent'].widget, RelatedFieldWidgetWrapper)) def test_clean_parent(self): category = Category.objects.create( title='Category 1', slug='cat-1') datas = {'parent': category.pk, 'title': category.title, 'slug': category.slug} form = CategoryAdminForm(datas, instance=category) self.assertFalse(form.is_valid()) self.assertEqual(len(form.errors['parent']), 1) subcategory = Category.objects.create( title='Category 2', slug='cat-2') self.assertEqual(subcategory.parent, None) datas = {'parent': category.pk, 'title': subcategory.title, 'slug': subcategory.slug} form = CategoryAdminForm(datas, instance=subcategory) self.assertTrue(form.is_valid()) ## Instruction: Remove now useless test for initial sites value in form ## Code After: """Test cases for Zinnia's admin forms""" from django.test import TestCase from django.contrib.admin.widgets import RelatedFieldWidgetWrapper from zinnia.models import Category from zinnia.admin.forms import EntryAdminForm from zinnia.admin.forms import CategoryAdminForm class EntryAdminFormTestCase(TestCase): def test_categories_has_related_widget(self): form = EntryAdminForm() self.assertTrue( isinstance(form.fields['categories'].widget, RelatedFieldWidgetWrapper)) class CategoryAdminFormTestCase(TestCase): def test_parent_has_related_widget(self): form = CategoryAdminForm() self.assertTrue( isinstance(form.fields['parent'].widget, RelatedFieldWidgetWrapper)) def test_clean_parent(self): category = Category.objects.create( title='Category 1', slug='cat-1') datas = {'parent': category.pk, 'title': category.title, 'slug': category.slug} form = CategoryAdminForm(datas, instance=category) self.assertFalse(form.is_valid()) self.assertEqual(len(form.errors['parent']), 1) subcategory = Category.objects.create( title='Category 2', slug='cat-2') self.assertEqual(subcategory.parent, None) datas = {'parent': category.pk, 'title': subcategory.title, 'slug': subcategory.slug} form = CategoryAdminForm(datas, instance=subcategory) self.assertTrue(form.is_valid())
... RelatedFieldWidgetWrapper)) ...
0463d8937f9efd571f3ad6846f6d1f351fcfe8e1
px/px_cpuinfo.py
px/px_cpuinfo.py
def get_core_count(): """ Count the number of cores in the system. Returns a tuple (physical, logical) with counts of physical and logical cores. """ pass def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"): pass def get_core_count_from_sysctl(): pass
import os import errno import subprocess def get_core_count(): """ Count the number of cores in the system. Returns a tuple (physical, logical) with counts of physical and logical cores. """ pass def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"): pass def get_core_count_from_sysctl(): env = os.environ.copy() if "LANG" in env: del env["LANG"] try: sysctl = subprocess.Popen(["sysctl", 'hw'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) except OSError as e: if e.errno == errno.ENOENT: # sysctl not found, we're probably not on OSX return None raise sysctl_stdout = sysctl.communicate()[0].decode('utf-8') sysctl_lines = sysctl_stdout.split('\n') # Note the ending spaces, they must be there for number extraction to work! PHYSICAL_PREFIX = 'hw.physicalcpu: ' LOGICAL_PREFIX = 'hw.logicalcpu: ' physical = None logical = None for line in sysctl_lines: if line.startswith(PHYSICAL_PREFIX): physical = int(line[len(PHYSICAL_PREFIX):]) elif line.startswith(LOGICAL_PREFIX): logical = int(line[len(LOGICAL_PREFIX)]) return (physical, logical)
Implement core counting on OS X
Implement core counting on OS X
Python
mit
walles/px,walles/px
+ import os + import errno + import subprocess + + def get_core_count(): """ Count the number of cores in the system. Returns a tuple (physical, logical) with counts of physical and logical cores. """ pass def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"): pass + def get_core_count_from_sysctl(): - pass + env = os.environ.copy() + if "LANG" in env: + del env["LANG"] + try: + sysctl = subprocess.Popen(["sysctl", 'hw'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + env=env) + except OSError as e: + if e.errno == errno.ENOENT: + # sysctl not found, we're probably not on OSX + return None + + raise + + sysctl_stdout = sysctl.communicate()[0].decode('utf-8') + sysctl_lines = sysctl_stdout.split('\n') + + # Note the ending spaces, they must be there for number extraction to work! + PHYSICAL_PREFIX = 'hw.physicalcpu: ' + LOGICAL_PREFIX = 'hw.logicalcpu: ' + + physical = None + logical = None + for line in sysctl_lines: + if line.startswith(PHYSICAL_PREFIX): + physical = int(line[len(PHYSICAL_PREFIX):]) + elif line.startswith(LOGICAL_PREFIX): + logical = int(line[len(LOGICAL_PREFIX)]) + + return (physical, logical) +
Implement core counting on OS X
## Code Before: def get_core_count(): """ Count the number of cores in the system. Returns a tuple (physical, logical) with counts of physical and logical cores. """ pass def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"): pass def get_core_count_from_sysctl(): pass ## Instruction: Implement core counting of OS X ## Code After: import os import errno import subprocess def get_core_count(): """ Count the number of cores in the system. Returns a tuple (physical, logical) with counts of physical and logical cores. """ pass def get_core_count_from_proc_cpuinfo(proc_cpuinfo="/proc/cpuinfo"): pass def get_core_count_from_sysctl(): env = os.environ.copy() if "LANG" in env: del env["LANG"] try: sysctl = subprocess.Popen(["sysctl", 'hw'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) except OSError as e: if e.errno == errno.ENOENT: # sysctl not found, we're probably not on OSX return None raise sysctl_stdout = sysctl.communicate()[0].decode('utf-8') sysctl_lines = sysctl_stdout.split('\n') # Note the ending spaces, they must be there for number extraction to work! PHYSICAL_PREFIX = 'hw.physicalcpu: ' LOGICAL_PREFIX = 'hw.logicalcpu: ' physical = None logical = None for line in sysctl_lines: if line.startswith(PHYSICAL_PREFIX): physical = int(line[len(PHYSICAL_PREFIX):]) elif line.startswith(LOGICAL_PREFIX): logical = int(line[len(LOGICAL_PREFIX)]) return (physical, logical)
# ... existing code ... import os import errno import subprocess def get_core_count(): # ... modified code ... def get_core_count_from_sysctl(): env = os.environ.copy() if "LANG" in env: del env["LANG"] try: sysctl = subprocess.Popen(["sysctl", 'hw'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) except OSError as e: if e.errno == errno.ENOENT: # sysctl not found, we're probably not on OSX return None raise sysctl_stdout = sysctl.communicate()[0].decode('utf-8') sysctl_lines = sysctl_stdout.split('\n') # Note the ending spaces, they must be there for number extraction to work! PHYSICAL_PREFIX = 'hw.physicalcpu: ' LOGICAL_PREFIX = 'hw.logicalcpu: ' physical = None logical = None for line in sysctl_lines: if line.startswith(PHYSICAL_PREFIX): physical = int(line[len(PHYSICAL_PREFIX):]) elif line.startswith(LOGICAL_PREFIX): logical = int(line[len(LOGICAL_PREFIX)]) return (physical, logical) # ... rest of the code ...
e9eb29d300d4072a32d824d4f588ff76a905bb89
gunicorn_settings.py
gunicorn_settings.py
bind = '127.0.0.1:8001' workers = 2 worker_class = 'gevent' timeout = 30 keepalive = 2 errorlog = '-'
workers = 2 worker_class = 'gevent' timeout = 30 keepalive = 2 errorlog = '-'
Use IP and PORT environment variables if set
Use IP and PORT environment variables if set
Python
apache-2.0
notapresent/rbm2m,notapresent/rbm2m
- bind = '127.0.0.1:8001' workers = 2 worker_class = 'gevent' timeout = 30 keepalive = 2 errorlog = '-'
Use IP and PORT environment variables if set
## Code Before: bind = '127.0.0.1:8001' workers = 2 worker_class = 'gevent' timeout = 30 keepalive = 2 errorlog = '-' ## Instruction: Use IP and PORT environment variables if set ## Code After: workers = 2 worker_class = 'gevent' timeout = 30 keepalive = 2 errorlog = '-'
// ... existing code ... workers = 2 // ... rest of the code ...
01ebdc54886f01a9aa58098c8987b0ce7620706a
simplestatistics/statistics/standard_deviation.py
simplestatistics/statistics/standard_deviation.py
import math from .variance import variance def standard_deviation(data): """ The `standard deviation`_ is the square root of variance_ (the sum of squared deviations from the mean). The standard deviation is a commonly used measure of the variation and distance of a set of values in a sample from the mean of the sample. .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation .. _variance: http://en.wikipedia.org/wiki/Variance Equation: .. math:: \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}} In English: - Obtain the difference between each value and the mean. - Square those values. - Sum the squared values. - Divide by the number of values - 1 (to correct for the sampling). - Obtain the square root of the result. Args: data: A list of numerical objects. Returns: A float object. Examples: >>> standard_deviation([1, 2, 3]) 1.0 >>> standard_deviation([1, 2, 3, 4]) 1.2909944487358056 >>> standard_deviation([-1, 0, 1, 2, 3, 4]) 1.8708286933869707 """ return math.sqrt(variance(data))
import math from .variance import variance def standard_deviation(data, sample = True): """ The `standard deviation`_ is the square root of variance_ (the sum of squared deviations from the mean). The standard deviation is a commonly used measure of the variation and distance of a set of values in a sample from the mean of the sample. .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation .. _variance: http://en.wikipedia.org/wiki/Variance Equation: .. math:: \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}} In English: - Obtain the difference between each value and the mean. - Square those values. - Sum the squared values. - Divide by the number of values - 1 (to correct for the sampling). - Obtain the square root of the result. Args: data: A list of numerical objects. sample: A boolean value. If True, calculates standard deviation for sample. If False, calculates standard deviation for population. Returns: A float object. Examples: >>> standard_deviation([1, 2, 3]) 1.0 >>> ss.standard_deviation([1, 2, 3], False) 0.816496580927726 >>> standard_deviation([1, 2, 3, 4]) 1.2909944487358056 >>> standard_deviation([-1, 0, 1, 2, 3, 4]) 1.8708286933869707 """ return math.sqrt(variance(data, sample))
Add sample param to Standard Deviation function
Add sample param to Standard Deviation function. Boolean param to make it possible to calculate Standard Deviation for population (default is sample).
Python
unknown
sheriferson/simple-statistics-py,tmcw/simple-statistics-py,sheriferson/simplestatistics
import math from .variance import variance - def standard_deviation(data): + def standard_deviation(data, sample = True): """ The `standard deviation`_ is the square root of variance_ (the sum of squared deviations from the mean). The standard deviation is a commonly used measure of the variation and distance of a set of values in a sample from the mean of the sample. .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation .. _variance: http://en.wikipedia.org/wiki/Variance Equation: .. math:: \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}} In English: - Obtain the difference between each value and the mean. - Square those values. - Sum the squared values. - Divide by the number of values - 1 (to correct for the sampling). - Obtain the square root of the result. Args: data: A list of numerical objects. + sample: A boolean value. If True, calculates standard deviation for + sample. If False, calculates standard deviation for population. Returns: A float object. Examples: >>> standard_deviation([1, 2, 3]) 1.0 + >>> ss.standard_deviation([1, 2, 3], False) + 0.816496580927726 >>> standard_deviation([1, 2, 3, 4]) 1.2909944487358056 >>> standard_deviation([-1, 0, 1, 2, 3, 4]) 1.8708286933869707 """ - return math.sqrt(variance(data)) + return math.sqrt(variance(data, sample)) - -
Add sample param to Standard Deviation function
## Code Before: import math from .variance import variance def standard_deviation(data): """ The `standard deviation`_ is the square root of variance_ (the sum of squared deviations from the mean). The standard deviation is a commonly used measure of the variation and distance of a set of values in a sample from the mean of the sample. .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation .. _variance: http://en.wikipedia.org/wiki/Variance Equation: .. math:: \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}} In English: - Obtain the difference between each value and the mean. - Square those values. - Sum the squared values. - Divide by the number of values - 1 (to correct for the sampling). - Obtain the square root of the result. Args: data: A list of numerical objects. Returns: A float object. Examples: >>> standard_deviation([1, 2, 3]) 1.0 >>> standard_deviation([1, 2, 3, 4]) 1.2909944487358056 >>> standard_deviation([-1, 0, 1, 2, 3, 4]) 1.8708286933869707 """ return math.sqrt(variance(data)) ## Instruction: Add sample param to Standard Deviation function ## Code After: import math from .variance import variance def standard_deviation(data, sample = True): """ The `standard deviation`_ is the square root of variance_ (the sum of squared deviations from the mean). The standard deviation is a commonly used measure of the variation and distance of a set of values in a sample from the mean of the sample. .. _`standard deviation`: https://en.wikipedia.org/wiki/Standard_deviation .. _variance: http://en.wikipedia.org/wiki/Variance Equation: .. math:: \\sigma = \\sqrt{\\frac{\\sum (x - \\mu)^2}{N - 1}} In English: - Obtain the difference between each value and the mean. - Square those values. - Sum the squared values. - Divide by the number of values - 1 (to correct for the sampling). - Obtain the square root of the result. Args: data: A list of numerical objects. sample: A boolean value. If True, calculates standard deviation for sample. If False, calculates standard deviation for population. Returns: A float object. Examples: >>> standard_deviation([1, 2, 3]) 1.0 >>> ss.standard_deviation([1, 2, 3], False) 0.816496580927726 >>> standard_deviation([1, 2, 3, 4]) 1.2909944487358056 >>> standard_deviation([-1, 0, 1, 2, 3, 4]) 1.8708286933869707 """ return math.sqrt(variance(data, sample))
... def standard_deviation(data, sample = True): """ ... data: A list of numerical objects. sample: A boolean value. If True, calculates standard deviation for sample. If False, calculates standard deviation for population. ... 1.0 >>> ss.standard_deviation([1, 2, 3], False) 0.816496580927726 >>> standard_deviation([1, 2, 3, 4]) ... """ return math.sqrt(variance(data, sample)) ...
08e2099f173bce115ba93c2b960bb1f09ef11269
models.py
models.py
from django.db import models from django.core.exceptions import ValidationError class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, unique=True) class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: try: self.order = self.max_order + 1 except: self.order = 1 # 0 is a special index used in swap if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() @classmethod def max_order(cls): return self.__class__.objects.order_by('-order')[0].order
from django.db import models from django.core.exceptions import ValidationError class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, unique=True) class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: try: self.order = self.max_order() + 1 except: self.order = 1 # 0 is a special index used in swap if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() @classmethod def max_order(cls): return cls.objects.order_by('-order')[0].order
Fix critical stupid copypaste error
Fix critical stupid copypaste error
Python
bsd-3-clause
MagicSolutions/django-orderedmodel,MagicSolutions/django-orderedmodel,kirelagin/django-orderedmodel
from django.db import models from django.core.exceptions import ValidationError class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, unique=True) class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: try: - self.order = self.max_order + 1 + self.order = self.max_order() + 1 except: self.order = 1 # 0 is a special index used in swap if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() @classmethod def max_order(cls): - return self.__class__.objects.order_by('-order')[0].order + return cls.objects.order_by('-order')[0].order
Fix critical stupid copypaste error
## Code Before: from django.db import models from django.core.exceptions import ValidationError class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, unique=True) class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: try: self.order = self.max_order + 1 except: self.order = 1 # 0 is a special index used in swap if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() @classmethod def max_order(cls): return self.__class__.objects.order_by('-order')[0].order ## Instruction: Fix critical stupid copypaste error ## Code After: from django.db import models from django.core.exceptions import ValidationError class OrderedModel(models.Model): order = models.PositiveIntegerField(blank=True, unique=True) class Meta: abstract = True ordering = ['order'] def save(self, swapping=False, *args, **kwargs): if not self.id: try: self.order = self.max_order() + 1 except: self.order = 1 # 0 is a special index used in swap if self.order == 0 and not swapping: raise ValidationError("Can't set 'order' to 0") super(OrderedModel, self).save(*args, **kwargs) @classmethod def swap(cls, obj1, obj2): tmp, obj2.order = obj2.order, 0 obj2.save(swapping=True) obj2.order, obj1.order = obj1.order, tmp obj1.save() obj2.save() @classmethod def max_order(cls): return cls.objects.order_by('-order')[0].order
# ... existing code ... try: self.order = self.max_order() + 1 except: # ... modified code ... def max_order(cls): return cls.objects.order_by('-order')[0].order # ... rest of the code ...
1cf1da043ceab767d9d0dbdbed62c2f1c5ff36e9
test_http.py
test_http.py
from http_server import HttpServer import socket def test_200_ok(): s = HttpServer() assert s.ok() == "HTTP/1.1 200 OK" def test_200_ok_byte(): s = HttpServer() assert isinstance(s.ok(), bytes) def test_socket_is_socket(): s = HttpServer() s.open_socket() assert isinstance(s._socket, socket.socket) def test_open_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() assert s._socket.getsockname() == ('127.0.0.1', 50000)
from http_server import HttpServer import socket def test_200_ok(): s = HttpServer() assert s.ok() == "HTTP/1.1 200 OK" def test_200_ok_byte(): s = HttpServer() assert isinstance(s.ok(), bytes) def test_socket_is_socket(): s = HttpServer() s.open_socket() assert isinstance(s._socket, socket.socket) def test_open_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() assert s._socket.getsockname() == ('127.0.0.1', 50000) def test_close_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() s.close_socket() assert s._socket is None
Add tests for closing a socket
Add tests for closing a socket
Python
mit
jefrailey/network_tools
from http_server import HttpServer import socket def test_200_ok(): s = HttpServer() assert s.ok() == "HTTP/1.1 200 OK" def test_200_ok_byte(): s = HttpServer() assert isinstance(s.ok(), bytes) def test_socket_is_socket(): s = HttpServer() s.open_socket() assert isinstance(s._socket, socket.socket) def test_open_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() assert s._socket.getsockname() == ('127.0.0.1', 50000) + + + def test_close_socket(): + s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) + s.open_socket() + s.close_socket() + assert s._socket is None
Add tests for closing a socket
## Code Before: from http_server import HttpServer import socket def test_200_ok(): s = HttpServer() assert s.ok() == "HTTP/1.1 200 OK" def test_200_ok_byte(): s = HttpServer() assert isinstance(s.ok(), bytes) def test_socket_is_socket(): s = HttpServer() s.open_socket() assert isinstance(s._socket, socket.socket) def test_open_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() assert s._socket.getsockname() == ('127.0.0.1', 50000) ## Instruction: Add tests for closing a socket ## Code After: from http_server import HttpServer import socket def test_200_ok(): s = HttpServer() assert s.ok() == "HTTP/1.1 200 OK" def test_200_ok_byte(): s = HttpServer() assert isinstance(s.ok(), bytes) def test_socket_is_socket(): s = HttpServer() s.open_socket() assert isinstance(s._socket, socket.socket) def test_open_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() assert s._socket.getsockname() == ('127.0.0.1', 50000) def test_close_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() s.close_socket() assert s._socket is None
... assert s._socket.getsockname() == ('127.0.0.1', 50000) def test_close_socket(): s = HttpServer(ip=u'127.0.0.1', port=50000, backlog=5) s.open_socket() s.close_socket() assert s._socket is None ...
7f0097d240c4a231029222fdd2bf507ca7d5b2ed
tests/v6/exemplar_generators.py
tests/v6/exemplar_generators.py
from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Integer(100, 200), ] EXEMPLAR_DERIVED_GENERATORS = [] EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS
from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Integer(100, 200), HashDigest(length=8), FakerGenerator(method="name"), ] EXEMPLAR_DERIVED_GENERATORS = [] EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS
Add exemplar generators for HashDigest, FakerGenerator
Add exemplar generators for HashDigest, FakerGenerator
Python
mit
maxalbert/tohu
from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Integer(100, 200), + HashDigest(length=8), + FakerGenerator(method="name"), ] EXEMPLAR_DERIVED_GENERATORS = [] EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS
Add exemplar generators for HashDigest, FakerGenerator
## Code Before: from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Integer(100, 200), ] EXEMPLAR_DERIVED_GENERATORS = [] EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS ## Instruction: Add exemplar generators for HashDigest, FakerGenerator ## Code After: from .context import tohu from tohu.v5.primitive_generators import * EXEMPLAR_PRIMITIVE_GENERATORS = [ Constant("quux"), Integer(100, 200), HashDigest(length=8), FakerGenerator(method="name"), ] EXEMPLAR_DERIVED_GENERATORS = [] EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS
# ... existing code ... Integer(100, 200), HashDigest(length=8), FakerGenerator(method="name"), ] # ... rest of the code ...
92204c154ab964d02faade72642a395356f1fa9b
aorun/losses.py
aorun/losses.py
import torch def mean_squared_error(true, pred): return torch.mean((true - pred)**2) def binary_crossentropy(true, pred, eps=1e-9): p1 = true * torch.log(pred + eps) p2 = (1 - true) * torch.log(1 - pred + eps) return torch.mean(-(p1 + p2)) def categorical_crossentropy(true, pred, eps=1e-9): return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1)) # aliases short names mse = mean_squared_error def get(obj): if callable(obj): return obj elif type(obj) is str: if obj in globals(): return globals()[obj] else: raise Exception(f'Unknown objective: {obj}') else: raise Exception('Objective must be a callable or str')
import torch def mean_squared_error(true, pred): return ((true - pred)**2).mean() def binary_crossentropy(true, pred, eps=1e-9): p1 = true * torch.log(pred + eps) p2 = (1 - true) * torch.log(1 - pred + eps) return torch.mean(-(p1 + p2)) def categorical_crossentropy(true, pred, eps=1e-9): return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1)) # aliases short names mse = mean_squared_error def get(obj): if callable(obj): return obj elif type(obj) is str: if obj in globals(): return globals()[obj] else: raise Exception(f'Unknown loss: {obj}') else: raise Exception('Loss must be a callable or str')
Change error message to loss
Change error message to loss
Python
mit
ramon-oliveira/aorun
import torch def mean_squared_error(true, pred): - return torch.mean((true - pred)**2) + return ((true - pred)**2).mean() def binary_crossentropy(true, pred, eps=1e-9): p1 = true * torch.log(pred + eps) p2 = (1 - true) * torch.log(1 - pred + eps) return torch.mean(-(p1 + p2)) def categorical_crossentropy(true, pred, eps=1e-9): return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1)) # aliases short names mse = mean_squared_error def get(obj): if callable(obj): return obj elif type(obj) is str: if obj in globals(): return globals()[obj] else: - raise Exception(f'Unknown objective: {obj}') + raise Exception(f'Unknown loss: {obj}') else: - raise Exception('Objective must be a callable or str') + raise Exception('Loss must be a callable or str')
Change error message to loss
## Code Before: import torch def mean_squared_error(true, pred): return torch.mean((true - pred)**2) def binary_crossentropy(true, pred, eps=1e-9): p1 = true * torch.log(pred + eps) p2 = (1 - true) * torch.log(1 - pred + eps) return torch.mean(-(p1 + p2)) def categorical_crossentropy(true, pred, eps=1e-9): return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1)) # aliases short names mse = mean_squared_error def get(obj): if callable(obj): return obj elif type(obj) is str: if obj in globals(): return globals()[obj] else: raise Exception(f'Unknown objective: {obj}') else: raise Exception('Objective must be a callable or str') ## Instruction: Change error message to loss ## Code After: import torch def mean_squared_error(true, pred): return ((true - pred)**2).mean() def binary_crossentropy(true, pred, eps=1e-9): p1 = true * torch.log(pred + eps) p2 = (1 - true) * torch.log(1 - pred + eps) return torch.mean(-(p1 + p2)) def categorical_crossentropy(true, pred, eps=1e-9): return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1)) # aliases short names mse = mean_squared_error def get(obj): if callable(obj): return obj elif type(obj) is str: if obj in globals(): return globals()[obj] else: raise Exception(f'Unknown loss: {obj}') else: raise Exception('Loss must be a callable or str')
// ... existing code ... def mean_squared_error(true, pred): return ((true - pred)**2).mean() // ... modified code ... else: raise Exception(f'Unknown loss: {obj}') else: raise Exception('Loss must be a callable or str') // ... rest of the code ...
ac6c9f4ad35a8c2c8ede616366b50995afff6992
hurricane/runner.py
hurricane/runner.py
import multiprocessing import optparse from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module from hurricane.utils import run_until_stopped class ApplicationManager(object): @run_until_stopped def run(self): parser = optparse.OptionParser() parser.add_option('--settings', dest='settings') options, args = parser.parse_args() if not options.settings: raise ImproperlyConfigured("You didn't provide a settings module.") settings = import_module(options.settings) self.producer_queue = multiprocessing.Queue() for producer in settings.PRODUCERS: ProducerClass = import_module(producer).Producer producer = ProducerClass(settings, self.producer_queue) multiprocessing.Process(target=producer.run).start() self.receiver_queues = [] for consumer in settings.CONSUMERS: ConsumerClass = import_module(consumer).Consumer recv_queue = multiprocessing.Queue() consumer = ConsumerClass(settings, recv_queue) self.receiver_queues.append(recv_queue) multiprocessing.Process(target=consumer.run).start() while True: item = self.producer_queue.get() for recv_queue in self.receiver_queues: recv_queue.put(item) if __name__ == '__main__': app = ApplicationManager() app.run()
import multiprocessing import optparse from django.conf import settings as django_settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module from hurricane.utils import run_until_stopped class ApplicationManager(object): @run_until_stopped def run(self): parser = optparse.OptionParser() parser.add_option('--settings', dest='settings') options, args = parser.parse_args() if not options.settings: raise ImproperlyConfigured("You didn't provide a settings module.") settings = import_module(options.settings) django_settings.configure(settings) self.producer_queue = multiprocessing.Queue() for producer in settings.PRODUCERS: ProducerClass = import_module(producer).Producer producer = ProducerClass(settings, self.producer_queue) multiprocessing.Process(target=producer.run).start() self.receiver_queues = [] for consumer in settings.CONSUMERS: ConsumerClass = import_module(consumer).Consumer recv_queue = multiprocessing.Queue() consumer = ConsumerClass(settings, recv_queue) self.receiver_queues.append(recv_queue) multiprocessing.Process(target=consumer.run).start() while True: item = self.producer_queue.get() for recv_queue in self.receiver_queues: recv_queue.put(item) if __name__ == '__main__': app = ApplicationManager() app.run()
Configure django correctly when we set up our env
Configure django correctly when we set up our env
Python
bsd-3-clause
ericflo/hurricane,ericflo/hurricane
import multiprocessing import optparse + from django.conf import settings as django_settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module from hurricane.utils import run_until_stopped class ApplicationManager(object): @run_until_stopped def run(self): parser = optparse.OptionParser() parser.add_option('--settings', dest='settings') options, args = parser.parse_args() if not options.settings: raise ImproperlyConfigured("You didn't provide a settings module.") settings = import_module(options.settings) + django_settings.configure(settings) self.producer_queue = multiprocessing.Queue() for producer in settings.PRODUCERS: ProducerClass = import_module(producer).Producer producer = ProducerClass(settings, self.producer_queue) multiprocessing.Process(target=producer.run).start() self.receiver_queues = [] for consumer in settings.CONSUMERS: ConsumerClass = import_module(consumer).Consumer recv_queue = multiprocessing.Queue() consumer = ConsumerClass(settings, recv_queue) self.receiver_queues.append(recv_queue) multiprocessing.Process(target=consumer.run).start() while True: item = self.producer_queue.get() for recv_queue in self.receiver_queues: recv_queue.put(item) if __name__ == '__main__': app = ApplicationManager() app.run()
Configure django correctly when we set up our env
## Code Before: import multiprocessing import optparse from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module from hurricane.utils import run_until_stopped class ApplicationManager(object): @run_until_stopped def run(self): parser = optparse.OptionParser() parser.add_option('--settings', dest='settings') options, args = parser.parse_args() if not options.settings: raise ImproperlyConfigured("You didn't provide a settings module.") settings = import_module(options.settings) self.producer_queue = multiprocessing.Queue() for producer in settings.PRODUCERS: ProducerClass = import_module(producer).Producer producer = ProducerClass(settings, self.producer_queue) multiprocessing.Process(target=producer.run).start() self.receiver_queues = [] for consumer in settings.CONSUMERS: ConsumerClass = import_module(consumer).Consumer recv_queue = multiprocessing.Queue() consumer = ConsumerClass(settings, recv_queue) self.receiver_queues.append(recv_queue) multiprocessing.Process(target=consumer.run).start() while True: item = self.producer_queue.get() for recv_queue in self.receiver_queues: recv_queue.put(item) if __name__ == '__main__': app = ApplicationManager() app.run() ## Instruction: Configure django correctly when we setup our env ## Code After: import multiprocessing import optparse from django.conf import settings as django_settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module from hurricane.utils import run_until_stopped class ApplicationManager(object): @run_until_stopped def run(self): parser = optparse.OptionParser() parser.add_option('--settings', dest='settings') options, args = parser.parse_args() if not options.settings: raise ImproperlyConfigured("You didn't provide a settings module.") settings = import_module(options.settings) django_settings.configure(settings) self.producer_queue = multiprocessing.Queue() for producer in settings.PRODUCERS: ProducerClass = import_module(producer).Producer producer = ProducerClass(settings, self.producer_queue) multiprocessing.Process(target=producer.run).start() self.receiver_queues = [] for consumer in settings.CONSUMERS: ConsumerClass = import_module(consumer).Consumer recv_queue = multiprocessing.Queue() consumer = ConsumerClass(settings, recv_queue) self.receiver_queues.append(recv_queue) multiprocessing.Process(target=consumer.run).start() while True: item = self.producer_queue.get() for recv_queue in self.receiver_queues: recv_queue.put(item) if __name__ == '__main__': app = ApplicationManager() app.run()
// ... existing code ... from django.conf import settings as django_settings from django.core.exceptions import ImproperlyConfigured // ... modified code ... settings = import_module(options.settings) django_settings.configure(settings) // ... rest of the code ...
6e1337f7079ba48aafcde59e4d5806caabb0bc29
navigation_extensions.py
navigation_extensions.py
from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender class ZivinetzNavigationExtension(NavigationExtension): name = _('Zivinetz navigation extension') def children(self, page, **kwargs): url = page.get_navigation_url() return [ PagePretender( title=capfirst(_('drudges')), url='%sdrudges/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('regional offices')), url='%sregional_offices/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('scope statements')), url='%sscope_statements/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('assignments')), url='%sassignments/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('expense reports')), url='%sexpense_reports/' % url, level=3, tree_id=page.tree_id, ), ]
from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender class ZivinetzNavigationExtension(NavigationExtension): name = _('Zivinetz navigation extension') def children(self, page, **kwargs): url = page.get_navigation_url() return [ PagePretender( title=capfirst(_('drudges')), url='%sdrudges/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('regional offices')), url='%sregional_offices/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('scope statements')), url='%sscope_statements/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('assignments')), url='%sassignments/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('expense reports')), url='%sexpense_reports/' % url, level=page.level+1, tree_id=page.tree_id, ), ]
Stop hard-coding the navigation level in the extension
Stop hard-coding the navigation level in the extension
Python
mit
matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz
from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender class ZivinetzNavigationExtension(NavigationExtension): name = _('Zivinetz navigation extension') def children(self, page, **kwargs): url = page.get_navigation_url() return [ PagePretender( title=capfirst(_('drudges')), url='%sdrudges/' % url, - level=3, + level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('regional offices')), url='%sregional_offices/' % url, - level=3, + level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('scope statements')), url='%sscope_statements/' % url, - level=3, + level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('assignments')), url='%sassignments/' % url, - level=3, + level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('expense reports')), url='%sexpense_reports/' % url, - level=3, + level=page.level+1, tree_id=page.tree_id, ), ]
Stop hard-coding the navigation level in the extension
## Code Before: from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender class ZivinetzNavigationExtension(NavigationExtension): name = _('Zivinetz navigation extension') def children(self, page, **kwargs): url = page.get_navigation_url() return [ PagePretender( title=capfirst(_('drudges')), url='%sdrudges/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('regional offices')), url='%sregional_offices/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('scope statements')), url='%sscope_statements/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('assignments')), url='%sassignments/' % url, level=3, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('expense reports')), url='%sexpense_reports/' % url, level=3, tree_id=page.tree_id, ), ] ## Instruction: Stop hard-coding the navigation level in the extension ## Code After: from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender class ZivinetzNavigationExtension(NavigationExtension): name = _('Zivinetz navigation extension') def children(self, page, **kwargs): url = page.get_navigation_url() return [ PagePretender( title=capfirst(_('drudges')), url='%sdrudges/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('regional offices')), url='%sregional_offices/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('scope statements')), url='%sscope_statements/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('assignments')), url='%sassignments/' % url, level=page.level+1, tree_id=page.tree_id, ), PagePretender( title=capfirst(_('expense reports')), url='%sexpense_reports/' % url, level=page.level+1, tree_id=page.tree_id, ), ]
... url='%sdrudges/' % url, level=page.level+1, tree_id=page.tree_id, ... url='%sregional_offices/' % url, level=page.level+1, tree_id=page.tree_id, ... url='%sscope_statements/' % url, level=page.level+1, tree_id=page.tree_id, ... url='%sassignments/' % url, level=page.level+1, tree_id=page.tree_id, ... url='%sexpense_reports/' % url, level=page.level+1, tree_id=page.tree_id, ...
f2d91d2c296e3662a1b656f0fdf5191665ff363b
skimage/transform/__init__.py
skimage/transform/__init__.py
from .hough_transform import * from .radon_transform import * from .finite_radon_transform import * from .integral import * from ._geometric import (warp, warp_coords, estimate_transform, SimilarityTransform, AffineTransform, ProjectiveTransform, PolynomialTransform, PiecewiseAffineTransform) from ._warps import swirl, homography, resize, rotate, rescale from .pyramids import (pyramid_reduce, pyramid_expand, pyramid_gaussian, pyramid_laplacian)
from .hough_transform import * from .radon_transform import * from .finite_radon_transform import * from .integral import * from ._geometric import (warp, warp_coords, estimate_transform, SimilarityTransform, AffineTransform, ProjectiveTransform, PolynomialTransform, PiecewiseAffineTransform) from ._warps import swirl, resize, rotate, rescale from .pyramids import (pyramid_reduce, pyramid_expand, pyramid_gaussian, pyramid_laplacian)
Remove deprecated import of homography
Remove deprecated import of homography
Python
bsd-3-clause
youprofit/scikit-image,almarklein/scikit-image,keflavich/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,almarklein/scikit-image,chriscrosscutler/scikit-image,ajaybhat/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,SamHames/scikit-image,robintw/scikit-image,emon10005/scikit-image,emon10005/scikit-image,ClinicalGraphics/scikit-image,Midafi/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,ofgulban/scikit-image,Britefury/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,paalge/scikit-image,Britefury/scikit-image,keflavich/scikit-image,rjeli/scikit-image,newville/scikit-image,bennlich/scikit-image,SamHames/scikit-image,ajaybhat/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,oew1v07/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,juliusbierk/scikit-image,Midafi/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,paalge/scikit-image,chintak/scikit-image,robintw/scikit-image,bsipocz/scikit-image,bsipocz/scikit-image,rjeli/scikit-image,bennlich/scikit-image,juliusbierk/scikit-image,chriscrosscutler/scikit-image,jwiggins/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,Hiyorimi/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,dpshelio/scikit-image,rjeli/scikit-image,newville/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,blink1073/scikit-image
from .hough_transform import * from .radon_transform import * from .finite_radon_transform import * from .integral import * from ._geometric import (warp, warp_coords, estimate_transform, SimilarityTransform, AffineTransform, ProjectiveTransform, PolynomialTransform, PiecewiseAffineTransform) - from ._warps import swirl, homography, resize, rotate, rescale + from ._warps import swirl, resize, rotate, rescale from .pyramids import (pyramid_reduce, pyramid_expand, pyramid_gaussian, pyramid_laplacian)
Remove deprecated import of homography
## Code Before: from .hough_transform import * from .radon_transform import * from .finite_radon_transform import * from .integral import * from ._geometric import (warp, warp_coords, estimate_transform, SimilarityTransform, AffineTransform, ProjectiveTransform, PolynomialTransform, PiecewiseAffineTransform) from ._warps import swirl, homography, resize, rotate, rescale from .pyramids import (pyramid_reduce, pyramid_expand, pyramid_gaussian, pyramid_laplacian) ## Instruction: Remove deprecated import of hompgraphy ## Code After: from .hough_transform import * from .radon_transform import * from .finite_radon_transform import * from .integral import * from ._geometric import (warp, warp_coords, estimate_transform, SimilarityTransform, AffineTransform, ProjectiveTransform, PolynomialTransform, PiecewiseAffineTransform) from ._warps import swirl, resize, rotate, rescale from .pyramids import (pyramid_reduce, pyramid_expand, pyramid_gaussian, pyramid_laplacian)
... PiecewiseAffineTransform) from ._warps import swirl, resize, rotate, rescale from .pyramids import (pyramid_reduce, pyramid_expand, ...
0eb7e6b9a8e4e38793b1e045ab5f0f0a4d4e6777
synapse/metrics/resource.py
synapse/metrics/resource.py
from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode()
from twisted.web.resource import Resource import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode()
Delete unused import of NOT_DONE_YET
Delete unused import of NOT_DONE_YET
Python
apache-2.0
matrix-org/synapse,illicitonion/synapse,iot-factory/synapse,TribeMedia/synapse,iot-factory/synapse,howethomas/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,rzr/synapse,howethomas/synapse,illicitonion/synapse,rzr/synapse,howethomas/synapse,illicitonion/synapse,matrix-org/synapse,howethomas/synapse,TribeMedia/synapse,TribeMedia/synapse,rzr/synapse,iot-factory/synapse,iot-factory/synapse,iot-factory/synapse,illicitonion/synapse,rzr/synapse,TribeMedia/synapse,rzr/synapse,TribeMedia/synapse,matrix-org/synapse,howethomas/synapse,illicitonion/synapse
from twisted.web.resource import Resource - from twisted.web.server import NOT_DONE_YET import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode()
Delete unused import of NOT_DONE_YET
## Code Before: from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode() ## Instruction: Delete unused import of NOT_READY_YET ## Code After: from twisted.web.resource import Resource import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode()
// ... existing code ... from twisted.web.resource import Resource // ... rest of the code ...
b03b62e7abe9a8db0cded78b80cb8d565a424a7e
apps/activity/models.py
apps/activity/models.py
from django.db import models class Activity(models.Model): entry = models.ForeignKey('feeds.Entry', blank=True, null=True, unique=True) published_on = models.DateTimeField(auto_now_add=True) def __unicode__(self): return u'%s:%s => %s' % (self.source_class, self.source_id, self.title) def from_feed(self): return self.entry != None def broadcast(source): return Activity.objects.create( entry=source )
from django.db import models class Activity(models.Model): entry = models.ForeignKey('feeds.Entry', blank=True, null=True, unique=True) published_on = models.DateTimeField(auto_now_add=True) def __unicode__(self): return u'%d: Entry: %s' % (self.pk, self.entry) def broadcast(source): return Activity.objects.create( entry=source )
Remove reference to old field and unused method
Remove reference to old field and unused method
Python
bsd-3-clause
mozilla/betafarm,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/betafarm,mozilla/betafarm,mozilla/betafarm,mozilla/mozilla-ignite
from django.db import models class Activity(models.Model): entry = models.ForeignKey('feeds.Entry', blank=True, null=True, unique=True) published_on = models.DateTimeField(auto_now_add=True) def __unicode__(self): + return u'%d: Entry: %s' % (self.pk, self.entry) - return u'%s:%s => %s' % (self.source_class, self.source_id, self.title) - - def from_feed(self): - return self.entry != None def broadcast(source): return Activity.objects.create( entry=source )
Remove reference to old field and unused method
## Code Before: from django.db import models class Activity(models.Model): entry = models.ForeignKey('feeds.Entry', blank=True, null=True, unique=True) published_on = models.DateTimeField(auto_now_add=True) def __unicode__(self): return u'%s:%s => %s' % (self.source_class, self.source_id, self.title) def from_feed(self): return self.entry != None def broadcast(source): return Activity.objects.create( entry=source ) ## Instruction: Remove reference to old field and unused method ## Code After: from django.db import models class Activity(models.Model): entry = models.ForeignKey('feeds.Entry', blank=True, null=True, unique=True) published_on = models.DateTimeField(auto_now_add=True) def __unicode__(self): return u'%d: Entry: %s' % (self.pk, self.entry) def broadcast(source): return Activity.objects.create( entry=source )
// ... existing code ... def __unicode__(self): return u'%d: Entry: %s' % (self.pk, self.entry) // ... rest of the code ...
b7047bd09a6bda21dfd1c69cc4cdd08ae328a03b
autotests/tests/sample_false_assert.py
autotests/tests/sample_false_assert.py
import time from unittest import TestCase class Sample(TestCase): def test_sameple_with_big_timeout(self): print("Testing false assert") self.assertEquals(1, 2)
from unittest import TestCase class Sample(TestCase): def test_sameple_with_big_timeout(self): print("Testing false assert") self.assertEqual(1, 2)
Fix deprecated use of function on sample test
Fix deprecated use of function on sample test
Python
mit
jfelipefilho/test-manager,jfelipefilho/test-manager,jfelipefilho/test-manager
- import time from unittest import TestCase class Sample(TestCase): def test_sameple_with_big_timeout(self): print("Testing false assert") - self.assertEquals(1, 2) + self.assertEqual(1, 2)
Fix deprecated use of function on sample test
## Code Before: import time from unittest import TestCase class Sample(TestCase): def test_sameple_with_big_timeout(self): print("Testing false assert") self.assertEquals(1, 2) ## Instruction: Fix deprecated use of function on sample test ## Code After: from unittest import TestCase class Sample(TestCase): def test_sameple_with_big_timeout(self): print("Testing false assert") self.assertEqual(1, 2)
... from unittest import TestCase ... print("Testing false assert") self.assertEqual(1, 2) ...
5657dd437af76ecccdb671a1a09a4c6f9874aab0
linter.py
linter.py
"""This module exports the PerlEpages6 plugin class.""" import sublime from SublimeLinter.lint import Linter, util class PerlEpages6(Linter): """Provides an interface to perl on an epages6 virtual machine from a local machine. Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6).""" def cmd(self): return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; executable = 'python3' syntax = ('modernperl', 'perl') regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?' error_stream = util.STREAM_BOTH tempfile_suffix = 'pm'
"""This module exports the PerlEpages6 plugin class.""" import sublime from SublimeLinter.lint import Linter, util class PerlEpages6(Linter): """Provides an interface to perl on an epages6 virtual machine from a local machine. Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6).""" def cmd(self): if self.view.settings().get('ep6vm'): return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; else: return [] executable = 'python3' syntax = ('modernperl', 'perl') regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?' error_stream = util.STREAM_BOTH tempfile_suffix = 'pm'
Check if epages6 settings are configured
Check if epages6 settings are configured
Python
mit
ePages-rnd/SublimeLinter-contrib-perl-epages6
"""This module exports the PerlEpages6 plugin class.""" import sublime from SublimeLinter.lint import Linter, util class PerlEpages6(Linter): """Provides an interface to perl on an epages6 virtual machine from a local machine. Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6).""" def cmd(self): + if self.view.settings().get('ep6vm'): - return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; + return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; + else: + return [] executable = 'python3' syntax = ('modernperl', 'perl') regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?' error_stream = util.STREAM_BOTH tempfile_suffix = 'pm'
Check if epages6 settings are configured
## Code Before: """This module exports the PerlEpages6 plugin class.""" import sublime from SublimeLinter.lint import Linter, util class PerlEpages6(Linter): """Provides an interface to perl on an epages6 virtual machine from a local machine. Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6).""" def cmd(self): return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; executable = 'python3' syntax = ('modernperl', 'perl') regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?' error_stream = util.STREAM_BOTH tempfile_suffix = 'pm' ## Instruction: Check if epages6 settings are configured ## Code After: """This module exports the PerlEpages6 plugin class.""" import sublime from SublimeLinter.lint import Linter, util class PerlEpages6(Linter): """Provides an interface to perl on an epages6 virtual machine from a local machine. Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6).""" def cmd(self): if self.view.settings().get('ep6vm'): return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; else: return [] executable = 'python3' syntax = ('modernperl', 'perl') regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?' error_stream = util.STREAM_BOTH tempfile_suffix = 'pm'
... def cmd(self): if self.view.settings().get('ep6vm'): return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']; else: return [] ...
5a0659ed9e4f8085009c04ade4f66cbd5d3c94bd
openedx/core/djangoapps/user_api/accounts/permissions.py
openedx/core/djangoapps/user_api/accounts/permissions.py
from __future__ import unicode_literals from rest_framework import permissions USERNAME_REPLACEMENT_GROUP = "username_replacement_admin" class CanDeactivateUser(permissions.BasePermission): """ Grants access to AccountDeactivationView if the requesting user is a superuser or has the explicit permission to deactivate a User account. """ def has_permission(self, request, view): return request.user.has_perm('student.can_deactivate_users') class CanRetireUser(permissions.BasePermission): """ Grants access to the various retirement API endpoints if the requesting user is a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to retire a User account. """ def has_permission(self, request, view): return request.user.has_perm('accounts.can_retire_user') class CanReplaceUsername(permissions.BasePermission): """ Grants access to the Username Replacement API for anyone in the group, including the service user. """ def has_permission(self, request, view): return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists()
from __future__ import unicode_literals from django.conf import settings from rest_framework import permissions USERNAME_REPLACEMENT_GROUP = "username_replacement_admin" class CanDeactivateUser(permissions.BasePermission): """ Grants access to AccountDeactivationView if the requesting user is a superuser or has the explicit permission to deactivate a User account. """ def has_permission(self, request, view): return request.user.has_perm('student.can_deactivate_users') class CanRetireUser(permissions.BasePermission): """ Grants access to the various retirement API endpoints if the requesting user is a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to retire a User account. """ def has_permission(self, request, view): return request.user.has_perm('accounts.can_retire_user') class CanReplaceUsername(permissions.BasePermission): """ Grants access to the Username Replacement API for anyone in the group, including the service user. """ def has_permission(self, request, view): return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
Replace group with static username
Replace group with static username
Python
agpl-3.0
appsembler/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,mitocw/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,cpennington/edx-platform,stvstnfrd/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,cpennington/edx-platform,appsembler/edx-platform,ESOedX/edx-platform,msegado/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx/edx-platform,edx/edx-platform,jolyonb/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,mitocw/edx-platform,arbrandes/edx-platform,jolyonb/edx-platform,angelapper/edx-platform,jolyonb/edx-platform,angelapper/edx-platform,ESOedX/edx-platform,msegado/edx-platform,mitocw/edx-platform,EDUlib/edx-platform,cpennington/edx-platform,edx/edx-platform,cpennington/edx-platform,appsembler/edx-platform
from __future__ import unicode_literals + from django.conf import settings from rest_framework import permissions USERNAME_REPLACEMENT_GROUP = "username_replacement_admin" class CanDeactivateUser(permissions.BasePermission): """ Grants access to AccountDeactivationView if the requesting user is a superuser or has the explicit permission to deactivate a User account. """ def has_permission(self, request, view): return request.user.has_perm('student.can_deactivate_users') class CanRetireUser(permissions.BasePermission): """ Grants access to the various retirement API endpoints if the requesting user is a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to retire a User account. """ def has_permission(self, request, view): return request.user.has_perm('accounts.can_retire_user') class CanReplaceUsername(permissions.BasePermission): """ Grants access to the Username Replacement API for anyone in the group, including the service user. """ def has_permission(self, request, view): - return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists() + return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
Replace group with static username
## Code Before: from __future__ import unicode_literals from rest_framework import permissions USERNAME_REPLACEMENT_GROUP = "username_replacement_admin" class CanDeactivateUser(permissions.BasePermission): """ Grants access to AccountDeactivationView if the requesting user is a superuser or has the explicit permission to deactivate a User account. """ def has_permission(self, request, view): return request.user.has_perm('student.can_deactivate_users') class CanRetireUser(permissions.BasePermission): """ Grants access to the various retirement API endpoints if the requesting user is a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to retire a User account. """ def has_permission(self, request, view): return request.user.has_perm('accounts.can_retire_user') class CanReplaceUsername(permissions.BasePermission): """ Grants access to the Username Replacement API for anyone in the group, including the service user. """ def has_permission(self, request, view): return request.user.groups.filter(name=USERNAME_REPLACEMENT_GROUP).exists() ## Instruction: Replace group with static username ## Code After: from __future__ import unicode_literals from django.conf import settings from rest_framework import permissions USERNAME_REPLACEMENT_GROUP = "username_replacement_admin" class CanDeactivateUser(permissions.BasePermission): """ Grants access to AccountDeactivationView if the requesting user is a superuser or has the explicit permission to deactivate a User account. """ def has_permission(self, request, view): return request.user.has_perm('student.can_deactivate_users') class CanRetireUser(permissions.BasePermission): """ Grants access to the various retirement API endpoints if the requesting user is a superuser, the RETIREMENT_SERVICE_USERNAME, or has the explicit permission to retire a User account. """ def has_permission(self, request, view): return request.user.has_perm('accounts.can_retire_user') class CanReplaceUsername(permissions.BasePermission): """ Grants access to the Username Replacement API for anyone in the group, including the service user. """ def has_permission(self, request, view): return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER")
... from django.conf import settings from rest_framework import permissions ... def has_permission(self, request, view): return request.user.username == getattr(settings, "USERNAME_REPLACEMENT_WORKER") ...
96a313eef46c31af3308805f10ffa63e330cc817
02/test_move.py
02/test_move.py
from move import load_moves, encode_moves, normalize_index, move import unittest class TestMove(unittest.TestCase): def setUp(self): self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD'] def test_load_moves(self): assert load_moves('example.txt') == self.moves def test_encode_moves(self): assert encode_moves(self.moves) == '1985' def test_normalize_index(self): assert normalize_index(3) == 2 assert normalize_index(2) == 2 assert normalize_index(1) == 1 assert normalize_index(0) == 0 assert normalize_index(-1) == 0 assert normalize_index(2, 1) == 0 assert normalize_index(5, 2) == 1 assert normalize_index(-1, 4) == 0 def test_move(self): assert move(5, 'U') == 2 assert move(8, 'D') == 8 assert move(7, 'L') == 7 assert move(7, 'D') == 7 assert move(2, 'R') == 3 assert move(1, 'L') == 1 def test_alternate_move(self): assert alternate_move(5, 'U') == 5 assert alternate_move(5, 'L') == 5 assert alternate_move(7, 'D') == 'B' assert alternate_move('D', 'D') == 'D'
from move import load_moves, encode_moves, normalize_index, move import unittest class TestMove(unittest.TestCase): def setUp(self): self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD'] def test_load_moves(self): assert load_moves('example.txt') == self.moves def test_encode_moves(self): assert encode_moves(self.moves) == '1985' def test_normalize_index(self): assert normalize_index(3) == 2 assert normalize_index(2) == 2 assert normalize_index(1) == 1 assert normalize_index(0) == 0 assert normalize_index(-1) == 0 def test_move(self): assert move(5, 'U') == 2 assert move(8, 'D') == 8 assert move(7, 'L') == 7 assert move(7, 'D') == 7 assert move(2, 'R') == 3 assert move(1, 'L') == 1 def test_alternate_move(self): assert alternate_move(5, 'U') == 5 assert alternate_move(5, 'L') == 5 assert alternate_move(7, 'D') == 'B' assert alternate_move('D', 'D') == 'D'
Remove test of two-argument normalize function.
Remove test of two-argument normalize function.
Python
mit
machinelearningdeveloper/aoc_2016
from move import load_moves, encode_moves, normalize_index, move import unittest class TestMove(unittest.TestCase): def setUp(self): self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD'] def test_load_moves(self): assert load_moves('example.txt') == self.moves def test_encode_moves(self): assert encode_moves(self.moves) == '1985' def test_normalize_index(self): assert normalize_index(3) == 2 assert normalize_index(2) == 2 assert normalize_index(1) == 1 assert normalize_index(0) == 0 assert normalize_index(-1) == 0 - assert normalize_index(2, 1) == 0 - assert normalize_index(5, 2) == 1 - assert normalize_index(-1, 4) == 0 def test_move(self): assert move(5, 'U') == 2 assert move(8, 'D') == 8 assert move(7, 'L') == 7 assert move(7, 'D') == 7 assert move(2, 'R') == 3 assert move(1, 'L') == 1 def test_alternate_move(self): assert alternate_move(5, 'U') == 5 assert alternate_move(5, 'L') == 5 assert alternate_move(7, 'D') == 'B' assert alternate_move('D', 'D') == 'D'
Remove test of two-argument normalize function.
## Code Before: from move import load_moves, encode_moves, normalize_index, move import unittest class TestMove(unittest.TestCase): def setUp(self): self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD'] def test_load_moves(self): assert load_moves('example.txt') == self.moves def test_encode_moves(self): assert encode_moves(self.moves) == '1985' def test_normalize_index(self): assert normalize_index(3) == 2 assert normalize_index(2) == 2 assert normalize_index(1) == 1 assert normalize_index(0) == 0 assert normalize_index(-1) == 0 assert normalize_index(2, 1) == 0 assert normalize_index(5, 2) == 1 assert normalize_index(-1, 4) == 0 def test_move(self): assert move(5, 'U') == 2 assert move(8, 'D') == 8 assert move(7, 'L') == 7 assert move(7, 'D') == 7 assert move(2, 'R') == 3 assert move(1, 'L') == 1 def test_alternate_move(self): assert alternate_move(5, 'U') == 5 assert alternate_move(5, 'L') == 5 assert alternate_move(7, 'D') == 'B' assert alternate_move('D', 'D') == 'D' ## Instruction: Remove test of two-argument normalize function. ## Code After: from move import load_moves, encode_moves, normalize_index, move import unittest class TestMove(unittest.TestCase): def setUp(self): self.moves = ['ULL', 'RRDDD', 'LURDL', 'UUUUD'] def test_load_moves(self): assert load_moves('example.txt') == self.moves def test_encode_moves(self): assert encode_moves(self.moves) == '1985' def test_normalize_index(self): assert normalize_index(3) == 2 assert normalize_index(2) == 2 assert normalize_index(1) == 1 assert normalize_index(0) == 0 assert normalize_index(-1) == 0 def test_move(self): assert move(5, 'U') == 2 assert move(8, 'D') == 8 assert move(7, 'L') == 7 assert move(7, 'D') == 7 assert move(2, 'R') == 3 assert move(1, 'L') == 1 def test_alternate_move(self): assert alternate_move(5, 'U') == 5 assert alternate_move(5, 'L') == 5 assert alternate_move(7, 'D') == 'B' assert alternate_move('D', 'D') == 'D'
// ... existing code ... assert normalize_index(-1) == 0 // ... rest of the code ...
331f776eef9acd0509c7534040ef225869305d7f
tests/test_cookies.py
tests/test_cookies.py
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.error is None assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.exception is None assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
Update test for cookies fixture
Update test for cookies fixture
Python
mit
hackebrot/pytest-cookies
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) - assert cookies.error is None + assert cookies.exception is None + assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
Update test for cookies fixture
## Code Before: def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.error is None assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ]) ## Instruction: Update test for cookies fixture ## Code After: def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.exception is None assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
# ... existing code ... assert callable(cookies.bake) assert cookies.exception is None assert cookies.exit_code == 0 assert cookies.project is None # ... rest of the code ...
d89e43c649aba78ac9722ca39f9e0c67be0cc897
precision/accounts/models.py
precision/accounts/models.py
from django.db import models # Create your models here.
from django.contrib.auth.models import AbstractUser from django.db import models from django.utils.translation import ugettext_lazy as _ class SchoolAdministrator(AbstractUser): pass
Add a simple abstract user model for school administrators which will be used later
Add a simple abstract user model for school administrators which will be used later
Python
mit
FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management
+ from django.contrib.auth.models import AbstractUser from django.db import models + from django.utils.translation import ugettext_lazy as _ - # Create your models here. + class SchoolAdministrator(AbstractUser): + pass +
Add a simple abstract user model for school administrators which will be used later
## Code Before: from django.db import models # Create your models here. ## Instruction: Add a simple abstract user model for school administrators which will be used later ## Code After: from django.contrib.auth.models import AbstractUser from django.db import models from django.utils.translation import ugettext_lazy as _ class SchoolAdministrator(AbstractUser): pass
# ... existing code ... from django.contrib.auth.models import AbstractUser from django.db import models from django.utils.translation import ugettext_lazy as _ class SchoolAdministrator(AbstractUser): pass # ... rest of the code ...
115771e1917bd40989cc70762225fd3c6e0a565b
test/test_parser.py
test/test_parser.py
import tempfile import unittest import mock import bin.parser class ParserTest(unittest.TestCase): def setUp(self): self.tf = tempfile.TemporaryFile() #print self.tf.name #self.tf.write('Test text.') ## Reset file position to start so it can be read #self.tf.seek(0) #print self.tf.readline() self.patcher = mock.patch('apel.db.apeldb.ApelDb') self.mock_db = self.patcher.start() def test_parse_empty_file(self): """An empty file should be ignored and no errors raised.""" bin.parser.parse_file(None, self.mock_db, self.tf, False) def tearDown(self): self.tf.close() self.patcher.stop() if __name__ == '__main__': unittest.main()
import bz2 import gzip import os import re import shutil import tempfile import unittest import mock import bin.parser class ParserTest(unittest.TestCase): def setUp(self): self.tf = tempfile.TemporaryFile() #print self.tf.name #self.tf.write('Test text.') ## Reset file position to start so it can be read #self.tf.seek(0) #print self.tf.readline() self.patcher = mock.patch('apel.db.apeldb.ApelDb') self.mock_db = self.patcher.start() self.mock_parser = mock.Mock() def test_parse_empty_file(self): """An empty file should be ignored and no errors raised.""" bin.parser.parse_file(None, self.mock_db, self.tf, False) def test_scan_dir(self): """ Check that scan dir works with bzip, gzip and normal files. """ dir_path = tempfile.mkdtemp() try: # Create a bzip, gzip and normal file in turn in the temp directory for method, suffix in ((bz2.BZ2File, '.bzip2'), (gzip.open, '.gzip'), (open, '.normal')): handle, path = tempfile.mkstemp(suffix, dir=dir_path) os.close(handle) file_obj = method(path, 'wb') # Write three lines to the file file_obj.write("Line one.\nLine two.\nLine three.") file_obj.close() records = bin.parser.scan_dir(self.mock_parser, dir_path, False, re.compile('(.*)'), self.mock_db, []) for record in records: # Check that all three lines have been read self.assertEqual(record.get_field('StopLine'), 3, "Unable to read %s file" % record.get_field('FileName').split('.')[1]) finally: shutil.rmtree(dir_path) def tearDown(self): self.tf.close() self.patcher.stop() if __name__ == '__main__': unittest.main()
Add test for parsing different file types
Add test for parsing different file types - Add tests for bzip, gzip and normal files to parser tests.
Python
apache-2.0
apel/apel,tofu-rocketry/apel,apel/apel,tofu-rocketry/apel,stfc/apel,stfc/apel
+ import bz2 + import gzip + import os + import re + import shutil import tempfile import unittest import mock import bin.parser class ParserTest(unittest.TestCase): def setUp(self): self.tf = tempfile.TemporaryFile() #print self.tf.name #self.tf.write('Test text.') ## Reset file position to start so it can be read #self.tf.seek(0) #print self.tf.readline() self.patcher = mock.patch('apel.db.apeldb.ApelDb') self.mock_db = self.patcher.start() + self.mock_parser = mock.Mock() + def test_parse_empty_file(self): """An empty file should be ignored and no errors raised.""" bin.parser.parse_file(None, self.mock_db, self.tf, False) + + def test_scan_dir(self): + """ + Check that scan dir works with bzip, gzip and normal files. + """ + dir_path = tempfile.mkdtemp() + + try: + # Create a bzip, gzip and normal file in turn in the temp directory + for method, suffix in ((bz2.BZ2File, '.bzip2'), + (gzip.open, '.gzip'), + (open, '.normal')): + handle, path = tempfile.mkstemp(suffix, dir=dir_path) + os.close(handle) + file_obj = method(path, 'wb') + # Write three lines to the file + file_obj.write("Line one.\nLine two.\nLine three.") + file_obj.close() + records = bin.parser.scan_dir(self.mock_parser, dir_path, False, + re.compile('(.*)'), self.mock_db, []) + for record in records: + # Check that all three lines have been read + self.assertEqual(record.get_field('StopLine'), 3, + "Unable to read %s file" + % record.get_field('FileName').split('.')[1]) + finally: + shutil.rmtree(dir_path) def tearDown(self): self.tf.close() self.patcher.stop() + if __name__ == '__main__': unittest.main()
Add test for parsing different file types
## Code Before: import tempfile import unittest import mock import bin.parser class ParserTest(unittest.TestCase): def setUp(self): self.tf = tempfile.TemporaryFile() #print self.tf.name #self.tf.write('Test text.') ## Reset file position to start so it can be read #self.tf.seek(0) #print self.tf.readline() self.patcher = mock.patch('apel.db.apeldb.ApelDb') self.mock_db = self.patcher.start() def test_parse_empty_file(self): """An empty file should be ignored and no errors raised.""" bin.parser.parse_file(None, self.mock_db, self.tf, False) def tearDown(self): self.tf.close() self.patcher.stop() if __name__ == '__main__': unittest.main() ## Instruction: Add test for parsing different file types ## Code After: import bz2 import gzip import os import re import shutil import tempfile import unittest import mock import bin.parser class ParserTest(unittest.TestCase): def setUp(self): self.tf = tempfile.TemporaryFile() #print self.tf.name #self.tf.write('Test text.') ## Reset file position to start so it can be read #self.tf.seek(0) #print self.tf.readline() self.patcher = mock.patch('apel.db.apeldb.ApelDb') self.mock_db = self.patcher.start() self.mock_parser = mock.Mock() def test_parse_empty_file(self): """An empty file should be ignored and no errors raised.""" bin.parser.parse_file(None, self.mock_db, self.tf, False) def test_scan_dir(self): """ Check that scan dir works with bzip, gzip and normal files. """ dir_path = tempfile.mkdtemp() try: # Create a bzip, gzip and normal file in turn in the temp directory for method, suffix in ((bz2.BZ2File, '.bzip2'), (gzip.open, '.gzip'), (open, '.normal')): handle, path = tempfile.mkstemp(suffix, dir=dir_path) os.close(handle) file_obj = method(path, 'wb') # Write three lines to the file file_obj.write("Line one.\nLine two.\nLine three.") file_obj.close() records = bin.parser.scan_dir(self.mock_parser, dir_path, False, re.compile('(.*)'), self.mock_db, []) for record in records: # Check that all three lines have been read self.assertEqual(record.get_field('StopLine'), 3, "Unable to read %s file" % record.get_field('FileName').split('.')[1]) finally: shutil.rmtree(dir_path) def tearDown(self): self.tf.close() self.patcher.stop() if __name__ == '__main__': unittest.main()
// ... existing code ... import bz2 import gzip import os import re import shutil import tempfile // ... modified code ... self.mock_parser = mock.Mock() def test_parse_empty_file(self): ... bin.parser.parse_file(None, self.mock_db, self.tf, False) def test_scan_dir(self): """ Check that scan dir works with bzip, gzip and normal files. """ dir_path = tempfile.mkdtemp() try: # Create a bzip, gzip and normal file in turn in the temp directory for method, suffix in ((bz2.BZ2File, '.bzip2'), (gzip.open, '.gzip'), (open, '.normal')): handle, path = tempfile.mkstemp(suffix, dir=dir_path) os.close(handle) file_obj = method(path, 'wb') # Write three lines to the file file_obj.write("Line one.\nLine two.\nLine three.") file_obj.close() records = bin.parser.scan_dir(self.mock_parser, dir_path, False, re.compile('(.*)'), self.mock_db, []) for record in records: # Check that all three lines have been read self.assertEqual(record.get_field('StopLine'), 3, "Unable to read %s file" % record.get_field('FileName').split('.')[1]) finally: shutil.rmtree(dir_path) ... if __name__ == '__main__': // ... rest of the code ...
5f30d91d35d090e28925613365d5d1f31f0259d2
daapserver/bonjour.py
daapserver/bonjour.py
import zeroconf import socket class Bonjour(object): """ """ def __init__(self): """ """ self.zeroconf = zeroconf.Zeroconf() self.servers = {} def publish(self, server): """ """ if server in self.servers: self.unpublish(server) ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip description = { "txtvers": 1, "Password": server.password is not None, "Machine Name": server.server_name } self.servers[server] = zeroconf.ServiceInfo( "_daap._tcp.local.", server.server_name + ".daap._tcp.local.", socket.inet_aton(ip), server.port, 0, 0, description) self.zeroconf.register_service(self.servers[server]) def unpublish(self, server): """ """ if server not in self.servers: return self.zeroconf.unregister_service(self.servers[server]) del self.servers[server] def close(self): """ """ self.zeroconf.close()
import zeroconf import socket class Bonjour(object): """ """ def __init__(self): """ """ self.zeroconf = zeroconf.Zeroconf() self.servers = {} def publish(self, server): """ """ if server in self.servers: self.unpublish(server) ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip description = { "txtvers": 1, "Password": int(bool(server.password)), "Machine Name": server.server_name } self.servers[server] = zeroconf.ServiceInfo( "_daap._tcp.local.", server.server_name + "._daap._tcp.local.", socket.inet_aton(ip), server.port, 0, 0, description) self.zeroconf.register_service(self.servers[server]) def unpublish(self, server): """ """ if server not in self.servers: return self.zeroconf.unregister_service(self.servers[server]) del self.servers[server] def close(self): """ """ self.zeroconf.close()
Fix for broken zeroconf publishing.
Fix for broken zeroconf publishing.
Python
mit
ties/flask-daapserver,basilfx/flask-daapserver
import zeroconf import socket class Bonjour(object): """ """ def __init__(self): """ """ self.zeroconf = zeroconf.Zeroconf() self.servers = {} def publish(self, server): """ """ if server in self.servers: self.unpublish(server) ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip description = { "txtvers": 1, - "Password": server.password is not None, + "Password": int(bool(server.password)), "Machine Name": server.server_name } self.servers[server] = zeroconf.ServiceInfo( - "_daap._tcp.local.", server.server_name + ".daap._tcp.local.", + "_daap._tcp.local.", server.server_name + "._daap._tcp.local.", socket.inet_aton(ip), server.port, 0, 0, description) self.zeroconf.register_service(self.servers[server]) def unpublish(self, server): """ """ if server not in self.servers: return self.zeroconf.unregister_service(self.servers[server]) del self.servers[server] def close(self): """ """ self.zeroconf.close()
Fix for broken zeroconf publishing.
## Code Before: import zeroconf import socket class Bonjour(object): """ """ def __init__(self): """ """ self.zeroconf = zeroconf.Zeroconf() self.servers = {} def publish(self, server): """ """ if server in self.servers: self.unpublish(server) ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip description = { "txtvers": 1, "Password": server.password is not None, "Machine Name": server.server_name } self.servers[server] = zeroconf.ServiceInfo( "_daap._tcp.local.", server.server_name + ".daap._tcp.local.", socket.inet_aton(ip), server.port, 0, 0, description) self.zeroconf.register_service(self.servers[server]) def unpublish(self, server): """ """ if server not in self.servers: return self.zeroconf.unregister_service(self.servers[server]) del self.servers[server] def close(self): """ """ self.zeroconf.close() ## Instruction: Fix for broken zeroconf publishing. ## Code After: import zeroconf import socket class Bonjour(object): """ """ def __init__(self): """ """ self.zeroconf = zeroconf.Zeroconf() self.servers = {} def publish(self, server): """ """ if server in self.servers: self.unpublish(server) ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip description = { "txtvers": 1, "Password": int(bool(server.password)), "Machine Name": server.server_name } self.servers[server] = zeroconf.ServiceInfo( "_daap._tcp.local.", server.server_name + "._daap._tcp.local.", socket.inet_aton(ip), server.port, 0, 0, description) self.zeroconf.register_service(self.servers[server]) def unpublish(self, server): """ """ if server not in self.servers: return self.zeroconf.unregister_service(self.servers[server]) del self.servers[server] def close(self): """ """ self.zeroconf.close()
# ... existing code ... "txtvers": 1, "Password": int(bool(server.password)), "Machine Name": server.server_name # ... modified code ... self.servers[server] = zeroconf.ServiceInfo( "_daap._tcp.local.", server.server_name + "._daap._tcp.local.", socket.inet_aton(ip), server.port, 0, 0, # ... rest of the code ...
275301d7a2c2e8c44ff1cfb3d49d9388f9531b56
invalidate_data.py
invalidate_data.py
import sys import logging import logging.config import config.global_configuration as global_conf import database.client import batch_analysis.invalidate def main(): """ Run a particular task. :args: Only argument is the id of the task to run :return: """ config = global_conf.load_global_config('config.yml') if __name__ == '__main__': # Only configure the logging if this is the main function, don't reconfigure logging.config.dictConfig(config['logging']) db_client = database.client.DatabaseClient(config=config) orbslam_ids = db_client.system_collection.find({'_type': 'systems.slam.orbslam2.ORBSLAM2'}, {'_id': True}) for system_id in orbslam_ids: logging.getLogger(__name__).info("Invalidating system {0}".format(system_id['_id'])) batch_analysis.invalidate.invalidate_system(db_client, system_id['_id']) if __name__ == '__main__': main()
import sys import logging import logging.config import config.global_configuration as global_conf import database.client import batch_analysis.invalidate def main(): """ Run a particular task. :args: Only argument is the id of the task to run :return: """ config = global_conf.load_global_config('config.yml') if __name__ == '__main__': # Only configure the logging if this is the main function, don't reconfigure logging.config.dictConfig(config['logging']) db_client = database.client.DatabaseClient(config=config) orbslam_ids = db_client.system_collection.find({'_type': 'systems.slam.orbslam2.ORBSLAM2'}, {'_id': True}) for system_id in orbslam_ids: logging.getLogger(__name__).info("Invalidating system {0}".format(system_id['_id'])) batch_analysis.invalidate.invalidate_system(db_client, system_id['_id']) failed_trials = db_client.trials_collection.find({'success': False}, {'_id': True}) for trial_id in failed_trials: logging.getLogger(__name__).info("Invalidating failed trial {0}".format(trial_id['_id'])) batch_analysis.invalidate.invalidate_trial_result(db_client, trial_id['_id']) if __name__ == '__main__': main()
Update invalidate to remove failed trials.
Update invalidate to remove failed trials.
Python
bsd-2-clause
jskinn/robot-vision-experiment-framework,jskinn/robot-vision-experiment-framework
import sys import logging import logging.config import config.global_configuration as global_conf import database.client import batch_analysis.invalidate def main(): """ Run a particular task. :args: Only argument is the id of the task to run :return: """ config = global_conf.load_global_config('config.yml') if __name__ == '__main__': # Only configure the logging if this is the main function, don't reconfigure logging.config.dictConfig(config['logging']) db_client = database.client.DatabaseClient(config=config) orbslam_ids = db_client.system_collection.find({'_type': 'systems.slam.orbslam2.ORBSLAM2'}, {'_id': True}) for system_id in orbslam_ids: logging.getLogger(__name__).info("Invalidating system {0}".format(system_id['_id'])) batch_analysis.invalidate.invalidate_system(db_client, system_id['_id']) + failed_trials = db_client.trials_collection.find({'success': False}, {'_id': True}) + for trial_id in failed_trials: + logging.getLogger(__name__).info("Invalidating failed trial {0}".format(trial_id['_id'])) + batch_analysis.invalidate.invalidate_trial_result(db_client, trial_id['_id']) + if __name__ == '__main__': main()
Update invalidate to remove failed trials.
## Code Before: import sys import logging import logging.config import config.global_configuration as global_conf import database.client import batch_analysis.invalidate def main(): """ Run a particular task. :args: Only argument is the id of the task to run :return: """ config = global_conf.load_global_config('config.yml') if __name__ == '__main__': # Only configure the logging if this is the main function, don't reconfigure logging.config.dictConfig(config['logging']) db_client = database.client.DatabaseClient(config=config) orbslam_ids = db_client.system_collection.find({'_type': 'systems.slam.orbslam2.ORBSLAM2'}, {'_id': True}) for system_id in orbslam_ids: logging.getLogger(__name__).info("Invalidating system {0}".format(system_id['_id'])) batch_analysis.invalidate.invalidate_system(db_client, system_id['_id']) if __name__ == '__main__': main() ## Instruction: Update invalidate to remove failed trials. ## Code After: import sys import logging import logging.config import config.global_configuration as global_conf import database.client import batch_analysis.invalidate def main(): """ Run a particular task. :args: Only argument is the id of the task to run :return: """ config = global_conf.load_global_config('config.yml') if __name__ == '__main__': # Only configure the logging if this is the main function, don't reconfigure logging.config.dictConfig(config['logging']) db_client = database.client.DatabaseClient(config=config) orbslam_ids = db_client.system_collection.find({'_type': 'systems.slam.orbslam2.ORBSLAM2'}, {'_id': True}) for system_id in orbslam_ids: logging.getLogger(__name__).info("Invalidating system {0}".format(system_id['_id'])) batch_analysis.invalidate.invalidate_system(db_client, system_id['_id']) failed_trials = db_client.trials_collection.find({'success': False}, {'_id': True}) for trial_id in failed_trials: logging.getLogger(__name__).info("Invalidating failed trial {0}".format(trial_id['_id'])) batch_analysis.invalidate.invalidate_trial_result(db_client, trial_id['_id']) if __name__ == '__main__': main()
// ... existing code ... failed_trials = db_client.trials_collection.find({'success': False}, {'_id': True}) for trial_id in failed_trials: logging.getLogger(__name__).info("Invalidating failed trial {0}".format(trial_id['_id'])) batch_analysis.invalidate.invalidate_trial_result(db_client, trial_id['_id']) // ... rest of the code ...
43ea528a1832c94dd7879f995a1c4cc8dfb2a315
pyroonga/tests/functional/conftest.py
pyroonga/tests/functional/conftest.py
import json import os import pytest from pyroonga.odm.table import tablebase, TableBase from pyroonga.tests import utils FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixture') FIXTURE_PATH = os.path.join(FIXTURE_DIR, 'dbfixture%s.json') @pytest.fixture def Table(request): class TableBaseForTest(TableBase): @utils.classproperty def __tablename__(cls): if not getattr(cls, '_tablename', None): cls._tablename = utils.gen_unique_tablename() return cls._tablename Tbl = tablebase(cls=TableBaseForTest) def remove_table(): utils.sendquery('table_remove %s' % Tbl.__tablename__) request.addfinalizer(remove_table) return Tbl @pytest.fixture def fixture1(): with open(FIXTURE_PATH % 1) as f: return json.load(f) @pytest.fixture def fixture2(): with open(FIXTURE_PATH % 2) as f: return json.load(f)
import json import os import pytest from pyroonga.odm.table import tablebase, TableBase from pyroonga.tests import utils FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixture') FIXTURE_PATH = os.path.join(FIXTURE_DIR, 'dbfixture%s.json') @pytest.fixture def Table(request): class TableBaseForTest(TableBase): @utils.classproperty def __tablename__(cls): if not getattr(cls, '_tablename', None): cls._tablename = utils.gen_unique_tablename() def remove_table(): utils.sendquery('table_remove %s' % cls._tablename) request.addfinalizer(remove_table) return cls._tablename Tbl = tablebase(cls=TableBaseForTest) return Tbl @pytest.fixture def fixture1(): with open(FIXTURE_PATH % 1) as f: return json.load(f) @pytest.fixture def fixture2(): with open(FIXTURE_PATH % 2) as f: return json.load(f)
Fix an issue where tables were not removed at the end of each test
Fix an issue where tables were not removed at the end of each test
Python
mit
naoina/pyroonga,naoina/pyroonga
import json import os import pytest from pyroonga.odm.table import tablebase, TableBase from pyroonga.tests import utils FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixture') FIXTURE_PATH = os.path.join(FIXTURE_DIR, 'dbfixture%s.json') @pytest.fixture def Table(request): class TableBaseForTest(TableBase): @utils.classproperty def __tablename__(cls): if not getattr(cls, '_tablename', None): cls._tablename = utils.gen_unique_tablename() + + def remove_table(): + utils.sendquery('table_remove %s' % cls._tablename) + request.addfinalizer(remove_table) return cls._tablename Tbl = tablebase(cls=TableBaseForTest) - - def remove_table(): - utils.sendquery('table_remove %s' % Tbl.__tablename__) - request.addfinalizer(remove_table) return Tbl @pytest.fixture def fixture1(): with open(FIXTURE_PATH % 1) as f: return json.load(f) @pytest.fixture def fixture2(): with open(FIXTURE_PATH % 2) as f: return json.load(f)
Fix an issue where tables were not removed at the end of each test
## Code Before: import json import os import pytest from pyroonga.odm.table import tablebase, TableBase from pyroonga.tests import utils FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixture') FIXTURE_PATH = os.path.join(FIXTURE_DIR, 'dbfixture%s.json') @pytest.fixture def Table(request): class TableBaseForTest(TableBase): @utils.classproperty def __tablename__(cls): if not getattr(cls, '_tablename', None): cls._tablename = utils.gen_unique_tablename() return cls._tablename Tbl = tablebase(cls=TableBaseForTest) def remove_table(): utils.sendquery('table_remove %s' % Tbl.__tablename__) request.addfinalizer(remove_table) return Tbl @pytest.fixture def fixture1(): with open(FIXTURE_PATH % 1) as f: return json.load(f) @pytest.fixture def fixture2(): with open(FIXTURE_PATH % 2) as f: return json.load(f) ## Instruction: Fix an issue where tables were not removed at the end of each test ## Code After: import json import os import pytest from pyroonga.odm.table import tablebase, TableBase from pyroonga.tests import utils FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixture') FIXTURE_PATH = os.path.join(FIXTURE_DIR, 'dbfixture%s.json') @pytest.fixture def Table(request): class TableBaseForTest(TableBase): @utils.classproperty def __tablename__(cls): if not getattr(cls, '_tablename', None): cls._tablename = utils.gen_unique_tablename() def remove_table(): utils.sendquery('table_remove %s' % cls._tablename) request.addfinalizer(remove_table) return cls._tablename Tbl = tablebase(cls=TableBaseForTest) return Tbl @pytest.fixture def fixture1(): with open(FIXTURE_PATH % 1) as f: return json.load(f) @pytest.fixture def fixture2(): with open(FIXTURE_PATH % 2) as f: return json.load(f)
// ... existing code ... cls._tablename = utils.gen_unique_tablename() def remove_table(): utils.sendquery('table_remove %s' % cls._tablename) request.addfinalizer(remove_table) return cls._tablename // ... modified code ... Tbl = tablebase(cls=TableBaseForTest) return Tbl // ... rest of the code ...
fb256b042a485aefa2d9e45b39daa551a3f779ff
examples/open_file_dialog.py
examples/open_file_dialog.py
import webview import threading """ This example demonstrates creating an open file dialog. """ def open_file_dialog(): import time time.sleep(5) print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True)) if __name__ == '__main__': t = threading.Thread(target=open_file_dialog) t.start() webview.create_window("Open file dialog example", "http://www.flowrl.com")
import webview import threading """ This example demonstrates creating an open file dialog. """ def open_file_dialog(): import time time.sleep(5) file_types = ('Image Files (*.bmp;*.jpg;*.gif)', 'All files (*.*)') print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True, file_types=file_types)) if __name__ == '__main__': t = threading.Thread(target=open_file_dialog) t.start() webview.create_window("Open file dialog example", "http://www.flowrl.com")
Modify example to include file_types param
[All] Modify example to include file_types param
Python
bsd-3-clause
r0x0r/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,shivaprsdv/pywebview,r0x0r/pywebview,r0x0r/pywebview
import webview import threading """ This example demonstrates creating an open file dialog. """ def open_file_dialog(): import time time.sleep(5) + file_types = ('Image Files (*.bmp;*.jpg;*.gif)', 'All files (*.*)') + - print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True)) + print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True, file_types=file_types)) if __name__ == '__main__': t = threading.Thread(target=open_file_dialog) t.start() webview.create_window("Open file dialog example", "http://www.flowrl.com")
Modify example to include file_types param
## Code Before: import webview import threading """ This example demonstrates creating an open file dialog. """ def open_file_dialog(): import time time.sleep(5) print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True)) if __name__ == '__main__': t = threading.Thread(target=open_file_dialog) t.start() webview.create_window("Open file dialog example", "http://www.flowrl.com") ## Instruction: Modify example to include file_types param ## Code After: import webview import threading """ This example demonstrates creating an open file dialog. """ def open_file_dialog(): import time time.sleep(5) file_types = ('Image Files (*.bmp;*.jpg;*.gif)', 'All files (*.*)') print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True, file_types=file_types)) if __name__ == '__main__': t = threading.Thread(target=open_file_dialog) t.start() webview.create_window("Open file dialog example", "http://www.flowrl.com")
# ... existing code ... time.sleep(5) file_types = ('Image Files (*.bmp;*.jpg;*.gif)', 'All files (*.*)') print(webview.create_file_dialog(webview.OPEN_DIALOG, allow_multiple=True, file_types=file_types)) # ... rest of the code ...
7c88ecf10c3197c337990c7f92c7ace6a85d316e
setup.py
setup.py
from distutils.core import setup from distutils.core import Extension setup(name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], )
import os from distutils.core import setup from distutils.core import Extension with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') with_extensions = (with_extensions.lower() != 'false') setup_kwargs = dict( name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ) setup_extension_kwargs = dict( ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) if with_extensions: setup_kwargs.update(setup_extension_kwargs) setup(**setup_kwargs)
Make compilation of extensions optional through an environment variable.
Make compilation of extensions optional through an environment variable.
Python
bsd-2-clause
akash1808/wrapt,github4ry/wrapt,wujuguang/wrapt,akash1808/wrapt,wujuguang/wrapt,pombredanne/wrapt,pombredanne/wrapt,GrahamDumpleton/wrapt,pombredanne/python-lazy-object-proxy,ionelmc/python-lazy-object-proxy,linglaiyao1314/wrapt,GrahamDumpleton/wrapt,linglaiyao1314/wrapt,pombredanne/python-lazy-object-proxy,ionelmc/python-lazy-object-proxy,github4ry/wrapt
+ import os + from distutils.core import setup from distutils.core import Extension - setup(name = 'wrapt', + with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') + with_extensions = (with_extensions.lower() != 'false') + + setup_kwargs = dict( + name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, - ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) + setup_extension_kwargs = dict( + ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], + ) + + if with_extensions: + setup_kwargs.update(setup_extension_kwargs) + + setup(**setup_kwargs) +
Make compilation of extensions optional through an environment variable.
## Code Before: from distutils.core import setup from distutils.core import Extension setup(name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) ## Instruction: Make compilation of extensions optional through an environment variable. ## Code After: import os from distutils.core import setup from distutils.core import Extension with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') with_extensions = (with_extensions.lower() != 'false') setup_kwargs = dict( name = 'wrapt', version = '0.9.0', description = 'Module for decorators, wrappers and monkey patching.', author = 'Graham Dumpleton', author_email = '[email protected]', license = 'BSD', url = 'https://github.com/GrahamDumpleton/wrapt', packages = ['wrapt'], package_dir={'wrapt': 'src'}, ) setup_extension_kwargs = dict( ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) if with_extensions: setup_kwargs.update(setup_extension_kwargs) setup(**setup_kwargs)
... import os from distutils.core import setup ... with_extensions = os.environ.get('WRAPT_EXTENSIONS', 'true') with_extensions = (with_extensions.lower() != 'false') setup_kwargs = dict( name = 'wrapt', version = '0.9.0', ... package_dir={'wrapt': 'src'}, ) setup_extension_kwargs = dict( ext_modules = [Extension("wrapt._wrappers", ["src/_wrappers.c"])], ) if with_extensions: setup_kwargs.update(setup_extension_kwargs) setup(**setup_kwargs) ...
88a6708061ccdc7d3ac4d031c48de44039937b54
frontends/etiquette_flask/etiquette_flask_entrypoint.py
frontends/etiquette_flask/etiquette_flask_entrypoint.py
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.contrib.fixers import backend backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) site = backend.site
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.middleware.proxy_fix import backend backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) site = backend.site
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
Replace werkzeug.contrib with werkzeug.middleware proxyfix. werkzeug.contrib has been deprecated, this is the new location of the proxyfix.
Python
bsd-3-clause
voussoir/etiquette,voussoir/etiquette,voussoir/etiquette
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' - import werkzeug.contrib.fixers + import werkzeug.middleware.proxy_fix import backend - backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) + backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) site = backend.site
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
## Code Before: ''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.contrib.fixers import backend backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) site = backend.site ## Instruction: Replace werkzeug.contrib with werkzeug.middleware proxyfix. ## Code After: ''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.middleware.proxy_fix import backend backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) site = backend.site
# ... existing code ... ''' import werkzeug.middleware.proxy_fix # ... modified code ... backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) # ... rest of the code ...
fa1f148b33c61e91044c19a88737abd2ec86c6bf
yunity/api/public/auth.py
yunity/api/public/auth.py
from django.contrib.auth import logout from django.middleware.csrf import get_token as generate_csrf_token_for_frontend from rest_framework import status, viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from yunity.api.serializers import UserSerializer, AuthLoginSerializer class AuthViewSet(viewsets.ViewSet): @list_route(methods=['get']) def status(self, request): """ Get the login state (logged in user) --- response_serializer: UserSerializer """ generate_csrf_token_for_frontend(request) if request.user.is_anonymous(): serializer = UserSerializer() else: serializer = UserSerializer(request.user) return Response(serializer.data) def create(self, request, **kwargs): """ Log in --- request_serializer: AuthLoginSerializer response_serializer: UserSerializer """ serializer = AuthLoginSerializer(data=request.data, context={'request': request}) if serializer.is_valid(): return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED) else: return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) @list_route(methods=['POST']) def logout(self, request, **kwargs): logout(request) return Response(status = status.HTTP_200_OK)
from django.contrib.auth import logout from django.middleware.csrf import get_token as generate_csrf_token_for_frontend from rest_framework import status, viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from yunity.api.serializers import UserSerializer, AuthLoginSerializer class AuthViewSet(viewsets.GenericViewSet): serializer_class = AuthLoginSerializer @list_route(methods=['get']) def status(self, request): """ Get the login state (logged in user) --- response_serializer: UserSerializer """ generate_csrf_token_for_frontend(request) if request.user.is_anonymous(): serializer = UserSerializer() else: serializer = UserSerializer(request.user) return Response(serializer.data) def create(self, request, **kwargs): """ Log in --- request_serializer: AuthLoginSerializer response_serializer: UserSerializer """ serializer = AuthLoginSerializer(data=request.data, context={'request': request}) if serializer.is_valid(): return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED) else: return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) @list_route(methods=['POST']) def logout(self, request, **kwargs): logout(request) return Response(status = status.HTTP_200_OK)
Enable easy login through browsable API (discovery through serializer_class)
Enable easy login through browsable API (discovery through serializer_class)
Python
agpl-3.0
yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core
from django.contrib.auth import logout from django.middleware.csrf import get_token as generate_csrf_token_for_frontend from rest_framework import status, viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from yunity.api.serializers import UserSerializer, AuthLoginSerializer - class AuthViewSet(viewsets.ViewSet): + class AuthViewSet(viewsets.GenericViewSet): + serializer_class = AuthLoginSerializer + @list_route(methods=['get']) def status(self, request): """ Get the login state (logged in user) --- response_serializer: UserSerializer """ generate_csrf_token_for_frontend(request) if request.user.is_anonymous(): serializer = UserSerializer() else: serializer = UserSerializer(request.user) return Response(serializer.data) def create(self, request, **kwargs): """ Log in --- request_serializer: AuthLoginSerializer response_serializer: UserSerializer """ serializer = AuthLoginSerializer(data=request.data, context={'request': request}) if serializer.is_valid(): return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED) else: return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) @list_route(methods=['POST']) def logout(self, request, **kwargs): logout(request) return Response(status = status.HTTP_200_OK)
Enable easy login through browsable API (discovery through serializer_class)
## Code Before: from django.contrib.auth import logout from django.middleware.csrf import get_token as generate_csrf_token_for_frontend from rest_framework import status, viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from yunity.api.serializers import UserSerializer, AuthLoginSerializer class AuthViewSet(viewsets.ViewSet): @list_route(methods=['get']) def status(self, request): """ Get the login state (logged in user) --- response_serializer: UserSerializer """ generate_csrf_token_for_frontend(request) if request.user.is_anonymous(): serializer = UserSerializer() else: serializer = UserSerializer(request.user) return Response(serializer.data) def create(self, request, **kwargs): """ Log in --- request_serializer: AuthLoginSerializer response_serializer: UserSerializer """ serializer = AuthLoginSerializer(data=request.data, context={'request': request}) if serializer.is_valid(): return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED) else: return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) @list_route(methods=['POST']) def logout(self, request, **kwargs): logout(request) return Response(status = status.HTTP_200_OK) ## Instruction: Enable easy login through browsable API (discovery through serializer_class) ## Code After: from django.contrib.auth import logout from django.middleware.csrf import get_token as generate_csrf_token_for_frontend from rest_framework import status, viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from yunity.api.serializers import UserSerializer, AuthLoginSerializer class AuthViewSet(viewsets.GenericViewSet): serializer_class = AuthLoginSerializer @list_route(methods=['get']) def status(self, request): """ Get the login state (logged in user) --- response_serializer: UserSerializer """ generate_csrf_token_for_frontend(request) if request.user.is_anonymous(): serializer = UserSerializer() else: serializer = UserSerializer(request.user) return Response(serializer.data) def create(self, request, **kwargs): """ Log in --- request_serializer: AuthLoginSerializer response_serializer: UserSerializer """ serializer = AuthLoginSerializer(data=request.data, context={'request': request}) if serializer.is_valid(): return Response(data=UserSerializer(request.user).data, status=status.HTTP_201_CREATED) else: return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST) @list_route(methods=['POST']) def logout(self, request, **kwargs): logout(request) return Response(status = status.HTTP_200_OK)
... class AuthViewSet(viewsets.GenericViewSet): serializer_class = AuthLoginSerializer @list_route(methods=['get']) ...
610bd0fb6f25f790b1ff6e4adb9d87f10233e39e
statirator/core/models.py
statirator/core/models.py
class TranslationsMixin(object): "Helper for getting transalations" SLUG_FIELD_FOR_TRANSLATIONS = 'slug' # Overide in models if needed LANG_FIELD_FOR_TRANSLATIONS = 'language' # Overide in models if needed def get_translations(self): "Query set for the translations" self_slug = getattr(self, self.SLUG_FIELD_FOR_TRANSLATIONS) self_lang = getattr(self, self.LANG_FIELD_FOR_TRANSLATIONS) slug = {self.SLUG_FIELD_FOR_TRANSLATIONS + '__exact': self_slug} lang = {self.LANG_FIELD_FOR_TRANSLATIONS + '__exact': self_lang} return self.__class__.objects.filter(**slug).exclude(**lang)
class TranslationsMixin(object): "Helper for getting transalations" SLUG_FIELD_FOR_TRANSLATIONS = 'slug' # Overide in models if needed LANG_FIELD_FOR_TRANSLATIONS = 'language' # Overide in models if needed def get_translations(self): "Query set for the translations" self_slug = getattr(self, self.SLUG_FIELD_FOR_TRANSLATIONS) self_lang = getattr(self, self.LANG_FIELD_FOR_TRANSLATIONS) slug = {self.SLUG_FIELD_FOR_TRANSLATIONS + '__exact': self_slug} lang = {self.LANG_FIELD_FOR_TRANSLATIONS + '__exact': self_lang} return self.__class__.objects.filter(**slug).exclude(**lang) def get_language(self): "Get the language display for this item's language" attr = 'get_{0}_display'.format(self.LANG_FIELD_FOR_TRANSLATIONS) return getattr(self, attr)()
Add get_language method for TranslationsMixin
Add get_language method for TranslationsMixin
Python
mit
MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator
class TranslationsMixin(object): "Helper for getting transalations" SLUG_FIELD_FOR_TRANSLATIONS = 'slug' # Overide in models if needed LANG_FIELD_FOR_TRANSLATIONS = 'language' # Overide in models if needed def get_translations(self): "Query set for the translations" self_slug = getattr(self, self.SLUG_FIELD_FOR_TRANSLATIONS) self_lang = getattr(self, self.LANG_FIELD_FOR_TRANSLATIONS) slug = {self.SLUG_FIELD_FOR_TRANSLATIONS + '__exact': self_slug} lang = {self.LANG_FIELD_FOR_TRANSLATIONS + '__exact': self_lang} return self.__class__.objects.filter(**slug).exclude(**lang) + def get_language(self): + "Get the language display for this item's language" + + attr = 'get_{0}_display'.format(self.LANG_FIELD_FOR_TRANSLATIONS) + return getattr(self, attr)() +
Add get_language method for TranslationsMixin
## Code Before: class TranslationsMixin(object): "Helper for getting transalations" SLUG_FIELD_FOR_TRANSLATIONS = 'slug' # Overide in models if needed LANG_FIELD_FOR_TRANSLATIONS = 'language' # Overide in models if needed def get_translations(self): "Query set for the translations" self_slug = getattr(self, self.SLUG_FIELD_FOR_TRANSLATIONS) self_lang = getattr(self, self.LANG_FIELD_FOR_TRANSLATIONS) slug = {self.SLUG_FIELD_FOR_TRANSLATIONS + '__exact': self_slug} lang = {self.LANG_FIELD_FOR_TRANSLATIONS + '__exact': self_lang} return self.__class__.objects.filter(**slug).exclude(**lang) ## Instruction: Add get_language method for TranslationsMixin ## Code After: class TranslationsMixin(object): "Helper for getting transalations" SLUG_FIELD_FOR_TRANSLATIONS = 'slug' # Overide in models if needed LANG_FIELD_FOR_TRANSLATIONS = 'language' # Overide in models if needed def get_translations(self): "Query set for the translations" self_slug = getattr(self, self.SLUG_FIELD_FOR_TRANSLATIONS) self_lang = getattr(self, self.LANG_FIELD_FOR_TRANSLATIONS) slug = {self.SLUG_FIELD_FOR_TRANSLATIONS + '__exact': self_slug} lang = {self.LANG_FIELD_FOR_TRANSLATIONS + '__exact': self_lang} return self.__class__.objects.filter(**slug).exclude(**lang) def get_language(self): "Get the language display for this item's language" attr = 'get_{0}_display'.format(self.LANG_FIELD_FOR_TRANSLATIONS) return getattr(self, attr)()
# ... existing code ...
        return self.__class__.objects.filter(**slug).exclude(**lang)

    def get_language(self):
        "Get the language display for this item's language"

        attr = 'get_{0}_display'.format(self.LANG_FIELD_FOR_TRANSLATIONS)
        return getattr(self, attr)()

# ... rest of the code ...
60743b33e5034776576073b151c7a02dc0a40b7e
tests/unit_project/test_fields.py
tests/unit_project/test_fields.py
from djangosanetesting.cases import DatabaseTestCase

from djangomarkup.fields import RichTextField
from djangomarkup.models import SourceText

from exampleapp.models import Article

class TestRichTextField(DatabaseTestCase):
    def setUp(self):
        super(TestRichTextField, self).setUp()
        self.field = RichTextField(
            instance = Article(),
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )

    def test_retrieve_empty_source_for_empty_article(self):
        self.assert_equals(u'', self.field.get_source().content)

    def test_source_available_for_empty_article(self):
        self.assert_equals(u'', self.field.get_source_text())

    def test_render_available_for_empty_article(self):
        self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())

from djangosanetesting.cases import UnitTestCase

from djangomarkup.fields import RichTextField

from exampleapp.models import Article

class TestRichTextField(UnitTestCase):
    def setUp(self):
        super(TestRichTextField, self).setUp()
        self.field = RichTextField(
            instance = Article(),
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )

    def test_retrieve_empty_source_for_empty_article(self):
        self.assert_equals(u'', self.field.get_source().content)

    def test_source_available_for_empty_article(self):
        self.assert_equals(u'', self.field.get_source_text())

    def test_render_available_for_empty_article(self):
        self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip())

    def test_value_error_raised_when_accessing_source_without_instance(self):
        field = RichTextField(
            instance = None,
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )
        self.assert_raises(ValueError, field.get_source)
Check proper error when accessing source without instance
Check proper error when accessing source without instance
Python
bsd-3-clause
ella/django-markup
- from djangosanetesting.cases import DatabaseTestCase + from djangosanetesting.cases import UnitTestCase from djangomarkup.fields import RichTextField - from djangomarkup.models import SourceText from exampleapp.models import Article - class TestRichTextField(DatabaseTestCase): + class TestRichTextField(UnitTestCase): def setUp(self): super(TestRichTextField, self).setUp() self.field = RichTextField( instance = Article(), model = Article, syntax_processor_name = "markdown", field_name = "text", required = True, label = "Text" ) def test_retrieve_empty_source_for_empty_article(self): self.assert_equals(u'', self.field.get_source().content) def test_source_available_for_empty_article(self): self.assert_equals(u'', self.field.get_source_text()) def test_render_available_for_empty_article(self): self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip()) + def test_value_error_raised_when_accessing_source_without_instance(self): + field = RichTextField( + instance = None, + model = Article, + syntax_processor_name = "markdown", + field_name = "text", + required = True, + label = "Text" + ) + self.assert_raises(ValueError, field.get_source)
Check proper error when accessing source without instance
## Code Before: from djangosanetesting.cases import DatabaseTestCase from djangomarkup.fields import RichTextField from djangomarkup.models import SourceText from exampleapp.models import Article class TestRichTextField(DatabaseTestCase): def setUp(self): super(TestRichTextField, self).setUp() self.field = RichTextField( instance = Article(), model = Article, syntax_processor_name = "markdown", field_name = "text", required = True, label = "Text" ) def test_retrieve_empty_source_for_empty_article(self): self.assert_equals(u'', self.field.get_source().content) def test_source_available_for_empty_article(self): self.assert_equals(u'', self.field.get_source_text()) def test_render_available_for_empty_article(self): self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip()) ## Instruction: Check proper error when accessing source without instance ## Code After: from djangosanetesting.cases import UnitTestCase from djangomarkup.fields import RichTextField from exampleapp.models import Article class TestRichTextField(UnitTestCase): def setUp(self): super(TestRichTextField, self).setUp() self.field = RichTextField( instance = Article(), model = Article, syntax_processor_name = "markdown", field_name = "text", required = True, label = "Text" ) def test_retrieve_empty_source_for_empty_article(self): self.assert_equals(u'', self.field.get_source().content) def test_source_available_for_empty_article(self): self.assert_equals(u'', self.field.get_source_text()) def test_render_available_for_empty_article(self): self.assert_equals(u'<p></p>', self.field.get_rendered_text().strip()) def test_value_error_raised_when_accessing_source_without_instance(self): field = RichTextField( instance = None, model = Article, syntax_processor_name = "markdown", field_name = "text", required = True, label = "Text" ) self.assert_raises(ValueError, field.get_source)
# ... existing code ...
from djangosanetesting.cases import UnitTestCase

# ... modified code ...
from djangomarkup.fields import RichTextField

...
class TestRichTextField(UnitTestCase):
...
    def test_value_error_raised_when_accessing_source_without_instance(self):
        field = RichTextField(
            instance = None,
            model = Article,
            syntax_processor_name = "markdown",
            field_name = "text",
            required = True,
            label = "Text"
        )
        self.assert_raises(ValueError, field.get_source)
# ... rest of the code ...
6bc6a07ee60f68e2003b5afcc752c3820a176541
astropy/conftest.py
astropy/conftest.py
from .tests.pytest_plugins import *

try:
    import matplotlib
except ImportError:
    pass
else:
    matplotlib.use('Agg')

enable_deprecations_as_exceptions(include_astropy_deprecations=False)

from .tests.pytest_plugins import *

try:
    import matplotlib
except ImportError:
    pass
else:
    matplotlib.use('Agg')

enable_deprecations_as_exceptions(include_astropy_deprecations=False)

PYTEST_HEADER_MODULES['Cython'] = 'cython'
Add Cython to py.test header
Add Cython to py.test header
Python
bsd-3-clause
kelle/astropy,tbabej/astropy,lpsinger/astropy,joergdietrich/astropy,pllim/astropy,MSeifert04/astropy,AustereCuriosity/astropy,saimn/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,tbabej/astropy,mhvk/astropy,DougBurke/astropy,pllim/astropy,StuartLittlefair/astropy,astropy/astropy,kelle/astropy,AustereCuriosity/astropy,pllim/astropy,funbaker/astropy,mhvk/astropy,larrybradley/astropy,dhomeier/astropy,larrybradley/astropy,astropy/astropy,MSeifert04/astropy,DougBurke/astropy,astropy/astropy,kelle/astropy,saimn/astropy,bsipocz/astropy,kelle/astropy,stargaser/astropy,lpsinger/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,astropy/astropy,AustereCuriosity/astropy,bsipocz/astropy,stargaser/astropy,dhomeier/astropy,stargaser/astropy,DougBurke/astropy,larrybradley/astropy,mhvk/astropy,MSeifert04/astropy,tbabej/astropy,pllim/astropy,StuartLittlefair/astropy,lpsinger/astropy,StuartLittlefair/astropy,stargaser/astropy,funbaker/astropy,lpsinger/astropy,saimn/astropy,pllim/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,astropy/astropy,larrybradley/astropy,mhvk/astropy,lpsinger/astropy,dhomeier/astropy,funbaker/astropy,mhvk/astropy,larrybradley/astropy,MSeifert04/astropy,funbaker/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,saimn/astropy,dhomeier/astropy,AustereCuriosity/astropy,saimn/astropy,joergdietrich/astropy,StuartLittlefair/astropy,dhomeier/astropy,DougBurke/astropy,tbabej/astropy,tbabej/astropy,joergdietrich/astropy,kelle/astropy,bsipocz/astropy,joergdietrich/astropy
from .tests.pytest_plugins import * try: import matplotlib except ImportError: pass else: matplotlib.use('Agg') enable_deprecations_as_exceptions(include_astropy_deprecations=False) + PYTEST_HEADER_MODULES['Cython'] = 'cython' +
Add Cython to py.test header
## Code Before: from .tests.pytest_plugins import * try: import matplotlib except ImportError: pass else: matplotlib.use('Agg') enable_deprecations_as_exceptions(include_astropy_deprecations=False) ## Instruction: Add Cython to py.test header ## Code After: from .tests.pytest_plugins import * try: import matplotlib except ImportError: pass else: matplotlib.use('Agg') enable_deprecations_as_exceptions(include_astropy_deprecations=False) PYTEST_HEADER_MODULES['Cython'] = 'cython'
// ... existing code ...
enable_deprecations_as_exceptions(include_astropy_deprecations=False)

PYTEST_HEADER_MODULES['Cython'] = 'cython'
// ... rest of the code ...
c458126baac92e1152026f51a9d3a544e8c6826f
testsV2/ut_repy2api_copycontext.py
testsV2/ut_repy2api_copycontext.py
#pragma repy

# Create an almost shallow copy of _context
# Contained self-reference is moved from _context to _context_copy
_context_copy = _context.copy()

repr(_context)
repr(_context_copy)

#pragma repy

# Create an "almost" shallow copy of _context, i.e. the contained reference
# to _context is not copied as such but is changed to reference the new
# _context_copy.
# In consequence repr immediately truncates the contained self-reference
# ("{...}") to prevent an infinite loop.
# Caveat: In a real shallow copy, repr would only truncate the context
# contained in the contained context (3rd level).
_context_copy = _context.copy()

repr(_context)
repr(_context_copy)
Update comment in copycontext unit test
Update comment in copycontext unit test Following @vladimir-v-diaz's review comment this change adds more information about how repr works with Python dicts and with repy's SafeDict to the unit test's comments. Even more information can be found on the issue tracker SeattleTestbed/repy_v2#97.
Python
mit
SeattleTestbed/repy_v2
#pragma repy - # Create an almost shallow copy of _context - # Contained self-reference is moved from _context to _context_copy + # Create an "almost" shallow copy of _context, i.e. the contained reference + # to _context is not copied as such but is changed to reference the new + # _context_copy. + # In consequence repr immediately truncates the contained self-reference + # ("{...}") to prevent an infinite loop. + # Caveat: In a real shallow copy, repr would only truncate the context + # contained in the contained context (3rd level). _context_copy = _context.copy() repr(_context) repr(_context_copy)
Update comment in copycontext unit test
## Code Before: #pragma repy # Create an almost shallow copy of _context # Contained self-reference is moved from _context to _context_copy _context_copy = _context.copy() repr(_context) repr(_context_copy) ## Instruction: Update comment in copycontext unit test ## Code After: #pragma repy # Create an "almost" shallow copy of _context, i.e. the contained reference # to _context is not copied as such but is changed to reference the new # _context_copy. # In consequence repr immediately truncates the contained self-reference # ("{...}") to prevent an infinite loop. # Caveat: In a real shallow copy, repr would only truncate the context # contained in the contained context (3rd level). _context_copy = _context.copy() repr(_context) repr(_context_copy)
// ... existing code ...
# Create an "almost" shallow copy of _context, i.e. the contained reference
# to _context is not copied as such but is changed to reference the new
# _context_copy.
# In consequence repr immediately truncates the contained self-reference
# ("{...}") to prevent an infinite loop.
# Caveat: In a real shallow copy, repr would only truncate the context
# contained in the contained context (3rd level).
_context_copy = _context.copy()
// ... rest of the code ...
c0ec6a6a799ab86562b07326eeaf21da4fd23dff
rejected/log.py
rejected/log.py
import logging


class CorrelationFilter(logging.Formatter):
    """Filter records that have a correlation_id"""

    def __init__(self, exists=None):
        super(CorrelationFilter, self).__init__()
        self.exists = exists

    def filter(self, record):
        if self.exists:
            return hasattr(record, 'correlation_id')
        return not hasattr(record, 'correlation_id')


class CorrelationAdapter(logging.LoggerAdapter):
    """A LoggerAdapter that appends the a correlation ID to the message
    record properties.

    """
    def __init__(self, logger, consumer):
        self.logger = logger
        self.consumer = consumer

    def process(self, msg, kwargs):
        """Process the logging message and keyword arguments passed in to
        a logging call to insert contextual information.

        :param str msg: The message to process
        :param dict kwargs: The kwargs to append
        :rtype: (str, dict)

        """
        kwargs['extra'] = {'correlation_id': self.consumer.correlation_id}
        return msg, kwargs

import logging


class CorrelationFilter(logging.Formatter):
    """Filter records that have a correlation_id"""

    def __init__(self, exists=None):
        super(CorrelationFilter, self).__init__()
        self.exists = exists

    def filter(self, record):
        if self.exists:
            return hasattr(record, 'correlation_id')
        return not hasattr(record, 'correlation_id')


class CorrelationAdapter(logging.LoggerAdapter):
    """A LoggerAdapter that appends the a correlation ID to the message
    record properties.

    """
    def __init__(self, logger, consumer):
        self.logger = logger
        self.consumer = consumer

    def process(self, msg, kwargs):
        """Process the logging message and keyword arguments passed in to
        a logging call to insert contextual information.

        :param str msg: The message to process
        :param dict kwargs: The kwargs to append
        :rtype: (str, dict)

        """
        kwargs['extra'] = {'correlation_id': self.consumer.correlation_id,
                           'consumer': self.consumer.name}
        return msg, kwargs
Add the consumer name to the extra values
Add the consumer name to the extra values
Python
bsd-3-clause
gmr/rejected,gmr/rejected
import logging class CorrelationFilter(logging.Formatter): """Filter records that have a correlation_id""" def __init__(self, exists=None): super(CorrelationFilter, self).__init__() self.exists = exists def filter(self, record): if self.exists: return hasattr(record, 'correlation_id') return not hasattr(record, 'correlation_id') class CorrelationAdapter(logging.LoggerAdapter): """A LoggerAdapter that appends the a correlation ID to the message record properties. """ def __init__(self, logger, consumer): self.logger = logger self.consumer = consumer def process(self, msg, kwargs): """Process the logging message and keyword arguments passed in to a logging call to insert contextual information. :param str msg: The message to process :param dict kwargs: The kwargs to append :rtype: (str, dict) """ - kwargs['extra'] = {'correlation_id': self.consumer.correlation_id} + kwargs['extra'] = {'correlation_id': self.consumer.correlation_id, + 'consumer': self.consumer.name} return msg, kwargs
Add the consumer name to the extra values
## Code Before: import logging class CorrelationFilter(logging.Formatter): """Filter records that have a correlation_id""" def __init__(self, exists=None): super(CorrelationFilter, self).__init__() self.exists = exists def filter(self, record): if self.exists: return hasattr(record, 'correlation_id') return not hasattr(record, 'correlation_id') class CorrelationAdapter(logging.LoggerAdapter): """A LoggerAdapter that appends the a correlation ID to the message record properties. """ def __init__(self, logger, consumer): self.logger = logger self.consumer = consumer def process(self, msg, kwargs): """Process the logging message and keyword arguments passed in to a logging call to insert contextual information. :param str msg: The message to process :param dict kwargs: The kwargs to append :rtype: (str, dict) """ kwargs['extra'] = {'correlation_id': self.consumer.correlation_id} return msg, kwargs ## Instruction: Add the consumer name to the extra values ## Code After: import logging class CorrelationFilter(logging.Formatter): """Filter records that have a correlation_id""" def __init__(self, exists=None): super(CorrelationFilter, self).__init__() self.exists = exists def filter(self, record): if self.exists: return hasattr(record, 'correlation_id') return not hasattr(record, 'correlation_id') class CorrelationAdapter(logging.LoggerAdapter): """A LoggerAdapter that appends the a correlation ID to the message record properties. """ def __init__(self, logger, consumer): self.logger = logger self.consumer = consumer def process(self, msg, kwargs): """Process the logging message and keyword arguments passed in to a logging call to insert contextual information. :param str msg: The message to process :param dict kwargs: The kwargs to append :rtype: (str, dict) """ kwargs['extra'] = {'correlation_id': self.consumer.correlation_id, 'consumer': self.consumer.name} return msg, kwargs
# ... existing code ...
        """
        kwargs['extra'] = {'correlation_id': self.consumer.correlation_id,
                           'consumer': self.consumer.name}
        return msg, kwargs
# ... rest of the code ...
132b148ca8701ee867b7a08432a3595a213ce470
cedexis/radar/tests/test_cli.py
cedexis/radar/tests/test_cli.py
import unittest
import types

import cedexis.radar.cli

class TestCommandLineInterface(unittest.TestCase):

    def test_main(self):
        self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType))

import unittest
from unittest.mock import patch, MagicMock, call
import types
from pprint import pprint

import cedexis.radar.cli

class TestCommandLineInterface(unittest.TestCase):

    def test_main(self):
        self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType))

    @patch('logging.getLogger')
    @patch('argparse.ArgumentParser')
    @patch('cedexis.radar.run_session')
    @patch('time.sleep')
    def test_config_file_with_cli_params(self, mock_sleep, mock_run_session,
            mock_ArgumentParser, mock_getLogger):
        args = make_default_args()
        args.continuous = True
        args.max_runs = 3
        args.repeat_delay = 60
        mock_parser = MagicMock()
        mock_parser.parse_args.return_value = args
        mock_ArgumentParser.return_value = mock_parser
        cedexis.radar.cli.main()

        # Assert
        # print(mock_run_session.call_args)
        self.assertEqual(
            mock_run_session.call_args_list,
            [
                call(1, 12345, 'sandbox', False, None, None, False, None),
                call(1, 12345, 'sandbox', False, None, None, False, None),
                call(1, 12345, 'sandbox', False, None, None, False, None)
            ])
        # print(mock_sleep.call_args)
        self.assertEqual(mock_sleep.call_args_list, [call(60),call(60)])

def make_default_args():
    args = lambda: None
    args.zone_id = 1
    args.customer_id = 12345
    args.api_key = 'sandbox'
    args.secure = False
    args.config_file = 'some config file path'
    args.tracer = None
    args.provider_id = None
    args.report_server = None
    args.max_runs = None
    args.repeat_delay = None
    return args
Add unit test for overrides
Add unit test for overrides
Python
mit
cedexis/cedexis.radar
import unittest + from unittest.mock import patch, MagicMock, call import types + from pprint import pprint import cedexis.radar.cli class TestCommandLineInterface(unittest.TestCase): def test_main(self): self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType)) + @patch('logging.getLogger') + @patch('argparse.ArgumentParser') + @patch('cedexis.radar.run_session') + @patch('time.sleep') + def test_config_file_with_cli_params(self, mock_sleep, mock_run_session, + mock_ArgumentParser, mock_getLogger): + args = make_default_args() + args.continuous = True + args.max_runs = 3 + args.repeat_delay = 60 + mock_parser = MagicMock() + mock_parser.parse_args.return_value = args + mock_ArgumentParser.return_value = mock_parser + cedexis.radar.cli.main() + + # Assert + # print(mock_run_session.call_args) + self.assertEqual( + mock_run_session.call_args_list, + [ + call(1, 12345, 'sandbox', False, None, None, False, None), + call(1, 12345, 'sandbox', False, None, None, False, None), + call(1, 12345, 'sandbox', False, None, None, False, None) + ]) + # print(mock_sleep.call_args) + self.assertEqual(mock_sleep.call_args_list, [call(60),call(60)]) + + def make_default_args(): + args = lambda: None + args.zone_id = 1 + args.customer_id = 12345 + args.api_key = 'sandbox' + args.secure = False + args.config_file = 'some config file path' + args.tracer = None + args.provider_id = None + args.report_server = None + args.max_runs = None + args.repeat_delay = None + return args +
Add unit test for overrides
## Code Before: import unittest import types import cedexis.radar.cli class TestCommandLineInterface(unittest.TestCase): def test_main(self): self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType)) ## Instruction: Add unit test for overrides ## Code After: import unittest from unittest.mock import patch, MagicMock, call import types from pprint import pprint import cedexis.radar.cli class TestCommandLineInterface(unittest.TestCase): def test_main(self): self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType)) @patch('logging.getLogger') @patch('argparse.ArgumentParser') @patch('cedexis.radar.run_session') @patch('time.sleep') def test_config_file_with_cli_params(self, mock_sleep, mock_run_session, mock_ArgumentParser, mock_getLogger): args = make_default_args() args.continuous = True args.max_runs = 3 args.repeat_delay = 60 mock_parser = MagicMock() mock_parser.parse_args.return_value = args mock_ArgumentParser.return_value = mock_parser cedexis.radar.cli.main() # Assert # print(mock_run_session.call_args) self.assertEqual( mock_run_session.call_args_list, [ call(1, 12345, 'sandbox', False, None, None, False, None), call(1, 12345, 'sandbox', False, None, None, False, None), call(1, 12345, 'sandbox', False, None, None, False, None) ]) # print(mock_sleep.call_args) self.assertEqual(mock_sleep.call_args_list, [call(60),call(60)]) def make_default_args(): args = lambda: None args.zone_id = 1 args.customer_id = 12345 args.api_key = 'sandbox' args.secure = False args.config_file = 'some config file path' args.tracer = None args.provider_id = None args.report_server = None args.max_runs = None args.repeat_delay = None return args
// ... existing code ...
import unittest
from unittest.mock import patch, MagicMock, call
import types
from pprint import pprint
// ... modified code ...
        self.assertTrue(isinstance(cedexis.radar.cli.main, types.FunctionType))

    @patch('logging.getLogger')
    @patch('argparse.ArgumentParser')
    @patch('cedexis.radar.run_session')
    @patch('time.sleep')
    def test_config_file_with_cli_params(self, mock_sleep, mock_run_session,
            mock_ArgumentParser, mock_getLogger):
        args = make_default_args()
        args.continuous = True
        args.max_runs = 3
        args.repeat_delay = 60
        mock_parser = MagicMock()
        mock_parser.parse_args.return_value = args
        mock_ArgumentParser.return_value = mock_parser
        cedexis.radar.cli.main()

        # Assert
        # print(mock_run_session.call_args)
        self.assertEqual(
            mock_run_session.call_args_list,
            [
                call(1, 12345, 'sandbox', False, None, None, False, None),
                call(1, 12345, 'sandbox', False, None, None, False, None),
                call(1, 12345, 'sandbox', False, None, None, False, None)
            ])
        # print(mock_sleep.call_args)
        self.assertEqual(mock_sleep.call_args_list, [call(60),call(60)])

def make_default_args():
    args = lambda: None
    args.zone_id = 1
    args.customer_id = 12345
    args.api_key = 'sandbox'
    args.secure = False
    args.config_file = 'some config file path'
    args.tracer = None
    args.provider_id = None
    args.report_server = None
    args.max_runs = None
    args.repeat_delay = None
    return args
// ... rest of the code ...
8653f2c0e63fecd5617dfa063878c846ddafcf97
tests/test_add_language/test_update_language_list.py
tests/test_add_language/test_update_language_list.py
from __future__ import unicode_literals

import json
import os
import os.path

import nose.tools as nose

import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down


@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
    """should add new languages to language list"""
    kln_language_id = 'kln'
    kln_language_name = 'Klingon'
    add_lang.update_language_list(kln_language_id, kln_language_name)
    langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
    with open(langs_path, 'r') as langs_file:
        langs = json.load(langs_file)
    kln_lang = None
    for lang in langs:
        if lang['id'] == kln_language_id:
            kln_lang = lang
    nose.assert_is_not_none(kln_lang)
    nose.assert_equal(kln_lang['name'], kln_language_name)

from __future__ import unicode_literals

import json
import os
import os.path

import nose.tools as nose

import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down


@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
    """should add new languages to language list"""
    new_language_id = 'kln'
    new_language_name = 'Klingon'
    langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
    with open(langs_path, 'r') as langs_file:
        langs = json.load(langs_file)
        orig_num_langs = len(langs)
    add_lang.update_language_list(new_language_id, new_language_name)
    with open(langs_path, 'r') as langs_file:
        langs = json.load(langs_file)
        num_langs = len(langs)
        nose.assert_equal(num_langs, orig_num_langs + 1)
    new_lang = None
    for lang in langs:
        if lang['id'] == new_language_id:
            new_lang = lang
    nose.assert_is_not_none(new_lang)
    nose.assert_equal(new_lang['name'], new_language_name)
Add additional checks to update_language_list test
Add additional checks to update_language_list test Also make language variable names independent of their actual values.
Python
mit
caleb531/youversion-suggest,caleb531/youversion-suggest
from __future__ import unicode_literals import json import os import os.path import nose.tools as nose import yvs.shared as yvs import utilities.add_language as add_lang from tests.test_add_language import set_up, tear_down @nose.with_setup(set_up, tear_down) def test_update_languge_list_add(): """should add new languages to language list""" - kln_language_id = 'kln' + new_language_id = 'kln' - kln_language_name = 'Klingon' + new_language_name = 'Klingon' - add_lang.update_language_list(kln_language_id, kln_language_name) langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json') with open(langs_path, 'r') as langs_file: langs = json.load(langs_file) + orig_num_langs = len(langs) + add_lang.update_language_list(new_language_id, new_language_name) + with open(langs_path, 'r') as langs_file: + langs = json.load(langs_file) + num_langs = len(langs) + nose.assert_equal(num_langs, orig_num_langs + 1) - kln_lang = None + new_lang = None for lang in langs: - if lang['id'] == kln_language_id: + if lang['id'] == new_language_id: - kln_lang = lang + new_lang = lang - nose.assert_is_not_none(kln_lang) + nose.assert_is_not_none(new_lang) - nose.assert_equal(kln_lang['name'], kln_language_name) + nose.assert_equal(new_lang['name'], new_language_name)
Add additional checks to update_language_list test
## Code Before: from __future__ import unicode_literals import json import os import os.path import nose.tools as nose import yvs.shared as yvs import utilities.add_language as add_lang from tests.test_add_language import set_up, tear_down @nose.with_setup(set_up, tear_down) def test_update_languge_list_add(): """should add new languages to language list""" kln_language_id = 'kln' kln_language_name = 'Klingon' add_lang.update_language_list(kln_language_id, kln_language_name) langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json') with open(langs_path, 'r') as langs_file: langs = json.load(langs_file) kln_lang = None for lang in langs: if lang['id'] == kln_language_id: kln_lang = lang nose.assert_is_not_none(kln_lang) nose.assert_equal(kln_lang['name'], kln_language_name) ## Instruction: Add additional checks to update_language_list test ## Code After: from __future__ import unicode_literals import json import os import os.path import nose.tools as nose import yvs.shared as yvs import utilities.add_language as add_lang from tests.test_add_language import set_up, tear_down @nose.with_setup(set_up, tear_down) def test_update_languge_list_add(): """should add new languages to language list""" new_language_id = 'kln' new_language_name = 'Klingon' langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json') with open(langs_path, 'r') as langs_file: langs = json.load(langs_file) orig_num_langs = len(langs) add_lang.update_language_list(new_language_id, new_language_name) with open(langs_path, 'r') as langs_file: langs = json.load(langs_file) num_langs = len(langs) nose.assert_equal(num_langs, orig_num_langs + 1) new_lang = None for lang in langs: if lang['id'] == new_language_id: new_lang = lang nose.assert_is_not_none(new_lang) nose.assert_equal(new_lang['name'], new_language_name)
...
    """should add new languages to language list"""
    new_language_id = 'kln'
    new_language_name = 'Klingon'
    langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
...
        langs = json.load(langs_file)
        orig_num_langs = len(langs)
    add_lang.update_language_list(new_language_id, new_language_name)
    with open(langs_path, 'r') as langs_file:
        langs = json.load(langs_file)
        num_langs = len(langs)
        nose.assert_equal(num_langs, orig_num_langs + 1)
    new_lang = None
    for lang in langs:
        if lang['id'] == new_language_id:
            new_lang = lang
    nose.assert_is_not_none(new_lang)
    nose.assert_equal(new_lang['name'], new_language_name)
...
bb808bfe43154afa5b11265e4b5651183c7f87f0
armstrong/hatband/sites.py
armstrong/hatband/sites.py
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site


class HatbandAndDjangoRegistry(object):
    def __init__(self, site, default_site=None):
        if default_site is None:
            default_site = django_site
        super(HatbandAndDjangoRegistry, self).__init__()
        self._site = site
        self._registry = {}
        self.dicts = [self._registry, default_site._registry]

    def items(self):
        for d in self.dicts:
            for item in d.items():
                yield item

    def iteritems(self):
        return iter(self.items())

    def __contains__(self, k):
        for d in self.dicts:
            if k in d:
                return True
        return False


class AdminSite(DjangoAdminSite):
    def get_urls(self):
        from django.conf.urls.defaults import patterns, url
        return patterns('',
            # Custom hatband Views here
        ) + super(AdminSite, self).get_urls()


site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)

from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site


class HatbandAndDjangoRegistry(object):
    def __init__(self, site, default_site=None):
        if default_site is None:
            default_site = django_site
        super(HatbandAndDjangoRegistry, self).__init__()
        self._site = site
        self._registry = {}
        self.dicts = [self._registry, default_site._registry]

    def items(self):
        for d in self.dicts:
            for item in d.items():
                yield item

    def iteritems(self):
        return iter(self.items())

    def __contains__(self, k):
        for d in self.dicts:
            if k in d:
                return True
        return False

    def __setitem__(self, k, v):
        self._registry[k] = v


class AdminSite(DjangoAdminSite):
    def get_urls(self):
        from django.conf.urls.defaults import patterns, url
        return patterns('',
            # Custom hatband Views here
        ) + super(AdminSite, self).get_urls()


site = AdminSite()
site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
Make sure __setitem__ is available for site.register()
Make sure __setitem__ is available for site.register()
Python
apache-2.0
armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband
from django.contrib.admin.sites import AdminSite as DjangoAdminSite from django.contrib.admin.sites import site as django_site class HatbandAndDjangoRegistry(object): def __init__(self, site, default_site=None): if default_site is None: default_site = django_site super(HatbandAndDjangoRegistry, self).__init__() self._site = site self._registry = {} self.dicts = [self._registry, default_site._registry] def items(self): for d in self.dicts: for item in d.items(): yield item def iteritems(self): return iter(self.items()) def __contains__(self, k): for d in self.dicts: if k in d: return True return False + def __setitem__(self, k, v): + self._registry[k] = v + class AdminSite(DjangoAdminSite): def get_urls(self): from django.conf.urls.defaults import patterns, url return patterns('', # Custom hatband Views here ) + super(AdminSite, self).get_urls() site = AdminSite() site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
Make sure __setitem__ is available for site.register()
## Code Before: from django.contrib.admin.sites import AdminSite as DjangoAdminSite from django.contrib.admin.sites import site as django_site class HatbandAndDjangoRegistry(object): def __init__(self, site, default_site=None): if default_site is None: default_site = django_site super(HatbandAndDjangoRegistry, self).__init__() self._site = site self._registry = {} self.dicts = [self._registry, default_site._registry] def items(self): for d in self.dicts: for item in d.items(): yield item def iteritems(self): return iter(self.items()) def __contains__(self, k): for d in self.dicts: if k in d: return True return False class AdminSite(DjangoAdminSite): def get_urls(self): from django.conf.urls.defaults import patterns, url return patterns('', # Custom hatband Views here ) + super(AdminSite, self).get_urls() site = AdminSite() site._registry = HatbandAndDjangoRegistry(site, default_site=django_site) ## Instruction: Make sure __setitem__ is available for site.register() ## Code After: from django.contrib.admin.sites import AdminSite as DjangoAdminSite from django.contrib.admin.sites import site as django_site class HatbandAndDjangoRegistry(object): def __init__(self, site, default_site=None): if default_site is None: default_site = django_site super(HatbandAndDjangoRegistry, self).__init__() self._site = site self._registry = {} self.dicts = [self._registry, default_site._registry] def items(self): for d in self.dicts: for item in d.items(): yield item def iteritems(self): return iter(self.items()) def __contains__(self, k): for d in self.dicts: if k in d: return True return False def __setitem__(self, k, v): self._registry[k] = v class AdminSite(DjangoAdminSite): def get_urls(self): from django.conf.urls.defaults import patterns, url return patterns('', # Custom hatband Views here ) + super(AdminSite, self).get_urls() site = AdminSite() site._registry = HatbandAndDjangoRegistry(site, default_site=django_site)
...
    def __setitem__(self, k, v):
        self._registry[k] = v
...
f5728e24ba6dec2d2d7c2eff7888137e91469094
overlay/Data.py
overlay/Data.py
import time


class Data:
    def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path):
        # general settings
        self.width = 1296
        self.height = 972
        self.padding = 5
        self.frame_path = frame_path

        # date/time settings
        self.time = time.localtime(secs_since_epoch)
        self.frame_date = time.strftime("%B %d, %Y", self.time)
        self.frame_time = time.strftime("%I:%M:%S %p", self.time)
        self.font_size = 22
        self.text_color = "rgb(255,255,255)"

        # charts
        self.depth_chart = depth_chart.to_svg()
        self.temperature_chart = temperature_chart.to_svg()

    @property
    def datetime_x(self):
        return self.width - self.padding

    @property
    def depth_background_y(self):
        return self.height - 3 * self.padding - self.depth_graph_height

    @property
    def depth_background_width(self):
        return self.depth_graph_width + 2 * self.padding

    @property
    def depth_background_height(self):
        return self.depth_graph_height + 2 * self.padding

    @property
    def depth_text_x(self):
        return self.depth_background_width * 0.5

    @property
    def depth_text_y(self):
        return self.depth_background_height - self.padding

import time


class Data:
    def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path):
        # general settings
        self.width = 1296
        self.height = 972
        self.padding = 5
        self.frame_path = frame_path

        # date/time settings
        local_time = time.localtime(secs_since_epoch)
        self.frame_date = time.strftime("%B %d, %Y", local_time)
        self.frame_time = time.strftime("%I:%M:%S %p", local_time)
        self.font_size = 22
        self.text_color = "rgb(255,255,255)"
        self.datetime_x = self.width - self.padding

        # charts
        self.depth_chart = depth_chart.to_svg()
        self.temperature_chart = temperature_chart.to_svg()
Remove unneeded properties from main data object
Remove unneeded properties from main data object
Python
mit
thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
import time class Data: def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path): # general settings self.width = 1296 self.height = 972 self.padding = 5 self.frame_path = frame_path # date/time settings - self.time = time.localtime(secs_since_epoch) + local_time = time.localtime(secs_since_epoch) - self.frame_date = time.strftime("%B %d, %Y", self.time) + self.frame_date = time.strftime("%B %d, %Y", local_time) - self.frame_time = time.strftime("%I:%M:%S %p", self.time) + self.frame_time = time.strftime("%I:%M:%S %p", local_time) self.font_size = 22 self.text_color = "rgb(255,255,255)" + self.datetime_x = self.width - self.padding # charts self.depth_chart = depth_chart.to_svg() self.temperature_chart = temperature_chart.to_svg() - @property - def datetime_x(self): - return self.width - self.padding - - @property - def depth_background_y(self): - return self.height - 3 * self.padding - self.depth_graph_height - - @property - def depth_background_width(self): - return self.depth_graph_width + 2 * self.padding - - @property - def depth_background_height(self): - return self.depth_graph_height + 2 * self.padding - - @property - def depth_text_x(self): - return self.depth_background_width * 0.5 - - @property - def depth_text_y(self): - return self.depth_background_height - self.padding -
Remove unneeded properties from main data object
## Code Before: import time class Data: def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path): # general settings self.width = 1296 self.height = 972 self.padding = 5 self.frame_path = frame_path # date/time settings self.time = time.localtime(secs_since_epoch) self.frame_date = time.strftime("%B %d, %Y", self.time) self.frame_time = time.strftime("%I:%M:%S %p", self.time) self.font_size = 22 self.text_color = "rgb(255,255,255)" # charts self.depth_chart = depth_chart.to_svg() self.temperature_chart = temperature_chart.to_svg() @property def datetime_x(self): return self.width - self.padding @property def depth_background_y(self): return self.height - 3 * self.padding - self.depth_graph_height @property def depth_background_width(self): return self.depth_graph_width + 2 * self.padding @property def depth_background_height(self): return self.depth_graph_height + 2 * self.padding @property def depth_text_x(self): return self.depth_background_width * 0.5 @property def depth_text_y(self): return self.depth_background_height - self.padding ## Instruction: Remove unneeded properties from main data object ## Code After: import time class Data: def __init__(self, secs_since_epoch, depth_chart, temperature_chart, frame_path): # general settings self.width = 1296 self.height = 972 self.padding = 5 self.frame_path = frame_path # date/time settings local_time = time.localtime(secs_since_epoch) self.frame_date = time.strftime("%B %d, %Y", local_time) self.frame_time = time.strftime("%I:%M:%S %p", local_time) self.font_size = 22 self.text_color = "rgb(255,255,255)" self.datetime_x = self.width - self.padding # charts self.depth_chart = depth_chart.to_svg() self.temperature_chart = temperature_chart.to_svg()
# ... existing code ...
        # date/time settings
        local_time = time.localtime(secs_since_epoch)
        self.frame_date = time.strftime("%B %d, %Y", local_time)
        self.frame_time = time.strftime("%I:%M:%S %p", local_time)
        self.font_size = 22
# ... modified code ...
        self.text_color = "rgb(255,255,255)"
        self.datetime_x = self.width - self.padding

...
        self.temperature_chart = temperature_chart.to_svg()
# ... rest of the code ...
117b202a1c28282a2c27a545c3da29df9e5675ec
ds_unordered_list.py
ds_unordered_list.py
from __future__ import print_function


class List(object):
    """List class."""
    def __init__(self):
        pass

    def add(self, item):
        pass

    def remove(self, item):
        pass

    def search(self, item):
        pass

    def is_empty(self):
        pass

    def length(self):
        pass

    def append(self, item):
        pass

    def index(self, item):
        pass

    def insert(self, pos, item):
        pass

    def pop(self, pos):
        pass

from __future__ import print_function


class Node(object):
    """Node class as building block for unordered list."""
    def __init__(self, init_data):
        pass

    def get_data(self):
        pass

    def get_next(self):
        pass

    def set_data(self, new_data):
        pass

    def set_next(self, new_next):
        pass


class List(object):
    """Unordered list class.

    Implement unordered list by a linked list.
    Operations include the following:
      - add(item)
      - remove(ite)
      - search(item)
      - is_empty()
      - length()
      - append(item)
      - index(item)
      - insert(item, pos)
      - pop(pos)
    """
    def __init__(self):
        pass

    def add(self, item):
        pass

    def remove(self, item):
        pass

    def search(self, item):
        pass

    def is_empty(self):
        pass

    def length(self):
        pass

    def append(self, item):
        pass

    def index(self, item):
        pass

    def insert(self, pos, item):
        pass

    def pop(self, pos):
        pass
Add node class for unordered list building block
Add node class for unordered list building block
Python
bsd-2-clause
bowen0701/algorithms_data_structures
from __future__ import print_function + class Node(object): + """Node class as building block for unordered list.""" + def __init__(self, init_data): + pass + + def get_data(self): + pass + + def get_next(self): + pass + + def set_data(self, new_data): + pass + + def set_next(self, new_next): + pass + + class List(object): - """List class.""" + """Unordered list class. + + Implement unordered list by a linked list. + Operations include the following: + - add(item) + - remove(ite) + - search(item) + - is_empty() + - length() + - append(item) + - index(item) + - insert(item, pos) + - pop(pos) + """ def __init__(self): pass def add(self, item): pass def remove(self, item): pass def search(self, item): pass def is_empty(self): pass def length(self): pass def append(self, item): pass def index(self, item): pass def insert(self, pos, item): pass def pop(self, pos): pass
Add node class for unordered list building block
## Code Before: from __future__ import print_function class List(object): """List class.""" def __init__(self): pass def add(self, item): pass def remove(self, item): pass def search(self, item): pass def is_empty(self): pass def length(self): pass def append(self, item): pass def index(self, item): pass def insert(self, pos, item): pass def pop(self, pos): pass ## Instruction: Add node class for unordered list building block ## Code After: from __future__ import print_function class Node(object): """Node class as building block for unordered list.""" def __init__(self, init_data): pass def get_data(self): pass def get_next(self): pass def set_data(self, new_data): pass def set_next(self, new_next): pass class List(object): """Unordered list class. Implement unordered list by a linked list. Operations include the following: - add(item) - remove(ite) - search(item) - is_empty() - length() - append(item) - index(item) - insert(item, pos) - pop(pos) """ def __init__(self): pass def add(self, item): pass def remove(self, item): pass def search(self, item): pass def is_empty(self): pass def length(self): pass def append(self, item): pass def index(self, item): pass def insert(self, pos, item): pass def pop(self, pos): pass
...
class Node(object):
    """Node class as building block for unordered list."""
    def __init__(self, init_data):
        pass

    def get_data(self):
        pass

    def get_next(self):
        pass

    def set_data(self, new_data):
        pass

    def set_next(self, new_next):
        pass


class List(object):
    """Unordered list class.

    Implement unordered list by a linked list.
    Operations include the following:
      - add(item)
      - remove(ite)
      - search(item)
      - is_empty()
      - length()
      - append(item)
      - index(item)
      - insert(item, pos)
      - pop(pos)
    """
    def __init__(self):
...
59e4e193ea41d05229f2748743e9783d68d8dabf
apps/__init__.py
apps/__init__.py
import os,importlib def find_module_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))] return subdirs def find_html_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))] return dict(subdirs) MODULES = {} _html_dirs = find_html_dirs() [ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
import os,importlib def find_module_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))] return subdirs def find_html_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))] return dict(subdirs) def import_app(app): try: importlib.import_module(app) except Exception as e: logging.error("Couldn't load app: {0}, error: {1}".format(app, e)) MODULES = {} _html_dirs = find_html_dirs() [ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Handle application erroring to not break the server
Handle application erroring to not break the server
Python
agpl-3.0
sociam/indx,sociam/indx,sociam/indx
import os,importlib def find_module_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))] return subdirs def find_html_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))] return dict(subdirs) + def import_app(app): + try: + importlib.import_module(app) + except Exception as e: + logging.error("Couldn't load app: {0}, error: {1}".format(app, e)) + MODULES = {} _html_dirs = find_html_dirs() - [ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ] + [ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Handle application erroring to not break the server
## Code Before: import os,importlib def find_module_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))] return subdirs def find_html_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))] return dict(subdirs) MODULES = {} _html_dirs = find_html_dirs() [ MODULES.update({m_name:{'module':importlib.import_module('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ] ## Instruction: Handle application erroring to not break the server ## Code After: import os,importlib def find_module_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))] return subdirs def find_html_dirs(): curdir = os.path.dirname(os.path.abspath(__file__)) subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))] return dict(subdirs) def import_app(app): try: importlib.import_module(app) except Exception as e: logging.error("Couldn't load app: {0}, error: {1}".format(app, e)) MODULES = {} _html_dirs = find_html_dirs() [ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
# ... existing code ... def import_app(app): try: importlib.import_module(app) except Exception as e: logging.error("Couldn't load app: {0}, error: {1}".format(app, e)) MODULES = {} # ... modified code ... _html_dirs = find_html_dirs() [ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ] # ... rest of the code ...
13301dfe93bcdd44218166bdab1c7aeacd4e4a7c
winthrop/annotation/models.py
winthrop/annotation/models.py
from urllib.parse import urlparse from django.db import models from django.urls import resolve, Resolver404 from annotator_store.models import BaseAnnotation from djiffy.models import Canvas from winthrop.people.models import Person class Annotation(BaseAnnotation): # NOTE: do we want to associate explicitly with canvas in the db? # could just use uri, but faster lookup if we associate... canvas = models.ForeignKey(Canvas, null=True, blank=True) author = models.ForeignKey(Person, null=True, blank=True) def info(self): info = super(Annotation, self).info() info['extra_data'] = 'foo' return info def save(self, *args, **kwargs): # NOTE: could set the canvas uri in javascript instead # of using page uri, but for now determine canvas id # based on the page uri try: match = resolve(urlparse(self.uri).path) if match.url_name == 'page' and 'djiffy' in match.namespaces: self.canvas = Canvas.objects.get( short_id=match.kwargs['id'], book__short_id=match.kwargs['book_id'] ) except Resolver404: pass super(Annotation, self).save()
from urllib.parse import urlparse from django.db import models from django.urls import resolve, Resolver404 from annotator_store.models import BaseAnnotation from djiffy.models import Canvas from winthrop.people.models import Person class Annotation(BaseAnnotation): # NOTE: do we want to associate explicitly with canvas in the db? # could just use uri, but faster lookup if we associate... canvas = models.ForeignKey(Canvas, null=True, blank=True) author = models.ForeignKey(Person, null=True, blank=True) def info(self): info = super(Annotation, self).info() info['extra_data'] = 'foo' return info def save(self, *args, **kwargs): # for image annotation, URI should be set to canvas URI; look up # canvas by URI and associate with the record self.canvas = None try: self.canvas = Canvas.objects.get(uri=self.uri) except Canvas.DoesNotExist: pass super(Annotation, self).save() def handle_extra_data(self, data, request): '''Handle any "extra" data that is not part of the stock annotation data model. Use this method to customize the logic for updating an annotation from request data.''' if 'author' in data: self.author = Person.objects.get(id=data['author']['id']) del data['author'] return data def info(self): # extend the default info impleentation (used to generate json) # to include local database fields in the output info = super(Annotation, self).info() if self.author: info['author'] = { 'name': self.author.authorized_name, 'id': self.author.id } return info
Add author field & autocomplete to annotation model+interface
Add author field & autocomplete to annotation model+interface
Python
apache-2.0
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
from urllib.parse import urlparse from django.db import models from django.urls import resolve, Resolver404 from annotator_store.models import BaseAnnotation from djiffy.models import Canvas from winthrop.people.models import Person class Annotation(BaseAnnotation): # NOTE: do we want to associate explicitly with canvas in the db? # could just use uri, but faster lookup if we associate... canvas = models.ForeignKey(Canvas, null=True, blank=True) author = models.ForeignKey(Person, null=True, blank=True) def info(self): info = super(Annotation, self).info() info['extra_data'] = 'foo' return info def save(self, *args, **kwargs): - # NOTE: could set the canvas uri in javascript instead - # of using page uri, but for now determine canvas id - # based on the page uri + # for image annotation, URI should be set to canvas URI; look up + # canvas by URI and associate with the record + self.canvas = None try: - match = resolve(urlparse(self.uri).path) - if match.url_name == 'page' and 'djiffy' in match.namespaces: - self.canvas = Canvas.objects.get( + self.canvas = Canvas.objects.get(uri=self.uri) + except Canvas.DoesNotExist: - short_id=match.kwargs['id'], - book__short_id=match.kwargs['book_id'] - ) - except Resolver404: pass super(Annotation, self).save() + + def handle_extra_data(self, data, request): + '''Handle any "extra" data that is not part of the stock annotation + data model. Use this method to customize the logic for updating + an annotation from request data.''' + if 'author' in data: + self.author = Person.objects.get(id=data['author']['id']) + del data['author'] + + return data + + def info(self): + # extend the default info impleentation (used to generate json) + # to include local database fields in the output + info = super(Annotation, self).info() + if self.author: + info['author'] = { + 'name': self.author.authorized_name, + 'id': self.author.id + } + return info +
Add author field & autocomplete to annotation model+interface
## Code Before: from urllib.parse import urlparse from django.db import models from django.urls import resolve, Resolver404 from annotator_store.models import BaseAnnotation from djiffy.models import Canvas from winthrop.people.models import Person class Annotation(BaseAnnotation): # NOTE: do we want to associate explicitly with canvas in the db? # could just use uri, but faster lookup if we associate... canvas = models.ForeignKey(Canvas, null=True, blank=True) author = models.ForeignKey(Person, null=True, blank=True) def info(self): info = super(Annotation, self).info() info['extra_data'] = 'foo' return info def save(self, *args, **kwargs): # NOTE: could set the canvas uri in javascript instead # of using page uri, but for now determine canvas id # based on the page uri try: match = resolve(urlparse(self.uri).path) if match.url_name == 'page' and 'djiffy' in match.namespaces: self.canvas = Canvas.objects.get( short_id=match.kwargs['id'], book__short_id=match.kwargs['book_id'] ) except Resolver404: pass super(Annotation, self).save() ## Instruction: Add author field & autocomplete to annotation model+interface ## Code After: from urllib.parse import urlparse from django.db import models from django.urls import resolve, Resolver404 from annotator_store.models import BaseAnnotation from djiffy.models import Canvas from winthrop.people.models import Person class Annotation(BaseAnnotation): # NOTE: do we want to associate explicitly with canvas in the db? # could just use uri, but faster lookup if we associate... canvas = models.ForeignKey(Canvas, null=True, blank=True) author = models.ForeignKey(Person, null=True, blank=True) def info(self): info = super(Annotation, self).info() info['extra_data'] = 'foo' return info def save(self, *args, **kwargs): # for image annotation, URI should be set to canvas URI; look up # canvas by URI and associate with the record self.canvas = None try: self.canvas = Canvas.objects.get(uri=self.uri) except Canvas.DoesNotExist: pass super(Annotation, self).save() def handle_extra_data(self, data, request): '''Handle any "extra" data that is not part of the stock annotation data model. Use this method to customize the logic for updating an annotation from request data.''' if 'author' in data: self.author = Person.objects.get(id=data['author']['id']) del data['author'] return data def info(self): # extend the default info impleentation (used to generate json) # to include local database fields in the output info = super(Annotation, self).info() if self.author: info['author'] = { 'name': self.author.authorized_name, 'id': self.author.id } return info
... def save(self, *args, **kwargs): # for image annotation, URI should be set to canvas URI; look up # canvas by URI and associate with the record self.canvas = None try: self.canvas = Canvas.objects.get(uri=self.uri) except Canvas.DoesNotExist: pass ... def handle_extra_data(self, data, request): '''Handle any "extra" data that is not part of the stock annotation data model. Use this method to customize the logic for updating an annotation from request data.''' if 'author' in data: self.author = Person.objects.get(id=data['author']['id']) del data['author'] return data def info(self): # extend the default info impleentation (used to generate json) # to include local database fields in the output info = super(Annotation, self).info() if self.author: info['author'] = { 'name': self.author.authorized_name, 'id': self.author.id } return info ... ...
be53f1234bec0bca4c35f020905e24d0637b91e3
tests/run/coroutines.py
tests/run/coroutines.py
async def test_coroutine_frame(awaitable): """ >>> class Awaitable(object): ... def __await__(self): ... return iter([2]) >>> coro = test_coroutine_frame(Awaitable()) >>> import types >>> isinstance(coro.cr_frame, types.FrameType) or coro.cr_frame True >>> coro.cr_frame is coro.cr_frame # assert that it's cached True >>> coro.cr_frame.f_code is not None True >>> code_obj = coro.cr_frame.f_code >>> code_obj.co_argcount 1 >>> code_obj.co_varnames ('awaitable', 'b') >>> next(coro.__await__()) # avoid "not awaited" warning 2 """ b = await awaitable return b
async def test_coroutine_frame(awaitable): """ >>> class Awaitable(object): ... def __await__(self): ... return iter([2]) >>> coro = test_coroutine_frame(Awaitable()) >>> import types >>> isinstance(coro.cr_frame, types.FrameType) or coro.cr_frame True >>> coro.cr_frame is coro.cr_frame # assert that it's cached True >>> coro.cr_frame.f_code is not None True >>> code_obj = coro.cr_frame.f_code >>> code_obj.co_argcount 1 >>> code_obj.co_varnames ('awaitable', 'b') >>> next(coro.__await__()) # avoid "not awaited" warning 2 """ b = await awaitable return b # gh1462: Using decorators on coroutines. def pass_through(func): return func @pass_through async def test_pass_through(): """ >>> t = test_pass_through() >>> try: t.send(None) ... except StopIteration as ex: ... print(ex.args[0] if ex.args else None) ... else: print("NOT STOPPED!") None """ @pass_through(pass_through) async def test_pass_through_with_args(): """ >>> t = test_pass_through_with_args() >>> try: t.send(None) ... except StopIteration as ex: ... print(ex.args[0] if ex.args else None) ... else: print("NOT STOPPED!") None """
Add an explicit test for async-def functions with decorators. Closes GH-1462.
Add an explicit test for async-def functions with decorators. Closes GH-1462.
Python
apache-2.0
scoder/cython,da-woods/cython,scoder/cython,scoder/cython,cython/cython,da-woods/cython,cython/cython,cython/cython,da-woods/cython,scoder/cython,cython/cython,da-woods/cython
async def test_coroutine_frame(awaitable): """ >>> class Awaitable(object): ... def __await__(self): ... return iter([2]) >>> coro = test_coroutine_frame(Awaitable()) >>> import types >>> isinstance(coro.cr_frame, types.FrameType) or coro.cr_frame True >>> coro.cr_frame is coro.cr_frame # assert that it's cached True >>> coro.cr_frame.f_code is not None True >>> code_obj = coro.cr_frame.f_code >>> code_obj.co_argcount 1 >>> code_obj.co_varnames ('awaitable', 'b') >>> next(coro.__await__()) # avoid "not awaited" warning 2 """ b = await awaitable return b + + # gh1462: Using decorators on coroutines. + + def pass_through(func): + return func + + + @pass_through + async def test_pass_through(): + """ + >>> t = test_pass_through() + >>> try: t.send(None) + ... except StopIteration as ex: + ... print(ex.args[0] if ex.args else None) + ... else: print("NOT STOPPED!") + None + """ + + + @pass_through(pass_through) + async def test_pass_through_with_args(): + """ + >>> t = test_pass_through_with_args() + >>> try: t.send(None) + ... except StopIteration as ex: + ... print(ex.args[0] if ex.args else None) + ... else: print("NOT STOPPED!") + None + """ +
Add an explicit test for async-def functions with decorators. Closes GH-1462.
## Code Before: async def test_coroutine_frame(awaitable): """ >>> class Awaitable(object): ... def __await__(self): ... return iter([2]) >>> coro = test_coroutine_frame(Awaitable()) >>> import types >>> isinstance(coro.cr_frame, types.FrameType) or coro.cr_frame True >>> coro.cr_frame is coro.cr_frame # assert that it's cached True >>> coro.cr_frame.f_code is not None True >>> code_obj = coro.cr_frame.f_code >>> code_obj.co_argcount 1 >>> code_obj.co_varnames ('awaitable', 'b') >>> next(coro.__await__()) # avoid "not awaited" warning 2 """ b = await awaitable return b ## Instruction: Add an explicit test for async-def functions with decorators. Closes GH-1462. ## Code After: async def test_coroutine_frame(awaitable): """ >>> class Awaitable(object): ... def __await__(self): ... return iter([2]) >>> coro = test_coroutine_frame(Awaitable()) >>> import types >>> isinstance(coro.cr_frame, types.FrameType) or coro.cr_frame True >>> coro.cr_frame is coro.cr_frame # assert that it's cached True >>> coro.cr_frame.f_code is not None True >>> code_obj = coro.cr_frame.f_code >>> code_obj.co_argcount 1 >>> code_obj.co_varnames ('awaitable', 'b') >>> next(coro.__await__()) # avoid "not awaited" warning 2 """ b = await awaitable return b # gh1462: Using decorators on coroutines. def pass_through(func): return func @pass_through async def test_pass_through(): """ >>> t = test_pass_through() >>> try: t.send(None) ... except StopIteration as ex: ... print(ex.args[0] if ex.args else None) ... else: print("NOT STOPPED!") None """ @pass_through(pass_through) async def test_pass_through_with_args(): """ >>> t = test_pass_through_with_args() >>> try: t.send(None) ... except StopIteration as ex: ... print(ex.args[0] if ex.args else None) ... else: print("NOT STOPPED!") None """
// ... existing code ... return b # gh1462: Using decorators on coroutines. def pass_through(func): return func @pass_through async def test_pass_through(): """ >>> t = test_pass_through() >>> try: t.send(None) ... except StopIteration as ex: ... print(ex.args[0] if ex.args else None) ... else: print("NOT STOPPED!") None """ @pass_through(pass_through) async def test_pass_through_with_args(): """ >>> t = test_pass_through_with_args() >>> try: t.send(None) ... except StopIteration as ex: ... print(ex.args[0] if ex.args else None) ... else: print("NOT STOPPED!") None """ // ... rest of the code ...
34960807eac1818a8167ff015e941c42be8827da
checkenv.py
checkenv.py
from colorama import Fore from pkgutil import iter_modules def check_import(packagename): """ Checks that a package is present. Returns true if it is available, and false if not available. """ if packagename in (name for _, name, _ in iter_modules()): return True else: return False packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml', 'pandas_summary', 'environment_kernels', 'hypothesis'] try: for pkg in packages: assert check_import(pkg) print(Fore.GREEN + 'All packages found; environment checks passed.') except AssertionError: print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.")
from colorama import Fore, Style from pkgutil import iter_modules def check_import(packagename): """ Checks that a package is present. Returns true if it is available, and false if not available. """ if packagename in (name for _, name, _ in iter_modules()): return True else: return False packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml', 'pandas_summary', 'environment_kernels', 'hypothesis'] try: for pkg in packages: assert check_import(pkg) print(Fore.GREEN + 'All packages found; environment checks passed.') except AssertionError: print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.") Style.RESET_ALL
Reset colors at the end
Reset colors at the end
Python
mit
ericmjl/data-testing-tutorial,ericmjl/data-testing-tutorial
- from colorama import Fore + from colorama import Fore, Style from pkgutil import iter_modules def check_import(packagename): """ Checks that a package is present. Returns true if it is available, and false if not available. """ if packagename in (name for _, name, _ in iter_modules()): return True else: return False packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml', 'pandas_summary', 'environment_kernels', 'hypothesis'] try: for pkg in packages: assert check_import(pkg) print(Fore.GREEN + 'All packages found; environment checks passed.') except AssertionError: print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.") + Style.RESET_ALL +
Reset colors at the end
## Code Before: from colorama import Fore from pkgutil import iter_modules def check_import(packagename): """ Checks that a package is present. Returns true if it is available, and false if not available. """ if packagename in (name for _, name, _ in iter_modules()): return True else: return False packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml', 'pandas_summary', 'environment_kernels', 'hypothesis'] try: for pkg in packages: assert check_import(pkg) print(Fore.GREEN + 'All packages found; environment checks passed.') except AssertionError: print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.") ## Instruction: Reset colors at the end ## Code After: from colorama import Fore, Style from pkgutil import iter_modules def check_import(packagename): """ Checks that a package is present. Returns true if it is available, and false if not available. """ if packagename in (name for _, name, _ in iter_modules()): return True else: return False packages = ['missingno', 'pytest', 'pytest_cov', 'tinydb', 'yaml', 'pandas_summary', 'environment_kernels', 'hypothesis'] try: for pkg in packages: assert check_import(pkg) print(Fore.GREEN + 'All packages found; environment checks passed.') except AssertionError: print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.") Style.RESET_ALL
# ... existing code ... from colorama import Fore, Style from pkgutil import iter_modules # ... modified code ... print(Fore.RED + f"{pkg} cannot be found. Please pip or conda install.") Style.RESET_ALL # ... rest of the code ...
be746c870f2015507af5513a8636905cf9018001
image_cropping/thumbnail_processors.py
image_cropping/thumbnail_processors.py
import logging logger = logging.getLogger(__name__) def crop_corners(image, box=None, **kwargs): """ Crop corners to the selection defined by image_cropping `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers. """ if isinstance(box, basestring): if box.startswith('-'): pass # TBC: what does this indicate? No-op value? else: try: box = map(int, box.split(',')) except (ValueError, IndexError): # There's garbage in the cropping field, ignore logger.warning( 'Unable to parse "box" parameter "%s". Ignoring.' % box) if isinstance(box, (list, tuple)): if len(box) == 4: if sum(box) < 0: pass # TODO: add explanatory comment for this please else: width = abs(box[2] - box[0]) height = abs(box[3] - box[1]) if width and height and (width, height) != image.size: image = image.crop(box) else: logger.warning( '"box" parameter requires four values. Ignoring "%r".' % (box,) ) return image
import logging logger = logging.getLogger(__name__) def crop_corners(image, box=None, **kwargs): """ Crop corners to the selection defined by image_cropping `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers. """ if box and not box.startswith('-'): # a leading - indicates that cropping is disabled try: box = map(int, box.split(',')) except ValueError: # there's garbage in the cropping field, ignore logger.warning( 'Unable to parse "box" parameter "%s". Ignoring.' % box) if len(box) == 4: if sum(box) > 0: # negative box values indicate that cropping is disabled width = abs(box[2] - box[0]) height = abs(box[3] - box[1]) if width and height and (width, height) != image.size: image = image.crop(box) else: logger.warning( '"box" parameter requires four values. Ignoring "%r".' % box) return image
Tweak thumbnail processor a little
Tweak thumbnail processor a little
Python
bsd-3-clause
henriquechehad/django-image-cropping,henriquechehad/django-image-cropping,winzard/django-image-cropping,winzard/django-image-cropping,winzard/django-image-cropping,henriquechehad/django-image-cropping
import logging logger = logging.getLogger(__name__) + def crop_corners(image, box=None, **kwargs): """ Crop corners to the selection defined by image_cropping `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers. """ - if isinstance(box, basestring): - if box.startswith('-'): + if box and not box.startswith('-'): + # a leading - indicates that cropping is disabled - pass # TBC: what does this indicate? No-op value? - else: - try: + try: - box = map(int, box.split(',')) + box = map(int, box.split(',')) - except (ValueError, IndexError): + except ValueError: - # There's garbage in the cropping field, ignore + # there's garbage in the cropping field, ignore - logger.warning( + logger.warning( - 'Unable to parse "box" parameter "%s". Ignoring.' % box) + 'Unable to parse "box" parameter "%s". Ignoring.' % box) - if isinstance(box, (list, tuple)): if len(box) == 4: - if sum(box) < 0: + if sum(box) > 0: + # negative box values indicate that cropping is disabled - pass # TODO: add explanatory comment for this please - else: width = abs(box[2] - box[0]) height = abs(box[3] - box[1]) if width and height and (width, height) != image.size: image = image.crop(box) else: logger.warning( - '"box" parameter requires four values. Ignoring "%r".' % (box,) + '"box" parameter requires four values. Ignoring "%r".' % box) - ) - return image -
Tweak thumbnail processor a little
## Code Before: import logging logger = logging.getLogger(__name__) def crop_corners(image, box=None, **kwargs): """ Crop corners to the selection defined by image_cropping `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers. """ if isinstance(box, basestring): if box.startswith('-'): pass # TBC: what does this indicate? No-op value? else: try: box = map(int, box.split(',')) except (ValueError, IndexError): # There's garbage in the cropping field, ignore logger.warning( 'Unable to parse "box" parameter "%s". Ignoring.' % box) if isinstance(box, (list, tuple)): if len(box) == 4: if sum(box) < 0: pass # TODO: add explanatory comment for this please else: width = abs(box[2] - box[0]) height = abs(box[3] - box[1]) if width and height and (width, height) != image.size: image = image.crop(box) else: logger.warning( '"box" parameter requires four values. Ignoring "%r".' % (box,) ) return image ## Instruction: Tweak thumbnail processor a little ## Code After: import logging logger = logging.getLogger(__name__) def crop_corners(image, box=None, **kwargs): """ Crop corners to the selection defined by image_cropping `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers. """ if box and not box.startswith('-'): # a leading - indicates that cropping is disabled try: box = map(int, box.split(',')) except ValueError: # there's garbage in the cropping field, ignore logger.warning( 'Unable to parse "box" parameter "%s". Ignoring.' % box) if len(box) == 4: if sum(box) > 0: # negative box values indicate that cropping is disabled width = abs(box[2] - box[0]) height = abs(box[3] - box[1]) if width and height and (width, height) != image.size: image = image.crop(box) else: logger.warning( '"box" parameter requires four values. Ignoring "%r".' % box) return image
// ... existing code ... logger = logging.getLogger(__name__) // ... modified code ... """ if box and not box.startswith('-'): # a leading - indicates that cropping is disabled try: box = map(int, box.split(',')) except ValueError: # there's garbage in the cropping field, ignore logger.warning( 'Unable to parse "box" parameter "%s". Ignoring.' % box) if len(box) == 4: if sum(box) > 0: # negative box values indicate that cropping is disabled width = abs(box[2] - box[0]) ... logger.warning( '"box" parameter requires four values. Ignoring "%r".' % box) return image // ... rest of the code ...
76ecb6a4b71d1a248b21cf1671360514dc6c3be2
mobile/backends/twilio.py
mobile/backends/twilio.py
from twilio.rest import TwilioRestClient from mobile.backends.base import BaseBackend class Backend(BaseBackend): """Twilio Gate Backend.""" class SMS: @classmethod def send(self, recipient, sender, message): """ Send an SMS and return its initial delivery status code. See twilio-python Documentation: https://github.com/twilio/twilio-python """ client = TwilioRestClient() message = client.messages.create( to=recipient, from_=sender, body=message ) return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage] @classmethod def receive(self, data): """Return IncomingSMS instance from parsed data.""" raise NotImplementedError class MMS: @classmethod def receive(self, data): """Return IncomingMMS instance from parsed data.""" raise NotImplementedError
import twilio.twiml from django.http import QueryDict from twilio.rest import TwilioRestClient from mobile.backends.base import BaseBackend import mobile.models class Backend(BaseBackend): """Twilio Gate Backend.""" class SMS: @classmethod def send(self, recipient, sender, message): """ Send an SMS and return its initial delivery status code. See twilio-python Documentation: https://github.com/twilio/twilio-python """ client = TwilioRestClient() message = client.messages.create( to=recipient, from_=sender, body=message ) return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage] @classmethod def receive(self, data): """Return IncomingSMS instance from parsed data.""" data = QueryDict(data).copy() sms = mobile.models.IncomingSMS( message_id=data.get('MessageSid'), country=data.get('FromCountry', None), sender=data.get('From'), recipient=data.get('To'), message=data.get('Body'), source=data ) return sms.save() class MMS: @classmethod def receive(self, data): """Return IncomingMMS instance from parsed data.""" raise NotImplementedError
Add receive support to Twilio backend
Add receive support to Twilio backend
Python
mit
hyperoslo/django-mobile
+ import twilio.twiml + + from django.http import QueryDict + from twilio.rest import TwilioRestClient from mobile.backends.base import BaseBackend + import mobile.models class Backend(BaseBackend): """Twilio Gate Backend.""" class SMS: @classmethod def send(self, recipient, sender, message): """ Send an SMS and return its initial delivery status code. See twilio-python Documentation: https://github.com/twilio/twilio-python """ client = TwilioRestClient() message = client.messages.create( to=recipient, from_=sender, body=message ) return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage] @classmethod def receive(self, data): """Return IncomingSMS instance from parsed data.""" - raise NotImplementedError + + data = QueryDict(data).copy() + + sms = mobile.models.IncomingSMS( + message_id=data.get('MessageSid'), + country=data.get('FromCountry', None), + sender=data.get('From'), + recipient=data.get('To'), + message=data.get('Body'), + source=data + ) + + return sms.save() class MMS: @classmethod def receive(self, data): """Return IncomingMMS instance from parsed data.""" raise NotImplementedError
Add receive support to Twilio backend
## Code Before: from twilio.rest import TwilioRestClient from mobile.backends.base import BaseBackend class Backend(BaseBackend): """Twilio Gate Backend.""" class SMS: @classmethod def send(self, recipient, sender, message): """ Send an SMS and return its initial delivery status code. See twilio-python Documentation: https://github.com/twilio/twilio-python """ client = TwilioRestClient() message = client.messages.create( to=recipient, from_=sender, body=message ) return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage] @classmethod def receive(self, data): """Return IncomingSMS instance from parsed data.""" raise NotImplementedError class MMS: @classmethod def receive(self, data): """Return IncomingMMS instance from parsed data.""" raise NotImplementedError ## Instruction: Add receive support to Twilio backend ## Code After: import twilio.twiml from django.http import QueryDict from twilio.rest import TwilioRestClient from mobile.backends.base import BaseBackend import mobile.models class Backend(BaseBackend): """Twilio Gate Backend.""" class SMS: @classmethod def send(self, recipient, sender, message): """ Send an SMS and return its initial delivery status code. See twilio-python Documentation: https://github.com/twilio/twilio-python """ client = TwilioRestClient() message = client.messages.create( to=recipient, from_=sender, body=message ) return [message.Status, message.sid, message.ErrorCode, message.ErrorMessage] @classmethod def receive(self, data): """Return IncomingSMS instance from parsed data.""" data = QueryDict(data).copy() sms = mobile.models.IncomingSMS( message_id=data.get('MessageSid'), country=data.get('FromCountry', None), sender=data.get('From'), recipient=data.get('To'), message=data.get('Body'), source=data ) return sms.save() class MMS: @classmethod def receive(self, data): """Return IncomingMMS instance from parsed data.""" raise NotImplementedError
# ... existing code ... import twilio.twiml from django.http import QueryDict from twilio.rest import TwilioRestClient # ... modified code ... from mobile.backends.base import BaseBackend import mobile.models ... """Return IncomingSMS instance from parsed data.""" data = QueryDict(data).copy() sms = mobile.models.IncomingSMS( message_id=data.get('MessageSid'), country=data.get('FromCountry', None), sender=data.get('From'), recipient=data.get('To'), message=data.get('Body'), source=data ) return sms.save() # ... rest of the code ...
284cfbb4297d1d91c8c82e0f9a159a1614510ace
example.py
example.py
from confman import ConfigSource options = \ { 'tags': ['desktop'], 'hostname': 'test', } from sys import argv from os import path samples_path = path.join(path.dirname(argv[0]), 'samples') c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options) c.analyze() c.check() c.sync() print from pprint import pprint pprint(c)
from confman import ConfigSource options = \ { 'tags': ['desktop'], 'hostname': 'test', } from os import path samples_path = path.join(path.dirname(__file__), 'samples') c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options) c.analyze() c.check() c.sync() print from pprint import pprint pprint(c)
Use __file__ instead of argv[0]
Use __file__ instead of argv[0]
Python
mit
laurentb/confman
from confman import ConfigSource options = \ { 'tags': ['desktop'], 'hostname': 'test', } - from sys import argv from os import path - samples_path = path.join(path.dirname(argv[0]), 'samples') + samples_path = path.join(path.dirname(__file__), 'samples') c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options) c.analyze() c.check() c.sync() print from pprint import pprint pprint(c)
Use __file__ instead of argv[0]
## Code Before: from confman import ConfigSource options = \ { 'tags': ['desktop'], 'hostname': 'test', } from sys import argv from os import path samples_path = path.join(path.dirname(argv[0]), 'samples') c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options) c.analyze() c.check() c.sync() print from pprint import pprint pprint(c) ## Instruction: Use __file__ instead of argv[0] ## Code After: from confman import ConfigSource options = \ { 'tags': ['desktop'], 'hostname': 'test', } from os import path samples_path = path.join(path.dirname(__file__), 'samples') c = ConfigSource(samples_path, "/tmp/dotfiles-test", None, options) c.analyze() c.check() c.sync() print from pprint import pprint pprint(c)
// ... existing code ... from os import path samples_path = path.join(path.dirname(__file__), 'samples') // ... rest of the code ...
0eca195f9c29824f354cae53a4005f04c67eb86f
nodeconductor/cloud/views.py
nodeconductor/cloud/views.py
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
Optimize SQL queries used for fetching clouds
Optimize SQL queries used for fetching clouds
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): - model = models.Cloud + queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
Optimize SQL queries used for fetching clouds
## Code Before: from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): model = models.Cloud serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied() ## Instruction: Optimize SQL queries used for fetching clouds ## Code After: from rest_framework import permissions as rf_permissions from rest_framework import exceptions from nodeconductor.core import viewsets from nodeconductor.cloud import models from nodeconductor.cloud import serializers from nodeconductor.structure import filters as structure_filters from nodeconductor.structure import models as structure_models class FlavorViewSet(viewsets.ReadOnlyModelViewSet): model = models.Flavor serializer_class = serializers.FlavorSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer lookup_field = 'uuid' filter_backends = (structure_filters.GenericRoleFilter,) permission_classes = (rf_permissions.IsAuthenticated, rf_permissions.DjangoObjectPermissions) def pre_save(self, cloud): super(CloudViewSet, self).pre_save(cloud) if not cloud.customer.roles.filter( permission_group__user=self.request.user, role_type=structure_models.CustomerRole.OWNER, ).exists(): raise exceptions.PermissionDenied()
# ... existing code ... class CloudViewSet(viewsets.ModelViewSet): queryset = models.Cloud.objects.all().prefetch_related('flavors') serializer_class = serializers.CloudSerializer # ... rest of the code ...
f69ea0232881c923e71bd2716fb6faa5d0d99491
yithlibraryserver/tests/test_views.py
yithlibraryserver/tests/test_views.py
from yithlibraryserver import testing class ViewTests(testing.TestCase): def test_home(self): res = self.testapp.get('/') self.assertEqual(res.status, '200 OK')
from yithlibraryserver import testing class ViewTests(testing.TestCase): def test_home(self): res = self.testapp.get('/') self.assertEqual(res.status, '200 OK') def test_tos(self): res = self.testapp.get('/tos') self.assertEqual(res.status, '200 OK')
Test the new tos view
Test the new tos view
Python
agpl-3.0
lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server
from yithlibraryserver import testing class ViewTests(testing.TestCase): def test_home(self): res = self.testapp.get('/') self.assertEqual(res.status, '200 OK') + def test_tos(self): + res = self.testapp.get('/tos') + self.assertEqual(res.status, '200 OK') +
Test the new tos view
## Code Before: from yithlibraryserver import testing class ViewTests(testing.TestCase): def test_home(self): res = self.testapp.get('/') self.assertEqual(res.status, '200 OK') ## Instruction: Test the new tos view ## Code After: from yithlibraryserver import testing class ViewTests(testing.TestCase): def test_home(self): res = self.testapp.get('/') self.assertEqual(res.status, '200 OK') def test_tos(self): res = self.testapp.get('/tos') self.assertEqual(res.status, '200 OK')
// ... existing code ... self.assertEqual(res.status, '200 OK') def test_tos(self): res = self.testapp.get('/tos') self.assertEqual(res.status, '200 OK') // ... rest of the code ...
6c922f593dba7f3604763bbb489a910dee4c915a
.vim/templates/argparse.py
.vim/templates/argparse.py
import argparse parser = argparse.ArgumentParser(description='A useful description of this script (might want to set this to __doc__)', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-x', metavar='nameofx', nargs='+', type=float, default=32*32*32, help='Helpful message about this argument') grp = parser.add_mutually_exclusive_group() grp.add_argument('--option1', dest='destination', action='store_const', const='option1_const') grp.add_argument('--option2', dest='destination', action='store_const', const='option2_const') args = parser.parse_args()
import argparse parser = argparse.ArgumentParser(description='A useful description of this script (might want to set this to __doc__)', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('strarg', type=str, help='A string argument') parser.add_argument('numarg', type=float, help='A numerical argument') parser.add_argument('-x', metavar='nameofx', nargs='+', type=float, default=32*32*32, help='Helpful message about this argument') grp = parser.add_mutually_exclusive_group() grp.add_argument('--option1', dest='destination', action='store_const', const='option1_const') grp.add_argument('--option2', dest='destination', action='store_const', const='option2_const') args = parser.parse_args()
Add typed positional argument examples
Add typed positional argument examples
Python
mit
SnoopJeDi/dotfiles,SnoopJeDi/dotfiles,SnoopJeDi/dotfiles
import argparse parser = argparse.ArgumentParser(description='A useful description of this script (might want to set this to __doc__)', formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('strarg', type=str, help='A string argument') + parser.add_argument('numarg', type=float, help='A numerical argument') parser.add_argument('-x', metavar='nameofx', nargs='+', type=float, default=32*32*32, help='Helpful message about this argument') grp = parser.add_mutually_exclusive_group() grp.add_argument('--option1', dest='destination', action='store_const', const='option1_const') grp.add_argument('--option2', dest='destination', action='store_const', const='option2_const') args = parser.parse_args()
Add typed positional argument examples
## Code Before: import argparse parser = argparse.ArgumentParser(description='A useful description of this script (might want to set this to __doc__)', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-x', metavar='nameofx', nargs='+', type=float, default=32*32*32, help='Helpful message about this argument') grp = parser.add_mutually_exclusive_group() grp.add_argument('--option1', dest='destination', action='store_const', const='option1_const') grp.add_argument('--option2', dest='destination', action='store_const', const='option2_const') args = parser.parse_args() ## Instruction: Add typed positional argument examples ## Code After: import argparse parser = argparse.ArgumentParser(description='A useful description of this script (might want to set this to __doc__)', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('strarg', type=str, help='A string argument') parser.add_argument('numarg', type=float, help='A numerical argument') parser.add_argument('-x', metavar='nameofx', nargs='+', type=float, default=32*32*32, help='Helpful message about this argument') grp = parser.add_mutually_exclusive_group() grp.add_argument('--option1', dest='destination', action='store_const', const='option1_const') grp.add_argument('--option2', dest='destination', action='store_const', const='option2_const') args = parser.parse_args()
... parser = argparse.ArgumentParser(description='A useful description of this script (might want to set this to __doc__)', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('strarg', type=str, help='A string argument') parser.add_argument('numarg', type=float, help='A numerical argument') parser.add_argument('-x', metavar='nameofx', nargs='+', type=float, default=32*32*32, help='Helpful message about this argument') ...
1709c602b8a423d1eee6521c5e74987db0fc8b81
fancypages/contrib/oscar_fancypages/mixins.py
fancypages/contrib/oscar_fancypages/mixins.py
from ... import mixins class OscarFancyPageMixin(mixins.FancyPageMixin): node_attr_name = 'category' slug_url_kwarg = 'category_slug' context_object_name = 'fancypage' def get_context_data(self, **kwargs): ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs) ctx[self.context_object_name] = getattr(self, self.page_attr_name) if self.category: ctx['object'] = self.category for container in self.category.page.containers.all(): ctx[container.name] = container return ctx
from ... import mixins class OscarFancyPageMixin(mixins.FancyPageMixin): node_attr_name = 'category' slug_url_kwarg = 'category_slug' context_object_name = 'products' def get_context_data(self, **kwargs): ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs) ctx['fancypage'] = getattr(self, self.page_attr_name) if self.category: ctx['object'] = self.category for container in self.category.page.containers.all(): ctx[container.name] = container return ctx
Change context object for product list view in Oscar contrib
Change context object for product list view in Oscar contrib
Python
bsd-3-clause
tangentlabs/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,socradev/django-fancypages
from ... import mixins class OscarFancyPageMixin(mixins.FancyPageMixin): node_attr_name = 'category' slug_url_kwarg = 'category_slug' - context_object_name = 'fancypage' + context_object_name = 'products' def get_context_data(self, **kwargs): ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs) - ctx[self.context_object_name] = getattr(self, self.page_attr_name) + ctx['fancypage'] = getattr(self, self.page_attr_name) if self.category: ctx['object'] = self.category for container in self.category.page.containers.all(): ctx[container.name] = container return ctx
Change context object for product list view in Oscar contrib
## Code Before: from ... import mixins class OscarFancyPageMixin(mixins.FancyPageMixin): node_attr_name = 'category' slug_url_kwarg = 'category_slug' context_object_name = 'fancypage' def get_context_data(self, **kwargs): ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs) ctx[self.context_object_name] = getattr(self, self.page_attr_name) if self.category: ctx['object'] = self.category for container in self.category.page.containers.all(): ctx[container.name] = container return ctx ## Instruction: Change context object for product list view in Oscar contrib ## Code After: from ... import mixins class OscarFancyPageMixin(mixins.FancyPageMixin): node_attr_name = 'category' slug_url_kwarg = 'category_slug' context_object_name = 'products' def get_context_data(self, **kwargs): ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs) ctx['fancypage'] = getattr(self, self.page_attr_name) if self.category: ctx['object'] = self.category for container in self.category.page.containers.all(): ctx[container.name] = container return ctx
... slug_url_kwarg = 'category_slug' context_object_name = 'products' ... ctx = super(OscarFancyPageMixin, self).get_context_data(**kwargs) ctx['fancypage'] = getattr(self, self.page_attr_name) if self.category: ...
cc42cf63bc3bf887933635e824cc838204738e30
tests/acceptance/shared.py
tests/acceptance/shared.py
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
Fix acceptance tests: for for button to be visible
Fix acceptance tests: for for button to be visible
Python
agpl-3.0
xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator
"""Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() + def get_listing_create_form(listing): """Open and return the create form of a listing.""" - listing.find_by_css('.navbar .button').first.click() + button = listing.find_by_css('.navbar .button').first + wait(lambda: button.visible) + button.click() return listing.find_by_css('.listing-create-form').first
Fix acceptance tests: for for button to be visible
## Code Before: """Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" listing.find_by_css('.navbar .button').first.click() return listing.find_by_css('.listing-create-form').first ## Instruction: Fix acceptance tests: for for button to be visible ## Code After: """Shared acceptance test functions.""" from time import sleep def wait(condition, step=0.1, max_steps=10): """Wait for a condition to become true.""" for i in range(max_steps - 1): if condition(): return True else: sleep(step) return condition() def get_listing_create_form(listing): """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first
# ... existing code ... def get_listing_create_form(listing): # ... modified code ... """Open and return the create form of a listing.""" button = listing.find_by_css('.navbar .button').first wait(lambda: button.visible) button.click() return listing.find_by_css('.listing-create-form').first # ... rest of the code ...
14d51aa701dcc8d1d3f026af947c935abb0eabe3
examples/rune.py
examples/rune.py
import cassiopeia as cass from cassiopeia.core import Summoner def test_cass(): name = "Kalturi" runes = cass.get_runes() for rune in runes: if rune.tier == 3: print(rune.name) if __name__ == "__main__": test_cass()
import cassiopeia as cass def print_t3_runes(): for rune in cass.get_runes(): if rune.tier == 3: print(rune.name) if __name__ == "__main__": print_t3_runes()
Change function name, remove unneeded summoner name
Change function name, remove unneeded summoner name
Python
mit
robrua/cassiopeia,10se1ucgo/cassiopeia,meraki-analytics/cassiopeia
import cassiopeia as cass - from cassiopeia.core import Summoner - def test_cass(): - name = "Kalturi" + def print_t3_runes(): - runes = cass.get_runes() + for rune in cass.get_runes(): - for rune in runes: if rune.tier == 3: print(rune.name) if __name__ == "__main__": - test_cass() + print_t3_runes()
Change function name, remove unneeded summoner name
## Code Before: import cassiopeia as cass from cassiopeia.core import Summoner def test_cass(): name = "Kalturi" runes = cass.get_runes() for rune in runes: if rune.tier == 3: print(rune.name) if __name__ == "__main__": test_cass() ## Instruction: Change function name, remove unneeded summoner name ## Code After: import cassiopeia as cass def print_t3_runes(): for rune in cass.get_runes(): if rune.tier == 3: print(rune.name) if __name__ == "__main__": print_t3_runes()
# ... existing code ... import cassiopeia as cass def print_t3_runes(): for rune in cass.get_runes(): if rune.tier == 3: # ... modified code ... if __name__ == "__main__": print_t3_runes() # ... rest of the code ...
7ad47fad53be18a07aede85c02e41176a96c5de2
learnwithpeople/__init__.py
learnwithpeople/__init__.py
from .celery import app as celery_app __version__ = "dev" GIT_REVISION = "dev"
from .celery import app as celery_app __all__ = ('celery_app',) __version__ = "dev" GIT_REVISION = "dev"
Update celery setup according to docs
Update celery setup according to docs
Python
mit
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
from .celery import app as celery_app + + __all__ = ('celery_app',) __version__ = "dev" GIT_REVISION = "dev"
Update celery setup according to docs
## Code Before: from .celery import app as celery_app __version__ = "dev" GIT_REVISION = "dev" ## Instruction: Update celery setup according to docs ## Code After: from .celery import app as celery_app __all__ = ('celery_app',) __version__ = "dev" GIT_REVISION = "dev"
# ... existing code ... from .celery import app as celery_app __all__ = ('celery_app',) # ... rest of the code ...
4e63db0d699eeb7a313708f82c129637222e1014
src/penn_chime/utils.py
src/penn_chime/utils.py
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe to a base64-encoded CSV representation of that data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(str_excel_filename) -> str: data = open(str_excel_filename, 'rb').read() b64 = b64encode(data).decode() return b64
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(filename: str) -> str: """Converts an excel document into base64-encoded data.""" with open(filename, 'rb') as fin: return b64encode(fin.read()).decode()
Update excel_to_base64 to always close file handles
Update excel_to_base64 to always close file handles
Python
mit
CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime
"""Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: - """Converts a dataframe to a base64-encoded CSV representation of that data. + """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 - def excel_to_base64(str_excel_filename) -> str: + def excel_to_base64(filename: str) -> str: - data = open(str_excel_filename, 'rb').read() - b64 = b64encode(data).decode() - return b64 + """Converts an excel document into base64-encoded data.""" + with open(filename, 'rb') as fin: + return b64encode(fin.read()).decode()
Update excel_to_base64 to always close file handles
## Code Before: """Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe to a base64-encoded CSV representation of that data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(str_excel_filename) -> str: data = open(str_excel_filename, 'rb').read() b64 = b64encode(data).decode() return b64 ## Instruction: Update excel_to_base64 to always close file handles ## Code After: """Utils.""" from base64 import b64encode import pandas as pd def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe into csv base64-encoded data. This is useful for building datauris for use to download the data in the browser. Arguments: df: The dataframe to convert """ csv = df.to_csv(index=False) b64 = b64encode(csv.encode()).decode() return b64 def excel_to_base64(filename: str) -> str: """Converts an excel document into base64-encoded data.""" with open(filename, 'rb') as fin: return b64encode(fin.read()).decode()
// ... existing code ... def dataframe_to_base64(df: pd.DataFrame) -> str: """Converts a dataframe into csv base64-encoded data. // ... modified code ... def excel_to_base64(filename: str) -> str: """Converts an excel document into base64-encoded data.""" with open(filename, 'rb') as fin: return b64encode(fin.read()).decode() // ... rest of the code ...
7a4f4d2456c5ed0609efe7777d2b9e22854ac449
social_django/compat.py
social_django/compat.py
import six import django from django.db import models if django.VERSION >= (2, 0): from django.urls import reverse else: from django.core.urlresolvers import reverse if django.VERSION >= (1, 10): from django.utils.deprecation import MiddlewareMixin else: MiddlewareMixin = object def get_rel_model(field): if django.VERSION >= (2, 0): return field.model user_model = field.rel.to if isinstance(user_model, six.string_types): app_label, model_name = user_model.split('.') user_model = models.get_model(app_label, model_name) return user_model
import six import django from django.db import models if django.VERSION >= (2, 0): from django.urls import reverse else: from django.core.urlresolvers import reverse if django.VERSION >= (1, 10): from django.utils.deprecation import MiddlewareMixin else: MiddlewareMixin = object def get_rel_model(field): if django.VERSION >= (2, 0): return field.remote_field.model user_model = field.rel.to if isinstance(user_model, six.string_types): app_label, model_name = user_model.split('.') user_model = models.get_model(app_label, model_name) return user_model
Fix getting model of foreign key field.
Fix getting model of foreign key field.
Python
bsd-3-clause
python-social-auth/social-app-django,python-social-auth/social-app-django,python-social-auth/social-app-django
import six import django from django.db import models if django.VERSION >= (2, 0): from django.urls import reverse else: from django.core.urlresolvers import reverse if django.VERSION >= (1, 10): from django.utils.deprecation import MiddlewareMixin else: MiddlewareMixin = object def get_rel_model(field): if django.VERSION >= (2, 0): - return field.model + return field.remote_field.model user_model = field.rel.to if isinstance(user_model, six.string_types): app_label, model_name = user_model.split('.') user_model = models.get_model(app_label, model_name) return user_model
Fix getting model of foreign key field.
## Code Before: import six import django from django.db import models if django.VERSION >= (2, 0): from django.urls import reverse else: from django.core.urlresolvers import reverse if django.VERSION >= (1, 10): from django.utils.deprecation import MiddlewareMixin else: MiddlewareMixin = object def get_rel_model(field): if django.VERSION >= (2, 0): return field.model user_model = field.rel.to if isinstance(user_model, six.string_types): app_label, model_name = user_model.split('.') user_model = models.get_model(app_label, model_name) return user_model ## Instruction: Fix getting model of foreign key field. ## Code After: import six import django from django.db import models if django.VERSION >= (2, 0): from django.urls import reverse else: from django.core.urlresolvers import reverse if django.VERSION >= (1, 10): from django.utils.deprecation import MiddlewareMixin else: MiddlewareMixin = object def get_rel_model(field): if django.VERSION >= (2, 0): return field.remote_field.model user_model = field.rel.to if isinstance(user_model, six.string_types): app_label, model_name = user_model.split('.') user_model = models.get_model(app_label, model_name) return user_model
// ... existing code ... if django.VERSION >= (2, 0): return field.remote_field.model // ... rest of the code ...